context
stringlengths
2.52k
185k
gt
stringclasses
1 value
//
// Copyright (c) Microsoft and contributors.  All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//

// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.

using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using Hyak.Common;
using Microsoft.Azure;
using Microsoft.Azure.Management.Network;
using Microsoft.Azure.Management.Network.Models;
using Newtonsoft.Json.Linq;

namespace Microsoft.Azure.Management.Network
{
    /// <summary>
    /// The Windows Azure Network management API provides a RESTful set of web
    /// services that interact with the Windows Azure Networks service to
    /// manage your network resources. The API has entities that capture the
    /// relationship between an end user and the Windows Azure Networks
    /// service.
    /// </summary>
    public partial class NetworkResourceProviderClient : ServiceClient<NetworkResourceProviderClient>, INetworkResourceProviderClient
    {
        private string _apiVersion;

        /// <summary>
        /// Gets the API version.
        /// </summary>
        public string ApiVersion
        {
            get { return this._apiVersion; }
        }

        private Uri _baseUri;

        /// <summary>
        /// Gets the URI used as the base for all cloud service requests.
        /// </summary>
        public Uri BaseUri
        {
            get { return this._baseUri; }
        }

        private SubscriptionCloudCredentials _credentials;

        /// <summary>
        /// Gets subscription credentials which uniquely identify Microsoft
        /// Azure subscription. The subscription ID forms part of the URI for
        /// every service call.
        /// </summary>
        public SubscriptionCloudCredentials Credentials
        {
            get { return this._credentials; }
        }

        private int _longRunningOperationInitialTimeout;

        /// <summary>
        /// Gets or sets the initial timeout for Long Running Operations.
        /// </summary>
        public int LongRunningOperationInitialTimeout
        {
            get { return this._longRunningOperationInitialTimeout; }
            set { this._longRunningOperationInitialTimeout = value; }
        }

        private int _longRunningOperationRetryTimeout;

        /// <summary>
        /// Gets or sets the retry timeout for Long Running Operations.
        /// </summary>
        public int LongRunningOperationRetryTimeout
        {
            get { return this._longRunningOperationRetryTimeout; }
            set { this._longRunningOperationRetryTimeout = value; }
        }

        private IApplicationGatewayOperations _applicationGateways;

        /// <summary>
        /// Operations for managing the application gateways for your
        /// subscription.
        /// </summary>
        public virtual IApplicationGatewayOperations ApplicationGateways
        {
            get { return this._applicationGateways; }
        }

        private IExpressRouteCircuitAuthorizationOperations _expressRouteCircuitAuthorizations;

        /// <summary>
        /// Operations for managing the Authorizations for your subscription.
        /// </summary>
        public virtual IExpressRouteCircuitAuthorizationOperations ExpressRouteCircuitAuthorizations
        {
            get { return this._expressRouteCircuitAuthorizations; }
        }

        private IExpressRouteCircuitOperations _expressRouteCircuits;

        /// <summary>
        /// Operations for managing the ExpressRouteCircuits for your
        /// subscription.
        /// </summary>
        public virtual IExpressRouteCircuitOperations ExpressRouteCircuits
        {
            get { return this._expressRouteCircuits; }
        }

        private IExpressRouteCircuitPeeringOperations _expressRouteCircuitPeerings;

        /// <summary>
        /// Operations for managing the Peerings for your subscription.
        /// </summary>
        public virtual IExpressRouteCircuitPeeringOperations ExpressRouteCircuitPeerings
        {
            get { return this._expressRouteCircuitPeerings; }
        }

        private IExpressRouteServiceProviderOperations _expressRouteServiceProviders;

        /// <summary>
        /// Operations for managing the ExpressRouteServiceProviders for your
        /// subscription.
        /// </summary>
        public virtual IExpressRouteServiceProviderOperations ExpressRouteServiceProviders
        {
            get { return this._expressRouteServiceProviders; }
        }

        private ILoadBalancerOperations _loadBalancers;

        /// <summary>
        /// Operations for managing the load balancers for your subscription.
        /// </summary>
        public virtual ILoadBalancerOperations LoadBalancers
        {
            get { return this._loadBalancers; }
        }

        private ILocalNetworkGatewayOperations _localNetworkGateways;

        /// <summary>
        /// Operations for managing the local network gateways for your
        /// subscription.
        /// </summary>
        public virtual ILocalNetworkGatewayOperations LocalNetworkGateways
        {
            get { return this._localNetworkGateways; }
        }

        private INetworkInterfaceOperations _networkInterfaces;

        /// <summary>
        /// Operations for managing the network interfaces for your
        /// subscription.
        /// </summary>
        public virtual INetworkInterfaceOperations NetworkInterfaces
        {
            get { return this._networkInterfaces; }
        }

        private INetworkSecurityGroupOperations _networkSecurityGroups;

        /// <summary>
        /// Operations for managing the NetworkSecurityGroups for your
        /// subscription.
        /// </summary>
        public virtual INetworkSecurityGroupOperations NetworkSecurityGroups
        {
            get { return this._networkSecurityGroups; }
        }

        private IPublicIpAddressOperations _publicIpAddresses;

        /// <summary>
        /// Operations for managing the PublicIPAddress for your subscription.
        /// </summary>
        public virtual IPublicIpAddressOperations PublicIpAddresses
        {
            get { return this._publicIpAddresses; }
        }

        private IRouteOperations _routes;

        /// <summary>
        /// Operations for managing the Routes for your subscription.
        /// </summary>
        public virtual IRouteOperations Routes
        {
            get { return this._routes; }
        }

        private IRouteTableOperations _routeTables;

        /// <summary>
        /// Operations for managing the RouteTables for your subscription.
        /// </summary>
        public virtual IRouteTableOperations RouteTables
        {
            get { return this._routeTables; }
        }

        private ISecurityRuleOperations _securityRules;

        /// <summary>
        /// Operations for managing the SecurityRules for your subscription.
        /// </summary>
        public virtual ISecurityRuleOperations SecurityRules
        {
            get { return this._securityRules; }
        }

        private ISubnetOperations _subnets;

        /// <summary>
        /// Operations for managing the subnets for your subscription.
        /// </summary>
        public virtual ISubnetOperations Subnets
        {
            get { return this._subnets; }
        }

        private IUsageOperations _usages;

        /// <summary>
        /// Operations for listing usage.
        /// </summary>
        public virtual IUsageOperations Usages
        {
            get { return this._usages; }
        }

        private IVirtualNetworkGatewayConnectionOperations _virtualNetworkGatewayConnections;

        /// <summary>
        /// Operations for managing the virtual network gateway connections
        /// for your subscription.
        /// </summary>
        public virtual IVirtualNetworkGatewayConnectionOperations VirtualNetworkGatewayConnections
        {
            get { return this._virtualNetworkGatewayConnections; }
        }

        private IVirtualNetworkGatewayOperations _virtualNetworkGateways;

        /// <summary>
        /// Operations for managing the virtual network gateways for your
        /// subscription.
        /// </summary>
        public virtual IVirtualNetworkGatewayOperations VirtualNetworkGateways
        {
            get { return this._virtualNetworkGateways; }
        }

        private IVirtualNetworkOperations _virtualNetworks;

        /// <summary>
        /// Operations for managing the Virtual Networks for your
        /// subscription.
        /// </summary>
        public virtual IVirtualNetworkOperations VirtualNetworks
        {
            get { return this._virtualNetworks; }
        }

        /// <summary>
        /// Initializes a new instance of the NetworkResourceProviderClient
        /// class, wiring up every operation group and applying the default
        /// API version, timeouts, and HTTP timeout.
        /// </summary>
        public NetworkResourceProviderClient()
            : base()
        {
            this._applicationGateways = new ApplicationGatewayOperations(this);
            this._expressRouteCircuitAuthorizations = new ExpressRouteCircuitAuthorizationOperations(this);
            this._expressRouteCircuits = new ExpressRouteCircuitOperations(this);
            this._expressRouteCircuitPeerings = new ExpressRouteCircuitPeeringOperations(this);
            this._expressRouteServiceProviders = new ExpressRouteServiceProviderOperations(this);
            this._loadBalancers = new LoadBalancerOperations(this);
            this._localNetworkGateways = new LocalNetworkGatewayOperations(this);
            this._networkInterfaces = new NetworkInterfaceOperations(this);
            this._networkSecurityGroups = new NetworkSecurityGroupOperations(this);
            this._publicIpAddresses = new PublicIpAddressOperations(this);
            this._routes = new RouteOperations(this);
            this._routeTables = new RouteTableOperations(this);
            this._securityRules = new SecurityRuleOperations(this);
            this._subnets = new SubnetOperations(this);
            this._usages = new UsageOperations(this);
            this._virtualNetworkGatewayConnections = new VirtualNetworkGatewayConnectionOperations(this);
            this._virtualNetworkGateways = new VirtualNetworkGatewayOperations(this);
            this._virtualNetworks = new VirtualNetworkOperations(this);
            this._apiVersion = "2015-05-01-preview";
            // -1 lets the long-running-operation polling helpers fall back to
            // their own default intervals.
            this._longRunningOperationInitialTimeout = -1;
            this._longRunningOperationRetryTimeout = -1;
            this.HttpClient.Timeout = TimeSpan.FromSeconds(300);
        }

        /// <summary>
        /// Initializes a new instance of the NetworkResourceProviderClient
        /// class.
        /// </summary>
        /// <param name='credentials'>
        /// Required. Subscription credentials which uniquely identify the
        /// Microsoft Azure subscription. The subscription ID forms part of
        /// the URI for every service call.
        /// </param>
        /// <param name='baseUri'>
        /// Optional. The URI used as the base for all cloud service requests.
        /// </param>
        public NetworkResourceProviderClient(SubscriptionCloudCredentials credentials, Uri baseUri)
            : this()
        {
            if (credentials == null)
            {
                throw new ArgumentNullException("credentials");
            }
            if (baseUri == null)
            {
                throw new ArgumentNullException("baseUri");
            }
            this._credentials = credentials;
            this._baseUri = baseUri;

            this.Credentials.InitializeServiceClient(this);
        }

        /// <summary>
        /// Initializes a new instance of the NetworkResourceProviderClient
        /// class against the public Azure endpoint.
        /// </summary>
        /// <param name='credentials'>
        /// Required. Subscription credentials which uniquely identify the
        /// Microsoft Azure subscription. The subscription ID forms part of
        /// the URI for every service call.
        /// </param>
        public NetworkResourceProviderClient(SubscriptionCloudCredentials credentials)
            : this()
        {
            if (credentials == null)
            {
                throw new ArgumentNullException("credentials");
            }
            this._credentials = credentials;
            this._baseUri = new Uri("https://management.azure.com/");

            this.Credentials.InitializeServiceClient(this);
        }

        /// <summary>
        /// Initializes a new instance of the NetworkResourceProviderClient
        /// class using the supplied HttpClient.
        /// </summary>
        /// <param name='httpClient'>
        /// The Http client
        /// </param>
        public NetworkResourceProviderClient(HttpClient httpClient)
            : base(httpClient)
        {
            this._applicationGateways = new ApplicationGatewayOperations(this);
            this._expressRouteCircuitAuthorizations = new ExpressRouteCircuitAuthorizationOperations(this);
            this._expressRouteCircuits = new ExpressRouteCircuitOperations(this);
            this._expressRouteCircuitPeerings = new ExpressRouteCircuitPeeringOperations(this);
            this._expressRouteServiceProviders = new ExpressRouteServiceProviderOperations(this);
            this._loadBalancers = new LoadBalancerOperations(this);
            this._localNetworkGateways = new LocalNetworkGatewayOperations(this);
            this._networkInterfaces = new NetworkInterfaceOperations(this);
            this._networkSecurityGroups = new NetworkSecurityGroupOperations(this);
            this._publicIpAddresses = new PublicIpAddressOperations(this);
            this._routes = new RouteOperations(this);
            this._routeTables = new RouteTableOperations(this);
            this._securityRules = new SecurityRuleOperations(this);
            this._subnets = new SubnetOperations(this);
            this._usages = new UsageOperations(this);
            this._virtualNetworkGatewayConnections = new VirtualNetworkGatewayConnectionOperations(this);
            this._virtualNetworkGateways = new VirtualNetworkGatewayOperations(this);
            this._virtualNetworks = new VirtualNetworkOperations(this);
            this._apiVersion = "2015-05-01-preview";
            this._longRunningOperationInitialTimeout = -1;
            this._longRunningOperationRetryTimeout = -1;
            this.HttpClient.Timeout = TimeSpan.FromSeconds(300);
        }

        /// <summary>
        /// Initializes a new instance of the NetworkResourceProviderClient
        /// class.
        /// </summary>
        /// <param name='credentials'>
        /// Required. Subscription credentials which uniquely identify the
        /// Microsoft Azure subscription. The subscription ID forms part of
        /// the URI for every service call.
        /// </param>
        /// <param name='baseUri'>
        /// Optional. The URI used as the base for all cloud service requests.
        /// </param>
        /// <param name='httpClient'>
        /// The Http client
        /// </param>
        public NetworkResourceProviderClient(SubscriptionCloudCredentials credentials, Uri baseUri, HttpClient httpClient)
            : this(httpClient)
        {
            if (credentials == null)
            {
                throw new ArgumentNullException("credentials");
            }
            if (baseUri == null)
            {
                throw new ArgumentNullException("baseUri");
            }
            this._credentials = credentials;
            this._baseUri = baseUri;

            this.Credentials.InitializeServiceClient(this);
        }

        /// <summary>
        /// Initializes a new instance of the NetworkResourceProviderClient
        /// class against the public Azure endpoint, using the supplied
        /// HttpClient.
        /// </summary>
        /// <param name='credentials'>
        /// Required. Subscription credentials which uniquely identify the
        /// Microsoft Azure subscription. The subscription ID forms part of
        /// the URI for every service call.
        /// </param>
        /// <param name='httpClient'>
        /// The Http client
        /// </param>
        public NetworkResourceProviderClient(SubscriptionCloudCredentials credentials, HttpClient httpClient)
            : this(httpClient)
        {
            if (credentials == null)
            {
                throw new ArgumentNullException("credentials");
            }
            this._credentials = credentials;
            this._baseUri = new Uri("https://management.azure.com/");

            this.Credentials.InitializeServiceClient(this);
        }

        /// <summary>
        /// Clones properties from the current instance to another
        /// NetworkResourceProviderClient instance.
        /// </summary>
        /// <param name='client'>
        /// Instance of NetworkResourceProviderClient to clone to
        /// </param>
        protected override void Clone(ServiceClient<NetworkResourceProviderClient> client)
        {
            base.Clone(client);

            if (client is NetworkResourceProviderClient)
            {
                NetworkResourceProviderClient clonedClient = ((NetworkResourceProviderClient)client);

                clonedClient._credentials = this._credentials;
                clonedClient._baseUri = this._baseUri;
                clonedClient._apiVersion = this._apiVersion;
                clonedClient._longRunningOperationInitialTimeout = this._longRunningOperationInitialTimeout;
                clonedClient._longRunningOperationRetryTimeout = this._longRunningOperationRetryTimeout;

                clonedClient.Credentials.InitializeServiceClient(clonedClient);
            }
        }

        /// <summary>
        /// Checks whether a domain name in the cloudapp.net zone is available
        /// for use.
        /// </summary>
        /// <param name='location'>
        /// Required. The location of the domain name
        /// </param>
        /// <param name='domainNameLabel'>
        /// Required. The domain name to be verified. It must conform to the
        /// following regular expression: ^[a-z][a-z0-9-]{1,61}[a-z0-9]$.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// Response for the CheckDnsNameAvailability API service call
        /// </returns>
        public async Task<DnsNameAvailabilityResponse> CheckDnsNameAvailabilityAsync(string location, string domainNameLabel, CancellationToken cancellationToken)
        {
            // Validate
            if (location == null)
            {
                throw new ArgumentNullException("location");
            }
            if (domainNameLabel == null)
            {
                throw new ArgumentNullException("domainNameLabel");
            }

            // Tracing
            bool isTracingEnabled = TracingAdapter.IsEnabled;
            string invocationId = null;
            if (isTracingEnabled)
            {
                invocationId = TracingAdapter.NextInvocationId.ToString();
                Dictionary<string, object> traceParameters = new Dictionary<string, object>();
                traceParameters.Add("location", location);
                traceParameters.Add("domainNameLabel", domainNameLabel);
                TracingAdapter.Enter(invocationId, this, "CheckDnsNameAvailabilityAsync", traceParameters);
            }

            // Construct URL
            string requestUrl = "";
            requestUrl = requestUrl + "/subscriptions/";
            if (this.Credentials.SubscriptionId != null)
            {
                requestUrl = requestUrl + Uri.EscapeDataString(this.Credentials.SubscriptionId);
            }
            requestUrl = requestUrl + "/providers/";
            requestUrl = requestUrl + "Microsoft.Network";
            requestUrl = requestUrl + "/locations/";
            requestUrl = requestUrl + Uri.EscapeDataString(location);
            requestUrl = requestUrl + "/CheckDnsNameAvailability";
            List<string> queryParams = new List<string>();
            queryParams.Add("domainNameLabel=" + Uri.EscapeDataString(domainNameLabel));
            queryParams.Add("api-version=2015-05-01-preview");
            if (queryParams.Count > 0)
            {
                requestUrl = requestUrl + "?" + string.Join("&", queryParams);
            }
            string baseUrl = this.BaseUri.AbsoluteUri;
            // Trim '/' character from the end of baseUrl and beginning of url.
            if (baseUrl[baseUrl.Length - 1] == '/')
            {
                baseUrl = baseUrl.Substring(0, baseUrl.Length - 1);
            }
            if (requestUrl[0] == '/')
            {
                requestUrl = requestUrl.Substring(1);
            }
            requestUrl = baseUrl + "/" + requestUrl;
            requestUrl = requestUrl.Replace(" ", "%20");

            // Create HTTP transport objects
            HttpRequestMessage request = null;
            try
            {
                request = new HttpRequestMessage();
                request.Method = HttpMethod.Get;
                request.RequestUri = new Uri(requestUrl);

                // Set Headers

                // Set Credentials
                cancellationToken.ThrowIfCancellationRequested();
                await this.Credentials.ProcessHttpRequestAsync(request, cancellationToken).ConfigureAwait(false);

                // Send Request
                HttpResponseMessage response = null;
                try
                {
                    if (isTracingEnabled)
                    {
                        TracingAdapter.SendRequest(invocationId, request);
                    }
                    cancellationToken.ThrowIfCancellationRequested();
                    response = await this.HttpClient.SendAsync(request, cancellationToken).ConfigureAwait(false);
                    if (isTracingEnabled)
                    {
                        TracingAdapter.ReceiveResponse(invocationId, response);
                    }
                    HttpStatusCode statusCode = response.StatusCode;
                    if (statusCode != HttpStatusCode.OK)
                    {
                        cancellationToken.ThrowIfCancellationRequested();
                        CloudException ex = CloudException.Create(request, null, response, await response.Content.ReadAsStringAsync().ConfigureAwait(false));
                        if (isTracingEnabled)
                        {
                            TracingAdapter.Error(invocationId, ex);
                        }
                        throw ex;
                    }

                    // Create Result
                    DnsNameAvailabilityResponse result = null;
                    // Deserialize Response
                    if (statusCode == HttpStatusCode.OK)
                    {
                        cancellationToken.ThrowIfCancellationRequested();
                        string content = await response.Content.ReadAsStringAsync().ConfigureAwait(false);
                        result = new DnsNameAvailabilityResponse();
                        JToken responseJson = null;
                        if (string.IsNullOrEmpty(content) == false)
                        {
                            responseJson = JToken.Parse(content);
                        }

                        if (responseJson != null && responseJson.Type != JTokenType.Null)
                        {
                            JToken availableValue = responseJson["available"];
                            if (availableValue != null && availableValue.Type != JTokenType.Null)
                            {
                                bool availableInstance = ((bool)availableValue);
                                result.DnsNameAvailability = availableInstance;
                            }
                        }
                    }
                    result.StatusCode = statusCode;
                    if (response.Headers.Contains("x-ms-request-id"))
                    {
                        result.RequestId = response.Headers.GetValues("x-ms-request-id").FirstOrDefault();
                    }

                    if (isTracingEnabled)
                    {
                        TracingAdapter.Exit(invocationId, result);
                    }
                    return result;
                }
                finally
                {
                    if (response != null)
                    {
                        response.Dispose();
                    }
                }
            }
            finally
            {
                if (request != null)
                {
                    request.Dispose();
                }
            }
        }

        /// <summary>
        /// The Get Operation Status operation returns the status of the
        /// specified operation. After calling an asynchronous operation, you
        /// can call Get Operation Status to determine whether the operation
        /// has succeeded, failed, or is still in progress.
        /// </summary>
        /// <param name='azureAsyncOperation'>
        /// Required. Location value returned by the Begin operation.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// The response body contains the status of the specified asynchronous
        /// operation, indicating whether it has succeeded, is in progress, or
        /// has failed. Note that this status is distinct from the HTTP status
        /// code returned for the Get Operation Status operation itself. If
        /// the asynchronous operation succeeded, the response body includes
        /// the HTTP status code for the successful request. If the
        /// asynchronous operation failed, the response body includes the HTTP
        /// status code for the failed request and error information regarding
        /// the failure.
        /// </returns>
        public async Task<AzureAsyncOperationResponse> GetLongRunningOperationStatusAsync(string azureAsyncOperation, CancellationToken cancellationToken)
        {
            // Validate
            if (azureAsyncOperation == null)
            {
                throw new ArgumentNullException("azureAsyncOperation");
            }

            // Tracing
            bool isTracingEnabled = TracingAdapter.IsEnabled;
            string invocationId = null;
            if (isTracingEnabled)
            {
                invocationId = TracingAdapter.NextInvocationId.ToString();
                Dictionary<string, object> traceParameters = new Dictionary<string, object>();
                traceParameters.Add("azureAsyncOperation", azureAsyncOperation);
                TracingAdapter.Enter(invocationId, this, "GetLongRunningOperationStatusAsync", traceParameters);
            }

            // Construct URL. The operation status endpoint is an absolute URL
            // returned by the service, so it is used as-is (no BaseUri prefix).
            string requestUrl = "";
            requestUrl = requestUrl + azureAsyncOperation;
            requestUrl = requestUrl.Replace(" ", "%20");

            // Create HTTP transport objects
            HttpRequestMessage request = null;
            try
            {
                request = new HttpRequestMessage();
                request.Method = HttpMethod.Get;
                request.RequestUri = new Uri(requestUrl);

                // Set Headers
                request.Headers.Add("x-ms-version", "2015-05-01-preview");

                // Set Credentials
                cancellationToken.ThrowIfCancellationRequested();
                await this.Credentials.ProcessHttpRequestAsync(request, cancellationToken).ConfigureAwait(false);

                // Send Request
                HttpResponseMessage response = null;
                try
                {
                    if (isTracingEnabled)
                    {
                        TracingAdapter.SendRequest(invocationId, request);
                    }
                    cancellationToken.ThrowIfCancellationRequested();
                    response = await this.HttpClient.SendAsync(request, cancellationToken).ConfigureAwait(false);
                    if (isTracingEnabled)
                    {
                        TracingAdapter.ReceiveResponse(invocationId, response);
                    }
                    HttpStatusCode statusCode = response.StatusCode;
                    if (statusCode != HttpStatusCode.OK && statusCode != HttpStatusCode.Accepted)
                    {
                        cancellationToken.ThrowIfCancellationRequested();
                        CloudException ex = CloudException.Create(request, null, response, await response.Content.ReadAsStringAsync().ConfigureAwait(false));
                        if (isTracingEnabled)
                        {
                            TracingAdapter.Error(invocationId, ex);
                        }
                        throw ex;
                    }

                    // Create Result
                    AzureAsyncOperationResponse result = null;
                    // Deserialize Response
                    if (statusCode == HttpStatusCode.OK || statusCode == HttpStatusCode.Accepted)
                    {
                        cancellationToken.ThrowIfCancellationRequested();
                        string content = await response.Content.ReadAsStringAsync().ConfigureAwait(false);
                        result = new AzureAsyncOperationResponse();
                        JToken responseJson = null;
                        if (string.IsNullOrEmpty(content) == false)
                        {
                            responseJson = JToken.Parse(content);
                        }

                        if (responseJson != null && responseJson.Type != JTokenType.Null)
                        {
                            JToken statusValue = responseJson["status"];
                            if (statusValue != null && statusValue.Type != JTokenType.Null)
                            {
                                string statusInstance = ((string)statusValue);
                                result.Status = statusInstance;
                            }

                            JToken errorValue = responseJson["error"];
                            if (errorValue != null && errorValue.Type != JTokenType.Null)
                            {
                                Error errorInstance = new Error();
                                result.Error = errorInstance;

                                JToken codeValue = errorValue["code"];
                                if (codeValue != null && codeValue.Type != JTokenType.Null)
                                {
                                    string codeInstance = ((string)codeValue);
                                    errorInstance.Code = codeInstance;
                                }

                                JToken messageValue = errorValue["message"];
                                if (messageValue != null && messageValue.Type != JTokenType.Null)
                                {
                                    string messageInstance = ((string)messageValue);
                                    errorInstance.Message = messageInstance;
                                }

                                JToken targetValue = errorValue["target"];
                                if (targetValue != null && targetValue.Type != JTokenType.Null)
                                {
                                    string targetInstance = ((string)targetValue);
                                    errorInstance.Target = targetInstance;
                                }

                                JToken detailsArray = errorValue["details"];
                                if (detailsArray != null && detailsArray.Type != JTokenType.Null)
                                {
                                    foreach (JToken detailsValue in ((JArray)detailsArray))
                                    {
                                        ErrorDetails errorDetailsInstance = new ErrorDetails();
                                        errorInstance.Details.Add(errorDetailsInstance);

                                        JToken codeValue2 = detailsValue["code"];
                                        if (codeValue2 != null && codeValue2.Type != JTokenType.Null)
                                        {
                                            string codeInstance2 = ((string)codeValue2);
                                            errorDetailsInstance.Code = codeInstance2;
                                        }

                                        JToken targetValue2 = detailsValue["target"];
                                        if (targetValue2 != null && targetValue2.Type != JTokenType.Null)
                                        {
                                            string targetInstance2 = ((string)targetValue2);
                                            errorDetailsInstance.Target = targetInstance2;
                                        }

                                        JToken messageValue2 = detailsValue["message"];
                                        if (messageValue2 != null && messageValue2.Type != JTokenType.Null)
                                        {
                                            string messageInstance2 = ((string)messageValue2);
                                            errorDetailsInstance.Message = messageInstance2;
                                        }
                                    }
                                }

                                JToken innerErrorValue = errorValue["innerError"];
                                if (innerErrorValue != null && innerErrorValue.Type != JTokenType.Null)
                                {
                                    string innerErrorInstance = ((string)innerErrorValue);
                                    errorInstance.InnerError = innerErrorInstance;
                                }
                            }
                        }
                    }
                    result.StatusCode = statusCode;
                    if (response.Headers.Contains("x-ms-request-id"))
                    {
                        result.RequestId = response.Headers.GetValues("x-ms-request-id").FirstOrDefault();
                    }

                    if (isTracingEnabled)
                    {
                        TracingAdapter.Exit(invocationId, result);
                    }
                    return result;
                }
                finally
                {
                    if (response != null)
                    {
                        response.Dispose();
                    }
                }
            }
            finally
            {
                if (request != null)
                {
                    request.Dispose();
                }
            }
        }
    }
}
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. using System; using System.Collections; using System.Collections.Generic; using System.Collections.ObjectModel; using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.Diagnostics.Contracts; using System.Linq; using System.Runtime.Serialization; using System.Text; using System.Threading; using Validation; using BCL = System.Collections.Generic; namespace System.Collections.Immutable { /// <summary> /// An immutable unordered dictionary implementation. /// </summary> /// <typeparam name="TKey">The type of the key.</typeparam> /// <typeparam name="TValue">The type of the value.</typeparam> [DebuggerDisplay("Count = {Count}")] [DebuggerTypeProxy(typeof(ImmutableDictionary<,>.DebuggerProxy))] public sealed partial class ImmutableDictionary<TKey, TValue> : IImmutableDictionary<TKey, TValue>, IImmutableDictionaryInternal<TKey, TValue>, IHashKeyCollection<TKey>, IDictionary<TKey, TValue>, IDictionary { /// <summary> /// An empty immutable dictionary with default equality comparers. /// </summary> [SuppressMessage("Microsoft.Security", "CA2104:DoNotDeclareReadOnlyMutableReferenceTypes")] public static readonly ImmutableDictionary<TKey, TValue> Empty = new ImmutableDictionary<TKey, TValue>(); /// <summary> /// The singleton delegate that freezes the contents of hash buckets when the root of the data structure is frozen. /// </summary> private static readonly Action<KeyValuePair<int, HashBucket>> FreezeBucketAction = (kv) => kv.Value.Freeze(); /// <summary> /// The number of elements in the collection. /// </summary> private readonly int count; /// <summary> /// The root node of the tree that stores this map. /// </summary> private readonly ImmutableSortedDictionary<int, HashBucket>.Node root; /// <summary> /// The comparer used when comparing hash buckets. 
/// </summary> private readonly Comparers comparers; /// <summary> /// Initializes a new instance of the <see cref="ImmutableDictionary&lt;TKey, TValue&gt;"/> class. /// </summary> /// <param name="root">The root.</param> /// <param name="comparers">The comparers.</param> /// <param name="count">The number of elements in the map.</param> private ImmutableDictionary(ImmutableSortedDictionary<int, HashBucket>.Node root, Comparers comparers, int count) : this(Requires.NotNull(comparers, "comparers")) { Requires.NotNull(root, "root"); root.Freeze(FreezeBucketAction); this.root = root; this.count = count; } /// <summary> /// Initializes a new instance of the <see cref="ImmutableDictionary&lt;TKey, TValue&gt;"/> class. /// </summary> /// <param name="comparers">The comparers.</param> private ImmutableDictionary(Comparers comparers = null) { this.comparers = comparers ?? Comparers.Get(EqualityComparer<TKey>.Default, EqualityComparer<TValue>.Default); this.root = ImmutableSortedDictionary<int, HashBucket>.Node.EmptyNode; } /// <summary> /// How to respond when a key collision is discovered. /// </summary> internal enum KeyCollisionBehavior { /// <summary> /// Sets the value for the given key, even if that overwrites an existing value. /// </summary> SetValue, /// <summary> /// Skips the mutating operation if a key conflict is detected. /// </summary> Skip, /// <summary> /// Throw an exception if the key already exists with a different key. /// </summary> ThrowIfValueDifferent, /// <summary> /// Throw an exception if the key already exists regardless of its value. /// </summary> ThrowAlways, } /// <summary> /// The result of a mutation operation. /// </summary> internal enum OperationResult { /// <summary> /// The change was applied and did not require a change to the number of elements in the collection. /// </summary> AppliedWithoutSizeChange, /// <summary> /// The change required element(s) to be added or removed from the collection. 
/// </summary> SizeChanged, /// <summary> /// No change was required (the operation ended in a no-op). /// </summary> NoChangeRequired, } #region Public Properties /// <summary> /// See the <see cref="IImmutableDictionary&lt;TKey, TValue&gt;"/> interface. /// </summary> public ImmutableDictionary<TKey, TValue> Clear() { return this.IsEmpty ? this : EmptyWithComparers(this.comparers); } /// <summary> /// Gets the number of elements in this collection. /// </summary> public int Count { get { return this.count; } } /// <summary> /// Gets a value indicating whether this instance is empty. /// </summary> /// <value> /// <c>true</c> if this instance is empty; otherwise, <c>false</c>. /// </value> public bool IsEmpty { get { return this.Count == 0; } } /// <summary> /// Gets the key comparer. /// </summary> public IEqualityComparer<TKey> KeyComparer { get { return this.comparers.KeyComparer; } } /// <summary> /// Gets the value comparer used to determine whether values are equal. /// </summary> public IEqualityComparer<TValue> ValueComparer { get { return this.comparers.ValueComparer; } } /// <summary> /// Gets the keys in the map. /// </summary> public IEnumerable<TKey> Keys { get { foreach (var bucket in this.root) { foreach (var item in bucket.Value) { yield return item.Key; } } } } /// <summary> /// Gets the values in the map. /// </summary> public IEnumerable<TValue> Values { get { foreach (var bucket in this.root) { foreach (var item in bucket.Value) { yield return item.Value; } } } } #endregion #region IImmutableDictionary<TKey,TValue> Properties /// <summary> /// Gets the empty instance. /// </summary> [ExcludeFromCodeCoverage] IImmutableDictionary<TKey, TValue> IImmutableDictionary<TKey, TValue>.Clear() { return this.Clear(); } #endregion #region IDictionary<TKey, TValue> Properties /// <summary> /// Gets the keys. 
/// </summary> ICollection<TKey> IDictionary<TKey, TValue>.Keys { get { return new KeysCollectionAccessor<TKey, TValue>(this); } } /// <summary> /// Gets the values. /// </summary> ICollection<TValue> IDictionary<TKey, TValue>.Values { get { return new ValuesCollectionAccessor<TKey, TValue>(this); } } #endregion /// <summary> /// Gets a data structure that captures the current state of this map, as an input into a query or mutating function. /// </summary> private MutationInput Origin { get { return new MutationInput(this); } } /// <summary> /// Gets the <typeparamref name="TValue"/> with the specified key. /// </summary> public TValue this[TKey key] { get { Requires.NotNullAllowStructs(key, "key"); TValue value; if (this.TryGetValue(key, out value)) { return value; } throw new KeyNotFoundException(); } } /// <summary> /// Gets or sets the <typeparamref name="TValue"/> with the specified key. /// </summary> TValue IDictionary<TKey, TValue>.this[TKey key] { get { return this[key]; } set { throw new NotSupportedException(); } } #region ICollection<KeyValuePair<TKey, TValue>> Properties bool ICollection<KeyValuePair<TKey, TValue>>.IsReadOnly { get { return true; } } #endregion #region Public methods /// <summary> /// Creates a collection with the same contents as this collection that /// can be efficiently mutated across multiple operations using standard /// mutable interfaces. /// </summary> /// <remarks> /// This is an O(1) operation and results in only a single (small) memory allocation. /// The mutable collection that is returned is *not* thread-safe. /// </remarks> [Pure] public Builder ToBuilder() { // We must not cache the instance created here and return it to various callers. // Those who request a mutable collection must get references to the collection // that version independently of each other. return new Builder(this); } /// <summary> /// See the <see cref="IImmutableDictionary&lt;TKey, TValue&gt;"/> interface. 
        /// </summary>
        [Pure]
        public ImmutableDictionary<TKey, TValue> Add(TKey key, TValue value)
        {
            Requires.NotNullAllowStructs(key, "key");
            Contract.Ensures(Contract.Result<ImmutableDictionary<TKey, TValue>>() != null);

            // ThrowIfValueDifferent: re-adding the same key with an equal value is a no-op,
            // but a different value for an existing key throws.
            var result = Add(key, value, KeyCollisionBehavior.ThrowIfValueDifferent, new MutationInput(this));
            return result.Finalize(this);
        }

        /// <summary>
        /// See the <see cref="IImmutableDictionary&lt;TKey, TValue&gt;"/> interface.
        /// </summary>
        [Pure]
        [SuppressMessage("Microsoft.Design", "CA1006:DoNotNestGenericTypesInMemberSignatures")]
        public ImmutableDictionary<TKey, TValue> AddRange(IEnumerable<KeyValuePair<TKey, TValue>> pairs)
        {
            Requires.NotNull(pairs, "pairs");
            Contract.Ensures(Contract.Result<ImmutableDictionary<TKey, TValue>>() != null);
            return this.AddRange(pairs, false);
        }

        /// <summary>
        /// See the <see cref="IImmutableDictionary&lt;TKey, TValue&gt;"/> interface.
        /// </summary>
        [Pure]
        public ImmutableDictionary<TKey, TValue> SetItem(TKey key, TValue value)
        {
            Requires.NotNullAllowStructs(key, "key");
            Contract.Ensures(Contract.Result<ImmutableDictionary<TKey, TValue>>() != null);
            Contract.Ensures(!Contract.Result<ImmutableDictionary<TKey, TValue>>().IsEmpty);

            // SetValue: an existing key's value is replaced rather than throwing.
            var result = Add(key, value, KeyCollisionBehavior.SetValue, new MutationInput(this));
            return result.Finalize(this);
        }

        /// <summary>
        /// Applies a given set of key=value pairs to an immutable dictionary, replacing any conflicting keys in the resulting dictionary.
        /// </summary>
        /// <param name="items">The key=value pairs to set on the map. Any keys that conflict with existing keys will overwrite the previous values.</param>
        /// <returns>An immutable dictionary.</returns>
        [Pure]
        [SuppressMessage("Microsoft.Design", "CA1006:DoNotNestGenericTypesInMemberSignatures")]
        public ImmutableDictionary<TKey, TValue> SetItems(IEnumerable<KeyValuePair<TKey, TValue>> items)
        {
            Requires.NotNull(items, "items");
            Contract.Ensures(Contract.Result<ImmutableDictionary<TKey, TValue>>() != null);

            var result = AddRange(items, this.Origin, KeyCollisionBehavior.SetValue);
            return result.Finalize(this);
        }

        /// <summary>
        /// See the <see cref="IImmutableDictionary&lt;TKey, TValue&gt;"/> interface.
        /// </summary>
        [Pure]
        public ImmutableDictionary<TKey, TValue> Remove(TKey key)
        {
            Requires.NotNullAllowStructs(key, "key");
            Contract.Ensures(Contract.Result<ImmutableDictionary<TKey, TValue>>() != null);

            var result = Remove(key, new MutationInput(this));
            return result.Finalize(this);
        }

        /// <summary>
        /// See the <see cref="IImmutableDictionary&lt;TKey, TValue&gt;"/> interface.
        /// </summary>
        [Pure]
        public ImmutableDictionary<TKey, TValue> RemoveRange(IEnumerable<TKey> keys)
        {
            Requires.NotNull(keys, "keys");
            Contract.Ensures(Contract.Result<ImmutableDictionary<TKey, TValue>>() != null);

            // Work against local copies of the root and count so that all removals
            // are folded into a single new wrapper at the end.
            int count = this.count;
            var root = this.root;
            foreach (var key in keys)
            {
                int hashCode = this.KeyComparer.GetHashCode(key);
                HashBucket bucket;
                if (root.TryGetValue(hashCode, Comparer<int>.Default, out bucket))
                {
                    OperationResult result;
                    var newBucket = bucket.Remove(key, this.comparers.KeyOnlyComparer, out result);
                    root = UpdateRoot(root, hashCode, newBucket, this.comparers.HashBucketEqualityComparer);
                    if (result == OperationResult.SizeChanged)
                    {
                        count--;
                    }
                }
            }

            return this.Wrap(root, count);
        }

        /// <summary>
        /// Determines whether this map contains an element with the specified key.
        /// </summary>
        /// <param name="key">The key to locate.</param>
        /// <returns>
        /// <c>true</c> if this map contains an element with the specified key; otherwise, <c>false</c>.
        /// </returns>
        public bool ContainsKey(TKey key)
        {
            Requires.NotNullAllowStructs(key, "key");
            return ContainsKey(key, new MutationInput(this));
        }

        /// <summary>
        /// Determines whether this map contains the specified key/value pair,
        /// using both the key comparer and the value comparer.
        /// </summary>
        /// <param name="pair">The key value pair.</param>
        /// <returns>
        /// <c>true</c> if this map contains the specified key/value pair; otherwise, <c>false</c>.
        /// </returns>
        public bool Contains(KeyValuePair<TKey, TValue> pair)
        {
            return Contains(pair, this.Origin);
        }

        /// <summary>
        /// See the <see cref="IImmutableDictionary&lt;TKey, TValue&gt;"/> interface.
        /// </summary>
        public bool TryGetValue(TKey key, out TValue value)
        {
            Requires.NotNullAllowStructs(key, "key");
            return TryGetValue(key, this.Origin, out value);
        }

        /// <summary>
        /// See the <see cref="IImmutableDictionary&lt;TKey, TValue&gt;"/> interface.
        /// </summary>
        public bool TryGetKey(TKey equalKey, out TKey actualKey)
        {
            Requires.NotNullAllowStructs(equalKey, "equalKey");
            return TryGetKey(equalKey, this.Origin, out actualKey);
        }

        /// <summary>
        /// See the <see cref="IImmutableDictionary&lt;TKey, TValue&gt;"/> interface.
        /// </summary>
        [Pure]
        public ImmutableDictionary<TKey, TValue> WithComparers(IEqualityComparer<TKey> keyComparer, IEqualityComparer<TValue> valueComparer)
        {
            if (keyComparer == null)
            {
                keyComparer = EqualityComparer<TKey>.Default;
            }

            if (valueComparer == null)
            {
                valueComparer = EqualityComparer<TValue>.Default;
            }

            if (this.KeyComparer == keyComparer)
            {
                if (this.ValueComparer == valueComparer)
                {
                    return this;
                }
                else
                {
                    // When the key comparer is the same but the value comparer is different, we don't need a whole new tree
                    // because the structure of the tree does not depend on the value comparer.
                    // We just need a new wrapper around the same root that carries the new value comparer.
                    var comparers = this.comparers.WithValueComparer(valueComparer);
                    return new ImmutableDictionary<TKey, TValue>(this.root, comparers, this.count);
                }
            }
            else
            {
                // A different key comparer changes hash codes and bucket placement,
                // so the entire tree must be rebuilt from scratch.
                var comparers = Comparers.Get(keyComparer, valueComparer);
                var set = new ImmutableDictionary<TKey, TValue>(comparers);
                set = set.AddRange(this, avoidToHashMap: true);
                return set;
            }
        }

        /// <summary>
        /// See the <see cref="IImmutableDictionary&lt;TKey, TValue&gt;"/> interface.
        /// </summary>
        [Pure]
        public ImmutableDictionary<TKey, TValue> WithComparers(IEqualityComparer<TKey> keyComparer)
        {
            return this.WithComparers(keyComparer, this.comparers.ValueComparer);
        }

        /// <summary>
        /// Determines whether this dictionary contains an element with the specified value.
        /// </summary>
        /// <param name="value">
        /// The value to locate in the dictionary.
        /// The value can be null for reference types.
        /// </param>
        /// <returns>
        /// true if the dictionary contains an element with the specified value; otherwise, false.
        /// </returns>
        /// <remarks>This is an O(n) scan over all values.</remarks>
        [Pure]
        public bool ContainsValue(TValue value)
        {
            return this.Values.Contains(value, this.ValueComparer);
        }

        /// <summary>
        /// Returns an enumerator that iterates through the collection.
        /// </summary>
        /// <returns>
        /// A <see cref="T:System.Collections.Generic.IEnumerator`1"/> that can be used to iterate through the collection.
        /// </returns>
        public Enumerator GetEnumerator()
        {
            return new Enumerator(this.root);
        }

        #endregion

        #region IImmutableDictionary<TKey,TValue> Methods

        /// <summary>
        /// See the IImmutableMap{TKey,TValue} interface
        /// </summary>
        [ExcludeFromCodeCoverage]
        IImmutableDictionary<TKey, TValue> IImmutableDictionary<TKey, TValue>.Add(TKey key, TValue value)
        {
            return this.Add(key, value);
        }

        /// <summary>
        /// See the IImmutableMap{TKey,TValue} interface
        /// </summary>
        [ExcludeFromCodeCoverage]
        IImmutableDictionary<TKey, TValue> IImmutableDictionary<TKey, TValue>.SetItem(TKey key, TValue value)
        {
            return this.SetItem(key, value);
        }

        /// <summary>
        /// Applies a given set of key=value pairs to an immutable dictionary, replacing any conflicting keys in the resulting dictionary.
        /// </summary>
        /// <param name="items">The key=value pairs to set on the map. Any keys that conflict with existing keys will overwrite the previous values.</param>
        /// <returns>An immutable dictionary.</returns>
        IImmutableDictionary<TKey, TValue> IImmutableDictionary<TKey, TValue>.SetItems(IEnumerable<KeyValuePair<TKey, TValue>> items)
        {
            return this.SetItems(items);
        }

        /// <summary>
        /// See the IImmutableMap{TKey,TValue} interface
        /// </summary>
        [ExcludeFromCodeCoverage]
        IImmutableDictionary<TKey, TValue> IImmutableDictionary<TKey, TValue>.AddRange(IEnumerable<KeyValuePair<TKey, TValue>> pairs)
        {
            return this.AddRange(pairs);
        }

        /// <summary>
        /// See the IImmutableMap{TKey,TValue} interface
        /// </summary>
        [ExcludeFromCodeCoverage]
        IImmutableDictionary<TKey, TValue> IImmutableDictionary<TKey, TValue>.RemoveRange(IEnumerable<TKey> keys)
        {
            return this.RemoveRange(keys);
        }

        /// <summary>
        /// See the IImmutableMap{TKey,TValue} interface
        /// </summary>
        [ExcludeFromCodeCoverage]
        IImmutableDictionary<TKey, TValue> IImmutableDictionary<TKey, TValue>.Remove(TKey key)
        {
            return this.Remove(key);
        }

        #endregion

        #region IDictionary<TKey, TValue> Methods

        /// <summary>
        /// Adds an element with the provided key and value to the <see cref="T:System.Collections.Generic.IDictionary`2"/>.
        /// </summary>
        /// <param name="key">The object to use as the key of the element to add.</param>
        /// <param name="value">The object to use as the value of the element to add.</param>
        /// <exception cref="T:System.ArgumentNullException"><paramref name="key"/> is null.
        /// </exception>
        /// <exception cref="T:System.ArgumentException">
        /// An element with the same key already exists in the <see cref="T:System.Collections.Generic.IDictionary`2"/>.
        /// </exception>
        /// <exception cref="T:System.NotSupportedException">
        /// The <see cref="T:System.Collections.Generic.IDictionary`2"/> is read-only.
        /// </exception>
        void IDictionary<TKey, TValue>.Add(TKey key, TValue value)
        {
            // Immutable collections do not support in-place mutation.
            throw new NotSupportedException();
        }

        /// <summary>
        /// Removes the element with the specified key from the <see cref="T:System.Collections.Generic.IDictionary`2"/>.
        /// </summary>
        /// <param name="key">The key of the element to remove.</param>
        /// <returns>
        /// true if the element is successfully removed; otherwise, false. This method also returns false if <paramref name="key"/> was not found in the original <see cref="T:System.Collections.Generic.IDictionary`2"/>.
        /// </returns>
        /// <exception cref="T:System.ArgumentNullException"><paramref name="key"/> is null.
        /// </exception>
        /// <exception cref="T:System.NotSupportedException">
        /// The <see cref="T:System.Collections.Generic.IDictionary`2"/> is read-only.
        /// </exception>
        bool IDictionary<TKey, TValue>.Remove(TKey key)
        {
            // Immutable collections do not support in-place mutation.
            throw new NotSupportedException();
        }

        #endregion

        #region ICollection<KeyValuePair<TKey, TValue>> Methods

        void ICollection<KeyValuePair<TKey, TValue>>.Add(KeyValuePair<TKey, TValue> item)
        {
            throw new NotSupportedException();
        }

        void ICollection<KeyValuePair<TKey, TValue>>.Clear()
        {
            throw new NotSupportedException();
        }

        bool ICollection<KeyValuePair<TKey, TValue>>.Remove(KeyValuePair<TKey, TValue> item)
        {
            throw new NotSupportedException();
        }

        /// <summary>
        /// Copies all key/value pairs into <paramref name="array"/> starting at <paramref name="arrayIndex"/>.
        /// </summary>
        void ICollection<KeyValuePair<TKey, TValue>>.CopyTo(KeyValuePair<TKey, TValue>[] array, int arrayIndex)
        {
            Requires.NotNull(array, "array");
            Requires.Range(arrayIndex >= 0, "arrayIndex");
            // Ensure the destination has room for every element before writing anything.
            Requires.Range(array.Length >= arrayIndex + this.Count, "arrayIndex");
            foreach (var item in this)
            {
                array[arrayIndex++] = item;
            }
        }

        #endregion

        #region IDictionary Properties

        /// <summary>
        /// Gets a value indicating whether the <see cref="T:System.Collections.IDictionary" /> object has a fixed size.
        /// </summary>
        /// <returns>true if the <see cref="T:System.Collections.IDictionary" /> object has a fixed size; otherwise, false.</returns>
        bool IDictionary.IsFixedSize
        {
            get { return true; }
        }

        /// <summary>
        /// Gets a value indicating whether the <see cref="T:System.Collections.Generic.ICollection`1" /> is read-only.
        /// </summary>
        /// <returns>true if the <see cref="T:System.Collections.Generic.ICollection`1" /> is read-only; otherwise, false.
        /// </returns>
        bool IDictionary.IsReadOnly
        {
            get { return true; }
        }

        /// <summary>
        /// Gets an <see cref="T:System.Collections.Generic.ICollection`1" /> containing the keys of the <see cref="T:System.Collections.Generic.IDictionary`2" />.
        /// </summary>
        /// <returns>
        /// An <see cref="T:System.Collections.Generic.ICollection`1" /> containing the keys of the object that implements <see cref="T:System.Collections.Generic.IDictionary`2" />.
        /// </returns>
        ICollection IDictionary.Keys
        {
            get { return new KeysCollectionAccessor<TKey, TValue>(this); }
        }

        /// <summary>
        /// Gets an <see cref="T:System.Collections.Generic.ICollection`1" /> containing the values in the <see cref="T:System.Collections.Generic.IDictionary`2" />.
        /// </summary>
        /// <returns>
        /// An <see cref="T:System.Collections.Generic.ICollection`1" /> containing the values in the object that implements <see cref="T:System.Collections.Generic.IDictionary`2" />.
        /// </returns>
        ICollection IDictionary.Values
        {
            get { return new ValuesCollectionAccessor<TKey, TValue>(this); }
        }

        #endregion

        #region IDictionary Methods

        /// <summary>
        /// Adds an element with the provided key and value to the <see cref="T:System.Collections.IDictionary" /> object.
        /// </summary>
        /// <param name="key">The <see cref="T:System.Object" /> to use as the key of the element to add.</param>
        /// <param name="value">The <see cref="T:System.Object" /> to use as the value of the element to add.</param>
        void IDictionary.Add(object key, object value)
        {
            throw new NotSupportedException();
        }

        /// <summary>
        /// Determines whether the <see cref="T:System.Collections.IDictionary" /> object contains an element with the specified key.
        /// </summary>
        /// <param name="key">The key to locate in the <see cref="T:System.Collections.IDictionary" /> object.</param>
        /// <returns>
        /// true if the <see cref="T:System.Collections.IDictionary" /> contains an element with the key; otherwise, false.
        /// </returns>
        bool IDictionary.Contains(object key)
        {
            // NOTE(review): a key of an incompatible runtime type will throw InvalidCastException
            // here rather than returning false; matches the non-generic IDictionary pattern used elsewhere.
            return this.ContainsKey((TKey)key);
        }

        /// <summary>
        /// Returns an <see cref="T:System.Collections.IDictionaryEnumerator" /> object for the <see cref="T:System.Collections.IDictionary" /> object.
        /// </summary>
        /// <returns>
        /// An <see cref="T:System.Collections.IDictionaryEnumerator" /> object for the <see cref="T:System.Collections.IDictionary" /> object.
        /// </returns>
        IDictionaryEnumerator IDictionary.GetEnumerator()
        {
            return new DictionaryEnumerator<TKey, TValue>(this.GetEnumerator());
        }

        /// <summary>
        /// Removes the element with the specified key from the <see cref="T:System.Collections.IDictionary" /> object.
        /// </summary>
        /// <param name="key">The key of the element to remove.</param>
        void IDictionary.Remove(object key)
        {
            throw new NotSupportedException();
        }

        /// <summary>
        /// Gets the element with the specified key; the setter is not supported.
        /// </summary>
        /// <param name="key">The key.</param>
        /// <returns>The value associated with <paramref name="key"/>.</returns>
        object IDictionary.this[object key]
        {
            get { return this[(TKey)key]; }
            set { throw new NotSupportedException(); }
        }

        /// <summary>
        /// Not supported; immutable collections cannot be cleared in place.
        /// </summary>
        /// <exception cref="System.NotSupportedException">Always thrown.</exception>
        void IDictionary.Clear()
        {
            throw new NotSupportedException();
        }

        #endregion

        #region ICollection Methods

        /// <summary>
        /// Copies the elements of the <see cref="T:System.Collections.ICollection" /> to an <see cref="T:System.Array" />, starting at a particular <see cref="T:System.Array" /> index.
        /// </summary>
        /// <param name="array">The one-dimensional <see cref="T:System.Array" /> that is the destination of the elements copied from <see cref="T:System.Collections.ICollection" />. The <see cref="T:System.Array" /> must have zero-based indexing.</param>
        /// <param name="arrayIndex">The zero-based index in <paramref name="array" /> at which copying begins.</param>
        void ICollection.CopyTo(Array array, int arrayIndex)
        {
            Requires.NotNull(array, "array");
            Requires.Range(arrayIndex >= 0, "arrayIndex");
            Requires.Range(array.Length >= arrayIndex + this.Count, "arrayIndex");

            if (this.count == 0)
            {
                return;
            }

            int[] indices = new int[1]; // SetValue takes a params array; lifting out the implicit allocation from the loop
            foreach (var item in this)
            {
                indices[0] = arrayIndex++;
                array.SetValue(new DictionaryEntry(item.Key, item.Value), indices);
            }
        }

        #endregion

        #region ICollection Properties

        /// <summary>
        /// Gets an object that can be used to synchronize access to the <see cref="T:System.Collections.ICollection" />.
        /// </summary>
        /// <returns>An object that can be used to synchronize access to the <see cref="T:System.Collections.ICollection" />.</returns>
        [DebuggerBrowsable(DebuggerBrowsableState.Never)]
        object ICollection.SyncRoot
        {
            get { return this; }
        }

        /// <summary>
        /// Gets a value indicating whether access to the <see cref="T:System.Collections.ICollection" /> is synchronized (thread safe).
        /// </summary>
        /// <returns>true if access to the <see cref="T:System.Collections.ICollection" /> is synchronized (thread safe); otherwise, false.</returns>
        [DebuggerBrowsable(DebuggerBrowsableState.Never)]
        bool ICollection.IsSynchronized
        {
            get
            {
                // This is immutable, so it is always thread-safe.
                return true;
            }
        }

        #endregion

        #region IEnumerable<KeyValuePair<TKey, TValue>> Members

        /// <summary>
        /// Returns an enumerator that iterates through the collection.
        /// </summary>
        /// <returns>
        /// A <see cref="T:System.Collections.Generic.IEnumerator`1"/> that can be used to iterate through the collection.
        /// </returns>
        IEnumerator<KeyValuePair<TKey, TValue>> IEnumerable<KeyValuePair<TKey, TValue>>.GetEnumerator()
        {
            return this.GetEnumerator();
        }

        #endregion

        #region IEnumerable Members

        /// <summary>
        /// Returns an enumerator that iterates through a collection.
        /// </summary>
        /// <returns>
        /// An <see cref="T:System.Collections.IEnumerator"/> object that can be used to iterate through the collection.
        /// </returns>
        [ExcludeFromCodeCoverage]
        IEnumerator IEnumerable.GetEnumerator()
        {
            return this.GetEnumerator();
        }

        #endregion

        /// <summary>
        /// Gets an empty collection with the specified comparers.
        /// </summary>
        /// <param name="comparers">The comparers.</param>
        /// <returns>The empty dictionary.</returns>
        [Pure]
        private static ImmutableDictionary<TKey, TValue> EmptyWithComparers(Comparers comparers)
        {
            Requires.NotNull(comparers, "comparers");

            // Reuse the shared Empty singleton when it already carries these comparers.
            return Empty.comparers == comparers ? Empty : new ImmutableDictionary<TKey, TValue>(comparers);
        }

        /// <summary>
        /// Attempts to discover an <see cref="ImmutableDictionary&lt;TKey, TValue&gt;"/> instance beneath some enumerable sequence
        /// if one exists.
        /// </summary>
        /// <param name="sequence">The sequence that may have come from an immutable map.</param>
        /// <param name="other">Receives the concrete <see cref="ImmutableDictionary&lt;TKey, TValue&gt;"/> typed value if one can be found.</param>
        /// <returns><c>true</c> if the cast was successful; <c>false</c> otherwise.</returns>
        private static bool TryCastToImmutableMap(IEnumerable<KeyValuePair<TKey, TValue>> sequence, out ImmutableDictionary<TKey, TValue> other)
        {
            other = sequence as ImmutableDictionary<TKey, TValue>;
            if (other != null)
            {
                return true;
            }

            // A Builder can cheaply produce its current immutable snapshot.
            var builder = sequence as Builder;
            if (builder != null)
            {
                other = builder.ToImmutable();
                return true;
            }

            return false;
        }

        #region Static query and manipulator methods

        /// <summary>
        /// Performs the operation on a given data structure.
/// </summary> private static bool ContainsKey(TKey key, MutationInput origin) { int hashCode = origin.KeyComparer.GetHashCode(key); HashBucket bucket; if (origin.Root.TryGetValue(hashCode, Comparer<int>.Default, out bucket)) { TValue value; return bucket.TryGetValue(key, origin.KeyOnlyComparer, out value); } return false; } /// <summary> /// Performs the operation on a given data structure. /// </summary> private static bool Contains(KeyValuePair<TKey, TValue> keyValuePair, MutationInput origin) { int hashCode = origin.KeyComparer.GetHashCode(keyValuePair.Key); HashBucket bucket; if (origin.Root.TryGetValue(hashCode, Comparer<int>.Default, out bucket)) { TValue value; return bucket.TryGetValue(keyValuePair.Key, origin.KeyOnlyComparer, out value) && origin.ValueComparer.Equals(value, keyValuePair.Value); } return false; } /// <summary> /// Performs the operation on a given data structure. /// </summary> private static bool TryGetValue(TKey key, MutationInput origin, out TValue value) { int hashCode = origin.KeyComparer.GetHashCode(key); HashBucket bucket; if (origin.Root.TryGetValue(hashCode, Comparer<int>.Default, out bucket)) { return bucket.TryGetValue(key, origin.KeyOnlyComparer, out value); } value = default(TValue); return false; } /// <summary> /// Performs the operation on a given data structure. /// </summary> private static bool TryGetKey(TKey equalKey, MutationInput origin, out TKey actualKey) { int hashCode = origin.KeyComparer.GetHashCode(equalKey); HashBucket bucket; if (origin.Root.TryGetValue(hashCode, Comparer<int>.Default, out bucket)) { return bucket.TryGetKey(equalKey, origin.KeyOnlyComparer, out actualKey); } actualKey = equalKey; return false; } /// <summary> /// Performs the operation on a given data structure. 
        /// </summary>
        private static MutationResult Add(TKey key, TValue value, KeyCollisionBehavior behavior, MutationInput origin)
        {
            Requires.NotNullAllowStructs(key, "key");

            OperationResult result;
            int hashCode = origin.KeyComparer.GetHashCode(key);
            // GetValueOrDefault yields an empty bucket when the hash code is not present.
            HashBucket bucket = origin.Root.GetValueOrDefault(hashCode, Comparer<int>.Default);
            var newBucket = bucket.Add(key, value, origin.KeyOnlyComparer, origin.ValueComparer, behavior, out result);
            if (result == OperationResult.NoChangeRequired)
            {
                // Adding an equal key/value was a no-op; return the origin unchanged.
                return new MutationResult(origin);
            }

            var newRoot = UpdateRoot(origin.Root, hashCode, newBucket, origin.HashBucketComparer);
            // Count grows only when a genuinely new key was inserted (AppliedWithoutSizeChange means replacement).
            return new MutationResult(newRoot, result == OperationResult.SizeChanged ? +1 : 0);
        }

        /// <summary>
        /// Performs the operation on a given data structure.
        /// </summary>
        private static MutationResult AddRange(IEnumerable<KeyValuePair<TKey, TValue>> items, MutationInput origin, KeyCollisionBehavior collisionBehavior = KeyCollisionBehavior.ThrowIfValueDifferent)
        {
            Requires.NotNull(items, "items");

            // Fold every pair into a single new root, tracking the net size change.
            int countAdjustment = 0;
            var newRoot = origin.Root;
            foreach (var pair in items)
            {
                int hashCode = origin.KeyComparer.GetHashCode(pair.Key);
                HashBucket bucket = newRoot.GetValueOrDefault(hashCode, Comparer<int>.Default);
                OperationResult result;
                var newBucket = bucket.Add(pair.Key, pair.Value, origin.KeyOnlyComparer, origin.ValueComparer, collisionBehavior, out result);
                newRoot = UpdateRoot(newRoot, hashCode, newBucket, origin.HashBucketComparer);
                if (result == OperationResult.SizeChanged)
                {
                    countAdjustment++;
                }
            }

            return new MutationResult(newRoot, countAdjustment);
        }

        /// <summary>
        /// Performs the operation on a given data structure.
        /// </summary>
        private static MutationResult Remove(TKey key, MutationInput origin)
        {
            int hashCode = origin.KeyComparer.GetHashCode(key);
            HashBucket bucket;
            if (origin.Root.TryGetValue(hashCode, Comparer<int>.Default, out bucket))
            {
                OperationResult result;
                var newRoot = UpdateRoot(origin.Root, hashCode, bucket.Remove(key, origin.KeyOnlyComparer, out result), origin.HashBucketComparer);
                return new MutationResult(newRoot, result == OperationResult.SizeChanged ? -1 : 0);
            }

            // No bucket for this hash code: the key is absent and nothing changes.
            return new MutationResult(origin);
        }

        /// <summary>
        /// Performs the set operation on a given data structure.
        /// </summary>
        private static ImmutableSortedDictionary<int, HashBucket>.Node UpdateRoot(ImmutableSortedDictionary<int, HashBucket>.Node root, int hashCode, HashBucket newBucket, IEqualityComparer<HashBucket> hashBucketComparer)
        {
            bool mutated;
            if (newBucket.IsEmpty)
            {
                // Remove empty buckets entirely so the tree never stores dead entries.
                return root.Remove(hashCode, Comparer<int>.Default, out mutated);
            }
            else
            {
                bool replacedExistingValue;
                return root.SetItem(hashCode, newBucket, Comparer<int>.Default, hashBucketComparer, out replacedExistingValue, out mutated);
            }
        }

        #endregion

        /// <summary>
        /// Wraps the specified data structure with an immutable collection wrapper.
        /// </summary>
        /// <param name="root">The root of the data structure.</param>
        /// <param name="comparers">The comparers.</param>
        /// <param name="count">The number of elements in the data structure.</param>
        /// <returns>
        /// The immutable collection.
        /// </returns>
        private static ImmutableDictionary<TKey, TValue> Wrap(ImmutableSortedDictionary<int, HashBucket>.Node root, Comparers comparers, int count)
        {
            Requires.NotNull(root, "root");
            Requires.NotNull(comparers, "comparers");
            Requires.Range(count >= 0, "count");
            return new ImmutableDictionary<TKey, TValue>(root, comparers, count);
        }

        /// <summary>
        /// Wraps the specified data structure with an immutable collection wrapper.
        /// </summary>
        /// <param name="root">The root of the data structure.</param>
        /// <param name="adjustedCountIfDifferentRoot">The adjusted count if the root has changed.</param>
        /// <returns>The immutable collection.</returns>
        private ImmutableDictionary<TKey, TValue> Wrap(ImmutableSortedDictionary<int, HashBucket>.Node root, int adjustedCountIfDifferentRoot)
        {
            if (root == null)
            {
                return this.Clear();
            }

            if (this.root != root)
            {
                // Only allocate a new wrapper when the tree actually changed.
                return root.IsEmpty ? this.Clear() : new ImmutableDictionary<TKey, TValue>(root, this.comparers, adjustedCountIfDifferentRoot);
            }

            return this;
        }

        /// <summary>
        /// Bulk adds entries to the map.
        /// </summary>
        /// <param name="pairs">The entries to add.</param>
        /// <param name="avoidToHashMap"><c>true</c> when being called from ToHashMap to avoid StackOverflow.</param>
        [Pure]
        private ImmutableDictionary<TKey, TValue> AddRange(IEnumerable<KeyValuePair<TKey, TValue>> pairs, bool avoidToHashMap)
        {
            Requires.NotNull(pairs, "pairs");
            Contract.Ensures(Contract.Result<ImmutableDictionary<TKey, TValue>>() != null);

            // Some optimizations may apply if we're an empty list.
            if (this.IsEmpty && !avoidToHashMap)
            {
                // If the items being added actually come from an ImmutableHashMap<TKey, TValue>
                // then there is no value in reconstructing it.
                ImmutableDictionary<TKey, TValue> other;
                if (TryCastToImmutableMap(pairs, out other))
                {
                    return other.WithComparers(this.KeyComparer, this.ValueComparer);
                }
            }

            var result = AddRange(pairs, this.Origin);
            return result.Finalize(this);
        }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
namespace System.Globalization {
    using System.Text;
    using System;
    using System.Diagnostics.Contracts;
    using System.Globalization;

    // Formats TimeSpan values for TimeSpan.ToString, supporting the standard
    // ("c"/"t"/"T"/"g"/"G") and custom format strings.
    internal static class TimeSpanFormat {

        // Renders n as a decimal string left-padded with '0' to the requested width.
        [System.Security.SecuritySafeCritical]  // auto-generated
        private static String IntToString(int n, int digits) {
            return ParseNumbers.IntToString(n, 10, digits, '0', 0);
        }

        // Cached invariant literal sets for the "c"/"t"/"T" standard formats,
        // one each for non-negative and negative values.
        internal static readonly FormatLiterals PositiveInvariantFormatLiterals = TimeSpanFormat.FormatLiterals.InitInvariant(false /*isNegative*/);
        internal static readonly FormatLiterals NegativeInvariantFormatLiterals = TimeSpanFormat.FormatLiterals.InitInvariant(true  /*isNegative*/);

        internal enum Pattern {
            None     = 0,
            Minimum  = 1, // omit optional components (leading day, trailing fraction) when zero
            Full     = 2, // always emit every component
        }

        //
        //  Format
        //
        //  Actions: Main method called from TimeSpan.ToString
        //
        internal static String Format(TimeSpan value, String format, IFormatProvider formatProvider) {
            if (format == null || format.Length == 0)
                format = "c";

            // standard formats
            if (format.Length == 1) {
                char f = format[0];

                // "c"/"t"/"T": culture-insensitive invariant pattern.
                if (f == 'c' || f == 't' || f == 'T')
                    return FormatStandard(value, true, format, Pattern.Minimum);
                // "g"/"G": culture-sensitive general patterns from DateTimeFormatInfo.
                if (f == 'g' || f == 'G') {
                    Pattern pattern;
                    DateTimeFormatInfo dtfi = DateTimeFormatInfo.GetInstance(formatProvider);

                    if (value._ticks < 0)
                        format = dtfi.FullTimeSpanNegativePattern;
                    else
                        format = dtfi.FullTimeSpanPositivePattern;
                    if (f == 'g')
                        pattern = Pattern.Minimum;
                    else
                        pattern = Pattern.Full;

                    return FormatStandard(value, false, format, pattern);
                }
                throw new FormatException(Environment.GetResourceString("Format_InvalidString"));
            }

            // Anything longer than one character is treated as a custom format string.
            return FormatCustomized(value, format, DateTimeFormatInfo.GetInstance(formatProvider));
        }

        //
        //  FormatStandard
        //
        //  Actions: Format the TimeSpan instance using the specified format.
        //
        private static String FormatStandard(TimeSpan value, bool isInvariant, String format, Pattern pattern) {
            StringBuilder sb = StringBuilderCache.Acquire();
            // Split the tick count into day and sub-day components.
            int day = (int)(value._ticks / TimeSpan.TicksPerDay);
            long time = value._ticks % TimeSpan.TicksPerDay;

            // Normalize to non-negative magnitudes; the sign is emitted via literal.Start.
            if (value._ticks < 0) {
                day = -day;
                time = -time;
            }
            int hours    = (int)(time / TimeSpan.TicksPerHour % 24);
            int minutes  = (int)(time / TimeSpan.TicksPerMinute % 60);
            int seconds  = (int)(time / TimeSpan.TicksPerSecond % 60);
            int fraction = (int)(time % TimeSpan.TicksPerSecond);

            FormatLiterals literal;
            if (isInvariant) {
                if (value._ticks < 0)
                    literal = NegativeInvariantFormatLiterals;
                else
                    literal = PositiveInvariantFormatLiterals;
            }
            else {
                literal = new FormatLiterals();
                literal.Init(format, pattern == Pattern.Full);
            }
            if (fraction != 0) { // truncate the partial second to the specified length
                fraction = (int)((long)fraction / (long)Math.Pow(10, DateTimeFormat.MaxSecondsFractionDigits - literal.ff));
            }

            // Pattern.Full: [-]dd.hh:mm:ss.fffffff
            // Pattern.Minimum: [-][d.]hh:mm:ss[.fffffff]

            sb.Append(literal.Start);                           // [-]
            if (pattern == Pattern.Full || day != 0) {          //
                sb.Append(day);                                 // [dd]
                sb.Append(literal.DayHourSep);                  // [.]
            }                                                   //
            sb.Append(IntToString(hours, literal.hh));          // hh
            sb.Append(literal.HourMinuteSep);                   // :
            sb.Append(IntToString(minutes, literal.mm));        // mm
            sb.Append(literal.MinuteSecondSep);                 // :
            sb.Append(IntToString(seconds, literal.ss));        // ss
            if (!isInvariant && pattern == Pattern.Minimum) {
                // Strip trailing zero digits from the fraction so Minimum omits them.
                int effectiveDigits = literal.ff;
                while (effectiveDigits > 0) {
                    if (fraction % 10 == 0) {
                        fraction = fraction / 10;
                        effectiveDigits--;
                    }
                    else {
                        break;
                    }
                }
                if (effectiveDigits > 0) {
                    sb.Append(literal.SecondFractionSep);           // [.FFFFFFF]
                    sb.Append((fraction).ToString(DateTimeFormat.fixedNumberFormats[effectiveDigits - 1], CultureInfo.InvariantCulture));
                }
            }
            else if (pattern == Pattern.Full || fraction != 0) {
                sb.Append(literal.SecondFractionSep);           // [.]
                sb.Append(IntToString(fraction, literal.ff));   // [fffffff]
            }                                                   //
            sb.Append(literal.End);                             //

            return StringBuilderCache.GetStringAndRelease(sb);
        }

        //
        //  FormatCustomized
        //
        //  Actions: Format the TimeSpan instance using the specified format.
        //
        internal static String FormatCustomized(TimeSpan value, String format, DateTimeFormatInfo dtfi) {
            Contract.Assert(dtfi != null, "dtfi == null");

            int day = (int)(value._ticks / TimeSpan.TicksPerDay);
            long time = value._ticks % TimeSpan.TicksPerDay;

            // Custom formats never emit a sign; work with magnitudes only.
            if (value._ticks < 0) {
                day = -day;
                time = -time;
            }
            int hours    = (int)(time / TimeSpan.TicksPerHour % 24);
            int minutes  = (int)(time / TimeSpan.TicksPerMinute % 60);
            int seconds  = (int)(time / TimeSpan.TicksPerSecond % 60);
            int fraction = (int)(time % TimeSpan.TicksPerSecond);

            long tmp = 0;
            int i = 0;
            int tokenLen;
            StringBuilder result = StringBuilderCache.Acquire();

            while (i < format.Length) {
                char ch = format[i];
                int nextChar;
                switch (ch) {
                    case 'h':
                        tokenLen = DateTimeFormat.ParseRepeatPattern(format, i, ch);
                        if (tokenLen > 2)
                            throw new FormatException(Environment.GetResourceString("Format_InvalidString"));
                        DateTimeFormat.FormatDigits(result, hours, tokenLen);
                        break;
                    case 'm':
                        tokenLen = DateTimeFormat.ParseRepeatPattern(format, i, ch);
                        if (tokenLen > 2)
                            throw new FormatException(Environment.GetResourceString("Format_InvalidString"));
                        DateTimeFormat.FormatDigits(result, minutes, tokenLen);
                        break;
                    case 's':
                        tokenLen = DateTimeFormat.ParseRepeatPattern(format, i, ch);
                        if (tokenLen > 2)
                            throw new FormatException(Environment.GetResourceString("Format_InvalidString"));
                        DateTimeFormat.FormatDigits(result, seconds, tokenLen);
                        break;
                    case 'f':
                        //
                        // The fraction of a second in single-digit precision. The remaining digits are truncated.
                        //
                        tokenLen = DateTimeFormat.ParseRepeatPattern(format, i, ch);
                        if (tokenLen > DateTimeFormat.MaxSecondsFractionDigits)
                            throw new FormatException(Environment.GetResourceString("Format_InvalidString"));

                        tmp = (long)fraction;
                        tmp /= (long)Math.Pow(10, DateTimeFormat.MaxSecondsFractionDigits - tokenLen);
                        result.Append((tmp).ToString(DateTimeFormat.fixedNumberFormats[tokenLen - 1], CultureInfo.InvariantCulture));
                        break;
                    case 'F':
                        //
                        // Displays the most significant digit of the seconds fraction. Nothing is displayed if the digit is zero.
                        //
                        tokenLen = DateTimeFormat.ParseRepeatPattern(format, i, ch);
                        if (tokenLen > DateTimeFormat.MaxSecondsFractionDigits)
                            throw new FormatException(Environment.GetResourceString("Format_InvalidString"));

                        tmp = (long)fraction;
                        tmp /= (long)Math.Pow(10, DateTimeFormat.MaxSecondsFractionDigits - tokenLen);
                        // Drop trailing zero digits; emit nothing if the fraction is all zeros.
                        int effectiveDigits = tokenLen;
                        while (effectiveDigits > 0) {
                            if (tmp % 10 == 0) {
                                tmp = tmp / 10;
                                effectiveDigits--;
                            }
                            else {
                                break;
                            }
                        }
                        if (effectiveDigits > 0) {
                            result.Append((tmp).ToString(DateTimeFormat.fixedNumberFormats[effectiveDigits - 1], CultureInfo.InvariantCulture));
                        }
                        break;
                    case 'd':
                        //
                        // tokenLen == 1 : Day as digits with no leading zero.
                        // tokenLen == 2+: Day as digits with leading zero for single-digit days.
                        //
                        tokenLen = DateTimeFormat.ParseRepeatPattern(format, i, ch);
                        if (tokenLen > 8)
                            throw new FormatException(Environment.GetResourceString("Format_InvalidString"));
                        DateTimeFormat.FormatDigits(result, day, tokenLen, true);
                        break;
                    case '\'':
                    case '\"':
                        // Quoted literal text is copied verbatim (without the quotes).
                        tokenLen = DateTimeFormat.ParseQuoteString(format, i);
                        result.Append(format, i + 1, tokenLen - 2);
                        break;
                    case '%':
                        // Optional format character.
                        // For example, format string "%d" will print day
                        // Most of the cases, "%" can be ignored.
                        nextChar = DateTimeFormat.ParseNextChar(format, i);
                        // nextChar will be -1 if we already reach the end of the format string.
                        // Besides, we will not allow "%%" appear in the pattern.
                        if (nextChar >= 0 && nextChar != (int)'%') {
                            result.Append(TimeSpanFormat.FormatCustomized(value, ((char)nextChar).ToString(), dtfi));
                            tokenLen = 2;
                        }
                        else {
                            //
                            // This means that '%' is at the end of the format string or
                            // "%%" appears in the format string.
                            //
                            throw new FormatException(Environment.GetResourceString("Format_InvalidString"));
                        }
                        break;
                    case '\\':
                        // Escaped character. Can be used to insert character into the format string.
                        // For example, "\d" will insert the character 'd' into the string.
                        //
                        nextChar = DateTimeFormat.ParseNextChar(format, i);
                        if (nextChar >= 0) {
                            result.Append(((char)nextChar));
                            tokenLen = 2;
                        }
                        else {
                            //
                            // This means that '\' is at the end of the formatting string.
                            //
                            throw new FormatException(Environment.GetResourceString("Format_InvalidString"));
                        }
                        break;
                    default:
                        throw new FormatException(Environment.GetResourceString("Format_InvalidString"));
                }
                i += tokenLen;
            }
            return StringBuilderCache.GetStringAndRelease(result);
        }

        // Holds the literal separators and field widths parsed from a standard
        // TimeSpan pattern (e.g. the "-", ".", ":" pieces of [-]d.hh:mm:ss.fffffff).
        internal struct FormatLiterals {
            internal String Start {
                get {
                    return literals[0];
                }
            }
            internal String DayHourSep {
                get {
                    return literals[1];
                }
            }
            internal String HourMinuteSep {
                get {
                    return literals[2];
                }
            }
            internal String MinuteSecondSep {
                get {
                    return literals[3];
                }
            }
            internal String SecondFractionSep {
                get {
                    return literals[4];
                }
            }
            internal String End {
                get {
                    return literals[5];
                }
            }
            internal String AppCompatLiteral;
            // Field widths (number of digits) for day, hour, minute, second and fraction.
            internal int dd;
            internal int hh;
            internal int mm;
            internal int ss;
            internal int ff;

            private String[] literals;

            /* factory method for static invariant FormatLiterals */
            internal static FormatLiterals InitInvariant(bool isNegative) {
                FormatLiterals x = new FormatLiterals();
                x.literals = new String[6];
                x.literals[0] = isNegative ?
"-" : String.Empty; x.literals[1] = "."; x.literals[2] = ":"; x.literals[3] = ":"; x.literals[4] = "."; x.literals[5] = String.Empty; x.AppCompatLiteral = ":."; // MinuteSecondSep+SecondFractionSep; x.dd = 2; x.hh = 2; x.mm = 2; x.ss = 2; x.ff = DateTimeFormat.MaxSecondsFractionDigits; return x; } // For the "v1" TimeSpan localized patterns, the data is simply literal field separators with // the constants guaranteed to include DHMSF ordered greatest to least significant. // Once the data becomes more complex than this we will need to write a proper tokenizer for // parsing and formatting internal void Init(String format, bool useInvariantFieldLengths) { literals = new String[6]; for (int i = 0; i < literals.Length; i++) literals[i] = String.Empty; dd = 0; hh = 0; mm = 0; ss = 0; ff = 0; StringBuilder sb = StringBuilderCache.Acquire(); bool inQuote = false; char quote = '\''; int field = 0; for (int i = 0; i < format.Length; i++) { switch (format[i]) { case '\'': case '\"': if (inQuote && (quote == format[i])) { /* we were in a quote and found a matching exit quote, so we are outside a quote now */ Contract.Assert(field >= 0 && field <= 5, "field >= 0 && field <= 5"); if (field >= 0 && field <= 5) { literals[field] = sb.ToString(); sb.Length = 0; inQuote = false; } else { return; // how did we get here? 
} } else if (!inQuote) { /* we are at the start of a new quote block */ quote = format[i]; inQuote = true; } else { /* we were in a quote and saw the other type of quote character, so we are still in a quote */ } break; case '%': Contract.Assert(false, "Unexpected special token '%', Bug in DateTimeFormatInfo.FullTimeSpan[Positive|Negative]Pattern"); goto default; case '\\': if (!inQuote) { i++; /* skip next character that is escaped by this backslash or percent sign */ break; } goto default; case 'd': if (!inQuote) { Contract.Assert((field == 0 && sb.Length == 0) || field == 1, "field == 0 || field == 1, Bug in DateTimeFormatInfo.FullTimeSpan[Positive|Negative]Pattern"); field = 1; // DayHourSep dd++; } break; case 'h': if (!inQuote) { Contract.Assert((field == 1 && sb.Length == 0) || field == 2, "field == 1 || field == 2, Bug in DateTimeFormatInfo.FullTimeSpan[Positive|Negative]Pattern"); field = 2; // HourMinuteSep hh++; } break; case 'm': if (!inQuote) { Contract.Assert((field == 2 && sb.Length == 0) || field == 3, "field == 2 || field == 3, Bug in DateTimeFormatInfo.FullTimeSpan[Positive|Negative]Pattern"); field = 3; // MinuteSecondSep mm++; } break; case 's': if (!inQuote) { Contract.Assert((field == 3 && sb.Length == 0) || field == 4, "field == 3 || field == 4, Bug in DateTimeFormatInfo.FullTimeSpan[Positive|Negative]Pattern"); field = 4; // SecondFractionSep ss++; } break; case 'f': case 'F': if (!inQuote) { Contract.Assert((field == 4 && sb.Length == 0) || field == 5, "field == 4 || field == 5, Bug in DateTimeFormatInfo.FullTimeSpan[Positive|Negative]Pattern"); field = 5; // End ff++; } break; default: sb.Append(format[i]); break; } } Contract.Assert(field == 5); AppCompatLiteral = MinuteSecondSep + SecondFractionSep; Contract.Assert(0 < dd && dd < 3, "0 < dd && dd < 3, Bug in System.Globalization.DateTimeFormatInfo.FullTimeSpan[Positive|Negative]Pattern"); Contract.Assert(0 < hh && hh < 3, "0 < hh && hh < 3, Bug in 
System.Globalization.DateTimeFormatInfo.FullTimeSpan[Positive|Negative]Pattern"); Contract.Assert(0 < mm && mm < 3, "0 < mm && mm < 3, Bug in System.Globalization.DateTimeFormatInfo.FullTimeSpan[Positive|Negative]Pattern"); Contract.Assert(0 < ss && ss < 3, "0 < ss && ss < 3, Bug in System.Globalization.DateTimeFormatInfo.FullTimeSpan[Positive|Negative]Pattern"); Contract.Assert(0 < ff && ff < 8, "0 < ff && ff < 8, Bug in System.Globalization.DateTimeFormatInfo.FullTimeSpan[Positive|Negative]Pattern"); if (useInvariantFieldLengths) { dd = 2; hh = 2; mm = 2; ss = 2; ff = DateTimeFormat.MaxSecondsFractionDigits; } else { if (dd < 1 || dd > 2) dd = 2; // The DTFI property has a problem. let's try to make the best of the situation. if (hh < 1 || hh > 2) hh = 2; if (mm < 1 || mm > 2) mm = 2; if (ss < 1 || ss > 2) ss = 2; if (ff < 1 || ff > 7) ff = 7; } StringBuilderCache.Release(sb); } } //end of struct FormatLiterals } }
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.ComponentModel.Composition;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.CodeActions;
using Microsoft.CodeAnalysis.CodeFixes;
using Microsoft.CodeAnalysis.CodeFixes.Suppression;
using Microsoft.CodeAnalysis.CodeRefactorings;
using Microsoft.CodeAnalysis.Diagnostics;
using Microsoft.CodeAnalysis.Editor.Host;
using Microsoft.CodeAnalysis.Editor.Shared;
using Microsoft.CodeAnalysis.Editor.Shared.Extensions;
using Microsoft.CodeAnalysis.Editor.Shared.Options;
using Microsoft.CodeAnalysis.Editor.Shared.Utilities;
using Microsoft.CodeAnalysis.Internal.Log;
using Microsoft.CodeAnalysis.Options;
using Microsoft.CodeAnalysis.Shared.TestHooks;
using Microsoft.CodeAnalysis.Text;
using Microsoft.CodeAnalysis.Text.Shared.Extensions;
using Microsoft.VisualStudio.Language.Intellisense;
using Microsoft.VisualStudio.Text;
using Microsoft.VisualStudio.Text.Editor;
using Roslyn.Utilities;

namespace Microsoft.CodeAnalysis.Editor.Implementation.Suggestions
{
    /// <summary>
    /// MEF-exported factory that supplies the VS editor's light-bulb with a
    /// per-(view, buffer) <see cref="ISuggestedActionsSource"/> backed by
    /// Roslyn's code-fix and code-refactoring services.
    /// </summary>
    [Export(typeof(ISuggestedActionsSourceProvider))]
    [VisualStudio.Utilities.ContentType(ContentTypeNames.RoslynContentType)]
    [VisualStudio.Utilities.Name("Roslyn Code Fix")]
    [VisualStudio.Utilities.Order]
    internal class SuggestedActionsSourceProvider : ISuggestedActionsSourceProvider
    {
        // Telemetry ids reported per source language via TryGetTelemetryId.
        private static readonly Guid s_CSharpSourceGuid = new Guid("b967fea8-e2c3-4984-87d4-71a38f49e16a");
        private static readonly Guid s_visualBasicSourceGuid = new Guid("4de30e93-3e0c-40c2-a4ba-1124da4539f6");

        // Sentinel meaning "no solution version reported yet".
        private const int InvalidSolutionVersion = -1;

        private readonly ICodeRefactoringService _codeRefactoringService;
        private readonly IDiagnosticAnalyzerService _diagnosticService;
        private readonly ICodeFixService _codeFixService;
        private readonly ICodeActionEditHandlerService _editHandler;
        private readonly IAsynchronousOperationListener _listener;
        private readonly IWaitIndicator _waitIndicator;

        [ImportingConstructor]
        public SuggestedActionsSourceProvider(
            ICodeRefactoringService codeRefactoringService,
            IDiagnosticAnalyzerService diagnosticService,
            ICodeFixService codeFixService,
            ICodeActionEditHandlerService editHandler,
            IWaitIndicator waitIndicator,
            [ImportMany] IEnumerable<Lazy<IAsynchronousOperationListener, FeatureMetadata>> asyncListeners)
        {
            _codeRefactoringService = codeRefactoringService;
            _diagnosticService = diagnosticService;
            _codeFixService = codeFixService;
            _editHandler = editHandler;
            _waitIndicator = waitIndicator;
            // Aggregate all light-bulb listeners so async work can be tracked by tests.
            _listener = new AggregateAsynchronousOperationListener(asyncListeners, FeatureAttribute.LightBulb);
        }

        public ISuggestedActionsSource CreateSuggestedActionsSource(ITextView textView, ITextBuffer textBuffer)
        {
            Contract.ThrowIfNull(textView);
            Contract.ThrowIfNull(textBuffer);

            return new Source(this, textView, textBuffer);
        }

        /// <summary>
        /// The per-view/buffer suggested-actions source. Tracks workspace and
        /// diagnostic-update events to raise <see cref="SuggestedActionsChanged"/>,
        /// and computes fixes/refactorings on demand for the light bulb.
        /// </summary>
        private class Source : ForegroundThreadAffinitizedObject, ISuggestedActionsSource
        {
            // state that will be only reset when source is disposed.
            private SuggestedActionsSourceProvider _owner;
            private ITextView _textView;
            private ITextBuffer _subjectBuffer;
            private WorkspaceRegistration _registration;

            // mutable state
            private Workspace _workspace;
            private int _lastSolutionVersionReported;

            public Source(SuggestedActionsSourceProvider owner, ITextView textView, ITextBuffer textBuffer)
            {
                _owner = owner;
                _textView = textView;
                _textView.Closed += OnTextViewClosed;
                _subjectBuffer = textBuffer;
                _registration = Workspace.GetWorkspaceRegistration(textBuffer.AsTextContainer());

                _lastSolutionVersionReported = InvalidSolutionVersion;
                var updateSource = (IDiagnosticUpdateSource)_owner._diagnosticService;
                updateSource.DiagnosticsUpdated += OnDiagnosticsUpdated;

                if (_registration.Workspace != null)
                {
                    _workspace = _registration.Workspace;
                    _workspace.DocumentActiveContextChanged += OnActiveContextChanged;
                }

                _registration.WorkspaceChanged += OnWorkspaceChanged;
            }

            public event EventHandler<EventArgs> SuggestedActionsChanged;

            public bool TryGetTelemetryId(out Guid telemetryId)
            {
                telemetryId = default(Guid);

                var workspace = _workspace;
                if (workspace == null || _subjectBuffer == null)
                {
                    return false;
                }

                var documentId = workspace.GetDocumentIdInCurrentContext(_subjectBuffer.AsTextContainer());
                if (documentId == null)
                {
                    return false;
                }

                var project = workspace.CurrentSolution.GetProject(documentId.ProjectId);
                if (project == null)
                {
                    return false;
                }

                switch (project.Language)
                {
                    case LanguageNames.CSharp:
                        telemetryId = s_CSharpSourceGuid;
                        return true;
                    case LanguageNames.VisualBasic:
                        telemetryId = s_visualBasicSourceGuid;
                        return true;
                    default:
                        return false;
                }
            }

            // Computes the action sets shown in the light bulb. Must run on the
            // foreground (UI) thread; blocks on the async document lookup.
            public IEnumerable<SuggestedActionSet> GetSuggestedActions(ISuggestedActionCategorySet requestedActionCategories, SnapshotSpan range, CancellationToken cancellationToken)
            {
                AssertIsForeground();

                using (Logger.LogBlock(FunctionId.SuggestedActions_GetSuggestedActions, cancellationToken))
                {
                    var documentAndSnapshot = GetMatchingDocumentAndSnapshotAsync(range.Snapshot, cancellationToken).WaitAndGetResult(cancellationToken);
                    if (!documentAndSnapshot.HasValue)
                    {
                        // this is here to fail test and see why it is failed.
                        Trace.WriteLine("given range is not current");
                        return null;
                    }

                    var document = documentAndSnapshot.Value.Item1;
                    var workspace = document.Project.Solution.Workspace;
                    var supportsFeatureService = workspace.Services.GetService<IDocumentSupportsFeatureService>();

                    var fixes = GetCodeFixes(supportsFeatureService, requestedActionCategories, workspace, document, range, cancellationToken);
                    var refactorings = GetRefactorings(supportsFeatureService, requestedActionCategories, workspace, document, range, cancellationToken);

                    // Fixes first, refactorings after; either side may be null.
                    var result = fixes == null ? refactorings :
                                 refactorings == null ? fixes : fixes.Concat(refactorings);

                    if (result == null)
                    {
                        return null;
                    }

                    var allActionSets = result.ToList();
                    allActionSets = InlineActionSetsIfDesirable(allActionSets);
                    return allActionSets;
                }
            }

            private List<SuggestedActionSet> InlineActionSetsIfDesirable(List<SuggestedActionSet> allActionSets)
            {
                // If we only have a single set of items, and that set only has three max suggestion
                // offered. Then we can consider inlining any nested actions into the top level list.
                // (but we only do this if the parent of the nested actions isn't invokable itself).
                if (allActionSets.Sum(a => a.Actions.Count()) > 3)
                {
                    return allActionSets;
                }

                return allActionSets.Select(InlineActions).ToList();
            }

            // An action can be inlined when it is only a container for nested
            // actions (not directly invokable itself).
            private bool IsInlineable(ISuggestedAction action)
            {
                var suggestedAction = action as SuggestedAction;
                return suggestedAction != null &&
                    !suggestedAction.CodeAction.IsInvokable &&
                    suggestedAction.CodeAction.HasCodeActions;
            }

            private SuggestedActionSet InlineActions(SuggestedActionSet actionSet)
            {
                if (!actionSet.Actions.Any(IsInlineable))
                {
                    return actionSet;
                }

                var newActions = new List<ISuggestedAction>();
                foreach (var action in actionSet.Actions)
                {
                    if (IsInlineable(action))
                    {
                        // Looks like something we can inline.
                        var childActionSets = ((SuggestedAction)action).GetActionSets();
                        if (childActionSets.Length != 1)
                        {
                            // More than one child set — bail and keep the original shape.
                            return actionSet;
                        }

                        newActions.AddRange(childActionSets[0].Actions);
                        continue;
                    }

                    newActions.Add(action);
                }

                return new SuggestedActionSet(newActions, actionSet.Title, actionSet.Priority, actionSet.ApplicableToSpan);
            }

            private IEnumerable<SuggestedActionSet> GetCodeFixes(
                IDocumentSupportsFeatureService supportsFeatureService,
                ISuggestedActionCategorySet requestedActionCategories,
                Workspace workspace,
                Document document,
                SnapshotSpan range,
                CancellationToken cancellationToken)
            {
                if (_owner._codeFixService != null &&
                    supportsFeatureService.SupportsCodeFixes(document) &&
                    requestedActionCategories.Contains(PredefinedSuggestedActionCategoryNames.CodeFix))
                {
                    // We only include suppressions if lightbulb is asking for everything.
                    // If the light bulb is only asking for code fixes, then we don't include suppressions.
                    var includeSuppressionFixes = requestedActionCategories.Contains(PredefinedSuggestedActionCategoryNames.Any);

                    // NOTE(review): synchronously blocks on the async fix computation
                    // (light-bulb API is synchronous here).
                    var fixes = Task.Run(
                        async () => await _owner._codeFixService.GetFixesAsync(
                            document, range.Span.ToTextSpan(), includeSuppressionFixes, cancellationToken).ConfigureAwait(false),
                        cancellationToken).WaitAndGetResult(cancellationToken);

                    return OrganizeFixes(workspace, fixes, hasSuppressionFixes: includeSuppressionFixes);
                }

                return null;
            }

            /// <summary>
            /// Arrange fixes into groups based on the issue (diagnostic being fixed) and prioritize these groups.
            /// </summary>
            private IEnumerable<SuggestedActionSet> OrganizeFixes(Workspace workspace, IEnumerable<CodeFixCollection> fixCollections, bool hasSuppressionFixes)
            {
                var map = ImmutableDictionary.CreateBuilder<DiagnosticData, IList<SuggestedAction>>();
                var order = ImmutableArray.CreateBuilder<DiagnosticData>();

                // First group fixes by issue (diagnostic).
                GroupFixes(workspace, fixCollections, map, order, hasSuppressionFixes);

                // Then prioritize between the groups.
                return PrioritizeFixGroups(map.ToImmutable(), order.ToImmutable());
            }

            /// <summary>
            /// Groups fixes by the diagnostic being addressed by each fix.
            /// </summary>
            private void GroupFixes(Workspace workspace, IEnumerable<CodeFixCollection> fixCollections, IDictionary<DiagnosticData, IList<SuggestedAction>> map, IList<DiagnosticData> order, bool hasSuppressionFixes)
            {
                foreach (var fixCollection in fixCollections)
                {
                    var fixes = fixCollection.Fixes;
                    var fixCount = fixes.Length;

                    // Builds the "Fix all" sub-set for a given code action on demand.
                    Func<CodeAction, SuggestedActionSet> getFixAllSuggestedActionSet =
                        codeAction => CodeFixSuggestedAction.GetFixAllSuggestedActionSet(codeAction, fixCount, fixCollection.FixAllContext, workspace, _subjectBuffer, _owner._editHandler, _owner._waitIndicator);

                    foreach (var fix in fixes)
                    {
                        // Suppression fixes are handled below.
                        if (!(fix.Action is SuppressionCodeAction))
                        {
                            SuggestedAction suggestedAction;
                            if (fix.Action.HasCodeActions)
                            {
                                // Container action: wrap each nested action, then wrap
                                // the container around the resulting set.
                                var nestedActions = new List<SuggestedAction>();
                                foreach (var nestedAction in fix.Action.GetCodeActions())
                                {
                                    nestedActions.Add(new CodeFixSuggestedAction(workspace, _subjectBuffer, _owner._editHandler, _owner._waitIndicator, fix,
                                        nestedAction, fixCollection.Provider, getFixAllSuggestedActionSet(nestedAction)));
                                }

                                var diag = fix.PrimaryDiagnostic;
                                var set = new SuggestedActionSet(nestedActions, SuggestedActionSetPriority.Medium, diag.Location.SourceSpan.ToSpan());

                                suggestedAction = new SuggestedAction(workspace, _subjectBuffer, _owner._editHandler, _owner._waitIndicator,
                                    fix.Action, fixCollection.Provider, new[] { set });
                            }
                            else
                            {
                                suggestedAction = new CodeFixSuggestedAction(workspace, _subjectBuffer, _owner._editHandler, _owner._waitIndicator, fix,
                                    fix.Action, fixCollection.Provider, getFixAllSuggestedActionSet(fix.Action));
                            }

                            AddFix(fix, suggestedAction, map, order);
                        }
                    }

                    if (hasSuppressionFixes)
                    {
                        // Add suppression fixes to the end of a given SuggestedActionSet so that they always show up last in a group.
                        foreach (var fix in fixes)
                        {
                            if (fix.Action is SuppressionCodeAction)
                            {
                                SuggestedAction suggestedAction;
                                if (fix.Action.HasCodeActions)
                                {
                                    suggestedAction = new SuppressionSuggestedAction(workspace, _subjectBuffer, _owner._editHandler, _owner._waitIndicator, fix,
                                        fixCollection.Provider, getFixAllSuggestedActionSet);
                                }
                                else
                                {
                                    suggestedAction = new CodeFixSuggestedAction(workspace, _subjectBuffer, _owner._editHandler, _owner._waitIndicator, fix,
                                        fix.Action, fixCollection.Provider, getFixAllSuggestedActionSet(fix.Action));
                                }

                                AddFix(fix, suggestedAction, map, order);
                            }
                        }
                    }
                }
            }

            private static void AddFix(CodeFix fix, SuggestedAction suggestedAction, IDictionary<DiagnosticData, IList<SuggestedAction>> map, IList<DiagnosticData> order)
            {
                var diag = fix.GetPrimaryDiagnosticData();
                if (!map.ContainsKey(diag))
                {
                    // Remember the order of the keys for the 'map' dictionary.
                    order.Add(diag);
                    map[diag] = ImmutableArray.CreateBuilder<SuggestedAction>();
                }

                map[diag].Add(suggestedAction);
            }

            /// <summary>
            /// Return prioritized set of fix groups such that fix group for suppression always show up at the bottom of the list.
            /// </summary>
            /// <remarks>
            /// Fix groups are returned in priority order determined based on <see cref="ExtensionOrderAttribute"/>.
            /// Priority for all <see cref="SuggestedActionSet"/>s containing fixes is set to <see cref="SuggestedActionSetPriority.Medium"/> by default.
            /// The only exception is the case where a <see cref="SuggestedActionSet"/> only contains suppression fixes -
            /// the priority of such <see cref="SuggestedActionSet"/>s is set to <see cref="SuggestedActionSetPriority.None"/> so that suppression fixes
            /// always show up last after all other fixes (and refactorings) for the selected line of code.
            /// </remarks>
            private static IEnumerable<SuggestedActionSet> PrioritizeFixGroups(IDictionary<DiagnosticData, IList<SuggestedAction>> map, IList<DiagnosticData> order)
            {
                var sets = ImmutableArray.CreateBuilder<SuggestedActionSet>();

                foreach (var diag in order)
                {
                    var fixes = map[diag];

                    var priority = fixes.All(s => s is SuppressionSuggestedAction) ? SuggestedActionSetPriority.None : SuggestedActionSetPriority.Medium;

                    // diagnostic from things like build shouldn't reach here since we don't support LB for those diagnostics
                    Contract.Requires(diag.HasTextSpan);
                    sets.Add(new SuggestedActionSet(fixes, priority, diag.TextSpan.ToSpan()));
                }

                return sets.ToImmutable();
            }

            private IEnumerable<SuggestedActionSet> GetRefactorings(
                IDocumentSupportsFeatureService supportsFeatureService,
                ISuggestedActionCategorySet requestedActionCategories,
                Workspace workspace,
                Document document,
                SnapshotSpan range,
                CancellationToken cancellationToken)
            {
                var optionService = workspace.Services.GetService<IOptionService>();

                if (optionService.GetOption(EditorComponentOnOffOptions.CodeRefactorings) &&
                    _owner._codeRefactoringService != null &&
                    supportsFeatureService.SupportsRefactorings(document) &&
                    requestedActionCategories.Contains(PredefinedSuggestedActionCategoryNames.Refactoring))
                {
                    // Get the selection while on the UI thread.
                    var selection = TryGetCodeRefactoringSelection(_subjectBuffer, _textView, range);
                    if (!selection.HasValue)
                    {
                        // this is here to fail test and see why it is failed.
                        Trace.WriteLine("given range is not current");
                        return null;
                    }

                    var refactorings = Task.Run(
                        async () => await _owner._codeRefactoringService.GetRefactoringsAsync(
                            document, selection.Value, cancellationToken).ConfigureAwait(false),
                        cancellationToken).WaitAndGetResult(cancellationToken);

                    return refactorings.Select(r => OrganizeRefactorings(workspace, r));
                }

                return null;
            }

            /// <summary>
            /// Arrange refactorings into groups.
            /// </summary>
            /// <remarks>
            /// Refactorings are returned in priority order determined based on <see cref="ExtensionOrderAttribute"/>.
            /// Priority for all <see cref="SuggestedActionSet"/>s containing refactorings is set to <see cref="SuggestedActionSetPriority.Low"/>
            /// and should show up after fixes but before suppression fixes in the light bulb menu.
            /// </remarks>
            private SuggestedActionSet OrganizeRefactorings(Workspace workspace, CodeRefactoring refactoring)
            {
                var refactoringSuggestedActions = ImmutableArray.CreateBuilder<SuggestedAction>();

                foreach (var a in refactoring.Actions)
                {
                    refactoringSuggestedActions.Add(
                        new CodeRefactoringSuggestedAction(
                            workspace, _subjectBuffer, _owner._editHandler, _owner._waitIndicator, a, refactoring.Provider));
                }

                return new SuggestedActionSet(refactoringSuggestedActions.ToImmutable(), SuggestedActionSetPriority.Low);
            }

            // Cheap asynchronous probe used by the editor to decide whether to
            // even show the light bulb for this span.
            public async Task<bool> HasSuggestedActionsAsync(ISuggestedActionCategorySet requestedActionCategories, SnapshotSpan range, CancellationToken cancellationToken)
            {
                // Explicitly hold onto below fields in locals and use these locals throughout this code path to avoid crashes
                // if these fields happen to be cleared by Dispose() below. This is required since this code path involves
                // code that can run asynchronously from background thread.
                var view = _textView;
                var buffer = _subjectBuffer;
                var provider = _owner;

                if (view == null || buffer == null || provider == null)
                {
                    return false;
                }

                using (var asyncToken = provider._listener.BeginAsyncOperation("HasSuggestedActionsAsync"))
                {
                    var documentAndSnapshot = await GetMatchingDocumentAndSnapshotAsync(range.Snapshot, cancellationToken).ConfigureAwait(false);
                    if (!documentAndSnapshot.HasValue)
                    {
                        // this is here to fail test and see why it is failed.
                        Trace.WriteLine("given range is not current");
                        return false;
                    }

                    var document = documentAndSnapshot.Value.Item1;
                    var workspace = document.Project.Solution.Workspace;
                    var supportsFeatureService = workspace.Services.GetService<IDocumentSupportsFeatureService>();

                    return
                        await HasFixesAsync(
                            supportsFeatureService, requestedActionCategories, provider, document, range,
                            cancellationToken).ConfigureAwait(false) ||
                        await HasRefactoringsAsync(
                            supportsFeatureService, requestedActionCategories, provider, document, buffer, view, range,
                            cancellationToken).ConfigureAwait(false);
                }
            }

            private async Task<bool> HasFixesAsync(
                IDocumentSupportsFeatureService supportsFeatureService,
                ISuggestedActionCategorySet requestedActionCategories,
                SuggestedActionsSourceProvider provider,
                Document document,
                SnapshotSpan range,
                CancellationToken cancellationToken)
            {
                if (provider._codeFixService != null &&
                    supportsFeatureService.SupportsCodeFixes(document) &&
                    requestedActionCategories.Contains(PredefinedSuggestedActionCategoryNames.CodeFix))
                {
                    // We only consider suppressions if lightbulb is asking for everything.
                    // If the light bulb is only asking for code fixes, then we don't consider suppressions.
                    var considerSuppressionFixes = requestedActionCategories.Contains(PredefinedSuggestedActionCategoryNames.Any);
                    var result = await Task.Run(
                        async () => await provider._codeFixService.GetFirstDiagnosticWithFixAsync(
                            document, range.Span.ToTextSpan(), considerSuppressionFixes, cancellationToken).ConfigureAwait(false),
                        cancellationToken).ConfigureAwait(false);

                    if (result.HasFix)
                    {
                        Logger.Log(FunctionId.SuggestedActions_HasSuggestedActionsAsync);
                        return true;
                    }

                    if (result.PartialResult)
                    {
                        // reset solution version number so that we can raise suggested action changed event
                        Volatile.Write(ref _lastSolutionVersionReported, InvalidSolutionVersion);
                        return false;
                    }
                }

                return false;
            }

            private async Task<bool> HasRefactoringsAsync(
                IDocumentSupportsFeatureService supportsFeatureService,
                ISuggestedActionCategorySet requestedActionCategories,
                SuggestedActionsSourceProvider provider,
                Document document,
                ITextBuffer buffer,
                ITextView view,
                SnapshotSpan range,
                CancellationToken cancellationToken)
            {
                var optionService = document.Project.Solution.Workspace.Services.GetService<IOptionService>();

                if (optionService.GetOption(EditorComponentOnOffOptions.CodeRefactorings) &&
                    provider._codeRefactoringService != null &&
                    supportsFeatureService.SupportsRefactorings(document) &&
                    requestedActionCategories.Contains(PredefinedSuggestedActionCategoryNames.Refactoring))
                {
                    TextSpan? selection = null;
                    if (IsForeground())
                    {
                        // This operation needs to happen on UI thread because it needs to access textView.Selection.
                        selection = TryGetCodeRefactoringSelection(buffer, view, range);
                    }
                    else
                    {
                        await InvokeBelowInputPriority(() =>
                        {
                            // This operation needs to happen on UI thread because it needs to access textView.Selection.
                            selection = TryGetCodeRefactoringSelection(buffer, view, range);
                        }).ConfigureAwait(false);
                    }

                    if (!selection.HasValue)
                    {
                        // this is here to fail test and see why it is failed.
                        Trace.WriteLine("given range is not current");
                        return false;
                    }

                    return await Task.Run(
                        async () => await provider._codeRefactoringService.HasRefactoringsAsync(
                            document, selection.Value, cancellationToken).ConfigureAwait(false),
                        cancellationToken).ConfigureAwait(false);
                }

                return false;
            }

            // Maps the view's selection down to the subject buffer and validates it
            // against the requested range. Must be called on the UI thread.
            private static TextSpan? TryGetCodeRefactoringSelection(ITextBuffer buffer, ITextView view, SnapshotSpan range)
            {
                var selectedSpans = view.Selection.SelectedSpans
                    .SelectMany(ss => view.BufferGraph.MapDownToBuffer(ss, SpanTrackingMode.EdgeExclusive, buffer))
                    .Where(ss => !view.IsReadOnlyOnSurfaceBuffer(ss))
                    .ToList();

                // We only support refactorings when there is a single selection in the document.
                if (selectedSpans.Count != 1)
                {
                    return null;
                }

                var translatedSpan = selectedSpans[0].TranslateTo(range.Snapshot, SpanTrackingMode.EdgeInclusive);

                // We only support refactorings when selected span intersects with the span that the light bulb is asking for.
                if (!translatedSpan.IntersectsWith(range))
                {
                    return null;
                }

                return translatedSpan.Span.ToTextSpan();
            }

            // Resolves the Roslyn Document that corresponds to the given editor
            // snapshot; returns null when the snapshot is stale or unaffiliated.
            private static async Task<ValueTuple<Document, ITextSnapshot>?> GetMatchingDocumentAndSnapshotAsync(ITextSnapshot givenSnapshot, CancellationToken cancellationToken)
            {
                var buffer = givenSnapshot.TextBuffer;
                if (buffer == null)
                {
                    return null;
                }

                var workspace = buffer.GetWorkspace();
                if (workspace == null)
                {
                    return null;
                }

                var documentId = workspace.GetDocumentIdInCurrentContext(buffer.AsTextContainer());
                if (documentId == null)
                {
                    return null;
                }

                var document = workspace.CurrentSolution.GetDocument(documentId);
                if (document == null)
                {
                    return null;
                }

                var sourceText = await document.GetTextAsync(cancellationToken).ConfigureAwait(false);
                cancellationToken.ThrowIfCancellationRequested();

                var snapshot = sourceText.FindCorrespondingEditorTextSnapshot();
                if (snapshot == null || snapshot.Version.ReiteratedVersionNumber != givenSnapshot.Version.ReiteratedVersionNumber)
                {
                    // Snapshot no longer matches the document's current text.
                    return null;
                }

                return ValueTuple.Create(document, snapshot);
            }

            private void OnTextViewClosed(object sender, EventArgs e)
            {
                Dispose();
            }

            private void OnWorkspaceChanged(object sender, EventArgs e)
            {
                // REVIEW: this event should give both old and new workspace as argument so that
                // one doesn't need to hold onto workspace in field.

                // remove existing event registration
                if (_workspace != null)
                {
                    _workspace.DocumentActiveContextChanged -= OnActiveContextChanged;
                }

                // REVIEW: why one need to get new workspace from registration? why not just pass in the new workspace?
                // add new event registration
                _workspace = _registration.Workspace;

                if (_workspace != null)
                {
                    _workspace.DocumentActiveContextChanged += OnActiveContextChanged;
                }
            }

            private void OnActiveContextChanged(object sender, DocumentEventArgs e)
            {
                // REVIEW: it would be nice for changed event to pass in both old and new document.
                OnSuggestedActionsChanged(e.Document.Project.Solution.Workspace, e.Document.Id, e.Document.Project.Solution.WorkspaceVersion);
            }

            private void OnDiagnosticsUpdated(object sender, DiagnosticsUpdatedArgs e)
            {
                // document removed case. no reason to raise event
                if (e.Solution == null)
                {
                    return;
                }

                OnSuggestedActionsChanged(e.Workspace, e.DocumentId, e.Solution.WorkspaceVersion);
            }

            private void OnSuggestedActionsChanged(Workspace currentWorkspace, DocumentId currentDocumentId, int solutionVersion, DiagnosticsUpdatedArgs args = null)
            {
                // Explicitly hold onto the _subjectBuffer field in a local and use this local in this function to avoid crashes
                // if this field happens to be cleared by Dispose() below. This is required since this code path involves code
                // that can run on background thread.
                var buffer = _subjectBuffer;
                if (buffer == null)
                {
                    return;
                }

                var workspace = buffer.GetWorkspace();

                // workspace is not ready, nothing to do.
                if (workspace == null || workspace != currentWorkspace)
                {
                    return;
                }

                // Skip if the event is for a different document, or we already reported
                // this solution version (de-dupes redundant change notifications).
                if (currentDocumentId != workspace.GetDocumentIdInCurrentContext(buffer.AsTextContainer()) ||
                    solutionVersion == Volatile.Read(ref _lastSolutionVersionReported))
                {
                    return;
                }

                this.SuggestedActionsChanged?.Invoke(this, EventArgs.Empty);

                Volatile.Write(ref _lastSolutionVersionReported, solutionVersion);
            }

            // Unhooks every event registration made in the constructor/OnWorkspaceChanged
            // and nulls the fields; async code paths above defend against this by
            // copying fields into locals first.
            public void Dispose()
            {
                if (_owner != null)
                {
                    var updateSource = (IDiagnosticUpdateSource)_owner._diagnosticService;
                    updateSource.DiagnosticsUpdated -= OnDiagnosticsUpdated;
                    _owner = null;
                }

                if (_workspace != null)
                {
                    _workspace.DocumentActiveContextChanged -= OnActiveContextChanged;
                    _workspace = null;
                }

                if (_registration != null)
                {
                    _registration.WorkspaceChanged -= OnWorkspaceChanged;
                    _registration = null;
                }

                if (_textView != null)
                {
                    _textView.Closed -= OnTextViewClosed;
                    _textView = null;
                }

                if (_subjectBuffer != null)
                {
                    _subjectBuffer = null;
                }
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Runtime.CompilerServices;

// JIT regression test: the 16-bit SystemTime fields are passed where Int32
// arguments are expected; the stack below the caller's SP is first stomped with
// a 0xFF pattern, so if the JIT fails to properly widen/initialize the high
// word of those argument slots the trailing arguments arrive non-zero and the
// check fails. NoInlining is used throughout so each call frame really exists.
namespace UninitializedHighWord
{
    // 64-byte struct used purely to occupy and pattern-fill stack space.
    public struct StackFiller
    {
        public Int32 Field00;
        public Int32 Field01;
        public Int32 Field02;
        public Int32 Field03;
        public Int32 Field04;
        public Int32 Field05;
        public Int32 Field06;
        public Int32 Field07;
        public Int32 Field08;
        public Int32 Field09;
        public Int32 Field10;
        public Int32 Field11;
        public Int32 Field12;
        public Int32 Field13;
        public Int32 Field14;
        public Int32 Field15;

        // Writes 0xFFFFFFFF into every field; NoInlining keeps the stores real.
        [MethodImpl(MethodImplOptions.NoInlining)]
        public static void FillWithFFPattern(ref StackFiller target)
        {
            target.Field00 = -1;
            target.Field01 = -1;
            target.Field02 = -1;
            target.Field03 = -1;
            target.Field04 = -1;
            target.Field05 = -1;
            target.Field06 = -1;
            target.Field07 = -1;
            target.Field08 = -1;
            target.Field09 = -1;
            target.Field10 = -1;
            target.Field11 = -1;
            target.Field12 = -1;
            target.Field13 = -1;
            target.Field14 = -1;
            target.Field15 = -1;
            return;
        }
    }

    // Eight 16-bit fields; the narrow fields are the point of the test.
    public struct SystemTime
    {
        public short Year;
        public short Month;
        public short DayOfWeek;
        public short Day;
        public short Hour;
        public short Minute;
        public short Second;
        public short Milliseconds;
    }

    public struct RegistryTimeZoneInformation
    {
        public Int32 Bias;
        public Int32 StandardBias;
        public Int32 DaylightBias;
        public SystemTime StandardDate;
        public SystemTime DaylightDate;
    }

    internal static class App
    {
        // Set by CheckArguments: true when all trailing widened arguments were zero.
        private static bool s_fArgumentCheckPassed = false;
        // True while warming up the call path in PrepareMethods, so the real
        // argument check (and its verdict) is skipped during preparation.
        private static bool s_fPreparingMethods = false;

        // Receives the DaylightDate fields widened to Int32. Since the source
        // struct is zero-initialized, hour/minute/second/milliseconds must all be
        // exactly zero; any other bits indicate an uninitialized high word.
        [MethodImpl(MethodImplOptions.NoInlining)]
        private static void CheckArguments(
            Int32 fill,
            Int32 year,
            Int32 month,
            Int32 day,
            Int32 hour,
            Int32 minute,
            Int32 second,
            Int32 milliseconds
            )
        {
            if (App.s_fPreparingMethods)
            {
                return;
            }
            else
            {
                if ((hour == 0) && (minute == 0) && (second == 0) && (milliseconds == 0))
                {
                    App.s_fArgumentCheckPassed = true;
                    Console.WriteLine("Argument check passed. All trailing arguments are zero.");
                }
                else
                {
                    App.s_fArgumentCheckPassed = false;
                    Console.WriteLine(
                        "Argument check failed. Trailing argument values are:\r\n" +
                        " Hour = {0:x8}\r\n" +
                        " Minute = {1:x8}\r\n" +
                        " Second = {2:x8}\r\n" +
                        " Milliseconds = {3:x8}\r\n",
                        hour,
                        minute,
                        second,
                        milliseconds
                        );
                }
                return;
            }
        }

        // Passes the narrow (short) DaylightDate fields into the Int32 parameters
        // of CheckArguments; this is the widening the test exercises. The Year==0
        // guard is always true for a default-initialized struct.
        [MethodImpl(MethodImplOptions.NoInlining)]
        private static void GenerateHalfInitializedArgSlots(
            RegistryTimeZoneInformation timeZoneInformation
            )
        {
            if (timeZoneInformation.DaylightDate.Year == 0)
            {
                App.CheckArguments(
                    1,
                    1,
                    1,
                    1,
                    timeZoneInformation.DaylightDate.Hour,
                    timeZoneInformation.DaylightDate.Minute,
                    timeZoneInformation.DaylightDate.Second,
                    timeZoneInformation.DaylightDate.Milliseconds
                    );
            }
            return;
        }

        // Intentionally empty: its only purpose is to force the large StackFiller
        // arguments to be written into outgoing-argument stack space.
        [MethodImpl(MethodImplOptions.NoInlining)]
        private static void InitializeStack(
            Int32 arg1,
            Int32 arg2,
            Int32 arg3,
            Int32 arg4,
            StackFiller fill1,
            StackFiller fill2,
            StackFiller fill3,
            StackFiller fill4
            )
        {
            return;
        }

        // Floods the stack region below the caller's SP with 0xFF bytes so that a
        // subsequently half-initialized argument slot is guaranteed to be dirty.
        [MethodImpl(MethodImplOptions.NoInlining)]
        private static void StompStackBelowCallerSP(
            )
        {
            var filler = new StackFiller();

            StackFiller.FillWithFFPattern(ref filler);

            App.InitializeStack(
                1,
                1,
                1,
                1,
                filler,
                filler,
                filler,
                filler
                );
            return;
        }

        // Warm-up pass: runs the call chain once (with the check suppressed) so
        // the methods are jitted before the stack is stomped for the real run.
        [MethodImpl(MethodImplOptions.NoInlining)]
        private static void PrepareMethods(
            )
        {
            var timeZoneInformation = new RegistryTimeZoneInformation();

            App.s_fPreparingMethods = true;
            {
                App.GenerateHalfInitializedArgSlots(timeZoneInformation);
            }
            App.s_fPreparingMethods = false;
            return;
        }

        // Returns 100 (test harness success code) on pass, 101 on failure.
        [MethodImpl(MethodImplOptions.NoInlining)]
        private static int RunTest(
            )
        {
            var timeZoneInformation = new RegistryTimeZoneInformation();

            App.StompStackBelowCallerSP();
            App.GenerateHalfInitializedArgSlots(timeZoneInformation);

            if (App.s_fArgumentCheckPassed)
            {
                Console.WriteLine("Passed.");
                return 100;
            }
            else
            {
                Console.WriteLine("Failed.");
                return 101;
            }
        }

        private static int Main()
        {
            App.PrepareMethods();
            return App.RunTest();
        }
    }
}
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Reflection;

namespace AllHttpMethods.Areas.HelpPage
{
    /// <summary>
    /// This class will create an object of a given type and populate it with sample data.
    /// </summary>
    public class ObjectGenerator
    {
        // Number of elements generated for any sample array/collection/dictionary.
        internal const int DefaultCollectionSize = 2;
        private readonly SimpleTypeObjectGenerator SimpleObjectGenerator = new SimpleTypeObjectGenerator();

        /// <summary>
        /// Generates an object for a given type. The type needs to be public, have a public default constructor and settable public properties/fields. Currently it supports the following types:
        /// Simple types: <see cref="int"/>, <see cref="string"/>, <see cref="Enum"/>, <see cref="DateTime"/>, <see cref="Uri"/>, etc.
        /// Complex types: POCO types.
        /// Nullables: <see cref="Nullable{T}"/>.
        /// Arrays: arrays of simple types or complex types.
        /// Key value pairs: <see cref="KeyValuePair{TKey,TValue}"/>
        /// Tuples: <see cref="Tuple{T1}"/>, <see cref="Tuple{T1,T2}"/>, etc
        /// Dictionaries: <see cref="IDictionary{TKey,TValue}"/> or anything deriving from <see cref="IDictionary{TKey,TValue}"/>.
        /// Collections: <see cref="IList{T}"/>, <see cref="IEnumerable{T}"/>, <see cref="ICollection{T}"/>, <see cref="IList"/>, <see cref="IEnumerable"/>, <see cref="ICollection"/> or anything deriving from <see cref="ICollection{T}"/> or <see cref="IList"/>.
        /// Queryables: <see cref="IQueryable"/>, <see cref="IQueryable{T}"/>.
        /// </summary>
        /// <param name="type">The type.</param>
        /// <returns>An object of the given type, or null if it cannot be generated.</returns>
        public object GenerateObject(Type type)
        {
            return GenerateObject(type, new Dictionary<Type, object>());
        }

        // createdObjectReferences tracks complex objects already built on this path
        // so circular object graphs terminate instead of recursing forever.
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Here we just want to return null if anything goes wrong.")]
        private object GenerateObject(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            try
            {
                // Order matters: simple types first, then shape-specific handlers,
                // and finally the generic POCO fallback.
                if (SimpleTypeObjectGenerator.CanGenerateObject(type))
                {
                    return SimpleObjectGenerator.GenerateObject(type);
                }

                if (type.IsArray)
                {
                    return GenerateArray(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type.IsGenericType)
                {
                    return GenerateGenericType(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type == typeof(IDictionary))
                {
                    return GenerateDictionary(typeof(Hashtable), DefaultCollectionSize, createdObjectReferences);
                }

                if (typeof(IDictionary).IsAssignableFrom(type))
                {
                    return GenerateDictionary(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type == typeof(IList) ||
                    type == typeof(IEnumerable) ||
                    type == typeof(ICollection))
                {
                    return GenerateCollection(typeof(ArrayList), DefaultCollectionSize, createdObjectReferences);
                }

                if (typeof(IList).IsAssignableFrom(type))
                {
                    return GenerateCollection(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type == typeof(IQueryable))
                {
                    return GenerateQueryable(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type.IsEnum)
                {
                    return GenerateEnum(type);
                }

                if (type.IsPublic || type.IsNestedPublic)
                {
                    return GenerateComplexObject(type, createdObjectReferences);
                }
            }
            catch
            {
                // Returns null if anything fails
                return null;
            }

            return null;
        }

        // Dispatches on the open generic definition; falls back to POCO handling
        // for public generic types that are not one of the known shapes.
        private static object GenerateGenericType(Type type, int collectionSize, Dictionary<Type, object> createdObjectReferences)
        {
            Type genericTypeDefinition = type.GetGenericTypeDefinition();
            if (genericTypeDefinition == typeof(Nullable<>))
            {
                return GenerateNullable(type, createdObjectReferences);
            }

            if (genericTypeDefinition == typeof(KeyValuePair<,>))
            {
                return GenerateKeyValuePair(type, createdObjectReferences);
            }

            if (IsTuple(genericTypeDefinition))
            {
                return GenerateTuple(type, createdObjectReferences);
            }

            Type[] genericArguments = type.GetGenericArguments();
            if (genericArguments.Length == 1)
            {
                if (genericTypeDefinition == typeof(IList<>) ||
                    genericTypeDefinition == typeof(IEnumerable<>) ||
                    genericTypeDefinition == typeof(ICollection<>))
                {
                    // Interfaces cannot be instantiated; use List<T> as the concrete sample.
                    Type collectionType = typeof(List<>).MakeGenericType(genericArguments);
                    return GenerateCollection(collectionType, collectionSize, createdObjectReferences);
                }

                if (genericTypeDefinition == typeof(IQueryable<>))
                {
                    return GenerateQueryable(type, collectionSize, createdObjectReferences);
                }

                Type closedCollectionType = typeof(ICollection<>).MakeGenericType(genericArguments[0]);
                if (closedCollectionType.IsAssignableFrom(type))
                {
                    return GenerateCollection(type, collectionSize, createdObjectReferences);
                }
            }

            if (genericArguments.Length == 2)
            {
                if (genericTypeDefinition == typeof(IDictionary<,>))
                {
                    Type dictionaryType = typeof(Dictionary<,>).MakeGenericType(genericArguments);
                    return GenerateDictionary(dictionaryType, collectionSize, createdObjectReferences);
                }

                Type closedDictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments[0], genericArguments[1]);
                if (closedDictionaryType.IsAssignableFrom(type))
                {
                    return GenerateDictionary(type, collectionSize, createdObjectReferences);
                }
            }

            if (type.IsPublic || type.IsNestedPublic)
            {
                return GenerateComplexObject(type, createdObjectReferences);
            }

            return null;
        }

        // Builds a Tuple<> by generating each type argument; returns null only when
        // every component failed to generate.
        private static object GenerateTuple(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            Type[] genericArgs = type.GetGenericArguments();
            object[] parameterValues = new object[genericArgs.Length];
            bool failedToCreateTuple = true;
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < genericArgs.Length; i++)
            {
                parameterValues[i] = objectGenerator.GenerateObject(genericArgs[i], createdObjectReferences);
                failedToCreateTuple &= parameterValues[i] == null;
            }
            if (failedToCreateTuple)
            {
                return null;
            }
            object result = Activator.CreateInstance(type, parameterValues);
            return result;
        }

        // True for the open generic definitions of Tuple`1 through Tuple`8.
        private static bool IsTuple(Type genericTypeDefinition)
        {
            return genericTypeDefinition == typeof(Tuple<>) ||
                genericTypeDefinition == typeof(Tuple<,>) ||
                genericTypeDefinition == typeof(Tuple<,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,,,,>);
        }

        private static object GenerateKeyValuePair(Type keyValuePairType, Dictionary<Type, object> createdObjectReferences)
        {
            Type[] genericArgs = keyValuePairType.GetGenericArguments();
            Type typeK = genericArgs[0];
            Type typeV = genericArgs[1];
            ObjectGenerator objectGenerator = new ObjectGenerator();
            object keyObject = objectGenerator.GenerateObject(typeK, createdObjectReferences);
            object valueObject = objectGenerator.GenerateObject(typeV, createdObjectReferences);
            if (keyObject == null && valueObject == null)
            {
                // Failed to create key and values
                return null;
            }
            object result = Activator.CreateInstance(keyValuePairType, keyObject, valueObject);
            return result;
        }

        // Creates a sample array of `size` elements; null when no element could be generated.
        private static object GenerateArray(Type arrayType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            Type type = arrayType.GetElementType();
            Array result = Array.CreateInstance(type, size);
            bool areAllElementsNull = true;
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object element = objectGenerator.GenerateObject(type, createdObjectReferences);
                result.SetValue(element, i);
                areAllElementsNull &= element == null;
            }

            if (areAllElementsNull)
            {
                return null;
            }

            return result;
        }

        // Populates a dictionary via reflection; for non-generic dictionaries the
        // key/value types default to object. Add/Contains are looked up with a
        // fallback name (TryAdd/ContainsKey) to cover both IDictionary shapes.
        private static object GenerateDictionary(Type dictionaryType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            Type typeK = typeof(object);
            Type typeV = typeof(object);
            if (dictionaryType.IsGenericType)
            {
                Type[] genericArgs = dictionaryType.GetGenericArguments();
                typeK = genericArgs[0];
                typeV = genericArgs[1];
            }

            object result = Activator.CreateInstance(dictionaryType);
            MethodInfo addMethod = dictionaryType.GetMethod("Add") ?? dictionaryType.GetMethod("TryAdd");
            MethodInfo containsMethod = dictionaryType.GetMethod("Contains") ?? dictionaryType.GetMethod("ContainsKey");
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object newKey = objectGenerator.GenerateObject(typeK, createdObjectReferences);
                if (newKey == null)
                {
                    // Cannot generate a valid key
                    return null;
                }

                bool containsKey = (bool)containsMethod.Invoke(result, new object[] { newKey });
                if (!containsKey)
                {
                    object newValue = objectGenerator.GenerateObject(typeV, createdObjectReferences);
                    addMethod.Invoke(result, new object[] { newKey, newValue });
                }
            }

            return result;
        }

        // Returns the first declared enum member, or null for an empty enum.
        private static object GenerateEnum(Type enumType)
        {
            Array possibleValues = Enum.GetValues(enumType);
            if (possibleValues.Length > 0)
            {
                return possibleValues.GetValue(0);
            }
            return null;
        }

        // Builds a backing list (or object[] for the non-generic case) and wraps it
        // with Queryable.AsQueryable.
        private static object GenerateQueryable(Type queryableType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            bool isGeneric = queryableType.IsGenericType;
            object list;
            if (isGeneric)
            {
                Type listType = typeof(List<>).MakeGenericType(queryableType.GetGenericArguments());
                list = GenerateCollection(listType, size, createdObjectReferences);
            }
            else
            {
                list = GenerateArray(typeof(object[]), size, createdObjectReferences);
            }
            if (list == null)
            {
                return null;
            }
            if (isGeneric)
            {
                // Bind the generic AsQueryable(IEnumerable<T>) overload via reflection.
                Type argumentType = typeof(IEnumerable<>).MakeGenericType(queryableType.GetGenericArguments());
                MethodInfo asQueryableMethod = typeof(Queryable).GetMethod("AsQueryable", new[] { argumentType });
                return asQueryableMethod.Invoke(null, new[] { list });
            }

            return Queryable.AsQueryable((IEnumerable)list);
        }

        // Fills a collection through its Add method; null when all elements were null.
        private static object GenerateCollection(Type collectionType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            Type type = collectionType.IsGenericType ?
                collectionType.GetGenericArguments()[0] :
                typeof(object);
            object result = Activator.CreateInstance(collectionType);
            MethodInfo addMethod = collectionType.GetMethod("Add");
            bool areAllElementsNull = true;
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object element = objectGenerator.GenerateObject(type, createdObjectReferences);
                addMethod.Invoke(result, new object[] { element });
                areAllElementsNull &= element == null;
            }

            if (areAllElementsNull)
            {
                return null;
            }

            return result;
        }

        // Nullable<T> samples are just a generated T (boxed values already satisfy T?).
        private static object GenerateNullable(Type nullableType, Dictionary<Type, object> createdObjectReferences)
        {
            Type type = nullableType.GetGenericArguments()[0];
            ObjectGenerator objectGenerator = new ObjectGenerator();
            return objectGenerator.GenerateObject(type, createdObjectReferences);
        }

        // Instantiates a POCO (default ctor for classes, zero-init for value types),
        // registers it in createdObjectReferences BEFORE filling members so circular
        // references resolve to the same instance, then populates writable members.
        private static object GenerateComplexObject(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            object result = null;

            if (createdObjectReferences.TryGetValue(type, out result))
            {
                // The object has been created already, just return it. This will handle the circular reference case.
                return result;
            }

            if (type.IsValueType)
            {
                result = Activator.CreateInstance(type);
            }
            else
            {
                ConstructorInfo defaultCtor = type.GetConstructor(Type.EmptyTypes);
                if (defaultCtor == null)
                {
                    // Cannot instantiate the type because it doesn't have a default constructor
                    return null;
                }

                result = defaultCtor.Invoke(new object[0]);
            }
            createdObjectReferences.Add(type, result);
            SetPublicProperties(type, result, createdObjectReferences);
            SetPublicFields(type, result, createdObjectReferences);
            return result;
        }

        private static void SetPublicProperties(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
        {
            PropertyInfo[] properties = type.GetProperties(BindingFlags.Public | BindingFlags.Instance);
            ObjectGenerator objectGenerator = new ObjectGenerator();
            foreach (PropertyInfo property in properties)
            {
                if (property.CanWrite)
                {
                    object propertyValue = objectGenerator.GenerateObject(property.PropertyType, createdObjectReferences);
                    property.SetValue(obj, propertyValue, null);
                }
            }
        }

        private static void SetPublicFields(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
        {
            FieldInfo[] fields = type.GetFields(BindingFlags.Public | BindingFlags.Instance);
            ObjectGenerator objectGenerator = new ObjectGenerator();
            foreach (FieldInfo field in fields)
            {
                object fieldValue = objectGenerator.GenerateObject(field.FieldType, createdObjectReferences);
                field.SetValue(obj, fieldValue);
            }
        }

        // Generates deterministic-looking sample values for primitive/simple types.
        // _index increments per generated value so successive samples differ.
        private class SimpleTypeObjectGenerator
        {
            private long _index = 0;
            private static readonly Dictionary<Type, Func<long, object>> DefaultGenerators = InitializeGenerators();

            [SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity", Justification = "These are simple type factories and cannot be split up.")]
            private static Dictionary<Type, Func<long, object>> InitializeGenerators()
            {
                return new Dictionary<Type, Func<long, object>>
                {
                    { typeof(Boolean), index => true },
                    { typeof(Byte), index => (Byte)64 },
                    { typeof(Char), index => (Char)65 },
                    { typeof(DateTime), index => DateTime.Now },
                    { typeof(DateTimeOffset), index => new DateTimeOffset(DateTime.Now) },
                    { typeof(DBNull), index => DBNull.Value },
                    { typeof(Decimal), index => (Decimal)index },
                    { typeof(Double), index => (Double)(index + 0.1) },
                    { typeof(Guid), index => Guid.NewGuid() },
                    { typeof(Int16), index => (Int16)(index % Int16.MaxValue) },
                    { typeof(Int32), index => (Int32)(index % Int32.MaxValue) },
                    { typeof(Int64), index => (Int64)index },
                    { typeof(Object), index => new object() },
                    { typeof(SByte), index => (SByte)64 },
                    { typeof(Single), index => (Single)(index + 0.1) },
                    {
                        typeof(String), index =>
                        {
                            return String.Format(CultureInfo.CurrentCulture, "sample string {0}", index);
                        }
                    },
                    {
                        typeof(TimeSpan), index =>
                        {
                            return TimeSpan.FromTicks(1234567);
                        }
                    },
                    { typeof(UInt16), index => (UInt16)(index % UInt16.MaxValue) },
                    { typeof(UInt32), index => (UInt32)(index % UInt32.MaxValue) },
                    { typeof(UInt64), index => (UInt64)index },
                    {
                        typeof(Uri), index =>
                        {
                            return new Uri(String.Format(CultureInfo.CurrentCulture, "http://webapihelppage{0}.com", index));
                        }
                    },
                };
            }

            public static bool CanGenerateObject(Type type)
            {
                return DefaultGenerators.ContainsKey(type);
            }

            public object GenerateObject(Type type)
            {
                return DefaultGenerators[type](++_index);
            }
        }
    }
}
//
// Copyright (c) Microsoft and contributors.  All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//

// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.

using System;
using System.Collections.Generic;
using System.Linq;
using Hyak.Common;
using Microsoft.WindowsAzure.Management.ServiceBus.Models;

namespace Microsoft.WindowsAzure.Management.ServiceBus.Models
{
    /// <summary>
    /// Parameters for creating a Service Bus queue. Generated partial class;
    /// each property is a simple wrapper around a private backing field.
    /// </summary>
    public partial class ServiceBusQueueCreateParameters
    {
        private IList<ServiceBusSharedAccessAuthorizationRule> _authorizationRules;

        /// <summary>
        /// Optional. Gets the authorization rules for the description.  (see
        /// http://msdn.microsoft.com/en-us/library/windowsazure/hh780773.aspx
        /// for more information)
        /// </summary>
        public IList<ServiceBusSharedAccessAuthorizationRule> AuthorizationRules
        {
            get { return this._authorizationRules; }
            set { this._authorizationRules = value; }
        }

        private string _autoDeleteOnIdle;

        /// <summary>
        /// Optional. Gets or sets the auto-delete-on-idle setting for the
        /// queue (the idle interval after which the queue is candidate for
        /// automatic deletion — service-side behavior; see the Service Bus
        /// documentation for the exact format).
        /// </summary>
        public string AutoDeleteOnIdle
        {
            get { return this._autoDeleteOnIdle; }
            set { this._autoDeleteOnIdle = value; }
        }

        private CountDetails _countDetails;

        /// <summary>
        /// Optional. Current queue statistics.
        /// </summary>
        public CountDetails CountDetails
        {
            get { return this._countDetails; }
            set { this._countDetails = value; }
        }

        private bool _deadLetteringOnMessageExpiration;

        /// <summary>
        /// Optional. This field controls how the Service Bus handles a message
        /// whose TTL has expired. If it is enabled and a message expires, the
        /// Service Bus moves the message from the queue into the queue's
        /// dead-letter sub-queue. If disabled, message will be permanently
        /// deleted from the queue. Settable only at queue creation time.*
        /// Default: false  (see
        /// http://msdn.microsoft.com/en-us/library/windowsazure/hh780773.aspx
        /// for more information)
        /// </summary>
        public bool DeadLetteringOnMessageExpiration
        {
            get { return this._deadLetteringOnMessageExpiration; }
            set { this._deadLetteringOnMessageExpiration = value; }
        }

        private string _defaultMessageTimeToLive;

        /// <summary>
        /// Optional. Depending on whether DeadLettering is enabled, a message
        /// is automatically moved to the DeadLetterQueue or deleted if it has
        /// been stored in the queue for longer than the specified time. This
        /// value is overwritten by a TTL specified on the message if and only
        /// if the message TTL is smaller than the TTL set on the queue. This
        /// value is immutable after the Queue has been created:* Range: 1
        /// second - TimeSpan.MaxValue* Default: TimeSpan.MaxValue  (see
        /// http://msdn.microsoft.com/en-us/library/windowsazure/hh780773.aspx
        /// for more information)
        /// </summary>
        public string DefaultMessageTimeToLive
        {
            get { return this._defaultMessageTimeToLive; }
            set { this._defaultMessageTimeToLive = value; }
        }

        private string _duplicateDetectionHistoryTimeWindow;

        /// <summary>
        /// Optional. Specifies the time span during which the Service Bus
        /// detects message duplication:* Range: 1 second - 7 days* Default:
        /// 10 minutes  (see
        /// http://msdn.microsoft.com/en-us/library/windowsazure/hh780773.aspx
        /// for more information)
        /// </summary>
        public string DuplicateDetectionHistoryTimeWindow
        {
            get { return this._duplicateDetectionHistoryTimeWindow; }
            set { this._duplicateDetectionHistoryTimeWindow = value; }
        }

        private bool _enableBatchedOperations;

        /// <summary>
        /// Optional. Enables or disables service side batching behavior when
        /// performing operations for the specific queue. When enabled,
        /// service bus will collect/batch multiple operations to the backend
        /// to be more connection efficient. If user wants lower operation
        /// latency then they can disable this feature.  (see
        /// http://msdn.microsoft.com/en-us/library/windowsazure/hh780773.aspx
        /// for more information)
        /// </summary>
        public bool EnableBatchedOperations
        {
            get { return this._enableBatchedOperations; }
            set { this._enableBatchedOperations = value; }
        }

        private string _entityAvailabilityStatus;

        /// <summary>
        /// Optional. The current availability status of the queue.
        /// </summary>
        public string EntityAvailabilityStatus
        {
            get { return this._entityAvailabilityStatus; }
            set { this._entityAvailabilityStatus = value; }
        }

        private bool _isAnonymousAccessible;

        /// <summary>
        /// Optional. Gets whether anonymous access is allowed.  (see
        /// http://msdn.microsoft.com/en-us/library/windowsazure/hh780773.aspx
        /// for more information)
        /// </summary>
        public bool IsAnonymousAccessible
        {
            get { return this._isAnonymousAccessible; }
            set { this._isAnonymousAccessible = value; }
        }

        private string _lockDuration;

        /// <summary>
        /// Optional. Determines the amount of time in seconds in which a
        /// message should be locked for processing by a receiver. After this
        /// period, the message is unlocked and available for consumption by
        /// the next receiver. Settable only at queue creation time:* Range: 0
        /// - 5 minutes. 0 means that the message is not locked* Default: 30
        /// seconds  (see
        /// http://msdn.microsoft.com/en-us/library/windowsazure/hh780773.aspx
        /// for more information)
        /// </summary>
        public string LockDuration
        {
            get { return this._lockDuration; }
            set { this._lockDuration = value; }
        }

        private int _maxDeliveryCount;

        /// <summary>
        /// Optional. The maximum number of times a message SB will try to
        /// deliver before being dead lettered or discarded.  (see
        /// http://msdn.microsoft.com/en-us/library/windowsazure/hh780773.aspx
        /// for more information)
        /// </summary>
        public int MaxDeliveryCount
        {
            get { return this._maxDeliveryCount; }
            set { this._maxDeliveryCount = value; }
        }

        private int _maxSizeInMegabytes;

        /// <summary>
        /// Optional. Specifies the maximum queue size in megabytes. Any
        /// attempt to enqueue a message that will cause the queue to exceed
        /// this value will fail. You can only set this parameter at queue
        /// creation time using the following values: * Range: 1 - 1024 (valid
        /// values are 1024, 2048, 3072, 4096, 5120) * Default: 1*1024 (valid
        /// values are 1024, 2048, 3072, 4096, 5120)  (see
        /// http://msdn.microsoft.com/en-us/library/windowsazure/hh780773.aspx
        /// for more information)
        /// </summary>
        public int MaxSizeInMegabytes
        {
            get { return this._maxSizeInMegabytes; }
            set { this._maxSizeInMegabytes = value; }
        }

        private int _messageCount;

        /// <summary>
        /// Optional. Displays the number of messages currently in the queue.
        /// (see
        /// http://msdn.microsoft.com/en-us/library/windowsazure/hh780773.aspx
        /// for more information)
        /// </summary>
        public int MessageCount
        {
            get { return this._messageCount; }
            set { this._messageCount = value; }
        }

        private string _name;

        /// <summary>
        /// Required. The name of the queue.
        /// </summary>
        public string Name
        {
            get { return this._name; }
            set { this._name = value; }
        }

        private bool _requiresDuplicateDetection;

        /// <summary>
        /// Optional. Settable only at queue creation time.* Default for
        /// durable queue: false  (see
        /// http://msdn.microsoft.com/en-us/library/windowsazure/hh780773.aspx
        /// for more information)
        /// </summary>
        public bool RequiresDuplicateDetection
        {
            get { return this._requiresDuplicateDetection; }
            set { this._requiresDuplicateDetection = value; }
        }

        private bool _requiresSession;

        /// <summary>
        /// Optional. Settable only at queue creation time. If set to true, the
        /// queue will be session-aware and only SessionReceiver will be
        /// supported. Session-aware queues are not supported through REST.*
        /// Default for durable queue: false  (see
        /// http://msdn.microsoft.com/en-us/library/windowsazure/hh780773.aspx
        /// for more information)
        /// </summary>
        public bool RequiresSession
        {
            get { return this._requiresSession; }
            set { this._requiresSession = value; }
        }

        private int _sizeInBytes;

        /// <summary>
        /// Optional. Reflects the actual bytes that messages in the queue
        /// currently occupy toward the queue's quota.* Range: 0
        /// -MaxTopicSizeinMegaBytes  (see
        /// http://msdn.microsoft.com/en-us/library/windowsazure/hh780773.aspx
        /// for more information)
        /// </summary>
        public int SizeInBytes
        {
            get { return this._sizeInBytes; }
            set { this._sizeInBytes = value; }
        }

        private string _status;

        /// <summary>
        /// Optional. Gets or sets the current status of the queue (enabled or
        /// disabled). When a queue is disabled, that queue cannot send or
        /// receive messages.  (see
        /// http://msdn.microsoft.com/en-us/library/windowsazure/hh780773.aspx
        /// for more information)
        /// </summary>
        public string Status
        {
            get { return this._status; }
            set { this._status = value; }
        }

        private bool _supportOrdering;

        /// <summary>
        /// Optional. Gets or sets whether the queue supports ordering.  (see
        /// http://msdn.microsoft.com/en-us/library/windowsazure/hh780773.aspx
        /// for more information)
        /// </summary>
        public bool SupportOrdering
        {
            get { return this._supportOrdering; }
            set { this._supportOrdering = value; }
        }

        /// <summary>
        /// Initializes a new instance of the ServiceBusQueueCreateParameters
        /// class. AuthorizationRules starts as an empty lazy list so callers
        /// can add rules without a null check.
        /// </summary>
        public ServiceBusQueueCreateParameters()
        {
            this.AuthorizationRules = new LazyList<ServiceBusSharedAccessAuthorizationRule>();
        }

        /// <summary>
        /// Initializes a new instance of the ServiceBusQueueCreateParameters
        /// class with required arguments.
        /// </summary>
        /// <exception cref="ArgumentNullException">Thrown when name is null.</exception>
        public ServiceBusQueueCreateParameters(string name)
            : this()
        {
            if (name == null)
            {
                throw new ArgumentNullException("name");
            }
            this.Name = name;
        }
    }
}
namespace iControl {
    using System.Xml.Serialization;
    using System.Web.Services;
    using System.ComponentModel;
    using System.Web.Services.Protocols;
    using System;
    using System.Diagnostics;

    /// <remarks>
    /// Generated SOAP-RPC proxy for the iControl System.Inet service.
    /// Each operation is exposed as a synchronous Invoke wrapper plus the
    /// classic Begin/End asynchronous pair.
    /// </remarks>
    [System.CodeDom.Compiler.GeneratedCodeAttribute("wsdl", "2.0.50727.3038")]
    [System.Diagnostics.DebuggerStepThroughAttribute()]
    [System.ComponentModel.DesignerCategoryAttribute("code")]
    [System.Web.Services.WebServiceBindingAttribute(Name="System.InetBinding", Namespace="urn:iControl")]
    public partial class SystemInet : iControlInterface {
        public SystemInet() {
            // Placeholder endpoint; callers are expected to overwrite Url.
            this.Url = "https://url_to_service";
        }

        //=======================================================================
        // Operations
        //=======================================================================

        //-----------------------------------------------------------------------
        // get_dns_server_address
        //-----------------------------------------------------------------------
        [System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:System/Inet",
            RequestNamespace="urn:iControl:System/Inet", ResponseNamespace="urn:iControl:System/Inet")]
        [return: System.Xml.Serialization.SoapElementAttribute("return")]
        public string [] get_dns_server_address(
        ) {
            object [] results = this.Invoke("get_dns_server_address", new object [0]);
            return ((string [])(results[0]));
        }

        public System.IAsyncResult Beginget_dns_server_address(System.AsyncCallback callback, object asyncState) {
            return this.BeginInvoke("get_dns_server_address", new object[0], callback, asyncState);
        }

        public string [] Endget_dns_server_address(System.IAsyncResult asyncResult) {
            object [] results = this.EndInvoke(asyncResult);
            return ((string [])(results[0]));
        }

        //-----------------------------------------------------------------------
        // get_hostname
        //-----------------------------------------------------------------------
        [System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:System/Inet",
            RequestNamespace="urn:iControl:System/Inet", ResponseNamespace="urn:iControl:System/Inet")]
        [return: System.Xml.Serialization.SoapElementAttribute("return")]
        public string get_hostname(
        ) {
            object [] results = this.Invoke("get_hostname", new object [0]);
            return ((string)(results[0]));
        }

        public System.IAsyncResult Beginget_hostname(System.AsyncCallback callback, object asyncState) {
            return this.BeginInvoke("get_hostname", new object[0], callback, asyncState);
        }

        public string Endget_hostname(System.IAsyncResult asyncResult) {
            object [] results = this.EndInvoke(asyncResult);
            return ((string)(results[0]));
        }

        //-----------------------------------------------------------------------
        // get_ntp_server_address
        //-----------------------------------------------------------------------
        [System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:System/Inet",
            RequestNamespace="urn:iControl:System/Inet", ResponseNamespace="urn:iControl:System/Inet")]
        [return: System.Xml.Serialization.SoapElementAttribute("return")]
        public string [] get_ntp_server_address(
        ) {
            object [] results = this.Invoke("get_ntp_server_address", new object [0]);
            return ((string [])(results[0]));
        }

        public System.IAsyncResult Beginget_ntp_server_address(System.AsyncCallback callback, object asyncState) {
            return this.BeginInvoke("get_ntp_server_address", new object[0], callback, asyncState);
        }

        public string [] Endget_ntp_server_address(System.IAsyncResult asyncResult) {
            object [] results = this.EndInvoke(asyncResult);
            return ((string [])(results[0]));
        }

        //-----------------------------------------------------------------------
        // get_version
        //-----------------------------------------------------------------------
        [System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:System/Inet",
            RequestNamespace="urn:iControl:System/Inet", ResponseNamespace="urn:iControl:System/Inet")]
        [return: System.Xml.Serialization.SoapElementAttribute("return")]
        public string get_version(
        ) {
            object [] results = this.Invoke("get_version", new object [] {
            });
            return ((string)(results[0]));
        }

        public System.IAsyncResult Beginget_version(System.AsyncCallback callback, object asyncState) {
            return this.BeginInvoke("get_version", new object[] {
            }, callback, asyncState);
        }

        public string Endget_version(System.IAsyncResult asyncResult) {
            object [] results = this.EndInvoke(asyncResult);
            return ((string)(results[0]));
        }

        //-----------------------------------------------------------------------
        // hostname_to_ip
        //-----------------------------------------------------------------------
        [System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:System/Inet",
            RequestNamespace="urn:iControl:System/Inet", ResponseNamespace="urn:iControl:System/Inet")]
        [return: System.Xml.Serialization.SoapElementAttribute("return")]
        public string [] hostname_to_ip(
            string [] hostnames
        ) {
            object [] results = this.Invoke("hostname_to_ip", new object [] {
                hostnames});
            return ((string [])(results[0]));
        }

        public System.IAsyncResult Beginhostname_to_ip(string [] hostnames, System.AsyncCallback callback, object asyncState) {
            return this.BeginInvoke("hostname_to_ip", new object[] {
                hostnames}, callback, asyncState);
        }

        public string [] Endhostname_to_ip(System.IAsyncResult asyncResult) {
            object [] results = this.EndInvoke(asyncResult);
            return ((string [])(results[0]));
        }

        //-----------------------------------------------------------------------
        // ip_to_hostname
        //-----------------------------------------------------------------------
        [System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:System/Inet",
            RequestNamespace="urn:iControl:System/Inet", ResponseNamespace="urn:iControl:System/Inet")]
        [return: System.Xml.Serialization.SoapElementAttribute("return")]
        public string [] ip_to_hostname(
            string [] ip_addresses
        ) {
            object [] results = this.Invoke("ip_to_hostname", new object [] {
                ip_addresses});
            return ((string [])(results[0]));
        }

        public System.IAsyncResult Beginip_to_hostname(string [] ip_addresses, System.AsyncCallback callback, object asyncState) {
            return this.BeginInvoke("ip_to_hostname", new object[] {
                ip_addresses}, callback, asyncState);
        }

        public string [] Endip_to_hostname(System.IAsyncResult asyncResult) {
            object [] results = this.EndInvoke(asyncResult);
            return ((string [])(results[0]));
        }

        //-----------------------------------------------------------------------
        // service_name_to_service_number
        //-----------------------------------------------------------------------
        [System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:System/Inet",
            RequestNamespace="urn:iControl:System/Inet", ResponseNamespace="urn:iControl:System/Inet")]
        [return: System.Xml.Serialization.SoapElementAttribute("return")]
        public long [] service_name_to_service_number(
            string [] service_names
        ) {
            object [] results = this.Invoke("service_name_to_service_number", new object [] {
                service_names});
            return ((long [])(results[0]));
        }

        public System.IAsyncResult Beginservice_name_to_service_number(string [] service_names, System.AsyncCallback callback, object asyncState) {
            return this.BeginInvoke("service_name_to_service_number", new object[] {
                service_names}, callback, asyncState);
        }

        public long [] Endservice_name_to_service_number(System.IAsyncResult asyncResult) {
            object [] results = this.EndInvoke(asyncResult);
            return ((long [])(results[0]));
        }

        //-----------------------------------------------------------------------
        // service_number_to_service_name
        //-----------------------------------------------------------------------
        [System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:System/Inet",
            RequestNamespace="urn:iControl:System/Inet", ResponseNamespace="urn:iControl:System/Inet")]
        [return: System.Xml.Serialization.SoapElementAttribute("return")]
        public string [] service_number_to_service_name(
            long [] services
        ) {
            object [] results = this.Invoke("service_number_to_service_name", new object [] {
                services});
            return ((string [])(results[0]));
        }

        public System.IAsyncResult Beginservice_number_to_service_name(long [] services, System.AsyncCallback callback, object asyncState) {
            return this.BeginInvoke("service_number_to_service_name", new object[] {
                services}, callback, asyncState);
        }

        public string [] Endservice_number_to_service_name(System.IAsyncResult asyncResult) {
            object [] results = this.EndInvoke(asyncResult);
            return ((string [])(results[0]));
        }

        //-----------------------------------------------------------------------
        // set_hostname
        //-----------------------------------------------------------------------
        [System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:System/Inet",
            RequestNamespace="urn:iControl:System/Inet", ResponseNamespace="urn:iControl:System/Inet")]
        public void set_hostname(
            string hostname
        ) {
            this.Invoke("set_hostname", new object [] {
                hostname});
        }

        public System.IAsyncResult Beginset_hostname(string hostname, System.AsyncCallback callback, object asyncState) {
            return this.BeginInvoke("set_hostname", new object[] {
                hostname}, callback, asyncState);
        }

        public void Endset_hostname(System.IAsyncResult asyncResult) {
            // EndInvoke must still be called to complete the async operation,
            // even though the operation returns no value.
            object [] results = this.EndInvoke(asyncResult);
        }

        //-----------------------------------------------------------------------
        // set_ntp_server_address
        //-----------------------------------------------------------------------
        [System.Web.Services.Protocols.SoapRpcMethodAttribute("urn:iControl:System/Inet",
            RequestNamespace="urn:iControl:System/Inet", ResponseNamespace="urn:iControl:System/Inet")]
        public void set_ntp_server_address(
            string [] ntp_addresses
        ) {
            this.Invoke("set_ntp_server_address", new object [] {
                ntp_addresses});
        }

        public System.IAsyncResult Beginset_ntp_server_address(string [] ntp_addresses, System.AsyncCallback callback, object asyncState) {
            return this.BeginInvoke("set_ntp_server_address", new object[] {
                ntp_addresses}, callback, asyncState);
        }

        public void Endset_ntp_server_address(System.IAsyncResult asyncResult) {
            // EndInvoke must still be called to complete the async operation.
            object [] results = this.EndInvoke(asyncResult);
        }
    }
//======================================================================= // Enums //======================================================================= //======================================================================= // Structs //======================================================================= }
// Generated Cap'n Proto RPC schema bindings (rpc.capnp). Each struct below is a
// zero-copy view over a CapnpNet.Struct; the numeric data offsets and pointer
// indices encode the Cap'n Proto wire format and must not be edited by hand.
namespace CapnpNet.Rpc
{
    // Top-level RPC message: a tagged union over every RPC operation.
    [global::CapnpNet.PreferredListEncoding(global::CapnpNet.ElementSize.Composite)]
    public struct Message : global::CapnpNet.IStruct
    {
        public const int KNOWN_DATA_WORDS = 1;
        public const int KNOWN_POINTER_WORDS = 1;
        private global::CapnpNet.Struct _s;
        // Allocate a new struct of the known size in the given context / message,
        // or wrap an existing struct view.
        public Message(ref global::CapnpNet.AllocationContext allocContext) : this (allocContext.Allocate(KNOWN_DATA_WORDS, KNOWN_POINTER_WORDS)) { }
        public Message(global::CapnpNet.Message m) : this (m, KNOWN_DATA_WORDS, KNOWN_POINTER_WORDS) { }
        public Message(global::CapnpNet.Message m, ushort dataWords, ushort pointers) : this (m.Allocate(dataWords, pointers)) { }
        public Message(global::CapnpNet.Struct s) { _s = s; }
        global::CapnpNet.Struct global::CapnpNet.IStruct.Struct { get { return _s; } }
        global::CapnpNet.AbsPointer global::CapnpNet.IAbsPointer.Pointer { get { return _s.Pointer; } }
        // Union discriminant; the values are the schema's wire ordinals (note they
        // are not sequential — they reflect the order fields were added to rpc.capnp).
        public enum Union : ushort
        {
            unimplemented = 0,
            abort = 1,
            bootstrap = 8,
            call = 2,
            @return = 3,
            finish = 4,
            resolve = 5,
            release = 6,
            disembargo = 13,
            obsoleteSave = 7,
            obsoleteDelete = 9,
            provide = 10,
            accept = 11,
            join = 12,
        }
        // Is(out T) helpers: return true and yield the payload when the union
        // currently holds that variant; otherwise yield default.
        public bool Is(out Message unimplemented) { var ret = this.which == Union.unimplemented; unimplemented = ret ? this.unimplemented : default (Message); return ret; }
        public bool Is(out Exception abort) { var ret = this.which == Union.abort; abort = ret ? this.abort : default (Exception); return ret; }
        public bool Is(out Bootstrap bootstrap) { var ret = this.which == Union.bootstrap; bootstrap = ret ? this.bootstrap : default (Bootstrap); return ret; }
        public bool Is(out Call call) { var ret = this.which == Union.call; call = ret ? this.call : default (Call); return ret; }
        public bool Is(out Return @return) { var ret = this.which == Union.@return; @return = ret ? this.@return : default (Return); return ret; }
        public bool Is(out Finish finish) { var ret = this.which == Union.finish; finish = ret ? this.finish : default (Finish); return ret; }
        public bool Is(out Resolve resolve) { var ret = this.which == Union.resolve; resolve = ret ? this.resolve : default (Resolve); return ret; }
        public bool Is(out Release release) { var ret = this.which == Union.release; release = ret ? this.release : default (Release); return ret; }
        public bool Is(out Disembargo disembargo) { var ret = this.which == Union.disembargo; disembargo = ret ? this.disembargo : default (Disembargo); return ret; }
        public bool Is(out Provide provide) { var ret = this.which == Union.provide; provide = ret ? this.provide : default (Provide); return ret; }
        public bool Is(out Accept accept) { var ret = this.which == Union.accept; accept = ret ? this.accept : default (Accept); return ret; }
        public bool Is(out Join join) { var ret = this.which == Union.join; join = ret ? this.join : default (Join); return ret; }
        // Discriminant lives at data offset 0 (16-bit).
        public Union which { get { return (Union)_s.ReadUInt16(0); } set { _s.WriteUInt16(0, (ushort)value); } }
        // All variant payloads share pointer slot 0; only the one matching
        // `which` is meaningful. Setters do not update `which`.
        public Message unimplemented { get { return _s.DereferencePointer<Message>(0); } set { _s.WritePointer(0, value); } }
        public Exception abort { get { return _s.DereferencePointer<Exception>(0); } set { _s.WritePointer(0, value); } }
        public Bootstrap bootstrap { get { return _s.DereferencePointer<Bootstrap>(0); } set { _s.WritePointer(0, value); } }
        public Call call { get { return _s.DereferencePointer<Call>(0); } set { _s.WritePointer(0, value); } }
        public Return @return { get { return _s.DereferencePointer<Return>(0); } set { _s.WritePointer(0, value); } }
        public Finish finish { get { return _s.DereferencePointer<Finish>(0); } set { _s.WritePointer(0, value); } }
        public Resolve resolve { get { return _s.DereferencePointer<Resolve>(0); } set { _s.WritePointer(0, value); } }
        public Release release { get { return _s.DereferencePointer<Release>(0); } set { _s.WritePointer(0, value); } }
        public Disembargo disembargo { get { return _s.DereferencePointer<Disembargo>(0); } set { _s.WritePointer(0, value); } }
        public global::CapnpNet.AbsPointer obsoleteSave { get { return _s.DereferencePointer<global::CapnpNet.AbsPointer>(0); } set { _s.WritePointer(0, value); } }
        public global::CapnpNet.AbsPointer obsoleteDelete { get { return _s.DereferencePointer<global::CapnpNet.AbsPointer>(0); } set { _s.WritePointer(0, value); } }
        public Provide provide { get { return _s.DereferencePointer<Provide>(0); } set { _s.WritePointer(0, value); } }
        public Accept accept { get { return _s.DereferencePointer<Accept>(0); } set { _s.WritePointer(0, value); } }
        public Join join { get { return _s.DereferencePointer<Join>(0); } set { _s.WritePointer(0, value); } }
    }

    // Request for the vat's bootstrap (root) capability.
    [global::CapnpNet.PreferredListEncoding(global::CapnpNet.ElementSize.Composite)]
    public struct Bootstrap : global::CapnpNet.IStruct
    {
        public const int KNOWN_DATA_WORDS = 1;
        public const int KNOWN_POINTER_WORDS = 1;
        private global::CapnpNet.Struct _s;
        public Bootstrap(ref global::CapnpNet.AllocationContext allocContext) : this (allocContext.Allocate(KNOWN_DATA_WORDS, KNOWN_POINTER_WORDS)) { }
        public Bootstrap(global::CapnpNet.Message m) : this (m, KNOWN_DATA_WORDS, KNOWN_POINTER_WORDS) { }
        public Bootstrap(global::CapnpNet.Message m, ushort dataWords, ushort pointers) : this (m.Allocate(dataWords, pointers)) { }
        public Bootstrap(global::CapnpNet.Struct s) { _s = s; }
        global::CapnpNet.Struct global::CapnpNet.IStruct.Struct { get { return _s; } }
        global::CapnpNet.AbsPointer global::CapnpNet.IAbsPointer.Pointer { get { return _s.Pointer; } }
        public uint questionId { get { return _s.ReadUInt32(0); } set { _s.WriteUInt32(0, value); } }
        public global::CapnpNet.AbsPointer deprecatedObjectId { get { return _s.DereferencePointer<global::CapnpNet.AbsPointer>(0); } set { _s.WritePointer(0, value); } }
    }

    // A method call on a capability.
    [global::CapnpNet.PreferredListEncoding(global::CapnpNet.ElementSize.Composite)]
    public struct Call : global::CapnpNet.IStruct
    {
        public const int KNOWN_DATA_WORDS = 3;
        public const int KNOWN_POINTER_WORDS = 3;
        private global::CapnpNet.Struct _s;
        public Call(ref global::CapnpNet.AllocationContext allocContext) : this (allocContext.Allocate(KNOWN_DATA_WORDS, KNOWN_POINTER_WORDS)) { }
        public Call(global::CapnpNet.Message m) : this (m, KNOWN_DATA_WORDS, KNOWN_POINTER_WORDS) { }
        public Call(global::CapnpNet.Message m, ushort dataWords, ushort pointers) : this (m.Allocate(dataWords, pointers)) { }
        public Call(global::CapnpNet.Struct s) { _s = s; }
        global::CapnpNet.Struct global::CapnpNet.IStruct.Struct { get { return _s; } }
        global::CapnpNet.AbsPointer global::CapnpNet.IAbsPointer.Pointer { get { return _s.Pointer; } }
        public uint questionId { get { return _s.ReadUInt32(0); } set { _s.WriteUInt32(0, value); } }
        public MessageTarget target { get { return _s.DereferencePointer<MessageTarget>(0); } set { _s.WritePointer(0, value); } }
        public ulong interfaceId { get { return _s.ReadUInt64(1); } set { _s.WriteUInt64(1, value); } }
        public ushort methodId { get { return _s.ReadUInt16(2); } set { _s.WriteUInt16(2, value); } }
        // Bool fields are addressed by bit offset (128 = bit 0 of data word 2).
        public bool allowThirdPartyTailCall { get { return _s.ReadBool(128); } set { _s.WriteBool(128, value); } }
        public Payload @params { get { return _s.DereferencePointer<Payload>(1); } set { _s.WritePointer(1, value); } }
        // Inline group: where the callee should send the results.
        public sendResultsToGroup sendResultsTo => new sendResultsToGroup(_s);
        public struct sendResultsToGroup
        {
            private readonly global::CapnpNet.Struct _s;
            public sendResultsToGroup(global::CapnpNet.Struct s) { _s = s; }
            public enum Union : ushort
            {
                caller = 0,
                yourself = 1,
                thirdParty = 2,
            }
            public Union which { get { return (Union)_s.ReadUInt16(3); } set { _s.WriteUInt16(3, (ushort)value); } }
            public global::CapnpNet.AbsPointer thirdParty { get { return _s.DereferencePointer<global::CapnpNet.AbsPointer>(2); } set { _s.WritePointer(2, value); } }
        }
    }

    // The response to a Call.
    [global::CapnpNet.PreferredListEncoding(global::CapnpNet.ElementSize.Composite)]
    public struct Return : global::CapnpNet.IStruct
    {
        public const int KNOWN_DATA_WORDS = 2;
        public const int KNOWN_POINTER_WORDS = 1;
        private global::CapnpNet.Struct _s;
        public Return(ref global::CapnpNet.AllocationContext allocContext) : this (allocContext.Allocate(KNOWN_DATA_WORDS, KNOWN_POINTER_WORDS)) { }
        public Return(global::CapnpNet.Message m) : this (m, KNOWN_DATA_WORDS, KNOWN_POINTER_WORDS) { }
        public Return(global::CapnpNet.Message m, ushort dataWords, ushort pointers) : this (m.Allocate(dataWords, pointers)) { }
        public Return(global::CapnpNet.Struct s) { _s = s; }
        global::CapnpNet.Struct global::CapnpNet.IStruct.Struct { get { return _s; } }
        global::CapnpNet.AbsPointer global::CapnpNet.IAbsPointer.Pointer { get { return _s.Pointer; } }
        public enum Union : ushort
        {
            results = 0,
            exception = 1,
            canceled = 2,
            resultsSentElsewhere = 3,
            takeFromOtherQuestion = 4,
            acceptFromThirdParty = 5,
        }
        public bool Is(out Payload results) { var ret = this.which == Union.results; results = ret ? this.results : default (Payload); return ret; }
        public bool Is(out Exception exception) { var ret = this.which == Union.exception; exception = ret ? this.exception : default (Exception); return ret; }
        public Union which { get { return (Union)_s.ReadUInt16(3); } set { _s.WriteUInt16(3, (ushort)value); } }
        public uint answerId { get { return _s.ReadUInt32(0); } set { _s.WriteUInt32(0, value); } }
        // Second argument appears to be the field's non-zero schema default
        // (stored XOR'd on the wire) — TODO confirm against CapnpNet.Struct.ReadBool.
        public bool releaseParamCaps { get { return _s.ReadBool(32, true); } set { _s.WriteBool(32, value, true); } }
        public Payload results { get { return _s.DereferencePointer<Payload>(0); } set { _s.WritePointer(0, value); } }
        public Exception exception { get { return _s.DereferencePointer<Exception>(0); } set { _s.WritePointer(0, value); } }
        public uint takeFromOtherQuestion { get { return _s.ReadUInt32(2); } set { _s.WriteUInt32(2, value); } }
        public global::CapnpNet.AbsPointer acceptFromThirdParty { get { return _s.DereferencePointer<global::CapnpNet.AbsPointer>(0); } set { _s.WritePointer(0, value); } }
    }

    // Sent by the caller once it has received all Returns for a question.
    [global::CapnpNet.PreferredListEncoding(global::CapnpNet.ElementSize.Composite)]
    public struct Finish : global::CapnpNet.IStruct
    {
        public const int KNOWN_DATA_WORDS = 1;
        public const int KNOWN_POINTER_WORDS = 0;
        private global::CapnpNet.Struct _s;
        public Finish(ref global::CapnpNet.AllocationContext allocContext) : this (allocContext.Allocate(KNOWN_DATA_WORDS, KNOWN_POINTER_WORDS)) { }
        public Finish(global::CapnpNet.Message m) : this (m, KNOWN_DATA_WORDS, KNOWN_POINTER_WORDS) { }
        public Finish(global::CapnpNet.Message m, ushort dataWords, ushort pointers) : this (m.Allocate(dataWords, pointers)) { }
        public Finish(global::CapnpNet.Struct s) { _s = s; }
        global::CapnpNet.Struct global::CapnpNet.IStruct.Struct { get { return _s; } }
        global::CapnpNet.AbsPointer global::CapnpNet.IAbsPointer.Pointer { get { return _s.Pointer; } }
        public uint questionId { get { return _s.ReadUInt32(0); } set { _s.WriteUInt32(0, value); } }
        public bool releaseResultCaps { get { return _s.ReadBool(32, true); } set { _s.WriteBool(32, value, true); } }
    }

    // Notifies the receiver that a previously-exported promise has resolved.
    [global::CapnpNet.PreferredListEncoding(global::CapnpNet.ElementSize.Composite)]
    public struct Resolve : global::CapnpNet.IStruct
    {
        public const int KNOWN_DATA_WORDS = 1;
        public const int KNOWN_POINTER_WORDS = 1;
        private global::CapnpNet.Struct _s;
        public Resolve(ref global::CapnpNet.AllocationContext allocContext) : this (allocContext.Allocate(KNOWN_DATA_WORDS, KNOWN_POINTER_WORDS)) { }
        public Resolve(global::CapnpNet.Message m) : this (m, KNOWN_DATA_WORDS, KNOWN_POINTER_WORDS) { }
        public Resolve(global::CapnpNet.Message m, ushort dataWords, ushort pointers) : this (m.Allocate(dataWords, pointers)) { }
        public Resolve(global::CapnpNet.Struct s) { _s = s; }
        global::CapnpNet.Struct global::CapnpNet.IStruct.Struct { get { return _s; } }
        global::CapnpNet.AbsPointer global::CapnpNet.IAbsPointer.Pointer { get { return _s.Pointer; } }
        public enum Union : ushort
        {
            cap = 0,
            exception = 1,
        }
        public bool Is(out CapDescriptor cap) { var ret = this.which == Union.cap; cap = ret ? this.cap : default (CapDescriptor); return ret; }
        public bool Is(out Exception exception) { var ret = this.which == Union.exception; exception = ret ? this.exception : default (Exception); return ret; }
        public Union which { get { return (Union)_s.ReadUInt16(2); } set { _s.WriteUInt16(2, (ushort)value); } }
        public uint promiseId { get { return _s.ReadUInt32(0); } set { _s.WriteUInt32(0, value); } }
        public CapDescriptor cap { get { return _s.DereferencePointer<CapDescriptor>(0); } set { _s.WritePointer(0, value); } }
        public Exception exception { get { return _s.DereferencePointer<Exception>(0); } set { _s.WritePointer(0, value); } }
    }

    // Drops references to an imported capability.
    [global::CapnpNet.PreferredListEncoding(global::CapnpNet.ElementSize.Composite)]
    public struct Release : global::CapnpNet.IStruct
    {
        public const int KNOWN_DATA_WORDS = 1;
        public const int KNOWN_POINTER_WORDS = 0;
        private global::CapnpNet.Struct _s;
        public Release(ref global::CapnpNet.AllocationContext allocContext) : this (allocContext.Allocate(KNOWN_DATA_WORDS, KNOWN_POINTER_WORDS)) { }
        public Release(global::CapnpNet.Message m) : this (m, KNOWN_DATA_WORDS, KNOWN_POINTER_WORDS) { }
        public Release(global::CapnpNet.Message m, ushort dataWords, ushort pointers) : this (m.Allocate(dataWords, pointers)) { }
        public Release(global::CapnpNet.Struct s) { _s = s; }
        global::CapnpNet.Struct global::CapnpNet.IStruct.Struct { get { return _s; } }
        global::CapnpNet.AbsPointer global::CapnpNet.IAbsPointer.Pointer { get { return _s.Pointer; } }
        public uint id { get { return _s.ReadUInt32(0); } set { _s.WriteUInt32(0, value); } }
        public uint referenceCount { get { return _s.ReadUInt32(1); } set { _s.WriteUInt32(1, value); } }
    }

    // Flushes a path after promise resolution to preserve E-order call delivery.
    [global::CapnpNet.PreferredListEncoding(global::CapnpNet.ElementSize.Composite)]
    public struct Disembargo : global::CapnpNet.IStruct
    {
        public const int KNOWN_DATA_WORDS = 1;
        public const int KNOWN_POINTER_WORDS = 1;
        private global::CapnpNet.Struct _s;
        public Disembargo(ref global::CapnpNet.AllocationContext allocContext) : this (allocContext.Allocate(KNOWN_DATA_WORDS, KNOWN_POINTER_WORDS)) { }
        public Disembargo(global::CapnpNet.Message m) : this (m, KNOWN_DATA_WORDS, KNOWN_POINTER_WORDS) { }
        public Disembargo(global::CapnpNet.Message m, ushort dataWords, ushort pointers) : this (m.Allocate(dataWords, pointers)) { }
        public Disembargo(global::CapnpNet.Struct s) { _s = s; }
        global::CapnpNet.Struct global::CapnpNet.IStruct.Struct { get { return _s; } }
        global::CapnpNet.AbsPointer global::CapnpNet.IAbsPointer.Pointer { get { return _s.Pointer; } }
        public MessageTarget target { get { return _s.DereferencePointer<MessageTarget>(0); } set { _s.WritePointer(0, value); } }
        // Inline group describing the embargo context.
        public contextGroup context => new contextGroup(_s);
        public struct contextGroup
        {
            private readonly global::CapnpNet.Struct _s;
            public contextGroup(global::CapnpNet.Struct s) { _s = s; }
            public enum Union : ushort
            {
                senderLoopback = 0,
                receiverLoopback = 1,
                accept = 2,
                provide = 3,
            }
            public Union which { get { return (Union)_s.ReadUInt16(2); } set { _s.WriteUInt16(2, (ushort)value); } }
            public uint senderLoopback { get { return _s.ReadUInt32(0); } set { _s.WriteUInt32(0, value); } }
            public uint receiverLoopback { get { return _s.ReadUInt32(0); } set { _s.WriteUInt32(0, value); } }
            public uint provide { get { return _s.ReadUInt32(0); } set { _s.WriteUInt32(0, value); } }
        }
    }

    // Level-3: makes a capability available for pickup by a third party.
    [global::CapnpNet.PreferredListEncoding(global::CapnpNet.ElementSize.Composite)]
    public struct Provide : global::CapnpNet.IStruct
    {
        public const int KNOWN_DATA_WORDS = 1;
        public const int KNOWN_POINTER_WORDS = 2;
        private global::CapnpNet.Struct _s;
        public Provide(ref global::CapnpNet.AllocationContext allocContext) : this (allocContext.Allocate(KNOWN_DATA_WORDS, KNOWN_POINTER_WORDS)) { }
        public Provide(global::CapnpNet.Message m) : this (m, KNOWN_DATA_WORDS, KNOWN_POINTER_WORDS) { }
        public Provide(global::CapnpNet.Message m, ushort dataWords, ushort pointers) : this (m.Allocate(dataWords, pointers)) { }
        public Provide(global::CapnpNet.Struct s) { _s = s; }
        global::CapnpNet.Struct global::CapnpNet.IStruct.Struct { get { return _s; } }
        global::CapnpNet.AbsPointer global::CapnpNet.IAbsPointer.Pointer { get { return _s.Pointer; } }
        public uint questionId { get { return _s.ReadUInt32(0); } set { _s.WriteUInt32(0, value); } }
        public MessageTarget target { get { return _s.DereferencePointer<MessageTarget>(0); } set { _s.WritePointer(0, value); } }
        public global::CapnpNet.AbsPointer recipient { get { return _s.DereferencePointer<global::CapnpNet.AbsPointer>(1); } set { _s.WritePointer(1, value); } }
    }

    // Level-3: picks up a capability previously offered via Provide.
    [global::CapnpNet.PreferredListEncoding(global::CapnpNet.ElementSize.Composite)]
    public struct Accept : global::CapnpNet.IStruct
    {
        public const int KNOWN_DATA_WORDS = 1;
        public const int KNOWN_POINTER_WORDS = 1;
        private global::CapnpNet.Struct _s;
        public Accept(ref global::CapnpNet.AllocationContext allocContext) : this (allocContext.Allocate(KNOWN_DATA_WORDS, KNOWN_POINTER_WORDS)) { }
        public Accept(global::CapnpNet.Message m) : this (m, KNOWN_DATA_WORDS, KNOWN_POINTER_WORDS) { }
        public Accept(global::CapnpNet.Message m, ushort dataWords, ushort pointers) : this (m.Allocate(dataWords, pointers)) { }
        public Accept(global::CapnpNet.Struct s) { _s = s; }
        global::CapnpNet.Struct global::CapnpNet.IStruct.Struct { get { return _s; } }
        global::CapnpNet.AbsPointer global::CapnpNet.IAbsPointer.Pointer { get { return _s.Pointer; } }
        public uint questionId { get { return _s.ReadUInt32(0); } set { _s.WriteUInt32(0, value); } }
        public global::CapnpNet.AbsPointer provision { get { return _s.DereferencePointer<global::CapnpNet.AbsPointer>(0); } set { _s.WritePointer(0, value); } }
        public bool embargo { get { return _s.ReadBool(32); } set { _s.WriteBool(32, value); } }
    }

    // Level-4: joins capabilities hosted by multiple vats.
    [global::CapnpNet.PreferredListEncoding(global::CapnpNet.ElementSize.Composite)]
    public struct Join : global::CapnpNet.IStruct
    {
        public const int KNOWN_DATA_WORDS = 1;
        public const int KNOWN_POINTER_WORDS = 2;
        private global::CapnpNet.Struct _s;
        public Join(ref global::CapnpNet.AllocationContext allocContext) : this (allocContext.Allocate(KNOWN_DATA_WORDS, KNOWN_POINTER_WORDS)) { }
        public Join(global::CapnpNet.Message m) : this (m, KNOWN_DATA_WORDS, KNOWN_POINTER_WORDS) { }
        public Join(global::CapnpNet.Message m, ushort dataWords, ushort pointers) : this (m.Allocate(dataWords, pointers)) { }
        public Join(global::CapnpNet.Struct s) { _s = s; }
        global::CapnpNet.Struct global::CapnpNet.IStruct.Struct { get { return _s; } }
        global::CapnpNet.AbsPointer global::CapnpNet.IAbsPointer.Pointer { get { return _s.Pointer; } }
        public uint questionId { get { return _s.ReadUInt32(0); } set { _s.WriteUInt32(0, value); } }
        public MessageTarget target { get { return _s.DereferencePointer<MessageTarget>(0); } set { _s.WritePointer(0, value); } }
        public global::CapnpNet.AbsPointer keyPart { get { return _s.DereferencePointer<global::CapnpNet.AbsPointer>(1); } set { _s.WritePointer(1, value); } }
    }

    // Identifies the capability a Call/Provide/Join is addressed to: either an
    // entry in the export table or a promised answer to an earlier question.
    [global::CapnpNet.PreferredListEncoding(global::CapnpNet.ElementSize.Composite)]
    public struct MessageTarget : global::CapnpNet.IStruct
    {
        public const int KNOWN_DATA_WORDS = 1;
        public const int KNOWN_POINTER_WORDS = 1;
        private global::CapnpNet.Struct _s;
        public MessageTarget(ref global::CapnpNet.AllocationContext allocContext) : this (allocContext.Allocate(KNOWN_DATA_WORDS, KNOWN_POINTER_WORDS)) { }
        public MessageTarget(global::CapnpNet.Message m) : this (m, KNOWN_DATA_WORDS, KNOWN_POINTER_WORDS) { }
        public MessageTarget(global::CapnpNet.Message m, ushort dataWords, ushort pointers) : this (m.Allocate(dataWords, pointers)) { }
        public MessageTarget(global::CapnpNet.Struct s) { _s = s; }
        global::CapnpNet.Struct global::CapnpNet.IStruct.Struct { get { return _s; } }
        global::CapnpNet.AbsPointer global::CapnpNet.IAbsPointer.Pointer { get { return _s.Pointer; } }
        public enum Union : ushort
        {
            importedCap = 0,
            promisedAnswer = 1,
        }
        public bool Is(out PromisedAnswer promisedAnswer) { var ret = this.which == Union.promisedAnswer; promisedAnswer = ret ? this.promisedAnswer : default (PromisedAnswer); return ret; }
        public Union which { get { return (Union)_s.ReadUInt16(2); } set { _s.WriteUInt16(2, (ushort)value); } }
        public uint importedCap { get { return _s.ReadUInt32(0); } set { _s.WriteUInt32(0, value); } }
        public PromisedAnswer promisedAnswer { get { return _s.DereferencePointer<PromisedAnswer>(0); } set { _s.WritePointer(0, value); } }
    }

    // A message payload: arbitrary content plus the capability table describing
    // every capability pointer embedded in that content.
    [global::CapnpNet.PreferredListEncoding(global::CapnpNet.ElementSize.Composite)]
    public struct Payload : global::CapnpNet.IStruct
    {
        public const int KNOWN_DATA_WORDS = 0;
        public const int KNOWN_POINTER_WORDS = 2;
        private global::CapnpNet.Struct _s;
        public Payload(ref global::CapnpNet.AllocationContext allocContext) : this (allocContext.Allocate(KNOWN_DATA_WORDS, KNOWN_POINTER_WORDS)) { }
        public Payload(global::CapnpNet.Message m) : this (m, KNOWN_DATA_WORDS, KNOWN_POINTER_WORDS) { }
        public Payload(global::CapnpNet.Message m, ushort dataWords, ushort pointers) : this (m.Allocate(dataWords, pointers)) { }
        public Payload(global::CapnpNet.Struct s) { _s = s; }
        global::CapnpNet.Struct global::CapnpNet.IStruct.Struct { get { return _s; } }
        global::CapnpNet.AbsPointer global::CapnpNet.IAbsPointer.Pointer { get { return _s.Pointer; } }
        public global::CapnpNet.AbsPointer content { get { return _s.DereferencePointer<global::CapnpNet.AbsPointer>(0); } set { _s.WritePointer(0, value); } }
        public global::CapnpNet.FlatArray<CapDescriptor> capTable { get { return _s.DereferencePointer<global::CapnpNet.FlatArray<CapDescriptor>>(1); } set { _s.WritePointer(1, value); } }
    }

    // Describes one capability in a Payload's capability table.
    [global::CapnpNet.PreferredListEncoding(global::CapnpNet.ElementSize.Composite)]
    public struct CapDescriptor : global::CapnpNet.IStruct
    {
        public const int KNOWN_DATA_WORDS = 1;
        public const int KNOWN_POINTER_WORDS = 1;
        private global::CapnpNet.Struct _s;
        public CapDescriptor(ref global::CapnpNet.AllocationContext allocContext) : this (allocContext.Allocate(KNOWN_DATA_WORDS, KNOWN_POINTER_WORDS)) { }
        public CapDescriptor(global::CapnpNet.Message m) : this (m, KNOWN_DATA_WORDS, KNOWN_POINTER_WORDS) { }
        public CapDescriptor(global::CapnpNet.Message m, ushort dataWords, ushort pointers) : this (m.Allocate(dataWords, pointers)) { }
        public CapDescriptor(global::CapnpNet.Struct s) { _s = s; }
        global::CapnpNet.Struct global::CapnpNet.IStruct.Struct { get { return _s; } }
        global::CapnpNet.AbsPointer global::CapnpNet.IAbsPointer.Pointer { get { return _s.Pointer; } }
        public enum Union : ushort
        {
            none = 0,
            senderHosted = 1,
            senderPromise = 2,
            receiverHosted = 3,
            receiverAnswer = 4,
            thirdPartyHosted = 5,
        }
        public bool Is(out PromisedAnswer receiverAnswer) { var ret = this.which == Union.receiverAnswer; receiverAnswer = ret ? this.receiverAnswer : default (PromisedAnswer); return ret; }
        public bool Is(out ThirdPartyCapDescriptor thirdPartyHosted) { var ret = this.which == Union.thirdPartyHosted; thirdPartyHosted = ret ? this.thirdPartyHosted : default (ThirdPartyCapDescriptor); return ret; }
        public Union which { get { return (Union)_s.ReadUInt16(0); } set { _s.WriteUInt16(0, (ushort)value); } }
        // The three id variants overlay the same 32-bit slot at data offset 1.
        public uint senderHosted { get { return _s.ReadUInt32(1); } set { _s.WriteUInt32(1, value); } }
        public uint senderPromise { get { return _s.ReadUInt32(1); } set { _s.WriteUInt32(1, value); } }
        public uint receiverHosted { get { return _s.ReadUInt32(1); } set { _s.WriteUInt32(1, value); } }
        public PromisedAnswer receiverAnswer { get { return _s.DereferencePointer<PromisedAnswer>(0); } set { _s.WritePointer(0, value); } }
        public ThirdPartyCapDescriptor thirdPartyHosted { get { return _s.DereferencePointer<ThirdPartyCapDescriptor>(0); } set { _s.WritePointer(0, value); } }
    }

    // References a value that will appear in a future answer, reached by
    // applying a sequence of transform ops to that answer's root.
    [global::CapnpNet.PreferredListEncoding(global::CapnpNet.ElementSize.Composite)]
    public struct PromisedAnswer : global::CapnpNet.IStruct
    {
        public const int KNOWN_DATA_WORDS = 1;
        public const int KNOWN_POINTER_WORDS = 1;
        private global::CapnpNet.Struct _s;
        public PromisedAnswer(ref global::CapnpNet.AllocationContext allocContext) : this (allocContext.Allocate(KNOWN_DATA_WORDS, KNOWN_POINTER_WORDS)) { }
        public PromisedAnswer(global::CapnpNet.Message m) : this (m, KNOWN_DATA_WORDS, KNOWN_POINTER_WORDS) { }
        public PromisedAnswer(global::CapnpNet.Message m, ushort dataWords, ushort pointers) : this (m.Allocate(dataWords, pointers)) { }
        public PromisedAnswer(global::CapnpNet.Struct s) { _s = s; }
        global::CapnpNet.Struct global::CapnpNet.IStruct.Struct { get { return _s; } }
        global::CapnpNet.AbsPointer global::CapnpNet.IAbsPointer.Pointer { get { return _s.Pointer; } }
        public uint questionId { get { return _s.ReadUInt32(0); } set { _s.WriteUInt32(0, value); } }
        public global::CapnpNet.FlatArray<Op> transform { get { return _s.DereferencePointer<global::CapnpNet.FlatArray<Op>>(0); } set { _s.WritePointer(0, value); } }
        // One step of the transform path (currently only pointer-field lookup).
        [global::CapnpNet.PreferredListEncoding(global::CapnpNet.ElementSize.Composite)]
        public struct Op : global::CapnpNet.IStruct
        {
            public const int KNOWN_DATA_WORDS = 1;
            public const int KNOWN_POINTER_WORDS = 0;
            private global::CapnpNet.Struct _s;
            public Op(ref global::CapnpNet.AllocationContext allocContext) : this (allocContext.Allocate(KNOWN_DATA_WORDS, KNOWN_POINTER_WORDS)) { }
            public Op(global::CapnpNet.Message m) : this (m, KNOWN_DATA_WORDS, KNOWN_POINTER_WORDS) { }
            public Op(global::CapnpNet.Message m, ushort dataWords, ushort pointers) : this (m.Allocate(dataWords, pointers)) { }
            public Op(global::CapnpNet.Struct s) { _s = s; }
            global::CapnpNet.Struct global::CapnpNet.IStruct.Struct { get { return _s; } }
            global::CapnpNet.AbsPointer global::CapnpNet.IAbsPointer.Pointer { get { return _s.Pointer; } }
            public enum Union : ushort
            {
                noop = 0,
                getPointerField = 1,
            }
            public Union which { get { return (Union)_s.ReadUInt16(0); } set { _s.WriteUInt16(0, (ushort)value); } }
            public ushort getPointerField { get { return _s.ReadUInt16(1); } set { _s.WriteUInt16(1, value); } }
        }
    }

    // Identifies a capability hosted by a third-party vat, plus a proxied
    // "vine" export usable by level-1 receivers.
    [global::CapnpNet.PreferredListEncoding(global::CapnpNet.ElementSize.Composite)]
    public struct ThirdPartyCapDescriptor : global::CapnpNet.IStruct
    {
        public const int KNOWN_DATA_WORDS = 1;
        public const int KNOWN_POINTER_WORDS = 1;
        private global::CapnpNet.Struct _s;
        public ThirdPartyCapDescriptor(ref global::CapnpNet.AllocationContext allocContext) : this (allocContext.Allocate(KNOWN_DATA_WORDS, KNOWN_POINTER_WORDS)) { }
        public ThirdPartyCapDescriptor(global::CapnpNet.Message m) : this (m, KNOWN_DATA_WORDS, KNOWN_POINTER_WORDS) { }
        public ThirdPartyCapDescriptor(global::CapnpNet.Message m, ushort dataWords, ushort pointers) : this (m.Allocate(dataWords, pointers)) { }
        public ThirdPartyCapDescriptor(global::CapnpNet.Struct s) { _s = s; }
        global::CapnpNet.Struct global::CapnpNet.IStruct.Struct { get { return _s; } }
        global::CapnpNet.AbsPointer global::CapnpNet.IAbsPointer.Pointer { get { return _s.Pointer; } }
        public global::CapnpNet.AbsPointer id { get { return _s.DereferencePointer<global::CapnpNet.AbsPointer>(0); } set { _s.WritePointer(0, value); } }
        public uint vineId { get { return _s.ReadUInt32(0); } set { _s.WriteUInt32(0, value); } }
    }

    // An RPC-level error report (used by abort, Return.exception, Resolve.exception).
    [global::CapnpNet.PreferredListEncoding(global::CapnpNet.ElementSize.Composite)]
    public struct Exception : global::CapnpNet.IStruct
    {
        public const int KNOWN_DATA_WORDS = 1;
        public const int KNOWN_POINTER_WORDS = 1;
        private global::CapnpNet.Struct _s;
        public Exception(ref global::CapnpNet.AllocationContext allocContext) : this (allocContext.Allocate(KNOWN_DATA_WORDS, KNOWN_POINTER_WORDS)) { }
        public Exception(global::CapnpNet.Message m) : this (m, KNOWN_DATA_WORDS, KNOWN_POINTER_WORDS) { }
        public Exception(global::CapnpNet.Message m, ushort dataWords, ushort pointers) : this (m.Allocate(dataWords, pointers)) { }
        public Exception(global::CapnpNet.Struct s) { _s = s; }
        global::CapnpNet.Struct global::CapnpNet.IStruct.Struct { get { return _s; } }
        global::CapnpNet.AbsPointer global::CapnpNet.IAbsPointer.Pointer { get { return _s.Pointer; } }
        public global::CapnpNet.Text reason { get { return _s.DereferencePointer<global::CapnpNet.Text>(0); } set { _s.WritePointer(0, value); } }
        public Type type { get { return (Type)_s.ReadUInt16(2); } set { _s.WriteUInt16(2, (ushort)value); } }
        public bool obsoleteIsCallersFault { get { return _s.ReadBool(0); } set { _s.WriteBool(0, value); } }
        public ushort obsoleteDurability { get { return _s.ReadUInt16(1); } set { _s.WriteUInt16(1, value); } }
        public enum Type : ushort
        {
            failed = 0,
            overloaded = 1,
            disconnected = 2,
            unimplemented = 3
        }
    }
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System; using System.Collections.Generic; using System.Collections.Immutable; using System.ComponentModel.Composition; using System.Linq; using System.Threading; using System.Threading.Tasks; using Microsoft.CodeAnalysis; using Microsoft.CodeAnalysis.Diagnostics; using Microsoft.CodeAnalysis.Notification; using Microsoft.CodeAnalysis.Shared.TestHooks; using Microsoft.VisualStudio.LanguageServices.Implementation.ProjectSystem; using Microsoft.VisualStudio.LanguageServices.Implementation.Venus; using Microsoft.VisualStudio.Shell; using Roslyn.Utilities; namespace Microsoft.VisualStudio.LanguageServices.Implementation.TaskList { [Export(typeof(ExternalErrorDiagnosticUpdateSource))] internal class ExternalErrorDiagnosticUpdateSource : IDiagnosticUpdateSource { private readonly Workspace _workspace; private readonly IDiagnosticAnalyzerService _diagnosticService; private readonly IGlobalOperationNotificationService _notificationService; private readonly SimpleTaskQueue _taskQueue; private readonly IAsynchronousOperationListener _listener; private InprogressState _state = null; private ImmutableArray<DiagnosticData> _lastBuiltResult = ImmutableArray<DiagnosticData>.Empty; [ImportingConstructor] public ExternalErrorDiagnosticUpdateSource( VisualStudioWorkspaceImpl workspace, IDiagnosticAnalyzerService diagnosticService, IDiagnosticUpdateSourceRegistrationService registrationService, [ImportMany] IEnumerable<Lazy<IAsynchronousOperationListener, FeatureMetadata>> asyncListeners) : this(workspace, diagnosticService, registrationService, new AggregateAsynchronousOperationListener(asyncListeners, FeatureAttribute.ErrorList)) { Contract.Requires(!KnownUIContexts.SolutionBuildingContext.IsActive); KnownUIContexts.SolutionBuildingContext.UIContextChanged += OnSolutionBuild; } /// <summary> /// internal for testing /// 
</summary> internal ExternalErrorDiagnosticUpdateSource( Workspace workspace, IDiagnosticAnalyzerService diagnosticService, IDiagnosticUpdateSourceRegistrationService registrationService, IAsynchronousOperationListener listener) { // use queue to serialize work. no lock needed _taskQueue = new SimpleTaskQueue(TaskScheduler.Default); _listener = listener; _workspace = workspace; _workspace.WorkspaceChanged += OnWorkspaceChanged; _diagnosticService = diagnosticService; _notificationService = _workspace.Services.GetService<IGlobalOperationNotificationService>(); registrationService.Register(this); } public event EventHandler<bool> BuildStarted; public event EventHandler<DiagnosticsUpdatedArgs> DiagnosticsUpdated; public bool IsInProgress => _state != null; public ImmutableArray<DiagnosticData> GetBuildErrors() { return _lastBuiltResult; } public bool SupportedDiagnosticId(ProjectId projectId, string id) { return _state?.SupportedDiagnosticId(projectId, id) ?? false; } public void ClearErrors(ProjectId projectId) { // capture state if it exists var state = _state; var asyncToken = _listener.BeginAsyncOperation("ClearErrors"); _taskQueue.ScheduleTask(() => { // record the project as built only if we are in build. // otherwise (such as closing solution or removing project), no need to record it state?.Built(projectId); ClearProjectErrors(state?.Solution ?? 
_workspace.CurrentSolution, projectId); }).CompletesAsyncOperation(asyncToken); } private void OnWorkspaceChanged(object sender, WorkspaceChangeEventArgs e) { switch (e.Kind) { case WorkspaceChangeKind.SolutionAdded: case WorkspaceChangeKind.SolutionRemoved: case WorkspaceChangeKind.SolutionCleared: case WorkspaceChangeKind.SolutionReloaded: { var asyncToken = _listener.BeginAsyncOperation("OnSolutionChanged"); _taskQueue.ScheduleTask(() => e.OldSolution.ProjectIds.Do(p => ClearProjectErrors(e.OldSolution, p))).CompletesAsyncOperation(asyncToken); break; } case WorkspaceChangeKind.ProjectRemoved: case WorkspaceChangeKind.ProjectReloaded: { var asyncToken = _listener.BeginAsyncOperation("OnProjectChanged"); _taskQueue.ScheduleTask(() => ClearProjectErrors(e.OldSolution, e.ProjectId)).CompletesAsyncOperation(asyncToken); break; } case WorkspaceChangeKind.DocumentRemoved: case WorkspaceChangeKind.DocumentReloaded: { var asyncToken = _listener.BeginAsyncOperation("OnDocumentRemoved"); _taskQueue.ScheduleTask(() => ClearDocumentErrors(e.OldSolution, e.ProjectId, e.DocumentId)).CompletesAsyncOperation(asyncToken); break; } case WorkspaceChangeKind.ProjectAdded: case WorkspaceChangeKind.DocumentAdded: case WorkspaceChangeKind.DocumentChanged: case WorkspaceChangeKind.ProjectChanged: case WorkspaceChangeKind.SolutionChanged: case WorkspaceChangeKind.AdditionalDocumentAdded: case WorkspaceChangeKind.AdditionalDocumentRemoved: case WorkspaceChangeKind.AdditionalDocumentReloaded: case WorkspaceChangeKind.AdditionalDocumentChanged: break; default: Contract.Fail("Unknown workspace events"); break; } } internal void OnSolutionBuild(object sender, UIContextChangedEventArgs e) { if (e.Activated) { // build just started, create the state and fire build in progress event. var state = GetOrCreateInprogressState(); return; } // get local copy of inprogress state var inprogressState = _state; // building is done. reset the state. 
Interlocked.CompareExchange(ref _state, null, inprogressState); // enqueue build/live sync in the queue. var asyncToken = _listener.BeginAsyncOperation("OnSolutionBuild"); _taskQueue.ScheduleTask(async () => { // nothing to do if (inprogressState == null) { return; } _lastBuiltResult = inprogressState.GetBuildDiagnostics(); // we are about to update live analyzer data using one from build. // pause live analyzer using (var operation = _notificationService.Start("BuildDone")) { Func<DiagnosticData, bool> liveDiagnosticChecker = d => { // REVIEW: we probably need a better design on de-duplicating live and build errors. or don't de-dup at all. // for now, we are special casing compiler error case. var project = inprogressState.Solution.GetProject(d.ProjectId); if (project == null) { // project doesn't exist return false; } // REVIEW: current design is that we special case compiler analyzer case and we accept only document level // diagnostic as live. otherwise, we let them be build errors. we changed compiler analyzer accordingly as well // so that it doesn't report project level diagnostic as live errors. 
if (_diagnosticService.IsCompilerDiagnostic(project.Language, d) && d.DocumentId == null) { // compiler error but project level error return false; } if (inprogressState.SupportedDiagnosticId(d.ProjectId, d.Id)) { return true; } return false; }; var diagnosticService = _diagnosticService as DiagnosticAnalyzerService; if (diagnosticService != null) { await CleanupAllLiveErrorsIfNeededAsync(diagnosticService, inprogressState.Solution, inprogressState).ConfigureAwait(false); await SyncBuildErrorsAndReportAsync(diagnosticService, inprogressState.Solution, inprogressState.GetLiveDiagnosticsPerProject(liveDiagnosticChecker)).ConfigureAwait(false); } inprogressState.Done(); } }).CompletesAsyncOperation(asyncToken); } private async System.Threading.Tasks.Task CleanupAllLiveErrorsIfNeededAsync(DiagnosticAnalyzerService diagnosticService, Solution solution, InprogressState state) { if (_workspace.Options.GetOption(InternalDiagnosticsOptions.BuildErrorIsTheGod)) { await CleanupAllLiveErrors(diagnosticService, solution.ProjectIds).ConfigureAwait(false); return; } if (_workspace.Options.GetOption(InternalDiagnosticsOptions.ClearLiveErrorsForProjectBuilt)) { await CleanupAllLiveErrors(diagnosticService, state.GetProjectsBuilt(solution)).ConfigureAwait(false); return; } await CleanupAllLiveErrors(diagnosticService, state.GetProjectsWithoutErrors(solution)).ConfigureAwait(false); return; } private System.Threading.Tasks.Task CleanupAllLiveErrors(DiagnosticAnalyzerService diagnosticService, IEnumerable<ProjectId> projects) { var map = projects.ToImmutableDictionary(p => p, _ => ImmutableArray<DiagnosticData>.Empty); return diagnosticService.SynchronizeWithBuildAsync(_workspace, map); } private async System.Threading.Tasks.Task SyncBuildErrorsAndReportAsync( DiagnosticAnalyzerService diagnosticService, Solution solution, ImmutableDictionary<ProjectId, ImmutableArray<DiagnosticData>> map) { // make those errors live errors await diagnosticService.SynchronizeWithBuildAsync(_workspace, 
map).ConfigureAwait(false); // raise events for ones left-out var buildErrors = GetBuildErrors().Except(map.Values.SelectMany(v => v)).GroupBy(k => k.DocumentId); foreach (var group in buildErrors) { if (group.Key == null) { foreach (var projectGroup in group.GroupBy(g => g.ProjectId)) { Contract.ThrowIfNull(projectGroup.Key); ReportBuildErrors(projectGroup.Key, solution, projectGroup.ToImmutableArray()); } continue; } ReportBuildErrors(group.Key, solution, group.ToImmutableArray()); } } private void ReportBuildErrors<T>(T item, Solution solution, ImmutableArray<DiagnosticData> buildErrors) { var projectId = item as ProjectId; if (projectId != null) { RaiseDiagnosticsCreated(projectId, solution, projectId, null, buildErrors); return; } // must be not null var documentId = item as DocumentId; RaiseDiagnosticsCreated(documentId, solution, documentId.ProjectId, documentId, buildErrors); } private void ClearProjectErrors(Solution solution, ProjectId projectId) { // remove all project errors RaiseDiagnosticsRemoved(projectId, solution, projectId, documentId: null); var project = solution.GetProject(projectId); if (project == null) { return; } // remove all document errors foreach (var documentId in project.DocumentIds) { ClearDocumentErrors(solution, projectId, documentId); } } private void ClearDocumentErrors(Solution solution, ProjectId projectId, DocumentId documentId) { RaiseDiagnosticsRemoved(documentId, solution, projectId, documentId); } public void AddNewErrors(ProjectId projectId, DiagnosticData diagnostic) { // capture state that will be processed in background thread. var state = GetOrCreateInprogressState(); var asyncToken = _listener.BeginAsyncOperation("Project New Errors"); _taskQueue.ScheduleTask(() => { state.AddError(projectId, diagnostic); }).CompletesAsyncOperation(asyncToken); } public void AddNewErrors(DocumentId documentId, DiagnosticData diagnostic) { // capture state that will be processed in background thread. 
var state = GetOrCreateInprogressState(); var asyncToken = _listener.BeginAsyncOperation("Document New Errors"); _taskQueue.ScheduleTask(() => { state.AddError(documentId, diagnostic); }).CompletesAsyncOperation(asyncToken); } public void AddNewErrors( ProjectId projectId, HashSet<DiagnosticData> projectErrors, Dictionary<DocumentId, HashSet<DiagnosticData>> documentErrorMap) { // capture state that will be processed in background thread var state = GetOrCreateInprogressState(); var asyncToken = _listener.BeginAsyncOperation("Project New Errors"); _taskQueue.ScheduleTask(() => { foreach (var kv in documentErrorMap) { state.AddErrors(kv.Key, kv.Value); } state.AddErrors(projectId, projectErrors); }).CompletesAsyncOperation(asyncToken); } private InprogressState GetOrCreateInprogressState() { if (_state == null) { // here, we take current snapshot of solution when the state is first created. and through out this code, we use this snapshot. // since we have no idea what actual snapshot of solution the out of proc build has picked up, it doesn't remove the race we can have // between build and diagnostic service, but this at least make us to consistent inside of our code. 
Interlocked.CompareExchange(ref _state, new InprogressState(this, _workspace.CurrentSolution), null); } return _state; } private void RaiseDiagnosticsCreated(object id, Solution solution, ProjectId projectId, DocumentId documentId, ImmutableArray<DiagnosticData> items) { DiagnosticsUpdated?.Invoke(this, DiagnosticsUpdatedArgs.DiagnosticsCreated( CreateArgumentKey(id), _workspace, solution, projectId, documentId, items)); } private void RaiseDiagnosticsRemoved(object id, Solution solution, ProjectId projectId, DocumentId documentId) { DiagnosticsUpdated?.Invoke(this, DiagnosticsUpdatedArgs.DiagnosticsRemoved( CreateArgumentKey(id), _workspace, solution, projectId, documentId)); } private static ArgumentKey CreateArgumentKey(object id) => new ArgumentKey(id); private void RaiseBuildStarted(bool started) { BuildStarted?.Invoke(this, started); } #region not supported public bool SupportGetDiagnostics { get { return false; } } public ImmutableArray<DiagnosticData> GetDiagnostics( Workspace workspace, ProjectId projectId, DocumentId documentId, object id, bool includeSuppressedDiagnostics = false, CancellationToken cancellationToken = default(CancellationToken)) { return ImmutableArray<DiagnosticData>.Empty; } #endregion private class InprogressState { private readonly ExternalErrorDiagnosticUpdateSource _owner; private readonly Solution _solution; private readonly HashSet<ProjectId> _builtProjects = new HashSet<ProjectId>(); private readonly Dictionary<ProjectId, HashSet<DiagnosticData>> _projectMap = new Dictionary<ProjectId, HashSet<DiagnosticData>>(); private readonly Dictionary<DocumentId, HashSet<DiagnosticData>> _documentMap = new Dictionary<DocumentId, HashSet<DiagnosticData>>(); private readonly Dictionary<ProjectId, HashSet<string>> _diagnosticIdMap = new Dictionary<ProjectId, HashSet<string>>(); public InprogressState(ExternalErrorDiagnosticUpdateSource owner, Solution solution) { _owner = owner; _solution = solution; // let people know build has started // 
TODO: to be more accurate, it probably needs to be counted. but for now, // I think the way it is doing probably enough. _owner.RaiseBuildStarted(started: true); } public Solution Solution => _solution; public void Done() { _owner.RaiseBuildStarted(started: false); } public bool SupportedDiagnosticId(ProjectId projectId, string id) { HashSet<string> ids; if (_diagnosticIdMap.TryGetValue(projectId, out ids)) { return ids.Contains(id); } // set ids set var map = new HashSet<string>(); _diagnosticIdMap.Add(projectId, map); var project = _solution.GetProject(projectId); if (project == null) { // projectId no longer exist, return false; return false; } var descriptorMap = _owner._diagnosticService.GetDiagnosticDescriptors(project); map.UnionWith(descriptorMap.Values.SelectMany(v => v.Select(d => d.Id))); return map.Contains(id); } public ImmutableArray<DiagnosticData> GetBuildDiagnostics() { return ImmutableArray.CreateRange(_projectMap.Values.SelectMany(d => d).Concat(_documentMap.Values.SelectMany(d => d))); } public void Built(ProjectId projectId) { _builtProjects.Add(projectId); } public IEnumerable<ProjectId> GetProjectsBuilt(Solution solution) { return solution.ProjectIds.Where(p => _builtProjects.Contains(p)); } public IEnumerable<ProjectId> GetProjectsWithErrors(Solution solution) { return GetProjectIds().Where(p => solution.GetProject(p) != null); } public IEnumerable<ProjectId> GetProjectsWithoutErrors(Solution solution) { return GetProjectsBuilt(solution).Except(GetProjectsWithErrors(solution)); } public ImmutableDictionary<ProjectId, ImmutableArray<DiagnosticData>> GetLiveDiagnosticsPerProject(Func<DiagnosticData, bool> liveDiagnosticChecker) { var builder = ImmutableDictionary.CreateBuilder<ProjectId, ImmutableArray<DiagnosticData>>(); foreach (var projectId in GetProjectIds()) { var diagnostics = ImmutableArray.CreateRange( _projectMap.Where(kv => kv.Key == projectId).SelectMany(kv => kv.Value).Concat( _documentMap.Where(kv => kv.Key.ProjectId == 
projectId).SelectMany(kv => kv.Value)).Where(liveDiagnosticChecker)); builder.Add(projectId, diagnostics); } return builder.ToImmutable(); } public void AddErrors(DocumentId key, HashSet<DiagnosticData> diagnostics) { AddErrors(_documentMap, key, diagnostics); } public void AddErrors(ProjectId key, HashSet<DiagnosticData> diagnostics) { AddErrors(_projectMap, key, diagnostics); } public void AddError(DocumentId key, DiagnosticData diagnostic) { AddError(_documentMap, key, diagnostic); } public void AddError(ProjectId key, DiagnosticData diagnostic) { AddError(_projectMap, key, diagnostic); } private void AddErrors<T>(Dictionary<T, HashSet<DiagnosticData>> map, T key, HashSet<DiagnosticData> diagnostics) { var errors = GetErrorSet(map, key); errors.UnionWith(diagnostics); } private void AddError<T>(Dictionary<T, HashSet<DiagnosticData>> map, T key, DiagnosticData diagnostic) { var errors = GetErrorSet(map, key); errors.Add(diagnostic); } private IEnumerable<ProjectId> GetProjectIds() { return _documentMap.Keys.Select(k => k.ProjectId).Concat(_projectMap.Keys).Distinct(); } private HashSet<DiagnosticData> GetErrorSet<T>(Dictionary<T, HashSet<DiagnosticData>> map, T key) { return map.GetOrAdd(key, _ => new HashSet<DiagnosticData>(DiagnosticDataComparer.Instance)); } } private class ArgumentKey : BuildToolId.Base<object> { public ArgumentKey(object key) : base(key) { } public override string BuildTool { get { return PredefinedBuildTools.Build; } } public override bool Equals(object obj) { var other = obj as ArgumentKey; if (other == null) { return false; } return base.Equals(obj); } public override int GetHashCode() { return base.GetHashCode(); } } private class DiagnosticDataComparer : IEqualityComparer<DiagnosticData> { public static readonly DiagnosticDataComparer Instance = new DiagnosticDataComparer(); public bool Equals(DiagnosticData item1, DiagnosticData item2) { // crash if any one of them is NULL if ((IsNull(item1.DocumentId) ^ IsNull(item2.DocumentId)) || 
(IsNull(item1.ProjectId) ^ IsNull(item2.ProjectId))) { return false; } var lineColumn1 = GetOriginalOrMappedLineColumn(item1); var lineColumn2 = GetOriginalOrMappedLineColumn(item2); if (item1.DocumentId != null && item2.DocumentId != null) { return item1.Id == item2.Id && item1.Message == item2.Message && item1.ProjectId == item2.ProjectId && item1.DocumentId == item2.DocumentId && lineColumn1.Item1 == lineColumn2.Item1 && lineColumn1.Item2 == lineColumn2.Item2 && item1.Severity == item2.Severity; } return item1.Id == item2.Id && item1.Message == item2.Message && item1.ProjectId == item2.ProjectId && item1.DataLocation?.OriginalFilePath == item2.DataLocation?.OriginalFilePath && lineColumn1.Item1 == lineColumn2.Item1 && lineColumn1.Item2 == lineColumn2.Item2 && item1.Severity == item2.Severity; } public int GetHashCode(DiagnosticData obj) { var lineColumn = GetOriginalOrMappedLineColumn(obj); if (obj.DocumentId != null) { return Hash.Combine(obj.Id, Hash.Combine(obj.Message, Hash.Combine(obj.ProjectId, Hash.Combine(obj.DocumentId, Hash.Combine(lineColumn.Item1, Hash.Combine(lineColumn.Item2, (int)obj.Severity)))))); } return Hash.Combine(obj.Id, Hash.Combine(obj.Message, Hash.Combine(obj.ProjectId, Hash.Combine(obj.DataLocation?.OriginalFilePath?.GetHashCode() ?? 0, Hash.Combine(lineColumn.Item1, Hash.Combine(lineColumn.Item2, (int)obj.Severity)))))); } private static ValueTuple<int, int> GetOriginalOrMappedLineColumn(DiagnosticData data) { var workspace = data.Workspace as VisualStudioWorkspaceImpl; if (workspace == null) { return ValueTuple.Create(data.DataLocation?.MappedStartLine ?? 0, data.DataLocation?.MappedStartColumn ?? 0); } if (data.DocumentId == null) { return ValueTuple.Create(data.DataLocation?.MappedStartLine ?? 0, data.DataLocation?.MappedStartColumn ?? 0); } var containedDocument = workspace.GetHostDocument(data.DocumentId) as ContainedDocument; if (containedDocument == null) { return ValueTuple.Create(data.DataLocation?.MappedStartLine ?? 
0, data.DataLocation?.MappedStartColumn ?? 0); } return ValueTuple.Create(data.DataLocation?.OriginalStartLine ?? 0, data.DataLocation?.OriginalStartColumn ?? 0); } private bool IsNull<T>(T item) where T : class { return item == null; } } } }
// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Generated code. DO NOT EDIT! namespace Google.Cloud.Asset.V1.Snippets { using Google.Api.Gax; using Google.LongRunning; using Google.Protobuf.WellKnownTypes; using System; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; /// <summary>Generated snippets.</summary> public sealed class AllGeneratedAssetServiceClientSnippets { /// <summary>Snippet for ExportAssets</summary> public void ExportAssetsRequestObject() { // Snippet: ExportAssets(ExportAssetsRequest, CallSettings) // Create client AssetServiceClient assetServiceClient = AssetServiceClient.Create(); // Initialize request argument(s) ExportAssetsRequest request = new ExportAssetsRequest { ParentAsResourceName = new UnparsedResourceName("a/wildcard/resource"), ReadTime = new Timestamp(), AssetTypes = { "", }, ContentType = ContentType.Unspecified, OutputConfig = new OutputConfig(), RelationshipTypes = { "", }, }; // Make the request Operation<ExportAssetsResponse, ExportAssetsRequest> response = assetServiceClient.ExportAssets(request); // Poll until the returned long-running operation is complete Operation<ExportAssetsResponse, ExportAssetsRequest> completedResponse = response.PollUntilCompleted(); // Retrieve the operation result ExportAssetsResponse result = completedResponse.Result; // Or get the name of the operation string operationName = response.Name; // This name can be stored, then the 
long-running operation retrieved later by name Operation<ExportAssetsResponse, ExportAssetsRequest> retrievedResponse = assetServiceClient.PollOnceExportAssets(operationName); // Check if the retrieved long-running operation has completed if (retrievedResponse.IsCompleted) { // If it has completed, then access the result ExportAssetsResponse retrievedResult = retrievedResponse.Result; } // End snippet } /// <summary>Snippet for ExportAssetsAsync</summary> public async Task ExportAssetsRequestObjectAsync() { // Snippet: ExportAssetsAsync(ExportAssetsRequest, CallSettings) // Additional: ExportAssetsAsync(ExportAssetsRequest, CancellationToken) // Create client AssetServiceClient assetServiceClient = await AssetServiceClient.CreateAsync(); // Initialize request argument(s) ExportAssetsRequest request = new ExportAssetsRequest { ParentAsResourceName = new UnparsedResourceName("a/wildcard/resource"), ReadTime = new Timestamp(), AssetTypes = { "", }, ContentType = ContentType.Unspecified, OutputConfig = new OutputConfig(), RelationshipTypes = { "", }, }; // Make the request Operation<ExportAssetsResponse, ExportAssetsRequest> response = await assetServiceClient.ExportAssetsAsync(request); // Poll until the returned long-running operation is complete Operation<ExportAssetsResponse, ExportAssetsRequest> completedResponse = await response.PollUntilCompletedAsync(); // Retrieve the operation result ExportAssetsResponse result = completedResponse.Result; // Or get the name of the operation string operationName = response.Name; // This name can be stored, then the long-running operation retrieved later by name Operation<ExportAssetsResponse, ExportAssetsRequest> retrievedResponse = await assetServiceClient.PollOnceExportAssetsAsync(operationName); // Check if the retrieved long-running operation has completed if (retrievedResponse.IsCompleted) { // If it has completed, then access the result ExportAssetsResponse retrievedResult = retrievedResponse.Result; } // End snippet } 
/// <summary>Snippet for ListAssets</summary> public void ListAssetsRequestObject() { // Snippet: ListAssets(ListAssetsRequest, CallSettings) // Create client AssetServiceClient assetServiceClient = AssetServiceClient.Create(); // Initialize request argument(s) ListAssetsRequest request = new ListAssetsRequest { ParentAsResourceName = new UnparsedResourceName("a/wildcard/resource"), ReadTime = new Timestamp(), AssetTypes = { "", }, ContentType = ContentType.Unspecified, RelationshipTypes = { "", }, }; // Make the request PagedEnumerable<ListAssetsResponse, Asset> response = assetServiceClient.ListAssets(request); // Iterate over all response items, lazily performing RPCs as required foreach (Asset item in response) { // Do something with each item Console.WriteLine(item); } // Or iterate over pages (of server-defined size), performing one RPC per page foreach (ListAssetsResponse page in response.AsRawResponses()) { // Do something with each page of items Console.WriteLine("A page of results:"); foreach (Asset item in page) { // Do something with each item Console.WriteLine(item); } } // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required int pageSize = 10; Page<Asset> singlePage = response.ReadPage(pageSize); // Do something with the page of items Console.WriteLine($"A page of {pageSize} results (unless it's the final page):"); foreach (Asset item in singlePage) { // Do something with each item Console.WriteLine(item); } // Store the pageToken, for when the next page is required. 
string nextPageToken = singlePage.NextPageToken; // End snippet } /// <summary>Snippet for ListAssetsAsync</summary> public async Task ListAssetsRequestObjectAsync() { // Snippet: ListAssetsAsync(ListAssetsRequest, CallSettings) // Create client AssetServiceClient assetServiceClient = await AssetServiceClient.CreateAsync(); // Initialize request argument(s) ListAssetsRequest request = new ListAssetsRequest { ParentAsResourceName = new UnparsedResourceName("a/wildcard/resource"), ReadTime = new Timestamp(), AssetTypes = { "", }, ContentType = ContentType.Unspecified, RelationshipTypes = { "", }, }; // Make the request PagedAsyncEnumerable<ListAssetsResponse, Asset> response = assetServiceClient.ListAssetsAsync(request); // Iterate over all response items, lazily performing RPCs as required await response.ForEachAsync((Asset item) => { // Do something with each item Console.WriteLine(item); }); // Or iterate over pages (of server-defined size), performing one RPC per page await response.AsRawResponses().ForEachAsync((ListAssetsResponse page) => { // Do something with each page of items Console.WriteLine("A page of results:"); foreach (Asset item in page) { // Do something with each item Console.WriteLine(item); } }); // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required int pageSize = 10; Page<Asset> singlePage = await response.ReadPageAsync(pageSize); // Do something with the page of items Console.WriteLine($"A page of {pageSize} results (unless it's the final page):"); foreach (Asset item in singlePage) { // Do something with each item Console.WriteLine(item); } // Store the pageToken, for when the next page is required. 
string nextPageToken = singlePage.NextPageToken; // End snippet } /// <summary>Snippet for ListAssets</summary> public void ListAssets() { // Snippet: ListAssets(string, string, int?, CallSettings) // Create client AssetServiceClient assetServiceClient = AssetServiceClient.Create(); // Initialize request argument(s) string parent = "a/wildcard/resource"; // Make the request PagedEnumerable<ListAssetsResponse, Asset> response = assetServiceClient.ListAssets(parent); // Iterate over all response items, lazily performing RPCs as required foreach (Asset item in response) { // Do something with each item Console.WriteLine(item); } // Or iterate over pages (of server-defined size), performing one RPC per page foreach (ListAssetsResponse page in response.AsRawResponses()) { // Do something with each page of items Console.WriteLine("A page of results:"); foreach (Asset item in page) { // Do something with each item Console.WriteLine(item); } } // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required int pageSize = 10; Page<Asset> singlePage = response.ReadPage(pageSize); // Do something with the page of items Console.WriteLine($"A page of {pageSize} results (unless it's the final page):"); foreach (Asset item in singlePage) { // Do something with each item Console.WriteLine(item); } // Store the pageToken, for when the next page is required. 
string nextPageToken = singlePage.NextPageToken; // End snippet } /// <summary>Snippet for ListAssetsAsync</summary> public async Task ListAssetsAsync() { // Snippet: ListAssetsAsync(string, string, int?, CallSettings) // Create client AssetServiceClient assetServiceClient = await AssetServiceClient.CreateAsync(); // Initialize request argument(s) string parent = "a/wildcard/resource"; // Make the request PagedAsyncEnumerable<ListAssetsResponse, Asset> response = assetServiceClient.ListAssetsAsync(parent); // Iterate over all response items, lazily performing RPCs as required await response.ForEachAsync((Asset item) => { // Do something with each item Console.WriteLine(item); }); // Or iterate over pages (of server-defined size), performing one RPC per page await response.AsRawResponses().ForEachAsync((ListAssetsResponse page) => { // Do something with each page of items Console.WriteLine("A page of results:"); foreach (Asset item in page) { // Do something with each item Console.WriteLine(item); } }); // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required int pageSize = 10; Page<Asset> singlePage = await response.ReadPageAsync(pageSize); // Do something with the page of items Console.WriteLine($"A page of {pageSize} results (unless it's the final page):"); foreach (Asset item in singlePage) { // Do something with each item Console.WriteLine(item); } // Store the pageToken, for when the next page is required. 
string nextPageToken = singlePage.NextPageToken; // End snippet } /// <summary>Snippet for ListAssets</summary> public void ListAssetsResourceNames() { // Snippet: ListAssets(IResourceName, string, int?, CallSettings) // Create client AssetServiceClient assetServiceClient = AssetServiceClient.Create(); // Initialize request argument(s) IResourceName parent = new UnparsedResourceName("a/wildcard/resource"); // Make the request PagedEnumerable<ListAssetsResponse, Asset> response = assetServiceClient.ListAssets(parent); // Iterate over all response items, lazily performing RPCs as required foreach (Asset item in response) { // Do something with each item Console.WriteLine(item); } // Or iterate over pages (of server-defined size), performing one RPC per page foreach (ListAssetsResponse page in response.AsRawResponses()) { // Do something with each page of items Console.WriteLine("A page of results:"); foreach (Asset item in page) { // Do something with each item Console.WriteLine(item); } } // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required int pageSize = 10; Page<Asset> singlePage = response.ReadPage(pageSize); // Do something with the page of items Console.WriteLine($"A page of {pageSize} results (unless it's the final page):"); foreach (Asset item in singlePage) { // Do something with each item Console.WriteLine(item); } // Store the pageToken, for when the next page is required. 
string nextPageToken = singlePage.NextPageToken; // End snippet } /// <summary>Snippet for ListAssetsAsync</summary> public async Task ListAssetsResourceNamesAsync() { // Snippet: ListAssetsAsync(IResourceName, string, int?, CallSettings) // Create client AssetServiceClient assetServiceClient = await AssetServiceClient.CreateAsync(); // Initialize request argument(s) IResourceName parent = new UnparsedResourceName("a/wildcard/resource"); // Make the request PagedAsyncEnumerable<ListAssetsResponse, Asset> response = assetServiceClient.ListAssetsAsync(parent); // Iterate over all response items, lazily performing RPCs as required await response.ForEachAsync((Asset item) => { // Do something with each item Console.WriteLine(item); }); // Or iterate over pages (of server-defined size), performing one RPC per page await response.AsRawResponses().ForEachAsync((ListAssetsResponse page) => { // Do something with each page of items Console.WriteLine("A page of results:"); foreach (Asset item in page) { // Do something with each item Console.WriteLine(item); } }); // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required int pageSize = 10; Page<Asset> singlePage = await response.ReadPageAsync(pageSize); // Do something with the page of items Console.WriteLine($"A page of {pageSize} results (unless it's the final page):"); foreach (Asset item in singlePage) { // Do something with each item Console.WriteLine(item); } // Store the pageToken, for when the next page is required. 
string nextPageToken = singlePage.NextPageToken;
// End snippet
}

/// <summary>Snippet for BatchGetAssetsHistory</summary>
public void BatchGetAssetsHistoryRequestObject()
{
    // Snippet: BatchGetAssetsHistory(BatchGetAssetsHistoryRequest, CallSettings)
    // Create client
    AssetServiceClient assetServiceClient = AssetServiceClient.Create();
    // Initialize request argument(s)
    BatchGetAssetsHistoryRequest request = new BatchGetAssetsHistoryRequest
    {
        ParentAsResourceName = new UnparsedResourceName("a/wildcard/resource"),
        AssetNames = { "", },
        ContentType = ContentType.Unspecified,
        ReadTimeWindow = new TimeWindow(),
        RelationshipTypes = { "", },
    };
    // Make the request
    BatchGetAssetsHistoryResponse response = assetServiceClient.BatchGetAssetsHistory(request);
    // End snippet
}

/// <summary>Snippet for BatchGetAssetsHistoryAsync</summary>
public async Task BatchGetAssetsHistoryRequestObjectAsync()
{
    // Snippet: BatchGetAssetsHistoryAsync(BatchGetAssetsHistoryRequest, CallSettings)
    // Additional: BatchGetAssetsHistoryAsync(BatchGetAssetsHistoryRequest, CancellationToken)
    // Create client
    AssetServiceClient assetServiceClient = await AssetServiceClient.CreateAsync();
    // Initialize request argument(s)
    BatchGetAssetsHistoryRequest request = new BatchGetAssetsHistoryRequest
    {
        ParentAsResourceName = new UnparsedResourceName("a/wildcard/resource"),
        AssetNames = { "", },
        ContentType = ContentType.Unspecified,
        ReadTimeWindow = new TimeWindow(),
        RelationshipTypes = { "", },
    };
    // Make the request
    BatchGetAssetsHistoryResponse response = await assetServiceClient.BatchGetAssetsHistoryAsync(request);
    // End snippet
}

/// <summary>Snippet for CreateFeed</summary>
public void CreateFeedRequestObject()
{
    // Snippet: CreateFeed(CreateFeedRequest, CallSettings)
    // Create client
    AssetServiceClient assetServiceClient = AssetServiceClient.Create();
    // Initialize request argument(s)
    CreateFeedRequest request = new CreateFeedRequest
    {
        Parent = "",
        FeedId = "",
        Feed = new Feed(),
    };
    // Make the request
    Feed response = assetServiceClient.CreateFeed(request);
    // End snippet
}

/// <summary>Snippet for CreateFeedAsync</summary>
public async Task CreateFeedRequestObjectAsync()
{
    // Snippet: CreateFeedAsync(CreateFeedRequest, CallSettings)
    // Additional: CreateFeedAsync(CreateFeedRequest, CancellationToken)
    // Create client
    AssetServiceClient assetServiceClient = await AssetServiceClient.CreateAsync();
    // Initialize request argument(s)
    CreateFeedRequest request = new CreateFeedRequest
    {
        Parent = "",
        FeedId = "",
        Feed = new Feed(),
    };
    // Make the request
    Feed response = await assetServiceClient.CreateFeedAsync(request);
    // End snippet
}

/// <summary>Snippet for CreateFeed</summary>
public void CreateFeed()
{
    // Snippet: CreateFeed(string, CallSettings)
    // Create client
    AssetServiceClient assetServiceClient = AssetServiceClient.Create();
    // Initialize request argument(s)
    string parent = "";
    // Make the request
    Feed response = assetServiceClient.CreateFeed(parent);
    // End snippet
}

/// <summary>Snippet for CreateFeedAsync</summary>
public async Task CreateFeedAsync()
{
    // Snippet: CreateFeedAsync(string, CallSettings)
    // Additional: CreateFeedAsync(string, CancellationToken)
    // Create client
    AssetServiceClient assetServiceClient = await AssetServiceClient.CreateAsync();
    // Initialize request argument(s)
    string parent = "";
    // Make the request
    Feed response = await assetServiceClient.CreateFeedAsync(parent);
    // End snippet
}

/// <summary>Snippet for GetFeed</summary>
public void GetFeedRequestObject()
{
    // Snippet: GetFeed(GetFeedRequest, CallSettings)
    // Create client
    AssetServiceClient assetServiceClient = AssetServiceClient.Create();
    // Initialize request argument(s)
    GetFeedRequest request = new GetFeedRequest
    {
        FeedName = FeedName.FromProjectFeed("[PROJECT]", "[FEED]"),
    };
    // Make the request
    Feed response = assetServiceClient.GetFeed(request);
    // End snippet
}

/// <summary>Snippet for GetFeedAsync</summary>
public async Task GetFeedRequestObjectAsync()
{
    // Snippet: GetFeedAsync(GetFeedRequest, CallSettings)
    // Additional: GetFeedAsync(GetFeedRequest, CancellationToken)
    // Create client
    AssetServiceClient assetServiceClient = await AssetServiceClient.CreateAsync();
    // Initialize request argument(s)
    GetFeedRequest request = new GetFeedRequest
    {
        FeedName = FeedName.FromProjectFeed("[PROJECT]", "[FEED]"),
    };
    // Make the request
    Feed response = await assetServiceClient.GetFeedAsync(request);
    // End snippet
}

/// <summary>Snippet for GetFeed</summary>
public void GetFeed()
{
    // Snippet: GetFeed(string, CallSettings)
    // Create client
    AssetServiceClient assetServiceClient = AssetServiceClient.Create();
    // Initialize request argument(s)
    string name = "projects/[PROJECT]/feeds/[FEED]";
    // Make the request
    Feed response = assetServiceClient.GetFeed(name);
    // End snippet
}

/// <summary>Snippet for GetFeedAsync</summary>
public async Task GetFeedAsync()
{
    // Snippet: GetFeedAsync(string, CallSettings)
    // Additional: GetFeedAsync(string, CancellationToken)
    // Create client
    AssetServiceClient assetServiceClient = await AssetServiceClient.CreateAsync();
    // Initialize request argument(s)
    string name = "projects/[PROJECT]/feeds/[FEED]";
    // Make the request
    Feed response = await assetServiceClient.GetFeedAsync(name);
    // End snippet
}

/// <summary>Snippet for GetFeed</summary>
public void GetFeedResourceNames()
{
    // Snippet: GetFeed(FeedName, CallSettings)
    // Create client
    AssetServiceClient assetServiceClient = AssetServiceClient.Create();
    // Initialize request argument(s)
    FeedName name = FeedName.FromProjectFeed("[PROJECT]", "[FEED]");
    // Make the request
    Feed response = assetServiceClient.GetFeed(name);
    // End snippet
}

/// <summary>Snippet for GetFeedAsync</summary>
public async Task GetFeedResourceNamesAsync()
{
    // Snippet: GetFeedAsync(FeedName, CallSettings)
    // Additional: GetFeedAsync(FeedName, CancellationToken)
    // Create client
    AssetServiceClient assetServiceClient = await AssetServiceClient.CreateAsync();
    // Initialize request argument(s)
    FeedName name = FeedName.FromProjectFeed("[PROJECT]", "[FEED]");
    // Make the request
    Feed response = await assetServiceClient.GetFeedAsync(name);
    // End snippet
}

/// <summary>Snippet for ListFeeds</summary>
public void ListFeedsRequestObject()
{
    // Snippet: ListFeeds(ListFeedsRequest, CallSettings)
    // Create client
    AssetServiceClient assetServiceClient = AssetServiceClient.Create();
    // Initialize request argument(s)
    ListFeedsRequest request = new ListFeedsRequest
    {
        Parent = "",
    };
    // Make the request
    ListFeedsResponse response = assetServiceClient.ListFeeds(request);
    // End snippet
}

/// <summary>Snippet for ListFeedsAsync</summary>
public async Task ListFeedsRequestObjectAsync()
{
    // Snippet: ListFeedsAsync(ListFeedsRequest, CallSettings)
    // Additional: ListFeedsAsync(ListFeedsRequest, CancellationToken)
    // Create client
    AssetServiceClient assetServiceClient = await AssetServiceClient.CreateAsync();
    // Initialize request argument(s)
    ListFeedsRequest request = new ListFeedsRequest
    {
        Parent = "",
    };
    // Make the request
    ListFeedsResponse response = await assetServiceClient.ListFeedsAsync(request);
    // End snippet
}

/// <summary>Snippet for ListFeeds</summary>
public void ListFeeds()
{
    // Snippet: ListFeeds(string, CallSettings)
    // Create client
    AssetServiceClient assetServiceClient = AssetServiceClient.Create();
    // Initialize request argument(s)
    string parent = "";
    // Make the request
    ListFeedsResponse response = assetServiceClient.ListFeeds(parent);
    // End snippet
}

/// <summary>Snippet for ListFeedsAsync</summary>
public async Task ListFeedsAsync()
{
    // Snippet: ListFeedsAsync(string, CallSettings)
    // Additional: ListFeedsAsync(string, CancellationToken)
    // Create client
    AssetServiceClient assetServiceClient = await AssetServiceClient.CreateAsync();
    // Initialize request argument(s)
    string parent = "";
    // Make the request
    ListFeedsResponse response = await assetServiceClient.ListFeedsAsync(parent);
    // End snippet
}

/// <summary>Snippet for UpdateFeed</summary>
public void UpdateFeedRequestObject()
{
    // Snippet: UpdateFeed(UpdateFeedRequest, CallSettings)
    // Create client
    AssetServiceClient assetServiceClient = AssetServiceClient.Create();
    // Initialize request argument(s)
    UpdateFeedRequest request = new UpdateFeedRequest
    {
        Feed = new Feed(),
        UpdateMask = new FieldMask(),
    };
    // Make the request
    Feed response = assetServiceClient.UpdateFeed(request);
    // End snippet
}

/// <summary>Snippet for UpdateFeedAsync</summary>
public async Task UpdateFeedRequestObjectAsync()
{
    // Snippet: UpdateFeedAsync(UpdateFeedRequest, CallSettings)
    // Additional: UpdateFeedAsync(UpdateFeedRequest, CancellationToken)
    // Create client
    AssetServiceClient assetServiceClient = await AssetServiceClient.CreateAsync();
    // Initialize request argument(s)
    UpdateFeedRequest request = new UpdateFeedRequest
    {
        Feed = new Feed(),
        UpdateMask = new FieldMask(),
    };
    // Make the request
    Feed response = await assetServiceClient.UpdateFeedAsync(request);
    // End snippet
}

/// <summary>Snippet for UpdateFeed</summary>
public void UpdateFeed()
{
    // Snippet: UpdateFeed(Feed, CallSettings)
    // Create client
    AssetServiceClient assetServiceClient = AssetServiceClient.Create();
    // Initialize request argument(s)
    Feed feed = new Feed();
    // Make the request
    Feed response = assetServiceClient.UpdateFeed(feed);
    // End snippet
}

/// <summary>Snippet for UpdateFeedAsync</summary>
public async Task UpdateFeedAsync()
{
    // Snippet: UpdateFeedAsync(Feed, CallSettings)
    // Additional: UpdateFeedAsync(Feed, CancellationToken)
    // Create client
    AssetServiceClient assetServiceClient = await AssetServiceClient.CreateAsync();
    // Initialize request argument(s)
    Feed feed = new Feed();
    // Make the request
    Feed response = await assetServiceClient.UpdateFeedAsync(feed);
    // End snippet
}

/// <summary>Snippet for DeleteFeed</summary>
public void DeleteFeedRequestObject()
{
    // Snippet: DeleteFeed(DeleteFeedRequest, CallSettings)
    // Create client
    AssetServiceClient
assetServiceClient = AssetServiceClient.Create();
    // Initialize request argument(s)
    DeleteFeedRequest request = new DeleteFeedRequest
    {
        FeedName = FeedName.FromProjectFeed("[PROJECT]", "[FEED]"),
    };
    // Make the request
    assetServiceClient.DeleteFeed(request);
    // End snippet
}

/// <summary>Snippet for DeleteFeedAsync</summary>
public async Task DeleteFeedRequestObjectAsync()
{
    // Snippet: DeleteFeedAsync(DeleteFeedRequest, CallSettings)
    // Additional: DeleteFeedAsync(DeleteFeedRequest, CancellationToken)
    // Create client
    AssetServiceClient assetServiceClient = await AssetServiceClient.CreateAsync();
    // Initialize request argument(s)
    DeleteFeedRequest request = new DeleteFeedRequest
    {
        FeedName = FeedName.FromProjectFeed("[PROJECT]", "[FEED]"),
    };
    // Make the request
    await assetServiceClient.DeleteFeedAsync(request);
    // End snippet
}

/// <summary>Snippet for DeleteFeed</summary>
public void DeleteFeed()
{
    // Snippet: DeleteFeed(string, CallSettings)
    // Create client
    AssetServiceClient assetServiceClient = AssetServiceClient.Create();
    // Initialize request argument(s)
    string name = "projects/[PROJECT]/feeds/[FEED]";
    // Make the request
    assetServiceClient.DeleteFeed(name);
    // End snippet
}

/// <summary>Snippet for DeleteFeedAsync</summary>
public async Task DeleteFeedAsync()
{
    // Snippet: DeleteFeedAsync(string, CallSettings)
    // Additional: DeleteFeedAsync(string, CancellationToken)
    // Create client
    AssetServiceClient assetServiceClient = await AssetServiceClient.CreateAsync();
    // Initialize request argument(s)
    string name = "projects/[PROJECT]/feeds/[FEED]";
    // Make the request
    await assetServiceClient.DeleteFeedAsync(name);
    // End snippet
}

/// <summary>Snippet for DeleteFeed</summary>
public void DeleteFeedResourceNames()
{
    // Snippet: DeleteFeed(FeedName, CallSettings)
    // Create client
    AssetServiceClient assetServiceClient = AssetServiceClient.Create();
    // Initialize request argument(s)
    FeedName name = FeedName.FromProjectFeed("[PROJECT]", "[FEED]");
    // Make the request
    assetServiceClient.DeleteFeed(name);
    // End snippet
}

/// <summary>Snippet for DeleteFeedAsync</summary>
public async Task DeleteFeedResourceNamesAsync()
{
    // Snippet: DeleteFeedAsync(FeedName, CallSettings)
    // Additional: DeleteFeedAsync(FeedName, CancellationToken)
    // Create client
    AssetServiceClient assetServiceClient = await AssetServiceClient.CreateAsync();
    // Initialize request argument(s)
    FeedName name = FeedName.FromProjectFeed("[PROJECT]", "[FEED]");
    // Make the request
    await assetServiceClient.DeleteFeedAsync(name);
    // End snippet
}

/// <summary>Snippet for SearchAllResources</summary>
public void SearchAllResourcesRequestObject()
{
    // Snippet: SearchAllResources(SearchAllResourcesRequest, CallSettings)
    // Create client
    AssetServiceClient assetServiceClient = AssetServiceClient.Create();
    // Initialize request argument(s)
    SearchAllResourcesRequest request = new SearchAllResourcesRequest
    {
        Scope = "",
        Query = "",
        AssetTypes = { "", },
        OrderBy = "",
        ReadMask = new FieldMask(),
    };
    // Make the request
    PagedEnumerable<SearchAllResourcesResponse, ResourceSearchResult> response = assetServiceClient.SearchAllResources(request);

    // Iterate over all response items, lazily performing RPCs as required
    foreach (ResourceSearchResult item in response)
    {
        // Do something with each item
        Console.WriteLine(item);
    }

    // Or iterate over pages (of server-defined size), performing one RPC per page
    foreach (SearchAllResourcesResponse page in response.AsRawResponses())
    {
        // Do something with each page of items
        Console.WriteLine("A page of results:");
        foreach (ResourceSearchResult item in page)
        {
            // Do something with each item
            Console.WriteLine(item);
        }
    }

    // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
    int pageSize = 10;
    Page<ResourceSearchResult> singlePage = response.ReadPage(pageSize);
    // Do something with the page of items
    Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
    foreach (ResourceSearchResult item in singlePage)
    {
        // Do something with each item
        Console.WriteLine(item);
    }
    // Store the pageToken, for when the next page is required.
    string nextPageToken = singlePage.NextPageToken;
    // End snippet
}

/// <summary>Snippet for SearchAllResourcesAsync</summary>
public async Task SearchAllResourcesRequestObjectAsync()
{
    // Snippet: SearchAllResourcesAsync(SearchAllResourcesRequest, CallSettings)
    // Create client
    AssetServiceClient assetServiceClient = await AssetServiceClient.CreateAsync();
    // Initialize request argument(s)
    SearchAllResourcesRequest request = new SearchAllResourcesRequest
    {
        Scope = "",
        Query = "",
        AssetTypes = { "", },
        OrderBy = "",
        ReadMask = new FieldMask(),
    };
    // Make the request
    PagedAsyncEnumerable<SearchAllResourcesResponse, ResourceSearchResult> response = assetServiceClient.SearchAllResourcesAsync(request);

    // Iterate over all response items, lazily performing RPCs as required
    await response.ForEachAsync((ResourceSearchResult item) =>
    {
        // Do something with each item
        Console.WriteLine(item);
    });

    // Or iterate over pages (of server-defined size), performing one RPC per page
    await response.AsRawResponses().ForEachAsync((SearchAllResourcesResponse page) =>
    {
        // Do something with each page of items
        Console.WriteLine("A page of results:");
        foreach (ResourceSearchResult item in page)
        {
            // Do something with each item
            Console.WriteLine(item);
        }
    });

    // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
    int pageSize = 10;
    Page<ResourceSearchResult> singlePage = await response.ReadPageAsync(pageSize);
    // Do something with the page of items
    Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
    foreach (ResourceSearchResult item in singlePage)
    {
        // Do something with each item
        Console.WriteLine(item);
    }
    // Store the pageToken, for when the next page is required.
string nextPageToken = singlePage.NextPageToken;
// End snippet
}

/// <summary>Snippet for SearchAllResources</summary>
public void SearchAllResources()
{
    // Snippet: SearchAllResources(string, string, IEnumerable<string>, string, int?, CallSettings)
    // Create client
    AssetServiceClient assetServiceClient = AssetServiceClient.Create();
    // Initialize request argument(s)
    string scope = "";
    string query = "";
    IEnumerable<string> assetTypes = new string[] { "", };
    // Make the request
    PagedEnumerable<SearchAllResourcesResponse, ResourceSearchResult> response = assetServiceClient.SearchAllResources(scope, query, assetTypes);

    // Iterate over all response items, lazily performing RPCs as required
    foreach (ResourceSearchResult item in response)
    {
        // Do something with each item
        Console.WriteLine(item);
    }

    // Or iterate over pages (of server-defined size), performing one RPC per page
    foreach (SearchAllResourcesResponse page in response.AsRawResponses())
    {
        // Do something with each page of items
        Console.WriteLine("A page of results:");
        foreach (ResourceSearchResult item in page)
        {
            // Do something with each item
            Console.WriteLine(item);
        }
    }

    // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
    int pageSize = 10;
    Page<ResourceSearchResult> singlePage = response.ReadPage(pageSize);
    // Do something with the page of items
    Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
    foreach (ResourceSearchResult item in singlePage)
    {
        // Do something with each item
        Console.WriteLine(item);
    }
    // Store the pageToken, for when the next page is required.
string nextPageToken = singlePage.NextPageToken;
// End snippet
}

/// <summary>Snippet for SearchAllResourcesAsync</summary>
public async Task SearchAllResourcesAsync()
{
    // Snippet: SearchAllResourcesAsync(string, string, IEnumerable<string>, string, int?, CallSettings)
    // Create client
    AssetServiceClient assetServiceClient = await AssetServiceClient.CreateAsync();
    // Initialize request argument(s)
    string scope = "";
    string query = "";
    IEnumerable<string> assetTypes = new string[] { "", };
    // Make the request
    PagedAsyncEnumerable<SearchAllResourcesResponse, ResourceSearchResult> response = assetServiceClient.SearchAllResourcesAsync(scope, query, assetTypes);

    // Iterate over all response items, lazily performing RPCs as required
    await response.ForEachAsync((ResourceSearchResult item) =>
    {
        // Do something with each item
        Console.WriteLine(item);
    });

    // Or iterate over pages (of server-defined size), performing one RPC per page
    await response.AsRawResponses().ForEachAsync((SearchAllResourcesResponse page) =>
    {
        // Do something with each page of items
        Console.WriteLine("A page of results:");
        foreach (ResourceSearchResult item in page)
        {
            // Do something with each item
            Console.WriteLine(item);
        }
    });

    // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
    int pageSize = 10;
    Page<ResourceSearchResult> singlePage = await response.ReadPageAsync(pageSize);
    // Do something with the page of items
    Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
    foreach (ResourceSearchResult item in singlePage)
    {
        // Do something with each item
        Console.WriteLine(item);
    }
    // Store the pageToken, for when the next page is required.
string nextPageToken = singlePage.NextPageToken;
// End snippet
}

/// <summary>Snippet for SearchAllIamPolicies</summary>
public void SearchAllIamPoliciesRequestObject()
{
    // Snippet: SearchAllIamPolicies(SearchAllIamPoliciesRequest, CallSettings)
    // Create client
    AssetServiceClient assetServiceClient = AssetServiceClient.Create();
    // Initialize request argument(s)
    SearchAllIamPoliciesRequest request = new SearchAllIamPoliciesRequest
    {
        Scope = "",
        Query = "",
        AssetTypes = { "", },
        OrderBy = "",
    };
    // Make the request
    PagedEnumerable<SearchAllIamPoliciesResponse, IamPolicySearchResult> response = assetServiceClient.SearchAllIamPolicies(request);

    // Iterate over all response items, lazily performing RPCs as required
    foreach (IamPolicySearchResult item in response)
    {
        // Do something with each item
        Console.WriteLine(item);
    }

    // Or iterate over pages (of server-defined size), performing one RPC per page
    foreach (SearchAllIamPoliciesResponse page in response.AsRawResponses())
    {
        // Do something with each page of items
        Console.WriteLine("A page of results:");
        foreach (IamPolicySearchResult item in page)
        {
            // Do something with each item
            Console.WriteLine(item);
        }
    }

    // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
    int pageSize = 10;
    Page<IamPolicySearchResult> singlePage = response.ReadPage(pageSize);
    // Do something with the page of items
    Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
    foreach (IamPolicySearchResult item in singlePage)
    {
        // Do something with each item
        Console.WriteLine(item);
    }
    // Store the pageToken, for when the next page is required.
string nextPageToken = singlePage.NextPageToken;
// End snippet
}

/// <summary>Snippet for SearchAllIamPoliciesAsync</summary>
public async Task SearchAllIamPoliciesRequestObjectAsync()
{
    // Snippet: SearchAllIamPoliciesAsync(SearchAllIamPoliciesRequest, CallSettings)
    // Create client
    AssetServiceClient assetServiceClient = await AssetServiceClient.CreateAsync();
    // Initialize request argument(s)
    SearchAllIamPoliciesRequest request = new SearchAllIamPoliciesRequest
    {
        Scope = "",
        Query = "",
        AssetTypes = { "", },
        OrderBy = "",
    };
    // Make the request
    PagedAsyncEnumerable<SearchAllIamPoliciesResponse, IamPolicySearchResult> response = assetServiceClient.SearchAllIamPoliciesAsync(request);

    // Iterate over all response items, lazily performing RPCs as required
    await response.ForEachAsync((IamPolicySearchResult item) =>
    {
        // Do something with each item
        Console.WriteLine(item);
    });

    // Or iterate over pages (of server-defined size), performing one RPC per page
    await response.AsRawResponses().ForEachAsync((SearchAllIamPoliciesResponse page) =>
    {
        // Do something with each page of items
        Console.WriteLine("A page of results:");
        foreach (IamPolicySearchResult item in page)
        {
            // Do something with each item
            Console.WriteLine(item);
        }
    });

    // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
    int pageSize = 10;
    Page<IamPolicySearchResult> singlePage = await response.ReadPageAsync(pageSize);
    // Do something with the page of items
    Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
    foreach (IamPolicySearchResult item in singlePage)
    {
        // Do something with each item
        Console.WriteLine(item);
    }
    // Store the pageToken, for when the next page is required.
string nextPageToken = singlePage.NextPageToken;
// End snippet
}

/// <summary>Snippet for SearchAllIamPolicies</summary>
public void SearchAllIamPolicies()
{
    // Snippet: SearchAllIamPolicies(string, string, string, int?, CallSettings)
    // Create client
    AssetServiceClient assetServiceClient = AssetServiceClient.Create();
    // Initialize request argument(s)
    string scope = "";
    string query = "";
    // Make the request
    PagedEnumerable<SearchAllIamPoliciesResponse, IamPolicySearchResult> response = assetServiceClient.SearchAllIamPolicies(scope, query);

    // Iterate over all response items, lazily performing RPCs as required
    foreach (IamPolicySearchResult item in response)
    {
        // Do something with each item
        Console.WriteLine(item);
    }

    // Or iterate over pages (of server-defined size), performing one RPC per page
    foreach (SearchAllIamPoliciesResponse page in response.AsRawResponses())
    {
        // Do something with each page of items
        Console.WriteLine("A page of results:");
        foreach (IamPolicySearchResult item in page)
        {
            // Do something with each item
            Console.WriteLine(item);
        }
    }

    // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
    int pageSize = 10;
    Page<IamPolicySearchResult> singlePage = response.ReadPage(pageSize);
    // Do something with the page of items
    Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
    foreach (IamPolicySearchResult item in singlePage)
    {
        // Do something with each item
        Console.WriteLine(item);
    }
    // Store the pageToken, for when the next page is required.
string nextPageToken = singlePage.NextPageToken;
// End snippet
}

/// <summary>Snippet for SearchAllIamPoliciesAsync</summary>
public async Task SearchAllIamPoliciesAsync()
{
    // Snippet: SearchAllIamPoliciesAsync(string, string, string, int?, CallSettings)
    // Create client
    AssetServiceClient assetServiceClient = await AssetServiceClient.CreateAsync();
    // Initialize request argument(s)
    string scope = "";
    string query = "";
    // Make the request
    PagedAsyncEnumerable<SearchAllIamPoliciesResponse, IamPolicySearchResult> response = assetServiceClient.SearchAllIamPoliciesAsync(scope, query);

    // Iterate over all response items, lazily performing RPCs as required
    await response.ForEachAsync((IamPolicySearchResult item) =>
    {
        // Do something with each item
        Console.WriteLine(item);
    });

    // Or iterate over pages (of server-defined size), performing one RPC per page
    await response.AsRawResponses().ForEachAsync((SearchAllIamPoliciesResponse page) =>
    {
        // Do something with each page of items
        Console.WriteLine("A page of results:");
        foreach (IamPolicySearchResult item in page)
        {
            // Do something with each item
            Console.WriteLine(item);
        }
    });

    // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
    int pageSize = 10;
    Page<IamPolicySearchResult> singlePage = await response.ReadPageAsync(pageSize);
    // Do something with the page of items
    Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
    foreach (IamPolicySearchResult item in singlePage)
    {
        // Do something with each item
        Console.WriteLine(item);
    }
    // Store the pageToken, for when the next page is required.
string nextPageToken = singlePage.NextPageToken;
// End snippet
}

/// <summary>Snippet for AnalyzeIamPolicy</summary>
public void AnalyzeIamPolicyRequestObject()
{
    // Snippet: AnalyzeIamPolicy(AnalyzeIamPolicyRequest, CallSettings)
    // Create client
    AssetServiceClient assetServiceClient = AssetServiceClient.Create();
    // Initialize request argument(s)
    AnalyzeIamPolicyRequest request = new AnalyzeIamPolicyRequest
    {
        AnalysisQuery = new IamPolicyAnalysisQuery(),
        ExecutionTimeout = new Duration(),
    };
    // Make the request
    AnalyzeIamPolicyResponse response = assetServiceClient.AnalyzeIamPolicy(request);
    // End snippet
}

/// <summary>Snippet for AnalyzeIamPolicyAsync</summary>
public async Task AnalyzeIamPolicyRequestObjectAsync()
{
    // Snippet: AnalyzeIamPolicyAsync(AnalyzeIamPolicyRequest, CallSettings)
    // Additional: AnalyzeIamPolicyAsync(AnalyzeIamPolicyRequest, CancellationToken)
    // Create client
    AssetServiceClient assetServiceClient = await AssetServiceClient.CreateAsync();
    // Initialize request argument(s)
    AnalyzeIamPolicyRequest request = new AnalyzeIamPolicyRequest
    {
        AnalysisQuery = new IamPolicyAnalysisQuery(),
        ExecutionTimeout = new Duration(),
    };
    // Make the request
    AnalyzeIamPolicyResponse response = await assetServiceClient.AnalyzeIamPolicyAsync(request);
    // End snippet
}

/// <summary>Snippet for AnalyzeIamPolicyLongrunning</summary>
public void AnalyzeIamPolicyLongrunningRequestObject()
{
    // Snippet: AnalyzeIamPolicyLongrunning(AnalyzeIamPolicyLongrunningRequest, CallSettings)
    // Create client
    AssetServiceClient assetServiceClient = AssetServiceClient.Create();
    // Initialize request argument(s)
    AnalyzeIamPolicyLongrunningRequest request = new AnalyzeIamPolicyLongrunningRequest
    {
        AnalysisQuery = new IamPolicyAnalysisQuery(),
        OutputConfig = new IamPolicyAnalysisOutputConfig(),
    };
    // Make the request
    Operation<AnalyzeIamPolicyLongrunningResponse, AnalyzeIamPolicyLongrunningMetadata> response = assetServiceClient.AnalyzeIamPolicyLongrunning(request);

    // Poll until the returned long-running operation is complete
    Operation<AnalyzeIamPolicyLongrunningResponse, AnalyzeIamPolicyLongrunningMetadata> completedResponse = response.PollUntilCompleted();
    // Retrieve the operation result
    AnalyzeIamPolicyLongrunningResponse result = completedResponse.Result;

    // Or get the name of the operation
    string operationName = response.Name;
    // This name can be stored, then the long-running operation retrieved later by name
    Operation<AnalyzeIamPolicyLongrunningResponse, AnalyzeIamPolicyLongrunningMetadata> retrievedResponse = assetServiceClient.PollOnceAnalyzeIamPolicyLongrunning(operationName);
    // Check if the retrieved long-running operation has completed
    if (retrievedResponse.IsCompleted)
    {
        // If it has completed, then access the result
        AnalyzeIamPolicyLongrunningResponse retrievedResult = retrievedResponse.Result;
    }
    // End snippet
}

/// <summary>Snippet for AnalyzeIamPolicyLongrunningAsync</summary>
public async Task AnalyzeIamPolicyLongrunningRequestObjectAsync()
{
    // Snippet: AnalyzeIamPolicyLongrunningAsync(AnalyzeIamPolicyLongrunningRequest, CallSettings)
    // Additional: AnalyzeIamPolicyLongrunningAsync(AnalyzeIamPolicyLongrunningRequest, CancellationToken)
    // Create client
    AssetServiceClient assetServiceClient = await AssetServiceClient.CreateAsync();
    // Initialize request argument(s)
    AnalyzeIamPolicyLongrunningRequest request = new AnalyzeIamPolicyLongrunningRequest
    {
        AnalysisQuery = new IamPolicyAnalysisQuery(),
        OutputConfig = new IamPolicyAnalysisOutputConfig(),
    };
    // Make the request
    Operation<AnalyzeIamPolicyLongrunningResponse, AnalyzeIamPolicyLongrunningMetadata> response = await assetServiceClient.AnalyzeIamPolicyLongrunningAsync(request);

    // Poll until the returned long-running operation is complete
    Operation<AnalyzeIamPolicyLongrunningResponse, AnalyzeIamPolicyLongrunningMetadata> completedResponse = await response.PollUntilCompletedAsync();
    // Retrieve the operation result
    AnalyzeIamPolicyLongrunningResponse result = completedResponse.Result;

    // Or get the name of the operation
    string operationName = response.Name;
    // This name can be stored, then the long-running operation retrieved later by name
    Operation<AnalyzeIamPolicyLongrunningResponse, AnalyzeIamPolicyLongrunningMetadata> retrievedResponse = await assetServiceClient.PollOnceAnalyzeIamPolicyLongrunningAsync(operationName);
    // Check if the retrieved long-running operation has completed
    if (retrievedResponse.IsCompleted)
    {
        // If it has completed, then access the result
        AnalyzeIamPolicyLongrunningResponse retrievedResult = retrievedResponse.Result;
    }
    // End snippet
}

/// <summary>Snippet for AnalyzeMove</summary>
public void AnalyzeMoveRequestObject()
{
    // Snippet: AnalyzeMove(AnalyzeMoveRequest, CallSettings)
    // Create client
    AssetServiceClient assetServiceClient = AssetServiceClient.Create();
    // Initialize request argument(s)
    AnalyzeMoveRequest request = new AnalyzeMoveRequest
    {
        Resource = "",
        DestinationParent = "",
        View = AnalyzeMoveRequest.Types.AnalysisView.Unspecified,
    };
    // Make the request
    AnalyzeMoveResponse response = assetServiceClient.AnalyzeMove(request);
    // End snippet
}

/// <summary>Snippet for AnalyzeMoveAsync</summary>
public async Task AnalyzeMoveRequestObjectAsync()
{
    // Snippet: AnalyzeMoveAsync(AnalyzeMoveRequest, CallSettings)
    // Additional: AnalyzeMoveAsync(AnalyzeMoveRequest, CancellationToken)
    // Create client
    AssetServiceClient assetServiceClient = await AssetServiceClient.CreateAsync();
    // Initialize request argument(s)
    AnalyzeMoveRequest request = new AnalyzeMoveRequest
    {
        Resource = "",
        DestinationParent = "",
        View = AnalyzeMoveRequest.Types.AnalysisView.Unspecified,
    };
    // Make the request
    AnalyzeMoveResponse response = await assetServiceClient.AnalyzeMoveAsync(request);
    // End snippet
}
}
}
namespace RealArtists.ChargeBee.Models {
  using System;
  using System.Collections.Generic;
  using System.ComponentModel;
  using RealArtists.ChargeBee.Internal;
  using RealArtists.ChargeBee.Models.Enums;

  /// <summary>
  /// An estimate of an invoice, as returned by ChargeBee estimate endpoints.
  /// All accessors read lazily from the underlying JSON payload via the
  /// <see cref="Resource"/> helpers; the second argument of GetValue/GetEnum
  /// indicates whether the attribute is required in the payload.
  /// </summary>
  public class InvoiceEstimate : Resource {

    /// <summary>Whether this estimate is for a recurring invoice.</summary>
    public bool Recurring {
      get { return GetValue<bool>("recurring", true); }
    }

    /// <summary>Tax-inclusive or tax-exclusive pricing.</summary>
    public PriceTypeEnum PriceType {
      get { return GetEnum<PriceTypeEnum>("price_type", true); }
    }

    /// <summary>ISO 4217 currency code of the invoice amounts.</summary>
    public string CurrencyCode {
      get { return GetValue<string>("currency_code", true); }
    }

    /// <summary>Invoice sub-total in the currency's smallest unit (e.g. cents).</summary>
    public int SubTotal {
      get { return GetValue<int>("sub_total", true); }
    }

    /// <summary>Invoice total after discounts and taxes, if present.</summary>
    public int? Total {
      get { return GetValue<int?>("total", false); }
    }

    /// <summary>Credits applied to this invoice, if any.</summary>
    public int? CreditsApplied {
      get { return GetValue<int?>("credits_applied", false); }
    }

    /// <summary>Amount already paid, if any.</summary>
    public int? AmountPaid {
      get { return GetValue<int?>("amount_paid", false); }
    }

    /// <summary>Amount still due, if any.</summary>
    public int? AmountDue {
      get { return GetValue<int?>("amount_due", false); }
    }

    /// <summary>Individual line items of the estimated invoice.</summary>
    public List<InvoiceEstimateLineItem> LineItems {
      get { return GetResourceList<InvoiceEstimateLineItem>("line_items"); }
    }

    /// <summary>Document-level discounts applied to the invoice.</summary>
    public List<InvoiceEstimateDiscount> Discounts {
      get { return GetResourceList<InvoiceEstimateDiscount>("discounts"); }
    }

    /// <summary>Document-level taxes applied to the invoice.</summary>
    public List<InvoiceEstimateTax> Taxes {
      get { return GetResourceList<InvoiceEstimateTax>("taxes"); }
    }

    /// <summary>Per-line-item tax breakdown.</summary>
    public List<InvoiceEstimateLineItemTax> LineItemTaxes {
      // Fixed key: was "line_ite_taxes", which never matches the API payload
      // (compare the correctly-spelled "line_item_discounts" below).
      get { return GetResourceList<InvoiceEstimateLineItemTax>("line_item_taxes"); }
    }

    /// <summary>Per-line-item discount breakdown.</summary>
    public List<InvoiceEstimateLineItemDiscount> LineItemDiscounts {
      get { return GetResourceList<InvoiceEstimateLineItemDiscount>("line_item_discounts"); }
    }

    /// <summary>A single line of the estimated invoice.</summary>
    public class InvoiceEstimateLineItem : Resource {
      public enum EntityTypeEnum {
        Unknown,

        [Description("plan_setup")]
        PlanSetup,

        [Description("plan")]
        Plan,

        [Description("addon")]
        Addon,

        [Description("adhoc")]
        Adhoc,
      }

      public string Id() {
        return GetValue<string>("id", false);
      }

      public string SubscriptionId() {
        return GetValue<string>("subscription_id", false);
      }

      /// <summary>Start of the billing period covered by this line.</summary>
      public DateTime DateFrom() {
        return (DateTime)GetDateTime("date_from", true);
      }

      /// <summary>End of the billing period covered by this line.</summary>
      public DateTime DateTo() {
        return (DateTime)GetDateTime("date_to", true);
      }

      public int UnitAmount() {
        return GetValue<int>("unit_amount", true);
      }

      public int? Quantity() {
        return GetValue<int?>("quantity", false);
      }

      public bool IsTaxed() {
        return GetValue<bool>("is_taxed", true);
      }

      public int? TaxAmount() {
        return GetValue<int?>("tax_amount", false);
      }

      public double? TaxRate() {
        return GetValue<double?>("tax_rate", false);
      }

      public int Amount() {
        return GetValue<int>("amount", true);
      }

      public int? DiscountAmount() {
        return GetValue<int?>("discount_amount", false);
      }

      public int? ItemLevelDiscountAmount() {
        // Fixed key: was "ite_level_discount_amount" (truncated "item").
        return GetValue<int?>("item_level_discount_amount", false);
      }

      public string Description() {
        return GetValue<string>("description", true);
      }

      public EntityTypeEnum EntityType() {
        return GetEnum<EntityTypeEnum>("entity_type", true);
      }

      public TaxExemptReasonEnum? TaxExemptReason() {
        return GetEnum<TaxExemptReasonEnum>("tax_exempt_reason", false);
      }

      public string EntityId() {
        return GetValue<string>("entity_id", false);
      }
    }

    /// <summary>A discount applied to the estimated invoice.</summary>
    public class InvoiceEstimateDiscount : Resource {
      public enum EntityTypeEnum {
        Unknown,

        // Fixed mapping: was "ite_level_coupon"; the API emits
        // "item_level_coupon" (see InvoiceEstimateLineItemDiscount below).
        [Description("item_level_coupon")]
        ItemLevelCoupon,

        [Description("document_level_coupon")]
        DocumentLevelCoupon,

        [Description("promotional_credits")]
        PromotionalCredits,

        [Description("prorated_credits")]
        ProratedCredits,
      }

      public int Amount() {
        return GetValue<int>("amount", true);
      }

      public string Description() {
        return GetValue<string>("description", false);
      }

      public EntityTypeEnum EntityType() {
        return GetEnum<EntityTypeEnum>("entity_type", true);
      }

      public string EntityId() {
        return GetValue<string>("entity_id", false);
      }
    }

    /// <summary>A document-level tax entry.</summary>
    public class InvoiceEstimateTax : Resource {
      public string Name() {
        return GetValue<string>("name", true);
      }

      public int Amount() {
        return GetValue<int>("amount", true);
      }

      public string Description() {
        return GetValue<string>("description", false);
      }
    }

    /// <summary>Tax detail attached to one line item.</summary>
    public class InvoiceEstimateLineItemTax : Resource {
      public string LineItemId() {
        // Fixed key: was "line_ite_id"; InvoiceEstimateLineItemDiscount
        // reads the same association via "line_item_id".
        return GetValue<string>("line_item_id", false);
      }

      public string TaxName() {
        return GetValue<string>("tax_name", true);
      }

      public double TaxRate() {
        return GetValue<double>("tax_rate", true);
      }

      public int TaxAmount() {
        return GetValue<int>("tax_amount", true);
      }

      public TaxJurisTypeEnum? TaxJurisType() {
        return GetEnum<TaxJurisTypeEnum>("tax_juris_type", false);
      }

      public string TaxJurisName() {
        return GetValue<string>("tax_juris_name", false);
      }

      public string TaxJurisCode() {
        return GetValue<string>("tax_juris_code", false);
      }
    }

    /// <summary>Discount detail attached to one line item.</summary>
    public class InvoiceEstimateLineItemDiscount : Resource {
      public enum DiscountTypeEnum {
        Unknown,

        [Description("item_level_coupon")]
        ItemLevelCoupon,

        [Description("document_level_coupon")]
        DocumentLevelCoupon,

        [Description("promotional_credits")]
        PromotionalCredits,

        [Description("prorated_credits")]
        ProratedCredits,
      }

      public string LineItemId() {
        return GetValue<string>("line_item_id", true);
      }

      public DiscountTypeEnum DiscountType() {
        return GetEnum<DiscountTypeEnum>("discount_type", true);
      }

      public string CouponId() {
        return GetValue<string>("coupon_id", false);
      }

      public int DiscountAmount() {
        return GetValue<int>("discount_amount", true);
      }
    }
  }
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Orleans.Runtime;

namespace Orleans.Streams
{
    /// <summary>
    /// Concrete subscription handle for a stream of <typeparamref name="T"/>.
    /// Tracks the subscription identity, the (optional) single-item and batch
    /// observers, and - for rewindable streams - the handshake token used to
    /// detect out-of-order delivery.
    /// </summary>
    [Serializable]
    internal class StreamSubscriptionHandleImpl<T> : StreamSubscriptionHandle<T>, IStreamSubscriptionHandle
    {
        private StreamImpl<T> streamImpl;
        private readonly IStreamFilterPredicateWrapper filterWrapper;
        private readonly GuidId subscriptionId;
        private readonly bool isRewindable;

        // Observers are transport-local and must not be serialized with the handle.
        [NonSerialized]
        private IAsyncObserver<T> observer;
        [NonSerialized]
        private IAsyncBatchObserver<T> batchObserver;
        [NonSerialized]
        private StreamHandshakeToken expectedToken;

        /// <summary>A handle becomes invalid after Invalidate() (unsubscribe/resume).</summary>
        internal bool IsValid { get { return streamImpl != null; } }
        internal GuidId SubscriptionId { get { return subscriptionId; } }
        internal bool IsRewindable { get { return isRewindable; } }

        public override string ProviderName { get { return this.streamImpl.ProviderName; } }
        public override IStreamIdentity StreamIdentity { get { return streamImpl; } }
        public override Guid HandleId { get { return subscriptionId.Guid; } }

        public StreamSubscriptionHandleImpl(GuidId subscriptionId, StreamImpl<T> streamImpl)
            : this(subscriptionId, null, null, streamImpl, null, null)
        {
        }

        public StreamSubscriptionHandleImpl(GuidId subscriptionId, IAsyncObserver<T> observer, IAsyncBatchObserver<T> batchObserver, StreamImpl<T> streamImpl, IStreamFilterPredicateWrapper filterWrapper, StreamSequenceToken token)
        {
            this.subscriptionId = subscriptionId ?? throw new ArgumentNullException("subscriptionId");
            this.observer = observer;
            this.batchObserver = batchObserver;
            this.streamImpl = streamImpl ?? throw new ArgumentNullException("streamImpl");
            this.filterWrapper = filterWrapper;
            this.isRewindable = streamImpl.IsRewindable;
            if (IsRewindable)
            {
                expectedToken = StreamHandshakeToken.CreateStartToken(token);
            }
        }

        /// <summary>Detaches the handle from the stream and drops both observers.</summary>
        public void Invalidate()
        {
            this.streamImpl = null;
            this.observer = null;
            this.batchObserver = null;
        }

        public StreamHandshakeToken GetSequenceToken()
        {
            return this.expectedToken;
        }

        public override Task UnsubscribeAsync()
        {
            if (!IsValid) throw new InvalidOperationException("Handle is no longer valid. It has been used to unsubscribe or resume.");
            return this.streamImpl.UnsubscribeAsync(this);
        }

        public override Task<StreamSubscriptionHandle<T>> ResumeAsync(IAsyncObserver<T> obs, StreamSequenceToken token = null)
        {
            if (!IsValid) throw new InvalidOperationException("Handle is no longer valid. It has been used to unsubscribe or resume.");
            return this.streamImpl.ResumeAsync(this, obs, token);
        }

        public override Task<StreamSubscriptionHandle<T>> ResumeAsync(IAsyncBatchObserver<T> observer, StreamSequenceToken token = null)
        {
            if (!IsValid) throw new InvalidOperationException("Handle is no longer valid. It has been used to unsubscribe or resume.");
            return this.streamImpl.ResumeAsync(this, observer, token);
        }

        /// <summary>
        /// Delivers a batch to the observer(s). Returns the expected handshake
        /// token when the incoming token does not match (caller must rewind),
        /// or null on successful delivery.
        /// </summary>
        public async Task<StreamHandshakeToken> DeliverBatch(IBatchContainer batch, StreamHandshakeToken handshakeToken)
        {
            // we validate expectedToken only for ordered (rewindable) streams
            if (this.expectedToken != null)
            {
                if (!this.expectedToken.Equals(handshakeToken))
                    return this.expectedToken;
            }

            if (batch is IBatchContainerBatch)
            {
                var batchContainerBatch = batch as IBatchContainerBatch;
                await NextBatch(batchContainerBatch);
            }
            else
            {
                // Prefer the per-item observer; fall back to the batch observer.
                if (this.observer != null)
                {
                    foreach (var itemTuple in batch.GetEvents<T>())
                    {
                        await NextItem(itemTuple.Item1, itemTuple.Item2);
                    }
                }
                else
                {
                    await NextItems(batch.GetEvents<T>());
                }
            }

            if (IsRewindable)
            {
                this.expectedToken = StreamHandshakeToken.CreateDeliveyToken(batch.SequenceToken);
            }
            return null;
        }

        /// <summary>
        /// Delivers a single item. Same token-handshake contract as DeliverBatch.
        /// </summary>
        public async Task<StreamHandshakeToken> DeliverItem(object item, StreamSequenceToken currentToken, StreamHandshakeToken handshakeToken)
        {
            if (this.expectedToken != null)
            {
                if (!this.expectedToken.Equals(handshakeToken))
                    return this.expectedToken;
            }

            await NextItem(item, currentToken);

            // check again, in case the expectedToken was changed indirectly via ResumeAsync()
            if (this.expectedToken != null)
            {
                if (!this.expectedToken.Equals(handshakeToken))
                    return this.expectedToken;
            }
            if (IsRewindable)
            {
                this.expectedToken = StreamHandshakeToken.CreateDeliveyToken(currentToken);
            }
            return null;
        }

        /// <summary>Delivers a batch-of-batches, importing each container's request context.</summary>
        public async Task NextBatch(IBatchContainerBatch batchContainerBatch)
        {
            if (this.observer != null)
            {
                foreach (var batchContainer in batchContainerBatch.BatchContainers)
                {
                    bool isRequestContextSet = batchContainer.ImportRequestContext();
                    foreach (var itemTuple in batchContainer.GetEvents<T>())
                    {
                        await NextItem(itemTuple.Item1, itemTuple.Item2);
                    }

                    if (isRequestContextSet)
                    {
                        RequestContext.Clear();
                    }
                }
            }
            else
            {
                await NextItems(batchContainerBatch.BatchContainers.SelectMany(batch => batch.GetEvents<T>()));
            }
        }

        private Task NextItem(object item, StreamSequenceToken token)
        {
            T typedItem;
            try
            {
                typedItem = (T)item;
            }
            catch (InvalidCastException)
            {
                // We got an illegal item on the stream -- close it with a Cast exception
                throw new InvalidCastException("Received an item of type " + item.GetType().Name + ", expected " + typeof(T).FullName);
            }

            // This method could potentially be invoked after Dispose() has been called, 
            // so we have to ignore the request or we risk breaking unit tests AQ_01 - AQ_04.
            if (this.observer == null || !IsValid)
                return Task.CompletedTask;

            if (filterWrapper != null && !filterWrapper.ShouldReceive(streamImpl, filterWrapper.FilterData, typedItem))
                return Task.CompletedTask;

            return this.observer.OnNextAsync(typedItem, token);
        }

        private Task NextItems(IEnumerable<Tuple<T, StreamSequenceToken>> items)
        {
            // This method could potentially be invoked after Dispose() has been called, 
            // so we have to ignore the request or we risk breaking unit tests AQ_01 - AQ_04.
            if (this.batchObserver == null || !IsValid) return Task.CompletedTask;

            // BUGFIX: the filter must KEEP items the predicate accepts (the
            // original negated ShouldReceive, inverting the filter), and it must
            // be applied to the item payload (item.Item1), matching NextItem's
            // use of typedItem - not to the (T, token) tuple.
            IList<SequentialItem<T>> batch = items
                .Where(item => filterWrapper == null || filterWrapper.ShouldReceive(streamImpl, filterWrapper.FilterData, item.Item1))
                .Select(item => new SequentialItem<T>(item.Item1, item.Item2))
                .ToList();

            return batch.Count != 0
                ? this.batchObserver.OnNextAsync(batch)
                : Task.CompletedTask;
        }

        public Task CompleteStream()
        {
            return this.observer == null
                ? Task.CompletedTask
                : this.observer.OnCompletedAsync();
        }

        public Task ErrorInStream(Exception ex)
        {
            return this.observer == null
                ? Task.CompletedTask
                : this.observer.OnErrorAsync(ex);
        }

        internal bool SameStreamId(StreamId streamId)
        {
            return IsValid && streamImpl.StreamId.Equals(streamId);
        }

        public override bool Equals(StreamSubscriptionHandle<T> other)
        {
            var o = other as StreamSubscriptionHandleImpl<T>;
            return o != null && SubscriptionId.Equals(o.SubscriptionId);
        }

        public override bool Equals(object obj)
        {
            return Equals(obj as StreamSubscriptionHandle<T>);
        }

        public override int GetHashCode()
        {
            return SubscriptionId.GetHashCode();
        }

        public override string ToString()
        {
            return String.Format("StreamSubscriptionHandleImpl:Stream={0},HandleId={1}", IsValid ? streamImpl.StreamId.ToString() : "null", HandleId);
        }
    }
}
// 
// Copyright (c) Microsoft and contributors.  All rights reserved.
// 
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//   http://www.apache.org/licenses/LICENSE-2.0
// 
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// 
// See the License for the specific language governing permissions and
// limitations under the License.
// 

// Warning: This code was generated by a tool.
// 
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.

using System;
using System.Collections.Generic;
using System.Linq;
using Hyak.Common;
using Microsoft.Azure.Management.SiteRecovery.Models;

namespace Microsoft.Azure.Management.SiteRecovery.Models
{
    /// <summary>
    /// InMageAzureV2 Provider specific entity details.
    /// </summary>
    public partial class InMageAzureV2ProviderSpecificSettings : ReplicationProviderSpecificSettings
    {
        /// <summary>
        /// Optional. Agent version.
        /// </summary>
        public string AgentVersion { get; set; }

        /// <summary>
        /// Optional. Gets or sets Azure VM Disk details.
        /// </summary>
        public IList<AzureVmDiskDetails> AzureVMDiskDetails { get; set; }

        /// <summary>
        /// Optional. Compressed data change rate in MB.
        /// </summary>
        public double CompressedDataRateInMB { get; set; }

        /// <summary>
        /// Optional. Gets or sets a value indicating the discovery type of
        /// the machine. Value can be vCenter or physical.
        /// </summary>
        public string DiscoveryType { get; set; }

        /// <summary>
        /// Optional. Value indicating whether any disk is resized for this VM.
        /// </summary>
        public string DiskResized { get; set; }

        /// <summary>
        /// Optional. Infrastructure VM Id.
        /// </summary>
        public string InfrastructureVmId { get; set; }

        /// <summary>
        /// Optional. Source IP address.
        /// </summary>
        public string IpAddress { get; set; }

        /// <summary>
        /// Optional. Value indicating whether installed agent needs to be
        /// updated.
        /// </summary>
        public string IsAgentUpdateRequired { get; set; }

        /// <summary>
        /// Optional. Value indicating whether the source server requires a
        /// restart after update.
        /// </summary>
        public string IsRebootAfterUpdateRequired { get; set; }

        /// <summary>
        /// Optional. Last heartbeat received from the source server.
        /// </summary>
        public System.DateTime? LastHeartbeat { get; set; }

        /// <summary>
        /// Optional. Master target Id.
        /// </summary>
        public string MasterTargetId { get; set; }

        /// <summary>
        /// Optional. Multi vm group Id.
        /// </summary>
        public string MultiVmGroupId { get; set; }

        /// <summary>
        /// Optional. Multi vm group name.
        /// </summary>
        public string MultiVmGroupName { get; set; }

        /// <summary>
        /// Optional. Id of the disk containing the OS.
        /// </summary>
        public string OSDiskId { get; set; }

        /// <summary>
        /// Optional. Type of the OS on the VM.
        /// </summary>
        public string OSType { get; set; }

        /// <summary>
        /// Optional. Process server Id.
        /// </summary>
        public string ProcessServerId { get; set; }

        /// <summary>
        /// Optional. List of protected volumes.
        /// </summary>
        public IList<InMageAzureV2ProtectedDiskDetails> ProtectedDisks { get; set; }

        /// <summary>
        /// Optional. Protection stage.
        /// </summary>
        public string ProtectionStage { get; set; }

        /// <summary>
        /// Optional. Gets or sets the recovery Azure storage account.
        /// </summary>
        public string RecoveryAzureStorageAccount { get; set; }

        /// <summary>
        /// Optional. Gets or sets Recovery Azure given name.
        /// </summary>
        public string RecoveryAzureVMName { get; set; }

        /// <summary>
        /// Optional. Gets or sets the Recovery Azure VM size.
        /// </summary>
        public string RecoveryAzureVMSize { get; set; }

        /// <summary>
        /// Optional. Resync progress percentage.
        /// </summary>
        public int ResyncProgressPercentage { get; set; }

        /// <summary>
        /// Optional. RPO in seconds.
        /// </summary>
        public long? RpoInSeconds { get; set; }

        /// <summary>
        /// Optional. Gets or sets the selected recovery azure network Id.
        /// </summary>
        public string SelectedRecoveryAzureNetworkId { get; set; }

        /// <summary>
        /// Optional. CPU count of the VM on the primary side.
        /// </summary>
        public int SourceVmCPUCount { get; set; }

        /// <summary>
        /// Optional. RAM size of the VM on the primary side.
        /// </summary>
        public int SourceVmRAMSizeInMB { get; set; }

        /// <summary>
        /// Optional. Uncompressed data change rate in MB.
        /// </summary>
        public double UncompressedDataRateInMB { get; set; }

        /// <summary>
        /// Optional. vCenter Infrastructure Id.
        /// </summary>
        public string VCenterInfrastructureId { get; set; }

        /// <summary>
        /// Optional. OS disk VHD name.
        /// </summary>
        public string VHDName { get; set; }

        /// <summary>
        /// Optional. Virtual machine Id.
        /// </summary>
        public string VmId { get; set; }

        /// <summary>
        /// Optional. Gets or sets the network details.
        /// </summary>
        public IList<VMNicDetails> VMNics { get; set; }

        /// <summary>
        /// Initializes a new instance of the
        /// InMageAzureV2ProviderSpecificSettings class, pre-populating the
        /// collection properties with lazily-materialized lists.
        /// </summary>
        public InMageAzureV2ProviderSpecificSettings()
        {
            this.AzureVMDiskDetails = new LazyList<AzureVmDiskDetails>();
            this.ProtectedDisks = new LazyList<InMageAzureV2ProtectedDiskDetails>();
            this.VMNics = new LazyList<VMNicDetails>();
        }
    }
}
// Copyright (c) Microsoft.  All Rights Reserved.  Licensed under the MIT license.  See License.txt in the project root for license information.

using System.Threading.Tasks;
using Microsoft.CodeAnalysis.Testing;
using Xunit;
using VerifyCS = Test.Utilities.CSharpCodeFixVerifier<
    Microsoft.NetCore.Analyzers.Runtime.TestForNaNCorrectlyAnalyzer,
    Microsoft.CodeAnalysis.Testing.EmptyCodeFixProvider>;
using VerifyVB = Test.Utilities.VisualBasicCodeFixVerifier<
    Microsoft.NetCore.Analyzers.Runtime.TestForNaNCorrectlyAnalyzer,
    Microsoft.CodeAnalysis.Testing.EmptyCodeFixProvider>;

namespace Microsoft.NetCore.Analyzers.Runtime.UnitTests
{
    // Tests for the "test for NaN correctly" analyzer: a diagnostic is expected
    // whenever a comparison operator is used against float.NaN/double.NaN
    // (which is always false/true regardless of the operand), and no diagnostic
    // for non-comparison uses of NaN or comparisons against non-NaN values.
    public class TestForNaNCorrectlyTests
    {
        [Fact]
        public async Task CSharpDiagnosticForEqualityWithFloatNaNAsync()
        {
            var code = @"
public class A
{
    public bool Compare(float f)
    {
        return f == float.NaN;
    }
}
";
            await VerifyCS.VerifyAnalyzerAsync(code, GetCSharpResultAt(6, 16));
        }

        [Fact]
        public async Task BasicDiagnosticForEqualityWithFloatNaNAsync()
        {
            var code = @"
Public Class A
    Public Function Compare(f As Single) As Boolean
        Return f = Single.NaN
    End Function
End Class
";
            await VerifyVB.VerifyAnalyzerAsync(code, GetBasicResultAt(4, 16));
        }

        [Fact]
        public async Task CSharpDiagnosticForInequalityWithFloatNaNAsync()
        {
            var code = @"
public class A
{
    public bool Compare(float f)
    {
        return f != float.NaN;
    }
}
";
            await VerifyCS.VerifyAnalyzerAsync(code, GetCSharpResultAt(6, 16));
        }

        [Fact]
        public async Task BasicDiagnosticForInEqualityWithFloatNaNAsync()
        {
            var code = @"
Public Class A
    Public Function Compare(f As Single) As Boolean
        Return f <> Single.NaN
    End Function
End Class
";
            await VerifyVB.VerifyAnalyzerAsync(code, GetBasicResultAt(4, 16));
        }

        [Fact]
        public async Task CSharpDiagnosticForGreaterThanFloatNaNAsync()
        {
            var code = @"
public class A
{
    public bool Compare(float f)
    {
        return f > float.NaN;
    }
}
";
            await VerifyCS.VerifyAnalyzerAsync(code, GetCSharpResultAt(6, 16));
        }

        [Fact]
        public async Task BasicDiagnosticForGreaterThanFloatNaNAsync()
        {
            var code = @"
Public Class A
    Public Function Compare(f As Single) As Boolean
        Return f > Single.NaN
    End Function
End Class
";
            await VerifyVB.VerifyAnalyzerAsync(code, GetBasicResultAt(4, 16));
        }

        [Fact]
        public async Task CSharpDiagnosticForGreaterThanOrEqualToFloatNaNAsync()
        {
            var code = @"
public class A
{
    public bool Compare(float f)
    {
        return f >= float.NaN;
    }
}
";
            await VerifyCS.VerifyAnalyzerAsync(code, GetCSharpResultAt(6, 16));
        }

        [Fact]
        public async Task BasicDiagnosticForGreaterThanOrEqualToFloatNaNAsync()
        {
            var code = @"
Public Class A
    Public Function Compare(f As Single) As Boolean
        Return f >= Single.NaN
    End Function
End Class
";
            await VerifyVB.VerifyAnalyzerAsync(code, GetBasicResultAt(4, 16));
        }

        [Fact]
        public async Task CSharpDiagnosticForLessThanFloatNaNAsync()
        {
            var code = @"
public class A
{
    public bool Compare(float f)
    {
        return f < float.NaN;
    }
}
";
            await VerifyCS.VerifyAnalyzerAsync(code, GetCSharpResultAt(6, 16));
        }

        [Fact]
        public async Task BasicDiagnosticForLessThanFloatNaNAsync()
        {
            var code = @"
Public Class A
    Public Function Compare(f As Single) As Boolean
        Return f < Single.NaN
    End Function
End Class
";
            await VerifyVB.VerifyAnalyzerAsync(code, GetBasicResultAt(4, 16));
        }

        [Fact]
        public async Task CSharpDiagnosticForLessThanOrEqualToFloatNaNAsync()
        {
            var code = @"
public class A
{
    public bool Compare(float f)
    {
        return f <= float.NaN;
    }
}
";
            await VerifyCS.VerifyAnalyzerAsync(code, GetCSharpResultAt(6, 16));
        }

        [Fact]
        public async Task BasicDiagnosticForLessThanOrEqualToFloatNaNAsync()
        {
            var code = @"
Public Class A
    Public Function Compare(f As Single) As Boolean
        Return f <= Single.NaN
    End Function
End Class
";
            await VerifyVB.VerifyAnalyzerAsync(code, GetBasicResultAt(4, 16));
        }

        [Fact]
        public async Task CSharpDiagnosticForComparisonWithDoubleNaNAsync()
        {
            var code = @"
public class A
{
    public bool Compare(double d)
    {
        return d == double.NaN;
    }
}
";
            await VerifyCS.VerifyAnalyzerAsync(code, GetCSharpResultAt(6, 16));
        }

        [Fact]
        public async Task BasicDiagnosticForComparisonWithDoubleNaNAsync()
        {
            var code = @"
Public Class A
    Public Function Compare(d As Double) As Boolean
        Return d < Double.NaN
    End Function
End Class
";
            await VerifyVB.VerifyAnalyzerAsync(code, GetBasicResultAt(4, 16));
        }

        [Fact]
        public async Task CSharpDiagnosticForComparisonWithNaNOnLeftAsync()
        {
            var code = @"
public class A
{
    public bool Compare(double d)
    {
        return double.NaN == d;
    }
}
";
            await VerifyCS.VerifyAnalyzerAsync(code, GetCSharpResultAt(6, 16));
        }

        [Fact]
        public async Task BasicDiagnosticForComparisonWithNaNOnLeftAsync()
        {
            var code = @"
Public Class A
    Public Function Compare(d As Double) As Boolean
        Return Double.NaN = d
    End Function
End Class
";
            await VerifyVB.VerifyAnalyzerAsync(code, GetBasicResultAt(4, 16));
        }

        [Fact]
        public async Task CSharpNoDiagnosticForComparisonWithBadExpressionAsync()
        {
            var code = @"
public class A
{
    public bool Compare(float f)
    {
        return f == float.{|CS0117:NbN|}; // Misspelled.
    }
}
";
            await VerifyCS.VerifyAnalyzerAsync(code);
        }

        [Fact]
        public async Task BasicNoDiagnosticForComparisonWithBadExpressionAsync()
        {
            var code = @"
Public Class A
    Public Function Compare(f As Single) As Boolean
        Return f = {|BC30456:Single.NbN|} ' Misspelled
    End Function
End Class
";
            await VerifyVB.VerifyAnalyzerAsync(code);
        }

        [Fact]
        public async Task CSharpNoDiagnosticForComparisonWithFunctionReturningNaNAsync()
        {
            var code = @"
public class A
{
    public bool Compare(float f)
    {
        return f == NaNFunc();
    }

    private float NaNFunc()
    {
        return float.NaN;
    }
}
";
            await VerifyCS.VerifyAnalyzerAsync(code);
        }

        [Fact]
        public async Task BasicNoDiagnosticForComparisonWithFunctionReturningNaNAsync()
        {
            var code = @"
Public Class A
    Public Function Compare(f As Single) As Boolean
        Return f = NaNFunc()
    End Function

    Private Function NaNFunc() As Single
        Return Single.NaN
    End Function
End Class
";
            await VerifyVB.VerifyAnalyzerAsync(code);
        }

        [Fact]
        public async Task CSharpNoDiagnosticForEqualityWithNonNaNAsync()
        {
            var code = @"
public class A
{
    public bool Compare(float f)
    {
        return f == 1.0;
    }
}
";
            await VerifyCS.VerifyAnalyzerAsync(code);
        }

        [Fact]
        public async Task BasicNoDiagnosticForEqualityWithNonNaNAsync()
        {
            var code = @"
Public Class A
    Public Function Compare(f As Single) As Boolean
        Return f = 1.0
    End Function
End Class
";
            await VerifyVB.VerifyAnalyzerAsync(code);
        }

        [Fact]
        public async Task CSharpNoDiagnosticForNonComparisonOperationWithNaNAsync()
        {
            var code = @"
public class A
{
    public float OperateOn(float f)
    {
        return f + float.NaN;
    }
}
";
            await VerifyCS.VerifyAnalyzerAsync(code);
        }

        // Renamed from ...WithNonNaNAsync: the test exercises an arithmetic
        // (non-comparison) operation *with* NaN, matching its C# counterpart.
        [Fact]
        public async Task BasicNoDiagnosticForNonComparisonOperationWithNaNAsync()
        {
            var code = @"
Public Class A
    Public Function OperateOn(f As Single) As Single
        Return f + Single.NaN
    End Function
End Class
";
            await VerifyVB.VerifyAnalyzerAsync(code);
        }

        [Fact]
        public async Task CSharpOnlyOneDiagnosticForComparisonWithNaNOnBothSidesAsync()
        {
            var code = @"
public class A
{
    public bool Compare()
    {
        return float.NaN == float.NaN;
    }
}
";
            await VerifyCS.VerifyAnalyzerAsync(code, GetCSharpResultAt(6, 16));
        }

        // Renamed from ...WithNonNaNOnBothSidesAsync: both operands are NaN,
        // matching the C# counterpart above.
        [Fact]
        public async Task BasicOnlyOneDiagnosticForComparisonWithNaNOnBothSidesAsync()
        {
            var code = @"
Public Class A
    Public Function Compare() As Boolean
        Return Single.NaN = Single.NaN
    End Function
End Class
";
            await VerifyVB.VerifyAnalyzerAsync(code, GetBasicResultAt(4, 16));
        }

        // At @srivatsn's suggestion, here are a few tests that verify that the operation
        // tree is correct when the comparison occurs in syntactic constructs other than
        // a function return value. Of course we can't be exhaustive about this, and these
        // tests are really more about the correctness of the operation tree -- ensuring
        // that "binary operator expressions" are present in places we expect them to be --
        // than they are about the correctness of our treatment of these expressions once
        // we find them.
        [Fact]
        public async Task CSharpDiagnosticForComparisonWithNaNInFunctionArgumentAsync()
        {
            var code = @"
public class A
{
    float _n = 42.0F;

    public void F()
    {
        G(_n == float.NaN);
    }

    public void G(bool comparison) {}
}
";
            await VerifyCS.VerifyAnalyzerAsync(code, GetCSharpResultAt(8, 11));
        }

        [Fact]
        public async Task BasicDiagnosticForComparisonWithNaNInFunctionArgumentAsync()
        {
            var code = @"
Public Class A
    Private _n As Single = 42.0F

    Public Sub F()
        G(_n = Single.NaN)
    End Sub

    Public Sub G(comparison As Boolean)
    End Sub
End Class
";
            await VerifyVB.VerifyAnalyzerAsync(code, GetBasicResultAt(6, 11));
        }

        [Fact]
        public async Task CSharpDiagnosticForComparisonWithNaNInTernaryOperatorAsync()
        {
            var code = @"
public class A
{
    float _n = 42.0F;

    public int F()
    {
        return _n == float.NaN ? 1 : 0;
    }
}
";
            await VerifyCS.VerifyAnalyzerAsync(code, GetCSharpResultAt(8, 16));
        }

        [Fact]
        public async Task BasicDiagnosticForComparisonWithNaNInIfOperatorAsync()
        {
            // VB doesn't have the ternary operator, but we add this test for symmetry.
            var code = @"
Public Class A
    Private _n As Single = 42.0F

    Public Function F() As Integer
        Return If(_n = Single.NaN, 1, 0)
    End Function
End Class
";
            await VerifyVB.VerifyAnalyzerAsync(code, GetBasicResultAt(6, 19));
        }

        [Fact]
        public async Task CSharpDiagnosticForComparisonWithNaNInThrowStatementAsync()
        {
            var code = @"
public class A
{
    float _n = 42.0F;

    public void F()
    {
        throw _n != float.NaN ? new System.Exception() : new System.ArgumentException();
    }
}
";
            await VerifyCS.VerifyAnalyzerAsync(code, GetCSharpResultAt(8, 15));
        }

        [Fact]
        public async Task CSharpDiagnosticForComparisonWithNaNInCatchFilterClauseAsync()
        {
            var code = @"
using System;

public class A
{
    float _n = 42.0F;

    public void F()
    {
        try
        {
        }
        catch (Exception ex) when (_n != float.NaN)
        {
        }
    }
}
";
            await VerifyCS.VerifyAnalyzerAsync(code, GetCSharpResultAt(13, 36));
        }

        [Fact]
        public async Task CSharpDiagnosticForComparisonWithNaNInYieldReturnStatementAsync()
        {
            var code = @"
using System.Collections.Generic;

public class A
{
    float _n = 42.0F;

    public IEnumerable<bool> F()
    {
        yield return _n != float.NaN;
    }
}
";
            await VerifyCS.VerifyAnalyzerAsync(code, GetCSharpResultAt(10, 22));
        }

        [Fact]
        public async Task CSharpDiagnosticForComparisonWithNaNInSwitchStatementAsync()
        {
            var code = @"
public class A
{
    float _n = 42.0F;

    public void F()
    {
        switch (_n != float.NaN)
        {
            default:
                throw new System.NotImplementedException();
        }
    }
}
";
            await VerifyCS.VerifyAnalyzerAsync(code, GetCSharpResultAt(8, 17));
        }

        [Fact]
        public async Task CSharpDiagnosticForComparisonWithNaNInForLoopAsync()
        {
            var code = @"
public class A
{
    float _n = 42.0F;

    public void F()
    {
        for (; _n != float.NaN; )
        {
            throw new System.Exception();
        }
    }
}
";
            await VerifyCS.VerifyAnalyzerAsync(code, GetCSharpResultAt(8, 16));
        }

        [Fact]
        public async Task CSharpDiagnosticForComparisonWithNaNInWhileLoopAsync()
        {
            var code = @"
public class A
{
    float _n = 42.0F;

    public void F()
    {
        while (_n != float.NaN)
        {
        }
    }
}
";
            await VerifyCS.VerifyAnalyzerAsync(code, GetCSharpResultAt(8, 16));
        }

        [Fact]
        public async Task CSharpDiagnosticForComparisonWithNaNInDoWhileLoopAsync()
        {
            var code = @"
public class A
{
    float _n = 42.0F;

    public void F()
    {
        do
        {
        }
        while (_n != float.NaN);
    }
}
";
            await VerifyCS.VerifyAnalyzerAsync(code, GetCSharpResultAt(11, 16));
        }

        private static DiagnosticResult GetCSharpResultAt(int line, int column)
#pragma warning disable RS0030 // Do not use banned APIs
            => VerifyCS.Diagnostic()
                .WithLocation(line, column);
#pragma warning restore RS0030 // Do not use banned APIs

        private static DiagnosticResult GetBasicResultAt(int line, int column)
#pragma warning disable RS0030 // Do not use banned APIs
            => VerifyVB.Diagnostic()
                .WithLocation(line, column);
#pragma warning restore RS0030 // Do not use banned APIs
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.

namespace Microsoft.Azure.Management.Monitor.Fluent
{
    using Microsoft.Azure.Management.Monitor.Fluent.MetricAlert.Definition;
    using Microsoft.Azure.Management.Monitor.Fluent.MetricAlert.Update;
    using Microsoft.Azure.Management.Monitor.Fluent.Models;
    using Microsoft.Azure.Management.ResourceManager.Fluent;
    using Microsoft.Azure.Management.ResourceManager.Fluent.Core;
    using System;
    using System.Linq;
    using System.Collections.Generic;
    using System.Threading;
    using System.Threading.Tasks;

    /// <summary>
    /// Implementation for MetricAlert.
    /// </summary>
    ///GENTHASH:Y29tLm1pY3Jvc29mdC5henVyZS5tYW5hZ2VtZW50Lm1vbml0b3IuaW1wbGVtZW50YXRpb24uTWV0cmljQWxlcnRJbXBs
    internal partial class MetricAlertImpl :
        GroupableResource<IMetricAlert,
            MetricAlertResourceInner,
            MetricAlertImpl,
            MonitorManager,
            MetricAlert.Definition.IBlank,
            MetricAlert.Definition.IWithScopes,
            MetricAlert.Definition.IWithCreate,
            MetricAlert.Update.IUpdate>,
        IMetricAlert,
        IDefinition,
        IUpdate,
        IWithMetricUpdate
    {
        // Alert conditions keyed by condition name; mirrors Inner.Criteria.AllOf.
        private Dictionary<string, Microsoft.Azure.Management.Monitor.Fluent.IMetricAlertCondition> conditions;

        ///GENMHASH:93FFF181B400DDE81DA77A82752C1C48:F98AC2175A8CD73451AD6F369CD5E05F
        internal MetricAlertImpl(string name, MetricAlertResourceInner innerModel, MonitorManager monitorManager)
            : base(name, innerModel, monitorManager)
        {
            this.conditions = new Dictionary<string, Microsoft.Azure.Management.Monitor.Fluent.IMetricAlertCondition>();
            // FIX: use a safe cast instead of a hard cast. Criteria can be a different
            // MetricAlertCriteria subtype, in which case the hard cast would throw
            // InvalidCastException instead of reaching the intended null branch below.
            var crits = innerModel.Criteria as MetricAlertSingleResourceMultipleMetricCriteria;
            if (crits != null)
            {
                foreach (var crit in crits.AllOf)
                {
                    this.conditions[crit.Name] = new MetricAlertConditionImpl(crit.Name, crit, this);
                }
            }
        }

        ///GENMHASH:04C3DADE8E037DF05A82835AF96AF265:D45B28490B7AE6B4CA8461670CCAE4DC
        // Adds (or replaces) a condition; removing first keeps the name -> condition map unique.
        internal MetricAlertImpl WithAlertCriteria(MetricAlertConditionImpl criteria)
        {
            var name = criteria.Name();
            this.WithoutAlertCriteria(name);
            this.conditions[name] = criteria;
            return this;
        }

        ///GENMHASH:5AD91481A0966B059A478CD4E9DD9466:5B960FE4701B7B30A98A4F211FA06D5D
        protected override async Task<Models.MetricAlertResourceInner> GetInnerAsync(CancellationToken cancellationToken = default(CancellationToken))
        {
            return await this.Manager.Inner.MetricAlerts.GetAsync(this.ResourceGroupName, this.Name, cancellationToken);
        }

        ///GENMHASH:66DC6CE2AC9BE61B5E666402EB693221:2D08CF98D3B6811D1A7A4C18C0CE3C12
        /// <summary>Returns the ids of the action groups attached to this alert (empty when none).</summary>
        public IReadOnlyCollection<string> ActionGroupIds()
        {
            var ids = new List<string>();
            // FIX: the original condition tested 'Inner.Actions != null' twice.
            if (this.Inner.Actions != null)
            {
                foreach (var maag in this.Inner.Actions)
                {
                    ids.Add(maag.ActionGroupId);
                }
            }
            return ids;
        }

        ///GENMHASH:757305684CB38CD78E303A75B6BB60FF:C847040A726E35E60A000ACD7F24E314
        public IReadOnlyDictionary<string, Microsoft.Azure.Management.Monitor.Fluent.IMetricAlertCondition> AlertCriterias()
        {
            return this.conditions;
        }

        ///GENMHASH:BD4E8EEC1F995C84FF18BAE3CCFD22A6:F72671A23D283F9DD9B5C804037ECE33
        /// <summary>Whether the alert auto-resolves; false when the flag is unset.</summary>
        public bool AutoMitigate()
        {
            return this.Inner.AutoMitigate ?? false;
        }

        ///GENMHASH:0202A00A1DCF248D2647DBDBEF2CA865:3D3357BF7A9E06A99BB65E3E9DAF00FD
        public override async Task<Microsoft.Azure.Management.Monitor.Fluent.IMetricAlert> CreateResourceAsync(CancellationToken cancellationToken = default(CancellationToken))
        {
            // Metric alerts are a global (location-less) resource.
            this.Inner.Location = "global";
            // Rebuild the criteria payload from the local condition map before submitting.
            var crit = new MetricAlertSingleResourceMultipleMetricCriteria();
            crit.AllOf = new List<MetricCriteria>();
            foreach (var mc in conditions.Values)
            {
                crit.AllOf.Add(mc.Inner);
            }
            this.Inner.Criteria = crit;
            SetInner(await this.Manager.Inner.MetricAlerts.CreateOrUpdateAsync(this.ResourceGroupName, this.Name, this.Inner, cancellationToken));
            return this;
        }

        ///GENMHASH:8CCE644095FFB50F9DEE14F363C80774:D71D22182A27EAB88444CD16A8974390
        public MetricAlertConditionImpl DefineAlertCriteria(string name)
        {
            return new MetricAlertConditionImpl(name, new MetricCriteria(), this);
        }

        ///GENMHASH:7B3CA3D467253D93C6FF7587C3C0D0B7:F5293CC540B22E551BB92F6FCE17DE2C
        public string Description()
        {
            return this.Inner.Description;
        }

        ///GENMHASH:1703877FCECC33D73EA04EEEF89045EF:EB71563FB99F270D0827FDCDA083A584
        public bool Enabled()
        {
            return this.Inner.Enabled;
        }

        ///GENMHASH:6B9F8E34E59C56A0ADE05FF4B71FFF16:3A883853EF6DBDD2909F1D82D52F6295
        public TimeSpan EvaluationFrequency()
        {
            return this.Inner.EvaluationFrequency;
        }

        ///GENMHASH:DF5C039E76E3291E606FA7B30E6A35B8:63328FCE78D88A10DDBE141D8DF86DAB
        public DateTime? LastUpdatedTime()
        {
            return this.Inner.LastUpdatedTime;
        }

        ///GENMHASH:C457EEA978B7A6C6C56D90DDF5271FFB:82059B9BE2545D9387D9EA1B5A801869
        public IReadOnlyCollection<string> Scopes()
        {
            return this.Inner.Scopes.ToList();
        }

        ///GENMHASH:ADCA390FA193949D8BA48D8804FB138B:D820AD5904970E73EAE6FDD91C9395A4
        public int Severity()
        {
            return this.Inner.Severity;
        }

        ///GENMHASH:A61C25AD4B6930EB03CA48C25CDEF795:79090E4718A09FDF5299FE081DD6B337
        public MetricAlertConditionImpl UpdateAlertCriteria(string name)
        {
            return (MetricAlertConditionImpl)this.conditions[name];
        }

        ///GENMHASH:AE926B5FF5A4B01D584D38C07E21A243:15DB234CEC0D38C1E33EB2ECEB2CC038
        public TimeSpan WindowSize()
        {
            return this.Inner.WindowSize;
        }

        ///GENMHASH:8251517CD3DB23FD0217AD932D86E975:89FE971323C03077A05F6DBB399CC7F8
        /// <summary>Replaces the entire action group list with the given ids.</summary>
        public MetricAlertImpl WithActionGroups(params string[] actionGroupId)
        {
            if (this.Inner.Actions == null)
            {
                this.Inner.Actions = new List<MetricAlertAction>();
            }
            this.Inner.Actions.Clear();
            foreach (var agid in actionGroupId)
            {
                var maa = new MetricAlertAction();
                maa.ActionGroupId = agid;
                this.Inner.Actions.Add(maa);
            }
            return this;
        }

        ///GENMHASH:3E72FBE95EB9F0D5CB0EE25FB0D4289B:D28CEE20EE587F7BE2C58D660CBB76F2
        public MetricAlertImpl WithAlertDetails(int severity, string description)
        {
            this.WithSeverity(severity);
            return this.WithDescription(description);
        }

        ///GENMHASH:B1FAD9ED00B5928448AB0AA933758335:5640B4C7C912ABC98D9779381D53E6DC
        public MetricAlertImpl WithAutoMitigation()
        {
            this.Inner.AutoMitigate = true;
            return this;
        }

        ///GENMHASH:016764F09D1966D691B5DE3A7FD47AC9:5D67BF1D9DA1008F878F13C112FF5F35
        public MetricAlertImpl WithDescription(string description)
        {
            this.Inner.Description = description;
            return this;
        }

        ///GENMHASH:CEDDCCEB2476E58338BF2FA01220048D:CFB14B1FCC87FA2BAE66C1739882B0E0
        public MetricAlertImpl WithFrequency(TimeSpan frequency)
        {
            this.Inner.EvaluationFrequency = frequency;
            return this;
        }

        ///GENMHASH:ED05B641BBACDA0FE20CB8084C06E215:7AEA88F41785879622403706A8BF6B9A
        /// <summary>Removes every action whose id matches (case-insensitive); no-op when absent.</summary>
        public MetricAlertImpl WithoutActionGroup(string actionGroupId)
        {
            if (this.Inner.Actions != null)
            {
                // Collect first, then remove, to avoid mutating the list while enumerating it.
                var toDelete = new List<MetricAlertAction>();
                foreach (var maa in this.Inner.Actions)
                {
                    if (maa.ActionGroupId.Equals(actionGroupId, StringComparison.OrdinalIgnoreCase))
                    {
                        toDelete.Add(maa);
                    }
                }
                foreach (var maa in toDelete)
                {
                    this.Inner.Actions.Remove(maa);
                }
            }
            return this;
        }

        ///GENMHASH:E4FEC8C316C1129E5FA8F1D228445F51:4B6FC8F18AB8BEA63867486135BF38C3
        public MetricAlertImpl WithoutAlertCriteria(string name)
        {
            if (this.conditions.ContainsKey(name))
            {
                this.conditions.Remove(name);
            }
            return this;
        }

        ///GENMHASH:1350F1101BA21E04B29D498C2E0AA500:C7B841429CEB812AD0A4C96DFAFF636B
        public MetricAlertImpl WithoutAutoMitigation()
        {
            this.Inner.AutoMitigate = false;
            return this;
        }

        ///GENMHASH:252AAE75064297D555927CEDAE99C9D4:3E52FB242763B2F8A4587CF4CE43F118
        public MetricAlertImpl WithPeriod(TimeSpan size)
        {
            this.Inner.WindowSize = size;
            return this;
        }

        ///GENMHASH:19D591A5811CC295B77719A40CEB3F64:9A4882A827B87B926799484B506DA9A3
        public MetricAlertImpl WithRuleDisabled()
        {
            this.Inner.Enabled = false;
            return this;
        }

        ///GENMHASH:1952D7AE67830F92010B1423D9533A88:B605F0C6D20484DEA14055C58519B8C8
        public MetricAlertImpl WithRuleEnabled()
        {
            this.Inner.Enabled = true;
            return this;
        }

        ///GENMHASH:7ED8FFB8E1E8A478D0B971D4B84FAE92:3182F67E8B2D04AAB4A46329B8E3F9E8
        public MetricAlertImpl WithSeverity(int severity)
        {
            this.Inner.Severity = severity;
            return this;
        }

        ///GENMHASH:21C5E913CC99F20E7CFF02057B43ED9D:252983E9D051F9EAAC0EB5276C560315
        /// <summary>Targets the alert at a single resource id (replaces previous scopes).</summary>
        public MetricAlertImpl WithTargetResource(string resourceId)
        {
            this.Inner.Scopes = new List<string>();
            this.Inner.Scopes.Add(resourceId);
            return this;
        }

        ///GENMHASH:FF34A220CBD022BF5822C4584DEEE94E:A6098866C47E7A7E582B09209AD5C53E
        public MetricAlertImpl WithTargetResource(IHasId resource)
        {
            // FIX: removed unreachable 'return this;' that followed this return (CS0162).
            return this.WithTargetResource(resource.Id);
        }
    }
}
/******************************************************************************************** Copyright (c) Microsoft Corporation All rights reserved. Microsoft Public License: This license governs use of the accompanying software. If you use the software, you accept this license. If you do not accept the license, do not use the software. 1. Definitions The terms "reproduce," "reproduction," "derivative works," and "distribution" have the same meaning here as under U.S. copyright law. A "contribution" is the original software, or any additions or changes to the software. A "contributor" is any person that distributes its contribution under this license. "Licensed patents" are a contributor's patent claims that read directly on its contribution. 2. Grant of Rights (A) Copyright Grant- Subject to the terms of this license, including the license conditions and limitations in section 3, each contributor grants you a non-exclusive, worldwide, royalty-free copyright license to reproduce its contribution, prepare derivative works of its contribution, and distribute its contribution or any derivative works that you create. (B) Patent Grant- Subject to the terms of this license, including the license conditions and limitations in section 3, each contributor grants you a non-exclusive, worldwide, royalty-free license under its licensed patents to make, have made, use, sell, offer for sale, import, and/or otherwise dispose of its contribution in the software or derivative works of the contribution in the software. 3. Conditions and Limitations (A) No Trademark License- This license does not grant you rights to use any contributors' name, logo, or trademarks. (B) If you bring a patent claim against any contributor over patents that you claim are infringed by the software, your patent license from such contributor to the software ends automatically. 
(C) If you distribute any portion of the software, you must retain all copyright, patent, trademark, and attribution notices that are present in the software. (D) If you distribute any portion of the software in source code form, you may do so only under this license by including a complete copy of this license with your distribution. If you distribute any portion of the software in compiled or object code form, you may only do so under a license that complies with this license. (E) The software is licensed "as-is." You bear the risk of using it. The contributors give no express warranties, guarantees or conditions. You may have additional consumer rights under your local laws which this license cannot change. To the extent permitted under your local laws, the contributors exclude the implied warranties of merchantability, fitness for a particular purpose and non-infringement. ********************************************************************************************/ using System; using System.IO; using System.Reflection; using Microsoft.VisualStudio; using Microsoft.VisualStudio.Shell.Interop; using Microsoft.VisualStudio.TestTools.UnitTesting; using Microsoft.VsSDK.UnitTestLibrary; using MSBuild = Microsoft.Build.BuildEngine; namespace Microsoft.VisualStudio.Project.Samples.NestedProject.UnitTests { /// <summary> ///This is a test class for VisualStudio.Project.Samples.NestedProject.OANestedProject and is intended ///to contain all VisualStudio.Project.Samples.NestedProject.OANestedProject Unit Tests. 
///</summary>
    [TestClass()]
    public class AutomationTests : BaseTest
    {
        #region Fields
        // Automation wrapper around the project node created by BaseTest.
        private OANestedProject nestedProject;
        // Properties collection exposed by the automation object; cached for the property tests.
        private OANestedProjectProperties projectProperties;
        #endregion Fields

        #region Tests Initialization && Cleanup
        [ClassInitialize]
        public static void TestClassInitialize(TestContext context)
        {
            // Rebase the template/project/target paths onto the test deployment directory.
            fullPathToClassTemplateFile = Path.Combine(context.TestDeploymentDir, fullPathToClassTemplateFile);
            fullPathToProjectFile = Path.Combine(context.TestDeploymentDir, fullPathToProjectFile);
            fullPathToTargetFile = Path.Combine(context.TestDeploymentDir, fullPathToTargetFile);
        }

        /// <summary>
        /// Runs before the test to allocate and configure resources needed
        /// by all tests in the test class.
        /// </summary>
        [TestInitialize()]
        public override void Initialize()
        {
            base.Initialize();

            //init the automation objects
            nestedProject = new OANestedProject(projectNode);
            projectProperties = (OANestedProjectProperties)nestedProject.Properties;
        }
        #endregion

        #region The tests for the OANestedProject && OANestedProjectProperties classes
        #region Constructors tests
        /// <summary>
        /// The test for OANestedProject default constructor.
        ///</summary>
        [TestMethod()]
        public void ConstructorTest()
        {
            Assert.IsNotNull(nestedProject, "OANestedProject instance was uninitialized.");
            Assert.IsNotNull(nestedProject.Project, "OANestedProject Project property was uninitialized.");
        }
        #endregion Constructors tests

        #region Properties tests
        /// <summary>
        /// The test for Properties property.
        ///</summary>
        [TestMethod()]
        public void PropertiesTest()
        {
            Assert.IsNotNull(nestedProject.Properties, "Node Properties was uninitialized.");
            Assert.IsTrue((nestedProject.Properties is OANestedProjectProperties),
                "Returned Node Properties was initialized by unexpected type value.");
        }
        #endregion Properties tests
        #endregion The tests for the OANestedProject && OANestedProjectProperties classes

        #region The tests for the OANestedProjectProperty class
        #region Constructors tests
        /// <summary>
        /// The test for OANestedProjectProperty explicit default constructor.
        ///</summary>
        [TestMethod()]
        public void DefaultConstructorTest()
        {
            OANestedProjectProperty target = new OANestedProjectProperty();
            Assert.IsNotNull(target, "The OANestedProjectProperty instance was not created successfully.");
        }

        /// <summary>
        /// The test for OANestedProjectProperty internal constructor.
        ///</summary>
        [TestMethod()]
        public void InternalConstructorTest()
        {
            Assert.IsNotNull(nestedProject.Properties, "Node Properties was uninitialized.");
            Assert.IsTrue((nestedProject.Properties is OANestedProjectProperties),
                "Returned Node Properties was initialized by unexpected type value.");

            string name = "Some random name";
            OANestedProjectProperty testProperty =
                VisualStudio_Project_Samples_OANestedProjectPropertyAccessor.CreatePrivate(projectProperties, name);
            Assert.IsNotNull(testProperty, "The OANestedProjectProperty instance was not created successfully.");
        }
        #endregion Constructors tests

        #region Properties tests
        /// <summary>
        /// The test for the Application property.
        /// </summary>
        /// <remarks>This property marked as "Microsoft Internal Use Only" and returns null.</remarks>
        [TestMethod()]
        public void ApplicationPropertyTest()
        {
            string name = "Some random name";
            OANestedProjectProperty testProperty =
                VisualStudio_Project_Samples_OANestedProjectPropertyAccessor.CreatePrivate(projectProperties, name);
            Assert.IsNotNull(testProperty, "The OANestedProjectProperty instance was not created successfully.");
            Assert.IsNull(testProperty.Application, "Application property was returned as initialized value.");
        }

        /// <summary>
        /// The test for the Parent property.
        /// </summary>
        [TestMethod()]
        public void ParentPropertyTest()
        {
            string name = "Some random name";
            OANestedProjectProperty testProperty =
                VisualStudio_Project_Samples_OANestedProjectPropertyAccessor.CreatePrivate(projectProperties, name);
            Assert.IsNotNull(testProperty, "The OANestedProjectProperty instance was not created successfully.");
            Assert.AreEqual(projectProperties, testProperty.Parent,
                "ProjectProperty Parent was initialized by unexpected value.");
        }

        /// <summary>
        /// The test for the Collection property.
        /// </summary>
        [TestMethod()]
        public void CollectionPropertyTest()
        {
            string name = "Some random name";
            OANestedProjectProperty testProperty =
                VisualStudio_Project_Samples_OANestedProjectPropertyAccessor.CreatePrivate(projectProperties, name);
            Assert.IsNotNull(testProperty, "The OANestedProjectProperty instance was not created successfully.");
            Assert.AreEqual(projectProperties, testProperty.Collection,
                "ProjectProperty Collection was initialized by unexpected value.");
        }

        /// <summary>
        /// The test for the Parent DTE property.
        /// </summary>
        [TestMethod()]
        public void ParentDTEPropertyTest()
        {
            string name = "Some random name";
            OANestedProjectProperty testProperty =
                VisualStudio_Project_Samples_OANestedProjectPropertyAccessor.CreatePrivate(projectProperties, name);
            Assert.IsNotNull(testProperty, "The OANestedProjectProperty instance was not created successfully.");
            Assert.AreEqual(projectProperties.DTE, testProperty.DTE,
                "ProjectProperty Parent.DTE was initialized by unexpected value.");
        }

        /// <summary>
        /// The test for the Name property.
        /// </summary>
        [TestMethod()]
        public void NamePropertyTest()
        {
            string name = "Some random name";
            OANestedProjectProperty testProperty =
                VisualStudio_Project_Samples_OANestedProjectPropertyAccessor.CreatePrivate(projectProperties, name);
            Assert.IsNotNull(testProperty, "The OANestedProjectProperty instance was not created successfully.");
            Assert.AreEqual(name, testProperty.Name, "ProjectProperty Name was initialized by unexpected value.");
        }

        /// <summary>
        /// The test for the get_IndexValue() method.
        /// </summary>
        /// <remarks>Probably method get_IndexValue() is uncompleted.</remarks>
        [TestMethod()]
        public void get_IndexedValueTest()
        {
            string name = "Some Random Name";
            OANestedProjectProperty testProperty =
                VisualStudio_Project_Samples_OANestedProjectPropertyAccessor.CreatePrivate(projectProperties, name);
            Assert.IsNotNull(testProperty, "The OANestedProjectProperty instance was not created successfully.");

            object actualValue = testProperty.get_IndexedValue(null, null, null, null);
            Assert.IsNull(actualValue, "Method get_IndexValue was returned unexpected value.");
        }

        /// <summary>
        /// The test for the set_IndexValue() method.
        /// </summary>
        /// <remarks>Probably method set_IndexValue() is uncompleted.</remarks>
        [TestMethod()]
        public void set_IndexedValueTest()
        {
            string name = "Some Random Name";
            OANestedProjectProperty testProperty =
                VisualStudio_Project_Samples_OANestedProjectPropertyAccessor.CreatePrivate(projectProperties, name);
            Assert.IsNotNull(testProperty, "The OANestedProjectProperty instance was not created successfully.");

            // simply call this method
            testProperty.set_IndexedValue(null, null, null, null, null);
        }

        /// <summary>
        /// This method tests Object and dependent on Value properties.
        /// </summary>
        [TestMethod()]
        public void ObjectAndValuePropertiesTest()
        {
            string name = "SomeRandomName";
            OANestedProjectProperty testProperty =
                VisualStudio_Project_Samples_OANestedProjectPropertyAccessor.CreatePrivate(projectProperties, name);
            Assert.IsNotNull(testProperty, "The OANestedProjectProperty instance was not created successfully.");

            testProperty.Object = name;
            Assert.AreEqual((object)name, testProperty.Object,
                "ProjectProperty Object was initialized by unexpected value.");
            Assert.AreEqual((object)name, testProperty.Value,
                "ProjectProperty Value was initialized by unexpected value.");
            // FIX: failure message previously said "not set to the false" although the
            // assertion checks that IsDirty became true after the property change.
            Assert.IsTrue(nestedProject.IsDirty, "After property changing IsDirty flag was not set to true");
        }

        /// <summary>
        /// The test for the Value property in scenario when assigned to integer value.
        /// </summary>
        [TestMethod()]
        public void ValueAsIntegerPropertyTest()
        {
            string name = "SomeRandomName";
            OANestedProjectProperty testProperty =
                VisualStudio_Project_Samples_OANestedProjectPropertyAccessor.CreatePrivate(projectProperties, name);
            Assert.IsNotNull(testProperty, "The OANestedProjectProperty instance was not created successfully.");

            int expectedValue = 77777;
            testProperty.Value = expectedValue;
            Assert.AreEqual(expectedValue.ToString(), testProperty.Value,
                "ProjectProperty Value was initialized by unexpected value.");
            // FIX: corrected failure message ("to the false" -> "to true"); see ObjectAndValuePropertiesTest.
            Assert.IsTrue(nestedProject.IsDirty, "After property changing IsDirty flag was not set to true");
        }

        /// <summary>
        /// The test for the Value property in scenario when assigned to null referenced value.
        /// </summary>
        [TestMethod()]
        [ExpectedException(typeof(ArgumentNullException))]
        public void ValueAsNullPropertyTest()
        {
            string name = "SomeRandomName";
            OANestedProjectProperty testProperty =
                VisualStudio_Project_Samples_OANestedProjectPropertyAccessor.CreatePrivate(projectProperties, name);
            Assert.IsNotNull(testProperty, "The OANestedProjectProperty instance was not created successfully.");

            testProperty.Value = null;
        }

        /// <summary>
        /// The test for the let_Value method.
        /// </summary>
        [TestMethod()]
        public void let_ValueMethodTest()
        {
            string name = "SomeRandomName";
            OANestedProjectProperty testProperty =
                VisualStudio_Project_Samples_OANestedProjectPropertyAccessor.CreatePrivate(projectProperties, name);
            Assert.IsNotNull(testProperty, "The OANestedProjectProperty instance was not created successfully.");

            int expectedValue = 77777;
            testProperty.let_Value(expectedValue);
            Assert.AreEqual(expectedValue.ToString(), testProperty.Value,
                "ProjectProperty Value was initialized by unexpected value.");
            // FIX: corrected failure message ("to the false" -> "to true"); see ObjectAndValuePropertiesTest.
            Assert.IsTrue(nestedProject.IsDirty, "After property changing IsDirty flag was not set to true");
        }

        /// <summary>
        /// The test method for the NumIndices property.
        /// </summary>
        /// <remarks>This property always returns zero value.</remarks>
        [TestMethod()]
        public void NumIndicesPropertyTest()
        {
            string name = "Some Random Name";
            short expectedValue = 0;
            OANestedProjectProperty testProperty =
                VisualStudio_Project_Samples_OANestedProjectPropertyAccessor.CreatePrivate(projectProperties, name);
            Assert.IsNotNull(testProperty, "The OANestedProjectProperty instance was not created successfully.");
            Assert.AreEqual(expectedValue, testProperty.NumIndices,
                "Property NumIndices was returned unexpected value.");
        }
        #endregion Properties tests
        #endregion The tests for the OANestedProjectProperty class
    }
}
//! \file FileSystem.cs
//! \date Fri Jun 05 15:32:27 2015
//! \brief Gameres file system abstraction.
//
// Copyright (C) 2015 by morkt
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//

using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
using GameRes.Strings;

namespace GameRes
{
    /// <summary>
    /// Common abstraction over a physical directory tree and the contents of an
    /// archive file, so that callers can browse both uniformly.
    /// </summary>
    public interface IFileSystem : IDisposable
    {
        /// <summary>
        /// Returns entry corresponding to the given file or directory within filesystem.
        /// </summary>
        /// <exception cref="FileNotFoundException">File is not found.</exception>
        Entry FindFile (string filename);

        /// <summary>
        /// System.IO.File.Exists() analog.
        /// </summary>
        bool FileExists (string filename);

        /// <summary>
        /// Open file for reading as stream.
        /// </summary>
        Stream OpenStream (Entry entry);

        /// <summary>
        /// Open file for reading as a stream that is guaranteed to support seeking.
        /// </summary>
        Stream OpenSeekableStream (Entry entry);

        /// <summary>
        /// Open file for reading as memory-mapped view.
        /// </summary>
        ArcView OpenView (Entry entry);

        /// <summary>
        /// Enumerates subdirectories and files in current directory.
        /// </summary>
        IEnumerable<Entry> GetFiles ();

        /// <summary>
        /// System.IO.Path.Combine() analog.
        /// </summary>
        string CombinePath (string path1, string path2);

        /// <summary>
        /// Recursively enumerates files in the current directory and its subdirectories.
        /// Subdirectory entries are omitted from resulting set.
        /// </summary>
        IEnumerable<Entry> GetFilesRecursive ();

        // Current directory of this filesystem; setter may throw DirectoryNotFoundException.
        string CurrentDirectory { get; set; }
    }

    /// <summary>
    /// Entry that represents a subdirectory (zero size, type "directory").
    /// </summary>
    public class SubDirEntry : Entry
    {
        public override string Type  { get { return "directory"; } }

        public SubDirEntry (string name)
        {
            Name = name;
            Size = 0;
        }
    }

    /// <summary>
    /// IFileSystem implementation backed by the real OS filesystem.
    /// CurrentDirectory maps onto the process-wide current directory.
    /// </summary>
    public sealed class PhysicalFileSystem : IFileSystem
    {
        public string CurrentDirectory
        {
            get { return Directory.GetCurrentDirectory(); }
            set { Directory.SetCurrentDirectory (value); }
        }

        public string CombinePath (string path1, string path2)
        {
            return Path.Combine (path1, path2);
        }

        public Entry FindFile (string filename)
        {
            var attr = File.GetAttributes (filename);
            if ((attr & FileAttributes.Directory) == FileAttributes.Directory)
                return new SubDirEntry (filename);
            else
                return EntryFromFileInfo (new FileInfo (filename));
        }

        public bool FileExists (string filename)
        {
            return File.Exists (filename);
        }

        public IEnumerable<Entry> GetFiles ()
        {
            var info = new DirectoryInfo (CurrentDirectory);
            // Hidden and system subdirectories are skipped entirely.
            foreach (var subdir in info.EnumerateDirectories())
            {
                if (0 != (subdir.Attributes & (FileAttributes.Hidden | FileAttributes.System)))
                    continue;
                yield return new SubDirEntry (subdir.FullName);
            }
            // NOTE(review): hidden *files* are listed here, while GetFilesRecursive
            // skips them — confirm this asymmetry is intended.
            foreach (var file in info.EnumerateFiles())
            {
                if (0 != (file.Attributes & FileAttributes.System))
                    continue;
                yield return EntryFromFileInfo (file);
            }
        }

        public IEnumerable<Entry> GetFilesRecursive ()
        {
            var info = new DirectoryInfo (CurrentDirectory);
            foreach (var file in info.EnumerateFiles ("*", SearchOption.AllDirectories))
            {
                if (0 != (file.Attributes & (FileAttributes.Hidden | FileAttributes.System)))
                    continue;
                yield return EntryFromFileInfo (file);
            }
        }

        // Builds an Entry via the format catalog; size is clamped to uint range.
        private Entry EntryFromFileInfo (FileInfo file)
        {
            var entry = FormatCatalog.Instance.Create<Entry> (file.FullName);
            entry.Size = (uint)Math.Min (file.Length, uint.MaxValue);
            return entry;
        }

        public Stream OpenStream (Entry entry)
        {
            return File.OpenRead (entry.Name);
        }

        public Stream OpenSeekableStream (Entry entry)
        {
            // Physical file streams are always seekable.
            return OpenStream (entry);
        }

        public ArcView OpenView (Entry entry)
        {
            return new ArcView (entry.Name);
        }

        public void Dispose ()
        {
            // No resources of its own to release.
            GC.SuppressFinalize (this);
        }
    }

    /// <summary>
    /// Base class for filesystems backed by an opened archive. Owns the ArcFile
    /// and indexes its directory by case-insensitive entry name.
    /// </summary>
    public abstract class ArchiveFileSystem : IFileSystem
    {
        protected readonly ArcFile                      m_arc;
        protected readonly Dictionary<string, Entry>    m_dir;

        public ArcFile Source { get { return m_arc; } }

        public abstract string CurrentDirectory { get; set; }

        public ArchiveFileSystem (ArcFile arc)
        {
            m_arc = arc;
            m_dir = new Dictionary<string, Entry> (arc.Dir.Count, StringComparer.InvariantCultureIgnoreCase);
            // First entry wins when the archive contains duplicate names.
            foreach (var entry in arc.Dir)
            {
                if (!m_dir.ContainsKey (entry.Name))
                    m_dir.Add (entry.Name, entry);
            }
        }

        public bool FileExists (string filename)
        {
            return m_dir.ContainsKey (filename);
        }

        public Stream OpenStream (Entry entry)
        {
            return m_arc.OpenEntry (entry);
        }

        public Stream OpenSeekableStream (Entry entry)
        {
            return m_arc.OpenSeekableEntry (entry);
        }

        public ArcView OpenView (Entry entry)
        {
            return m_arc.OpenView (entry);
        }

        public abstract Entry FindFile (string filename);

        public abstract IEnumerable<Entry> GetFiles ();

        public abstract IEnumerable<Entry> GetFilesRecursive ();

        public abstract string CombinePath (string path1, string path2);

        #region IDisposable Members
        bool _arc_disposed = false;

        public void Dispose ()
        {
            Dispose (true);
            GC.SuppressFinalize (this);
        }

        protected virtual void Dispose (bool disposing)
        {
            if (!_arc_disposed)
            {
                if (disposing)
                {
                    // This filesystem owns the underlying archive.
                    m_arc.Dispose();
                }
                _arc_disposed = true;
            }
        }
        #endregion
    }

    /// <summary>
    /// Archive filesystem without directory structure: every entry lives in a
    /// single flat namespace and the current directory is always "".
    /// </summary>
    public class FlatArchiveFileSystem : ArchiveFileSystem
    {
        public override string CurrentDirectory
        {
            get { return ""; }
            set
            {
                // Only "no-op" directory changes are tolerated in a flat archive.
                if (string.IsNullOrEmpty (value))
                    return;
                if (".." == value || "." == value)
                    return;
                if ("\\" == value || "/" == value)
                    return;
                throw new DirectoryNotFoundException();
            }
        }

        public FlatArchiveFileSystem (ArcFile arc) : base (arc)
        {
        }

        public override Entry FindFile (string filename)
        {
            Entry entry = null;
            if (!m_dir.TryGetValue (filename, out entry))
                throw new FileNotFoundException();
            return entry;
        }

        public override IEnumerable<Entry> GetFiles ()
        {
            return m_arc.Dir;
        }

        public override IEnumerable<Entry> GetFilesRecursive ()
        {
            return m_arc.Dir;
        }

        public override string CombinePath (string path1, string path2)
        {
            return Path.Combine (path1, path2);
        }
    }

    /// <summary>
    /// Archive filesystem with hierarchical entry names. Directories are implied
    /// by the delimiters inside entry names rather than stored explicitly.
    /// </summary>
    public class TreeArchiveFileSystem : ArchiveFileSystem
    {
        // Current directory as a delimiter-joined relative path ("" = archive root).
        private string  m_cwd;

        // Delimiter actually used by the archive's entry names; updated on the fly
        // as directory listings are produced (see GetFiles).
        private string PathDelimiter { get; set; }

        private static readonly char[] m_path_delimiters = { '/', '\\' };

        public TreeArchiveFileSystem (ArcFile arc) : base (arc)
        {
            m_cwd = "";
            PathDelimiter = "/";
        }

        public override string CurrentDirectory
        {
            get { return m_cwd; }
            set { ChDir (value); }
        }

        public override string CombinePath (string path1, string path2)
        {
            if (0 == path1.Length)
                return path2;
            if (0 == path2.Length)
                return path1;
            if (path1.EndsWith (PathDelimiter))
                return path1+path2;
            return string.Join (PathDelimiter, path1, path2);
        }

        public override Entry FindFile (string filename)
        {
            Entry entry = null;
            if (m_dir.TryGetValue (filename, out entry))
                return entry;
            // No exact match: treat the name as a directory if any entry lies beneath it.
            if (m_dir.Keys.Any (n => n.StartsWith (filename + PathDelimiter)))
                return new SubDirEntry (filename);
            throw new FileNotFoundException();
        }

        // Matches the next path component (group 1) and its delimiter (group 2),
        // anchored at the position where matching starts (\G).
        static readonly Regex path_re = new Regex (@"\G[/\\]?([^/\\]+)([/\\])");

        public override IEnumerable<Entry> GetFiles ()
        {
            IEnumerable<Entry> dir = GetFilesRecursive();
            var root_dir = m_cwd;
            if (!string.IsNullOrEmpty (root_dir))
                root_dir += PathDelimiter;
            var subdirs = new HashSet<string>();
            foreach (var entry in dir)
            {
                // If the entry name continues past the current directory with another
                // component, surface that component once as a subdirectory entry.
                var match = path_re.Match (entry.Name, root_dir.Length);
                if (match.Success)
                {
                    string name = match.Groups[1].Value;
                    if (subdirs.Add (name))
                    {
                        // Remember the delimiter style this archive actually uses.
                        PathDelimiter = match.Groups[2].Value;
                        yield return new SubDirEntry (root_dir+name);
                    }
                }
                else
                {
                    yield return entry;
                }
            }
        }

        public override IEnumerable<Entry> GetFilesRecursive ()
        {
            if (0 == m_cwd.Length)
                return m_arc.Dir;
            else
                return from file in m_arc.Dir
                       where file.Name.StartsWith (m_cwd + PathDelimiter)
                       select file;
        }

        // Expands a mixed listing: plain files pass through, subdirectory entries
        // are replaced by every archive entry found beneath them.
        public IEnumerable<Entry> GetFilesRecursive (IEnumerable<Entry> list)
        {
            var result = new List<Entry>();
            foreach (var entry in list)
            {
                if (!(entry is SubDirEntry)) // add ordinary file
                    result.Add (entry);
                else if (".." == entry.Name) // skip reference to parent directory
                    continue;
                else // add all files contained within directory, recursive
                    result.AddRange (from file in m_arc.Dir
                                     where file.Name.StartsWith (entry.Name+PathDelimiter)
                                     select file);
            }
            return result;
        }

        // Normalizes 'path' against m_cwd ("."/".." handling, absolute vs relative)
        // and verifies the target directory exists before committing it.
        private void ChDir (string path)
        {
            if (string.IsNullOrEmpty (path))
            {
                m_cwd = "";
                return;
            }
            var cur_dir = new List<string>();
            if (-1 != Array.IndexOf (m_path_delimiters, path[0]))
            {
                // Absolute path: start from the archive root.
                path = path.TrimStart (m_path_delimiters);
            }
            else if (".." == path && !string.IsNullOrEmpty (m_cwd))
            {
                // Relative "..": resolve against the current directory components.
                cur_dir.AddRange (m_cwd.Split (m_path_delimiters));
            }
            var path_list = path.Split (m_path_delimiters);
            foreach (var dir in path_list)
            {
                if ("." == dir)
                {
                    continue;
                }
                else if (".." == dir)
                {
                    // Going above the root is silently clamped.
                    if (0 == cur_dir.Count)
                        continue;
                    cur_dir.RemoveAt (cur_dir.Count-1);
                }
                else
                {
                    cur_dir.Add (dir);
                }
            }
            string new_path = string.Join (PathDelimiter, cur_dir);
            if (0 != new_path.Length)
            {
                // Directory "exists" iff some archive entry lies beneath it.
                var entry = m_arc.Dir.FirstOrDefault (e => e.Name.StartsWith (new_path + PathDelimiter));
                if (null == entry)
                    throw new DirectoryNotFoundException();
            }
            m_cwd = new_path;
        }
    }

    /// <summary>
    /// Stack of nested filesystems: the bottom element is always the physical
    /// filesystem; entering an archive pushes its filesystem on top. Keeps the
    /// most recently left archive cached (LastVisitedArc) so re-entering it is cheap.
    /// </summary>
    public sealed class FileSystemStack : IDisposable
    {
        Stack<IFileSystem> m_fs_stack = new Stack<IFileSystem>();
        Stack<string> m_arc_name_stack = new Stack<string>();

        public IEnumerable<IFileSystem> All { get { return m_fs_stack; } }

        public IFileSystem Top { get { return m_fs_stack.Peek(); } }
        public int Count { get { return m_fs_stack.Count; } }
        public IEnumerable<string> ArcStack { get { return m_arc_name_stack; } }

        // Archive behind the top filesystem, or null for the physical filesystem.
        public ArcFile CurrentArchive { get; private set; }

        // One-element cache of the last popped filesystem and the path it was opened from.
        private IFileSystem LastVisitedArc { get; set; }
        private string LastVisitedPath { get; set; }

        public FileSystemStack ()
        {
            m_fs_stack.Push (new PhysicalFileSystem());
        }

        public void ChDir (Entry entry)
        {
            if (entry is SubDirEntry)
            {
                // ".." at an archive root leaves the archive entirely.
                if (Count > 1 && ".." == entry.Name && string.IsNullOrEmpty (Top.CurrentDirectory))
                {
                    Pop();
                    if (!string.IsNullOrEmpty (LastVisitedPath))
                    {
                        Top.CurrentDirectory = Path.GetDirectoryName (LastVisitedPath);
                    }
                }
                else
                {
                    Top.CurrentDirectory = entry.Name;
                }
                return;
            }
            // Re-entering the archive we just left: reuse the cached filesystem.
            if (entry.Name == LastVisitedPath && null != LastVisitedArc)
            {
                Push (LastVisitedPath, LastVisitedArc);
                var fs = LastVisitedArc as ArchiveFileSystem;
                if (null != fs)
                    CurrentArchive = fs.Source;
                return;
            }
            Flush();
            var arc = ArcFile.TryOpen (entry.Name);
            if (null == arc)
            {
                // Propagate user cancellation as-is; wrap everything else.
                if (FormatCatalog.Instance.LastError is OperationCanceledException)
                    throw FormatCatalog.Instance.LastError;
                else
                    throw new UnknownFormatException (FormatCatalog.Instance.LastError);
            }
            try
            {
                Push (entry.Name, arc.CreateFileSystem());
                CurrentArchive = arc;
            }
            catch
            {
                // Don't leak the archive if its filesystem could not be created.
                arc.Dispose();
                throw;
            }
        }

        private void Push (string path, IFileSystem fs)
        {
            m_fs_stack.Push (fs);
            m_arc_name_stack.Push (path);
        }

        internal void Pop ()
        {
            if (m_fs_stack.Count > 1)
            {
                // Flush first so at most one popped filesystem is kept cached.
                Flush();
                LastVisitedArc = m_fs_stack.Pop();
                LastVisitedPath = m_arc_name_stack.Pop();
                if (m_fs_stack.Count > 1 && m_fs_stack.Peek() is ArchiveFileSystem)
                    CurrentArchive = (m_fs_stack.Peek() as ArchiveFileSystem).Source;
                else
                    CurrentArchive = null;
            }
        }

        // Releases the cached last-visited filesystem (unless it is the current top).
        public void Flush ()
        {
            if (LastVisitedArc != null && (0 == Count || LastVisitedArc != Top))
            {
                LastVisitedArc.Dispose();
                LastVisitedArc = null;
                LastVisitedPath = null;
            }
        }

        private bool _disposed = false;
        public void Dispose ()
        {
            if (!_disposed)
            {
                Flush();
                foreach (var fs in m_fs_stack.Reverse())
                    fs.Dispose();
                _disposed = true;
            }
            GC.SuppressFinalize (this);
        }
    }

    /// <summary>
    /// Static facade over a process-wide FileSystemStack.
    /// </summary>
    public static class VFS
    {
        private static FileSystemStack m_vfs = new FileSystemStack();

        /// <summary>
        /// Top, or "current" filesystem in VFS hierarchy.
        /// </summary>
        public static IFileSystem Top { get { return m_vfs.Top; } }

        /// <summary>
        /// Whether top filesystem is virtual (i.e. represents an archive).
        /// </summary>
        public static bool IsVirtual { get { return m_vfs.Count > 1; } }

        /// <summary>
        /// Number of filesystems in hierarchy. ==1 when only physical file system is represented.
        /// Always >= 1
        /// </summary>
        public static int Count { get { return m_vfs.Count; } }

        /// <summary>
        /// Archive corresponding to the top filesystem, or null if file system doesn't have underlying
        /// archive file.
        /// </summary>
        public static ArcFile CurrentArchive { get { return m_vfs.CurrentArchive; } }

        // Reused single-element buffer for the FullPath getter.
        private static string[] m_top_path = new string[1];

        /// <summary>
        /// Full path as a sequence: archive names from outermost to innermost,
        /// followed by the current directory within the top filesystem. Setting it
        /// pops back to the common prefix and re-enters archives as needed.
        /// </summary>
        public static IEnumerable<string> FullPath
        {
            get
            {
                m_top_path[0] = Top.CurrentDirectory;
                if (1 == Count)
                    return m_top_path;
                else
                    return m_vfs.ArcStack.Reverse().Concat (m_top_path);
            }
            set
            {
                if (!value.Any())
                    return;
                var desired = value.ToArray();
                int desired_vfs_count = desired.Length;
                // Find how many leading archive names already match the current stack.
                var arc_iterator = m_vfs.ArcStack.Reverse().GetEnumerator();
                int i = 0;
                while (i < desired_vfs_count - 1 && arc_iterator.MoveNext())
                {
                    if (arc_iterator.Current != desired[i])
                        break;
                    ++i;
                }
                // Pop filesystems above the common prefix, then enter the remaining archives.
                while (Count > i+1)
                    m_vfs.Pop();
                while (Count < desired_vfs_count)
                {
                    var entry = m_vfs.Top.FindFile (desired[Count-1]);
                    if (entry is SubDirEntry)
                        throw new FileNotFoundException();
                    m_vfs.ChDir (entry);
                }
                // Last element is the directory within the innermost filesystem.
                m_vfs.Top.CurrentDirectory = desired.Last();
            }
        }

        public static string CombinePath (string path1, string path2)
        {
            return m_vfs.Top.CombinePath (path1, path2);
        }

        public static Entry FindFile (string filename)
        {
            // ".." is always resolvable as a parent-directory pseudo-entry.
            if (".." == filename)
                return new SubDirEntry ("..");
            return m_vfs.Top.FindFile (filename);
        }

        public static bool FileExists (string filename)
        {
            return m_vfs.Top.FileExists (filename);
        }

        public static Stream OpenStream (Entry entry)
        {
            return m_vfs.Top.OpenStream (entry);
        }

        public static Stream OpenSeekableStream (Entry entry)
        {
            return m_vfs.Top.OpenSeekableStream (entry);
        }

        public static ArcView OpenView (Entry entry)
        {
            return m_vfs.Top.OpenView (entry);
        }

        public static Stream OpenStream (string filename)
        {
            return m_vfs.Top.OpenStream (m_vfs.Top.FindFile (filename));
        }

        public static Stream OpenSeekableStream (string filename)
        {
            return m_vfs.Top.OpenSeekableStream (m_vfs.Top.FindFile (filename));
        }

        public static ArcView OpenView (string filename)
        {
            return m_vfs.Top.OpenView (m_vfs.Top.FindFile (filename));
        }

        public static void ChDir (Entry entry)
        {
            m_vfs.ChDir (entry);
        }

        public static void ChDir (string path)
        {
            m_vfs.ChDir (FindFile (path));
        }

        public static void Flush ()
        {
            m_vfs.Flush();
        }

        public static IEnumerable<Entry> GetFiles ()
        {
            return m_vfs.Top.GetFiles();
        }
    }

    /// <summary>
    /// Thrown when a file could not be recognized as any known archive format.
    /// </summary>
    public class UnknownFormatException : FileFormatException
    {
        public UnknownFormatException () : base (garStrings.MsgUnknownFormat) { }
        public UnknownFormatException (Exception inner) : base (garStrings.MsgUnknownFormat, inner) { }
    }
}
//---------------------------------------------------------------------------
//
// <copyright file=KeyBinding.cs company=Microsoft>
// Copyright (C) Microsoft Corporation. All rights reserved.
// </copyright>
//
//
// Description: The KeyBinding class is used by the developer to create Keyboard Input Bindings
//
// See spec at : http://avalon/coreui/Specs/Commanding(new).mht
//
//* KeyBinding class serves the purpose of Input Bindings for Keyboard Device.
//
// History:
// 06/01/2003 : chandras - Created
// 05/01/2004 : chandra - changed to accommodate new design
// ( http://avalon/coreui/Specs/Commanding(new).mht )
//---------------------------------------------------------------------------

using System;
using System.Windows.Input;
using System.Windows;
using System.ComponentModel;
using System.Windows.Markup;
using SR=MS.Internal.PresentationCore.SR;
using SRID=MS.Internal.PresentationCore.SRID;

namespace System.Windows.Input
{
    /// <summary>
    /// KeyBinding - an InputBinding (generic InputGesture-Command map) specialized
    /// for the keyboard. It maps a KeyGesture to a Command, and additionally exposes
    /// Key/Modifiers dependency properties so a gesture can be composed without going
    /// through KeyGesture directly. The Gesture property carries a KeyGestureConverter
    /// so markup such as Gesture="Ctrl+X" works.
    /// </summary>
    public class KeyBinding : InputBinding
    {
        //------------------------------------------------------
        //
        //  Constructors
        //
        //------------------------------------------------------
        #region Constructor

        /// <summary>
        /// Default constructor.
        /// </summary>
        public KeyBinding() : base()
        {
        }

        /// <summary>
        /// Binds the given command to the given keyboard gesture.
        /// </summary>
        /// <param name="command">Command associated</param>
        /// <param name="gesture">KeyGesture associated</param>
        public KeyBinding(ICommand command, KeyGesture gesture)
            : base(command, gesture)
        {
            // Mirror the gesture's Key/Modifiers into the dependency properties.
            SynchronizePropertiesFromGesture(gesture);
        }

        /// <summary>
        /// Convenience constructor building the KeyGesture from a key and modifiers.
        /// </summary>
        /// <param name="command">Command associated</param>
        /// <param name="key">key</param>
        /// <param name="modifiers">modifiers</param>
        public KeyBinding(ICommand command, Key key, ModifierKeys modifiers)
            : this(command, new KeyGesture(key, modifiers))
        {
        }

        #endregion Constructor

        //------------------------------------------------------
        //
        //  Public Methods
        //
        //------------------------------------------------------
        #region Public Methods

        /// <summary>
        /// Gesture override: enforces that only KeyGesture values are accepted and
        /// attaches the KeyGesture type converter / value serializer for markup use.
        /// </summary>
        /// <exception cref="ArgumentException">The assigned value is not a KeyGesture.</exception>
        [TypeConverter(typeof(KeyGestureConverter))]
        [ValueSerializer(typeof(KeyGestureValueSerializer))]
        public override InputGesture Gesture
        {
            get
            {
                return base.Gesture as KeyGesture;
            }
            set
            {
                KeyGesture keyGesture = value as KeyGesture;
                if (keyGesture == null)
                {
                    throw new ArgumentException(SR.Get(SRID.InputBinding_ExpectedInputGesture, typeof(KeyGesture)));
                }
                base.Gesture = value;
                // Keep the Key/Modifiers dependency properties in sync with the new gesture.
                SynchronizePropertiesFromGesture(keyGesture);
            }
        }

        /// <summary>
        /// Dependency Property for Modifiers
        /// </summary>
        public static readonly DependencyProperty ModifiersProperty =
            DependencyProperty.Register("Modifiers", typeof(ModifierKeys), typeof(KeyBinding),
                new UIPropertyMetadata(ModifierKeys.None, new PropertyChangedCallback(OnModifiersPropertyChanged)));

        /// <summary>
        /// Modifier keys of the bound gesture.
        /// </summary>
        public ModifierKeys Modifiers
        {
            get { return (ModifierKeys)GetValue(ModifiersProperty); }
            set { SetValue(ModifiersProperty, value); }
        }

        private static void OnModifiersPropertyChanged(DependencyObject d, DependencyPropertyChangedEventArgs e)
        {
            var binding = (KeyBinding)d;
            binding.SynchronizeGestureFromProperties(binding.Key, (ModifierKeys)e.NewValue);
        }

        /// <summary>
        /// Dependency Property for Key
        /// </summary>
        public static readonly DependencyProperty KeyProperty =
            DependencyProperty.Register("Key", typeof(Key), typeof(KeyBinding),
                new UIPropertyMetadata(Key.None, new PropertyChangedCallback(OnKeyPropertyChanged)));

        /// <summary>
        /// Key of the bound gesture.
        /// </summary>
        public Key Key
        {
            get { return (Key)GetValue(KeyProperty); }
            set { SetValue(KeyProperty, value); }
        }

        private static void OnKeyPropertyChanged(DependencyObject d, DependencyPropertyChangedEventArgs e)
        {
            var binding = (KeyBinding)d;
            binding.SynchronizeGestureFromProperties((Key)e.NewValue, binding.Modifiers);
        }

        #endregion Public Methods

        #region Freezable

        protected override Freezable CreateInstanceCore()
        {
            return new KeyBinding();
        }

        #endregion

        #region Private Methods

        /// <summary>
        /// Copies Key/Modifiers out of the gesture into the dependency properties.
        /// Guarded by _settingGesture to stop the property callbacks from rebuilding
        /// the gesture while we are reading from it.
        /// </summary>
        private void SynchronizePropertiesFromGesture(KeyGesture keyGesture)
        {
            if (_settingGesture)
                return;
            _settingGesture = true;
            try
            {
                Key = keyGesture.Key;
                Modifiers = keyGesture.Modifiers;
            }
            finally
            {
                _settingGesture = false;
            }
        }

        /// <summary>
        /// Rebuilds the Gesture from the Key/Modifiers properties, with the same
        /// re-entrancy guard in the opposite direction.
        /// </summary>
        private void SynchronizeGestureFromProperties(Key key, ModifierKeys modifiers)
        {
            if (_settingGesture)
                return;
            _settingGesture = true;
            try
            {
                // validateGesture: false — Key and Modifiers set one at a time may be
                // transiently invalid as a combination.
                Gesture = new KeyGesture(key, modifiers, /*validateGesture = */ false);
            }
            finally
            {
                _settingGesture = false;
            }
        }

        #endregion

        //------------------------------------------------------
        //
        //  Private Fields
        //
        //------------------------------------------------------
        #region Data

        // Re-entrancy guard for the two Synchronize* directions.
        private bool _settingGesture = false;

        #endregion
    }
}
//
// Encog(tm) Core v3.2 - .Net Version
// http://www.heatonresearch.com/encog/
//
// Copyright 2008-2014 Heaton Research, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// For more information on Heaton Research copyrights, licenses
// and trademarks visit:
// http://www.heatonresearch.com/copyright
//
using System.Collections.Generic;
using System.Linq;
using Encog.MathUtil;
using Encog.ML.Bayesian.Query.Enumeration;
using Encog.ML.Data;

namespace Encog.ML.Bayesian.Training.Search.k2
{
    /// <summary>
    /// Search for optimal Bayes structure with K2.
    /// </summary>
    public class SearchK2 : IBayesSearch
    {
        /// <summary>
        /// The node ordering.
        /// </summary>
        private readonly IList<BayesianEvent> _nodeOrdering = new List<BayesianEvent>();

        /// <summary>
        /// The data to use.
        /// </summary>
        private IMLDataSet _data;

        /// <summary>
        /// The current index.
        /// </summary>
        private int _index = -1;

        /// <summary>
        /// The last calculated value for p.
        /// </summary>
        private double _lastCalculatedP;

        /// <summary>
        /// The network to optimize.
        /// </summary>
        private BayesianNetwork _network;

        /// <summary>
        /// The trainer being used.
        /// </summary>
        private TrainBayesian _train;

        #region IBayesSearch Members

        /// <inheritdoc/>
        public void Init(TrainBayesian theTrainer, BayesianNetwork theNetwork, IMLDataSet theData)
        {
            _network = theNetwork;
            _data = theData;
            _train = theTrainer;
            OrderNodes();
            // -1 signals that the ordering is re-established on the first Iteration() call.
            _index = -1;
        }

        /// <inheritdoc/>
        public bool Iteration()
        {
            if (_index == -1)
            {
                OrderNodes();
            }
            else
            {
                BayesianEvent e = _nodeOrdering[_index];
                double oldP = CalculateG(_network, e, e.Parents);

                // Greedily add parents while FindZ reports an improvement in G,
                // up to the trainer's configured parent cap.
                while (e.Parents.Count < _train.MaximumParents)
                {
                    BayesianEvent z = FindZ(e, _index, oldP);
                    if (z != null)
                    {
                        _network.CreateDependency(z, e);
                        oldP = _lastCalculatedP;
                    }
                    else
                    {
                        break;
                    }
                }
            }

            _index++;
            return (_index < _data.InputSize);
        }

        #endregion

        /// <summary>
        /// Basically the goal here is to get the classification target, if it exists,
        /// to go first. This will greatly enhance K2's effectiveness.
        /// </summary>
        private void OrderNodes()
        {
            _nodeOrdering.Clear();

            // is there a classification target?
            if (_network.ClassificationTarget != -1)
            {
                _nodeOrdering.Add(_network.ClassificationTargetEvent);
            }

            // now add the others
            foreach (BayesianEvent e in _network.Events)
            {
                if (!_nodeOrdering.Contains(e))
                {
                    _nodeOrdering.Add(e);
                }
            }
        }

        /// <summary>
        /// Find the value for z.
/// </summary> /// <param name="e">The event that we are clauclating for.</param> /// <param name="n">The value for n.</param> /// <param name="old">The old value.</param> /// <returns>The new value for z.</returns> private BayesianEvent FindZ(BayesianEvent e, int n, double old) { BayesianEvent result = null; double maxChildP = double.NegativeInfinity; //System.out.println("Finding parent for: " + event.toString()); for (int i = 0; i < n; i++) { BayesianEvent trialParent = _nodeOrdering[i]; IList<BayesianEvent> parents = new List<BayesianEvent>(); parents.CopyTo(e.Parents.ToArray(), 0); parents.Add(trialParent); //System.out.println("Calculating adding " + trialParent.toString() + " to " + event.toString()); _lastCalculatedP = CalculateG(_network, e, parents); //System.out.println("lastP:" + this.lastCalculatedP); //System.out.println("old:" + old); if (_lastCalculatedP > old && _lastCalculatedP > maxChildP) { result = trialParent; maxChildP = _lastCalculatedP; //System.out.println("Current best is: " + result.toString()); } } _lastCalculatedP = maxChildP; return result; } /// <summary> /// Calculate the value N, which is the number of cases, from the training data, where the /// desiredValue matches the training data. Only cases where the parents match the specifed /// parent instance are considered. /// </summary> /// <param name="network">The network to calculate for.</param> /// <param name="e">The event we are calculating for. (variable i)</param> /// <param name="parents">The parents of the specified event we are considering.</param> /// <param name="parentInstance">The parent instance we are looking for.</param> /// <param name="desiredValue">The desired value.</param> /// <returns>The value N. 
</returns> public int CalculateN(BayesianNetwork network, BayesianEvent e, IList<BayesianEvent> parents, int[] parentInstance, int desiredValue) { int result = 0; int eventIndex = network.GetEventIndex(e); foreach (IMLDataPair pair in _data) { int[] d = _network.DetermineClasses(pair.Input); if (d[eventIndex] == desiredValue) { bool reject = false; for (int i = 0; i < parentInstance.Length; i++) { BayesianEvent parentEvent = parents[i]; int parentIndex = network.GetEventIndex(parentEvent); if (parentInstance[i] != d[parentIndex]) { reject = true; break; } } if (!reject) { result++; } } } return result; } /// <summary> /// Calculate the value N, which is the number of cases, from the training data, where the /// desiredValue matches the training data. Only cases where the parents match the specifed /// parent instance are considered. /// </summary> /// <param name="network">The network to calculate for.</param> /// <param name="e">The event we are calculating for. (variable i)</param> /// <param name="parents">The parents of the specified event we are considering.</param> /// <param name="parentInstance">The parent instance we are looking for.</param> /// <returns>The value N. </returns> public int CalculateN(BayesianNetwork network, BayesianEvent e, IList<BayesianEvent> parents, int[] parentInstance) { int result = 0; foreach (IMLDataPair pair in _data) { int[] d = _network.DetermineClasses(pair.Input); bool reject = false; for (int i = 0; i < parentInstance.Length; i++) { BayesianEvent parentEvent = parents[i]; int parentIndex = network.GetEventIndex(parentEvent); if (parentInstance[i] != (d[parentIndex])) { reject = true; break; } } if (!reject) { result++; } } return result; } /// <summary> /// Calculate G. 
/// </summary> /// <param name="network">The network to calculate for.</param> /// <param name="e">The event to calculate for.</param> /// <param name="parents">The parents.</param> /// <returns>The value for G.</returns> public double CalculateG(BayesianNetwork network, BayesianEvent e, IList<BayesianEvent> parents) { double result = 1.0; int r = e.Choices.Count; var args = new int[parents.Count]; do { double n = EncogMath.Factorial(r - 1); double d = EncogMath.Factorial(CalculateN(network, e, parents, args) + r - 1); double p1 = n/d; double p2 = 1; for (int k = 0; k < e.Choices.Count; k++) { p2 *= EncogMath.Factorial(CalculateN(network, e, parents, args, k)); } result *= p1*p2; } while (EnumerationQuery.Roll(parents, args)); return result; } } }
// Code generated by Microsoft (R) AutoRest Code Generator 0.16.0.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.

namespace Microsoft.Bot.Connector
{
    using System;
    using System.Collections;
    using System.Collections.Generic;
    using System.Threading;
    using System.Threading.Tasks;
    using Microsoft.Rest;

    /// <summary>
    /// Extension methods for Conversations.
    /// </summary>
    public static partial class ConversationsExtensions
    {
            /// <summary>
            /// CreateConversation
            /// </summary>
            /// Create a new Conversation.
            ///
            /// POST to this method with a
            /// * Bot being the bot creating the conversation
            /// * IsGroup set to true if this is not a direct message (default is false)
            /// * Members array contining the members you want to have be in the
            /// conversation.
            ///
            /// The return value is a ResourceResponse which contains a conversation id
            /// which is suitable for use
            /// in the message payload and REST API uris.
            ///
            /// Most channels only support the semantics of bots initiating a direct
            /// message conversation. An example of how to do that would be:
            ///
            /// ```
            /// var resource = await connector.conversations.CreateConversation(new
            /// ConversationParameters(){ Bot = bot, members = new ChannelAccount[] { new
            /// ChannelAccount("user1") } );
            /// await connect.Conversations.SendToConversationAsync(resource.Id, new
            /// Activity() ... ) ;
            ///
            /// ```
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='parameters'>
            /// Parameters to create the conversation from
            /// </param>
            public static ResourceResponse CreateConversation(this IConversations operations, ConversationParameters parameters)
            {
                // NOTE(review): AutoRest-generated sync-over-async wrapper; blocks the
                // calling thread until the async call completes. Prefer CreateConversationAsync.
                return Task.Factory.StartNew(s => ((IConversations)s).CreateConversationAsync(parameters), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
            }

            /// <summary>
            /// CreateConversation
            /// </summary>
            /// Create a new Conversation.
/// /// POST to this method with a /// * Bot being the bot creating the conversation /// * IsGroup set to true if this is not a direct message (default is false) /// * Members array contining the members you want to have be in the /// conversation. /// /// The return value is a ResourceResponse which contains a conversation id /// which is suitable for use /// in the message payload and REST API uris. /// /// Most channels only support the semantics of bots initiating a direct /// message conversation. An example of how to do that would be: /// /// ``` /// var resource = await connector.conversations.CreateConversation(new /// ConversationParameters(){ Bot = bot, members = new ChannelAccount[] { new /// ChannelAccount("user1") } ); /// await connect.Conversations.SendToConversationAsync(resource.Id, new /// Activity() ... ) ; /// /// ``` /// <param name='operations'> /// The operations group for this extension method. /// </param> /// <param name='parameters'> /// Parameters to create the conversation from /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> public static async Task<ResourceResponse> CreateConversationAsync(this IConversations operations, ConversationParameters parameters, CancellationToken cancellationToken = default(CancellationToken)) { using (var _result = await operations.CreateConversationWithHttpMessagesAsync(parameters, null, cancellationToken).ConfigureAwait(false)) { return await _result.HandleErrorAsync<ResourceResponse>().ConfigureAwait(false); ; } } /// <summary> /// SendToConversation /// </summary> /// This method allows you to send an activity to a conversation regardless of /// previous posts to a conversation. /// /// This is slightly different then ReplyToConversation(). 
            /// * SendToConverstion(conversationId) - will simply append a message to the
            /// end of the conversation according to the timestamp or semantics of the
            /// channel
            /// * ReplyToConversation(conversationId,ActivityId) - models the semantics of
            /// threaded conversations, meaning it has the information necessary for the
            /// channel to reply to the actual message being responded to.
            ///
            /// SendToConversation is appropriate for the first message which initiates a
            /// conversation, or if you don't have a particular activity you are
            /// responding to.
            ///
            /// ReplyToConversation is preferable to SendToConversation() because it
            /// maintains threaded conversations.
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='activity'>
            /// Activity to send
            /// </param>
            /// <param name='conversationId'>
            /// Conversation ID
            /// </param>
            public static APIResponse SendToConversation(this IConversations operations, Activity activity, string conversationId)
            {
                // NOTE(review): sync-over-async wrapper; blocks the calling thread.
                return Task.Factory.StartNew(s => ((IConversations)s).SendToConversationAsync(activity, conversationId), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
            }

            /// <summary>
            /// SendToConversation
            /// </summary>
            /// This method allows you to send an activity to a conversation regardless of
            /// previous posts to a conversation.
            ///
            /// This is slightly different then ReplyToConversation().
            /// * SendToConverstion(conversationId) - will simply append a message to the
            /// end of the conversation according to the timestamp or semantics of the
            /// channel
            /// * ReplyToConversation(conversationId,ActivityId) - models the semantics of
            /// threaded conversations, meaning it has the information necessary for the
            /// channel to reply to the actual message being responded to.
            ///
            /// SendToConversation is appropriate for the first message which initiates a
            /// conversation, or if you don't have a particular activity you are
            /// responding to.
            ///
            /// ReplyToConversation is preferable to SendToConversation() because it
            /// maintains threaded conversations.
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='activity'>
            /// Activity to send
            /// </param>
            /// <param name='conversationId'>
            /// Conversation ID
            /// </param>
            /// <param name='cancellationToken'>
            /// The cancellation token.
            /// </param>
            public static async Task<APIResponse> SendToConversationAsync(this IConversations operations, Activity activity, string conversationId, CancellationToken cancellationToken = default(CancellationToken))
            {
                using (var _result = await operations.SendToConversationWithHttpMessagesAsync(activity, conversationId, null, cancellationToken).ConfigureAwait(false))
                {
                    return _result.Body;
                }
            }

            /// <summary>
            /// ReplyToActivity
            /// </summary>
            /// This method allows you to reply to an activity.
            ///
            /// This is slightly different then SendToConversation().
            /// * SendToConverstion(conversationId) - will simply append a message to the
            /// end of the conversation according to the timestamp or semantics of the
            /// channel
            /// * ReplyToConversation(conversationId,ActivityId) - models the semantics of
            /// threaded conversations, meaning it has the information necessary for the
            /// channel to reply to the actual message being responded to.
            ///
            /// ReplyToConversation is almost always preferable to SendToConversation()
            /// because it maintains threaded conversations.
            ///
            /// SendToConversation is appropriate for the first message which initiates a
            /// conversation, or if you don't have a particular activity you are
            /// responding to.
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='conversationId'>
            /// Conversation ID
            /// </param>
            /// <param name='activityId'>
            /// activityId the reply is to (OPTIONAL)
            /// </param>
            /// <param name='activity'>
            /// Activity to send
            /// </param>
            public static APIResponse ReplyToActivity(this IConversations operations, string conversationId, string activityId, Activity activity)
            {
                // NOTE(review): sync-over-async wrapper; blocks the calling thread.
                return Task.Factory.StartNew(s => ((IConversations)s).ReplyToActivityAsync(conversationId, activityId, activity), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
            }

            /// <summary>
            /// ReplyToActivity
            /// </summary>
            /// This method allows you to reply to an activity.
            ///
            /// This is slightly different then SendToConversation().
            /// * SendToConverstion(conversationId) - will simply append a message to the
            /// end of the conversation according to the timestamp or semantics of the
            /// channel
            /// * ReplyToConversation(conversationId,ActivityId) - models the semantics of
            /// threaded conversations, meaning it has the information necessary for the
            /// channel to reply to the actual message being responded to.
            ///
            /// ReplyToConversation is almost always preferable to SendToConversation()
            /// because it maintains threaded conversations.
            ///
            /// SendToConversation is appropriate for the first message which initiates a
            /// conversation, or if you don't have a particular activity you are
            /// responding to.
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='conversationId'>
            /// Conversation ID
            /// </param>
            /// <param name='activityId'>
            /// activityId the reply is to (OPTIONAL)
            /// </param>
            /// <param name='activity'>
            /// Activity to send
            /// </param>
            /// <param name='cancellationToken'>
            /// The cancellation token.
            /// </param>
            public static async Task<APIResponse> ReplyToActivityAsync(this IConversations operations, string conversationId, string activityId, Activity activity, CancellationToken cancellationToken = default(CancellationToken))
            {
                using (var _result = await operations.ReplyToActivityWithHttpMessagesAsync(conversationId, activityId, activity, null, cancellationToken).ConfigureAwait(false))
                {
                    return _result.Body;
                }
            }

            /// <summary>
            /// GetConversationMembers
            /// </summary>
            /// Call this method to enumerate the members of a converstion.
            ///
            /// This REST API takes a ConversationId and returns an array of
            /// ChannelAccount[] objects
            /// which are the members of the conversation.
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='conversationId'>
            /// Conversation ID
            /// </param>
            public static ChannelAccount[] GetConversationMembers(this IConversations operations, string conversationId)
            {
                // NOTE(review): sync-over-async wrapper; blocks the calling thread.
                return Task.Factory.StartNew(s => ((IConversations)s).GetConversationMembersAsync(conversationId), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
            }

            /// <summary>
            /// GetConversationMembers
            /// </summary>
            /// Call this method to enumerate the members of a converstion.
            ///
            /// This REST API takes a ConversationId and returns an array of
            /// ChannelAccount[] objects
            /// which are the members of the conversation.
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='conversationId'>
            /// Conversation ID
            /// </param>
            /// <param name='cancellationToken'>
            /// The cancellation token.
            /// </param>
            public static async Task<ChannelAccount[]> GetConversationMembersAsync(this IConversations operations, string conversationId, CancellationToken cancellationToken = default(CancellationToken))
            {
                using (var _result = await operations.GetConversationMembersWithHttpMessagesAsync(conversationId, null, cancellationToken).ConfigureAwait(false))
                {
                    return await _result.HandleErrorAsync<ChannelAccount[]>().ConfigureAwait(false);
                }
            }

            /// <summary>
            /// GetActivityMembers
            /// </summary>
            /// Call this method to enumerate the members of an activity.
            ///
            /// This REST API takes a ConversationId and a ActivityId, returning an array
            /// of ChannelAccount[] objects
            /// which are the members of the particular activity in the conversation.
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='conversationId'>
            /// Conversation ID
            /// </param>
            /// <param name='activityId'>
            /// Activity ID
            /// </param>
            public static ChannelAccount[] GetActivityMembers(this IConversations operations, string conversationId, string activityId)
            {
                // NOTE(review): sync-over-async wrapper; blocks the calling thread.
                return Task.Factory.StartNew(s => ((IConversations)s).GetActivityMembersAsync(conversationId, activityId), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
            }

            /// <summary>
            /// GetActivityMembers
            /// </summary>
            /// Call this method to enumerate the members of an activity.
            ///
            /// This REST API takes a ConversationId and a ActivityId, returning an array
            /// of ChannelAccount[] objects
            /// which are the members of the particular activity in the conversation.
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='conversationId'>
            /// Conversation ID
            /// </param>
            /// <param name='activityId'>
            /// Activity ID
            /// </param>
            /// <param name='cancellationToken'>
            /// The cancellation token.
            /// </param>
            public static async Task<ChannelAccount[]> GetActivityMembersAsync(this IConversations operations, string conversationId, string activityId, CancellationToken cancellationToken = default(CancellationToken))
            {
                using (var _result = await operations.GetActivityMembersWithHttpMessagesAsync(conversationId, activityId, null, cancellationToken).ConfigureAwait(false))
                {
                    return await _result.HandleErrorAsync<ChannelAccount[]>().ConfigureAwait(false);
                }
            }

            /// <summary>
            /// UploadAttachment
            /// </summary>
            /// This method allows you to upload an attachment directly into a channels
            /// blob storage.
            ///
            /// This is useful because it allows you to store data in a compliant store
            /// when dealing with enterprises.
            ///
            /// The response is a ResourceResponse which contains an AttachmentId which is
            /// suitable for using with the attachments api.
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='conversationId'>
            /// Conversation ID
            /// </param>
            /// <param name='attachmentUpload'>
            /// Attachment data
            /// </param>
            public static ResourceResponse UploadAttachment(this IConversations operations, string conversationId, AttachmentData attachmentUpload)
            {
                // NOTE(review): sync-over-async wrapper; blocks the calling thread.
                return Task.Factory.StartNew(s => ((IConversations)s).UploadAttachmentAsync(conversationId, attachmentUpload), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
            }

            /// <summary>
            /// UploadAttachment
            /// </summary>
            /// This method allows you to upload an attachment directly into a channels
            /// blob storage.
            ///
            /// This is useful because it allows you to store data in a compliant store
            /// when dealing with enterprises.
            ///
            /// The response is a ResourceResponse which contains an AttachmentId which is
            /// suitable for using with the attachments api.
            /// <param name='operations'>
            /// The operations group for this extension method.
            /// </param>
            /// <param name='conversationId'>
            /// Conversation ID
            /// </param>
            /// <param name='attachmentUpload'>
            /// Attachment data
            /// </param>
            /// <param name='cancellationToken'>
            /// The cancellation token.
            /// </param>
            public static async Task<ResourceResponse> UploadAttachmentAsync(this IConversations operations, string conversationId, AttachmentData attachmentUpload, CancellationToken cancellationToken = default(CancellationToken))
            {
                using (var _result = await operations.UploadAttachmentWithHttpMessagesAsync(conversationId, attachmentUpload, null, cancellationToken).ConfigureAwait(false))
                {
                    return await _result.HandleErrorAsync<ResourceResponse>().ConfigureAwait(false);
                }
            }
    }
}
/*
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 */

using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using Lucene.Net.Index;
using Lucene.Net.Support;
using Lucene.Net.Util;
using Version = Lucene.Net.Util.Version;

namespace Lucene.Net.Analysis.Query
{
    /*
     * An {@link Analyzer} used primarily at query time to wrap another analyzer and provide a layer of protection
     * which prevents very common words from being passed into queries.
     * <p>
     * For very large indexes the cost
     * of reading TermDocs for a very common word can be high. This analyzer was created after experience with
     * a 38 million doc index which had a term in around 50% of docs and was causing TermQueries for
     * this term to take 2 seconds.
     * </p>
     * <p>
     * Use the various "addStopWords" methods in this class to automate the identification and addition of
     * stop words found in an already existing index.
     * </p>
     */
    public class QueryAutoStopWordAnalyzer : Analyzer
    {
        // The wrapped analyzer that actually produces tokens; this class filters its output.
        Analyzer _delegate;
        // Per-field sets of identified stop words, populated by the AddStopWords overloads.
        HashMap<String,ISet<String>> stopWordsPerField = new HashMap<String,ISet<String>>();
        //The default maximum percentage (40%) of index documents which
        //can contain a term, after which the term is considered to be a stop word.
public const float defaultMaxDocFreqPercent = 0.4f; private readonly Version matchVersion; /* * Initializes this analyzer with the Analyzer object that actually produces the tokens * * @param _delegate The choice of {@link Analyzer} that is used to produce the token stream which needs filtering */ public QueryAutoStopWordAnalyzer(Version matchVersion, Analyzer _delegate) { this._delegate = _delegate; SetOverridesTokenStreamMethod<QueryAutoStopWordAnalyzer>(); this.matchVersion = matchVersion; } /* * Automatically adds stop words for all fields with terms exceeding the defaultMaxDocFreqPercent * * @param reader The {@link IndexReader} which will be consulted to identify potential stop words that * exceed the required document frequency * @return The number of stop words identified. * @throws IOException */ public int AddStopWords(IndexReader reader) { return AddStopWords(reader, defaultMaxDocFreqPercent); } /* * Automatically adds stop words for all fields with terms exceeding the maxDocFreqPercent * * @param reader The {@link IndexReader} which will be consulted to identify potential stop words that * exceed the required document frequency * @param maxDocFreq The maximum number of index documents which can contain a term, after which * the term is considered to be a stop word * @return The number of stop words identified. 
* @throws IOException */ public int AddStopWords(IndexReader reader, int maxDocFreq) { int numStopWords = 0; ICollection<String> fieldNames = reader.GetFieldNames(IndexReader.FieldOption.INDEXED); for (IEnumerator<String> iter = fieldNames.GetEnumerator(); iter.MoveNext();) { String fieldName = iter.Current; numStopWords += AddStopWords(reader, fieldName, maxDocFreq); } return numStopWords; } /* * Automatically adds stop words for all fields with terms exceeding the maxDocFreqPercent * * @param reader The {@link IndexReader} which will be consulted to identify potential stop words that * exceed the required document frequency * @param maxPercentDocs The maximum percentage (between 0.0 and 1.0) of index documents which * contain a term, after which the word is considered to be a stop word. * @return The number of stop words identified. * @throws IOException */ public int AddStopWords(IndexReader reader, float maxPercentDocs) { int numStopWords = 0; ICollection<String> fieldNames = reader.GetFieldNames(IndexReader.FieldOption.INDEXED); for (IEnumerator<String> iter = fieldNames.GetEnumerator(); iter.MoveNext();) { String fieldName = iter.Current; numStopWords += AddStopWords(reader, fieldName, maxPercentDocs); } return numStopWords; } /* * Automatically adds stop words for the given field with terms exceeding the maxPercentDocs * * @param reader The {@link IndexReader} which will be consulted to identify potential stop words that * exceed the required document frequency * @param fieldName The field for which stopwords will be added * @param maxPercentDocs The maximum percentage (between 0.0 and 1.0) of index documents which * contain a term, after which the word is considered to be a stop word. * @return The number of stop words identified. 
* @throws IOException */ public int AddStopWords(IndexReader reader, String fieldName, float maxPercentDocs) { return AddStopWords(reader, fieldName, (int) (reader.NumDocs() * maxPercentDocs)); } /* * Automatically adds stop words for the given field with terms exceeding the maxPercentDocs * * @param reader The {@link IndexReader} which will be consulted to identify potential stop words that * exceed the required document frequency * @param fieldName The field for which stopwords will be added * @param maxDocFreq The maximum number of index documents which * can contain a term, after which the term is considered to be a stop word. * @return The number of stop words identified. * @throws IOException */ public int AddStopWords(IndexReader reader, String fieldName, int maxDocFreq) { var stopWords = Support.Compatibility.SetFactory.CreateHashSet<string>(); String internedFieldName = StringHelper.Intern(fieldName); TermEnum te = reader.Terms(new Term(fieldName)); Term term = te.Term; while (term != null) { if (term.Field != internedFieldName) { break; } if (te.DocFreq() > maxDocFreq) { stopWords.Add(term.Text); } if (!te.Next()) { break; } term = te.Term; } stopWordsPerField.Add(fieldName, stopWords); /* if the stopwords for a field are changed, * then saved streams for that field are erased. 
*/ IDictionary<String,SavedStreams> streamMap = (IDictionary<String,SavedStreams>) PreviousTokenStream; if (streamMap != null) streamMap.Remove(fieldName); return stopWords.Count; } public override TokenStream TokenStream(String fieldName, TextReader reader) { TokenStream result; try { result = _delegate.ReusableTokenStream(fieldName, reader); } catch (IOException) { result = _delegate.TokenStream(fieldName, reader); } var stopWords = stopWordsPerField[fieldName]; if (stopWords != null) { result = new StopFilter(StopFilter.GetEnablePositionIncrementsVersionDefault(matchVersion), result, stopWords); } return result; } private class SavedStreams { /* the underlying stream */ protected internal TokenStream Wrapped; /* * when there are no stopwords for the field, refers to wrapped. * if there stopwords, it is a StopFilter around wrapped. */ protected internal TokenStream WithStopFilter; }; public override TokenStream ReusableTokenStream(String fieldName, TextReader reader) { if (overridesTokenStreamMethod) { // LUCENE-1678: force fallback to tokenStream() if we // have been subclassed and that subclass overrides // tokenStream but not reusableTokenStream return TokenStream(fieldName, reader); } /* map of SavedStreams for each field */ IDictionary<String, SavedStreams> streamMap = (IDictionary<String, SavedStreams>)PreviousTokenStream; if (streamMap == null) { streamMap = new HashMap<String, SavedStreams>(); PreviousTokenStream = streamMap; } SavedStreams streams = streamMap[fieldName]; if (streams == null) { /* an entry for this field does not exist, create one */ streams = new SavedStreams(); streamMap.Add(fieldName, streams); streams.Wrapped = _delegate.ReusableTokenStream(fieldName, reader); /* if there are any stopwords for the field, save the stopfilter */ var stopWords = stopWordsPerField[fieldName]; if (stopWords != null) streams.WithStopFilter = new StopFilter(StopFilter.GetEnablePositionIncrementsVersionDefault(matchVersion), streams.Wrapped, stopWords); else 
streams.WithStopFilter = streams.Wrapped; } else { /* * an entry for this field exists, verify the wrapped stream has not * changed. if it has not, reuse it, otherwise wrap the new stream. */ TokenStream result = _delegate.ReusableTokenStream(fieldName, reader); if (result == streams.Wrapped) { /* the wrapped analyzer reused the stream */ streams.WithStopFilter.Reset(); } else { /* * the wrapped analyzer did not. if there are any stopwords for the * field, create a new StopFilter around the new stream */ streams.Wrapped = result; var stopWords = stopWordsPerField[fieldName]; if (stopWords != null) streams.WithStopFilter = new StopFilter(StopFilter.GetEnablePositionIncrementsVersionDefault(matchVersion), streams.Wrapped, stopWords); else streams.WithStopFilter = streams.Wrapped; } } return streams.WithStopFilter; } /* * Provides information on which stop words have been identified for a field * * @param fieldName The field for which stop words identified in "addStopWords" * method calls will be returned * @return the stop words identified for a field */ public String[] GetStopWords(String fieldName) { String[] result; var stopWords = stopWordsPerField[fieldName]; if (stopWords != null) { result = stopWords.ToArray(); } else { result = new String[0]; } return result; } /* * Provides information on which stop words have been identified for all fields * * @return the stop words (as terms) */ public Term[] GetStopWords() { List<Term> allStopWords = new List<Term>(); foreach(var fieldName in stopWordsPerField.Keys) { var stopWords = stopWordsPerField[fieldName]; foreach(var text in stopWords) { allStopWords.Add(new Term(fieldName, text)); } } return allStopWords.ToArray(); } } }
/*
 * QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
 * Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//------------------------------------------------------------------------------
// This code was generated by a tool.
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
//------------------------------------------------------------------------------

// To get up to date fundamental definition files for your hedgefund contact sales@quantconnect.com

using System;
using System.IO;
using Newtonsoft.Json;

namespace QuantConnect.Data.Fundamental
{
    /// <summary>
    /// Definition of the EarningReports class
    /// </summary>
    /// <remarks>
    /// Generated data-transfer object: each property carries its Morningstar DataId
    /// as a JsonProperty name so the class deserializes directly from the vendor feed.
    /// </remarks>
    public class EarningReports
    {
        /// <summary>
        /// The exact date that is given in the financial statements for each quarter's end.
        /// </summary>
        /// <remarks>
        /// Morningstar DataId: 20001
        /// </remarks>
        [JsonProperty("20001")]
        public DateTime PeriodEndingDate { get; set; }

        /// <summary>
        /// Specific date on which a company released its filing to the public.
        /// </summary>
        /// <remarks>
        /// Morningstar DataId: 20002
        /// </remarks>
        [JsonProperty("20002")]
        public DateTime FileDate { get; set; }

        /// <summary>
        /// The accession number is a unique number that EDGAR assigns to each submission as the submission is received.
        /// </summary>
        /// <remarks>
        /// Morningstar DataId: 20003
        /// </remarks>
        [JsonProperty("20003")]
        public string AccessionNumber { get; set; }

        /// <summary>
        /// The type of filing of the report: for instance, 10-K (annual report) or 10-Q (quarterly report).
        /// </summary>
        /// <remarks>
        /// Morningstar DataId: 20004
        /// </remarks>
        [JsonProperty("20004")]
        public string FormType { get; set; }

        /// <summary>
        /// Basic EPS from Continuing Operations is the earnings from continuing operations reported by the company divided by the weighted
        /// average number of common shares outstanding.
        /// </summary>
        /// <remarks>
        /// Morningstar DataId: 29000
        /// </remarks>
        [JsonProperty("29000")]
        public BasicContinuousOperations BasicContinuousOperations { get; set; }

        /// <summary>
        /// Basic EPS from Discontinued Operations is the earnings from discontinued operations reported by the company divided by the
        /// weighted average number of common shares outstanding. This only includes gain or loss from discontinued operations.
        /// </summary>
        /// <remarks>
        /// Morningstar DataId: 29001
        /// </remarks>
        [JsonProperty("29001")]
        public BasicDiscontinuousOperations BasicDiscontinuousOperations { get; set; }

        /// <summary>
        /// Basic EPS from the Extraordinary Gains/Losses is the earnings attributable to the gains or losses (during the reporting period) from
        /// extraordinary items divided by the weighted average number of common shares outstanding.
        /// </summary>
        /// <remarks>
        /// Morningstar DataId: 29002
        /// </remarks>
        [JsonProperty("29002")]
        public BasicExtraordinary BasicExtraordinary { get; set; }

        /// <summary>
        /// Basic EPS from the Cumulative Effect of Accounting Change is the earnings attributable to the accounting change (during the
        /// reporting period) divided by the weighted average number of common shares outstanding.
        /// </summary>
        /// <remarks>
        /// Morningstar DataId: 29003
        /// </remarks>
        [JsonProperty("29003")]
        public BasicAccountingChange BasicAccountingChange { get; set; }

        /// <summary>
        /// Basic EPS is the bottom line net income divided by the weighted average number of common shares outstanding.
        /// </summary>
        /// <remarks>
        /// Morningstar DataId: 29004
        /// </remarks>
        [JsonProperty("29004")]
        public BasicEPS BasicEPS { get; set; }

        /// <summary>
        /// Diluted EPS from Continuing Operations is the earnings from continuing operations divided by the common shares outstanding
        /// adjusted for the assumed conversion of all potentially dilutive securities. Securities having a dilutive effect may include convertible
        /// debentures, warrants, options, and convertible preferred stock.
        /// </summary>
        /// <remarks>
        /// Morningstar DataId: 29005
        /// </remarks>
        [JsonProperty("29005")]
        public DilutedContinuousOperations DilutedContinuousOperations { get; set; }

        /// <summary>
        /// Diluted EPS from Discontinued Operations is the earnings from discontinued operations divided by the common shares outstanding
        /// adjusted for the assumed conversion of all potentially dilutive securities. Securities having a dilutive effect may include convertible
        /// debentures, warrants, options, and convertible preferred stock. This only includes gain or loss from discontinued operations.
        /// </summary>
        /// <remarks>
        /// Morningstar DataId: 29006
        /// </remarks>
        [JsonProperty("29006")]
        public DilutedDiscontinuousOperations DilutedDiscontinuousOperations { get; set; }

        /// <summary>
        /// Diluted EPS from Extraordinary Gain/Losses is the gain or loss from extraordinary items divided by the common shares outstanding
        /// adjusted for the assumed conversion of all potentially dilutive securities. Securities having a dilutive effect may include convertible
        /// debentures, warrants, options, and convertible preferred stock.
        /// </summary>
        /// <remarks>
        /// Morningstar DataId: 29007
        /// </remarks>
        [JsonProperty("29007")]
        public DilutedExtraordinary DilutedExtraordinary { get; set; }

        /// <summary>
        /// Diluted EPS from Cumulative Effect Accounting Changes is the earnings from accounting changes (in the reporting period) divided
        /// by the common shares outstanding adjusted for the assumed conversion of all potentially dilutive securities. Securities having a
        /// dilutive effect may include convertible debentures, warrants, options, and convertible preferred stock.
        /// </summary>
        /// <remarks>
        /// Morningstar DataId: 29008
        /// </remarks>
        [JsonProperty("29008")]
        public DilutedAccountingChange DilutedAccountingChange { get; set; }

        /// <summary>
        /// Diluted EPS is the bottom line net income divided by the common shares outstanding adjusted for the assumed conversion of all
        /// potentially dilutive securities. Securities having a dilutive effect may include convertible debentures, warrants, options, and
        /// convertible preferred stock. This value will be derived when not reported for the fourth quarter and will be less than or equal to
        /// Basic EPS.
        /// </summary>
        /// <remarks>
        /// Morningstar DataId: 29009
        /// </remarks>
        [JsonProperty("29009")]
        public DilutedEPS DilutedEPS { get; set; }

        /// <summary>
        /// The shares outstanding used to calculate Basic EPS, which is the weighted average common share outstanding through the whole
        /// accounting period. Note: If Basic Average Shares are not presented by the firm in the Income Statement, this data point will be
        /// null.
        /// </summary>
        /// <remarks>
        /// Morningstar DataId: 29010
        /// </remarks>
        [JsonProperty("29010")]
        public BasicAverageShares BasicAverageShares { get; set; }

        /// <summary>
        /// The shares outstanding used to calculate the diluted EPS, assuming the conversion of all convertible securities and the exercise of
        /// warrants or stock options. It is the weighted average diluted share outstanding through the whole accounting period. Note: If
        /// Diluted Average Shares are not presented by the firm in the Income Statement and Basic Average Shares are presented, Diluted
        /// Average Shares will equal Basic Average Shares. However, if neither value is presented by the firm, Diluted Average Shares will be
        /// null.
        /// </summary>
        /// <remarks>
        /// Morningstar DataId: 29011
        /// </remarks>
        [JsonProperty("29011")]
        public DilutedAverageShares DilutedAverageShares { get; set; }

        /// <summary>
        /// The amount of dividend that a stockholder will receive for each share of stock held. It can be calculated by taking the total amount
        /// of dividends paid and dividing it by the total shares outstanding. Dividend per share = total dividend payment/total number of
        /// outstanding shares
        /// </summary>
        /// <remarks>
        /// Morningstar DataId: 29012
        /// </remarks>
        [JsonProperty("29012")]
        public DividendPerShare DividendPerShare { get; set; }

        /// <summary>
        /// Basic EPS from the Other Gains/Losses is the earnings attributable to the other gains/losses (during the reporting period) divided by
        /// the weighted average number of common shares outstanding.
        /// </summary>
        /// <remarks>
        /// Morningstar DataId: 29013
        /// </remarks>
        [JsonProperty("29013")]
        public BasicEPSOtherGainsLosses BasicEPSOtherGainsLosses { get; set; }

        /// <summary>
        /// Basic EPS from Continuing Operations plus Basic EPS from Discontinued Operations.
        /// </summary>
        /// <remarks>
        /// Morningstar DataId: 29014
        /// </remarks>
        [JsonProperty("29014")]
        public ContinuingAndDiscontinuedBasicEPS ContinuingAndDiscontinuedBasicEPS { get; set; }

        /// <summary>
        /// The earnings attributable to the tax loss carry forward (during the reporting period).
        /// </summary>
        /// <remarks>
        /// Morningstar DataId: 29015
        /// </remarks>
        [JsonProperty("29015")]
        public TaxLossCarryforwardBasicEPS TaxLossCarryforwardBasicEPS { get; set; }

        /// <summary>
        /// The earnings from gains and losses (in the reporting period) divided by the common shares outstanding adjusted for the assumed
        /// conversion of all potentially dilutive securities. Securities having a dilutive effect may include convertible debentures, warrants,
        /// options, convertible preferred stock, etc.
        /// </summary>
        /// <remarks>
        /// Morningstar DataId: 29016
        /// </remarks>
        [JsonProperty("29016")]
        public DilutedEPSOtherGainsLosses DilutedEPSOtherGainsLosses { get; set; }

        /// <summary>
        /// Diluted EPS from Continuing Operations plus Diluted EPS from Discontinued Operations.
        /// </summary>
        /// <remarks>
        /// Morningstar DataId: 29017
        /// </remarks>
        [JsonProperty("29017")]
        public ContinuingAndDiscontinuedDilutedEPS ContinuingAndDiscontinuedDilutedEPS { get; set; }

        /// <summary>
        /// The earnings from any tax loss carry forward (in the reporting period).
        /// </summary>
        /// <remarks>
        /// Morningstar DataId: 29018
        /// </remarks>
        [JsonProperty("29018")]
        public TaxLossCarryforwardDilutedEPS TaxLossCarryforwardDilutedEPS { get; set; }

        /// <summary>
        /// The basic normalized earnings per share. Normalized EPS removes onetime and unusual items from EPS, to provide investors with a
        /// more accurate measure of the company's true earnings. Normalized Earnings / Basic Weighted Average Shares Outstanding.
        /// </summary>
        /// <remarks>
        /// Morningstar DataId: 29019
        /// </remarks>
        [JsonProperty("29019")]
        public NormalizedBasicEPS NormalizedBasicEPS { get; set; }

        /// <summary>
        /// The diluted normalized earnings per share. Normalized EPS removes onetime and unusual items from EPS, to provide investors with
        /// a more accurate measure of the company's true earnings. Normalized Earnings / Diluted Weighted Average Shares Outstanding.
        /// </summary>
        /// <remarks>
        /// Morningstar DataId: 29020
        /// </remarks>
        [JsonProperty("29020")]
        public NormalizedDilutedEPS NormalizedDilutedEPS { get; set; }

        /// <summary>
        /// Total Dividend Per Share is cash dividends and special cash dividends paid per share over a certain period of time.
        /// </summary>
        /// <remarks>
        /// Morningstar DataId: 29021
        /// </remarks>
        [JsonProperty("29021")]
        public TotalDividendPerShare TotalDividendPerShare { get; set; }

        /// <summary>
        /// Normalized Basic EPS as reported by the company in the financial statements.
        /// </summary>
        /// <remarks>
        /// Morningstar DataId: 29022
        /// </remarks>
        [JsonProperty("29022")]
        public ReportedNormalizedBasicEPS ReportedNormalizedBasicEPS { get; set; }

        /// <summary>
        /// Normalized Diluted EPS as reported by the company in the financial statements.
        /// </summary>
        /// <remarks>
        /// Morningstar DataId: 29023
        /// </remarks>
        [JsonProperty("29023")]
        public ReportedNormalizedDilutedEPS ReportedNormalizedDilutedEPS { get; set; }

        /// <summary>
        /// Reflects a firm's capacity to pay a dividend, and is defined as Earnings Per Share / Dividend Per Share
        /// </summary>
        /// <remarks>
        /// Morningstar DataId: 29024
        /// </remarks>
        [JsonProperty("29024")]
        public DividendCoverageRatio DividendCoverageRatio { get; set; }

        /// <summary>
        /// The nature of the period covered by an individual set of financial results. The output can be: Quarter, Semi-annual or Annual.
        /// Assuming a 12-month fiscal year, quarter typically covers a three-month period, semi-annual a six-month period, and annual a
        /// twelve-month period. Annual could cover results collected either from preliminary results or an annual report
        /// </summary>
        /// <remarks>
        /// Morningstar DataId: 28006
        /// </remarks>
        [JsonProperty("28006")]
        public string PeriodType { get; set; }

        /// <summary>
        /// Creates an instance of the EarningReports class
        /// </summary>
        /// <remarks>
        /// Eagerly constructs every nested statistic container so UpdateValues can
        /// null-propagate into them without extra allocation checks.
        /// </remarks>
        public EarningReports()
        {
            BasicContinuousOperations = new BasicContinuousOperations();
            BasicDiscontinuousOperations = new BasicDiscontinuousOperations();
            BasicExtraordinary = new BasicExtraordinary();
            BasicAccountingChange = new BasicAccountingChange();
            BasicEPS = new BasicEPS();
            DilutedContinuousOperations = new DilutedContinuousOperations();
            DilutedDiscontinuousOperations = new DilutedDiscontinuousOperations();
            DilutedExtraordinary = new DilutedExtraordinary();
            DilutedAccountingChange = new DilutedAccountingChange();
            DilutedEPS = new DilutedEPS();
            BasicAverageShares = new BasicAverageShares();
            DilutedAverageShares = new DilutedAverageShares();
            DividendPerShare = new DividendPerShare();
            BasicEPSOtherGainsLosses = new BasicEPSOtherGainsLosses();
            ContinuingAndDiscontinuedBasicEPS = new ContinuingAndDiscontinuedBasicEPS();
            TaxLossCarryforwardBasicEPS = new TaxLossCarryforwardBasicEPS();
            DilutedEPSOtherGainsLosses = new DilutedEPSOtherGainsLosses();
            ContinuingAndDiscontinuedDilutedEPS = new ContinuingAndDiscontinuedDilutedEPS();
            TaxLossCarryforwardDilutedEPS = new TaxLossCarryforwardDilutedEPS();
            NormalizedBasicEPS = new NormalizedBasicEPS();
            NormalizedDilutedEPS = new NormalizedDilutedEPS();
            TotalDividendPerShare = new TotalDividendPerShare();
            ReportedNormalizedBasicEPS = new ReportedNormalizedBasicEPS();
            ReportedNormalizedDilutedEPS = new ReportedNormalizedDilutedEPS();
            DividendCoverageRatio = new DividendCoverageRatio();
        }

        /// <summary>
        /// Applies updated values from <paramref name="update"/> to this instance
        /// </summary>
        /// <remarks>Used to apply data updates to the current instance. This WILL overwrite existing values. Default update values are ignored.</remarks>
        /// <param name="update">The next data update for this instance</param>
        public void UpdateValues(EarningReports update)
        {
            if (update == null)
                return;

            // Scalars: only copy non-default values so a sparse update does not
            // erase previously-received data.
            if (update.PeriodEndingDate != default(DateTime)) PeriodEndingDate = update.PeriodEndingDate;
            if (update.FileDate != default(DateTime)) FileDate = update.FileDate;
            if (!string.IsNullOrWhiteSpace(update.AccessionNumber)) AccessionNumber = update.AccessionNumber;
            if (!string.IsNullOrWhiteSpace(update.FormType)) FormType = update.FormType;

            // Nested containers: delegate the same sparse-merge behavior to each child.
            BasicContinuousOperations?.UpdateValues(update.BasicContinuousOperations);
            BasicDiscontinuousOperations?.UpdateValues(update.BasicDiscontinuousOperations);
            BasicExtraordinary?.UpdateValues(update.BasicExtraordinary);
            BasicAccountingChange?.UpdateValues(update.BasicAccountingChange);
            BasicEPS?.UpdateValues(update.BasicEPS);
            DilutedContinuousOperations?.UpdateValues(update.DilutedContinuousOperations);
            DilutedDiscontinuousOperations?.UpdateValues(update.DilutedDiscontinuousOperations);
            DilutedExtraordinary?.UpdateValues(update.DilutedExtraordinary);
            DilutedAccountingChange?.UpdateValues(update.DilutedAccountingChange);
            DilutedEPS?.UpdateValues(update.DilutedEPS);
            BasicAverageShares?.UpdateValues(update.BasicAverageShares);
            DilutedAverageShares?.UpdateValues(update.DilutedAverageShares);
            DividendPerShare?.UpdateValues(update.DividendPerShare);
            BasicEPSOtherGainsLosses?.UpdateValues(update.BasicEPSOtherGainsLosses);
            ContinuingAndDiscontinuedBasicEPS?.UpdateValues(update.ContinuingAndDiscontinuedBasicEPS);
            TaxLossCarryforwardBasicEPS?.UpdateValues(update.TaxLossCarryforwardBasicEPS);
            DilutedEPSOtherGainsLosses?.UpdateValues(update.DilutedEPSOtherGainsLosses);
            ContinuingAndDiscontinuedDilutedEPS?.UpdateValues(update.ContinuingAndDiscontinuedDilutedEPS);
            TaxLossCarryforwardDilutedEPS?.UpdateValues(update.TaxLossCarryforwardDilutedEPS);
            NormalizedBasicEPS?.UpdateValues(update.NormalizedBasicEPS);
            NormalizedDilutedEPS?.UpdateValues(update.NormalizedDilutedEPS);
            TotalDividendPerShare?.UpdateValues(update.TotalDividendPerShare);
            ReportedNormalizedBasicEPS?.UpdateValues(update.ReportedNormalizedBasicEPS);
            ReportedNormalizedDilutedEPS?.UpdateValues(update.ReportedNormalizedDilutedEPS);
            DividendCoverageRatio?.UpdateValues(update.DividendCoverageRatio);
            if (!string.IsNullOrWhiteSpace(update.PeriodType)) PeriodType = update.PeriodType;
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Diagnostics;
using System.Collections.Generic;
using System.Reflection.TypeLoading;
using RuntimeTypeInfo = System.Reflection.TypeLoading.RoType;

namespace System.Reflection.Runtime.BindingFlagSupport
{
    //=================================================================================================================
    // This class encapsulates the minimum set of arcane desktop CLR policies needed to implement the Get*(BindingFlags) apis.
    //
    // In particular, it encapsulates behaviors such as what exactly determines the "visibility" of a property and event, and
    // what determines whether and how they are overridden.
    //=================================================================================================================
    internal abstract class MemberPolicies<M> where M : MemberInfo
    {
        //=================================================================================================================
        // Subclasses for specific MemberInfo types must override these:
        //=================================================================================================================

        //
        // Returns all of the directly declared members on the given TypeInfo.
        //
        public abstract IEnumerable<M> GetDeclaredMembers(TypeInfo typeInfo);

        //
        // Returns all of the directly declared members on the given TypeInfo whose name matches filter. If filter is null,
        // returns all directly declared members.
        //
        public abstract IEnumerable<M> CoreGetDeclaredMembers(RuntimeTypeInfo type, NameFilter filter, RuntimeTypeInfo reflectedType);

        //
        // Policy to decide whether a member is considered "virtual", "virtual new" and what its member visibility is.
        // (For "visibility", we reuse the MethodAttributes enum since Reflection lacks an element-agnostic enum for this.
        // Only the MemberAccessMask bits are set.)
        //
        public abstract void GetMemberAttributes(M member, out MethodAttributes visibility, out bool isStatic, out bool isVirtual, out bool isNewSlot);

        //
        // Policy to decide whether "derivedMember" is a virtual override of "baseMember." Used to implement MethodInfo.GetBaseDefinition(),
        // parent chain traversal for discovering inherited custom attributes, and suppressing lookup results in the Type.Get*() api family.
        //
        // Does not consider explicit overrides (methodimpls.) Does not consider "overrides" of interface methods.
        //
        public abstract bool ImplicitlyOverrides(M baseMember, M derivedMember);

        //
        // Policy to decide how BindingFlags should be reinterpreted for a given member type.
        // This is overridden for nested types which all match on any combination Instance | Static and are never inherited.
        // It is also overridden for constructors which are never inherited.
        //
        public virtual BindingFlags ModifyBindingFlags(BindingFlags bindingFlags)
        {
            return bindingFlags;
        }

        //
        // Policy to decide if BindingFlags is always interpreted as having set DeclaredOnly.
        //
        public abstract bool AlwaysTreatAsDeclaredOnly { get; }

        //
        // Policy to decide how or if members in more derived types hide same-named members in base types.
        // Due to desktop compat concerns, the definitions are a bit more arbitrary than we'd like.
        //
        public abstract bool IsSuppressedByMoreDerivedMember(M member, M[] priorMembers, int startIndex, int endIndex);

        //
        // Policy to decide whether to throw an AmbiguousMatchException on an ambiguous Type.Get*() call.
        // Does not apply to GetConstructor/GetMethod/GetProperty calls that have a non-null Type[] array passed to it.
        //
        // If method returns true, the Get() api will pick the member that's in the most derived type.
        // If method returns false, the Get() api throws AmbiguousMatchException.
        //
        public abstract bool OkToIgnoreAmbiguity(M m1, M m2);

        //
        // Helper method for determining whether two methods are signature-compatible.
        // Compares name, arity, genericness, and (position-aware for generic methods) parameter types.
        //
        protected static bool AreNamesAndSignaturesEqual(MethodInfo method1, MethodInfo method2)
        {
            if (method1.Name != method2.Name)
                return false;

            ParameterInfo[] p1 = method1.GetParametersNoCopy();
            ParameterInfo[] p2 = method2.GetParametersNoCopy();
            if (p1.Length != p2.Length)
                return false;

            bool isGenericMethod1 = method1.IsGenericMethodDefinition;
            bool isGenericMethod2 = method2.IsGenericMethodDefinition;
            if (isGenericMethod1 != isGenericMethod2)
                return false;
            if (!isGenericMethod1)
            {
                // Non-generic: ordinary Type.Equals comparison per parameter suffices.
                for (int i = 0; i < p1.Length; i++)
                {
                    Type parameterType1 = p1[i].ParameterType;
                    Type parameterType2 = p2[i].ParameterType;
                    if (!(parameterType1.Equals(parameterType2)))
                    {
                        return false;
                    }
                }
            }
            else
            {
                if (method1.GetGenericArguments().Length != method2.GetGenericArguments().Length)
                    return false;
                // Generic: Type.Equals compares declaring method too, so use the
                // position-only-aware helper below.
                for (int i = 0; i < p1.Length; i++)
                {
                    Type parameterType1 = p1[i].ParameterType;
                    Type parameterType2 = p2[i].ParameterType;
                    if (!GenericMethodAwareAreParameterTypesEqual(parameterType1, parameterType2))
                    {
                        return false;
                    }
                }
            }
            return true;
        }

        //
        // This helper compares the types of the corresponding parameters of two methods to see if one method is signature equivalent to the other.
        // This is needed when comparing the signatures of two generic methods as Type.Equals() is not up to that job.
        //
        private static bool GenericMethodAwareAreParameterTypesEqual(Type t1, Type t2)
        {
            // Fast-path - if Reflection has already deemed them equivalent, we can trust its result.
            if (t1.Equals(t2))
                return true;

            // If we got here, Reflection determined the types not equivalent. Most of the time, that's the result we want.
            // There is however, one wrinkle. If the type is or embeds a generic method parameter type, Reflection will always report them
            // non-equivalent, since generic parameter type comparison always compares both the position and the declaring method. For our purposes, though,
            // we only want to consider the position.

            // Fast-path: if the types don't embed any generic parameters, we can go ahead and use Reflection's result.
            if (!(t1.ContainsGenericParameters && t2.ContainsGenericParameters))
                return false;

            if ((t1.IsArray && t2.IsArray) || (t1.IsByRef && t2.IsByRef) || (t1.IsPointer && t2.IsPointer))
            {
                if (t1.IsSZArray() != t2.IsSZArray())
                    return false;

                if (t1.IsArray && (t1.GetArrayRank() != t2.GetArrayRank()))
                    return false;

                // Recurse on the element types of the matched composed types.
                return GenericMethodAwareAreParameterTypesEqual(t1.GetElementType(), t2.GetElementType());
            }

            if (t1.IsConstructedGenericType)
            {
                // We can use regular old Equals() rather than recursing into GenericMethodAwareAreParameterTypesEqual() since the
                // generic type definition will always be a plain old named type and won't embed any generic method parameters.
                if (!(t1.GetGenericTypeDefinition().Equals(t2.GetGenericTypeDefinition())))
                    return false;

                Type[] ga1 = t1.GenericTypeArguments;
                Type[] ga2 = t2.GenericTypeArguments;
                if (ga1.Length != ga2.Length)
                    return false;

                for (int i = 0; i < ga1.Length; i++)
                {
                    if (!GenericMethodAwareAreParameterTypesEqual(ga1[i], ga2[i]))
                        return false;
                }
                return true;
            }

            if (t1.IsGenericMethodParameter() && t2.IsGenericMethodParameter())
            {
                // A generic method parameter. The DeclaringMethods will be different but we don't care about that - we can assume that
                // the declaring method will be the method that declared the parameter's whose type we're testing. We only need to
                // compare the positions.
                return t1.GenericParameterPosition == t2.GenericParameterPosition;
            }

            // If we got here, either t1 and t2 are different flavors of types or they are both simple named types or both generic type parameters.
            // Either way, we can trust Reflection's result here.
            return false;
        }

        // Static constructor: dispatches on typeof(M) to wire up the matching
        // singleton policy object and its MemberTypeIndex. Runs once per closed
        // generic instantiation of MemberPolicies<M>.
        static MemberPolicies()
        {
            Type t = typeof(M);
            if (t.Equals(typeof(FieldInfo)))
            {
                MemberTypeIndex = BindingFlagSupport.MemberTypeIndex.Field;
                Default = (MemberPolicies<M>)(Object)(new FieldPolicies());
            }
            else if (t.Equals(typeof(MethodInfo)))
            {
                MemberTypeIndex = BindingFlagSupport.MemberTypeIndex.Method;
                Default = (MemberPolicies<M>)(Object)(new MethodPolicies());
            }
            else if (t.Equals(typeof(ConstructorInfo)))
            {
                MemberTypeIndex = BindingFlagSupport.MemberTypeIndex.Constructor;
                Default = (MemberPolicies<M>)(Object)(new ConstructorPolicies());
            }
            else if (t.Equals(typeof(PropertyInfo)))
            {
                MemberTypeIndex = BindingFlagSupport.MemberTypeIndex.Property; ;
                Default = (MemberPolicies<M>)(Object)(new PropertyPolicies());
            }
            else if (t.Equals(typeof(EventInfo)))
            {
                MemberTypeIndex = BindingFlagSupport.MemberTypeIndex.Event;
                Default = (MemberPolicies<M>)(Object)(new EventPolicies());
            }
            else if (t.Equals(typeof(Type)))
            {
                MemberTypeIndex = BindingFlagSupport.MemberTypeIndex.NestedType;
                Default = (MemberPolicies<M>)(Object)(new NestedTypePolicies());
            }
            else
            {
                Debug.Fail("Unknown MemberInfo type.");
            }
        }

        //
        // This is a singleton class one for each MemberInfo category: Return the appropriate one.
        //
        public static readonly MemberPolicies<M> Default;

        //
        // This returns a fixed value from 0 to MemberIndex.Count-1 with each possible type of M
        // being assigned a unique index (see the MemberTypeIndex for possible values). This is useful
        // for converting a type reference to M to an array index or switch case label.
        //
        public static readonly int MemberTypeIndex;
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

/******************************************************************************
 * This file is auto-generated from a template file by the GenerateTests.csx  *
 * script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make    *
 * changes, please update the corresponding template and run according to the *
 * directions listed in the file.                                             *
 ******************************************************************************/

using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;

namespace JIT.HardwareIntrinsics.X86
{
    public static partial class Program
    {
        // Test driver for Avx.BlendVariable on Vector256<Double>: runs every scenario
        // on supported hardware, or verifies PlatformNotSupportedException otherwise.
        private static void BlendVariableDouble()
        {
            var test = new SimpleTernaryOpTest__BlendVariableDouble();

            if (test.IsSupported)
            {
                // Validates basic functionality works, using Unsafe.Read
                test.RunBasicScenario_UnsafeRead();

                if (Avx.IsSupported)
                {
                    // Validates basic functionality works, using Load
                    test.RunBasicScenario_Load();

                    // Validates basic functionality works, using LoadAligned
                    test.RunBasicScenario_LoadAligned();
                }

                // Validates calling via reflection works, using Unsafe.Read
                test.RunReflectionScenario_UnsafeRead();

                if (Avx.IsSupported)
                {
                    // Validates calling via reflection works, using Load
                    test.RunReflectionScenario_Load();

                    // Validates calling via reflection works, using LoadAligned
                    test.RunReflectionScenario_LoadAligned();
                }

                // Validates passing a static member works
                test.RunClsVarScenario();

                // Validates passing a local works, using Unsafe.Read
                test.RunLclVarScenario_UnsafeRead();

                if (Avx.IsSupported)
                {
                    // Validates passing a local works, using Load
                    test.RunLclVarScenario_Load();

                    // Validates passing a local works, using LoadAligned
                    test.RunLclVarScenario_LoadAligned();
                }

                // Validates passing the field of a local works
                test.RunLclFldScenario();

                // Validates passing an instance member works
                test.RunFldScenario();
            }
            else
            {
                // Validates we throw on unsupported hardware
                test.RunUnsupportedScenario();
            }

            if (!test.Succeeded)
            {
                throw new Exception("One or more scenarios did not complete as expected.");
            }
        }
    }

    // Exercises Avx.BlendVariable(left, right, mask) for Vector256<Double>.
    // The validation logic below checks that each result element equals the
    // second operand when the mask element's sign bit is set, and the first
    // operand otherwise.
    public sealed unsafe class SimpleTernaryOpTest__BlendVariableDouble
    {
        // VectorSize is in bytes: Vector256 is 32 bytes, i.e. 4 Double lanes.
        private const int VectorSize = 32;

        private const int Op1ElementCount = VectorSize / sizeof(Double);
        private const int Op2ElementCount = VectorSize / sizeof(Double);
        private const int Op3ElementCount = VectorSize / sizeof(Double);
        private const int RetElementCount = VectorSize / sizeof(Double);

        private static Double[] _data1 = new Double[Op1ElementCount];
        private static Double[] _data2 = new Double[Op2ElementCount];
        private static Double[] _data3 = new Double[Op3ElementCount];

        private static Vector256<Double> _clsVar1;
        private static Vector256<Double> _clsVar2;
        private static Vector256<Double> _clsVar3;

        private Vector256<Double> _fld1;
        private Vector256<Double> _fld2;
        private Vector256<Double> _fld3;

        private SimpleTernaryOpTest__DataTable<Double, Double, Double, Double> _dataTable;

        static SimpleTernaryOpTest__BlendVariableDouble()
        {
            var random = new Random();

            for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = (double)(random.NextDouble()); }
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Double>, byte>(ref _clsVar1), ref Unsafe.As<Double, byte>(ref _data1[0]), VectorSize);
            for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = (double)(random.NextDouble()); }
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Double>, byte>(ref _clsVar2), ref Unsafe.As<Double, byte>(ref _data2[0]), VectorSize);
            // Mask alternates between -0.0 (sign bit set: select op2) and 1.0 (sign bit clear: select op1).
            for (var i = 0; i < Op3ElementCount; i++) { _data3[i] = (double)(((i % 2) == 0) ? -0.0 : 1.0); }
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Double>, byte>(ref _clsVar3), ref Unsafe.As<Double, byte>(ref _data3[0]), VectorSize);
        }

        public SimpleTernaryOpTest__BlendVariableDouble()
        {
            Succeeded = true;

            var random = new Random();

            for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = (double)(random.NextDouble()); }
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Double>, byte>(ref _fld1), ref Unsafe.As<Double, byte>(ref _data1[0]), VectorSize);
            for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = (double)(random.NextDouble()); }
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Double>, byte>(ref _fld2), ref Unsafe.As<Double, byte>(ref _data2[0]), VectorSize);
            for (var i = 0; i < Op3ElementCount; i++) { _data3[i] = (double)(((i % 2) == 0) ? -0.0 : 1.0); }
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Double>, byte>(ref _fld3), ref Unsafe.As<Double, byte>(ref _data3[0]), VectorSize);

            // Regenerate the arrays so the data table holds values independent of the fields above.
            for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = (double)(random.NextDouble()); }
            for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = (double)(random.NextDouble()); }
            for (var i = 0; i < Op3ElementCount; i++) { _data3[i] = (double)(((i % 2) == 0) ? -0.0 : 1.0); }
            _dataTable = new SimpleTernaryOpTest__DataTable<Double, Double, Double, Double>(_data1, _data2, _data3, new Double[RetElementCount], VectorSize);
        }

        public bool IsSupported => Avx.IsSupported;

        public bool Succeeded { get; set; }

        public void RunBasicScenario_UnsafeRead()
        {
            var result = Avx.BlendVariable(
                Unsafe.Read<Vector256<Double>>(_dataTable.inArray1Ptr),
                Unsafe.Read<Vector256<Double>>(_dataTable.inArray2Ptr),
                Unsafe.Read<Vector256<Double>>(_dataTable.inArray3Ptr)
            );

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.inArray3Ptr, _dataTable.outArrayPtr);
        }

        public void RunBasicScenario_Load()
        {
            var result = Avx.BlendVariable(
                Avx.LoadVector256((Double*)(_dataTable.inArray1Ptr)),
                Avx.LoadVector256((Double*)(_dataTable.inArray2Ptr)),
                Avx.LoadVector256((Double*)(_dataTable.inArray3Ptr))
            );

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.inArray3Ptr, _dataTable.outArrayPtr);
        }

        public void RunBasicScenario_LoadAligned()
        {
            var result = Avx.BlendVariable(
                Avx.LoadAlignedVector256((Double*)(_dataTable.inArray1Ptr)),
                Avx.LoadAlignedVector256((Double*)(_dataTable.inArray2Ptr)),
                Avx.LoadAlignedVector256((Double*)(_dataTable.inArray3Ptr))
            );

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.inArray3Ptr, _dataTable.outArrayPtr);
        }

        public void RunReflectionScenario_UnsafeRead()
        {
            var result = typeof(Avx).GetMethod(nameof(Avx.BlendVariable), new Type[] { typeof(Vector256<Double>), typeof(Vector256<Double>), typeof(Vector256<Double>) })
                                     .Invoke(null, new object[] {
                                        Unsafe.Read<Vector256<Double>>(_dataTable.inArray1Ptr),
                                        Unsafe.Read<Vector256<Double>>(_dataTable.inArray2Ptr),
                                        Unsafe.Read<Vector256<Double>>(_dataTable.inArray3Ptr)
                                     });

            Unsafe.Write(_dataTable.outArrayPtr, (Vector256<Double>)(result));
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.inArray3Ptr, _dataTable.outArrayPtr);
        }

        public void RunReflectionScenario_Load()
        {
            var result = typeof(Avx).GetMethod(nameof(Avx.BlendVariable), new Type[] { typeof(Vector256<Double>), typeof(Vector256<Double>), typeof(Vector256<Double>) })
                                     .Invoke(null, new object[] {
                                        Avx.LoadVector256((Double*)(_dataTable.inArray1Ptr)),
                                        Avx.LoadVector256((Double*)(_dataTable.inArray2Ptr)),
                                        Avx.LoadVector256((Double*)(_dataTable.inArray3Ptr))
                                     });

            Unsafe.Write(_dataTable.outArrayPtr, (Vector256<Double>)(result));
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.inArray3Ptr, _dataTable.outArrayPtr);
        }

        public void RunReflectionScenario_LoadAligned()
        {
            var result = typeof(Avx).GetMethod(nameof(Avx.BlendVariable), new Type[] { typeof(Vector256<Double>), typeof(Vector256<Double>), typeof(Vector256<Double>) })
                                     .Invoke(null, new object[] {
                                        Avx.LoadAlignedVector256((Double*)(_dataTable.inArray1Ptr)),
                                        Avx.LoadAlignedVector256((Double*)(_dataTable.inArray2Ptr)),
                                        Avx.LoadAlignedVector256((Double*)(_dataTable.inArray3Ptr))
                                     });

            Unsafe.Write(_dataTable.outArrayPtr, (Vector256<Double>)(result));
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.inArray3Ptr, _dataTable.outArrayPtr);
        }

        public void RunClsVarScenario()
        {
            var result = Avx.BlendVariable(
                _clsVar1,
                _clsVar2,
                _clsVar3
            );

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(_clsVar1, _clsVar2, _clsVar3, _dataTable.outArrayPtr);
        }

        public void RunLclVarScenario_UnsafeRead()
        {
            var firstOp = Unsafe.Read<Vector256<Double>>(_dataTable.inArray1Ptr);
            var secondOp = Unsafe.Read<Vector256<Double>>(_dataTable.inArray2Ptr);
            var thirdOp = Unsafe.Read<Vector256<Double>>(_dataTable.inArray3Ptr);
            var result = Avx.BlendVariable(firstOp, secondOp, thirdOp);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(firstOp, secondOp, thirdOp, _dataTable.outArrayPtr);
        }

        public void RunLclVarScenario_Load()
        {
            var firstOp = Avx.LoadVector256((Double*)(_dataTable.inArray1Ptr));
            var secondOp = Avx.LoadVector256((Double*)(_dataTable.inArray2Ptr));
            var thirdOp = Avx.LoadVector256((Double*)(_dataTable.inArray3Ptr));
            var result = Avx.BlendVariable(firstOp, secondOp, thirdOp);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(firstOp, secondOp, thirdOp, _dataTable.outArrayPtr);
        }

        public void RunLclVarScenario_LoadAligned()
        {
            var firstOp = Avx.LoadAlignedVector256((Double*)(_dataTable.inArray1Ptr));
            var secondOp = Avx.LoadAlignedVector256((Double*)(_dataTable.inArray2Ptr));
            var thirdOp = Avx.LoadAlignedVector256((Double*)(_dataTable.inArray3Ptr));
            var result = Avx.BlendVariable(firstOp, secondOp, thirdOp);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(firstOp, secondOp, thirdOp, _dataTable.outArrayPtr);
        }

        public void RunLclFldScenario()
        {
            var test = new SimpleTernaryOpTest__BlendVariableDouble();
            var result = Avx.BlendVariable(test._fld1, test._fld2, test._fld3);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(test._fld1, test._fld2, test._fld3, _dataTable.outArrayPtr);
        }

        public void RunFldScenario()
        {
            var result = Avx.BlendVariable(_fld1, _fld2, _fld3);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(_fld1, _fld2, _fld3, _dataTable.outArrayPtr);
        }

        public void RunUnsupportedScenario()
        {
            Succeeded = false;

            try
            {
                RunBasicScenario_UnsafeRead();
            }
            catch (PlatformNotSupportedException)
            {
                Succeeded = true;
            }
        }

        // Validation overload for vector operands: spills the vectors to arrays, then
        // defers to the array-based check.
        private void ValidateResult(Vector256<Double> firstOp, Vector256<Double> secondOp, Vector256<Double> thirdOp, void* result, [CallerMemberName] string method = "")
        {
            Double[] inArray1 = new Double[Op1ElementCount];
            Double[] inArray2 = new Double[Op2ElementCount];
            Double[] inArray3 = new Double[Op3ElementCount];
            Double[] outArray = new Double[RetElementCount];

            Unsafe.Write(Unsafe.AsPointer(ref inArray1[0]), firstOp);
            Unsafe.Write(Unsafe.AsPointer(ref inArray2[0]), secondOp);
            Unsafe.Write(Unsafe.AsPointer(ref inArray3[0]), thirdOp);
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Double, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), VectorSize);

            ValidateResult(inArray1, inArray2, inArray3, outArray, method);
        }

        // Validation overload for raw pointers: copies each operand buffer into a
        // managed array, then defers to the array-based check.
        private void ValidateResult(void* firstOp, void* secondOp, void* thirdOp, void* result, [CallerMemberName] string method = "")
        {
            Double[] inArray1 = new Double[Op1ElementCount];
            Double[] inArray2 = new Double[Op2ElementCount];
            Double[] inArray3 = new Double[Op3ElementCount];
            Double[] outArray = new Double[RetElementCount];

            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Double, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(firstOp), VectorSize);
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Double, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(secondOp), VectorSize);
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Double, byte>(ref inArray3[0]), ref Unsafe.AsRef<byte>(thirdOp), VectorSize);
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Double, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), VectorSize);

            ValidateResult(inArray1, inArray2, inArray3, outArray, method);
        }

        // Element-wise check of BlendVariable semantics: if the mask (thirdOp) element
        // has its sign bit set, the result must bit-equal secondOp; otherwise firstOp.
        // Comparison is done on raw bits so NaNs and signed zeros compare exactly.
        private void ValidateResult(Double[] firstOp, Double[] secondOp, Double[] thirdOp, Double[] result, [CallerMemberName] string method = "")
        {
            if (((BitConverter.DoubleToInt64Bits(thirdOp[0]) >> 63) & 1) == 1 ? BitConverter.DoubleToInt64Bits(secondOp[0]) != BitConverter.DoubleToInt64Bits(result[0]) : BitConverter.DoubleToInt64Bits(firstOp[0]) != BitConverter.DoubleToInt64Bits(result[0]))
            {
                Succeeded = false;
            }
            else
            {
                for (var i = 1; i < RetElementCount; i++)
                {
                    if (((BitConverter.DoubleToInt64Bits(thirdOp[i]) >> 63) & 1) == 1 ? BitConverter.DoubleToInt64Bits(secondOp[i]) != BitConverter.DoubleToInt64Bits(result[i]) : BitConverter.DoubleToInt64Bits(firstOp[i]) != BitConverter.DoubleToInt64Bits(result[i]))
                    {
                        Succeeded = false;
                        break;
                    }
                }
            }

            if (!Succeeded)
            {
                Console.WriteLine($"{nameof(Avx)}.{nameof(Avx.BlendVariable)}<Double>(Vector256<Double>, Vector256<Double>, Vector256<Double>): {method} failed:");
                Console.WriteLine($"    firstOp: ({string.Join(", ", firstOp)})");
                Console.WriteLine($"   secondOp: ({string.Join(", ", secondOp)})");
                Console.WriteLine($"    thirdOp: ({string.Join(", ", thirdOp)})");
                Console.WriteLine($"     result: ({string.Join(", ", result)})");
                Console.WriteLine();
            }
        }
    }
}
// Graph Engine
//
// Copyright (c) Microsoft. All rights reserved.
//
// Licensed under the MIT license. See LICENSE.md file in the project root for full license information.
//
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Security;
using System.Text;
using System.Threading.Tasks;
using Trinity.Core.Lib;
using Trinity.Diagnostics;
using Trinity.TSL.Lib;
using Trinity.Utilities;

namespace Trinity.Storage
{
    #region Write-ahead-log and image signature data structures
    // A 128-bit MD5 digest, split into two 64-bit halves.
    [StructLayout(LayoutKind.Sequential, Pack = 1, Size = 16)]
    internal unsafe struct MD5_SIGNATURE
    {
        public long LowBits;
        public long HighBits;
    }

    // Image version number plus one 16-byte MD5 signature per trunk (256 trunks).
    [StructLayout(LayoutKind.Sequential, Pack = 1, Size = sizeof(ulong) + 16 * 256)]
    internal unsafe struct TRINITY_IMAGE_SIGNATURE
    {
        public ulong IMAGE_VERSION;
        public fixed byte TRUNK_SIGNATURES[256 * 16];
    }

    // Fixed-size header preceding each cell record in the write-ahead log.
    [StructLayout(LayoutKind.Sequential, Pack = 1, Size = 1 + 8 + 2 + 4)]
    internal unsafe struct LOG_RECORD_HEADER
    {
        /// <summary>
        /// As the LOG_RECORD_HEADER is only used within LocalMemoryStorage,
        /// which already calls InternalCalls.__init(), it is not necessary to
        /// make a C wrapper for this struct.
        /// </summary>
        static LOG_RECORD_HEADER()
        {
            InternalCalls.__init();
        }

        public long CELL_ID;
        // Negative CONTENT_LEN denotes a cell-removal record (see LoadWriteAheadLogFile).
        public int CONTENT_LEN;
        public ushort CELL_TYPE;
        public byte CHECKSUM; // 8-bit second-order check

        [SecurityCritical]
        [MethodImpl(MethodImplOptions.InternalCall)]
        internal static extern void CWriteAheadLogComputeChecksum(LOG_RECORD_HEADER* plog, byte* bufferPtr);

        [SecurityCritical]
        [MethodImpl(MethodImplOptions.InternalCall)]
        internal static extern bool CWriteAheadLogValidateChecksum(LOG_RECORD_HEADER* plog, byte* content);
    }

    // File header at the start of every write-ahead log: magic, version, and the
    // signature of the image the log was recorded against.
    [StructLayout(LayoutKind.Sequential, Pack = 1, Size = 8 + sizeof(ulong) + 16 * 256)]
    internal unsafe struct LOG_FILE_HEADER
    {
        public fixed byte LOG_MAGIC_HEAD[4];
        public ushort LOG_VER_MINOR;
        public ushort LOG_VER_MAJOR;
        public TRINITY_IMAGE_SIGNATURE LOG_ASSOCIATED_IMAGE_SIGNATURE;

        public static LOG_FILE_HEADER New()
        {
            LOG_FILE_HEADER ret = new LOG_FILE_HEADER();
            ret.Initialize();
            return ret;
        }

        internal void Initialize()
        {
            fixed (LOG_FILE_HEADER *p = &this)
            {
                p->LOG_MAGIC_HEAD[0] = 0x54;
                p->LOG_MAGIC_HEAD[1] = 0x4c;
                p->LOG_MAGIC_HEAD[2] = 0x4f;
                p->LOG_MAGIC_HEAD[3] = 0x47;// "TLOG"
            }
            LOG_VER_MAJOR = 1;
            LOG_VER_MINOR = 0;
        }

        internal bool CompatibilityCheck()
        {
            LOG_FILE_HEADER default_header = LOG_FILE_HEADER.New();
            fixed (LOG_FILE_HEADER* p_lhs = &this)
            {
                /* Currently we make a strict compare of magic header and version. */
                // The 8 bytes compared cover the 4-byte magic plus the two 2-byte version fields.
                return Memory.Compare((byte*)p_lhs, (byte*)&default_header, 8);
            }
        }
    }
    #endregion

    /**
     * Note: There are a lot of stdio operations in the code in this file.
     * These lines are inside LocalMemoryStorage, which calls InternalCalls.__init(),
     * so that we can call CStdio directly.
     */
    public unsafe partial class LocalMemoryStorage : Storage, IDisposable, IEnumerable<CellInfo>, IEnumerable
    {
        #region Write-ahead-log logic
        /// <summary>
        /// Tries to close the WAL file.
        /// Only called when the local storage is being disposed.
        /// </summary>
        [MethodImpl(MethodImplOptions.Synchronized)]
        private unsafe void CloseWriteAheadLogFile()
        {
            if (m_logfile == null) return;
            // NOTE(review): m_logfile is not nulled out after fclose — confirm callers
            // never touch it again after disposal.
            if (0 != CStdio.fclose(m_logfile))
            {
                Log.WriteLine(LogLevel.Error, "Failed to close the log file");
            }
        }

        /// <summary>
        /// Initialises the write-ahead logging file associated
        /// with the primary image.
        /// </summary>
        [MethodImpl(MethodImplOptions.Synchronized)]
        private unsafe void InitializeWriteAheadLogFile()
        {
            if (TrinityConfig.ReadOnly) return;
            Log.WriteLine(LogLevel.Info, "Initializing logging facility");
            try
            {
                // Replay any log left by a previous run before starting a fresh one.
                LoadWriteAheadLogFile();
                /* After loading, the log file will be dropped.
                 * So we proceed to create a new one.
                 */
                CreateWriteAheadLogFile();
            }
            catch (Exception ex)
            {
                Log.WriteLine(LogLevel.Warning, "Failed to setup the log-ahead directory: {0}", ex);
            }
        }

        // Atomically records the current log file handle/path and hands the handle
        // to the native layer.
        private unsafe void _update_write_ahead_log_file(string path, void* fp)
        {
            m_logfile = fp;
            m_logfile_path = path;
            CLocalMemoryStorage.CSetWriteAheadLogFile(fp);
        }

        /// <summary>
        /// Drops the current log file and clean up the member variables.
        /// </summary>
        [MethodImpl(MethodImplOptions.Synchronized)]
        private unsafe void DropWriteAheadLogFile()
        {
            if (TrinityConfig.ReadOnly) return;
            if (m_logfile == null) return;
            Debug.Assert(m_logfile != null);
            Debug.Assert(m_logfile_path != null);
            Log.WriteLine(LogLevel.Info, "Dropping write-ahead-log file {0}", m_logfile_path);
            if (0 != CStdio.fclose(m_logfile))
            {
                Log.WriteLine(LogLevel.Error, "Failed to close the log file");
            }
            try
            {
                File.Delete(m_logfile_path);
            }
            catch (Exception ex)
            {
                Log.WriteLine(LogLevel.Error, "Failed to delete the log file: {0}", ex);
            }
            _update_write_ahead_log_file(null, null);
        }

        /// <summary>
        /// Creates a new log file for current storage.
        /// If the file exists, it will be overwritten.
        /// </summary>
        [MethodImpl(MethodImplOptions.Synchronized)]
        private unsafe void CreateWriteAheadLogFile()
        {
            if (TrinityConfig.ReadOnly) return;
            string path = WriteAheadLogFilePath;
            Log.WriteLine(LogLevel.Info, "Creating write-ahead log file {0}", path);
            DropWriteAheadLogFile();
            // A file may still exist here (e.g. an orphaned log from another run); keep a backup.
            if (File.Exists(path))
            {
                BackupWriteAheadLogFile(path);
            }
            void* new_fp = null;
            if (0 != Stdio._wfopen_s(out new_fp, path, "wb"))
            {
                Log.WriteLine(LogLevel.Error, "Cannot open the log file");
                return;
            }
            // Stamp the new log with the signature of the current image so replay can
            // later verify it belongs to this image.
            LOG_FILE_HEADER header = LOG_FILE_HEADER.New();
            GetTrinityImageSignature(&header.LOG_ASSOCIATED_IMAGE_SIGNATURE);
            CStdio.fwrite(&header, (ulong)sizeof(LOG_FILE_HEADER), 1, new_fp);
            CStdio.fflush(new_fp);
            _update_write_ahead_log_file(path, new_fp);
        }

        [MethodImpl(MethodImplOptions.Synchronized)]
        private void ResetWriteAheadLog(string path)
        {
            if (TrinityConfig.ReadOnly) return;
            DropWriteAheadLogFile();
            if (File.Exists(path))
                BackupWriteAheadLogFile(path);
            InitializeWriteAheadLogFile();
        }

        /// <summary>
        /// Move the current log file to a backup(.old file).
        /// Caller should guarantee that log is not opened.
        /// </summary>
        /// <param name="path">Path of the log file to backup.</param>
        private void BackupWriteAheadLogFile(string path)
        {
            try
            {
                Log.WriteLine(LogLevel.Info, "Backing up current log file {0}", path);
                string path_old = Path.Combine(Path.GetDirectoryName(path), Path.GetFileName(path) + ".old");
                if (File.Exists(path_old))
                {
                    Log.WriteLine(LogLevel.Warning, "Deleting old log file {0}", path_old);
                    File.Delete(path_old);
                }
                Log.WriteLine(LogLevel.Info, "Moving current log file {0} to {1}", path, path_old);
                File.Move(path, path_old);
            }
            catch (Exception ex)
            {
                Log.WriteLine(LogLevel.Error, "Cannot backup the log file {0}: {1}", path, ex);
            }
        }

        /// <summary>
        /// Opens the log file in read mode and replay the actions inside,
        /// and when the logs are synced, save the storage to an image, then
        /// drop the old log file.
/// </summary> [MethodImpl(MethodImplOptions.Synchronized)] private void LoadWriteAheadLogFile() { if (TrinityConfig.ReadOnly) return; string path = WriteAheadLogFilePath; Log.WriteLine(LogLevel.Info, "Loading write-ahead log file {0}", path); LOG_FILE_HEADER header = LOG_FILE_HEADER.New(); TRINITY_IMAGE_SIGNATURE current_sig = new TRINITY_IMAGE_SIGNATURE(); LOG_RECORD_HEADER record_header = new LOG_RECORD_HEADER(); long record_cnt = 0; byte[] cell_buff = new byte[128]; void* new_fp = null; bool ver_compatible = true; bool img_compatible = true; GetTrinityImageSignature(&current_sig); DropWriteAheadLogFile(); if (!File.Exists(path)) { Log.WriteLine(LogLevel.Info, "Write ahead log doesn't exist, quit loading."); return; } if (0 != Stdio._wfopen_s(out new_fp, path, "rb")) { Log.WriteLine(LogLevel.Fatal, "Cannot open write ahead log for read. Exiting."); goto load_fail; } /* Read log header */ if (1 != CStdio.fread(&header, (ulong)sizeof(LOG_FILE_HEADER), 1, new_fp)) { Log.WriteLine(LogLevel.Fatal, "Cannot read write-ahead-log header. Exiting."); goto load_fail; } ver_compatible = header.CompatibilityCheck(); img_compatible = Memory.Compare((byte*)&header.LOG_ASSOCIATED_IMAGE_SIGNATURE, (byte*)&current_sig, sizeof(TRINITY_IMAGE_SIGNATURE)); if (!ver_compatible || !img_compatible) { /* The log is not ours. Ignore if it's empty. */ if (0 == CStdio.feof(new_fp)) { Log.WriteLine(LogLevel.Warning, "Found incompatible empty write-ahead-log file, ignoring."); CStdio.fclose(new_fp); return; } else if (this.CellCount != 0) { goto load_incompatible; } /* Otherwise, (CellCount is 0), it indicates that we're recovering from a fresh start. 
*/ } Log.WriteLine(LogLevel.Info, "Reading log file."); while (1 == CStdio.fread(&record_header, (ulong)sizeof(LOG_RECORD_HEADER), 1, new_fp)) { if (record_header.CONTENT_LEN >= 0) { /* Ensure space for the cell buffer */ if (record_header.CONTENT_LEN > cell_buff.Length) { if (record_header.CONTENT_LEN < 1<<20) { cell_buff = new byte[record_header.CONTENT_LEN * 2]; } else { cell_buff = new byte[record_header.CONTENT_LEN]; } } fixed (byte* p_buff = cell_buff) { if (1 != CStdio.fread(p_buff, (ulong)record_header.CONTENT_LEN, 1, new_fp) && record_header.CONTENT_LEN != 0) { Log.WriteLine(LogLevel.Error, "Incomplete write-ahead-log record at the end of file"); break; } if (false == LOG_RECORD_HEADER.CWriteAheadLogValidateChecksum(&record_header, p_buff)) { Log.WriteLine(LogLevel.Fatal, "Checksum mismatch for log record #{0}", record_cnt); goto load_fail; } this.SaveCell(record_header.CELL_ID, p_buff, record_header.CONTENT_LEN, record_header.CELL_TYPE); } } else /* if (record_header.CONTENT_LEN < 0) */ { if (false == LOG_RECORD_HEADER.CWriteAheadLogValidateChecksum(&record_header, null)) { Log.WriteLine(LogLevel.Fatal, "Checksum mismatch for log record #{0}", record_cnt); goto load_fail; } this.RemoveCell(record_header.CELL_ID); } ++record_cnt; } goto load_success; //////////////////////////////////////// load_incompatible: if (ver_compatible) { Log.WriteLine(LogLevel.Fatal, "The log file is incompatible with the current version. Cannot recover."); } if (img_compatible) { Log.WriteLine(LogLevel.Fatal, "The log file has a different signature than the current image. Cannot recover."); } goto load_fail; //////////////////////////////////////// load_success: Log.WriteLine(LogLevel.Info, "Write-ahead-log successfully loaded. Recovered {0} records.", record_cnt); if (0 != CStdio.fclose(new_fp)) { Log.WriteLine(LogLevel.Error, "Cannot close the write-ahead-log file. Logging disabled."); return; } /* Only save storage when the log is not empty. 
*/ if (record_cnt == 0 || TrinityErrorCode.E_SUCCESS == SaveStorage()) { /* Save storage succeded. Dropping old logs now. */ try { File.Delete(path); } catch (Exception ex) { Log.WriteLine(LogLevel.Error, "Failed to delete the old logs: {0}", ex); } } else { /* Save storage failed. */ Log.WriteLine(LogLevel.Fatal, "Failed to save the recovered storage. The old log is retained"); goto load_fail; } return; //////////////////////////////////////// load_fail: if (new_fp != null) CStdio.fclose(new_fp); Environment.Exit(-1); } /// <summary> /// Only for unit test purpose. /// </summary> [MethodImpl(MethodImplOptions.Synchronized)] internal void _reset_write_ahead_log_status() { if (m_logfile != null) CStdio.fclose(m_logfile); _update_write_ahead_log_file(null, null); } /// <summary> /// Logs a cell action to the persistent storage. /// </summary> /// <param name="cellId">The 64-bit cell id.</param> /// <param name="cellPtr">A pointer pointing to the underlying cell buffer.</param> /// <param name="cellSize">The size of the cell in bytes.</param> /// <param name="cellType">A 16-bit unsigned integer indicating the cell type.</param> /// <param name="options">An flag indicating a cell access option.</param> [MethodImpl(MethodImplOptions.AggressiveInlining)] public unsafe static void WriteAheadLog(long cellId, byte* cellPtr, int cellSize, ushort cellType, CellAccessOptions options) { CLocalMemoryStorage.CWriteAheadLog(cellId, cellPtr, cellSize, cellType, options); } #endregion #region Overridden write-ahead logged cell interfaces /// <summary> /// Adds a new cell to the key-value store if the cell Id does not exist, or updates an existing cell in the key-value store if the cell Id already exists. /// </summary> /// <param name="writeAheadLogOptions">Specifies write-ahead logging behavior. Valid values are CellAccessOptions.StrongLogAhead(default) and CellAccessOptions.WeakLogAhead. 
Other values are ignored.</param>
        /// <param name="cellId">A 64-bit cell Id.</param>
        /// <param name="buff">A memory buffer that contains the cell content.</param>
        /// <returns>true if saving succeeds; otherwise, false.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public TrinityErrorCode SaveCell(CellAccessOptions writeAheadLogOptions, long cellId, byte[] buff)
        {
            // Whole buffer, no explicit cell type (ushort.MaxValue = untyped).
            fixed (byte* p = buff)
            {
                return CLocalMemoryStorage.CSaveCell(cellId, p, buff.Length, ushort.MaxValue, writeAheadLogOptions);
            }
        }

        /// <summary>
        /// Adds a new cell to the key-value store if the cell Id does not exist, or updates an existing cell in the key-value store if the cell Id already exists.
        /// </summary>
        /// <param name="writeAheadLogOptions">Specifies write-ahead logging behavior. Valid values are CellAccessOptions.StrongLogAhead(default) and CellAccessOptions.WeakLogAhead. Other values are ignored.</param>
        /// <param name="cellId">A 64-bit cell Id.</param>
        /// <param name="buff">A memory buffer that contains the cell content.</param>
        /// <param name="cellType">Indicates the cell type.</param>
        /// <returns>true if saving succeeds; otherwise, false.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public TrinityErrorCode SaveCell(CellAccessOptions writeAheadLogOptions, long cellId, byte[] buff, ushort cellType)
        {
            // Whole buffer with an explicit cell type.
            fixed (byte* p = buff)
            {
                return CLocalMemoryStorage.CSaveCell(cellId, p, buff.Length, cellType, writeAheadLogOptions);
            }
        }

        /// <summary>
        /// Adds a new cell to the key-value store if the cell Id does not exist, or updates an existing cell in the key-value store if the cell Id already exists.
        /// </summary>
        /// <param name="writeAheadLogOptions">Specifies write-ahead logging behavior. Valid values are CellAccessOptions.StrongLogAhead(default) and CellAccessOptions.WeakLogAhead. Other values are ignored.</param>
        /// <param name="cellId">A 64-bit cell Id.</param>
        /// <param name="buff">A memory buffer that contains the cell content.</param>
        /// <param name="offset">The byte offset into the buff.</param>
        /// <param name="cellSize">The size of the cell.</param>
        /// <returns>true if saving succeeds; otherwise, false.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public TrinityErrorCode SaveCell(CellAccessOptions writeAheadLogOptions, long cellId, byte[] buff, int offset, int cellSize)
        {
            // Sub-range of the buffer, no explicit cell type.
            fixed (byte* p = buff)
            {
                return CLocalMemoryStorage.CSaveCell(cellId, p + offset, cellSize, ushort.MaxValue, writeAheadLogOptions);
            }
        }

        /// <summary>
        /// Adds a new cell to the key-value store if the cell Id does not exist, or updates an existing cell in the key-value store if the cell Id already exists.
        /// </summary>
        /// <param name="writeAheadLogOptions">Specifies write-ahead logging behavior. Valid values are CellAccessOptions.StrongLogAhead(default) and CellAccessOptions.WeakLogAhead. Other values are ignored.</param>
        /// <param name="cellId">A 64-bit cell Id.</param>
        /// <param name="buff">A memory buffer that contains the cell content.</param>
        /// <param name="offset">The byte offset into the buff.</param>
        /// <param name="cellSize">The size of the cell.</param>
        /// <param name="cellType">Indicates the cell type.</param>
        /// <returns>true if saving succeeds; otherwise, false.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public TrinityErrorCode SaveCell(CellAccessOptions writeAheadLogOptions, long cellId, byte[] buff, int offset, int cellSize, ushort cellType)
        {
            // Sub-range of the buffer with an explicit cell type.
            fixed (byte* p = buff)
            {
                return CLocalMemoryStorage.CSaveCell(cellId, p + offset, cellSize, cellType, writeAheadLogOptions);
            }
        }

        /// <summary>
        /// Adds a new cell to the key-value store if the cell Id does not exist, or updates an existing cell in the key-value store if the cell Id already exists.
        /// </summary>
        /// <param name="writeAheadLogOptions">Specifies write-ahead logging behavior. Valid values are CellAccessOptions.StrongLogAhead(default) and CellAccessOptions.WeakLogAhead. Other values are ignored.</param>
        /// <param name="cellId">A 64-bit cell Id.</param>
        /// <param name="buff">A memory buffer that contains the cell content.</param>
        /// <param name="offset">The byte offset into the buff.</param>
        /// <param name="cellSize">The size of the cell.</param>
        /// <param name="cellType">Indicates the cell type.</param>
        /// <returns>true if saving succeeds; otherwise, false.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public TrinityErrorCode SaveCell(CellAccessOptions writeAheadLogOptions, long cellId, byte* buff, int offset, int cellSize, ushort cellType)
        {
            // Raw pointer variant: offset + size + explicit cell type.
            return CLocalMemoryStorage.CSaveCell(cellId, buff + offset, cellSize, cellType, writeAheadLogOptions);
        }

        /// <summary>
        /// Adds a new cell to the key-value store if the cell Id does not exist, or updates an existing cell in the key-value store if the cell Id already exists.
        /// </summary>
        /// <param name="writeAheadLogOptions">Specifies write-ahead logging behavior. Valid values are CellAccessOptions.StrongLogAhead(default) and CellAccessOptions.WeakLogAhead. Other values are ignored.</param>
        /// <param name="cellId">A 64-bit cell Id.</param>
        /// <param name="buff">A memory buffer that contains the cell content.</param>
        /// <param name="offset">The byte offset into the buff.</param>
        /// <param name="cellSize">The size of the cell.</param>
        /// <returns>true if saving succeeds; otherwise, false.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public TrinityErrorCode SaveCell(CellAccessOptions writeAheadLogOptions, long cellId, byte* buff, int offset, int cellSize)
        {
            // Raw pointer variant: offset + size, no explicit cell type.
            return CLocalMemoryStorage.CSaveCell(cellId, buff + offset, cellSize, ushort.MaxValue, writeAheadLogOptions);
        }

        /// <summary>
        /// Adds a new cell to the key-value store if the cell Id does not exist, or updates an existing cell in the key-value store if the cell Id already exists.
        /// </summary>
        /// <param name="writeAheadLogOptions">Specifies write-ahead logging behavior. Valid values are CellAccessOptions.StrongLogAhead(default) and CellAccessOptions.WeakLogAhead. Other values are ignored.</param>
        /// <param name="cellId">A 64-bit cell Id.</param>
        /// <param name="buff">A memory buffer that contains the cell content.</param>
        /// <param name="cellSize">The size of the cell.</param>
        /// <returns>true if saving succeeds; otherwise, false.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public TrinityErrorCode SaveCell(CellAccessOptions writeAheadLogOptions, long cellId, byte* buff, int cellSize)
        {
            // Raw pointer variant: size only, buffer starts at the cell head.
            return CLocalMemoryStorage.CSaveCell(cellId, buff, cellSize, ushort.MaxValue, writeAheadLogOptions);
        }

        /// <summary>
        /// Adds a new cell to the Trinity key-value store.
        /// </summary>
        /// <param name="writeAheadLogOptions">Specifies write-ahead logging behavior. Valid values are CellAccessOptions.StrongLogAhead(default) and CellAccessOptions.WeakLogAhead.
Other values are ignored.</param> /// <param name="cellId">A 64-bit cell Id.</param> /// <param name="buff">A memory buffer that contains the cell content.</param> /// <param name="cellSize">The size of the cell.</param> /// <returns>true if adding succeeds; otherwise, false.</returns> [MethodImpl(MethodImplOptions.AggressiveInlining)] public TrinityErrorCode AddCell(CellAccessOptions writeAheadLogOptions, long cellId, byte* buff, int cellSize) { TrinityErrorCode eResult= CLocalMemoryStorage.CAddCell(cellId, buff, cellSize, ushort.MaxValue, writeAheadLogOptions); return eResult; } /// <summary> /// Adds a new cell to the Trinity key-value store. /// </summary> /// <param name="writeAheadLogOptions">Specifies write-ahead logging behavior. Valid values are CellAccessOptions.StrongLogAhead(default) and CellAccessOptions.WeakLogAhead. Other values are ignored.</param> /// <param name="cellId">A 64-bit cell Id.</param> /// <param name="buff">A memory buffer that contains the cell content.</param> /// <param name="offset">The byte offset into the buff.</param> /// <param name="cellSize">The size of the cell.</param> /// <returns>true if adding succeeds; otherwise, false.</returns> [MethodImpl(MethodImplOptions.AggressiveInlining)] public TrinityErrorCode AddCell(CellAccessOptions writeAheadLogOptions, long cellId, byte* buff, int offset, int cellSize) { TrinityErrorCode eResult= CLocalMemoryStorage.CAddCell(cellId, buff + offset, cellSize, ushort.MaxValue, writeAheadLogOptions); return eResult; } /// <summary> /// Adds a new cell to the Trinity key-value store. /// </summary> /// <param name="writeAheadLogOptions">Specifies write-ahead logging behavior. Valid values are CellAccessOptions.StrongLogAhead(default) and CellAccessOptions.WeakLogAhead. 
Other values are ignored.</param> /// <param name="cellId">A 64-bit cell Id.</param> /// <param name="buff">A memory buffer that contains the cell content.</param> /// <returns>true if adding succeeds; otherwise, false.</returns> [MethodImpl(MethodImplOptions.AggressiveInlining)] public TrinityErrorCode AddCell(CellAccessOptions writeAheadLogOptions, long cellId, byte[] buff) { fixed (byte* p = buff) { TrinityErrorCode eResult= CLocalMemoryStorage.CAddCell(cellId, p, buff.Length, ushort.MaxValue, writeAheadLogOptions); return eResult; } } /// <summary> /// Adds a new cell to the Trinity key-value store. /// </summary> /// <param name="writeAheadLogOptions">Specifies write-ahead logging behavior. Valid values are CellAccessOptions.StrongLogAhead(default) and CellAccessOptions.WeakLogAhead. Other values are ignored.</param> /// <param name="cellId">A 64-bit cell Id.</param> /// <param name="buff">A memory buffer that contains the cell content.</param> /// <param name="offset">The byte offset into the buff.</param> /// <param name="cellSize">The size of the cell.</param> /// <returns>true if adding succeeds; otherwise, false.</returns> [MethodImpl(MethodImplOptions.AggressiveInlining)] public TrinityErrorCode AddCell(CellAccessOptions writeAheadLogOptions, long cellId, byte[] buff, int offset, int cellSize) { fixed (byte* p = buff) { TrinityErrorCode eResult= CLocalMemoryStorage.CAddCell(cellId, p + offset, cellSize, ushort.MaxValue, writeAheadLogOptions); return eResult; } } /// <summary> /// Adds a new cell to the Trinity key-value store. /// </summary> /// <param name="writeAheadLogOptions">Specifies write-ahead logging behavior. Valid values are CellAccessOptions.StrongLogAhead(default) and CellAccessOptions.WeakLogAhead. 
Other values are ignored.</param> /// <param name="cellId">A 64-bit cell Id.</param> /// <param name="buff">A memory buffer that contains the cell content.</param> /// <param name="offset">The byte offset into the buff.</param> /// <param name="cellSize">The size of the cell.</param> /// <param name="cellType">Indicates the cell type.</param> /// <returns>true if adding succeeds; otherwise, false.</returns> [MethodImpl(MethodImplOptions.AggressiveInlining)] public TrinityErrorCode AddCell(CellAccessOptions writeAheadLogOptions, long cellId, byte[] buff, int offset, int cellSize, ushort cellType) { fixed (byte* p = buff) { TrinityErrorCode eResult= CLocalMemoryStorage.CAddCell(cellId, p + offset, cellSize, cellType, writeAheadLogOptions); return eResult; } } /// <summary> /// Adds a new cell to the Trinity key-value store. /// </summary> /// <param name="writeAheadLogOptions">Specifies write-ahead logging behavior. Valid values are CellAccessOptions.StrongLogAhead(default) and CellAccessOptions.WeakLogAhead. Other values are ignored.</param> /// <param name="cellId">A 64-bit cell Id.</param> /// <param name="buff">A memory buffer that contains the cell content.</param> /// <param name="offset">The byte offset into the buff.</param> /// <param name="cellSize">The size of the cell.</param> /// <param name="cellType">Indicates the cell type.</param> /// <returns>true if adding succeeds; otherwise, false.</returns> [MethodImpl(MethodImplOptions.AggressiveInlining)] public TrinityErrorCode AddCell(CellAccessOptions writeAheadLogOptions, long cellId, byte* buff, int offset, int cellSize, ushort cellType) { TrinityErrorCode eResult= CLocalMemoryStorage.CAddCell(cellId, buff + offset, cellSize, cellType, writeAheadLogOptions); return eResult; } /// <summary> /// Updates an existing cell in the Trinity key-value store. /// </summary> /// <param name="writeAheadLogOptions">Specifies write-ahead logging behavior. 
Valid values are CellAccessOptions.StrongLogAhead(default) and CellAccessOptions.WeakLogAhead. Other values are ignored.</param> /// <param name="cellId">A 64-bit cell Id.</param> /// <param name="buff">A memory buffer that contains the cell content.</param> /// <param name="offset">The byte offset into the buff.</param> /// <param name="cellSize">The size of the cell.</param> /// <returns>true if updating succeeds; otherwise, false.</returns> [MethodImpl(MethodImplOptions.AggressiveInlining)] public TrinityErrorCode UpdateCell(CellAccessOptions writeAheadLogOptions, long cellId, byte* buff, int offset, int cellSize) { TrinityErrorCode eResult= CLocalMemoryStorage.CUpdateCell(cellId, buff + offset, cellSize, writeAheadLogOptions); return eResult; } /// <summary> /// Updates an existing cell in the Trinity key-value store. /// </summary> /// <param name="writeAheadLogOptions">Specifies write-ahead logging behavior. Valid values are CellAccessOptions.StrongLogAhead(default) and CellAccessOptions.WeakLogAhead. Other values are ignored.</param> /// <param name="cellId">A 64-bit cell Id.</param> /// <param name="buff">A memory buffer that contains the cell content.</param> /// <param name="cellSize">The size of the cell.</param> /// <returns>true if updating succeeds; otherwise, false.</returns> [MethodImpl(MethodImplOptions.AggressiveInlining)] public TrinityErrorCode UpdateCell(CellAccessOptions writeAheadLogOptions, long cellId, byte* buff, int cellSize) { TrinityErrorCode eResult= CLocalMemoryStorage.CUpdateCell(cellId, buff, cellSize, writeAheadLogOptions); return eResult; } /// <summary> /// Updates an existing cell in the Trinity key-value store. /// </summary> /// <param name="writeAheadLogOptions">Specifies write-ahead logging behavior. Valid values are CellAccessOptions.StrongLogAhead(default) and CellAccessOptions.WeakLogAhead. 
Other values are ignored.</param> /// <param name="cellId">A 64-bit cell Id.</param> /// <param name="buff">A memory buffer that contains the cell content.</param> /// <returns>true if updating succeeds; otherwise, false.</returns> [MethodImpl(MethodImplOptions.AggressiveInlining)] public TrinityErrorCode UpdateCell(CellAccessOptions writeAheadLogOptions, long cellId, byte[] buff) { fixed (byte* p = buff) { TrinityErrorCode eResult= CLocalMemoryStorage.CUpdateCell(cellId, p, buff.Length, writeAheadLogOptions); return eResult; } } /// <summary> /// Updates an existing cell in the Trinity key-value store. /// </summary> /// <param name="writeAheadLogOptions">Specifies write-ahead logging behavior. Valid values are CellAccessOptions.StrongLogAhead(default) and CellAccessOptions.WeakLogAhead. Other values are ignored.</param> /// <param name="cellId">A 64-bit cell Id.</param> /// <param name="buff">A memory buffer that contains the cell content.</param> /// <param name="offset">The byte offset into the buff.</param> /// <param name="cellSize">The size of the cell.</param> /// <returns>true if updating succeeds; otherwise, false.</returns> [MethodImpl(MethodImplOptions.AggressiveInlining)] public TrinityErrorCode UpdateCell(CellAccessOptions writeAheadLogOptions, long cellId, byte[] buff, int offset, int cellSize) { fixed (byte* p = buff) { TrinityErrorCode eResult= CLocalMemoryStorage.CUpdateCell(cellId, p + offset, cellSize, writeAheadLogOptions); return eResult; } } /// <summary> /// Removes the cell with the specified cell Id from the key-value store. /// </summary> /// <param name="writeAheadLogOptions">Specifies write-ahead logging behavior. Valid values are CellAccessOptions.StrongLogAhead(default) and CellAccessOptions.WeakLogAhead. 
Other values are ignored.</param> /// <param name="cellId">A 64-bit cell Id.</param> /// <returns>true if removing succeeds; otherwise, false.</returns> [MethodImpl(MethodImplOptions.AggressiveInlining)] public TrinityErrorCode RemoveCell(CellAccessOptions writeAheadLogOptions, long cellId) { TrinityErrorCode eResult= CLocalMemoryStorage.CRemoveCell(cellId, writeAheadLogOptions); return eResult; } #endregion } }
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Text;
using System.Diagnostics.CodeAnalysis;
using System.Threading;
using System.Threading.Tasks;
using System.Diagnostics;
using System.Diagnostics.Contracts;

namespace System.IO
{
    // This abstract base class represents a reader that can read a sequential
    // stream of characters. This is not intended for reading bytes -
    // there are methods on the Stream class to read bytes.
    // A subclass must minimally implement the Peek() and Read() methods.
    //
    // This class is intended for character input, not bytes.
    // There are methods on the Stream class for reading bytes.
    public abstract class TextReader : IDisposable
    {
        // A reader with no backing store: every read reports end-of-input.
        public static readonly TextReader Null = new NullTextReader();

        protected TextReader()
        {
        }

        public void Dispose()
        {
            Dispose(true);
            GC.SuppressFinalize(this);
        }

        protected virtual void Dispose(bool disposing)
        {
        }

        // Returns the next available character without actually reading it from
        // the input stream. The current position of the TextReader is not changed by
        // this operation. The returned value is -1 if no further characters are
        // available.
        //
        // This default method simply returns -1.
        //
        [Pure]
        public virtual int Peek()
        {
            return -1;
        }

        // Reads the next character from the input stream. The returned value is
        // -1 if no further characters are available.
        //
        // This default method simply returns -1.
        //
        public virtual int Read()
        {
            return -1;
        }

        // Reads a block of characters. This method will read up to
        // count characters from this TextReader into the
        // buffer character array starting at position
        // index. Returns the actual number of characters read.
        //
        public virtual int Read(char[] buffer, int index, int count)
        {
            if (buffer == null)
            {
                throw new ArgumentNullException(nameof(buffer), SR.ArgumentNull_Buffer);
            }
            if (index < 0)
            {
                throw new ArgumentOutOfRangeException(nameof(index), SR.ArgumentOutOfRange_NeedNonNegNum);
            }
            if (count < 0)
            {
                throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_NeedNonNegNum);
            }
            if (buffer.Length - index < count)
            {
                throw new ArgumentException(SR.Argument_InvalidOffLen);
            }

            int n = 0;
            do
            {
                int ch = Read();
                if (ch == -1)
                {
                    break;
                }
                buffer[index + n++] = (char)ch;
            } while (n < count);
            return n;
        }

        // Reads all characters from the current position to the end of the
        // TextReader, and returns them as one string.
        public virtual string ReadToEnd()
        {
            char[] chars = new char[4096];
            int len;
            StringBuilder sb = new StringBuilder(4096);
            while ((len = Read(chars, 0, chars.Length)) != 0)
            {
                sb.Append(chars, 0, len);
            }
            return sb.ToString();
        }

        // Blocking version of read. Returns only when count
        // characters have been read or the end of the file was reached.
        //
        public virtual int ReadBlock(char[] buffer, int index, int count)
        {
            int i, n = 0;
            do
            {
                n += (i = Read(buffer, index + n, count - n));
            } while (i > 0 && n < count);
            return n;
        }

        // Reads a line. A line is defined as a sequence of characters followed by
        // a carriage return ('\r'), a line feed ('\n'), or a carriage return
        // immediately followed by a line feed. The resulting string does not
        // contain the terminating carriage return and/or line feed. The returned
        // value is null if the end of the input stream has been reached.
        //
        public virtual string ReadLine()
        {
            StringBuilder sb = new StringBuilder();
            while (true)
            {
                int ch = Read();
                if (ch == -1)
                {
                    break;
                }
                if (ch == '\r' || ch == '\n')
                {
                    // Consume the '\n' of a "\r\n" pair so it is not seen as
                    // a separate, empty line by the next call.
                    if (ch == '\r' && Peek() == '\n')
                    {
                        Read();
                    }
                    return sb.ToString();
                }
                sb.Append((char)ch);
            }
            // End of input with unterminated content: return it as the last line.
            if (sb.Length > 0)
            {
                return sb.ToString();
            }
            return null;
        }

        #region Task based Async APIs
        public virtual Task<string> ReadLineAsync()
        {
            // Offload the (potentially blocking) synchronous ReadLine to the
            // thread pool; the state object avoids a closure allocation.
            return Task<string>.Factory.StartNew(state =>
            {
                return ((TextReader)state).ReadLine();
            },
            this, CancellationToken.None, TaskCreationOptions.DenyChildAttach, TaskScheduler.Default);
        }

        public async virtual Task<string> ReadToEndAsync()
        {
            char[] chars = new char[4096];
            int len;
            StringBuilder sb = new StringBuilder(4096);
            while ((len = await ReadAsyncInternal(chars, 0, chars.Length).ConfigureAwait(false)) != 0)
            {
                sb.Append(chars, 0, len);
            }
            return sb.ToString();
        }

        public virtual Task<int> ReadAsync(char[] buffer, int index, int count)
        {
            if (buffer == null)
            {
                throw new ArgumentNullException(nameof(buffer), SR.ArgumentNull_Buffer);
            }
            if (index < 0 || count < 0)
            {
                // nameof keeps the parameter name refactor-safe, consistent with
                // the synchronous Read overload above; the produced strings are identical.
                throw new ArgumentOutOfRangeException((index < 0 ? nameof(index) : nameof(count)), SR.ArgumentOutOfRange_NeedNonNegNum);
            }
            if (buffer.Length - index < count)
            {
                throw new ArgumentException(SR.Argument_InvalidOffLen);
            }

            return ReadAsyncInternal(buffer, index, count);
        }

        internal virtual Task<int> ReadAsyncInternal(char[] buffer, int index, int count)
        {
            // Arguments were validated by the public wrappers.
            Debug.Assert(buffer != null);
            Debug.Assert(index >= 0);
            Debug.Assert(count >= 0);
            Debug.Assert(buffer.Length - index >= count);

            // Pack the arguments into a tuple passed as the task state to avoid
            // a closure allocation.
            var tuple = new Tuple<TextReader, char[], int, int>(this, buffer, index, count);
            return Task<int>.Factory.StartNew(state =>
            {
                var t = (Tuple<TextReader, char[], int, int>)state;
                return t.Item1.Read(t.Item2, t.Item3, t.Item4);
            },
            tuple, CancellationToken.None, TaskCreationOptions.DenyChildAttach, TaskScheduler.Default);
        }

        public virtual Task<int> ReadBlockAsync(char[] buffer, int index, int count)
        {
            if (buffer == null)
            {
                throw new ArgumentNullException(nameof(buffer), SR.ArgumentNull_Buffer);
            }
            if (index < 0 || count < 0)
            {
                // Same refactor-safe parameter naming as ReadAsync.
                throw new ArgumentOutOfRangeException((index < 0 ? nameof(index) : nameof(count)), SR.ArgumentOutOfRange_NeedNonNegNum);
            }
            if (buffer.Length - index < count)
            {
                throw new ArgumentException(SR.Argument_InvalidOffLen);
            }

            return ReadBlockAsyncInternal(buffer, index, count);
        }

        private async Task<int> ReadBlockAsyncInternal(char[] buffer, int index, int count)
        {
            Debug.Assert(buffer != null);
            Debug.Assert(index >= 0);
            Debug.Assert(count >= 0);
            Debug.Assert(buffer.Length - index >= count);

            // Keep issuing reads until count characters arrive or a read
            // returns zero (end of input).
            int i, n = 0;
            do
            {
                i = await ReadAsyncInternal(buffer, index + n, count - n).ConfigureAwait(false);
                n += i;
            } while (i > 0 && n < count);

            return n;
        }
        #endregion

        private sealed class NullTextReader : TextReader
        {
            public NullTextReader()
            {
            }

            [SuppressMessage("Microsoft.Contracts", "CC1055")]  // Skip extra error checking to avoid *potential* AppCompat problems.
            public override int Read(char[] buffer, int index, int count)
            {
                return 0;
            }

            public override string ReadLine()
            {
                return null;
            }
        }
    }
}
namespace ATABBI.TexE { partial class FindReplaceWindow { /// <summary> /// Required designer variable. /// </summary> private System.ComponentModel.IContainer components = null; /// <summary> /// Clean up any resources being used. /// </summary> /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param> protected override void Dispose(bool disposing) { if (disposing && (components != null)) { components.Dispose(); } base.Dispose(disposing); } #region Windows Form Designer generated code /// <summary> /// Required method for Designer support - do not modify /// the contents of this method with the code editor. /// </summary> private void InitializeComponent() { System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(FindReplaceWindow)); this.tabAll = new System.Windows.Forms.TabControl(); this.tpgFind = new System.Windows.Forms.TabPage(); this.grpFindAll = new System.Windows.Forms.GroupBox(); this.btnClear = new System.Windows.Forms.Button(); this.btnFindAll = new System.Windows.Forms.Button(); this.chkHighlightMatches = new System.Windows.Forms.CheckBox(); this.chkMarkLine = new System.Windows.Forms.CheckBox(); this.chkSearchSelectionF = new System.Windows.Forms.CheckBox(); this.chkWrapF = new System.Windows.Forms.CheckBox(); this.btnFindPrevious = new System.Windows.Forms.Button(); this.btnFindNext = new System.Windows.Forms.Button(); this.cboFindF = new System.Windows.Forms.ComboBox(); this.grpOptionsF = new System.Windows.Forms.GroupBox(); this.pnlStandardOptionsF = new System.Windows.Forms.Panel(); this.chkWordStartF = new System.Windows.Forms.CheckBox(); this.chkWholeWordF = new System.Windows.Forms.CheckBox(); this.chkMatchCaseF = new System.Windows.Forms.CheckBox(); this.pnlRegexpOptionsF = new System.Windows.Forms.Panel(); this.chkSinglelineF = new System.Windows.Forms.CheckBox(); this.chkRightToLeftF = new System.Windows.Forms.CheckBox(); 
this.chkMultilineF = new System.Windows.Forms.CheckBox(); this.chkIgnorePatternWhitespaceF = new System.Windows.Forms.CheckBox(); this.chkIgnoreCaseF = new System.Windows.Forms.CheckBox(); this.chkExplicitCaptureF = new System.Windows.Forms.CheckBox(); this.chkEcmaScriptF = new System.Windows.Forms.CheckBox(); this.chkCultureInvariantF = new System.Windows.Forms.CheckBox(); this.chkCompiledF = new System.Windows.Forms.CheckBox(); this.lblSearchTypeF = new System.Windows.Forms.Label(); this.rdoRegexF = new System.Windows.Forms.RadioButton(); this.rdoStandardF = new System.Windows.Forms.RadioButton(); this.lblFindF = new System.Windows.Forms.Label(); this.tpgReplace = new System.Windows.Forms.TabPage(); this.btnReplaceAll = new System.Windows.Forms.Button(); this.cboReplace = new System.Windows.Forms.ComboBox(); this.lblReplace = new System.Windows.Forms.Label(); this.chkSearchSelectionR = new System.Windows.Forms.CheckBox(); this.chkWrapR = new System.Windows.Forms.CheckBox(); this.btnReplacePrevious = new System.Windows.Forms.Button(); this.btnReplaceNext = new System.Windows.Forms.Button(); this.cboFindR = new System.Windows.Forms.ComboBox(); this.grdOptionsR = new System.Windows.Forms.GroupBox(); this.pnlStandardOptionsR = new System.Windows.Forms.Panel(); this.chkWordStartR = new System.Windows.Forms.CheckBox(); this.chkWholeWordR = new System.Windows.Forms.CheckBox(); this.chkMatchCaseR = new System.Windows.Forms.CheckBox(); this.pnlRegexpOptionsR = new System.Windows.Forms.Panel(); this.chkSinglelineR = new System.Windows.Forms.CheckBox(); this.chkRightToLeftR = new System.Windows.Forms.CheckBox(); this.chkMultilineR = new System.Windows.Forms.CheckBox(); this.chkIgnorePatternWhitespaceR = new System.Windows.Forms.CheckBox(); this.chkIgnoreCaseR = new System.Windows.Forms.CheckBox(); this.chkExplicitCaptureR = new System.Windows.Forms.CheckBox(); this.chkEcmaScriptR = new System.Windows.Forms.CheckBox(); this.chkCultureInvariantR = new 
System.Windows.Forms.CheckBox(); this.chkCompiledR = new System.Windows.Forms.CheckBox(); this.lblSearchTypeR = new System.Windows.Forms.Label(); this.rdoRegexR = new System.Windows.Forms.RadioButton(); this.rdoStandardR = new System.Windows.Forms.RadioButton(); this.lblFindR = new System.Windows.Forms.Label(); this.tabAll.SuspendLayout(); this.tpgFind.SuspendLayout(); this.grpFindAll.SuspendLayout(); this.grpOptionsF.SuspendLayout(); this.pnlStandardOptionsF.SuspendLayout(); this.pnlRegexpOptionsF.SuspendLayout(); this.tpgReplace.SuspendLayout(); this.grdOptionsR.SuspendLayout(); this.pnlStandardOptionsR.SuspendLayout(); this.pnlRegexpOptionsR.SuspendLayout(); this.SuspendLayout(); // // tabAll // this.tabAll.Controls.Add(this.tpgFind); this.tabAll.Controls.Add(this.tpgReplace); this.tabAll.Dock = System.Windows.Forms.DockStyle.Fill; this.tabAll.Location = new System.Drawing.Point(0, 3); this.tabAll.Name = "tabAll"; this.tabAll.SelectedIndex = 0; this.tabAll.Size = new System.Drawing.Size(509, 298); this.tabAll.TabIndex = 6; // // tpgFind // this.tpgFind.Controls.Add(this.grpFindAll); this.tpgFind.Controls.Add(this.chkSearchSelectionF); this.tpgFind.Controls.Add(this.chkWrapF); this.tpgFind.Controls.Add(this.btnFindPrevious); this.tpgFind.Controls.Add(this.btnFindNext); this.tpgFind.Controls.Add(this.cboFindF); this.tpgFind.Controls.Add(this.grpOptionsF); this.tpgFind.Controls.Add(this.lblSearchTypeF); this.tpgFind.Controls.Add(this.rdoRegexF); this.tpgFind.Controls.Add(this.rdoStandardF); this.tpgFind.Controls.Add(this.lblFindF); this.tpgFind.Location = new System.Drawing.Point(4, 22); this.tpgFind.Name = "tpgFind"; this.tpgFind.Padding = new System.Windows.Forms.Padding(3); this.tpgFind.Size = new System.Drawing.Size(501, 272); this.tpgFind.TabIndex = 0; this.tpgFind.Text = "Find"; this.tpgFind.UseVisualStyleBackColor = true; // // grpFindAll // this.grpFindAll.Controls.Add(this.btnClear); this.grpFindAll.Controls.Add(this.btnFindAll); 
this.grpFindAll.Controls.Add(this.chkHighlightMatches); this.grpFindAll.Controls.Add(this.chkMarkLine); this.grpFindAll.Location = new System.Drawing.Point(5, 176); this.grpFindAll.Name = "grpFindAll"; this.grpFindAll.Size = new System.Drawing.Size(209, 65); this.grpFindAll.TabIndex = 8; this.grpFindAll.TabStop = false; this.grpFindAll.Text = "Find All"; // // btnClear // this.btnClear.Location = new System.Drawing.Point(116, 37); this.btnClear.Name = "btnClear"; this.btnClear.Size = new System.Drawing.Size(88, 23); this.btnClear.TabIndex = 3; this.btnClear.Text = "C&lear"; this.btnClear.UseVisualStyleBackColor = true; this.btnClear.Click += new System.EventHandler(this.btnClear_Click); // // btnFindAll // this.btnFindAll.Location = new System.Drawing.Point(116, 13); this.btnFindAll.Name = "btnFindAll"; this.btnFindAll.Size = new System.Drawing.Size(88, 23); this.btnFindAll.TabIndex = 2; this.btnFindAll.Text = "Find &All"; this.btnFindAll.UseVisualStyleBackColor = true; this.btnFindAll.Click += new System.EventHandler(this.btnFindAll_Click); // // chkHighlightMatches // this.chkHighlightMatches.AutoSize = true; this.chkHighlightMatches.Location = new System.Drawing.Point(6, 37); this.chkHighlightMatches.Name = "chkHighlightMatches"; this.chkHighlightMatches.Size = new System.Drawing.Size(110, 17); this.chkHighlightMatches.TabIndex = 1; this.chkHighlightMatches.Text = "&Highlight Matches"; this.chkHighlightMatches.UseVisualStyleBackColor = true; // // chkMarkLine // this.chkMarkLine.AutoSize = true; this.chkMarkLine.Location = new System.Drawing.Point(6, 20); this.chkMarkLine.Name = "chkMarkLine"; this.chkMarkLine.Size = new System.Drawing.Size(71, 17); this.chkMarkLine.TabIndex = 0; this.chkMarkLine.Text = "&Mark Line"; this.chkMarkLine.UseVisualStyleBackColor = true; // // chkSearchSelectionF // this.chkSearchSelectionF.AutoSize = true; this.chkSearchSelectionF.Location = new System.Drawing.Point(251, 72); this.chkSearchSelectionF.Name = "chkSearchSelectionF"; 
this.chkSearchSelectionF.Size = new System.Drawing.Size(105, 17); this.chkSearchSelectionF.TabIndex = 6; this.chkSearchSelectionF.Text = "Search Selection"; this.chkSearchSelectionF.UseVisualStyleBackColor = true; // // chkWrapF // this.chkWrapF.AutoSize = true; this.chkWrapF.Checked = true; this.chkWrapF.CheckState = System.Windows.Forms.CheckState.Checked; this.chkWrapF.Location = new System.Drawing.Point(251, 55); this.chkWrapF.Name = "chkWrapF"; this.chkWrapF.Size = new System.Drawing.Size(52, 17); this.chkWrapF.TabIndex = 5; this.chkWrapF.Text = "&Wrap"; this.chkWrapF.UseVisualStyleBackColor = true; // // btnFindPrevious // this.btnFindPrevious.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right))); this.btnFindPrevious.Enabled = false; this.btnFindPrevious.Location = new System.Drawing.Point(385, 183); this.btnFindPrevious.Name = "btnFindPrevious"; this.btnFindPrevious.Size = new System.Drawing.Size(107, 23); this.btnFindPrevious.TabIndex = 9; this.btnFindPrevious.Text = "Find &Previous"; this.btnFindPrevious.UseVisualStyleBackColor = true; // // btnFindNext // this.btnFindNext.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right))); this.btnFindNext.Location = new System.Drawing.Point(385, 212); this.btnFindNext.Name = "btnFindNext"; this.btnFindNext.Size = new System.Drawing.Size(107, 23); this.btnFindNext.TabIndex = 10; this.btnFindNext.Text = "Find &Next"; this.btnFindNext.UseVisualStyleBackColor = true; this.btnFindNext.Click += new System.EventHandler(this.btnFindNext_Click); // // cboFindF // this.cboFindF.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left) | System.Windows.Forms.AnchorStyles.Right))); this.cboFindF.FormattingEnabled = true; this.cboFindF.Location = new System.Drawing.Point(59, 6); this.cboFindF.Name = "cboFindF"; 
this.cboFindF.Size = new System.Drawing.Size(434, 21); this.cboFindF.TabIndex = 1; this.cboFindF.TextChanged += new System.EventHandler(this.cboFindF_TextChanged); this.cboFindF.KeyUp += new System.Windows.Forms.KeyEventHandler(this.cboFindF_KeyUp); // // grpOptionsF // this.grpOptionsF.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left) | System.Windows.Forms.AnchorStyles.Right))); this.grpOptionsF.Controls.Add(this.pnlStandardOptionsF); this.grpOptionsF.Controls.Add(this.pnlRegexpOptionsF); this.grpOptionsF.Location = new System.Drawing.Point(4, 94); this.grpOptionsF.Name = "grpOptionsF"; this.grpOptionsF.Size = new System.Drawing.Size(491, 77); this.grpOptionsF.TabIndex = 7; this.grpOptionsF.TabStop = false; this.grpOptionsF.Text = "Options"; // // pnlStandardOptionsF // this.pnlStandardOptionsF.Controls.Add(this.chkWordStartF); this.pnlStandardOptionsF.Controls.Add(this.chkWholeWordF); this.pnlStandardOptionsF.Controls.Add(this.chkMatchCaseF); this.pnlStandardOptionsF.Dock = System.Windows.Forms.DockStyle.Fill; this.pnlStandardOptionsF.Location = new System.Drawing.Point(3, 16); this.pnlStandardOptionsF.Name = "pnlStandardOptionsF"; this.pnlStandardOptionsF.Size = new System.Drawing.Size(485, 58); this.pnlStandardOptionsF.TabIndex = 0; // // chkWordStartF // this.chkWordStartF.AutoSize = true; this.chkWordStartF.Location = new System.Drawing.Point(10, 37); this.chkWordStartF.Name = "chkWordStartF"; this.chkWordStartF.Size = new System.Drawing.Size(79, 17); this.chkWordStartF.TabIndex = 2; this.chkWordStartF.Text = "W&ord Start"; this.chkWordStartF.UseVisualStyleBackColor = true; // // chkWholeWordF // this.chkWholeWordF.AutoSize = true; this.chkWholeWordF.Location = new System.Drawing.Point(10, 20); this.chkWholeWordF.Name = "chkWholeWordF"; this.chkWholeWordF.Size = new System.Drawing.Size(85, 17); this.chkWholeWordF.TabIndex = 1; this.chkWholeWordF.Text = "Whole Wor&d"; 
this.chkWholeWordF.UseVisualStyleBackColor = true; // // chkMatchCaseF // this.chkMatchCaseF.AutoSize = true; this.chkMatchCaseF.Location = new System.Drawing.Point(10, 3); this.chkMatchCaseF.Name = "chkMatchCaseF"; this.chkMatchCaseF.Size = new System.Drawing.Size(82, 17); this.chkMatchCaseF.TabIndex = 0; this.chkMatchCaseF.Text = "Match &Case"; this.chkMatchCaseF.UseVisualStyleBackColor = true; // // pnlRegexpOptionsF // this.pnlRegexpOptionsF.Controls.Add(this.chkSinglelineF); this.pnlRegexpOptionsF.Controls.Add(this.chkRightToLeftF); this.pnlRegexpOptionsF.Controls.Add(this.chkMultilineF); this.pnlRegexpOptionsF.Controls.Add(this.chkIgnorePatternWhitespaceF); this.pnlRegexpOptionsF.Controls.Add(this.chkIgnoreCaseF); this.pnlRegexpOptionsF.Controls.Add(this.chkExplicitCaptureF); this.pnlRegexpOptionsF.Controls.Add(this.chkEcmaScriptF); this.pnlRegexpOptionsF.Controls.Add(this.chkCultureInvariantF); this.pnlRegexpOptionsF.Controls.Add(this.chkCompiledF); this.pnlRegexpOptionsF.Dock = System.Windows.Forms.DockStyle.Fill; this.pnlRegexpOptionsF.Location = new System.Drawing.Point(3, 16); this.pnlRegexpOptionsF.Name = "pnlRegexpOptionsF"; this.pnlRegexpOptionsF.Size = new System.Drawing.Size(485, 58); this.pnlRegexpOptionsF.TabIndex = 1; // // chkSinglelineF // this.chkSinglelineF.AutoSize = true; this.chkSinglelineF.Location = new System.Drawing.Point(279, 37); this.chkSinglelineF.Name = "chkSinglelineF"; this.chkSinglelineF.Size = new System.Drawing.Size(70, 17); this.chkSinglelineF.TabIndex = 8; this.chkSinglelineF.Text = "Singleline"; this.chkSinglelineF.UseVisualStyleBackColor = true; // // chkRightToLeftF // this.chkRightToLeftF.AutoSize = true; this.chkRightToLeftF.Location = new System.Drawing.Point(279, 20); this.chkRightToLeftF.Name = "chkRightToLeftF"; this.chkRightToLeftF.Size = new System.Drawing.Size(88, 17); this.chkRightToLeftF.TabIndex = 5; this.chkRightToLeftF.Text = "Right To Left"; this.chkRightToLeftF.UseVisualStyleBackColor = true; // // 
chkMultilineF // this.chkMultilineF.AutoSize = true; this.chkMultilineF.Location = new System.Drawing.Point(279, 3); this.chkMultilineF.Name = "chkMultilineF"; this.chkMultilineF.Size = new System.Drawing.Size(64, 17); this.chkMultilineF.TabIndex = 2; this.chkMultilineF.Text = "Multiline"; this.chkMultilineF.UseVisualStyleBackColor = true; // // chkIgnorePatternWhitespaceF // this.chkIgnorePatternWhitespaceF.AutoSize = true; this.chkIgnorePatternWhitespaceF.Location = new System.Drawing.Point(113, 37); this.chkIgnorePatternWhitespaceF.Name = "chkIgnorePatternWhitespaceF"; this.chkIgnorePatternWhitespaceF.Size = new System.Drawing.Size(156, 17); this.chkIgnorePatternWhitespaceF.TabIndex = 7; this.chkIgnorePatternWhitespaceF.Text = "I&gnore Pattern Whitespace"; this.chkIgnorePatternWhitespaceF.UseVisualStyleBackColor = true; // // chkIgnoreCaseF // this.chkIgnoreCaseF.AutoSize = true; this.chkIgnoreCaseF.Location = new System.Drawing.Point(113, 20); this.chkIgnoreCaseF.Name = "chkIgnoreCaseF"; this.chkIgnoreCaseF.Size = new System.Drawing.Size(85, 17); this.chkIgnoreCaseF.TabIndex = 4; this.chkIgnoreCaseF.Text = "&Ignore Case"; this.chkIgnoreCaseF.UseVisualStyleBackColor = true; // // chkExplicitCaptureF // this.chkExplicitCaptureF.AutoSize = true; this.chkExplicitCaptureF.Location = new System.Drawing.Point(113, 3); this.chkExplicitCaptureF.Name = "chkExplicitCaptureF"; this.chkExplicitCaptureF.Size = new System.Drawing.Size(101, 17); this.chkExplicitCaptureF.TabIndex = 1; this.chkExplicitCaptureF.Text = "E&xplicit Capture"; this.chkExplicitCaptureF.UseVisualStyleBackColor = true; // // chkEcmaScriptF // this.chkEcmaScriptF.AutoSize = true; this.chkEcmaScriptF.Location = new System.Drawing.Point(3, 37); this.chkEcmaScriptF.Name = "chkEcmaScriptF"; this.chkEcmaScriptF.Size = new System.Drawing.Size(84, 17); this.chkEcmaScriptF.TabIndex = 6; this.chkEcmaScriptF.Text = "ECMA Script"; this.chkEcmaScriptF.UseVisualStyleBackColor = true; // // chkCultureInvariantF // 
this.chkCultureInvariantF.AutoSize = true; this.chkCultureInvariantF.Location = new System.Drawing.Point(3, 20); this.chkCultureInvariantF.Name = "chkCultureInvariantF"; this.chkCultureInvariantF.Size = new System.Drawing.Size(108, 17); this.chkCultureInvariantF.TabIndex = 3; this.chkCultureInvariantF.Text = "C&ulture Invariant"; this.chkCultureInvariantF.UseVisualStyleBackColor = true; // // chkCompiledF // this.chkCompiledF.AutoSize = true; this.chkCompiledF.Location = new System.Drawing.Point(3, 3); this.chkCompiledF.Name = "chkCompiledF"; this.chkCompiledF.Size = new System.Drawing.Size(69, 17); this.chkCompiledF.TabIndex = 0; this.chkCompiledF.Text = "&Compiled"; this.chkCompiledF.UseVisualStyleBackColor = true; // // lblSearchTypeF // this.lblSearchTypeF.AutoSize = true; this.lblSearchTypeF.Location = new System.Drawing.Point(8, 52); this.lblSearchTypeF.Name = "lblSearchTypeF"; this.lblSearchTypeF.Size = new System.Drawing.Size(67, 13); this.lblSearchTypeF.TabIndex = 2; this.lblSearchTypeF.Text = "Search Type"; // // rdoRegexF // this.rdoRegexF.AutoSize = true; this.rdoRegexF.Location = new System.Drawing.Point(102, 71); this.rdoRegexF.Name = "rdoRegexF"; this.rdoRegexF.Size = new System.Drawing.Size(117, 17); this.rdoRegexF.TabIndex = 4; this.rdoRegexF.Text = "Regular &Expression"; this.rdoRegexF.UseVisualStyleBackColor = true; // // rdoStandardF // this.rdoStandardF.AutoSize = true; this.rdoStandardF.Checked = true; this.rdoStandardF.Location = new System.Drawing.Point(27, 71); this.rdoStandardF.Name = "rdoStandardF"; this.rdoStandardF.Size = new System.Drawing.Size(69, 17); this.rdoStandardF.TabIndex = 3; this.rdoStandardF.TabStop = true; this.rdoStandardF.Text = "&Standard"; this.rdoStandardF.UseVisualStyleBackColor = true; // // lblFindF // this.lblFindF.AutoSize = true; this.lblFindF.Location = new System.Drawing.Point(8, 10); this.lblFindF.Name = "lblFindF"; this.lblFindF.Size = new System.Drawing.Size(27, 13); this.lblFindF.TabIndex = 0; 
this.lblFindF.Text = "&Find"; // // tpgReplace // this.tpgReplace.Controls.Add(this.btnReplaceAll); this.tpgReplace.Controls.Add(this.cboReplace); this.tpgReplace.Controls.Add(this.lblReplace); this.tpgReplace.Controls.Add(this.chkSearchSelectionR); this.tpgReplace.Controls.Add(this.chkWrapR); this.tpgReplace.Controls.Add(this.btnReplacePrevious); this.tpgReplace.Controls.Add(this.btnReplaceNext); this.tpgReplace.Controls.Add(this.cboFindR); this.tpgReplace.Controls.Add(this.grdOptionsR); this.tpgReplace.Controls.Add(this.lblSearchTypeR); this.tpgReplace.Controls.Add(this.rdoRegexR); this.tpgReplace.Controls.Add(this.rdoStandardR); this.tpgReplace.Controls.Add(this.lblFindR); this.tpgReplace.Location = new System.Drawing.Point(4, 22); this.tpgReplace.Name = "tpgReplace"; this.tpgReplace.Padding = new System.Windows.Forms.Padding(3); this.tpgReplace.Size = new System.Drawing.Size(501, 272); this.tpgReplace.TabIndex = 1; this.tpgReplace.Text = "Replace"; this.tpgReplace.UseVisualStyleBackColor = true; // // btnReplaceAll // this.btnReplaceAll.Enabled = false; this.btnReplaceAll.Location = new System.Drawing.Point(7, 183); this.btnReplaceAll.Name = "btnReplaceAll"; this.btnReplaceAll.Size = new System.Drawing.Size(107, 23); this.btnReplaceAll.TabIndex = 10; this.btnReplaceAll.Text = "Replace &All"; this.btnReplaceAll.UseVisualStyleBackColor = true; this.btnReplaceAll.Click += new System.EventHandler(this.btnReplaceAll_Click); // // cboReplace // this.cboReplace.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left) | System.Windows.Forms.AnchorStyles.Right))); this.cboReplace.FormattingEnabled = true; this.cboReplace.Location = new System.Drawing.Point(59, 28); this.cboReplace.Name = "cboReplace"; this.cboReplace.Size = new System.Drawing.Size(471, 21); this.cboReplace.TabIndex = 3; // // lblReplace // this.lblReplace.AutoSize = true; this.lblReplace.Location = new System.Drawing.Point(8, 32); 
this.lblReplace.Name = "lblReplace"; this.lblReplace.Size = new System.Drawing.Size(45, 13); this.lblReplace.TabIndex = 2; this.lblReplace.Text = "&Replace"; // // chkSearchSelectionR // this.chkSearchSelectionR.AutoSize = true; this.chkSearchSelectionR.Location = new System.Drawing.Point(251, 72); this.chkSearchSelectionR.Name = "chkSearchSelectionR"; this.chkSearchSelectionR.Size = new System.Drawing.Size(105, 17); this.chkSearchSelectionR.TabIndex = 8; this.chkSearchSelectionR.Text = "Search Selection"; this.chkSearchSelectionR.UseVisualStyleBackColor = true; // // chkWrapR // this.chkWrapR.AutoSize = true; this.chkWrapR.Checked = true; this.chkWrapR.CheckState = System.Windows.Forms.CheckState.Checked; this.chkWrapR.Location = new System.Drawing.Point(251, 55); this.chkWrapR.Name = "chkWrapR"; this.chkWrapR.Size = new System.Drawing.Size(52, 17); this.chkWrapR.TabIndex = 7; this.chkWrapR.Text = "&Wrap"; this.chkWrapR.UseVisualStyleBackColor = true; // // btnReplacePrevious // this.btnReplacePrevious.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right))); this.btnReplacePrevious.Enabled = false; this.btnReplacePrevious.Location = new System.Drawing.Point(368, 183); this.btnReplacePrevious.Name = "btnReplacePrevious"; this.btnReplacePrevious.Size = new System.Drawing.Size(107, 23); this.btnReplacePrevious.TabIndex = 11; this.btnReplacePrevious.Text = "Replace &Previous"; this.btnReplacePrevious.UseVisualStyleBackColor = true; // // btnReplaceNext // this.btnReplaceNext.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right))); this.btnReplaceNext.Location = new System.Drawing.Point(368, 212); this.btnReplaceNext.Name = "btnReplaceNext"; this.btnReplaceNext.Size = new System.Drawing.Size(107, 23); this.btnReplaceNext.TabIndex = 12; this.btnReplaceNext.Text = "Replace &Next"; this.btnReplaceNext.UseVisualStyleBackColor = 
true; this.btnReplaceNext.Click += new System.EventHandler(this.btnReplaceNext_Click); // // cboFindR // this.cboFindR.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left) | System.Windows.Forms.AnchorStyles.Right))); this.cboFindR.FormattingEnabled = true; this.cboFindR.Location = new System.Drawing.Point(59, 6); this.cboFindR.Name = "cboFindR"; this.cboFindR.Size = new System.Drawing.Size(471, 21); this.cboFindR.TabIndex = 1; // // grdOptionsR // this.grdOptionsR.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left) | System.Windows.Forms.AnchorStyles.Right))); this.grdOptionsR.Controls.Add(this.pnlStandardOptionsR); this.grdOptionsR.Controls.Add(this.pnlRegexpOptionsR); this.grdOptionsR.Location = new System.Drawing.Point(4, 94); this.grdOptionsR.Name = "grdOptionsR"; this.grdOptionsR.Size = new System.Drawing.Size(528, 77); this.grdOptionsR.TabIndex = 9; this.grdOptionsR.TabStop = false; this.grdOptionsR.Text = "Options"; // // pnlStandardOptionsR // this.pnlStandardOptionsR.Controls.Add(this.chkWordStartR); this.pnlStandardOptionsR.Controls.Add(this.chkWholeWordR); this.pnlStandardOptionsR.Controls.Add(this.chkMatchCaseR); this.pnlStandardOptionsR.Dock = System.Windows.Forms.DockStyle.Fill; this.pnlStandardOptionsR.Location = new System.Drawing.Point(3, 16); this.pnlStandardOptionsR.Name = "pnlStandardOptionsR"; this.pnlStandardOptionsR.Size = new System.Drawing.Size(522, 58); this.pnlStandardOptionsR.TabIndex = 0; // // chkWordStartR // this.chkWordStartR.AutoSize = true; this.chkWordStartR.Location = new System.Drawing.Point(10, 37); this.chkWordStartR.Name = "chkWordStartR"; this.chkWordStartR.Size = new System.Drawing.Size(79, 17); this.chkWordStartR.TabIndex = 2; this.chkWordStartR.Text = "W&ord Start"; this.chkWordStartR.UseVisualStyleBackColor = true; // // chkWholeWordR // this.chkWholeWordR.AutoSize = 
true; this.chkWholeWordR.Location = new System.Drawing.Point(10, 20); this.chkWholeWordR.Name = "chkWholeWordR"; this.chkWholeWordR.Size = new System.Drawing.Size(85, 17); this.chkWholeWordR.TabIndex = 1; this.chkWholeWordR.Text = "Whole Wor&d"; this.chkWholeWordR.UseVisualStyleBackColor = true; // // chkMatchCaseR // this.chkMatchCaseR.AutoSize = true; this.chkMatchCaseR.Location = new System.Drawing.Point(10, 3); this.chkMatchCaseR.Name = "chkMatchCaseR"; this.chkMatchCaseR.Size = new System.Drawing.Size(82, 17); this.chkMatchCaseR.TabIndex = 0; this.chkMatchCaseR.Text = "Match &Case"; this.chkMatchCaseR.UseVisualStyleBackColor = true; // // pnlRegexpOptionsR // this.pnlRegexpOptionsR.Controls.Add(this.chkSinglelineR); this.pnlRegexpOptionsR.Controls.Add(this.chkRightToLeftR); this.pnlRegexpOptionsR.Controls.Add(this.chkMultilineR); this.pnlRegexpOptionsR.Controls.Add(this.chkIgnorePatternWhitespaceR); this.pnlRegexpOptionsR.Controls.Add(this.chkIgnoreCaseR); this.pnlRegexpOptionsR.Controls.Add(this.chkExplicitCaptureR); this.pnlRegexpOptionsR.Controls.Add(this.chkEcmaScriptR); this.pnlRegexpOptionsR.Controls.Add(this.chkCultureInvariantR); this.pnlRegexpOptionsR.Controls.Add(this.chkCompiledR); this.pnlRegexpOptionsR.Dock = System.Windows.Forms.DockStyle.Fill; this.pnlRegexpOptionsR.Location = new System.Drawing.Point(3, 16); this.pnlRegexpOptionsR.Name = "pnlRegexpOptionsR"; this.pnlRegexpOptionsR.Size = new System.Drawing.Size(522, 58); this.pnlRegexpOptionsR.TabIndex = 1; // // chkSinglelineR // this.chkSinglelineR.AutoSize = true; this.chkSinglelineR.Location = new System.Drawing.Point(279, 37); this.chkSinglelineR.Name = "chkSinglelineR"; this.chkSinglelineR.Size = new System.Drawing.Size(70, 17); this.chkSinglelineR.TabIndex = 8; this.chkSinglelineR.Text = "Singleline"; this.chkSinglelineR.UseVisualStyleBackColor = true; // // chkRightToLeftR // this.chkRightToLeftR.AutoSize = true; this.chkRightToLeftR.Location = new System.Drawing.Point(279, 20); 
this.chkRightToLeftR.Name = "chkRightToLeftR"; this.chkRightToLeftR.Size = new System.Drawing.Size(88, 17); this.chkRightToLeftR.TabIndex = 7; this.chkRightToLeftR.Text = "Right To Left"; this.chkRightToLeftR.UseVisualStyleBackColor = true; // // chkMultilineR // this.chkMultilineR.AutoSize = true; this.chkMultilineR.Location = new System.Drawing.Point(279, 3); this.chkMultilineR.Name = "chkMultilineR"; this.chkMultilineR.Size = new System.Drawing.Size(64, 17); this.chkMultilineR.TabIndex = 6; this.chkMultilineR.Text = "Multiline"; this.chkMultilineR.UseVisualStyleBackColor = true; // // chkIgnorePatternWhitespaceR // this.chkIgnorePatternWhitespaceR.AutoSize = true; this.chkIgnorePatternWhitespaceR.Location = new System.Drawing.Point(113, 37); this.chkIgnorePatternWhitespaceR.Name = "chkIgnorePatternWhitespaceR"; this.chkIgnorePatternWhitespaceR.Size = new System.Drawing.Size(156, 17); this.chkIgnorePatternWhitespaceR.TabIndex = 5; this.chkIgnorePatternWhitespaceR.Text = "I&gnore Pattern Whitespace"; this.chkIgnorePatternWhitespaceR.UseVisualStyleBackColor = true; // // chkIgnoreCaseR // this.chkIgnoreCaseR.AutoSize = true; this.chkIgnoreCaseR.Location = new System.Drawing.Point(113, 20); this.chkIgnoreCaseR.Name = "chkIgnoreCaseR"; this.chkIgnoreCaseR.Size = new System.Drawing.Size(85, 17); this.chkIgnoreCaseR.TabIndex = 4; this.chkIgnoreCaseR.Text = "&Ignore Case"; this.chkIgnoreCaseR.UseVisualStyleBackColor = true; // // chkExplicitCaptureR // this.chkExplicitCaptureR.AutoSize = true; this.chkExplicitCaptureR.Location = new System.Drawing.Point(113, 3); this.chkExplicitCaptureR.Name = "chkExplicitCaptureR"; this.chkExplicitCaptureR.Size = new System.Drawing.Size(101, 17); this.chkExplicitCaptureR.TabIndex = 3; this.chkExplicitCaptureR.Text = "E&xplicit Capture"; this.chkExplicitCaptureR.UseVisualStyleBackColor = true; // // chkEcmaScriptR // this.chkEcmaScriptR.AutoSize = true; this.chkEcmaScriptR.Location = new System.Drawing.Point(3, 37); 
this.chkEcmaScriptR.Name = "chkEcmaScriptR"; this.chkEcmaScriptR.Size = new System.Drawing.Size(84, 17); this.chkEcmaScriptR.TabIndex = 2; this.chkEcmaScriptR.Text = "ECMA Script"; this.chkEcmaScriptR.UseVisualStyleBackColor = true; // // chkCultureInvariantR // this.chkCultureInvariantR.AutoSize = true; this.chkCultureInvariantR.Location = new System.Drawing.Point(3, 20); this.chkCultureInvariantR.Name = "chkCultureInvariantR"; this.chkCultureInvariantR.Size = new System.Drawing.Size(108, 17); this.chkCultureInvariantR.TabIndex = 1; this.chkCultureInvariantR.Text = "C&ulture Invariant"; this.chkCultureInvariantR.UseVisualStyleBackColor = true; // // chkCompiledR // this.chkCompiledR.AutoSize = true; this.chkCompiledR.Location = new System.Drawing.Point(3, 3); this.chkCompiledR.Name = "chkCompiledR"; this.chkCompiledR.Size = new System.Drawing.Size(69, 17); this.chkCompiledR.TabIndex = 0; this.chkCompiledR.Text = "&Compiled"; this.chkCompiledR.UseVisualStyleBackColor = true; // // lblSearchTypeR // this.lblSearchTypeR.AutoSize = true; this.lblSearchTypeR.Location = new System.Drawing.Point(8, 52); this.lblSearchTypeR.Name = "lblSearchTypeR"; this.lblSearchTypeR.Size = new System.Drawing.Size(67, 13); this.lblSearchTypeR.TabIndex = 4; this.lblSearchTypeR.Text = "Search Type"; // // rdoRegexR // this.rdoRegexR.AutoSize = true; this.rdoRegexR.Location = new System.Drawing.Point(102, 71); this.rdoRegexR.Name = "rdoRegexR"; this.rdoRegexR.Size = new System.Drawing.Size(117, 17); this.rdoRegexR.TabIndex = 6; this.rdoRegexR.Text = "Regular &Expression"; this.rdoRegexR.UseVisualStyleBackColor = true; // // rdoStandardR // this.rdoStandardR.AutoSize = true; this.rdoStandardR.Checked = true; this.rdoStandardR.Location = new System.Drawing.Point(27, 71); this.rdoStandardR.Name = "rdoStandardR"; this.rdoStandardR.Size = new System.Drawing.Size(69, 17); this.rdoStandardR.TabIndex = 5; this.rdoStandardR.TabStop = true; this.rdoStandardR.Text = "&Standard"; 
this.rdoStandardR.UseVisualStyleBackColor = true; // // lblFindR // this.lblFindR.AutoSize = true; this.lblFindR.Location = new System.Drawing.Point(8, 10); this.lblFindR.Name = "lblFindR"; this.lblFindR.Size = new System.Drawing.Size(27, 13); this.lblFindR.TabIndex = 0; this.lblFindR.Text = "&Find"; // // FindReplaceWindow // this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F); this.ClientSize = new System.Drawing.Size(509, 304); this.Controls.Add(this.tabAll); this.HideOnClose = true; this.Icon = ((System.Drawing.Icon)(resources.GetObject("$this.Icon"))); this.Name = "FindReplaceWindow"; this.Padding = new System.Windows.Forms.Padding(0, 3, 0, 3); this.ShowHint = WeifenLuo.WinFormsUI.Docking.DockState.Float; this.TabText = "Properties"; this.Text = "Find an Replace"; this.Load += new System.EventHandler(this.PropertyWindow_Load); this.tabAll.ResumeLayout(false); this.tpgFind.ResumeLayout(false); this.tpgFind.PerformLayout(); this.grpFindAll.ResumeLayout(false); this.grpFindAll.PerformLayout(); this.grpOptionsF.ResumeLayout(false); this.pnlStandardOptionsF.ResumeLayout(false); this.pnlStandardOptionsF.PerformLayout(); this.pnlRegexpOptionsF.ResumeLayout(false); this.pnlRegexpOptionsF.PerformLayout(); this.tpgReplace.ResumeLayout(false); this.tpgReplace.PerformLayout(); this.grdOptionsR.ResumeLayout(false); this.pnlStandardOptionsR.ResumeLayout(false); this.pnlStandardOptionsR.PerformLayout(); this.pnlRegexpOptionsR.ResumeLayout(false); this.pnlRegexpOptionsR.PerformLayout(); this.ResumeLayout(false); } #endregion private System.Windows.Forms.MainMenu mainMenu1; private System.Windows.Forms.MenuItem menuItem1; internal System.Windows.Forms.TabControl tabAll; private System.Windows.Forms.TabPage tpgFind; public System.Windows.Forms.GroupBox grpFindAll; private System.Windows.Forms.Button btnClear; private System.Windows.Forms.Button btnFindAll; private System.Windows.Forms.CheckBox chkHighlightMatches; private System.Windows.Forms.CheckBox chkMarkLine; 
// Designer-managed control fields. Naming convention in this file: the "F" suffix
// marks controls on the Find tab, the "R" suffix marks controls on the Replace tab.
// NOTE(review): the `internal` fields are visible outside this form — presumably
// read by other classes in the assembly; verify before changing their access.
internal System.Windows.Forms.CheckBox chkSearchSelectionF;
private System.Windows.Forms.CheckBox chkWrapF;
private System.Windows.Forms.Button btnFindPrevious;
private System.Windows.Forms.Button btnFindNext;
internal System.Windows.Forms.ComboBox cboFindF;
// Find tab: standard-search options group.
private System.Windows.Forms.GroupBox grpOptionsF;
private System.Windows.Forms.Panel pnlStandardOptionsF;
private System.Windows.Forms.CheckBox chkWordStartF;
private System.Windows.Forms.CheckBox chkWholeWordF;
private System.Windows.Forms.CheckBox chkMatchCaseF;
// Find tab: regular-expression options panel (one checkbox per RegexOptions flag).
private System.Windows.Forms.Panel pnlRegexpOptionsF;
private System.Windows.Forms.CheckBox chkSinglelineF;
private System.Windows.Forms.CheckBox chkRightToLeftF;
private System.Windows.Forms.CheckBox chkMultilineF;
private System.Windows.Forms.CheckBox chkIgnorePatternWhitespaceF;
private System.Windows.Forms.CheckBox chkIgnoreCaseF;
private System.Windows.Forms.CheckBox chkExplicitCaptureF;
private System.Windows.Forms.CheckBox chkEcmaScriptF;
private System.Windows.Forms.CheckBox chkCultureInvariantF;
private System.Windows.Forms.CheckBox chkCompiledF;
// Find tab: search-type selector (Standard vs. Regular Expression).
private System.Windows.Forms.Label lblSearchTypeF;
private System.Windows.Forms.RadioButton rdoRegexF;
private System.Windows.Forms.RadioButton rdoStandardF;
private System.Windows.Forms.Label lblFindF;
// Replace tab controls.
private System.Windows.Forms.TabPage tpgReplace;
private System.Windows.Forms.Button btnReplaceAll;
private System.Windows.Forms.ComboBox cboReplace;
private System.Windows.Forms.Label lblReplace;
internal System.Windows.Forms.CheckBox chkSearchSelectionR;
private System.Windows.Forms.CheckBox chkWrapR;
private System.Windows.Forms.Button btnReplacePrevious;
private System.Windows.Forms.Button btnReplaceNext;
internal System.Windows.Forms.ComboBox cboFindR;
// Replace tab: standard-search options group.
private System.Windows.Forms.GroupBox grdOptionsR;
private System.Windows.Forms.Panel pnlStandardOptionsR;
private System.Windows.Forms.CheckBox chkWordStartR;
private System.Windows.Forms.CheckBox chkWholeWordR;
private System.Windows.Forms.CheckBox chkMatchCaseR;
// Replace tab: regular-expression options panel.
private System.Windows.Forms.Panel pnlRegexpOptionsR;
private System.Windows.Forms.CheckBox chkSinglelineR;
private System.Windows.Forms.CheckBox chkRightToLeftR;
private System.Windows.Forms.CheckBox chkMultilineR;
private System.Windows.Forms.CheckBox chkIgnorePatternWhitespaceR;
private System.Windows.Forms.CheckBox chkIgnoreCaseR;
private System.Windows.Forms.CheckBox chkExplicitCaptureR;
private System.Windows.Forms.CheckBox chkEcmaScriptR;
private System.Windows.Forms.CheckBox chkCultureInvariantR;
private System.Windows.Forms.CheckBox chkCompiledR;
// Replace tab: search-type selector.
private System.Windows.Forms.Label lblSearchTypeR;
private System.Windows.Forms.RadioButton rdoRegexR;
private System.Windows.Forms.RadioButton rdoStandardR;
private System.Windows.Forms.Label lblFindR;
}
}
using System;
using System.Text;
using System.Data;
using System.Data.SqlClient;
using System.Data.Common;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Configuration;
using System.Xml;
using System.Xml.Serialization;
using SubSonic;
using SubSonic.Utilities;

namespace DalSic
{
	/// <summary>
	/// Strongly-typed collection for the RisEvaluacionRechazada class.
	/// </summary>
	[Serializable]
	public partial class RisEvaluacionRechazadaCollection : ActiveList<RisEvaluacionRechazada, RisEvaluacionRechazadaCollection>
	{
		public RisEvaluacionRechazadaCollection() {}

		/// <summary>
		/// Filters an existing collection based on the set criteria. This is an in-memory filter
		/// Thanks to developingchris for this!
		/// </summary>
		/// <returns>RisEvaluacionRechazadaCollection</returns>
		public RisEvaluacionRechazadaCollection Filter()
		{
			// Iterate backwards so removing the current item does not shift
			// the indices of items not yet visited.
			for (int i = this.Count - 1; i > -1; i--)
			{
				RisEvaluacionRechazada o = this[i];
				foreach (SubSonic.Where w in this.wheres)
				{
					bool remove = false;
					System.Reflection.PropertyInfo pi = o.GetType().GetProperty(w.ColumnName);
					// FIX: guard against a criterion naming a column with no matching
					// property; previously `pi.CanRead` threw NullReferenceException.
					if (pi != null && pi.CanRead)
					{
						object val = pi.GetValue(o, null);
						switch (w.Comparison)
						{
							case SubSonic.Comparison.Equals:
								// FIX: use the null-safe static Equals; the previous
								// `val.Equals(...)` threw NullReferenceException when the
								// column value was null (idEstudio, fecha and the string
								// columns are all nullable).
								if (!object.Equals(val, w.ParameterValue))
								{
									remove = true;
								}
								break;
						}
					}
					if (remove)
					{
						this.Remove(o);
						break; // item already removed; remaining criteria are moot
					}
				}
			}
			return this;
		}
	}

	/// <summary>
	/// This is an ActiveRecord class which wraps the RIS_EvaluacionRechazada table.
/// </summary>
[Serializable]
public partial class RisEvaluacionRechazada : ActiveRecord<RisEvaluacionRechazada>, IActiveRecord
{
	#region .ctors and Default Settings

	/// <summary>
	/// Creates an empty record marked as new, with in-memory column defaults applied.
	/// </summary>
	public RisEvaluacionRechazada()
	{
		SetSQLProps();
		InitSetDefaults();
		MarkNew();
	}

	// Applies the in-memory default settings declared for this table.
	private void InitSetDefaults() { SetDefaults(); }

	/// <summary>
	/// Creates an empty record marked as new; when <paramref name="useDatabaseDefaults"/>
	/// is true the database-side defaults are forced onto the instance instead.
	/// </summary>
	public RisEvaluacionRechazada(bool useDatabaseDefaults)
	{
		SetSQLProps();
		if(useDatabaseDefaults)
			ForceDefaults();
		MarkNew();
	}

	/// <summary>
	/// Loads an existing record by its primary key value.
	/// </summary>
	public RisEvaluacionRechazada(object keyID)
	{
		SetSQLProps();
		InitSetDefaults();
		LoadByKey(keyID);
	}

	/// <summary>
	/// Loads an existing record by an arbitrary column/value pair.
	/// </summary>
	public RisEvaluacionRechazada(string columnName, object columnValue)
	{
		SetSQLProps();
		InitSetDefaults();
		LoadByParam(columnName,columnValue);
	}

	// Ensures the static table schema has been built before the type is used.
	protected static void SetSQLProps() { GetTableSchema(); }

	#endregion

	#region Schema and Query Accessor

	/// <summary>
	/// Creates a new SubSonic query bound to this table's schema.
	/// </summary>
	public static Query CreateQuery() { return new Query(Schema); }

	/// <summary>
	/// Gets the table schema, building it lazily on first access.
	/// </summary>
	public static TableSchema.Table Schema
	{
		get
		{
			if (BaseSchema == null)
				SetSQLProps();
			return BaseSchema;
		}
	}

	// Builds the schema for dbo.RIS_EvaluacionRechazada once and registers it with
	// the "sicProvider" data provider so it can be queried later.
	private static void GetTableSchema()
	{
		if(!IsSchemaInitialized)
		{
			//Schema declaration
			TableSchema.Table schema = new TableSchema.Table("RIS_EvaluacionRechazada", TableType.Table, DataService.GetInstance("sicProvider"));
			schema.Columns = new TableSchema.TableColumnCollection();
			schema.SchemaName = @"dbo";
			//columns

			// idEvaluacionRechazada: auto-increment int, primary key.
			TableSchema.TableColumn colvarIdEvaluacionRechazada = new TableSchema.TableColumn(schema);
			colvarIdEvaluacionRechazada.ColumnName = "idEvaluacionRechazada";
			colvarIdEvaluacionRechazada.DataType = DbType.Int32;
			colvarIdEvaluacionRechazada.MaxLength = 0;
			colvarIdEvaluacionRechazada.AutoIncrement = true;
			colvarIdEvaluacionRechazada.IsNullable = false;
			colvarIdEvaluacionRechazada.IsPrimaryKey = true;
			colvarIdEvaluacionRechazada.IsForeignKey = false;
			colvarIdEvaluacionRechazada.IsReadOnly = false;
			colvarIdEvaluacionRechazada.DefaultSetting = @"";
			colvarIdEvaluacionRechazada.ForeignKeyTableName = "";
			schema.Columns.Add(colvarIdEvaluacionRechazada);

			// idEstudio: nullable int (declared without a foreign key, despite the name).
			TableSchema.TableColumn colvarIdEstudio = new TableSchema.TableColumn(schema);
			colvarIdEstudio.ColumnName = "idEstudio";
			colvarIdEstudio.DataType = DbType.Int32;
			colvarIdEstudio.MaxLength = 0;
			colvarIdEstudio.AutoIncrement = false;
			colvarIdEstudio.IsNullable = true;
			colvarIdEstudio.IsPrimaryKey = false;
			colvarIdEstudio.IsForeignKey = false;
			colvarIdEstudio.IsReadOnly = false;
			colvarIdEstudio.DefaultSetting = @"";
			colvarIdEstudio.ForeignKeyTableName = "";
			schema.Columns.Add(colvarIdEstudio);

			// numeroRegistro: nullable string, max length 50.
			TableSchema.TableColumn colvarNumeroRegistro = new TableSchema.TableColumn(schema);
			colvarNumeroRegistro.ColumnName = "numeroRegistro";
			colvarNumeroRegistro.DataType = DbType.String;
			colvarNumeroRegistro.MaxLength = 50;
			colvarNumeroRegistro.AutoIncrement = false;
			colvarNumeroRegistro.IsNullable = true;
			colvarNumeroRegistro.IsPrimaryKey = false;
			colvarNumeroRegistro.IsForeignKey = false;
			colvarNumeroRegistro.IsReadOnly = false;
			colvarNumeroRegistro.DefaultSetting = @"";
			colvarNumeroRegistro.ForeignKeyTableName = "";
			schema.Columns.Add(colvarNumeroRegistro);

			// fecha: nullable date/time.
			TableSchema.TableColumn colvarFecha = new TableSchema.TableColumn(schema);
			colvarFecha.ColumnName = "fecha";
			colvarFecha.DataType = DbType.DateTime;
			colvarFecha.MaxLength = 0;
			colvarFecha.AutoIncrement = false;
			colvarFecha.IsNullable = true;
			colvarFecha.IsPrimaryKey = false;
			colvarFecha.IsForeignKey = false;
			colvarFecha.IsReadOnly = false;
			colvarFecha.DefaultSetting = @"";
			colvarFecha.ForeignKeyTableName = "";
			schema.Columns.Add(colvarFecha);

			// institucionPertenece: nullable string, max length 100.
			TableSchema.TableColumn colvarInstitucionPertenece = new TableSchema.TableColumn(schema);
			colvarInstitucionPertenece.ColumnName = "institucionPertenece";
			colvarInstitucionPertenece.DataType = DbType.String;
			colvarInstitucionPertenece.MaxLength = 100;
			colvarInstitucionPertenece.AutoIncrement = false;
			colvarInstitucionPertenece.IsNullable = true;
			colvarInstitucionPertenece.IsPrimaryKey = false;
			colvarInstitucionPertenece.IsForeignKey = false;
			colvarInstitucionPertenece.IsReadOnly = false;
			colvarInstitucionPertenece.DefaultSetting = @"";
			colvarInstitucionPertenece.ForeignKeyTableName = "";
			schema.Columns.Add(colvarInstitucionPertenece);

			// responsableComite: nullable string, max length 100.
			TableSchema.TableColumn colvarResponsableComite = new TableSchema.TableColumn(schema);
			colvarResponsableComite.ColumnName = "responsableComite";
			colvarResponsableComite.DataType = DbType.String;
			colvarResponsableComite.MaxLength = 100;
			colvarResponsableComite.AutoIncrement = false;
			colvarResponsableComite.IsNullable = true;
			colvarResponsableComite.IsPrimaryKey = false;
			colvarResponsableComite.IsForeignKey = false;
			colvarResponsableComite.IsReadOnly = false;
			colvarResponsableComite.DefaultSetting = @"";
			colvarResponsableComite.ForeignKeyTableName = "";
			schema.Columns.Add(colvarResponsableComite);

			// domicilio: nullable string, max length 100.
			TableSchema.TableColumn colvarDomicilio = new TableSchema.TableColumn(schema);
			colvarDomicilio.ColumnName = "domicilio";
			colvarDomicilio.DataType = DbType.String;
			colvarDomicilio.MaxLength = 100;
			colvarDomicilio.AutoIncrement = false;
			colvarDomicilio.IsNullable = true;
			colvarDomicilio.IsPrimaryKey = false;
			colvarDomicilio.IsForeignKey = false;
			colvarDomicilio.IsReadOnly = false;
			colvarDomicilio.DefaultSetting = @"";
			colvarDomicilio.ForeignKeyTableName = "";
			schema.Columns.Add(colvarDomicilio);

			// telefono: nullable string, max length 100.
			TableSchema.TableColumn colvarTelefono = new TableSchema.TableColumn(schema);
			colvarTelefono.ColumnName = "telefono";
			colvarTelefono.DataType = DbType.String;
			colvarTelefono.MaxLength = 100;
			colvarTelefono.AutoIncrement = false;
			colvarTelefono.IsNullable = true;
			colvarTelefono.IsPrimaryKey = false;
			colvarTelefono.IsForeignKey = false;
			colvarTelefono.IsReadOnly = false;
			colvarTelefono.DefaultSetting = @"";
			colvarTelefono.ForeignKeyTableName = "";
			schema.Columns.Add(colvarTelefono);

			// mail: nullable string, max length 100.
			TableSchema.TableColumn colvarMail = new TableSchema.TableColumn(schema);
			colvarMail.ColumnName = "mail";
			colvarMail.DataType = DbType.String;
			colvarMail.MaxLength = 100;
			colvarMail.AutoIncrement = false;
			colvarMail.IsNullable = true;
			colvarMail.IsPrimaryKey = false;
			colvarMail.IsForeignKey = false;
			colvarMail.IsReadOnly = false;
			colvarMail.DefaultSetting = @"";
			colvarMail.ForeignKeyTableName = "";
			schema.Columns.Add(colvarMail);

			BaseSchema = schema;
			//add this schema to the provider
			//so we can query it later
			DataService.Providers["sicProvider"].AddSchema("RIS_EvaluacionRechazada",schema);
		}
	}

	#endregion

	#region Props

	// One typed property per column; each delegates to the ActiveRecord
	// column-value store so changes are tracked for Save().

	[XmlAttribute("IdEvaluacionRechazada")]
	[Bindable(true)]
	public int IdEvaluacionRechazada
	{
		get { return GetColumnValue<int>(Columns.IdEvaluacionRechazada); }
		set { SetColumnValue(Columns.IdEvaluacionRechazada, value); }
	}

	[XmlAttribute("IdEstudio")]
	[Bindable(true)]
	public int? IdEstudio
	{
		get { return GetColumnValue<int?>(Columns.IdEstudio); }
		set { SetColumnValue(Columns.IdEstudio, value); }
	}

	[XmlAttribute("NumeroRegistro")]
	[Bindable(true)]
	public string NumeroRegistro
	{
		get { return GetColumnValue<string>(Columns.NumeroRegistro); }
		set { SetColumnValue(Columns.NumeroRegistro, value); }
	}

	[XmlAttribute("Fecha")]
	[Bindable(true)]
	public DateTime? Fecha
	{
		get { return GetColumnValue<DateTime?>(Columns.Fecha); }
		set { SetColumnValue(Columns.Fecha, value); }
	}

	[XmlAttribute("InstitucionPertenece")]
	[Bindable(true)]
	public string InstitucionPertenece
	{
		get { return GetColumnValue<string>(Columns.InstitucionPertenece); }
		set { SetColumnValue(Columns.InstitucionPertenece, value); }
	}

	[XmlAttribute("ResponsableComite")]
	[Bindable(true)]
	public string ResponsableComite
	{
		get { return GetColumnValue<string>(Columns.ResponsableComite); }
		set { SetColumnValue(Columns.ResponsableComite, value); }
	}

	[XmlAttribute("Domicilio")]
	[Bindable(true)]
	public string Domicilio
	{
		get { return GetColumnValue<string>(Columns.Domicilio); }
		set { SetColumnValue(Columns.Domicilio, value); }
	}

	[XmlAttribute("Telefono")]
	[Bindable(true)]
	public string Telefono
	{
		get { return GetColumnValue<string>(Columns.Telefono); }
		set { SetColumnValue(Columns.Telefono, value); }
	}

	[XmlAttribute("Mail")]
	[Bindable(true)]
	public string Mail
	{
		get { return GetColumnValue<string>(Columns.Mail); }
		set { SetColumnValue(Columns.Mail, value); }
	}

	#endregion

	//no foreign key tables defined (0)

	//no ManyToMany tables defined (0)

	#region ObjectDataSource support

	/// <summary>
	/// Inserts a record, can be used with the Object Data Source
	/// </summary>
	public static void Insert(int? varIdEstudio,string varNumeroRegistro,DateTime? varFecha,string varInstitucionPertenece,string varResponsableComite,string varDomicilio,string varTelefono,string varMail)
	{
		RisEvaluacionRechazada item = new RisEvaluacionRechazada();

		item.IdEstudio = varIdEstudio;

		item.NumeroRegistro = varNumeroRegistro;

		item.Fecha = varFecha;

		item.InstitucionPertenece = varInstitucionPertenece;

		item.ResponsableComite = varResponsableComite;

		item.Domicilio = varDomicilio;

		item.Telefono = varTelefono;

		item.Mail = varMail;

		// Save under the current identity: web user when running inside ASP.NET,
		// otherwise the thread principal (used for audit columns by SubSonic).
		if (System.Web.HttpContext.Current != null)
			item.Save(System.Web.HttpContext.Current.User.Identity.Name);
		else
			item.Save(System.Threading.Thread.CurrentPrincipal.Identity.Name);
	}

	/// <summary>
	/// Updates a record, can be used with the Object Data Source
	/// </summary>
	public static void Update(int varIdEvaluacionRechazada,int? varIdEstudio,string varNumeroRegistro,DateTime? varFecha,string varInstitucionPertenece,string varResponsableComite,string varDomicilio,string varTelefono,string varMail)
	{
		RisEvaluacionRechazada item = new RisEvaluacionRechazada();

		item.IdEvaluacionRechazada = varIdEvaluacionRechazada;

		item.IdEstudio = varIdEstudio;

		item.NumeroRegistro = varNumeroRegistro;

		item.Fecha = varFecha;

		item.InstitucionPertenece = varInstitucionPertenece;

		item.ResponsableComite = varResponsableComite;

		item.Domicilio = varDomicilio;

		item.Telefono = varTelefono;

		item.Mail = varMail;

		// Mark as an existing row so Save issues an UPDATE instead of an INSERT.
		item.IsNew = false;

		if (System.Web.HttpContext.Current != null)
			item.Save(System.Web.HttpContext.Current.User.Identity.Name);
		else
			item.Save(System.Threading.Thread.CurrentPrincipal.Identity.Name);
	}

	#endregion

	#region Typed Columns

	// Strongly-typed accessors for the schema columns, indexed in the order
	// the columns are added in GetTableSchema().

	public static TableSchema.TableColumn IdEvaluacionRechazadaColumn
	{
		get { return Schema.Columns[0]; }
	}

	public static TableSchema.TableColumn IdEstudioColumn
	{
		get { return Schema.Columns[1]; }
	}

	public static TableSchema.TableColumn NumeroRegistroColumn
	{
		get { return Schema.Columns[2]; }
	}

	public static TableSchema.TableColumn FechaColumn
	{
		get { return Schema.Columns[3]; }
	}

	public static TableSchema.TableColumn InstitucionPerteneceColumn
	{
		get { return Schema.Columns[4]; }
	}

	public static TableSchema.TableColumn ResponsableComiteColumn
	{
		get { return Schema.Columns[5]; }
	}

	public static TableSchema.TableColumn DomicilioColumn
	{
		get { return Schema.Columns[6]; }
	}

	public static TableSchema.TableColumn TelefonoColumn
	{
		get { return Schema.Columns[7]; }
	}

	public static TableSchema.TableColumn MailColumn
	{
		get { return Schema.Columns[8]; }
	}

	#endregion

	#region Columns Struct

	// Raw database column names, for use in ad-hoc queries and the Props above.
	public struct Columns
	{
		public static string IdEvaluacionRechazada = @"idEvaluacionRechazada";
		public static string IdEstudio = @"idEstudio";
		public static string NumeroRegistro = @"numeroRegistro";
		public static string Fecha = @"fecha";
		public static string InstitucionPertenece = @"institucionPertenece";
		public static string ResponsableComite = @"responsableComite";
		public static string Domicilio = @"domicilio";
		public static string Telefono = @"telefono";
		public static string Mail = @"mail";
	}

	#endregion

	#region Update PK Collections

	#endregion

	#region Deep Save

	#endregion
}
}
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Text;
using System.Windows.Forms;
using System.Diagnostics;
using System.Threading;
using DokanNet;

namespace DokanSSHFS
{
    /// <summary>
    /// Main configuration window for DokanSSHFS.  Lets the user manage saved
    /// connection settings, open an SSH connection, and mount/unmount the remote
    /// file system on a local drive letter through Dokan.
    /// </summary>
    public partial class SettingForm : Form
    {
        private SSHFS sshfs;
        private DokanOptions opt;
        private string mountPoint;
        private int threadCount;
        private Settings settings = new Settings();
        private int selectedIndex = 0;

        // Thread that runs the blocking Dokan mount loop (MountWorker.Start).
        private Thread dokan;

        // Tracks whether the current mount has already been removed so exit_Click
        // does not attempt a second unmount.
        private bool isUnmounted_ = false;

        public SettingForm()
        {
            InitializeComponent();
        }

        private void SettingForm_Load(object sender, EventArgs e)
        {
            FormBorderStyle = FormBorderStyle.FixedSingle;
            notifyIcon1.Visible = true;

            SettingLoad();
        }

        /// <summary>
        /// Lets the user browse for a private key file.
        /// </summary>
        private void open_Click(object sender, EventArgs e)
        {
            if (openFileDialog1.ShowDialog() == DialogResult.OK)
            {
                privatekey.Text = openFileDialog1.FileName;
            }
        }

        // The two authentication modes behave like radio buttons: checking one
        // unchecks the other and enables only the relevant input fields.
        private void usePassword_CheckedChanged(object sender, EventArgs e)
        {
            if (usePassword.Checked)
            {
                usePrivateKey.Checked = false;
                privatekey.Enabled = false;
                passphrase.Enabled = false;
                password.Enabled = true;
                open.Enabled = false;
            }
        }

        private void usePrivateKey_CheckedChanged(object sender, EventArgs e)
        {
            if (usePrivateKey.Checked)
            {
                usePassword.Checked = false;
                privatekey.Enabled = true;
                passphrase.Enabled = true;
                password.Enabled = false;
                open.Enabled = true;
            }
        }

        private void cancel_Click(object sender, EventArgs e)
        {
            notifyIcon1.Visible = false;
            Application.Exit();
        }

        /// <summary>
        /// Validates the form input, connects over SSH and, on success, starts a
        /// background thread that mounts the remote file system via Dokan.
        /// </summary>
        private void connect_Click(object sender, EventArgs e)
        {
            this.Hide();

            int p = 22;

            sshfs = new SSHFS();
            opt = new DokanOptions();

            if (DokanSSHFS.DokanDebug)
                opt |= DokanOptions.DebugMode;
            opt |= DokanOptions.AltStream;
            // DokanOptions.KeepAlive always enabled.

            mountPoint = "n:\\";
            threadCount = 0;

            // Collect all validation failures into one message so the user sees
            // every problem at once.
            string message = "";

            if (host.Text == "")
                message += "Host name is empty\n";

            if (user.Text == "")
                message += "User name is empty\n";

            if (port.Text == "")
                message += "Port is empty\n";
            else
            {
                try
                {
                    p = Int32.Parse(port.Text);
                }
                catch (Exception)
                {
                    message += "Port format error\n";
                }
            }

            if (drive.Text.Length != 1)
            {
                message += "Drive letter is invalid\n";
            }
            else
            {
                char letter = drive.Text[0];
                letter = Char.ToLower(letter);
                // Only e..z are accepted; a..d are typically taken by floppy,
                // system and optical drives.
                if (!('e' <= letter && letter <= 'z'))
                    message += "Drive letter is invalid\n";

                mountPoint = string.Format("{0}:\\", letter);
                unmount.Text = "Unmount (" + mountPoint + ")";
            }

            threadCount = DokanSSHFS.DokanThread;

            if (message.Length != 0)
            {
                this.Show();
                MessageBox.Show(message, "Error");
                return;
            }

            DokanSSHFS.UseOffline = !withoutOfflineAttribute.Checked;

            // Exactly one of password / private-key credentials is passed,
            // depending on the selected authentication mode.
            sshfs.Initialize(
                user.Text,
                host.Text,
                p,
                usePrivateKey.Checked ? null : password.Text,
                usePrivateKey.Checked ? privatekey.Text : null,
                usePrivateKey.Checked ? passphrase.Text : null,
                root.Text,
                DokanSSHFS.SSHDebug);

            if (sshfs.SSHConnect())
            {
                unmount.Visible = true;
                mount.Visible = false;
                isUnmounted_ = false;

                // Without caching, the raw SSHFS operations are mounted directly;
                // otherwise they are wrapped in a caching decorator.
                MountWorker worker = null;
                if (disableCache.Checked)
                {
                    worker = new MountWorker(sshfs, opt, mountPoint, threadCount);
                }
                else
                {
                    worker = new MountWorker(new CacheOperations(sshfs), opt, mountPoint, threadCount);
                }

                // Dokan's mount call blocks until unmount, so run it on its own thread.
                dokan = new Thread(worker.Start);
                dokan.Start();
            }
            else
            {
                this.Show();
                MessageBox.Show("failed to connect", "Error");
                return;
            }

            MessageBox.Show("sshfs start", "info");
        }

        /// <summary>
        /// Removes the Dokan mount point (if any) and restores the mount/unmount
        /// button visibility.
        /// </summary>
        private void Unmount()
        {
            if (sshfs != null)
            {
                Debug.WriteLine(string.Format("SSHFS Trying unmount : {0}", mountPoint));

                try
                {
                    Dokan.RemoveMountPoint(mountPoint);
                    Debug.WriteLine("DokanRemoveMountPoint success\n");
                }
                catch (DokanException ex)
                {
                    Debug.WriteLine("DokanRemoveMountPoint failed: " + ex.Message + "\n");
                }

                // This should be called from Dokan, but not called.
                // Call here explicitly.
                sshfs.Unmounted(null);
            }

            unmount.Visible = false;
            mount.Visible = true;
        }

        /// <summary>
        /// Runs the blocking Dokan mount loop on a dedicated thread.
        /// </summary>
        class MountWorker
        {
            private IDokanOperations sshfs_;
            private DokanOptions opt_;
            private string mountPoint_;
            private int threadCount_;

            public MountWorker(IDokanOperations sshfs, DokanOptions opt, string mountPoint, int threadCount)
            {
                sshfs_ = sshfs;
                opt_ = opt;
                mountPoint_ = mountPoint;
                threadCount_ = threadCount;
            }

            public void Start()
            {
                System.IO.Directory.SetCurrentDirectory(Application.StartupPath);
                try
                {
                    // Blocks until the volume is unmounted.
                    sshfs_.Mount(mountPoint_, opt_, threadCount_);
                }
                catch (DokanException ex)
                {
                    MessageBox.Show(ex.Message, "Error");
                    Application.Exit();
                }
                Debug.WriteLine("DokanNet.Main end");
            }
        }

        /// <summary>
        /// Unmounts if necessary, waits for the Dokan thread to finish, and exits.
        /// </summary>
        private void exit_Click(object sender, EventArgs e)
        {
            notifyIcon1.Visible = false;

            if (!isUnmounted_)
            {
                Debug.WriteLine("unmount is visible");
                unmount.Visible = false;
                Unmount();
                isUnmounted_ = true;
            }

            Debug.WriteLine("SSHFS Thread Waiting");
            if (dokan != null && dokan.IsAlive)
            {
                Debug.WriteLine("dokan.Join");
                dokan.Join();
            }
            Debug.WriteLine("SSHFS Thread End");

            Application.Exit();
        }

        private void unmount_Click(object sender, EventArgs e)
        {
            Debug.WriteLine("unmount_Click");
            this.Unmount();
            isUnmounted_ = true;
        }

        /// <summary>
        /// Persists the currently edited values into the selected setting slot.
        /// </summary>
        private void save_Click(object sender, EventArgs e)
        {
            Setting s = settings[selectedIndex];

            s.Name = settingNames.Text;
            // The placeholder entry gets a generated unique name on first save.
            if (settingNames.Text == "New Setting")
                s.Name = settings.GetNewName();

            s.Host = host.Text;
            s.User = user.Text;
            try
            {
                s.Port = Int32.Parse(port.Text);
            }
            catch (Exception)
            {
                s.Port = 22;
            }
            s.PrivateKey = privatekey.Text;
            s.UsePassword = usePassword.Checked;
            s.Drive = drive.Text;
            s.ServerRoot = root.Text;
            s.DisableCache = disableCache.Checked;
            s.WithoutOfflineAttribute = withoutOfflineAttribute.Checked;

            settings.Save();

            // Refresh the list, then re-display the slot that was just saved.
            SettingLoad();
            SettingLoad(selectedIndex);
        }

        private void settingNames_SelectedIndexChanged(object sender, EventArgs e)
        {
            selectedIndex = settingNames.SelectedIndex;
            SettingLoad(settingNames.SelectedIndex);
        }

        /// <summary>
        /// Copies one saved setting into the form fields.  The password itself is
        /// never persisted, so the password box is always cleared.
        /// </summary>
        private void SettingLoad(int index)
        {
            Setting s = settings[index];

            host.Text = s.Host;
            user.Text = s.User;
            port.Text = s.Port.ToString();
            privatekey.Text = s.PrivateKey;
            password.Text = "";
            usePassword.Checked = s.UsePassword;
            usePrivateKey.Checked = !s.UsePassword;
            // Force the enable/disable logic to run even if the checked state
            // did not actually change.
            usePassword_CheckedChanged(null, null);
            usePrivateKey_CheckedChanged(null, null);
            disableCache.Checked = s.DisableCache;
            withoutOfflineAttribute.Checked = s.WithoutOfflineAttribute;
            drive.Text = s.Drive;
            root.Text = s.ServerRoot;
        }

        /// <summary>
        /// Reloads all settings from disk, repopulates the drop-down (plus a
        /// trailing "New Setting" placeholder) and selects the first entry.
        /// </summary>
        private void SettingLoad()
        {
            settings.Load();

            settingNames.Items.Clear();

            int count = settings.Count;
            for (int i = 0; i < count; ++i)
            {
                settingNames.Items.Add(settings[i].Name);
            }
            settingNames.Items.Add("New Setting");
            settingNames.SelectedIndex = 0;
            SettingLoad(0);
        }

        private void delete_Click(object sender, EventArgs e)
        {
            settings.Delete(selectedIndex);
            settings.Save();
            SettingLoad();
        }

        private void mount_Click(object sender, EventArgs e)
        {
            unmount.Visible = false;
            this.Show();
        }
    }
}
// Copyright 2005-2010 Gallio Project - http://www.gallio.org/
// Portions Copyright 2000-2004 Jonathan de Halleux
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Threading;
using Gallio.Common.Collections;
using Gallio.Common.Concurrency;
using Gallio.Common.Messaging;
using Gallio.Common.Messaging.MessageSinks;
using Gallio.Common.Normalization;
using Gallio.Common.Policies;
using Gallio.Model;
using Gallio.Common.Diagnostics;
using Gallio.Common.Markup;
using Gallio.Model.Messages.Exploration;
using Gallio.Model.Messages.Logging;
using Gallio.Model.Schema;
using Gallio.Common.Reflection;
using Gallio.Runner.Events;
using Gallio.Runner.Extensions;
using Gallio.Runner.Reports.Schema;
using Gallio.Runtime.Logging;
using Gallio.Runtime.ProgressMonitoring;
using Gallio.Model.Isolation;
using Gallio.Model.Messages.Execution;

namespace Gallio.Runner
{
    /// <summary>
    /// A default implementation of <see cref="ITestRunner" />.
    /// </summary>
    public class DefaultTestRunner : ITestRunner
    {
        private readonly ITestIsolationProvider testIsolationProvider;
        private readonly ITestFrameworkManager testFrameworkManager;
        private readonly TestRunnerEventDispatcher eventDispatcher;
        private readonly List<ITestRunnerExtension> extensions;

        private TappedLogger tappedLogger;
        private TestRunnerOptions testRunnerOptions;

        // Lifecycle: Created -> Initialized -> Disposed (one-way transitions).
        private State state;
        private ITestIsolationContext testIsolationContext;

        private enum State
        {
            Created, Initialized, Disposed
        }

        /// <summary>
        /// Creates a test runner.
        /// </summary>
        /// <param name="testIsolationProvider">The test isolation provider.</param>
        /// <param name="testFrameworkManager">The test framework manager.</param>
        /// <exception cref="ArgumentNullException">Thrown if <paramref name="testIsolationProvider"/>
        /// or <paramref name="testFrameworkManager"/> is null.</exception>
        public DefaultTestRunner(ITestIsolationProvider testIsolationProvider, ITestFrameworkManager testFrameworkManager)
        {
            if (testIsolationProvider == null)
                throw new ArgumentNullException("testIsolationProvider");
            if (testFrameworkManager == null)
                throw new ArgumentNullException("testFrameworkManager");

            this.testIsolationProvider = testIsolationProvider;
            this.testFrameworkManager = testFrameworkManager;

            eventDispatcher = new TestRunnerEventDispatcher();
            state = State.Created;
            extensions = new List<ITestRunnerExtension>();
        }

        /// <summary>
        /// Gets the logger, or null if the test runner has not been initialized.
        /// </summary>
        protected ILogger Logger
        {
            get { return tappedLogger; }
        }

        /// <summary>
        /// Gets the test runner options, or null if the test runner has not been initialized.
        /// </summary>
        protected TestRunnerOptions TestRunnerOptions
        {
            get { return testRunnerOptions; }
        }

        /// <inheritdoc />
        public ITestRunnerEvents Events
        {
            get { return eventDispatcher; }
        }

        /// <inheritdoc />
        public void RegisterExtension(ITestRunnerExtension extension)
        {
            if (extension == null)
                throw new ArgumentNullException("extension");

            ThrowIfDisposed();
            if (state != State.Created)
                throw new InvalidOperationException("Extensions cannot be registered after the test runner has been initialized.");

            // Reject duplicate registrations: same extension type with the same parameters.
            foreach (ITestRunnerExtension currentExtension in extensions)
            {
                if (currentExtension.GetType() == extension.GetType()
                    && currentExtension.Parameters == extension.Parameters)
                    throw new InvalidOperationException(string.Format("There is already an extension of type '{0}' registered with parameters '{1}'.",
                        extension.GetType(), extension.Parameters));
            }

            extensions.Add(extension);
        }

        /// <inheritdoc />
        public void Initialize(TestRunnerOptions testRunnerOptions, ILogger logger, IProgressMonitor progressMonitor)
        {
            if (testRunnerOptions == null)
                throw new ArgumentNullException("testRunnerOptions");
            if (logger == null)
                throw new ArgumentNullException("logger");
            if (progressMonitor == null)
                throw new ArgumentNullException("progressMonitor");

            ThrowIfDisposed();
            if (state != State.Created)
                throw new InvalidOperationException("The test runner has already been initialized.");

            // Work on a private copy so later mutations by the caller have no effect.
            testRunnerOptions = testRunnerOptions.Copy();

            this.testRunnerOptions = testRunnerOptions;
            tappedLogger = new TappedLogger(this, logger);

            int extensionCount = extensions.Count;
            using (progressMonitor.BeginTask("Initializing the test runner.", 1 + extensionCount))
            {
                foreach (ITestRunnerExtension extension in extensions)
                {
                    string extensionName = extension.GetType().Name; // TODO: improve me

                    progressMonitor.SetStatus(String.Format("Installing extension '{0}'.", extensionName));
                    try
                    {
                        // Note: We don't pass the tapped logger to the extensions because the
                        // extensions frequently write to the console a bunch of information we
                        // already have represented in the report.  We are more interested in what
                        // the test driver has to tell us.
                        extension.Install(eventDispatcher, logger);
                        progressMonitor.Worked(1);
                    }
                    catch (Exception ex)
                    {
                        throw new RunnerException(String.Format("Failed to install extension '{0}'.", extensionName), ex);
                    }
                    progressMonitor.SetStatus("");
                }

                try
                {
                    UnhandledExceptionPolicy.ReportUnhandledException += OnUnhandledException;

                    eventDispatcher.NotifyInitializeStarted(new InitializeStartedEventArgs(testRunnerOptions));

                    progressMonitor.SetStatus("Initializing the test isolation context.");

                    // Forward all runner properties to the isolation context.
                    TestIsolationOptions testIsolationOptions = new TestIsolationOptions();
                    GenericCollectionUtils.ForEach(testRunnerOptions.Properties, x => testIsolationOptions.AddProperty(x.Key, x.Value));
                    testIsolationContext = testIsolationProvider.CreateContext(testIsolationOptions, tappedLogger);

                    progressMonitor.Worked(1);
                }
                catch (Exception ex)
                {
                    eventDispatcher.NotifyInitializeFinished(new InitializeFinishedEventArgs(false));

                    UnhandledExceptionPolicy.ReportUnhandledException -= OnUnhandledException;

                    throw new RunnerException("A fatal exception occurred while initializing the test isolation context.", ex);
                }

                state = State.Initialized;
                eventDispatcher.NotifyInitializeFinished(new InitializeFinishedEventArgs(true));
            }
        }

        /// <inheritdoc />
        public Report Explore(TestPackage testPackage, TestExplorationOptions testExplorationOptions,
            IProgressMonitor progressMonitor)
        {
            if (testPackage == null)
                throw new ArgumentNullException("testPackage");
            if (testExplorationOptions == null)
                throw new ArgumentNullException("testExplorationOptions");
            if (progressMonitor == null)
                throw new ArgumentNullException("progressMonitor");

            ThrowIfDisposed();
            if (state != State.Initialized)
                throw new InvalidOperationException("The test runner must be initialized before this operation is performed.");

            // Work on private copies and merge runner-level properties into the package.
            testPackage = testPackage.Copy();
            testExplorationOptions = testExplorationOptions.Copy();
            GenericCollectionUtils.ForEach(testRunnerOptions.Properties, x => testPackage.AddProperty(x.Key, x.Value));

            using (progressMonitor.BeginTask("Exploring the tests.", 10))
            {
                Report report = new Report()
                {
                    TestPackage = new TestPackageData(testPackage),
                    TestModel = new TestModelData()
                };
                var reportLockBox = new LockBox<Report>(report);

                eventDispatcher.NotifyExploreStarted(new ExploreStartedEventArgs(testPackage,
                    testExplorationOptions, reportLockBox));

                bool success;
                using (Listener listener = new Listener(eventDispatcher, tappedLogger, reportLockBox))
                {
                    try
                    {
                        ITestDriver testDriver = testFrameworkManager.GetTestDriver(
                            testPackage.CreateTestFrameworkSelector(), tappedLogger);

                        using (testIsolationContext.BeginBatch(progressMonitor.SetStatus))
                        {
                            testDriver.Explore(testIsolationContext, testPackage, testExplorationOptions,
                                listener, progressMonitor.CreateSubProgressMonitor(10));
                        }

                        success = true;
                    }
                    catch (Exception ex)
                    {
                        success = false;

                        // Record the failure both in the log and as an error annotation so
                        // it is visible in the report itself.
                        tappedLogger.Log(LogSeverity.Error,
                            "A fatal exception occurred while exploring tests.  Possible causes include invalid test runner parameters.",
                            ex);
                        report.TestModel.Annotations.Add(new AnnotationData(AnnotationType.Error,
                            CodeLocation.Unknown, CodeReference.Unknown,
                            "A fatal exception occurred while exploring tests.  See log for details.", null));
                    }
                }

                eventDispatcher.NotifyExploreFinished(new ExploreFinishedEventArgs(success, report));

                return report;
            }
        }

        /// <inheritdoc />
        public Report Run(TestPackage testPackage, TestExplorationOptions testExplorationOptions,
            TestExecutionOptions testExecutionOptions, IProgressMonitor progressMonitor)
        {
            if (testPackage == null)
                throw new ArgumentNullException("testPackage");
            if (testExplorationOptions == null)
                throw new ArgumentNullException("testExplorationOptions");
            if (testExecutionOptions == null)
                throw new ArgumentNullException("testExecutionOptions");
            if (progressMonitor == null)
                throw new ArgumentNullException("progressMonitor");

            ThrowIfDisposed();
            if (state != State.Initialized)
                throw new InvalidOperationException("The test runner must be initialized before this operation is performed.");

            testPackage = testPackage.Copy();
            testExplorationOptions = testExplorationOptions.Copy();
            testExecutionOptions = testExecutionOptions.Copy();
            GenericCollectionUtils.ForEach(testRunnerOptions.Properties, x => testPackage.AddProperty(x.Key, x.Value));

            using (progressMonitor.BeginTask("Running the tests.", 10))
            {
                // Wall-clock duration of the whole run, recorded in the report statistics.
                Stopwatch stopwatch = Stopwatch.StartNew();

                Report report = new Report()
                {
                    TestPackage = new TestPackageData(testPackage),
                    TestModel = new TestModelData(),
                    TestPackageRun = new TestPackageRun()
                    {
                        StartTime = DateTime.Now
                    }
                };
                var reportLockBox = new LockBox<Report>(report);

                eventDispatcher.NotifyRunStarted(new RunStartedEventArgs(testPackage, testExplorationOptions,
                    testExecutionOptions, reportLockBox));

                bool success;
                using (Listener listener = new Listener(eventDispatcher, tappedLogger, reportLockBox))
                {
                    try
                    {
                        ITestDriver testDriver = testFrameworkManager.GetTestDriver(
                            testPackage.CreateTestFrameworkSelector(), tappedLogger);

                        using (testIsolationContext.BeginBatch(progressMonitor.SetStatus))
                        {
                            testDriver.Run(testIsolationContext, testPackage, testExplorationOptions,
                                testExecutionOptions, listener, progressMonitor.CreateSubProgressMonitor(10));
                        }

                        success = true;
                    }
                    catch (Exception ex)
                    {
                        success = false;

                        tappedLogger.Log(LogSeverity.Error,
                            "A fatal exception occurred while running tests.  Possible causes include invalid test runner parameters and stack overflows.",
                            ex);
                        report.TestModel.Annotations.Add(new AnnotationData(AnnotationType.Error,
                            CodeLocation.Unknown, CodeReference.Unknown,
                            "A fatal exception occurred while running tests.  See log for details.", null));
                    }
                    finally
                    {
                        // End time and duration are recorded even on failure.
                        report.TestPackageRun.EndTime = DateTime.Now;
                        report.TestPackageRun.Statistics.Duration = stopwatch.Elapsed.TotalSeconds;
                    }
                }

                eventDispatcher.NotifyRunFinished(new RunFinishedEventArgs(success, report));

                return report;
            }
        }

        /// <inheritdoc />
        public void Dispose(IProgressMonitor progressMonitor)
        {
            if (progressMonitor == null)
                throw new ArgumentNullException("progressMonitor");

            // Disposal is idempotent.
            if (state == State.Disposed)
                return;

            using (progressMonitor.BeginTask("Disposing the test runner.", 10))
            {
                bool success;
                try
                {
                    eventDispatcher.NotifyDisposeStarted(new DisposeStartedEventArgs());

                    if (testIsolationContext != null)
                    {
                        progressMonitor.SetStatus("Disposing the test isolation context.");

                        testIsolationContext.Dispose();
                        testIsolationContext = null;
                    }

                    progressMonitor.Worked(10);
                    success = true;
                }
                catch (Exception ex)
                {
                    // Disposal must not throw; log and report the failure through the event instead.
                    if (tappedLogger != null)
                        tappedLogger.Log(LogSeverity.Warning,
                            "An exception occurred while disposing the test isolation context.  This may indicate that the test isolation context previously encountered another fault from which it could not recover.",
                            ex);
                    success = false;
                }

                state = State.Disposed;
                eventDispatcher.NotifyDisposeFinished(new DisposeFinishedEventArgs(success));

                UnhandledExceptionPolicy.ReportUnhandledException -= OnUnhandledException;
            }
        }

        private void OnUnhandledException(object sender, CorrelatedExceptionEventArgs e)
        {
            // Record without re-logging through the inner logger (the policy already did).
            tappedLogger.RecordLogMessage(LogSeverity.Error, e.GetDescription(), null);
        }

        private void ThrowIfDisposed()
        {
            if (state == State.Disposed)
                throw new ObjectDisposedException(GetType().Name);
        }

        /// <summary>
        /// Consumes test messages from the driver, builds up the report under the
        /// report lock box, and republishes the information as runner events.
        /// Multiple root test steps (one per driver) are merged into a single
        /// synthesized root step.
        /// </summary>
        private sealed class Listener : IMessageSink, IDisposable
        {
            private readonly TestRunnerEventDispatcher eventDispatcher;
            private readonly TappedLogger tappedLogger;
            private readonly LockBox<Report> reportBox;

            private readonly MessageConsumer consumer;

            // Maps test step ids to their in-progress state; null once disposed.
            private Dictionary<string, TestStepState> states;

            private readonly List<string> rootTestStepIds;
            private TestStepData rootTestStepData;
            private TestResult rootTestStepResult;
            private Stopwatch rootTestStepStopwatch;

            public Listener(TestRunnerEventDispatcher eventDispatcher, TappedLogger tappedLogger,
                LockBox<Report> reportBox)
            {
                this.eventDispatcher = eventDispatcher;
                this.tappedLogger = tappedLogger;
                this.reportBox = reportBox;

                states = new Dictionary<string, TestStepState>();
                rootTestStepIds = new List<string>();
                rootTestStepStopwatch = Stopwatch.StartNew();
                rootTestStepResult = new TestResult()
                {
                    Outcome = TestOutcome.Passed
                };

                consumer = new MessageConsumer()
                    .Handle<TestDiscoveredMessage>(HandleTestDiscoveredMessage)
                    .Handle<AnnotationDiscoveredMessage>(HandleAnnotationDiscoveredMessage)
                    .Handle<TestStepStartedMessage>(HandleTestStepStartedMessage)
                    .Handle<TestStepLifecyclePhaseChangedMessage>(HandleTestStepLifecyclePhaseChangedMessage)
                    .Handle<TestStepMetadataAddedMessage>(HandleTestStepMetadataAddedMessage)
                    .Handle<TestStepFinishedMessage>(HandleTestStepFinishedMessage)
                    .Handle<TestStepLogAttachMessage>(HandleTestStepLogAttachMessage)
                    .Handle<TestStepLogStreamWriteMessage>(HandleTestStepLogStreamWriteMessage)
                    .Handle<TestStepLogStreamEmbedMessage>(HandleTestStepLogStreamEmbedMessage)
                    .Handle<TestStepLogStreamBeginSectionBlockMessage>(HandleTestStepLogStreamBeginSectionBlockMessage)
                    .Handle<TestStepLogStreamBeginMarkerBlockMessage>(HandleTestStepLogStreamBeginMarkerBlockMessage)
                    .Handle<TestStepLogStreamEndBlockMessage>(HandleTestStepLogStreamEndBlockMessage)
                    .Handle<LogEntrySubmittedMessage>(HandleLogEntrySubmittedMessage);

                tappedLogger.SetListener(this);
            }

            public void Dispose()
            {
                reportBox.Write(report =>
                {
                    FinishRoot(report);
                    states = null;
                });

                tappedLogger.SetListener(null);
            }

            public void Publish(Message message)
            {
                message.Validate();
                message = message.Normalize();

                eventDispatcher.NotifyMessageReceived(new MessageReceivedEventArgs(message));

                consumer.Consume(message);
            }

            private void HandleTestDiscoveredMessage(TestDiscoveredMessage message)
            {
                reportBox.Write(report =>
                {
                    ThrowIfDisposed();

                    TestData mergedTest = report.TestModel.MergeSubtree(message.ParentTestId, message.Test);

                    eventDispatcher.NotifyTestDiscovered(
                        new TestDiscoveredEventArgs(report, mergedTest));
                });
            }

            private void HandleAnnotationDiscoveredMessage(AnnotationDiscoveredMessage message)
            {
                reportBox.Write(report =>
                {
                    ThrowIfDisposed();

                    report.TestModel.Annotations.Add(message.Annotation);

                    eventDispatcher.NotifyAnnotationDiscovered(
                        new AnnotationDiscoveredEventArgs(report, message.Annotation));
                });
            }

            private void HandleLogEntrySubmittedMessage(LogEntrySubmittedMessage message)
            {
                // Route through the tapped logger so the entry is both logged and recorded.
                tappedLogger.Log(message.Severity, message.Message, message.ExceptionData);
            }

            public void RecordLogEntry(LogSeverity severity, string message, ExceptionData exceptionData)
            {
                reportBox.Write(report =>
                {
                    if (states == null)
                        return; // ignore the message if the listener was disposed before it could be written

                    report.AddLogEntry(new LogEntry()
                    {
                        Severity = severity,
                        Message = message,
                        Details = exceptionData != null ? exceptionData.ToString() : null
                    });

                    eventDispatcher.NotifyLogEntrySubmitted(new LogEntrySubmittedEventArgs(severity, message, exceptionData));
                });
            }

            private void HandleTestStepStartedMessage(TestStepStartedMessage message)
            {
                reportBox.Write(report =>
                {
                    ThrowIfDisposed();

                    if (message.Step.ParentId == null)
                    {
                        // Each driver reports its own root; they are all folded into one.
                        rootTestStepIds.Add(message.Step.Id);

                        if (! IsRootStarted)
                            StartRoot(report, message.Step);
                    }
                    else
                    {
                        TestStepData step = RedirectParentIdOfTestStepData(message.Step);
                        StartStep(report, step);
                    }
                });
            }

            private void HandleTestStepFinishedMessage(TestStepFinishedMessage message)
            {
                reportBox.Write(report =>
                {
                    ThrowIfDisposed();

                    if (rootTestStepIds.Contains(message.StepId))
                    {
                        // Accumulate results of all per-driver roots into the merged root,
                        // which is only finished when the listener is disposed.
                        rootTestStepResult.AssertCount += message.Result.AssertCount;
                        rootTestStepResult.Outcome = rootTestStepResult.Outcome.CombineWith(message.Result.Outcome);
                    }
                    else
                    {
                        FinishStep(report, message.StepId, message.Result);
                    }
                });
            }

            private bool IsRootStarted
            {
                get { return rootTestStepData != null; }
            }

            private void StartRoot(Report report, TestStepData step)
            {
                rootTestStepData = step;
                StartStep(report, step);
            }

            private void FinishRoot(Report report)
            {
                if (rootTestStepData != null)
                {
                    rootTestStepResult.DurationInSeconds = rootTestStepStopwatch.Elapsed.TotalSeconds;
                    FinishStep(report, rootTestStepData.Id, rootTestStepResult);
                    rootTestStepData = null;
                }
            }

            private void StartStep(Report report, TestStepData step)
            {
                TestData testData = GetTestData(report, step.TestId);
                TestStepRun testStepRun = new TestStepRun(step);
                testStepRun.StartTime = DateTime.Now;

                TestStepState parentState;
                if (step.ParentId != null)
                {
                    parentState = GetTestStepState(step.ParentId);
                    parentState.TestStepRun.Children.Add(testStepRun);
                }
                else
                {
                    parentState = null;
                    report.TestPackageRun.RootTestStepRun = testStepRun;
                }

                TestStepState state = new TestStepState(parentState, testData, testStepRun);
                states.Add(step.Id, state);

                eventDispatcher.NotifyTestStepStarted(
                    new TestStepStartedEventArgs(report, testData, testStepRun));
            }

            private void FinishStep(Report report, string stepId, TestResult result)
            {
                TestStepState state = GetTestStepState(stepId);
                state.TestStepRun.EndTime = DateTime.Now;
                state.TestStepRun.Result = result;

                PromoteToTestCaseIfStepAppearsToHaveBlockedChildrenFromRunning(state);

                report.TestPackageRun.Statistics.MergeStepStatistics(state.TestStepRun);

                state.LogWriter.Close();

                eventDispatcher.NotifyTestStepFinished(
                    new TestStepFinishedEventArgs(report, state.TestData, state.TestStepRun));
            }

            /// <summary>
            /// In some situations, we may receive a report that a test step representing an
            /// inner node of the test tree failed and therefore prevented other test cases
            /// from running.  When this happens, we automatically promote the test step to
            /// behave as if it were a test case and report the failure.
            /// </summary>
            /// <remarks>
            /// This is really a hack to make up for the fact that most of the information
            /// presented to users is about test cases rather than test suites and other inner
            /// nodes of the test tree.  Because test cases can be constructed dynamically,
            /// we have a bit of a problem counting and presenting them when inner nodes fail.
            /// I hope someday we come up with a better solution to this issue with our test model.
            /// Perhaps we could introduce a "blocked" status.
            /// -- Jeff.
            /// </remarks>
            private static void PromoteToTestCaseIfStepAppearsToHaveBlockedChildrenFromRunning(TestStepState state)
            {
                if (state.TestStepRun.Result.Outcome.Status != TestStatus.Passed
                    && state.TestStepRun.Children.Count == 0
                    && ! IsTestCaseAncestorOrSelf(state))
                {
                    state.TestStepRun.Step.IsTestCase = true;
                }
            }

            private static bool IsTestCaseAncestorOrSelf(TestStepState state)
            {
                do
                {
                    if (state.TestStepRun.Step.IsTestCase)
                        return true;

                    state = state.Parent;
                }
                while (state != null);

                return false;
            }

            private void HandleTestStepLifecyclePhaseChangedMessage(TestStepLifecyclePhaseChangedMessage message)
            {
                reportBox.Write(report =>
                {
                    ThrowIfDisposed();

                    string stepId = RedirectTestStepId(message.StepId);
                    TestStepState state = GetTestStepState(stepId);

                    eventDispatcher.NotifyTestStepLifecyclePhaseChanged(
                        new TestStepLifecyclePhaseChangedEventArgs(report, state.TestData, state.TestStepRun, message.LifecyclePhase));
                });
            }

            private void HandleTestStepMetadataAddedMessage(TestStepMetadataAddedMessage message)
            {
                reportBox.Write(report =>
                {
                    ThrowIfDisposed();

                    string stepId = RedirectTestStepId(message.StepId);
                    TestStepState state = GetTestStepState(stepId);
                    state.TestStepRun.Step.Metadata.Add(message.MetadataKey, message.MetadataValue);

                    eventDispatcher.NotifyTestStepMetadataAdded(
                        new TestStepMetadataAddedEventArgs(report, state.TestData, state.TestStepRun, message.MetadataKey, message.MetadataValue));
                });
            }

            private void HandleTestStepLogAttachMessage(TestStepLogAttachMessage message)
            {
                reportBox.Write(report =>
                {
                    ThrowIfDisposed();

                    string stepId = RedirectTestStepId(message.StepId);
                    TestStepState state = GetTestStepState(stepId);
                    state.LogWriter.Attach(message.Attachment);

                    eventDispatcher.NotifyTestStepLogAttach(
                        new TestStepLogAttachEventArgs(report, state.TestData, state.TestStepRun, message.Attachment));
                });
            }

            private void HandleTestStepLogStreamWriteMessage(TestStepLogStreamWriteMessage message)
            {
                reportBox.Write(report =>
                {
                    ThrowIfDisposed();

                    string stepId = RedirectTestStepId(message.StepId);
                    TestStepState state = GetTestStepState(stepId);
                    state.LogWriter[message.StreamName].Write(message.Text);

                    eventDispatcher.NotifyTestStepLogStreamWrite(
                        new TestStepLogStreamWriteEventArgs(report, state.TestData, state.TestStepRun, message.StreamName, message.Text));
                });
            }

            private void HandleTestStepLogStreamEmbedMessage(TestStepLogStreamEmbedMessage message)
            {
                reportBox.Write(report =>
                {
                    ThrowIfDisposed();

                    string stepId = RedirectTestStepId(message.StepId);
                    TestStepState state = GetTestStepState(stepId);
                    state.LogWriter[message.StreamName].EmbedExisting(message.AttachmentName);

                    eventDispatcher.NotifyTestStepLogStreamEmbed(
                        new TestStepLogStreamEmbedEventArgs(report, state.TestData, state.TestStepRun, message.StreamName, message.AttachmentName));
                });
            }

            private void HandleTestStepLogStreamBeginSectionBlockMessage(TestStepLogStreamBeginSectionBlockMessage message)
            {
                reportBox.Write(report =>
                {
                    ThrowIfDisposed();

                    string stepId = RedirectTestStepId(message.StepId);
                    TestStepState state = GetTestStepState(stepId);
                    state.LogWriter[message.StreamName].BeginSection(message.SectionName);

                    eventDispatcher.NotifyTestStepLogStreamBeginSectionBlock(
                        new TestStepLogStreamBeginSectionBlockEventArgs(report, state.TestData, state.TestStepRun, message.StreamName, message.SectionName));
                });
            }

            private void HandleTestStepLogStreamBeginMarkerBlockMessage(TestStepLogStreamBeginMarkerBlockMessage message)
            {
                reportBox.Write(report =>
                {
                    ThrowIfDisposed();

                    string stepId = RedirectTestStepId(message.StepId);
                    TestStepState state = GetTestStepState(stepId);
                    state.LogWriter[message.StreamName].BeginMarker(message.Marker);

                    eventDispatcher.NotifyTestStepLogStreamBeginMarkerBlock(
                        new TestStepLogStreamBeginMarkerBlockEventArgs(report, state.TestData, state.TestStepRun, message.StreamName, message.Marker));
                });
            }

            private void HandleTestStepLogStreamEndBlockMessage(TestStepLogStreamEndBlockMessage message)
            {
                reportBox.Write(report =>
                {
                    ThrowIfDisposed();

                    string stepId = RedirectTestStepId(message.StepId);
                    TestStepState state = GetTestStepState(stepId);
                    state.LogWriter[message.StreamName].End();

                    eventDispatcher.NotifyTestStepLogStreamEndBlock(
                        new TestStepLogStreamEndBlockEventArgs(report, state.TestData, state.TestStepRun, message.StreamName));
                });
            }

            private static TestData GetTestData(Report report, string testId)
            {
                TestData testData = report.TestModel.GetTestById(testId);
                if (testData == null)
                    throw new InvalidOperationException("The test id was not recognized.  It may belong to an earlier test run that has since completed.");
                return testData;
            }

            private TestStepState GetTestStepState(string testStepId)
            {
                TestStepState testStepData;
                if (!states.TryGetValue(testStepId, out testStepData))
                    throw new InvalidOperationException("The test step id was not recognized.  It may belong to an earlier test run that has since completed.");
                return testStepData;
            }

            // If the step's parent is one of the per-driver roots, re-parent it under
            // the single merged root step.
            private TestStepData RedirectParentIdOfTestStepData(TestStepData step)
            {
                if (step.ParentId != null && rootTestStepIds.Contains(step.ParentId))
                {
                    TestStepData targetStep = new TestStepData(step.Id, step.Name, step.FullName, step.TestId)
                    {
                        CodeLocation = step.CodeLocation,
                        CodeReference = step.CodeReference,
                        IsDynamic = step.IsDynamic,
                        IsPrimary = step.IsPrimary,
                        IsTestCase = step.IsTestCase,
                        Metadata = step.Metadata,
                        ParentId = rootTestStepData.Id
                    };
                    return targetStep;
                }

                return step;
            }

            // Maps any per-driver root step id to the merged root's id.
            private string RedirectTestStepId(string stepId)
            {
                return rootTestStepIds.Contains(stepId) ? rootTestStepData.Id : stepId;
            }

            private void ThrowIfDisposed()
            {
                if (states == null)
                    throw new ObjectDisposedException(GetType().Name);
            }

            private sealed class TestStepState
            {
                public readonly TestStepState Parent;
                public readonly TestData TestData;
                public readonly TestStepRun TestStepRun;
                public readonly StructuredDocumentWriter LogWriter;

                public TestStepState(TestStepState parent, TestData testData, TestStepRun testStepRun)
                {
                    Parent = parent;
                    TestData = testData;
                    TestStepRun = testStepRun;

                    LogWriter = new StructuredDocumentWriter();
                    testStepRun.TestLog = LogWriter.Document;
                }
            }
        }

        /// <summary>
        /// Wraps the user-supplied logger so that log messages are also recorded
        /// into the report via the current <see cref="Listener"/>, when one is attached.
        /// </summary>
        private sealed class TappedLogger : BaseLogger
        {
            private readonly DefaultTestRunner runner;
            private readonly ILogger inner;
            private volatile Listener listener;

            public TappedLogger(DefaultTestRunner runner, ILogger inner)
            {
                this.runner = runner;
                this.inner = inner;
            }

            public void SetListener(Listener listener)
            {
                this.listener = listener;
            }

            protected override void LogImpl(LogSeverity severity, string message, ExceptionData exceptionData)
            {
                Handle(severity, message, exceptionData, true);
            }

            public void RecordLogMessage(LogSeverity severity, string message, ExceptionData exceptionData)
            {
                Handle(severity, message, exceptionData, false);
            }

            private void Handle(LogSeverity severity, string message, ExceptionData exceptionData, bool log)
            {
                message = NormalizationUtils.NormalizeXmlText(message);
                if (exceptionData != null)
                    exceptionData = exceptionData.Normalize();

                if (log)
                    inner.Log(severity, message, exceptionData);

                // Note: We avoid taking any locks here because it would be too easy to end up
                // in a deadlock between logging and reporting code.  Instead we compensate in
                // the listener by dropping the log message if the listener has been disposed.
                // The volatile field is read exactly once into a local so the null check and
                // the call cannot race with SetListener(null).
                Listener currentListener = listener;
                if (currentListener != null)
                    currentListener.RecordLogEntry(severity, message, exceptionData);
            }
        }
    }
}
using System;
using System.Collections.Specialized;
using System.ComponentModel;
using System.Reflection;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Media;
using Microsoft.Phone.Controls;

namespace Xamarin.Forms.Platform.WinPhone
{
    /// <summary>
    /// Windows Phone renderer for <see cref="Picker"/>. Wraps a <c>FormsListPicker</c>
    /// (a Microsoft.Phone.Controls.ListPicker derivative) inside a Grid.
    /// A hidden placeholder item is inserted at native index 0, so every native
    /// index is the Forms index plus one throughout this class.
    /// </summary>
    public class PickerRenderer : ViewRenderer<Picker, FrameworkElement>
    {
        // Guards against feeding renderer-initiated item/selection changes back
        // into the element while UpdateItems() is syncing the native control.
        bool _isChanging;
        FormsListPicker _listPicker;
        // Foreground brush captured from the control template once loaded; used
        // to restore the default text color when Element.TextColor is default.
        Brush _defaultBrush;

        /// <summary>
        /// Creates the native picker, wires collection/selection/mode events,
        /// and pushes the element's initial state into the control.
        /// </summary>
        protected override void OnElementChanged(ElementChangedEventArgs<Picker> e)
        {
            _listPicker = new FormsListPicker();

            UpdateAlignment();
            UpdateIsEnabled();

            base.OnElementChanged(e);

            // NOTE(review): this unsubscribes from Element.Items (the NEW element's
            // collection) when an old element exists; it looks like it was intended
            // to unsubscribe from e.OldElement.Items — confirm against callers.
            if (e.OldElement != null)
                ((ObservableList<string>)Element.Items).CollectionChanged -= ItemsCollectionChanged;

            ((ObservableList<string>)Element.Items).CollectionChanged += ItemsCollectionChanged;

            _listPicker.ItemTemplate = (System.Windows.DataTemplate)System.Windows.Application.Current.Resources["PickerItemTemplate"];
            _listPicker.FullModeItemTemplate = (System.Windows.DataTemplate)System.Windows.Application.Current.Resources["PickerFullItemTemplate"];
            _listPicker.ExpansionMode = ExpansionMode.FullScreenOnly;

            // Invisible placeholder at native index 0; all real items live at index i + 1.
            _listPicker.Items.Add(new ItemViewModel(" ") { MaxHeight = 0 });

            _listPicker.ListPickerModeChanged += ListPickerModeChanged;

            _listPicker.Loaded += (sender, args) =>
            {
                // The defaults from the control template won't be available
                // right away; we have to wait until after the template has been applied
                _defaultBrush = _listPicker.Foreground;
                UpdateTextColor();
            };

            var grid = new System.Windows.Controls.Grid { Children = { _listPicker }, MaxWidth = Device.Info.PixelScreenSize.Width };
            SetNativeControl(grid);
            UpdatePicker();

            _listPicker.SelectionChanged += PickerSelectionChanged;
        }

        /// <summary>
        /// Routes element property changes to the matching native update helper.
        /// </summary>
        protected override void OnElementPropertyChanged(object sender, PropertyChangedEventArgs e)
        {
            base.OnElementPropertyChanged(sender, e);

            if (e.PropertyName == Picker.TitleProperty.PropertyName)
            {
                _listPicker.FullModeHeader = Element.Title;
            }
            else if (e.PropertyName == VisualElement.IsEnabledProperty.PropertyName)
            {
                UpdateIsEnabled();
                UpdateTextColor();
            }
            else if (e.PropertyName == Picker.SelectedIndexProperty.PropertyName)
            {
                // +1 skips the hidden placeholder at native index 0.
                if (Element.SelectedIndex >= 0 && Element.SelectedIndex < Element.Items.Count)
                    _listPicker.SelectedIndex = Element.SelectedIndex + 1;
            }
            else if (e.PropertyName == View.HorizontalOptionsProperty.PropertyName)
            {
                UpdateAlignment();
            }
            else if (e.PropertyName == Picker.TextColorProperty.PropertyName)
            {
                UpdateTextColor();
            }
        }

        protected override void OnGotFocus(object sender, RoutedEventArgs args)
        {
            // Do nothing. ListPickerModeChanged is handling the IsFocusProperty setter
            // Required because FrameworkElement.GotFocus and FrameworkElement.LostFocus () are fired by ListPicker.Open ()
        }

        protected override void OnLostFocus(object sender, RoutedEventArgs args)
        {
            // Do nothing. ListPickerModeChanged is handling the IsFocusProperty setter
            // Required because FrameworkElement.GotFocus and FrameworkElement.LostFocus () are fired by ListPicker.Open ()
        }

        /// <summary>Re-applies the enabled state whenever the native widget is refreshed.</summary>
        protected override void UpdateNativeWidget()
        {
            base.UpdateNativeWidget();
            UpdateIsEnabled();
        }

        /// <summary>
        /// Opens or closes the full-screen picker page in response to a focus
        /// request from the Forms element.
        /// </summary>
        internal override void OnModelFocusChangeRequested(object sender, VisualElement.FocusRequestArgs args)
        {
            if (Control == null)
                return;

            if (args.Focus)
                args.Result = OpenPickerPage();
            else
            {
                args.Result = ClosePickerPage();
                UnfocusControl(_listPicker);
            }
        }

        /// <summary>
        /// Dismisses the full-screen picker page. ListPicker exposes no public
        /// close API, so this reaches into its private "_listPickerPage" field
        /// and invokes the non-public ClosePickerPage method via reflection.
        /// Fragile against toolkit updates — the member names are hard-coded.
        /// </summary>
        bool ClosePickerPage()
        {
            FieldInfo pickerPageField = typeof(ListPicker).GetField("_listPickerPage", BindingFlags.NonPublic | BindingFlags.Instance);
            var pickerPage = pickerPageField.GetValue(Control) as ListPickerPage;
            typeof(ListPickerPage).InvokeMember("ClosePickerPage", BindingFlags.NonPublic | BindingFlags.Instance | BindingFlags.InvokeMethod, Type.DefaultBinder, pickerPage, null);
            return true;
        }

        void ItemsCollectionChanged(object sender, NotifyCollectionChangedEventArgs e)
        {
            UpdateItems();
        }

        /// <summary>
        /// Mirrors the ListPicker's Normal/Full mode transitions into the
        /// element's IsFocused property (Full = focused, Normal = unfocused).
        /// </summary>
        void ListPickerModeChanged(object sender, DependencyPropertyChangedEventArgs e)
        {
            if (e.OldValue == null || e.NewValue == null)
                return;

            var oldVal = (ListPickerMode)e.OldValue;
            var newVal = (ListPickerMode)e.NewValue;

            if (oldVal == ListPickerMode.Normal && newVal == ListPickerMode.Full)
            {
                // Picker Page is now showing
                ((IElementController)Element).SetValueFromRenderer(VisualElement.IsFocusedPropertyKey, true);
            }
            else if (oldVal == ListPickerMode.Full && newVal == ListPickerMode.Normal)
            {
                // PickerPage is now dismissed
                ((IElementController)Element).SetValueFromRenderer(VisualElement.IsFocusedPropertyKey, false);
            }
        }

        /// <summary>Opens the full-screen picker page; returns whether it opened.</summary>
        bool OpenPickerPage()
        {
            bool result = _listPicker.Open();
            if (result)
                return true;
            return false;
        }

        /// <summary>
        /// Pushes the user's native selection back to the element, converting
        /// from native index (placeholder-offset) to Forms index.
        /// </summary>
        void PickerSelectionChanged(object sender, SelectionChangedEventArgs e)
        {
            // Ignore changes we caused ourselves while syncing items.
            if (_isChanging)
                return;

            var picker = (ListPicker)sender;

            // initializing picker
            if (picker.SelectedIndex == -1)
                return;

            int elementSelectedIndex = picker.SelectedIndex - 1;
            ((IElementController)Element).SetValueFromRenderer(Picker.SelectedIndexProperty, elementSelectedIndex);
        }

        void UpdateAlignment()
        {
            if (Element.HorizontalOptions.Alignment != LayoutAlignment.Fill)
                _listPicker.HorizontalAlignment = HorizontalAlignment.Left;
        }

        void UpdateIsEnabled()
        {
            if (_listPicker != null)
                _listPicker.IsEnabled = Element.IsEnabled;
        }

        /// <summary>
        /// Synchronizes the native item list with Element.Items, keeping the
        /// hidden placeholder at index 0 (hence the "+ 1" counts throughout).
        /// </summary>
        void UpdateItems()
        {
            // suppress notification of non-user generated events (e.g. adding\syncing list values)
            _isChanging = true;

            FormsListPicker picker = _listPicker;

            // add/remove slots from control to match element
            while (picker.Items.Count < Element.Items.Count + 1)
                picker.Items.Add(new ItemViewModel(string.Empty));

            while (picker.Items.Count > Element.Items.Count + 1)
                picker.Items.RemoveAt(picker.Items.Count - 1);

            // update all control values to match element values
            for (var i = 0; i < Element.Items.Count; i++)
            {
                var item = (ItemViewModel)picker.Items[i + 1];
                if (item.Data == Element.Items[i])
                    continue;
                item.Data = Element.Items[i];
            }

            picker.SelectedIndex = Element.SelectedIndex + 1;
            _isChanging = false;
        }

        /// <summary>Applies the element's title, items, and selection to the native control.</summary>
        void UpdatePicker()
        {
            _listPicker.FullModeHeader = Element.Title;
            UpdateItems();
            _listPicker.SelectedIndex = Element.SelectedIndex + 1;
        }

        /// <summary>
        /// Applies Element.TextColor; falls back to the template's default brush
        /// (captured in the Loaded handler) when the color is unset. Skipped while
        /// disabled so the disabled-state styling is not overridden.
        /// </summary>
        void UpdateTextColor()
        {
            if (!_listPicker.IsEnabled)
            {
                return;
            }

            Color color = Element.TextColor;
            _listPicker.Foreground = color.IsDefault ? (_defaultBrush ?? color.ToBrush()) : color.ToBrush();
        }

        /// <summary>
        /// Item wrapper with change notification so edits to Data/MaxHeight/Opacity
        /// propagate to the ListPicker's item templates.
        /// </summary>
        class ItemViewModel : INotifyPropertyChanged
        {
            string _data;
            int _maxHeight;
            float _opacity;

            public ItemViewModel(string item)
            {
                _opacity = 1;
                _data = item;
                _maxHeight = int.MaxValue;
            }

            public string Data
            {
                get { return _data; }
                set
                {
                    if (value == _data)
                        return;
                    _data = value;
                    PropertyChanged(this, new PropertyChangedEventArgs("Data"));
                }
            }

            // MaxHeight of 0 hides the item; used for the index-0 placeholder.
            public int MaxHeight
            {
                get { return _maxHeight; }
                set
                {
                    if (value == _maxHeight)
                        return;
                    _maxHeight = value;
                    PropertyChanged(this, new PropertyChangedEventArgs("MaxHeight"));
                }
            }

            public float Opacity
            {
                get { return _opacity; }
                set
                {
                    if (value == _opacity)
                        return;
                    _opacity = value;
                    PropertyChanged(this, new PropertyChangedEventArgs("Opacity"));
                }
            }

            // Initialized with an empty delegate so raisers never need a null check.
            public event PropertyChangedEventHandler PropertyChanged = delegate { };
        }
    }
}
//------------------------------------------------------------------------------
// <copyright file="OverwriteTest.cs" company="Microsoft">
//      Copyright (c) Microsoft Corporation
// </copyright>
//------------------------------------------------------------------------------
namespace DMLibTest.Cases
{
    using DMLibTestCodeGen;
    using Microsoft.VisualStudio.TestTools.UnitTesting;
    using Microsoft.WindowsAzure.Storage.Auth;
    using Microsoft.WindowsAzure.Storage.DataMovement;
    using MS.Test.Common.MsTestLib;
    using System;
    using System.Threading;

    /// <summary>
    /// Tests the Data Movement library's overwrite behavior: per-file
    /// ShouldOverwriteCallback decisions (single and directory transfers)
    /// and the unconditional TransferContext.ForceOverwrite callback.
    /// </summary>
    [MultiDirectionTestClass]
    public class OverwriteTest : DMLibTestBase
#if DNXCORE50
        , IDisposable
#endif
    {
        #region Initialization and cleanup methods
#if DNXCORE50
        // On CoreCLR the MSTest lifecycle attributes are unavailable, so the
        // constructor/Dispose pair stands in for TestInitialize/TestCleanup.
        public OverwriteTest()
        {
            MyTestInitialize();
        }

        public void Dispose()
        {
            Dispose(true);
        }

        protected virtual void Dispose(bool disposing)
        {
            MyTestCleanup();
        }
#endif

        [ClassInitialize()]
        public static void MyClassInitialize(TestContext testContext)
        {
            Test.Info("Class Initialize: OverwriteTest");
            DMLibTestBase.BaseClassInitialize(testContext);
        }

        [ClassCleanup()]
        public static void MyClassCleanup()
        {
            DMLibTestBase.BaseClassCleanup();
        }

        [TestInitialize()]
        public void MyTestInitialize()
        {
            base.BaseTestInitialize();
        }

        [TestCleanup()]
        public void MyTestCleanup()
        {
            base.BaseTestCleanup();
        }
        #endregion

        /// <summary>
        /// Single-file transfers with a per-file overwrite callback:
        /// dest exists + callback Yes -> overwritten with source content;
        /// dest exists + callback No  -> kept, one skip exception surfaces;
        /// dest missing + callback Yes -> transferred.
        /// Stream destinations have no pre-existing files, so everything transfers.
        /// </summary>
        [TestCategory(Tag.Function)]
        [DMLibTestMethodSet(DMLibTestMethodSet.AllValidDirection)]
        public void OverwriteDestination()
        {
            string destExistYName = "destExistY";
            string destExistNName = "destExistN";
            string destNotExistYName = "destNotExistY";

            // Source holds all three files; destination pre-seeds only the two "exist" ones.
            DMLibDataInfo sourceDataInfo = new DMLibDataInfo(string.Empty);
            DMLibDataHelper.AddOneFileInBytes(sourceDataInfo.RootNode, destExistYName, 1024);
            DMLibDataHelper.AddOneFileInBytes(sourceDataInfo.RootNode, destExistNName, 1024);
            DMLibDataHelper.AddOneFileInBytes(sourceDataInfo.RootNode, destNotExistYName, 1024);

            DMLibDataInfo destDataInfo = new DMLibDataInfo(string.Empty);
            DMLibDataHelper.AddOneFileInBytes(destDataInfo.RootNode, destExistYName, 1024);
            DMLibDataHelper.AddOneFileInBytes(destDataInfo.RootNode, destExistNName, 1024);

            var options = new TestExecutionOptions<DMLibDataInfo>();
            if (DMLibTestContext.DestType != DMLibDataType.Stream)
            {
                options.DestTransferDataInfo = destDataInfo;
            }

            // Assign each file its own context with the matching Yes/No overwrite callback.
            options.TransferItemModifier = (fileNode, transferItem) =>
            {
                string fileName = fileNode.Name;

                TransferContext transferContext = new SingleTransferContext();

                if (fileName.Equals(destExistYName))
                {
                    transferContext.ShouldOverwriteCallback = DMLibInputHelper.GetDefaultOverwiteCallbackY();
                }
                else if (fileName.Equals(destExistNName))
                {
                    transferContext.ShouldOverwriteCallback = DMLibInputHelper.GetDefaultOverwiteCallbackN();
                }
                else if (fileName.Equals(destNotExistYName))
                {
                    transferContext.ShouldOverwriteCallback = DMLibInputHelper.GetDefaultOverwiteCallbackY();
                }

                transferItem.TransferContext = transferContext;
            };

            var result = this.ExecuteTestCase(sourceDataInfo, options);

            // Expected: overwritten files carry source content; the "No" file keeps dest content.
            DMLibDataInfo expectedDataInfo = new DMLibDataInfo(string.Empty);
            if (DMLibTestContext.DestType != DMLibDataType.Stream)
            {
                expectedDataInfo.RootNode.AddFileNode(sourceDataInfo.RootNode.GetFileNode(destExistYName));
                expectedDataInfo.RootNode.AddFileNode(destDataInfo.RootNode.GetFileNode(destExistNName));
                expectedDataInfo.RootNode.AddFileNode(sourceDataInfo.RootNode.GetFileNode(destNotExistYName));
            }
            else
            {
                expectedDataInfo = sourceDataInfo;
            }

            // Verify transfer result
            Test.Assert(DMLibDataHelper.Equals(expectedDataInfo, result.DataInfo), "Verify transfer result.");

            // Verify exception
            if (DMLibTestContext.DestType != DMLibDataType.Stream)
            {
                Test.Assert(result.Exceptions.Count == 1, "Verify there's only one exceptions.");
                TransferException transferException = result.Exceptions[0] as TransferException;
                Test.Assert(transferException != null, "Verify the exception is a TransferException");

                VerificationHelper.VerifyTransferException(transferException, TransferErrorCode.NotOverwriteExistingDestination, "Skiped file", destExistNName);
            }
        }

        /// <summary>
        /// Same overwrite matrix as <see cref="OverwriteDestination"/> but as one
        /// recursive directory transfer sharing a single DirectoryTransferContext;
        /// skips surface via the FileSkipped event rather than thrown exceptions.
        /// </summary>
        [TestCategory(Tag.Function)]
        [DMLibTestMethodSet(DMLibTestMethodSet.DirAllValidDirection)]
        public void DirectoryOverwriteDestination()
        {
            string destExistYName = "destExistY";
            string destExistNName = "destExistN";
            string destNotExistYName = "destNotExistY";

            DMLibDataInfo sourceDataInfo = new DMLibDataInfo(string.Empty);
            DMLibDataHelper.AddOneFileInBytes(sourceDataInfo.RootNode, destExistYName, 1024);
            DMLibDataHelper.AddOneFileInBytes(sourceDataInfo.RootNode, destExistNName, 1024);
            DMLibDataHelper.AddOneFileInBytes(sourceDataInfo.RootNode, destNotExistYName, 1024);

            DMLibDataInfo destDataInfo = new DMLibDataInfo(string.Empty);
            DMLibDataHelper.AddOneFileInBytes(destDataInfo.RootNode, destExistYName, 1024);
            DMLibDataHelper.AddOneFileInBytes(destDataInfo.RootNode, destExistNName, 1024);

            TransferContext transferContext = new DirectoryTransferContext();

            // Refuse to overwrite only the "destExistN" file; allow everything else.
            transferContext.ShouldOverwriteCallback = (source, destination) =>
            {
                if (DMLibTestHelper.TransferInstanceToString(source).EndsWith(destExistNName))
                {
                    return false;
                }
                else
                {
                    return true;
                }
            };

            int skipCount = 0;
            int successCount = 0;
            transferContext.FileSkipped += (object sender, TransferEventArgs args) =>
            {
                Interlocked.Increment(ref skipCount);
                TransferException transferException = args.Exception as TransferException;
                Test.Assert(transferException != null, "Verify the exception is a TransferException");

                VerificationHelper.VerifyTransferException(transferException, TransferErrorCode.NotOverwriteExistingDestination, "Skiped file", destExistNName);
            };

            transferContext.FileTransferred += (object sender, TransferEventArgs args) =>
            {
                Interlocked.Increment(ref successCount);
            };

            var options = new TestExecutionOptions<DMLibDataInfo>();
            options.IsDirectoryTransfer = true;

            if (DMLibTestContext.DestType != DMLibDataType.Stream)
            {
                options.DestTransferDataInfo = destDataInfo;
            }

            options.TransferItemModifier = (fileNode, transferItem) =>
            {
                transferItem.TransferContext = transferContext;

                // Options type varies per direction; dynamic lets us set Recursive generically.
                dynamic transferOptions = DefaultTransferDirectoryOptions;
                transferOptions.Recursive = true;
                transferItem.Options = transferOptions;
            };

            var result = this.ExecuteTestCase(sourceDataInfo, options);

            DMLibDataInfo expectedDataInfo = new DMLibDataInfo(string.Empty);
            if (DMLibTestContext.DestType != DMLibDataType.Stream)
            {
                expectedDataInfo.RootNode.AddFileNode(sourceDataInfo.RootNode.GetFileNode(destExistYName));
                expectedDataInfo.RootNode.AddFileNode(destDataInfo.RootNode.GetFileNode(destExistNName));
                expectedDataInfo.RootNode.AddFileNode(sourceDataInfo.RootNode.GetFileNode(destNotExistYName));
            }
            else
            {
                expectedDataInfo = sourceDataInfo;
            }

            // Verify transfer result
            Test.Assert(DMLibDataHelper.Equals(expectedDataInfo, result.DataInfo), "Verify transfer result.");

            // Verify exception
            if (DMLibTestContext.DestType != DMLibDataType.Stream)
            {
                VerificationHelper.VerifySingleTransferStatus(result, 2, 1, 0, 1024 * 2);
                Test.Assert(successCount == 2, "Verify success transfers");
                Test.Assert(skipCount == 1, "Verify skipped transfer");
            }
            else
            {
                VerificationHelper.VerifySingleTransferStatus(result, 3, 0, 0, 1024 * 3);
                Test.Assert(successCount == 3, "Very all transfers are success");
                Test.Assert(skipCount == 0, "Very no transfer is skipped");
            }
        }

        /// <summary>
        /// Verifies TransferContext.ForceOverwrite overwrites without asking:
        /// both files (existing and new at the destination) transfer, zero skips.
        /// Cloud destinations use a write-only SAS (plus read when async copy) to
        /// prove no existence check / read access is needed when forcing.
        /// </summary>
        [TestCategory(Tag.Function)]
        [DMLibTestMethodSet(DMLibTestMethodSet.AllValidDirection)]
        public void ForceOverwriteTest()
        {
            string destExistName = "destExist";
            string destNotExistName = "destNotExist";

            DMLibDataInfo sourceDataInfo = new DMLibDataInfo(string.Empty);
            DMLibDataHelper.AddOneFileInBytes(sourceDataInfo.RootNode, destExistName, 1024);
            DMLibDataHelper.AddOneFileInBytes(sourceDataInfo.RootNode, destNotExistName, 1024);

            DMLibDataInfo destDataInfo = new DMLibDataInfo(string.Empty);
            DMLibDataHelper.AddOneFileInBytes(destDataInfo.RootNode, destExistName, 1024);

            TransferContext transferContext = new SingleTransferContext();
            transferContext.ShouldOverwriteCallback = TransferContext.ForceOverwrite;

            int skipCount = 0;
            int successCount = 0;
            transferContext.FileSkipped += (object sender, TransferEventArgs args) =>
            {
                Interlocked.Increment(ref skipCount);
            };

            transferContext.FileTransferred += (object sender, TransferEventArgs args) =>
            {
                Interlocked.Increment(ref successCount);
            };

            var options = new TestExecutionOptions<DMLibDataInfo>();
            if (DMLibTestContext.DestType != DMLibDataType.Stream)
            {
                options.DestTransferDataInfo = destDataInfo;
            }

            if (IsCloudService(DMLibTestContext.DestType))
            {
                SharedAccessPermissions permissions;
                if (DMLibTestContext.IsAsync)
                {
                    // Async (service-side) copy also needs Read to poll copy status.
                    permissions = SharedAccessPermissions.Write | SharedAccessPermissions.Read;
                }
                else
                {
                    permissions = SharedAccessPermissions.Write;
                }

                // One-day SAS scoped to the minimum permissions required.
                StorageCredentials destSAS = new StorageCredentials(DestAdaptor.GenerateSAS(permissions, (int)new TimeSpan(1, 0, 0, 0).TotalSeconds));
                options.DestCredentials = destSAS;
            }

            options.TransferItemModifier = (fileNode, transferItem) =>
            {
                transferItem.TransferContext = transferContext;
            };

            var result = this.ExecuteTestCase(sourceDataInfo, options);

            // Verify transfer result
            Test.Assert(DMLibDataHelper.Equals(sourceDataInfo, result.DataInfo), "Verify transfer result.");
            Test.Assert(successCount == 2, "Verify success transfers");
            Test.Assert(skipCount == 0, "Verify skipped transfer");
        }

        /// <summary>
        /// Directory-transfer variant of <see cref="ForceOverwriteTest"/>:
        /// recursive transfer with ForceOverwrite must transfer every file
        /// (2 successes, 0 skips) even where the destination already exists.
        /// </summary>
        [TestCategory(Tag.Function)]
        [DMLibTestMethodSet(DMLibTestMethodSet.DirAllValidDirection)]
        public void DirectoryForceOverwriteTest()
        {
            string destExistName = "destExist";
            string destNotExistName = "destNotExist";

            DMLibDataInfo sourceDataInfo = new DMLibDataInfo(string.Empty);
            DMLibDataHelper.AddOneFileInBytes(sourceDataInfo.RootNode, destExistName, 1024);
            DMLibDataHelper.AddOneFileInBytes(sourceDataInfo.RootNode, destNotExistName, 1024);

            DMLibDataInfo destDataInfo = new DMLibDataInfo(string.Empty);
            DMLibDataHelper.AddOneFileInBytes(destDataInfo.RootNode, destExistName, 1024);

            TransferContext transferContext = new DirectoryTransferContext();
            transferContext.ShouldOverwriteCallback = TransferContext.ForceOverwrite;

            int skipCount = 0;
            int successCount = 0;
            transferContext.FileSkipped += (object sender, TransferEventArgs args) =>
            {
                Interlocked.Increment(ref skipCount);
            };

            transferContext.FileTransferred += (object sender, TransferEventArgs args) =>
            {
                Interlocked.Increment(ref successCount);
            };

            var options = new TestExecutionOptions<DMLibDataInfo>();
            options.IsDirectoryTransfer = true;

            if (DMLibTestContext.DestType != DMLibDataType.Stream)
            {
                options.DestTransferDataInfo = destDataInfo;
            }

            if (IsCloudService(DMLibTestContext.DestType))
            {
                SharedAccessPermissions permissions;
                if (DMLibTestContext.IsAsync)
                {
                    permissions = SharedAccessPermissions.Write | SharedAccessPermissions.Read;
                }
                else
                {
                    permissions = SharedAccessPermissions.Write;
                }

                StorageCredentials destSAS = new StorageCredentials(DestAdaptor.GenerateSAS(permissions, (int)new TimeSpan(1, 0, 0, 0).TotalSeconds));
                options.DestCredentials = destSAS;
            }

            options.TransferItemModifier = (fileNode, transferItem) =>
            {
                transferItem.TransferContext = transferContext;

                dynamic transferOptions = DefaultTransferDirectoryOptions;
                transferOptions.Recursive = true;
                transferItem.Options = transferOptions;
            };

            var result = this.ExecuteTestCase(sourceDataInfo, options);

            // Verify transfer result
            Test.Assert(DMLibDataHelper.Equals(sourceDataInfo, result.DataInfo), "Verify transfer result.");
            VerificationHelper.VerifySingleTransferStatus(result, 2, 0, 0, 1024 * 2);
            Test.Assert(successCount == 2, "Verify success transfers");
            Test.Assert(skipCount == 0, "Verify skipped transfer");
        }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System;
using System.Collections.Generic;
using System.IO;
using System.Reflection;
using System.Text;
using System.Xml;
using Microsoft.Build.Evaluation;
using Microsoft.Build.Construction;
using Microsoft.Build.Execution;
using Microsoft.Build.Shared;
using InvalidProjectFileException = Microsoft.Build.Exceptions.InvalidProjectFileException;
using Xunit;

namespace Microsoft.Build.UnitTests.OM.Construction
{
    /// <summary>
    /// Tests for the ProjectImportElement class
    /// </summary>
    public class ProjectImportElement_Tests
    {
        /// <summary>
        /// Read project with no imports
        /// </summary>
        [Fact]
        public void ReadNone()
        {
            ProjectRootElement project = ProjectRootElement.Create();

            // An enumerator over an empty Imports collection yields no Current element.
            Assert.Null(project.Imports.GetEnumerator().Current);
        }

        /// <summary>
        /// Read import with no project attribute
        /// </summary>
        [Fact]
        public void ReadInvalidMissingProject()
        {
            // The Project attribute is mandatory on <Import>; parsing must fail.
            Assert.Throws<InvalidProjectFileException>(() =>
            {
                string content = @"
                    <Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003' >
                        <Import/>
                    </Project>
                ";

                ProjectRootElement.Create(XmlReader.Create(new StringReader(content)));
            }
            );
        }

        /// <summary>
        /// Read import with empty project attribute
        /// </summary>
        [Fact]
        public void ReadInvalidEmptyProject()
        {
            // An empty Project attribute is as invalid as a missing one.
            Assert.Throws<InvalidProjectFileException>(() =>
            {
                string content = @"
                    <Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003' >
                        <Import Project=''/>
                    </Project>
                ";

                ProjectRootElement.Create(XmlReader.Create(new StringReader(content)));
            }
            );
        }

        /// <summary>
        /// Read import with unexpected attribute
        /// </summary>
        [Fact]
        public void ReadInvalidAttribute()
        {
            // Unknown attributes on <Import> are rejected by the parser.
            Assert.Throws<InvalidProjectFileException>(() =>
            {
                string content = @"
                    <Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003' >
                        <Import Project='p' X='Y'/>
                    </Project>
                ";

                ProjectRootElement.Create(XmlReader.Create(new StringReader(content)));
            }
            );
        }

        /// <summary>
        /// Read basic valid imports
        /// </summary>
        [Fact]
        public void ReadBasic()
        {
            string content = @"
                <Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003' >
                    <Import Project='i1.proj' />
                    <Import Project='i2.proj' Condition='c'/>
                </Project>
            ";

            ProjectRootElement project = ProjectRootElement.Create(XmlReader.Create(new StringReader(content)));

            List<ProjectImportElement> imports = Helpers.MakeList(project.Imports);

            // Both imports round-trip with their Project (and Condition) attributes intact.
            Assert.Equal(2, imports.Count);
            Assert.Equal("i1.proj", imports[0].Project);
            Assert.Equal("i2.proj", imports[1].Project);
            Assert.Equal("c", imports[1].Condition);
        }

        /// <summary>
        /// Set valid project on import
        /// </summary>
        [Fact]
        public void SetProjectValid()
        {
            string content = @"
                <Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003' >
                    <Import Project='i1.proj' />
                </Project>
            ";

            ProjectRootElement project = ProjectRootElement.Create(XmlReader.Create(new StringReader(content)));
            ProjectImportElement import = (ProjectImportElement)Helpers.GetFirst(project.Children);

            import.Project = "i1b.proj";
            Assert.Equal("i1b.proj", import.Project);
        }

        /// <summary>
        /// Set invalid empty project value on import
        /// </summary>
        [Fact]
        public void SetProjectInvalidEmpty()
        {
            // Setting Project to empty via the OM throws (unlike parse time, an ArgumentException).
            Assert.Throws<ArgumentException>(() =>
            {
                string content = @"
                    <Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003' >
                        <Import Project='i1.proj' />
                    </Project>
                ";

                ProjectRootElement project = ProjectRootElement.Create(XmlReader.Create(new StringReader(content)));
                ProjectImportElement import = (ProjectImportElement)Helpers.GetFirst(project.Children);

                import.Project = String.Empty;
            }
            );
        }

        /// <summary>
        /// Setting the project attribute should dirty the project
        /// </summary>
        [Fact]
        public void SettingProjectDirties()
        {
            string file1 = null;
            string file2 = null;

            try
            {
                // Two temp import files defining the same property with different values;
                // retargeting the import must change the evaluated value after reevaluation.
                file1 = Microsoft.Build.Shared.FileUtilities.GetTemporaryFile();
                ProjectRootElement importProject1 = ProjectRootElement.Create();
                importProject1.AddProperty("p", "v1");
                importProject1.Save(file1);

                file2 = Microsoft.Build.Shared.FileUtilities.GetTemporaryFile();
                ProjectRootElement importProject2 = ProjectRootElement.Create();
                importProject2.AddProperty("p", "v2");
                importProject2.Save(file2);

                string content = String.Format
                    (
                    @"<Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003' >
                        <Import Project='{0}'/>
                    </Project>",
                    file1
                    );

                Project project = new Project(XmlReader.Create(new StringReader(content)));
                ProjectImportElement import = Helpers.GetFirst(project.Xml.Imports);
                import.Project = file2;

                // Value is stale until ReevaluateIfNecessary picks up the dirtied import.
                Assert.Equal("v1", project.GetPropertyValue("p"));

                project.ReevaluateIfNecessary();

                Assert.Equal("v2", project.GetPropertyValue("p"));
            }
            finally
            {
                File.Delete(file1);
                File.Delete(file2);
            }
        }

        /// <summary>
        /// Setting the condition should dirty the project
        /// </summary>
        [Fact]
        public void SettingConditionDirties()
        {
            string file = null;

            try
            {
                file = Microsoft.Build.Shared.FileUtilities.GetTemporaryFile();
                ProjectRootElement importProject = ProjectRootElement.Create();
                importProject.AddProperty("p", "v1");
                importProject.Save(file);

                string content = String.Format
                    (
                    @"<Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003' >
                        <Import Project='{0}'/>
                    </Project>",
                    file
                    );

                Project project = new Project(XmlReader.Create(new StringReader(content)));
                ProjectImportElement import = Helpers.GetFirst(project.Xml.Imports);
                import.Condition = "false";

                // Old value persists until reevaluation; then the now-false import drops the property.
                Assert.Equal("v1", project.GetPropertyValue("p"));

                project.ReevaluateIfNecessary();

                Assert.Equal(String.Empty, project.GetPropertyValue("p"));
            }
            finally
            {
                File.Delete(file);
            }
        }

        /// <summary>
        /// Importing a project which has a relative path
        /// </summary>
        [Fact]
        public void ImportWithRelativePath()
        {
            string tempPath = Path.GetTempPath();
            string testTempPath = Path.Combine(tempPath, "UnitTestsPublicOm");
            string projectfile = Path.Combine(testTempPath, "a.proj");
            string targetsFile = Path.Combine(tempPath, "x.targets");

            // The import path deliberately contains "..": <subdir>\..\x.targets,
            // which must resolve relative to the importing project's directory.
            string projectfileContent = String.Format
                (
                @"
                    <Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003' >
                        <Import Project='{0}'/>
                    </Project>
                ",
                 testTempPath + "\\..\\x.targets"
                 );

            string targetsfileContent = @"
                <Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003' >
                </Project>
                ";

            try
            {
                Directory.CreateDirectory(testTempPath);
                ProjectRootElement project = ProjectRootElement.Create(XmlReader.Create(new StringReader(projectfileContent)));
                project.Save(projectfile);

                project = ProjectRootElement.Create(XmlReader.Create(new StringReader(targetsfileContent)));
                project.Save(targetsFile);

                // Success criterion: evaluation does not throw while resolving the relative import.
                Project msbuildProject = new Project(projectfile);
            }
            finally
            {
                if (Directory.Exists(testTempPath))
                {
                    FileUtilities.DeleteWithoutTrailingBackslash(testTempPath, true);
                }

                if (File.Exists(targetsFile))
                {
                    File.Delete(targetsFile);
                }
            }
        }
    }
}
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.

using System.Diagnostics;
using System.Management.Automation.Language;

namespace System.Management.Automation
{
    /// <summary>
    /// Represents a parameter to the Command.
    /// </summary>
    [DebuggerDisplay("{ParameterName}")]
    internal sealed class CommandParameterInternal
    {
        // Internal holder for the parameter-name half of a bound pair.
        private sealed class Parameter
        {
            internal Ast ast;
            internal string parameterName;
            internal string parameterText;
        }

        // Internal holder for the argument half of a bound pair.
        private sealed class Argument
        {
            internal Ast ast;
            internal object value;
            internal bool splatted;
        }

        private Parameter _parameter;
        private Argument _argument;
        private bool _spaceAfterParameter;
        private bool _fromHashtableSplatting;

        internal bool SpaceAfterParameter { get { return _spaceAfterParameter; } }

        internal bool ParameterNameSpecified { get { return _parameter is not null; } }

        internal bool ArgumentSpecified { get { return _argument is not null; } }

        internal bool ParameterAndArgumentSpecified { get { return ParameterNameSpecified && ArgumentSpecified; } }

        internal bool FromHashtableSplatting { get { return _fromHashtableSplatting; } }

        /// <summary>
        /// Gets and sets the string that represents parameter name, which does not include the '-' (dash).
        /// </summary>
        internal string ParameterName
        {
            get
            {
                Diagnostics.Assert(ParameterNameSpecified, "Caller must verify parameter name was specified");
                return _parameter.parameterName;
            }

            set
            {
                Diagnostics.Assert(ParameterNameSpecified, "Caller must verify parameter name was specified");
                _parameter.parameterName = value;
            }
        }

        /// <summary>
        /// The text of the parameter, which typically includes the leading '-' (dash) and, if specified, the trailing ':'.
        /// </summary>
        internal string ParameterText
        {
            get
            {
                Diagnostics.Assert(ParameterNameSpecified, "Caller must verify parameter name was specified");
                return _parameter.parameterText;
            }
        }

        /// <summary>
        /// The ast of the parameter, if one was specified.
        /// </summary>
        internal Ast ParameterAst
        {
            get { return _parameter?.ast; }
        }

        /// <summary>
        /// The extent of the parameter, if one was specified.
        /// </summary>
        internal IScriptExtent ParameterExtent
        {
            get { return ParameterAst?.Extent ?? PositionUtilities.EmptyExtent; }
        }

        /// <summary>
        /// The ast of the optional argument, if one was specified.
        /// </summary>
        internal Ast ArgumentAst
        {
            get { return _argument?.ast; }
        }

        /// <summary>
        /// The extent of the optional argument, if one was specified.
        /// </summary>
        internal IScriptExtent ArgumentExtent
        {
            get { return ArgumentAst?.Extent ?? PositionUtilities.EmptyExtent; }
        }

        /// <summary>
        /// The value of the optional argument, if one was specified, otherwise UnboundParameter.Value.
        /// </summary>
        internal object ArgumentValue
        {
            get { return _argument is null ? UnboundParameter.Value : _argument.value; }
        }

        /// <summary>
        /// If an argument was specified and is to be splatted, returns true, otherwise false.
        /// </summary>
        internal bool ArgumentToBeSplatted
        {
            get { return _argument is not null && _argument.splatted; }
        }

        /// <summary>
        /// Set the argument value and ast.
        /// </summary>
        internal void SetArgumentValue(Ast ast, object value)
        {
            // Lazily create the argument holder on first use.
            _argument ??= new Argument();
            _argument.ast = ast;
            _argument.value = value;
        }

        /// <summary>
        /// The extent to use when reporting generic errors. The argument extent is used, if it is not empty, otherwise
        /// the parameter extent is used. Some errors may prefer the parameter extent and should not use this method.
        /// </summary>
        internal IScriptExtent ErrorExtent
        {
            get
            {
                IScriptExtent candidate = ArgumentExtent;
                if (candidate == PositionUtilities.EmptyExtent)
                {
                    candidate = ParameterExtent;
                }

                return candidate;
            }
        }

        #region ctor

        /// <summary>
        /// Create a parameter when no argument has been specified.
        /// </summary>
        /// <param name="ast">The ast in script of the parameter.</param>
        /// <param name="parameterName">The parameter name (with no leading dash).</param>
        /// <param name="parameterText">The text of the parameter, as it did, or would, appear in script.</param>
        internal static CommandParameterInternal CreateParameter(
            string parameterName,
            string parameterText,
            Ast ast = null)
        {
            var bound = new CommandParameterInternal();
            bound._parameter = new Parameter
            {
                ast = ast,
                parameterName = parameterName,
                parameterText = parameterText
            };

            return bound;
        }

        /// <summary>
        /// Create a positional argument to a command.
        /// </summary>
        /// <param name="value">The argument value.</param>
        /// <param name="ast">The ast of the argument value in the script.</param>
        /// <param name="splatted">True if the argument value is to be splatted, false otherwise.</param>
        internal static CommandParameterInternal CreateArgument(
            object value,
            Ast ast = null,
            bool splatted = false)
        {
            var bound = new CommandParameterInternal();
            bound._argument = new Argument
            {
                ast = ast,
                value = value,
                splatted = splatted,
            };

            return bound;
        }

        /// <summary>
        /// Create an named argument, where the parameter name is known. This can happen when:
        ///     * The user uses the ':' syntax, as in
        ///         foo -bar:val
        ///     * Splatting, as in
        ///         $x = @{ bar = val } ; foo @x
        ///     * Via an API - when converting a CommandParameter to CommandParameterInternal.
        ///     * In the parameter binder when it resolves a positional argument
        ///     * Other random places that manually construct command processors and know their arguments.
        /// </summary>
        /// <param name="parameterAst">The ast in script of the parameter.</param>
        /// <param name="parameterName">The parameter name (with no leading dash).</param>
        /// <param name="parameterText">The text of the parameter, as it did, or would, appear in script.</param>
        /// <param name="argumentAst">The ast of the argument value in the script.</param>
        /// <param name="value">The argument value.</param>
        /// <param name="spaceAfterParameter">Used in native commands to correctly handle -foo:bar vs. -foo: bar.</param>
        /// <param name="fromSplatting">Indicate if this parameter-argument pair comes from splatting.</param>
        internal static CommandParameterInternal CreateParameterWithArgument(
            Ast parameterAst,
            string parameterName,
            string parameterText,
            Ast argumentAst,
            object value,
            bool spaceAfterParameter,
            bool fromSplatting = false)
        {
            var bound = new CommandParameterInternal();
            bound._parameter = new Parameter
            {
                ast = parameterAst,
                parameterName = parameterName,
                parameterText = parameterText
            };
            bound._argument = new Argument
            {
                ast = argumentAst,
                value = value
            };
            bound._spaceAfterParameter = spaceAfterParameter;
            bound._fromHashtableSplatting = fromSplatting;

            return bound;
        }

        #endregion ctor

        internal bool IsDashQuestion()
        {
            if (!ParameterNameSpecified)
            {
                return false;
            }

            return ParameterName.Equals("?", StringComparison.OrdinalIgnoreCase);
        }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.Xna.Framework;
using Microsoft.Xna.Framework.Audio;
using Microsoft.Xna.Framework.Content;
using Microsoft.Xna.Framework.GamerServices;
using Microsoft.Xna.Framework.Graphics;
using Microsoft.Xna.Framework.Input;
using Microsoft.Xna.Framework.Media;

namespace AirportDEMO
{
    /// <summary>
    /// Main game type. Owns the three check-in queues and entrances, spawns
    /// passengers on a cooldown timer, and drives the XNA update/draw loop.
    /// </summary>
    public class AirportMain : Microsoft.Xna.Framework.Game
    {
        GraphicsDeviceManager graphics;
        SpriteBatch spriteBatch;

        // Shared game state; read/written by Passenger and Entrance logic elsewhere.
        public static List<Passenger> QueueVIP { get; set; }
        public static List<Passenger> QueueE1 { get; set; }
        public static List<Passenger> QueueE2 { get; set; }
        public static Entrance EntranceVIP { get; set; }
        public static Entrance EntranceE1 { get; set; }
        public static Entrance EntranceE2 { get; set; }

        Sprite[] receptionist;               // one animated receptionist sprite per counter
        CounterBoard[] counterBoards;        // one status board per entrance
        Texture2D counterBoard;
        Texture2D legendBoard;
        SpriteFont HUDFont;
        public static List<Passenger> PassengersList { get; set; }
        Texture2D background;
        Texture2D background_front;

        // One texture set (8 sheets each) per passenger category.
        Texture2D[] passengerTexturesVIP1;
        Texture2D[] passengerTexturesVIP2;
        Texture2D[] passengerTexturesN1;
        Texture2D[] passengerTexturesN2a;
        Texture2D[] passengerTexturesN2b;

        // Spawn pacing: at most one new passenger every 2 seconds.
        TimeSpan randomCooldown = TimeSpan.FromSeconds(2);
        TimeSpan randomTimer;
        bool IsReadyToSpawn { get { return randomTimer > randomCooldown; } }

        // FIX: single reusable RNG. The original constructed "new Random()" inside
        // RandomPassenger on every call; Random is time-seeded, so instances created
        // in rapid succession can yield identical sequences.
        static readonly Random rand = new Random();

        public AirportMain()
        {
            graphics = new GraphicsDeviceManager(this);
            Content.RootDirectory = "Content";
            this.IsMouseVisible = true;
            this.Window.Title = "Airport DEMO - By Phuong D. Nguyen & Chau D. Nguyen";
        }

        /// <summary>
        /// Allows the game to perform any initialization it needs to before starting to run.
        /// Allocates all collections, entrances, and sprite slots; textures are loaded
        /// later in LoadContent.
        /// </summary>
        protected override void Initialize()
        {
            receptionist = new Sprite[3];
            PassengersList = new List<Passenger>();
            passengerTexturesVIP1 = new Texture2D[8];
            passengerTexturesVIP2 = new Texture2D[8];
            passengerTexturesN1 = new Texture2D[8];
            passengerTexturesN2a = new Texture2D[8];
            passengerTexturesN2b = new Texture2D[8];
            QueueVIP = new List<Passenger>();
            QueueE1 = new List<Passenger>();
            QueueE2 = new List<Passenger>();
            EntranceVIP = new Entrance();
            EntranceE1 = new Entrance();
            EntranceE2 = new Entrance();
            counterBoards = new CounterBoard[3];
            for (int i = 0; i < 3; ++i)
            {
                // Receptionists sit 165px apart along the counter row.
                receptionist[i] = new Sprite(new Vector2(180 + i * 165, 150), 0, 0.1f, 50);
            }

            base.Initialize();
        }

        /// <summary>
        /// LoadContent will be called once per game and is the place to load
        /// all of your content: fonts, backgrounds, counter boards, receptionist
        /// sprite sheets, and the per-category passenger texture sets.
        /// </summary>
        protected override void LoadContent()
        {
            // Create a new SpriteBatch, which can be used to draw textures.
            spriteBatch = new SpriteBatch(GraphicsDevice);

            HUDFont = Content.Load<SpriteFont>(@"Fonts\HUDFont");
            legendBoard = Content.Load<Texture2D>(@"Textures\LegendBoard");
            background = Content.Load<Texture2D>(@"Textures\Airport_Terminal");
            background_front = Content.Load<Texture2D>(@"Textures\Airport_Terminal_Boards");
            counterBoard = Content.Load<Texture2D>(@"Textures\CounterBoard");

            counterBoards[0] = new CounterBoard(EntranceVIP, new Vector2(135 + 0 * 175, 235), 0.5f);
            counterBoards[1] = new CounterBoard(EntranceE1, new Vector2(135 + 1 * 175, 235), 0.5f);
            counterBoards[2] = new CounterBoard(EntranceE2, new Vector2(135 + 2 * 175, 235), 0.5f);

            for (int i = 0; i < 3; ++i)
            {
                counterBoards[i].LoadTexture(counterBoard, new Point(60, 60), new Point(1, 1));
                // Receptionist sheets live at Textures\R\0..2 (64x64 frames, 9x8 grid).
                receptionist[i].LoadTexture(Content.Load<Texture2D>(@"Textures\R\" + i),
                    new Point(64, 64), new Point(9, 8));
            }

            for (int i = 0; i < 8; ++i)
            {
                passengerTexturesVIP1[i] = Content.Load<Texture2D>(@"Textures\VIP1\" + i);
                passengerTexturesVIP2[i] = Content.Load<Texture2D>(@"Textures\VIP2\" + i);
                passengerTexturesN1[i] = Content.Load<Texture2D>(@"Textures\N1\" + i);
                passengerTexturesN2a[i] = Content.Load<Texture2D>(@"Textures\N2a\" + i);
                passengerTexturesN2b[i] = Content.Load<Texture2D>(@"Textures\N2b\" + i);
            }
        }

        /// <summary>
        /// UnloadContent will be called once per game and is the place to unload
        /// all content.
        /// </summary>
        protected override void UnloadContent()
        {
            // Nothing to do: all content is owned by the ContentManager.
        }

        /// <summary>
        /// Allows the game to run logic such as updating the world,
        /// checking for collisions, gathering input, and playing audio.
        /// </summary>
        /// <param name="gameTime">Provides a snapshot of timing values.</param>
        protected override void Update(GameTime gameTime)
        {
            // Allows the game to exit
            if (GamePad.GetState(PlayerIndex.One).Buttons.Back == ButtonState.Pressed)
                this.Exit();

            RandomPassenger(gameTime);

            // Indexed loop because a passenger may remove itself from the list during
            // Update; when that happens we step the index back so the element shifted
            // into this slot is not skipped.
            // NOTE(review): Contains is O(n) per passenger; fine at current list sizes.
            for (int i = 0; i < PassengersList.Count; ++i)
            {
                Passenger passenger = PassengersList[i];
                passenger.Update(gameTime);
                if (!PassengersList.Contains(passenger))
                    --i;
            }

            for (int i = 0; i < 3; ++i)
            {
                counterBoards[i].Update(gameTime);
                receptionist[i].Update(gameTime);
            }

            EntranceVIP.Update(gameTime);
            EntranceE1.Update(gameTime);
            EntranceE2.Update(gameTime);

            base.Update(gameTime);
        }

        /// <summary>
        /// This is called when the game should draw itself. Uses FrontToBack sorting:
        /// background at layer 0, boards at 0.5, passengers/HUD text at 1.
        /// </summary>
        /// <param name="gameTime">Provides a snapshot of timing values.</param>
        protected override void Draw(GameTime gameTime)
        {
            GraphicsDevice.Clear(Color.CornflowerBlue);

            spriteBatch.Begin(SpriteSortMode.FrontToBack, BlendState.AlphaBlend);

            foreach (Passenger passenger in PassengersList)
            {
                passenger.Draw(gameTime, spriteBatch);
            }

            for (int i = 0; i < 3; ++i)
            {
                counterBoards[i].Draw(gameTime, spriteBatch);
                receptionist[i].Draw(gameTime, spriteBatch);
            }

            // Per-entrance countdown timers above each counter.
            spriteBatch.DrawString(HUDFont, EntranceVIP.Timer.Seconds.ToString(),
                new Vector2(124 + 0 * 175, 206), Color.White, 0, Vector2.Zero, 1, SpriteEffects.None, 1);
            spriteBatch.DrawString(HUDFont, EntranceE1.Timer.Seconds.ToString(),
                new Vector2(124 + 1 * 175, 206), Color.White, 0, Vector2.Zero, 1, SpriteEffects.None, 1);
            spriteBatch.DrawString(HUDFont, EntranceE2.Timer.Seconds.ToString(),
                new Vector2(124 + 2 * 175, 206), Color.White, 0, Vector2.Zero, 1, SpriteEffects.None, 1);

            spriteBatch.Draw(legendBoard, new Vector2(650, 40), new Rectangle(0, 0, 140, 400),
                Color.White, 0, Vector2.Zero, 1, SpriteEffects.None, 1.0f);
            spriteBatch.Draw(background, Vector2.Zero, new Rectangle(0, 0, 800, 480),
                Color.White, 0, Vector2.Zero, 1, SpriteEffects.None, 0);
            spriteBatch.Draw(background_front, Vector2.Zero, new Rectangle(0, 0, 800, 480),
                Color.White, 0, Vector2.Zero, 1, SpriteEffects.None, 0.5f);

            /*spriteBatch.DrawString(HUDFont, Mouse.GetState().X.ToString() + "," + Mouse.GetState().Y.ToString(),
                Vector2.Zero, Color.White, 0, Vector2.Zero, 1, SpriteEffects.None, 1);*/

            spriteBatch.End();

            base.Draw(gameTime);
        }

        /// <summary>
        /// Once the spawn cooldown has elapsed, rolls a passenger category and, if the
        /// relevant queue has room (capacity 10), spawns a passenger at the bottom of
        /// the screen walking up. Category odds: 10% VIP w/o luggage, 10% VIP with
        /// luggage, 20% normal w/o luggage, 60% normal with luggage.
        /// </summary>
        private void RandomPassenger(GameTime gameTime)
        {
            randomTimer += gameTime.ElapsedGameTime;

            if (IsReadyToSpawn)
            {
                randomTimer = TimeSpan.Zero;
                const int queueCapacity = 10;
                Texture2D[] passengerTextures = null;
                PassengerType passengerType;
                double type = rand.NextDouble();
                if (type < 0.1)
                {
                    if (QueueVIP.Count >= queueCapacity) return;
                    passengerTextures = passengerTexturesVIP1;
                    passengerType = PassengerType.VIPWOLuggage;
                }
                else if (type < 0.2)
                {
                    if (QueueVIP.Count >= queueCapacity) return;
                    passengerTextures = passengerTexturesVIP2;
                    passengerType = PassengerType.VIPWLuggage;
                }
                else if (type < 0.4)
                {
                    // NOTE(review): this branch gates on QueueVIP even though it spawns a
                    // normal passenger, unlike the luggage branch below which checks
                    // QueueE1/QueueE2. Looks like a copy-paste slip, but left as-is
                    // pending confirmation of the intended queue routing.
                    if (QueueVIP.Count >= queueCapacity) return;
                    passengerTextures = passengerTexturesN1;
                    passengerType = PassengerType.NormalWOLuggage;
                }
                else
                {
                    // Only spawn when at least one economy queue has room.
                    if (QueueE1.Count >= queueCapacity && QueueE2.Count >= queueCapacity)
                    {
                        return;
                    }
                    passengerType = PassengerType.NormalWLuggage;
                    // rand.Next(0, 2) yields 0 or 1 (upper bound exclusive), so
                    // passengerTextures is always assigned in this branch.
                    int color = rand.Next(0, 2);
                    switch (color)
                    {
                        case 0:
                            passengerTextures = passengerTexturesN2a;
                            break;
                        case 1:
                            passengerTextures = passengerTexturesN2b;
                            break;
                        default:
                            break;
                    }
                }
                PassengersList.Add(new Passenger(passengerType, new Vector2(400, 500), Direction.Up, 0.5f, 0.5f));
                PassengersList[PassengersList.Count - 1].LoadTexture(passengerTextures, new Point(64, 64), new Point(10, 7));
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

// =+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
//
// GroupByQueryOperator.cs
//
// =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-

using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Threading;
using IEnumerator = System.Collections.IEnumerator;

namespace System.Linq.Parallel
{
    /// <summary>
    /// The operator type for GroupBy statements. This operator groups the input based on
    /// a key-selection routine, yielding one-to-many values of key-to-elements. The
    /// implementation is very much like the hash join operator, in which we first build
    /// a big hashtable of the input; then we just iterate over each unique key in the
    /// hashtable, yielding it plus all of the elements with the same key.
    /// </summary>
    /// <typeparam name="TSource"></typeparam>
    /// <typeparam name="TGroupKey"></typeparam>
    /// <typeparam name="TElement"></typeparam>
    internal sealed class GroupByQueryOperator<TSource, TGroupKey, TElement> :
        UnaryQueryOperator<TSource, IGrouping<TGroupKey, TElement>>
    {
        private readonly Func<TSource, TGroupKey> _keySelector; // Key selection function.
        private readonly Func<TSource, TElement> _elementSelector; // Optional element selection function.
        private readonly IEqualityComparer<TGroupKey> _keyComparer; // An optional key comparison object.

        //---------------------------------------------------------------------------------------
        // Initializes a new group by operator.
        //
        // Arguments:
        //     child           - the child operator or data source from which to pull data
        //     keySelector     - a delegate representing the key selector function
        //     elementSelector - a delegate representing the element selector function
        //     keyComparer     - an optional key comparison routine
        //
        // Assumptions:
        //     keySelector must be non null.
        //     elementSelector must be non null, unless TSource and TElement are the same
        //     type (in which case the identity mapping is used instead).
        //
        internal GroupByQueryOperator(IEnumerable<TSource> child,
                                      Func<TSource, TGroupKey> keySelector,
                                      Func<TSource, TElement> elementSelector,
                                      IEqualityComparer<TGroupKey> keyComparer)
            : base(child)
        {
            Debug.Assert(child != null, "child data source cannot be null");
            Debug.Assert(keySelector != null, "need a selector function");
            Debug.Assert(elementSelector != null ||
                         typeof(TSource) == typeof(TElement), "need an element function if TSource!=TElement");

            _keySelector = keySelector;
            _elementSelector = elementSelector;
            _keyComparer = keyComparer;

            // Grouping shuffles elements, so downstream operators cannot rely on
            // the original ordinal order.
            SetOrdinalIndexState(OrdinalIndexState.Shuffled);
        }

        internal override void WrapPartitionedStream<TKey>(
            PartitionedStream<TSource, TKey> inputStream,
            IPartitionedStreamRecipient<IGrouping<TGroupKey, TElement>> recipient,
            bool preferStriping, QuerySettings settings)
        {
            // Hash-repartition the source stream
            if (Child.OutputOrdered)
            {
                WrapPartitionedStreamHelperOrdered<TKey>(
                    ExchangeUtilities.HashRepartitionOrdered<TSource, TGroupKey, TKey>(
                        inputStream, _keySelector, _keyComparer, null, settings.CancellationState.MergedCancellationToken),
                    recipient,
                    settings.CancellationState.MergedCancellationToken
                    );
            }
            else
            {
                // Unordered path: the helper's first type argument is ignored (it is
                // named TIgnoreKey), and the repartitioned stream carries int order
                // keys that are never consumed.
                WrapPartitionedStreamHelper<TKey, int>(
                    ExchangeUtilities.HashRepartition<TSource, TGroupKey, TKey>(
                        inputStream, _keySelector, _keyComparer, null, settings.CancellationState.MergedCancellationToken),
                    recipient,
                    settings.CancellationState.MergedCancellationToken
                    );
            }
        }

        //---------------------------------------------------------------------------------------
        // This is a helper method.
WrapPartitionedStream decides what type TKey is going
        // to be, and then call this method with that key as a generic parameter.
        //
        private void WrapPartitionedStreamHelper<TIgnoreKey, TKey>(
            PartitionedStream<Pair<TSource, TGroupKey>, TKey> hashStream,
            IPartitionedStreamRecipient<IGrouping<TGroupKey, TElement>> recipient,
            CancellationToken cancellationToken)
        {
            int partitionCount = hashStream.PartitionCount;
            var outputStream = new PartitionedStream<IGrouping<TGroupKey, TElement>, TKey>(
                partitionCount, hashStream.KeyComparer, OrdinalIndexState.Shuffled);

            // Each partition groups independently: with an element selector we apply it
            // during enumeration; without one we fall back to an identity enumerator
            // (valid because TSource == TElement in that case).
            for (int partitionIndex = 0; partitionIndex < partitionCount; partitionIndex++)
            {
                var partition = hashStream[partitionIndex];

                if (_elementSelector != null)
                {
                    outputStream[partitionIndex] =
                        new GroupByElementSelectorQueryOperatorEnumerator<TSource, TGroupKey, TElement, TKey>(
                            partition, _keyComparer, _elementSelector, cancellationToken);
                }
                else
                {
                    Debug.Assert(typeof(TSource) == typeof(TElement));
                    var identityEnumerator = new GroupByIdentityQueryOperatorEnumerator<TSource, TGroupKey, TKey>(
                        partition, _keyComparer, cancellationToken);

                    // The cast goes through object to convince the compiler; it is safe
                    // only because of the TSource == TElement assertion above.
                    outputStream[partitionIndex] =
                        (QueryOperatorEnumerator<IGrouping<TGroupKey, TElement>, TKey>)(object)identityEnumerator;
                }
            }

            recipient.Receive(outputStream);
        }

        //---------------------------------------------------------------------------------------
        // This is a helper method. WrapPartitionedStream decides what type TKey is going
        // to be, and then call this method with that key as a generic parameter.
        //
        private void WrapPartitionedStreamHelperOrdered<TKey>(
            PartitionedStream<Pair<TSource, TGroupKey>, TKey> hashStream,
            IPartitionedStreamRecipient<IGrouping<TGroupKey, TElement>> recipient,
            CancellationToken cancellationToken)
        {
            int partitionCount = hashStream.PartitionCount;
            PartitionedStream<IGrouping<TGroupKey, TElement>, TKey> outputStream =
                new PartitionedStream<IGrouping<TGroupKey, TElement>, TKey>(partitionCount,
                    hashStream.KeyComparer, OrdinalIndexState.Shuffled);

            // If there is no element selector, we return a special identity enumerator. Otherwise,
            // we return one that will apply the element selection function during enumeration.
            IComparer<TKey> orderComparer = hashStream.KeyComparer;
            for (int i = 0; i < partitionCount; i++)
            {
                if (_elementSelector == null)
                {
                    Debug.Assert(typeof(TSource) == typeof(TElement));
                    var enumerator = new OrderedGroupByIdentityQueryOperatorEnumerator<TSource, TGroupKey, TKey>(
                        hashStream[i], _keySelector, _keyComparer, orderComparer, cancellationToken);

                    // Cast through object is safe only because TSource == TElement here.
                    outputStream[i] = (QueryOperatorEnumerator<IGrouping<TGroupKey, TElement>, TKey>)(object)enumerator;
                }
                else
                {
                    outputStream[i] = new OrderedGroupByElementSelectorQueryOperatorEnumerator<TSource, TGroupKey, TElement, TKey>(
                        hashStream[i], _keySelector, _elementSelector, _keyComparer, orderComparer, cancellationToken);
                }
            }

            recipient.Receive(outputStream);
        }

        //-----------------------------------------------------------------------------------
        // Override of the query operator base class's Open method.
        //
        internal override QueryResults<IGrouping<TGroupKey, TElement>> Open(QuerySettings settings, bool preferStriping)
        {
            // We just open our child operator. Do not propagate the preferStriping value, but instead explicitly
            // set it to false. Regardless of whether the parent prefers striping or range partitioning, the output
            // will be hash-partitioned.
            QueryResults<TSource> childResults = Child.Open(settings, false);
            return new UnaryQueryOperatorResults(childResults, this, settings, false);
        }

        //---------------------------------------------------------------------------------------
        // Returns an enumerable that represents the query executing sequentially.
        //
        internal override IEnumerable<IGrouping<TGroupKey, TElement>> AsSequentialQuery(CancellationToken token)
        {
            IEnumerable<TSource> wrappedChild = CancellableEnumerable.Wrap(Child.AsSequentialQuery(token), token);

            if (_elementSelector == null)
            {
                // Identity case: defer to LINQ-to-Objects GroupBy; the cast is valid
                // because TElement == TSource (asserted below).
                Debug.Assert(typeof(TElement) == typeof(TSource));
                return (IEnumerable<IGrouping<TGroupKey, TElement>>)wrappedChild.GroupBy(_keySelector, _keyComparer);
            }
            else
            {
                return wrappedChild.GroupBy(_keySelector, _elementSelector, _keyComparer);
            }
        }

        //---------------------------------------------------------------------------------------
        // Whether this operator performs a premature merge that would not be performed in
        // a similar sequential operation (i.e., in LINQ to Objects).
        //
        internal override bool LimitsParallelism
        {
            get { return false; }
        }
    }

    //---------------------------------------------------------------------------------------
    // The enumerator type responsible for grouping elements and yielding the key-value sets.
    //
    // Assumptions:
    //    Just like the Join operator, this won't work properly at all if the analysis engine
    //    didn't choose to hash partition. We will simply not yield correct groupings.
    //
    internal abstract class GroupByQueryOperatorEnumerator<TSource, TGroupKey, TElement, TOrderKey> :
        QueryOperatorEnumerator<IGrouping<TGroupKey, TElement>, TOrderKey>
    {
        protected readonly QueryOperatorEnumerator<Pair<TSource, TGroupKey>, TOrderKey> _source; // The data source to enumerate.
        protected readonly IEqualityComparer<TGroupKey> _keyComparer; // A key comparer.
        protected readonly CancellationToken _cancellationToken; // Observed while building the lookup.
        private Mutables _mutables; // All of the mutable state.

        class Mutables
        {
            internal HashLookup<Wrapper<TGroupKey>, ListChunk<TElement>> _hashLookup; // The lookup with key-value mappings.
            internal int _hashLookupIndex; // The current index within the lookup.
        }

        //---------------------------------------------------------------------------------------
        // Instantiates a new group by enumerator.
        //
        protected GroupByQueryOperatorEnumerator(
            QueryOperatorEnumerator<Pair<TSource, TGroupKey>, TOrderKey> source,
            IEqualityComparer<TGroupKey> keyComparer, CancellationToken cancellationToken)
        {
            Debug.Assert(source != null);

            _source = source;
            _keyComparer = keyComparer;
            _cancellationToken = cancellationToken;
        }

        //---------------------------------------------------------------------------------------
        // MoveNext will invoke the entire query sub-tree, accumulating results into a hash-
        // table, upon the first call. Then for the first call and all subsequent calls, we will
        // just enumerate the key-set from the hash-table, retrieving groupings of key-elements.
        //
        internal override bool MoveNext(ref IGrouping<TGroupKey, TElement> currentElement, ref TOrderKey currentKey)
        {
            Debug.Assert(_source != null);

            // Lazy-init the mutable state. This also means we haven't yet built our lookup of
            // groupings, so we can go ahead and do that too.
            Mutables mutables = _mutables;
            if (mutables == null)
            {
                mutables = _mutables = new Mutables();

                // Build the hash lookup and start enumerating the lookup at the beginning.
                mutables._hashLookup = BuildHashLookup();
                Debug.Assert(mutables._hashLookup != null);
                mutables._hashLookupIndex = -1;
            }

            // Now, with a hash lookup in hand, we just enumerate the keys. So long
            // as the key-value lookup has elements, we have elements.
            // Note: currentKey is deliberately left untouched — this unordered operator
            // reports a Shuffled ordinal state, so order keys carry no meaning here.
            if (++mutables._hashLookupIndex < mutables._hashLookup.Count)
            {
                currentElement = new GroupByGrouping<TGroupKey, TElement>(
                    mutables._hashLookup[mutables._hashLookupIndex]);
                return true;
            }

            return false;
        }

        //-----------------------------------------------------------------------------------
        // Builds the hash lookup, transforming from TSource to TElement through whatever means is appropriate.
        //
        protected abstract HashLookup<Wrapper<TGroupKey>, ListChunk<TElement>> BuildHashLookup();

        protected override void Dispose(bool disposing)
        {
            _source.Dispose();
        }
    }

    //---------------------------------------------------------------------------------------
    // A specialization of the group by enumerator for yielding elements with the identity
    // function.
    //
    internal sealed class GroupByIdentityQueryOperatorEnumerator<TSource, TGroupKey, TOrderKey> :
        GroupByQueryOperatorEnumerator<TSource, TGroupKey, TSource, TOrderKey>
    {
        //---------------------------------------------------------------------------------------
        // Instantiates a new group by enumerator.
        //
        internal GroupByIdentityQueryOperatorEnumerator(
            QueryOperatorEnumerator<Pair<TSource, TGroupKey>, TOrderKey> source,
            IEqualityComparer<TGroupKey> keyComparer, CancellationToken cancellationToken)
            : base(source, keyComparer, cancellationToken)
        {
        }

        //-----------------------------------------------------------------------------------
        // Builds the hash lookup, transforming from TSource to TElement through whatever means is appropriate.
        //
        protected override HashLookup<Wrapper<TGroupKey>, ListChunk<TSource>> BuildHashLookup()
        {
            HashLookup<Wrapper<TGroupKey>, ListChunk<TSource>> hashlookup =
                new HashLookup<Wrapper<TGroupKey>, ListChunk<TSource>>(new WrapperEqualityComparer<TGroupKey>(_keyComparer));

            Pair<TSource, TGroupKey> sourceElement = default(Pair<TSource, TGroupKey>);
            TOrderKey sourceKeyUnused = default(TOrderKey);
            int i = 0;
            while (_source.MoveNext(ref sourceElement, ref sourceKeyUnused))
            {
                // Poll cancellation periodically — presumably POLL_INTERVAL is a
                // 2^k-1 bit mask (confirm against CancellationState).
                if ((i++ & CancellationState.POLL_INTERVAL) == 0)
                    CancellationState.ThrowIfCanceled(_cancellationToken);

                // Generate a key and place it into the hashtable.
                Wrapper<TGroupKey> key = new Wrapper<TGroupKey>(sourceElement.Second);

                // If the key already exists, we just append it to the existing list --
                // otherwise we will create a new one and add it to that instead.
                ListChunk<TSource> currentValue = null;
                if (!hashlookup.TryGetValue(key, ref currentValue))
                {
                    const int INITIAL_CHUNK_SIZE = 2;
                    currentValue = new ListChunk<TSource>(INITIAL_CHUNK_SIZE);
                    hashlookup.Add(key, currentValue);
                }
                Debug.Assert(currentValue != null);

                // Append the source element (identity mapping) to the group's chunk list.
                currentValue.Add(sourceElement.First);
            }

            return hashlookup;
        }
    }

    //---------------------------------------------------------------------------------------
    // A specialization of the group by enumerator for yielding elements with any arbitrary
    // element selection function.
    //
    internal sealed class GroupByElementSelectorQueryOperatorEnumerator<TSource, TGroupKey, TElement, TOrderKey> :
        GroupByQueryOperatorEnumerator<TSource, TGroupKey, TElement, TOrderKey>
    {
        private readonly Func<TSource, TElement> _elementSelector; // Function to select elements.

        //---------------------------------------------------------------------------------------
        // Instantiates a new group by enumerator.
        //
        internal GroupByElementSelectorQueryOperatorEnumerator(
            QueryOperatorEnumerator<Pair<TSource, TGroupKey>, TOrderKey> source,
            IEqualityComparer<TGroupKey> keyComparer, Func<TSource, TElement> elementSelector, CancellationToken cancellationToken) :
            base(source, keyComparer, cancellationToken)
        {
            Debug.Assert(elementSelector != null);
            _elementSelector = elementSelector;
        }

        //-----------------------------------------------------------------------------------
        // Builds the hash lookup, transforming from TSource to TElement through whatever means is appropriate.
        //
        protected override HashLookup<Wrapper<TGroupKey>, ListChunk<TElement>> BuildHashLookup()
        {
            HashLookup<Wrapper<TGroupKey>, ListChunk<TElement>> hashlookup =
                new HashLookup<Wrapper<TGroupKey>, ListChunk<TElement>>(new WrapperEqualityComparer<TGroupKey>(_keyComparer));

            Pair<TSource, TGroupKey> sourceElement = default(Pair<TSource, TGroupKey>);
            TOrderKey sourceKeyUnused = default(TOrderKey);
            int i = 0;
            while (_source.MoveNext(ref sourceElement, ref sourceKeyUnused))
            {
                // Periodic cancellation poll (same masking scheme as the identity variant).
                if ((i++ & CancellationState.POLL_INTERVAL) == 0)
                    CancellationState.ThrowIfCanceled(_cancellationToken);

                // Generate a key and place it into the hashtable.
                Wrapper<TGroupKey> key = new Wrapper<TGroupKey>(sourceElement.Second);

                // If the key already exists, we just append it to the existing list --
                // otherwise we will create a new one and add it to that instead.
                ListChunk<TElement> currentValue = null;
                if (!hashlookup.TryGetValue(key, ref currentValue))
                {
                    const int INITIAL_CHUNK_SIZE = 2;
                    currentValue = new ListChunk<TElement>(INITIAL_CHUNK_SIZE);
                    hashlookup.Add(key, currentValue);
                }
                Debug.Assert(currentValue != null);

                // Apply the element selector before storing the value in the group.
                currentValue.Add(_elementSelector(sourceElement.First));
            }

            return hashlookup;
        }
    }

    //---------------------------------------------------------------------------------------
    // Ordered version of the GroupBy operator.
    //
    internal abstract class OrderedGroupByQueryOperatorEnumerator<TSource, TGroupKey, TElement, TOrderKey> :
        QueryOperatorEnumerator<IGrouping<TGroupKey, TElement>, TOrderKey>
    {
        protected readonly QueryOperatorEnumerator<Pair<TSource, TGroupKey>, TOrderKey> _source; // The data source to enumerate.
        private readonly Func<TSource, TGroupKey> _keySelector; // The key selection routine.
        protected readonly IEqualityComparer<TGroupKey> _keyComparer; // The key comparison routine.
        protected readonly IComparer<TOrderKey> _orderComparer; // The comparison routine for order keys.
        protected readonly CancellationToken _cancellationToken; // Observed while building the lookup.
        private Mutables _mutables; // All the mutable state.

        class Mutables
        {
            internal HashLookup<Wrapper<TGroupKey>, GroupKeyData> _hashLookup; // The lookup with key-value mappings.
            internal int _hashLookupIndex; // The current index within the lookup.
        }

        //---------------------------------------------------------------------------------------
        // Instantiates a new group by enumerator.
        //
        protected OrderedGroupByQueryOperatorEnumerator(QueryOperatorEnumerator<Pair<TSource, TGroupKey>, TOrderKey> source,
            Func<TSource, TGroupKey> keySelector, IEqualityComparer<TGroupKey> keyComparer, IComparer<TOrderKey> orderComparer,
            CancellationToken cancellationToken)
        {
            Debug.Assert(source != null);
            Debug.Assert(keySelector != null);

            _source = source;
            _keySelector = keySelector;
            _keyComparer = keyComparer;
            _orderComparer = orderComparer;
            _cancellationToken = cancellationToken;
        }

        //---------------------------------------------------------------------------------------
        // MoveNext will invoke the entire query sub-tree, accumulating results into a hash-
        // table, upon the first call. Then for the first call and all subsequent calls, we will
        // just enumerate the key-set from the hash-table, retrieving groupings of key-elements.
        //
        internal override bool MoveNext(ref IGrouping<TGroupKey, TElement> currentElement, ref TOrderKey currentKey)
        {
            Debug.Assert(_source != null);
            Debug.Assert(_keySelector != null);

            // Lazy-init the mutable state. This also means we haven't yet built our lookup of
            // groupings, so we can go ahead and do that too.
            Mutables mutables = _mutables;
            if (mutables == null)
            {
                mutables = _mutables = new Mutables();

                // Build the hash lookup and start enumerating the lookup at the beginning.
                mutables._hashLookup = BuildHashLookup();
                Debug.Assert(mutables._hashLookup != null);
                mutables._hashLookupIndex = -1;
            }

            // Now, with a hash lookup in hand, we just enumerate the keys. So long
            // as the key-value lookup has elements, we have elements.
            // Unlike the unordered variant, each group carries an order key that is
            // propagated to the caller here.
            if (++mutables._hashLookupIndex < mutables._hashLookup.Count)
            {
                GroupKeyData value = mutables._hashLookup[mutables._hashLookupIndex].Value;
                currentElement = value._grouping;
                currentKey = value._orderKey;
                return true;
            }

            return false;
        }

        //-----------------------------------------------------------------------------------
        // Builds the hash lookup, transforming from TSource to TElement through whatever means is appropriate.
        //
        protected abstract HashLookup<Wrapper<TGroupKey>, GroupKeyData> BuildHashLookup();

        protected override void Dispose(bool disposing)
        {
            _source.Dispose();
        }

        //-----------------------------------------------------------------------------------
        // A data structure that holds information about elements with a particular key.
        //
        // This information includes two parts:
        //     - An order key for the grouping.
        //     - The grouping itself. The grouping consists of elements and the grouping key.
        //
        protected class GroupKeyData
        {
            internal TOrderKey _orderKey; // Order key for the whole group (smallest seen — see BuildHashLookup).
            internal OrderedGroupByGrouping<TGroupKey, TOrderKey, TElement> _grouping;

            internal GroupKeyData(TOrderKey orderKey, TGroupKey hashKey, IComparer<TOrderKey> orderComparer)
            {
                _orderKey = orderKey;
                _grouping = new OrderedGroupByGrouping<TGroupKey, TOrderKey, TElement>(hashKey, orderComparer);
            }
        }
    }

    //---------------------------------------------------------------------------------------
    // A specialization of the ordered GroupBy enumerator for yielding elements with the identity
    // function.
    //
    internal sealed class OrderedGroupByIdentityQueryOperatorEnumerator<TSource, TGroupKey, TOrderKey> :
        OrderedGroupByQueryOperatorEnumerator<TSource, TGroupKey, TSource, TOrderKey>
    {
        //---------------------------------------------------------------------------------------
        // Instantiates a new group by enumerator.
        //
        internal OrderedGroupByIdentityQueryOperatorEnumerator(QueryOperatorEnumerator<Pair<TSource, TGroupKey>, TOrderKey> source,
            Func<TSource, TGroupKey> keySelector, IEqualityComparer<TGroupKey> keyComparer, IComparer<TOrderKey> orderComparer,
            CancellationToken cancellationToken)
            : base(source, keySelector, keyComparer, orderComparer, cancellationToken)
        {
        }

        //-----------------------------------------------------------------------------------
        // Builds the hash lookup, transforming from TSource to TElement through whatever means is appropriate.
        //
        protected override HashLookup<Wrapper<TGroupKey>, GroupKeyData> BuildHashLookup()
        {
            HashLookup<Wrapper<TGroupKey>, GroupKeyData> hashLookup = new HashLookup<Wrapper<TGroupKey>, GroupKeyData>(
                new WrapperEqualityComparer<TGroupKey>(_keyComparer));

            Pair<TSource, TGroupKey> sourceElement = default(Pair<TSource, TGroupKey>);
            TOrderKey sourceOrderKey = default(TOrderKey);
            int i = 0;
            while (_source.MoveNext(ref sourceElement, ref sourceOrderKey))
            {
                // Periodic cancellation poll (POLL_INTERVAL used as a bit mask).
                if ((i++ & CancellationState.POLL_INTERVAL) == 0)
                    CancellationState.ThrowIfCanceled(_cancellationToken);

                // Generate a key and place it into the hashtable.
                Wrapper<TGroupKey> key = new Wrapper<TGroupKey>(sourceElement.Second);

                // If the key already exists, we just append it to the existing list --
                // otherwise we will create a new one and add it to that instead.
                GroupKeyData currentValue = null;
                if (hashLookup.TryGetValue(key, ref currentValue))
                {
                    // Keep the smallest order key seen for the group, so the group as a
                    // whole is positioned by its earliest element (per _orderComparer).
                    if (_orderComparer.Compare(sourceOrderKey, currentValue._orderKey) < 0)
                    {
                        currentValue._orderKey = sourceOrderKey;
                    }
                }
                else
                {
                    currentValue = new GroupKeyData(sourceOrderKey, key.Value, _orderComparer);

                    hashLookup.Add(key, currentValue);
                }

                Debug.Assert(currentValue != null);

                currentValue._grouping.Add(sourceElement.First, sourceOrderKey);
            }

            // Sort the elements within each group
            for (int j = 0; j < hashLookup.Count; j++)
            {
                hashLookup[j].Value._grouping.DoneAdding();
            }

            return hashLookup;
        }
    }

    //---------------------------------------------------------------------------------------
    // A specialization of the ordered GroupBy enumerator for yielding elements with any arbitrary
    // element selection function.
    //
    internal sealed class OrderedGroupByElementSelectorQueryOperatorEnumerator<TSource, TGroupKey, TElement, TOrderKey> :
        OrderedGroupByQueryOperatorEnumerator<TSource, TGroupKey, TElement, TOrderKey>
    {
        private readonly Func<TSource, TElement> _elementSelector; // Function to select elements.

        //---------------------------------------------------------------------------------------
        // Instantiates a new group by enumerator.
        //
        internal OrderedGroupByElementSelectorQueryOperatorEnumerator(QueryOperatorEnumerator<Pair<TSource, TGroupKey>, TOrderKey> source,
            Func<TSource, TGroupKey> keySelector, Func<TSource, TElement> elementSelector, IEqualityComparer<TGroupKey> keyComparer,
            IComparer<TOrderKey> orderComparer, CancellationToken cancellationToken) :
            base(source, keySelector, keyComparer, orderComparer, cancellationToken)
        {
            Debug.Assert(elementSelector != null);
            _elementSelector = elementSelector;
        }

        //-----------------------------------------------------------------------------------
        // Builds the hash lookup, transforming from TSource to TElement through whatever means is appropriate.
        //
        protected override HashLookup<Wrapper<TGroupKey>, GroupKeyData> BuildHashLookup()
        {
            HashLookup<Wrapper<TGroupKey>, GroupKeyData> hashLookup = new HashLookup<Wrapper<TGroupKey>, GroupKeyData>(
                new WrapperEqualityComparer<TGroupKey>(_keyComparer));

            Pair<TSource, TGroupKey> sourceElement = default(Pair<TSource, TGroupKey>);
            TOrderKey sourceOrderKey = default(TOrderKey);
            int i = 0;
            while (_source.MoveNext(ref sourceElement, ref sourceOrderKey))
            {
                // Periodic cancellation poll (POLL_INTERVAL used as a bit mask).
                if ((i++ & CancellationState.POLL_INTERVAL) == 0)
                    CancellationState.ThrowIfCanceled(_cancellationToken);

                // Generate a key and place it into the hashtable.
                Wrapper<TGroupKey> key = new Wrapper<TGroupKey>(sourceElement.Second);

                // If the key already exists, we just append it to the existing list --
                // otherwise we will create a new one and add it to that instead.
                GroupKeyData currentValue = null;
                if (hashLookup.TryGetValue(key, ref currentValue))
                {
                    // Keep the smallest order key seen for the group (per _orderComparer).
                    if (_orderComparer.Compare(sourceOrderKey, currentValue._orderKey) < 0)
                    {
                        currentValue._orderKey = sourceOrderKey;
                    }
                }
                else
                {
                    currentValue = new GroupKeyData(sourceOrderKey, key.Value, _orderComparer);

                    hashLookup.Add(key, currentValue);
                }

                Debug.Assert(currentValue != null);

                // Apply the element selector before storing the value in the grouping.
                currentValue._grouping.Add(_elementSelector(sourceElement.First), sourceOrderKey);
            }

            // Sort the elements within each group
            for (int j = 0; j < hashLookup.Count; j++)
            {
                hashLookup[j].Value._grouping.DoneAdding();
            }

            return hashLookup;
        }
    }

    //---------------------------------------------------------------------------------------
    // This little type implements the IGrouping<K,T> interface, and exposes a single
    // key-to-many-values mapping.
    //
    internal class GroupByGrouping<TGroupKey, TElement> : IGrouping<TGroupKey, TElement>
    {
        private KeyValuePair<Wrapper<TGroupKey>, ListChunk<TElement>> _keyValues; // A key value pair.

        //---------------------------------------------------------------------------------------
        // Constructs a new grouping out of the key value pair.
        //
        internal GroupByGrouping(KeyValuePair<Wrapper<TGroupKey>, ListChunk<TElement>> keyValues)
        {
            Debug.Assert(keyValues.Value != null);
            _keyValues = keyValues;
        }

        //---------------------------------------------------------------------------------------
        // The key this mapping represents.
        //
        TGroupKey IGrouping<TGroupKey, TElement>.Key
        {
            get
            {
                return _keyValues.Key.Value;
            }
        }

        //---------------------------------------------------------------------------------------
        // Access to value enumerators.
// IEnumerator<TElement> IEnumerable<TElement>.GetEnumerator() { Debug.Assert(_keyValues.Value != null); return _keyValues.Value.GetEnumerator(); } IEnumerator IEnumerable.GetEnumerator() { return ((IEnumerable<TElement>)this).GetEnumerator(); } } /// <summary> /// An ordered version of the grouping data structure. Represents an ordered group of elements that /// have the same grouping key. /// </summary> internal class OrderedGroupByGrouping<TGroupKey, TOrderKey, TElement> : IGrouping<TGroupKey, TElement> { private TGroupKey _groupKey; // The group key for this grouping private GrowingArray<TElement> _values; // Values in this group private GrowingArray<TOrderKey> _orderKeys; // Order keys that correspond to the values private IComparer<TOrderKey> _orderComparer; // Comparer for order keys private KeyAndValuesComparer _wrappedComparer; // Comparer that wraps the _orderComparer used for sorting key/value pairs /// <summary> /// Constructs a new grouping /// </summary> internal OrderedGroupByGrouping( TGroupKey groupKey, IComparer<TOrderKey> orderComparer) { _groupKey = groupKey; _values = new GrowingArray<TElement>(); _orderKeys = new GrowingArray<TOrderKey>(); _orderComparer = orderComparer; _wrappedComparer = new KeyAndValuesComparer(_orderComparer); } /// <summary> /// The key this grouping represents. 
/// </summary> TGroupKey IGrouping<TGroupKey, TElement>.Key { get { return _groupKey; } } IEnumerator<TElement> IEnumerable<TElement>.GetEnumerator() { Debug.Assert(_values != null); int valueCount = _values.Count; TElement[] valueArray = _values.InternalArray; Debug.Assert(valueArray.Length >= valueCount); // valueArray.Length may be larger than valueCount for (int i = 0; i < valueCount; i++) { yield return valueArray[i]; } } IEnumerator IEnumerable.GetEnumerator() { return ((IEnumerable<TElement>)this).GetEnumerator(); } /// <summary> /// Add an element /// </summary> internal void Add(TElement value, TOrderKey orderKey) { Debug.Assert(_values != null); Debug.Assert(_orderKeys != null); _values.Add(value); _orderKeys.Add(orderKey); } /// <summary> /// No more elements will be added, so we can sort the group now. /// </summary> internal void DoneAdding() { Debug.Assert(_values != null); Debug.Assert(_orderKeys != null); // Create a map of key-value pair. // We can't use a dictionary since the keys are not necessarily unique List<KeyValuePair<TOrderKey, TElement>> sortedValues = new List<KeyValuePair<TOrderKey, TElement>>(); for (int i = 0; i < _orderKeys.InternalArray.Length; i++) { sortedValues.Add(new KeyValuePair<TOrderKey, TElement>(_orderKeys.InternalArray[i], _values.InternalArray[i])); } // Sort the values by using the _orderComparer wrapped in a Tuple comparer sortedValues.Sort(0, _values.Count, _wrappedComparer); // Unpack the values from the list back into the 2 separate arrays for (int i = 0; i < _values.InternalArray.Length; i++) { _orderKeys.InternalArray[i] = sortedValues[i].Key; _values.InternalArray[i] = sortedValues[i].Value; } #if DEBUG _orderKeys = null; // Any future calls to Add() or DoneAdding() will fail #endif } private class KeyAndValuesComparer : IComparer<KeyValuePair<TOrderKey, TElement>> { private IComparer<TOrderKey> myComparer; public KeyAndValuesComparer(IComparer<TOrderKey> comparer) { myComparer = comparer; } public int 
Compare(KeyValuePair<TOrderKey, TElement> x, KeyValuePair<TOrderKey, TElement> y) { return myComparer.Compare(x.Key, y.Key); } } } }
using System.Linq;
using NUnit.Framework;
using StructureMap.Query;
using StructureMap.Testing.Configuration.DSL;
using StructureMap.Testing.Graph;
using StructureMap.Testing.Widget;
using StructureMap.Testing.Widget2;

namespace StructureMap.Testing.Query
{
    /// <summary>
    /// Exercises the container's configuration model: default instances, lifecycles,
    /// ejection, and enumeration/removal of configured instances.
    /// </summary>
    [TestFixture]
    public class InstanceFactoryTypeConfigurationTester
    {
        private Container _container;

        [SetUp]
        public void SetUp()
        {
            // IWidget: singleton AWidget. Rule: three transient instances.
            // IEngine: transient PushrodEngine. IAutomobile: registered, no implementations.
            _container = new Container(registry =>
            {
                registry.For<IWidget>().Singleton().Use<AWidget>();
                registry.For<Rule>().AddInstances(rules =>
                {
                    rules.OfConcreteType<DefaultRule>();
                    rules.OfConcreteType<ARule>();
                    rules.OfConcreteType<ColorRule>().WithCtorArg("color").EqualTo("red");
                });

                registry.For<IEngine>().Use<PushrodEngine>();

                registry.For<IAutomobile>();
            });
        }

        [Test]
        public void build_when_the_cast_does_not_work()
        {
            // Asking the IWidget default for an unrelated type yields null.
            _container.Model.For<IWidget>().Default.Get<Rule>().ShouldBeNull();
        }

        [Test]
        public void build_when_the_cast_does_work()
        {
            _container.Model.For<IWidget>().Default.Get<IWidget>().ShouldBeOfType<AWidget>();
        }

        [Test]
        public void building_respects_the_lifecycle()
        {
            // IWidget is a singleton, so two builds through the model return one object.
            var first = _container.Model.For<IWidget>().Default.Get<IWidget>();
            var second = _container.Model.For<IWidget>().Default.Get<IWidget>();

            first.ShouldBeTheSameAs(second);
        }

        [Test]
        public void can_iterate_over_the_children_instances()
        {
            _container.Model.InstancesOf<Rule>().Count().ShouldEqual(3);
        }

        [Test]
        public void eject_a_singleton()
        {
            var cached = _container.GetInstance<IWidget>();
            _container.GetInstance<IWidget>().ShouldBeTheSameAs(cached);

            // Ejecting the singleton forces the next resolution to build a fresh object.
            _container.Model.For<IWidget>().Default.EjectObject();

            _container.GetInstance<IWidget>().ShouldNotBeTheSameAs(cached);
        }

        [Test]
        public void eject_a_singleton_that_has_not_been_created_does_no_harm()
        {
            _container.Model.For<IWidget>().Default.EjectObject();
        }

        [Test]
        public void eject_a_transient_does_no_harm()
        {
            _container.Model.For<IEngine>().Default.EjectObject();
        }

        [Test]
        public void eject_and_remove_an_instance_by_filter_should_remove_it_from_the_model()
        {
            InstanceRef doomed = _container.Model.For<Rule>().Instances.First();

            _container.Model.For<Rule>().EjectAndRemove(x => x.Name == doomed.Name);

            _container.Model.For<Rule>().Instances.Select(x => x.ConcreteType)
                .ShouldHaveTheSameElementsAs(typeof (ARule), typeof (ColorRule));

            _container.GetAllInstances<Rule>().Select(x => x.GetType())
                .ShouldHaveTheSameElementsAs(typeof (ARule), typeof (ColorRule));
        }

        [Test]
        public void eject_and_remove_an_instance_should_remove_it_from_the_model()
        {
            InstanceRef doomed = _container.Model.For<Rule>().Instances.First();

            _container.Model.For<Rule>().EjectAndRemove(doomed);

            _container.Model.For<Rule>().Instances.Select(x => x.ConcreteType)
                .ShouldHaveTheSameElementsAs(typeof (ARule), typeof (ColorRule));

            _container.GetAllInstances<Rule>().Select(x => x.GetType())
                .ShouldHaveTheSameElementsAs(typeof (ARule), typeof (ColorRule));
        }

        [Test]
        public void eject_and_remove_an_instance_should_remove_it_from_the_model_by_name()
        {
            InstanceRef doomed = _container.Model.For<Rule>().Instances.First();

            _container.Model.For<Rule>().EjectAndRemove(doomed.Name);

            _container.Model.For<Rule>().Instances.Select(x => x.ConcreteType)
                .ShouldHaveTheSameElementsAs(typeof (ARule), typeof (ColorRule));

            _container.GetAllInstances<Rule>().Select(x => x.GetType())
                .ShouldHaveTheSameElementsAs(typeof (ARule), typeof (ColorRule));
        }

        [Test]
        public void eject_for_a_transient_type_in_a_container_should_be_tracked()
        {
            IContainer nested = _container.GetNestedContainer();
            var original = nested.GetInstance<IEngine>();

            // A nested container caches transients: repeated resolutions hit the same object.
            for (int i = 0; i < 7; i++)
            {
                nested.GetInstance<IEngine>().ShouldBeTheSameAs(original);
            }

            nested.Model.For<IEngine>().Default.EjectObject();

            nested.GetInstance<IEngine>().ShouldNotBeTheSameAs(original);
        }

        [Test]
        public void get_default_should_return_null_when_it_does_not_exist()
        {
            _container.Model.For<Rule>().Default.ShouldBeNull();
        }

        [Test]
        public void get_default_when_it_exists()
        {
            _container.Model.For<IWidget>().Default.ConcreteType.ShouldEqual(typeof (AWidget));
        }

        [Test]
        public void get_lifecycle()
        {
            _container.Model.For<IWidget>().Lifecycle.ShouldEqual(InstanceScope.Singleton.ToString());
            _container.Model.For<Rule>().Lifecycle.ShouldEqual(InstanceScope.Transient.ToString());
        }

        [Test]
        public void has_been_created_for_a_purely_transient_object_should_always_be_false()
        {
            _container.Model.For<IEngine>().Default.ObjectHasBeenCreated().ShouldBeFalse();

            // Transients are never cached by the root container, so creation is not tracked.
            _container.GetInstance<IEngine>();
            _container.Model.For<IEngine>().Default.ObjectHasBeenCreated().ShouldBeFalse();
        }

        [Test]
        public void has_been_created_for_a_singleton()
        {
            _container.Model.For<IWidget>().Default.ObjectHasBeenCreated().ShouldBeFalse();

            _container.GetInstance<IWidget>();
            _container.Model.For<IWidget>().Default.ObjectHasBeenCreated().ShouldBeTrue();
        }

        [Test]
        public void has_been_created_for_a_transient_type_in_a_container_should_be_tracked()
        {
            IContainer nested = _container.GetNestedContainer();
            nested.Model.For<IEngine>().Default.ObjectHasBeenCreated().ShouldBeFalse();

            nested.GetInstance<IEngine>();
            nested.Model.For<IEngine>().Default.ObjectHasBeenCreated().ShouldBeTrue();
        }

        [Test]
        public void has_implementations_negative_test()
        {
            _container.Model.For<IAutomobile>().HasImplementations().ShouldBeFalse();
        }

        [Test]
        public void has_implementations_positive_test()
        {
            _container.Model.For<Rule>().HasImplementations().ShouldBeTrue();
            _container.Model.For<IWidget>().HasImplementations().ShouldBeTrue();
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

//
using System.Text;
using System;
using System.Diagnostics;
using System.Diagnostics.Contracts;
using System.Globalization;

namespace System.Globalization
{
    // Formats TimeSpan values for the standard ("c"/"t"/"T"/"g"/"G") and custom format strings.
    internal static class TimeSpanFormat
    {
        // Zero-pads n to at least 'digits' decimal digits.
        private static String IntToString(int n, int digits)
        {
            return ParseNumbers.IntToString(n, 10, digits, '0', 0);
        }

        // Precomputed literal separators for the invariant "c" pattern (positive/negative).
        internal static readonly FormatLiterals PositiveInvariantFormatLiterals = TimeSpanFormat.FormatLiterals.InitInvariant(false /*isNegative*/);
        internal static readonly FormatLiterals NegativeInvariantFormatLiterals = TimeSpanFormat.FormatLiterals.InitInvariant(true /*isNegative*/);

        internal enum Pattern
        {
            None = 0,
            Minimum = 1, // [-][d.]hh:mm:ss[.fffffff] — optional fields omitted when zero
            Full = 2,    // [-]d.hh:mm:ss.fffffff    — all fields always emitted
        }

        //
        //  Format
        //
        //  Actions: Main method called from TimeSpan.ToString
        //      "c"/"t"/"T" -> invariant pattern; "g"/"G" -> culture-specific pattern from dtfi;
        //      anything longer is treated as a custom format string.
        //
        internal static String Format(TimeSpan value, String format, IFormatProvider formatProvider)
        {
            if (format == null || format.Length == 0)
                format = "c";

            // standard formats
            if (format.Length == 1)
            {
                char f = format[0];

                if (f == 'c' || f == 't' || f == 'T')
                    return FormatStandard(value, true, format, Pattern.Minimum);
                if (f == 'g' || f == 'G')
                {
                    Pattern pattern;
                    DateTimeFormatInfo dtfi = DateTimeFormatInfo.GetInstance(formatProvider);

                    // Pick the culture's positive or negative localized TimeSpan pattern.
                    if (value._ticks < 0)
                        format = dtfi.FullTimeSpanNegativePattern;
                    else
                        format = dtfi.FullTimeSpanPositivePattern;
                    if (f == 'g')
                        pattern = Pattern.Minimum;
                    else
                        pattern = Pattern.Full;

                    return FormatStandard(value, false, format, pattern);
                }
                throw new FormatException(SR.Format_InvalidString);
            }

            return FormatCustomized(value, format, DateTimeFormatInfo.GetInstance(formatProvider));
        }

        //
        //  FormatStandard
        //
        //  Actions: Format the TimeSpan instance using the specified format.
        //      isInvariant selects the precomputed invariant literals; otherwise the
        //      literals are parsed out of 'format' (a localized v1 pattern).
        //
        private static String FormatStandard(TimeSpan value, bool isInvariant, String format, Pattern pattern)
        {
            StringBuilder sb = StringBuilderCache.Acquire();
            // Decompose ticks into day / time-of-day components; negate both for negative spans.
            int day = (int)(value._ticks / TimeSpan.TicksPerDay);
            long time = value._ticks % TimeSpan.TicksPerDay;

            if (value._ticks < 0)
            {
                day = -day;
                time = -time;
            }
            int hours = (int)(time / TimeSpan.TicksPerHour % 24);
            int minutes = (int)(time / TimeSpan.TicksPerMinute % 60);
            int seconds = (int)(time / TimeSpan.TicksPerSecond % 60);
            int fraction = (int)(time % TimeSpan.TicksPerSecond);

            FormatLiterals literal;
            if (isInvariant)
            {
                if (value._ticks < 0)
                    literal = NegativeInvariantFormatLiterals;
                else
                    literal = PositiveInvariantFormatLiterals;
            }
            else
            {
                literal = new FormatLiterals();
                literal.Init(format, pattern == Pattern.Full);
            }
            if (fraction != 0)
            { // truncate the partial second to the specified length
                fraction = (int)((long)fraction / (long)Math.Pow(10, DateTimeFormat.MaxSecondsFractionDigits - literal.ff));
            }

            // Pattern.Full: [-]dd.hh:mm:ss.fffffff
            // Pattern.Minimum: [-][d.]hh:mm:ss[.fffffff]

            sb.Append(literal.Start);                           // [-]
            if (pattern == Pattern.Full || day != 0)
            {                                                   //
                sb.Append(day);                                 // [dd]
                sb.Append(literal.DayHourSep);                  // [.]
            }                                                   //
            sb.Append(IntToString(hours, literal.hh));          // hh
            sb.Append(literal.HourMinuteSep);                   // :
            sb.Append(IntToString(minutes, literal.mm));        // mm
            sb.Append(literal.MinuteSecondSep);                 // :
            sb.Append(IntToString(seconds, literal.ss));        // ss
            if (!isInvariant && pattern == Pattern.Minimum)
            {
                // Strip trailing zeros from the fraction so "g" emits the minimal digits.
                int effectiveDigits = literal.ff;
                while (effectiveDigits > 0)
                {
                    if (fraction % 10 == 0)
                    {
                        fraction = fraction / 10;
                        effectiveDigits--;
                    }
                    else
                    {
                        break;
                    }
                }
                if (effectiveDigits > 0)
                {
                    sb.Append(literal.SecondFractionSep);           // [.FFFFFFF]
                    sb.Append((fraction).ToString(DateTimeFormat.fixedNumberFormats[effectiveDigits - 1], CultureInfo.InvariantCulture));
                }
            }
            else if (pattern == Pattern.Full || fraction != 0)
            {
                sb.Append(literal.SecondFractionSep);           // [.]
                sb.Append(IntToString(fraction, literal.ff));   // [fffffff]
            }                                                   //
            sb.Append(literal.End);                             //

            return StringBuilderCache.GetStringAndRelease(sb);
        }

        //
        //  FormatCustomized
        //
        //  Actions: Format the TimeSpan instance using the specified format.
        //      Walks the custom format string token by token (d/h/m/s/f/F, quoted
        //      literals, '%' single-token escape, '\' character escape).
        //
        internal static String FormatCustomized(TimeSpan value, String format, DateTimeFormatInfo dtfi)
        {
            Debug.Assert(dtfi != null, "dtfi == null");

            int day = (int)(value._ticks / TimeSpan.TicksPerDay);
            long time = value._ticks % TimeSpan.TicksPerDay;

            if (value._ticks < 0)
            {
                day = -day;
                time = -time;
            }
            int hours = (int)(time / TimeSpan.TicksPerHour % 24);
            int minutes = (int)(time / TimeSpan.TicksPerMinute % 60);
            int seconds = (int)(time / TimeSpan.TicksPerSecond % 60);
            int fraction = (int)(time % TimeSpan.TicksPerSecond);

            long tmp = 0;
            int i = 0;
            int tokenLen;
            StringBuilder result = StringBuilderCache.Acquire();

            while (i < format.Length)
            {
                char ch = format[i];
                int nextChar;
                switch (ch)
                {
                    case 'h':
                        tokenLen = DateTimeFormat.ParseRepeatPattern(format, i, ch);
                        if (tokenLen > 2)
                            throw new FormatException(SR.Format_InvalidString);
                        DateTimeFormat.FormatDigits(result, hours, tokenLen);
                        break;
                    case 'm':
                        tokenLen = DateTimeFormat.ParseRepeatPattern(format, i, ch);
                        if (tokenLen > 2)
                            throw new FormatException(SR.Format_InvalidString);
                        DateTimeFormat.FormatDigits(result, minutes, tokenLen);
                        break;
                    case 's':
                        tokenLen = DateTimeFormat.ParseRepeatPattern(format, i, ch);
                        if (tokenLen > 2)
                            throw new FormatException(SR.Format_InvalidString);
                        DateTimeFormat.FormatDigits(result, seconds, tokenLen);
                        break;
                    case 'f':
                        //
                        // The fraction of a second in single-digit precision. The remaining digits are truncated.
                        //
                        tokenLen = DateTimeFormat.ParseRepeatPattern(format, i, ch);
                        if (tokenLen > DateTimeFormat.MaxSecondsFractionDigits)
                            throw new FormatException(SR.Format_InvalidString);

                        tmp = (long)fraction;
                        tmp /= (long)Math.Pow(10, DateTimeFormat.MaxSecondsFractionDigits - tokenLen);
                        result.Append((tmp).ToString(DateTimeFormat.fixedNumberFormats[tokenLen - 1], CultureInfo.InvariantCulture));
                        break;
                    case 'F':
                        //
                        // Displays the most significant digit of the seconds fraction. Nothing is displayed if the digit is zero.
                        //
                        tokenLen = DateTimeFormat.ParseRepeatPattern(format, i, ch);
                        if (tokenLen > DateTimeFormat.MaxSecondsFractionDigits)
                            throw new FormatException(SR.Format_InvalidString);

                        tmp = (long)fraction;
                        tmp /= (long)Math.Pow(10, DateTimeFormat.MaxSecondsFractionDigits - tokenLen);
                        int effectiveDigits = tokenLen;
                        while (effectiveDigits > 0)
                        {
                            if (tmp % 10 == 0)
                            {
                                tmp = tmp / 10;
                                effectiveDigits--;
                            }
                            else
                            {
                                break;
                            }
                        }
                        if (effectiveDigits > 0)
                        {
                            result.Append((tmp).ToString(DateTimeFormat.fixedNumberFormats[effectiveDigits - 1], CultureInfo.InvariantCulture));
                        }
                        break;
                    case 'd':
                        //
                        // tokenLen == 1 : Day as digits with no leading zero.
                        // tokenLen == 2+: Day as digits with leading zero for single-digit days.
                        //
                        tokenLen = DateTimeFormat.ParseRepeatPattern(format, i, ch);
                        if (tokenLen > 8)
                            throw new FormatException(SR.Format_InvalidString);
                        DateTimeFormat.FormatDigits(result, day, tokenLen, true);
                        break;
                    case '\'':
                    case '\"':
                        // Quoted literal: copied verbatim into the result.
                        tokenLen = DateTimeFormat.ParseQuoteString(format, i, result);
                        break;
                    case '%':
                        // Optional format character.
                        // For example, format string "%d" will print day
                        // Most of the cases, "%" can be ignored.
                        nextChar = DateTimeFormat.ParseNextChar(format, i);
                        // nextChar will be -1 if we already reach the end of the format string.
                        // Besides, we will not allow "%%" appear in the pattern.
                        if (nextChar >= 0 && nextChar != (int)'%')
                        {
                            // Recurse with the single following character as a one-token format.
                            result.Append(TimeSpanFormat.FormatCustomized(value, ((char)nextChar).ToString(), dtfi));
                            tokenLen = 2;
                        }
                        else
                        {
                            //
                            // This means that '%' is at the end of the format string or
                            // "%%" appears in the format string.
                            //
                            throw new FormatException(SR.Format_InvalidString);
                        }
                        break;
                    case '\\':
                        // Escaped character.  Can be used to insert character into the format string.
                        // For example, "\d" will insert the character 'd' into the string.
                        //
                        nextChar = DateTimeFormat.ParseNextChar(format, i);
                        if (nextChar >= 0)
                        {
                            result.Append(((char)nextChar));
                            tokenLen = 2;
                        }
                        else
                        {
                            //
                            // This means that '\' is at the end of the formatting string.
                            //
                            throw new FormatException(SR.Format_InvalidString);
                        }
                        break;
                    default:
                        throw new FormatException(SR.Format_InvalidString);
                }
                i += tokenLen;
            }
            return StringBuilderCache.GetStringAndRelease(result);
        }

        // Holds the literal separators and field widths of a (possibly localized) v1 TimeSpan
        // pattern: Start [dd] DayHourSep [hh] HourMinuteSep [mm] MinuteSecondSep [ss] SecondFractionSep [ff] End.
        internal struct FormatLiterals
        {
            internal String Start { get { return literals[0]; } }
            internal String DayHourSep { get { return literals[1]; } }
            internal String HourMinuteSep { get { return literals[2]; } }
            internal String MinuteSecondSep { get { return literals[3]; } }
            internal String SecondFractionSep { get { return literals[4]; } }
            internal String End { get { return literals[5]; } }
            internal String AppCompatLiteral;
            internal int dd; // field widths (digit counts) for day/hour/minute/second/fraction
            internal int hh;
            internal int mm;
            internal int ss;
            internal int ff;

            private String[] literals;

            /* factory method for static invariant FormatLiterals */
            internal static FormatLiterals InitInvariant(bool isNegative)
            {
                FormatLiterals x = new FormatLiterals();
                x.literals = new String[6];
                x.literals[0] = isNegative ? "-" : String.Empty;
                x.literals[1] = ".";
                x.literals[2] = ":";
                x.literals[3] = ":";
                x.literals[4] = ".";
                x.literals[5] = String.Empty;
                x.AppCompatLiteral = ":."; // MinuteSecondSep+SecondFractionSep;
                x.dd = 2;
                x.hh = 2;
                x.mm = 2;
                x.ss = 2;
                x.ff = DateTimeFormat.MaxSecondsFractionDigits;
                return x;
            }

            // For the "v1" TimeSpan localized patterns, the data is simply literal field separators with
            // the constants guaranteed to include DHMSF ordered greatest to least significant.
            // Once the data becomes more complex than this we will need to write a proper tokenizer for
            // parsing and formatting
            internal void Init(String format, bool useInvariantFieldLengths)
            {
                literals = new String[6];
                for (int i = 0; i < literals.Length; i++)
                    literals[i] = String.Empty;
                dd = 0;
                hh = 0;
                mm = 0;
                ss = 0;
                ff = 0;

                StringBuilder sb = StringBuilderCache.Acquire();
                bool inQuote = false;
                char quote = '\'';
                // 'field' tracks which literal slot the text accumulated in sb belongs to.
                int field = 0;

                for (int i = 0; i < format.Length; i++)
                {
                    switch (format[i])
                    {
                        case '\'':
                        case '\"':
                            if (inQuote && (quote == format[i]))
                            {
                                /* we were in a quote and found a matching exit quote, so we are outside a quote now */
                                Debug.Assert(field >= 0 && field <= 5, "field >= 0 && field <= 5");
                                if (field >= 0 && field <= 5)
                                {
                                    literals[field] = sb.ToString();
                                    sb.Length = 0;
                                    inQuote = false;
                                }
                                else
                                {
                                    return; // how did we get here?
                                }
                            }
                            else if (!inQuote)
                            {
                                /* we are at the start of a new quote block */
                                quote = format[i];
                                inQuote = true;
                            }
                            else
                            {
                                /* we were in a quote and saw the other type of quote character, so we are still in a quote */
                            }
                            break;
                        case '%':
                            Debug.Assert(false, "Unexpected special token '%', Bug in DateTimeFormatInfo.FullTimeSpan[Positive|Negative]Pattern");
                            goto default;
                        case '\\':
                            if (!inQuote)
                            {
                                // NOTE(review): the escaped character is skipped, not appended —
                                // matches upstream appcompat behavior; confirm before changing.
                                i++; /* skip next character that is escaped by this backslash or percent sign */
                                break;
                            }
                            goto default;
                        case 'd':
                            if (!inQuote)
                            {
                                Debug.Assert((field == 0 && sb.Length == 0) || field == 1,
                                    "field == 0 || field == 1, Bug in DateTimeFormatInfo.FullTimeSpan[Positive|Negative]Pattern");
                                field = 1; // DayHourSep
                                dd++;
                            }
                            break;
                        case 'h':
                            if (!inQuote)
                            {
                                Debug.Assert((field == 1 && sb.Length == 0) || field == 2,
                                    "field == 1 || field == 2, Bug in DateTimeFormatInfo.FullTimeSpan[Positive|Negative]Pattern");
                                field = 2; // HourMinuteSep
                                hh++;
                            }
                            break;
                        case 'm':
                            if (!inQuote)
                            {
                                Debug.Assert((field == 2 && sb.Length == 0) || field == 3,
                                    "field == 2 || field == 3, Bug in DateTimeFormatInfo.FullTimeSpan[Positive|Negative]Pattern");
                                field = 3; // MinuteSecondSep
                                mm++;
                            }
                            break;
                        case 's':
                            if (!inQuote)
                            {
                                Debug.Assert((field == 3 && sb.Length == 0) || field == 4,
                                    "field == 3 || field == 4, Bug in DateTimeFormatInfo.FullTimeSpan[Positive|Negative]Pattern");
                                field = 4; // SecondFractionSep
                                ss++;
                            }
                            break;
                        case 'f':
                        case 'F':
                            if (!inQuote)
                            {
                                Debug.Assert((field == 4 && sb.Length == 0) || field == 5,
                                    "field == 4 || field == 5, Bug in DateTimeFormatInfo.FullTimeSpan[Positive|Negative]Pattern");
                                field = 5; // End
                                ff++;
                            }
                            break;
                        default:
                            sb.Append(format[i]);
                            break;
                    }
                }
                Debug.Assert(field == 5);

                AppCompatLiteral = MinuteSecondSep + SecondFractionSep;

                Debug.Assert(0 < dd && dd < 3, "0 < dd && dd < 3, Bug in System.Globalization.DateTimeFormatInfo.FullTimeSpan[Positive|Negative]Pattern");
                Debug.Assert(0 < hh && hh < 3, "0 < hh && hh < 3, Bug in System.Globalization.DateTimeFormatInfo.FullTimeSpan[Positive|Negative]Pattern");
                Debug.Assert(0 < mm && mm < 3, "0 < mm && mm < 3, Bug in System.Globalization.DateTimeFormatInfo.FullTimeSpan[Positive|Negative]Pattern");
                Debug.Assert(0 < ss && ss < 3, "0 < ss && ss < 3, Bug in System.Globalization.DateTimeFormatInfo.FullTimeSpan[Positive|Negative]Pattern");
                Debug.Assert(0 < ff && ff < 8, "0 < ff && ff < 8, Bug in System.Globalization.DateTimeFormatInfo.FullTimeSpan[Positive|Negative]Pattern");

                if (useInvariantFieldLengths)
                {
                    dd = 2;
                    hh = 2;
                    mm = 2;
                    ss = 2;
                    ff = DateTimeFormat.MaxSecondsFractionDigits;
                }
                else
                {
                    if (dd < 1 || dd > 2) dd = 2;   // The DTFI property has a problem. let's try to make the best of the situation.
                    if (hh < 1 || hh > 2) hh = 2;
                    if (mm < 1 || mm > 2) mm = 2;
                    if (ss < 1 || ss > 2) ss = 2;
                    if (ff < 1 || ff > 7) ff = 7;
                }
                StringBuilderCache.Release(sb);
            }
        } //end of struct FormatLiterals
    }
}
/* ****************************************************************************
 *
 * Copyright (c) Microsoft Corporation.
 *
 * This source code is subject to terms and conditions of the Apache License, Version 2.0. A
 * copy of the license can be found in the License.html file at the root of this distribution. If
 * you cannot locate the  Apache License, Version 2.0, please send an email to
 * dlr@microsoft.com. By using this source code in any fashion, you are agreeing to be bound
 * by the terms of the Apache License, Version 2.0.
 *
 * You must not remove this notice, or any other, from this software.
 *
 *
 * ***************************************************************************/

using System;
using System.Collections.Generic;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;

using Microsoft.Scripting;
using Microsoft.Scripting.Runtime;
using Microsoft.Scripting.Utils;

using IronPython.Runtime;
using IronPython.Runtime.Operations;
using IronPython.Runtime.Types;

#if FEATURE_NUMERICS
using System.Numerics;
#else
using Microsoft.Scripting.Math;
using Complex = Microsoft.Scripting.Math.Complex64;
#endif

[assembly: PythonModule("copy_reg", typeof(IronPython.Modules.PythonCopyReg))]
namespace IronPython.Modules {
    [Documentation("Provides global reduction-function registration for pickling and copying objects.")]
    public static class PythonCopyReg {
        // Per-PythonContext module-state keys; each holds one of the module's dictionaries.
        private static readonly object _dispatchTableKey = new object();
        private static readonly object _extensionRegistryKey = new object();
        private static readonly object _invertedRegistryKey = new object();
        private static readonly object _extensionCacheKey = new object();

        // Maps type -> reduction function (Python's copy_reg.dispatch_table).
        internal static PythonDictionary GetDispatchTable(CodeContext/*!*/ context) {
            EnsureModuleInitialized(context);
            return (PythonDictionary)PythonContext.GetContext(context).GetModuleState(_dispatchTableKey);
        }

        // Maps (module, name) -> extension code (copy_reg._extension_registry).
        internal static PythonDictionary GetExtensionRegistry(CodeContext/*!*/ context) {
            EnsureModuleInitialized(context);
            return (PythonDictionary)PythonContext.GetContext(context).GetModuleState(_extensionRegistryKey);
        }

        // Maps extension code -> (module, name) (copy_reg._inverted_registry).
        internal static PythonDictionary GetInvertedRegistry(CodeContext/*!*/ context) {
            EnsureModuleInitialized(context);
            return (PythonDictionary)PythonContext.GetContext(context).GetModuleState(_invertedRegistryKey);
        }

        // Cache of resolved extension objects (copy_reg._extension_cache).
        internal static PythonDictionary GetExtensionCache(CodeContext/*!*/ context) {
            EnsureModuleInitialized(context);
            return (PythonDictionary)PythonContext.GetContext(context).GetModuleState(_extensionCacheKey);
        }

        #region Public API

        [Documentation("pickle(type, function[, constructor]) -> None\n\n"
            + "Associate function with type, indicating that function should be used to\n"
            + "\"reduce\" objects of the given type when pickling. function should behave as\n"
            + "specified by the \"Extended __reduce__ API\" section of PEP 307.\n"
            + "\n"
            + "Reduction functions registered by calling pickle() can be retrieved later\n"
            + "through copy_reg.dispatch_table[type].\n"
            + "\n"
            + "Note that calling pickle() will overwrite any previous association for the\n"
            + "given type.\n"
            + "\n"
            + "The constructor argument is ignored, and exists only for backwards\n"
            + "compatibility."
            )]
        public static void pickle(CodeContext/*!*/ context, object type, object function, [DefaultParameterValue(null)] object ctor) {
            EnsureCallable(context, function, "reduction functions must be callable");
            // ctor is only validated for callability (PEP 307 backwards compatibility); it is not stored.
            if (ctor != null) constructor(context, ctor);
            GetDispatchTable(context)[type] = function;
        }

        [Documentation("constructor(object) -> None\n\n"
            + "Raise TypeError if object isn't callable. This function exists only for\n"
            + "backwards compatibility; for details, see\n"
            + "http://mail.python.org/pipermail/python-dev/2006-June/066831.html."
            )]
        public static void constructor(CodeContext/*!*/ context, object callable) {
            EnsureCallable(context, callable, "constructors must be callable");
        }

        /// <summary>
        /// Throw TypeError with a specified message if object isn't callable.
        /// </summary>
        private static void EnsureCallable(CodeContext/*!*/ context, object @object, string message) {
            if (!PythonOps.IsCallable(context, @object)) {
                throw PythonOps.TypeError(message);
            }
        }

        [Documentation("pickle_complex(complex_number) -> (<type 'complex'>, (real, imag))\n\n"
            + "Reduction function for pickling complex numbers.")]
        public static PythonTuple pickle_complex(CodeContext context, object complex) {
            // Returns (complex, (real, imag)) so the unpickler can rebuild via complex(real, imag).
            return PythonTuple.MakeTuple(
                DynamicHelpers.GetPythonTypeFromType(typeof(Complex)),
                PythonTuple.MakeTuple(
                    PythonOps.GetBoundAttr(context, complex, "real"),
                    PythonOps.GetBoundAttr(context, complex, "imag")
                )
            );
        }

        public static void clear_extension_cache(CodeContext/*!*/ context) {
            GetExtensionCache(context).clear();
        }

        [Documentation("Register an extension code.")]
        public static void add_extension(CodeContext/*!*/ context, object moduleName, object objectName, object value) {
            PythonTuple key = PythonTuple.MakeTuple(moduleName, objectName);
            int code = GetCode(context, value);

            bool keyExists = GetExtensionRegistry(context).__contains__(key);
            bool codeExists = GetInvertedRegistry(context).__contains__(code);

            if (!keyExists && !codeExists) {
                // Fresh registration: record the mapping in both directions.
                GetExtensionRegistry(context)[key] = code;
                GetInvertedRegistry(context)[code] = key;
            } else if (keyExists && codeExists &&
                PythonOps.EqualRetBool(context, GetExtensionRegistry(context)[key], code) &&
                PythonOps.EqualRetBool(context, GetInvertedRegistry(context)[code], key)
            ) {
                // nop — re-registering the identical mapping is allowed.
            } else {
                if (keyExists) {
                    throw PythonOps.ValueError("key {0} is already registered with code {1}", PythonOps.Repr(context, key), PythonOps.Repr(context, GetExtensionRegistry(context)[key]));
                } else { // codeExists
                    throw PythonOps.ValueError("code {0} is already in use for key {1}", PythonOps.Repr(context, code), PythonOps.Repr(context, GetInvertedRegistry(context)[code]));
                }
            }
        }

        [Documentation("Unregister an extension code. (only for testing)")]
        public static void remove_extension(CodeContext/*!*/ context, object moduleName, object objectName, object value) {
            PythonTuple key = PythonTuple.MakeTuple(moduleName, objectName);
            int code = GetCode(context, value);

            object existingKey;
            object existingCode;

            // Only remove when both directions of the mapping agree with the arguments.
            if (((IDictionary<object, object>)GetExtensionRegistry(context)).TryGetValue(key, out existingCode) &&
                ((IDictionary<object, object>)GetInvertedRegistry(context)).TryGetValue(code, out existingKey) &&
                PythonOps.EqualRetBool(context, existingCode, code) &&
                PythonOps.EqualRetBool(context, existingKey, key)
            ) {
                GetExtensionRegistry(context).__delitem__(key);
                GetInvertedRegistry(context).__delitem__(code);
            } else {
                throw PythonOps.ValueError("key {0} is not registered with code {1}", PythonOps.Repr(context, key), PythonOps.Repr(context, code));
            }
        }

        [Documentation("__newobj__(cls, *args) -> cls.__new__(cls, *args)\n\n"
            + "Helper function for unpickling. Creates a new object of a given class.\n"
            + "See PEP 307 section \"The __newobj__ unpickling function\" for details."
            )]
        public static object __newobj__(CodeContext/*!*/ context, object cls, params object[] args) {
            // Prepend cls so the call becomes cls.__new__(cls, *args).
            object[] newArgs = new object[1 + args.Length];
            newArgs[0] = cls;
            for (int i = 0; i < args.Length; i++) newArgs[i + 1] = args[i];
            return PythonOps.Invoke(context, cls, "__new__", newArgs);
        }

        [Documentation("_reconstructor(basetype, objtype, basestate) -> object\n\n"
            + "Helper function for unpickling. Creates and initializes a new object of a given\n"
            + "class. See PEP 307 section \"Case 2: pickling new-style class instances using\n"
            + "protocols 0 or 1\" for details."
            )]
        public static object _reconstructor(CodeContext/*!*/ context, object objType, object baseType, object baseState) {
            // NOTE(review): parameter order here is (objType, baseType, baseState), while the
            // doc string lists (basetype, objtype, basestate) — confirm against CPython's
            // copy_reg._reconstructor before relying on either.
            object obj;
            if (baseState == null) {
                obj = PythonOps.Invoke(context, baseType, "__new__", objType);
                PythonOps.Invoke(context, baseType, "__init__", obj);
            } else {
                obj = PythonOps.Invoke(context, baseType, "__new__", objType, baseState);
                PythonOps.Invoke(context, baseType, "__init__", obj, baseState);
            }
            return obj;
        }

        #endregion

        #region Private implementation

        /// <summary>
        /// Convert object to a positive int extension code, throwing ValueError if it is
        /// non-positive or does not fit in an Int32. (The original comment said "ushort",
        /// but the code accepts any positive Int32.)
        /// </summary>
        private static int GetCode(CodeContext/*!*/ context, object value) {
            try {
                int intValue = PythonContext.GetContext(context).ConvertToInt32(value);
                if (intValue > 0) return intValue;
                // fall through and throw below
            } catch (OverflowException) {
                // throw below
            }

            throw PythonOps.ValueError("code out of range");
        }

        #endregion

        // Lazily imports the copy_reg module so PerformModuleReload has populated module state.
        private static void EnsureModuleInitialized(CodeContext context) {
            if (!PythonContext.GetContext(context).HasModuleState(_dispatchTableKey)) {
                Importer.ImportBuiltin(context, "copy_reg");
            }
        }

        [SpecialName]
        public static void PerformModuleReload(PythonContext/*!*/ context, PythonDictionary/*!*/ dict) {
            // Wire up the helpers the pickler uses directly...
            context.NewObject = (BuiltinFunction)dict["__newobj__"];
            context.PythonReconstructor = (BuiltinFunction)dict["_reconstructor"];

            // ...then publish the module-level dictionaries, seeding the dispatch table
            // with the complex-number reduction function.
            PythonDictionary dispatchTable = new PythonDictionary();
            dispatchTable[TypeCache.Complex] = dict["pickle_complex"];

            context.SetModuleState(_dispatchTableKey, dict["dispatch_table"] = dispatchTable);
            context.SetModuleState(_extensionRegistryKey, dict["_extension_registry"] = new PythonDictionary());
            context.SetModuleState(_invertedRegistryKey, dict["_inverted_registry"] = new PythonDictionary());
            context.SetModuleState(_extensionCacheKey, dict["_extension_cache"] = new PythonDictionary());
        }
    }
}
using System;
using System.Text;
using System.Data;
using System.Data.SqlClient;
using System.Data.Common;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Configuration;
using System.Xml;
using System.Xml.Serialization;
using SubSonic;
using SubSonic.Utilities;

namespace DalSic
{
    /// <summary>
    /// Strongly-typed collection for the ConMotivosRechazo class.
    /// (SubSonic generated code.)
    /// </summary>
    [Serializable]
    public partial class ConMotivosRechazoCollection : ActiveList<ConMotivosRechazo, ConMotivosRechazoCollection>
    {
        public ConMotivosRechazoCollection() {}

        /// <summary>
        /// Filters an existing collection based on the set criteria. This is an in-memory filter
        /// Thanks to developingchris for this!
        /// NOTE(review): only SubSonic.Comparison.Equals is handled here; records with other
        /// comparison kinds in 'wheres' are never removed — appears intentional in the
        /// generated template, confirm before extending.
        /// </summary>
        /// <returns>ConMotivosRechazoCollection</returns>
        public ConMotivosRechazoCollection Filter()
        {
            // Iterate backwards so Remove() doesn't disturb unvisited indices.
            for (int i = this.Count - 1; i > -1; i--)
            {
                ConMotivosRechazo o = this[i];
                foreach (SubSonic.Where w in this.wheres)
                {
                    bool remove = false;
                    System.Reflection.PropertyInfo pi = o.GetType().GetProperty(w.ColumnName);
                    if (pi.CanRead)
                    {
                        object val = pi.GetValue(o, null);
                        switch (w.Comparison)
                        {
                            case SubSonic.Comparison.Equals:
                                if (!val.Equals(w.ParameterValue))
                                {
                                    remove = true;
                                }
                                break;
                        }
                    }

                    if (remove)
                    {
                        this.Remove(o);
                        // Once removed, stop evaluating further criteria for this record.
                        break;
                    }
                }
            }

            return this;
        }
    }
    /// <summary>
    /// This is an ActiveRecord class which wraps the CON_MotivosRechazo table.
    /// </summary>
    // Generated SubSonic ActiveRecord wrapper for dbo.CON_MotivosRechazo
    // (identity PK idMotivoRechazo, nvarchar(500) nombre).
    [Serializable]
    public partial class ConMotivosRechazo : ActiveRecord<ConMotivosRechazo>, IActiveRecord
    {
        #region .ctors and Default Settings

        // Default ctor: wires the schema, applies code-side defaults, marks the
        // record as new so Save() performs an INSERT.
        public ConMotivosRechazo()
        {
            SetSQLProps();
            InitSetDefaults();
            MarkNew();
        }

        private void InitSetDefaults() { SetDefaults(); }

        // useDatabaseDefaults: when true, column defaults are pulled from the
        // database (ForceDefaults) instead of the code-side defaults.
        public ConMotivosRechazo(bool useDatabaseDefaults)
        {
            SetSQLProps();
            if(useDatabaseDefaults)
                ForceDefaults();
            MarkNew();
        }

        // Loads an existing row by primary key value.
        public ConMotivosRechazo(object keyID)
        {
            SetSQLProps();
            InitSetDefaults();
            LoadByKey(keyID);
        }

        // Loads an existing row by an arbitrary column/value pair.
        public ConMotivosRechazo(string columnName, object columnValue)
        {
            SetSQLProps();
            InitSetDefaults();
            LoadByParam(columnName,columnValue);
        }

        protected static void SetSQLProps() { GetTableSchema(); }

        #endregion

        #region Schema and Query Accessor

        public static Query CreateQuery() { return new Query(Schema); }

        public static TableSchema.Table Schema
        {
            get
            {
                // Lazily initialize the shared schema on first access.
                if (BaseSchema == null)
                    SetSQLProps();
                return BaseSchema;
            }
        }

        // Builds the static table schema once per AppDomain and registers it
        // with the "sicProvider" data provider. Column order matters: the
        // typed-column accessors below index Schema.Columns positionally.
        private static void GetTableSchema()
        {
            if(!IsSchemaInitialized)
            {
                //Schema declaration
                TableSchema.Table schema = new TableSchema.Table("CON_MotivosRechazo", TableType.Table, DataService.GetInstance("sicProvider"));
                schema.Columns = new TableSchema.TableColumnCollection();
                schema.SchemaName = @"dbo";
                //columns

                // Column 0: idMotivoRechazo — identity primary key.
                TableSchema.TableColumn colvarIdMotivoRechazo = new TableSchema.TableColumn(schema);
                colvarIdMotivoRechazo.ColumnName = "idMotivoRechazo";
                colvarIdMotivoRechazo.DataType = DbType.Int32;
                colvarIdMotivoRechazo.MaxLength = 0;
                colvarIdMotivoRechazo.AutoIncrement = true;
                colvarIdMotivoRechazo.IsNullable = false;
                colvarIdMotivoRechazo.IsPrimaryKey = true;
                colvarIdMotivoRechazo.IsForeignKey = false;
                colvarIdMotivoRechazo.IsReadOnly = false;
                colvarIdMotivoRechazo.DefaultSetting = @"";
                colvarIdMotivoRechazo.ForeignKeyTableName = "";
                schema.Columns.Add(colvarIdMotivoRechazo);

                // Column 1: nombre — required string, max length 500.
                TableSchema.TableColumn colvarNombre = new TableSchema.TableColumn(schema);
                colvarNombre.ColumnName = "nombre";
                colvarNombre.DataType = DbType.String;
                colvarNombre.MaxLength = 500;
                colvarNombre.AutoIncrement = false;
                colvarNombre.IsNullable = false;
                colvarNombre.IsPrimaryKey = false;
                colvarNombre.IsForeignKey = false;
                colvarNombre.IsReadOnly = false;
                colvarNombre.DefaultSetting = @"";
                colvarNombre.ForeignKeyTableName = "";
                schema.Columns.Add(colvarNombre);

                BaseSchema = schema;
                //add this schema to the provider
                //so we can query it later
                DataService.Providers["sicProvider"].AddSchema("CON_MotivosRechazo",schema);
            }
        }
        #endregion

        #region Props

        [XmlAttribute("IdMotivoRechazo")]
        [Bindable(true)]
        public int IdMotivoRechazo
        {
            get { return GetColumnValue<int>(Columns.IdMotivoRechazo); }
            set { SetColumnValue(Columns.IdMotivoRechazo, value); }
        }

        [XmlAttribute("Nombre")]
        [Bindable(true)]
        public string Nombre
        {
            get { return GetColumnValue<string>(Columns.Nombre); }
            set { SetColumnValue(Columns.Nombre, value); }
        }

        #endregion

        //no foreign key tables defined (0)

        //no ManyToMany tables defined (0)

        #region ObjectDataSource support

        /// <summary>
        /// Inserts a record, can be used with the Object Data Source
        /// </summary>
        public static void Insert(string varNombre)
        {
            ConMotivosRechazo item = new ConMotivosRechazo();
            item.Nombre = varNombre;

            // Audit user: web identity when in an HTTP request, thread principal otherwise.
            if (System.Web.HttpContext.Current != null)
                item.Save(System.Web.HttpContext.Current.User.Identity.Name);
            else
                item.Save(System.Threading.Thread.CurrentPrincipal.Identity.Name);
        }

        /// <summary>
        /// Updates a record, can be used with the Object Data Source
        /// </summary>
        public static void Update(int varIdMotivoRechazo,string varNombre)
        {
            ConMotivosRechazo item = new ConMotivosRechazo();
            item.IdMotivoRechazo = varIdMotivoRechazo;
            item.Nombre = varNombre;
            // IsNew = false forces an UPDATE rather than an INSERT on Save().
            item.IsNew = false;

            if (System.Web.HttpContext.Current != null)
                item.Save(System.Web.HttpContext.Current.User.Identity.Name);
            else
                item.Save(System.Threading.Thread.CurrentPrincipal.Identity.Name);
        }

        #endregion

        #region Typed Columns

        // Positional indexes must match the Add order in GetTableSchema().
        public static TableSchema.TableColumn IdMotivoRechazoColumn
        {
            get { return Schema.Columns[0]; }
        }

        public static TableSchema.TableColumn NombreColumn
        {
            get { return Schema.Columns[1]; }
        }

        #endregion

        #region Columns Struct

        // Raw database column names for use in queries.
        public struct Columns
        {
            public static string IdMotivoRechazo = @"idMotivoRechazo";
            public static string Nombre = @"nombre";
        }

        #endregion

        #region Update PK Collections
        #endregion

        #region Deep Save
        #endregion
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Xunit;

#pragma warning disable 1998 // Async method with no "await" operators.

namespace System.Threading.Tests
{
    // Tests for AsyncLocal<T> value flow across ExecutionContext capture/run and
    // async method boundaries.
    //
    // FIX (xUnit2000): Assert.Equal takes (expected, actual); throughout this class
    // the arguments were reversed, which produces misleading failure messages.
    // All call sites below are corrected; test behavior is otherwise unchanged.
    public static class AsyncLocalTests
    {
        [Fact]
        public static async Task ValueProperty()
        {
            AsyncLocal<int> local = new AsyncLocal<int>();
            Assert.Equal(0, local.Value);

            local.Value = 1;
            Assert.Equal(1, local.Value);

            local.Value = 0;
            Assert.Equal(0, local.Value);
        }

        [Fact]
        public static async Task CaptureAndRestore()
        {
            AsyncLocal<int> local = new AsyncLocal<int>();

            local.Value = 42;
            ExecutionContext ec = ExecutionContext.Capture();

            local.Value = 12;

            ExecutionContext.Run(
                ec,
                _ =>
                {
                    // Inside the captured context the value at capture time is visible.
                    Assert.Equal(42, local.Value);
                    local.Value = 56;
                },
                null);

            // Changes made inside Run do not leak back out.
            Assert.Equal(12, local.Value);
        }

        [Fact]
        public static async Task CaptureAndRestoreEmptyContext()
        {
            AsyncLocal<int> local = new AsyncLocal<int>();

            ExecutionContext ec = ExecutionContext.Capture();

            local.Value = 12;

            ExecutionContext.Run(
                ec,
                _ =>
                {
                    // Captured before any value was set, so the default is visible.
                    Assert.Equal(0, local.Value);
                    local.Value = 56;
                },
                null);

            Assert.Equal(12, local.Value);
        }

        [Theory]
        [MemberData(nameof(GetCounts))]
        public static async Task CaptureAndRestoreNullAsyncLocals(int count)
        {
            AsyncLocal<object>[] locals = new AsyncLocal<object>[count];
            for (var i = 0; i < locals.Length; i++)
            {
                locals[i] = new AsyncLocal<object>();
            }

            ExecutionContext ec = ExecutionContext.Capture();

            ExecutionContext.Run(
                ec,
                _ =>
                {
                    for (var i = 0; i < locals.Length; i++)
                    {
                        AsyncLocal<object> local = locals[i];
                        Assert.Null(local.Value);

                        local.Value = 56;
                        Assert.IsType<int>(local.Value);
                        Assert.Equal(56, (int)local.Value);
                    }
                },
                null);

            // Values set inside Run do not leak into the outer context.
            for (var i = 0; i < locals.Length; i++)
            {
                Assert.Null(locals[i].Value);
            }
        }

        [Fact]
        public static async Task CaptureAndRunOnFlowSupressedContext()
        {
            ExecutionContext.SuppressFlow();
            try
            {
                // Capture under suppressed flow yields a context that cannot be Run.
                ExecutionContext ec = ExecutionContext.Capture();
                Assert.Throws<InvalidOperationException>(() => ExecutionContext.Run(ec, _ => { }, null));
            }
            finally
            {
                ExecutionContext.RestoreFlow();
            }
        }

        [Fact]
        public static async Task NotifyOnValuePropertyChange()
        {
            bool expectThreadContextChange = false;
            int expectedPreviousValue = 0;
            int expectedCurrentValue = 1;
            bool gotNotification = false;
            bool expectNotification = false;

            AsyncLocal<int> local = new AsyncLocal<int>(
                args =>
                {
                    gotNotification = true;

                    Assert.True(expectNotification);
                    expectNotification = false;

                    Assert.Equal(expectThreadContextChange, args.ThreadContextChanged);
                    Assert.Equal(expectedPreviousValue, args.PreviousValue);
                    Assert.Equal(expectedCurrentValue, args.CurrentValue);
                });

            expectNotification = true;
            local.Value = 1;

            Assert.True(gotNotification);

            // Expectations for the implicit reset notification raised when the
            // test method's execution context unwinds after return.
            expectNotification = true;
            expectThreadContextChange = true;
            expectedPreviousValue = local.Value;
            expectedCurrentValue = 0;
            return;
        }

        [Fact]
        public static async Task NotifyOnThreadContextChange()
        {
            bool expectThreadContextChange = false;
            int expectedPreviousValue = 0;
            int expectedCurrentValue = 1;
            bool gotNotification = false;
            bool expectNotification = false;

            AsyncLocal<int> local = new AsyncLocal<int>(
                args =>
                {
                    gotNotification = true;

                    Assert.True(expectNotification);
                    expectNotification = false;

                    Assert.Equal(expectThreadContextChange, args.ThreadContextChanged);
                    Assert.Equal(expectedPreviousValue, args.PreviousValue);
                    Assert.Equal(expectedCurrentValue, args.CurrentValue);
                });

            expectNotification = true;
            local.Value = 1;

            Assert.True(gotNotification);
            gotNotification = false;

            ExecutionContext ec = ExecutionContext.Capture();

            expectNotification = true;
            expectedPreviousValue = 1;
            expectedCurrentValue = 2;
            local.Value = 2;

            Assert.True(gotNotification);
            gotNotification = false;

            // Entering the captured context changes 2 -> 1 via a thread-context change.
            expectNotification = true;
            expectedPreviousValue = 2;
            expectedCurrentValue = 1;
            expectThreadContextChange = true;

            ExecutionContext.Run(
                ec,
                _ =>
                {
                    Assert.True(gotNotification);
                    gotNotification = false;

                    Assert.Equal(1, local.Value);

                    expectNotification = true;
                    expectedPreviousValue = 1;
                    expectedCurrentValue = 3;
                    expectThreadContextChange = false;
                    local.Value = 3;

                    Assert.True(gotNotification);
                    gotNotification = false;

                    // Leaving Run changes 3 -> 2 via a thread-context change.
                    expectNotification = true;
                    expectedPreviousValue = 3;
                    expectedCurrentValue = 2;
                    expectThreadContextChange = true;
                    return;
                },
                null);

            Assert.True(gotNotification);
            gotNotification = false;

            Assert.Equal(2, local.Value);

            // Expectations for the final unwind after the test returns.
            expectNotification = true;
            expectThreadContextChange = true;
            expectedPreviousValue = local.Value;
            expectedCurrentValue = 0;
            return;
        }

        [Fact]
        public static async Task NotifyOnThreadContextChangeWithOneEmptyContext()
        {
            bool expectThreadContextChange = false;
            int expectedPreviousValue = 0;
            int expectedCurrentValue = 1;
            bool gotNotification = false;
            bool expectNotification = false;

            AsyncLocal<int> local = new AsyncLocal<int>(
                args =>
                {
                    gotNotification = true;

                    Assert.True(expectNotification);
                    expectNotification = false;

                    Assert.Equal(expectThreadContextChange, args.ThreadContextChanged);
                    Assert.Equal(expectedPreviousValue, args.PreviousValue);
                    Assert.Equal(expectedCurrentValue, args.CurrentValue);
                });

            ExecutionContext ec = ExecutionContext.Capture();

            expectNotification = true;
            expectedPreviousValue = 0;
            expectedCurrentValue = 1;
            local.Value = 1;

            Assert.True(gotNotification);
            gotNotification = false;

            // Entering the (empty) captured context resets 1 -> 0.
            expectNotification = true;
            expectedPreviousValue = 1;
            expectedCurrentValue = 0;
            expectThreadContextChange = true;

            ExecutionContext.Run(
                ec,
                _ =>
                {
                    Assert.True(gotNotification);
                    gotNotification = false;

                    Assert.Equal(0, local.Value);

                    // Leaving Run restores 0 -> 1.
                    expectNotification = true;
                    expectedPreviousValue = 0;
                    expectedCurrentValue = 1;
                    expectThreadContextChange = true;
                    return;
                },
                null);

            Assert.True(gotNotification);
            gotNotification = false;

            Assert.Equal(1, local.Value);

            expectNotification = true;
            expectThreadContextChange = true;
            expectedPreviousValue = local.Value;
            expectedCurrentValue = 0;
            return;
        }

        // helper to make it easy to start an anonymous async method on the current thread.
        private static Task Run(Func<Task> func)
        {
            return func();
        }

        [Fact]
        public static async Task AsyncMethodNotifications()
        {
            //
            // Define thread-local and async-local values. The async-local value uses its notification
            // to keep the thread-local value in sync with the async-local value.
            //
            ThreadLocal<int> tls = new ThreadLocal<int>();
            AsyncLocal<int> als = new AsyncLocal<int>(args =>
            {
                tls.Value = args.CurrentValue;
            });

            Assert.Equal(als.Value, tls.Value);

            als.Value = 1;
            Assert.Equal(als.Value, tls.Value);

            als.Value = 2;
            Assert.Equal(als.Value, tls.Value);

            await Run(async () =>
            {
                Assert.Equal(als.Value, tls.Value);
                Assert.Equal(2, als.Value);
                als.Value = 3;

                Assert.Equal(als.Value, tls.Value);

                Task t = Run(async () =>
                {
                    Assert.Equal(als.Value, tls.Value);
                    Assert.Equal(3, als.Value);

                    als.Value = 4;

                    Assert.Equal(als.Value, tls.Value);
                    Assert.Equal(4, als.Value);

                    await Task.Run(() =>
                    {
                        Assert.Equal(als.Value, tls.Value);
                        Assert.Equal(4, als.Value);

                        als.Value = 5;

                        Assert.Equal(als.Value, tls.Value);
                        Assert.Equal(5, als.Value);
                    });

                    // Back on the original flow: the Task.Run change did not leak.
                    Assert.Equal(als.Value, tls.Value);
                    Assert.Equal(4, als.Value);

                    als.Value = 6;

                    Assert.Equal(als.Value, tls.Value);
                    Assert.Equal(6, als.Value);
                });

                Assert.Equal(als.Value, tls.Value);
                Assert.Equal(3, als.Value);

                await Task.Yield();

                Assert.Equal(als.Value, tls.Value);
                Assert.Equal(3, als.Value);

                await t;

                Assert.Equal(als.Value, tls.Value);
                Assert.Equal(3, als.Value);
            });

            Assert.Equal(als.Value, tls.Value);
            Assert.Equal(2, als.Value);
        }

        [Fact]
        public static async Task SetValueFromNotification()
        {
            int valueToSet = 0;
            AsyncLocal<int> local = null;
            local = new AsyncLocal<int>(args =>
            {
                // Re-setting the value from inside a thread-context-change
                // notification overrides the restored value.
                if (args.ThreadContextChanged)
                    local.Value = valueToSet;
            });

            valueToSet = 2;
            local.Value = 1;
            Assert.Equal(1, local.Value);

            await Run(async () =>
            {
                local.Value = 3;
                valueToSet = 4;
            });

            Assert.Equal(4, local.Value);
        }

        [Fact]
        public static async Task ExecutionContextCopyOnWrite()
        {
            AsyncLocal<int> local = new AsyncLocal<int>();

            local.Value = 42;

            await Run(async () =>
            {
                // Mutating the context inside forces a copy, so the outer
                // context's value is unaffected by the write below.
                SynchronizationContext.SetSynchronizationContext(new SynchronizationContext());
                Assert.Equal(42, local.Value);

                local.Value = 12;
            });

            Assert.Equal(42, local.Value);
        }

        [Theory]
        [MemberData(nameof(GetCounts))]
        public static async Task AddAndUpdateManyLocals_ValueType(int count)
        {
            var locals = new AsyncLocal<int>[count];
            for (int i = 0; i < locals.Length; i++)
            {
                locals[i] = new AsyncLocal<int>();
                locals[i].Value = i;

                // Re-verify and round-trip every earlier local each iteration.
                for (int j = 0; j <= i; j++)
                {
                    Assert.Equal(j, locals[j].Value);
                    locals[j].Value = j + 1;
                    Assert.Equal(j + 1, locals[j].Value);
                    locals[j].Value = j;
                    Assert.Equal(j, locals[j].Value);
                }
            }
        }

        [Theory]
        [MemberData(nameof(GetCounts))]
        public static async Task AddUpdateAndRemoveManyLocals_ReferenceType(int count)
        {
            var locals = new AsyncLocal<string>[count];

            for (int i = 0; i < locals.Length; i++)
            {
                locals[i] = new AsyncLocal<string>();
                locals[i].Value = i.ToString();

                for (int j = 0; j <= i; j++)
                {
                    Assert.Equal(j.ToString(), locals[j].Value);
                    locals[j].Value = (j + 1).ToString();
                    Assert.Equal((j + 1).ToString(), locals[j].Value);
                    locals[j].Value = j.ToString();
                    Assert.Equal(j.ToString(), locals[j].Value);
                }
            }

            // Null out each local in turn; later locals must keep their values.
            for (int i = 0; i < locals.Length; i++)
            {
                locals[i].Value = null;
                Assert.Null(locals[i].Value);

                for (int j = i + 1; j < locals.Length; j++)
                {
                    Assert.Equal(j.ToString(), locals[j].Value);
                }
            }
        }

        [Theory]
        [MemberData(nameof(GetCounts))]
        public static async Task AsyncLocalsUnwind(int count)
        {
            AsyncLocal<object>[] asyncLocals = new AsyncLocal<object>[count];
            ExecutionContext Default = ExecutionContext.Capture();

            int[] manuallySetCounts = new int[count];
            int[] automaticallyUnsetCounts = new int[count];
            int[] automaticallySetCounts = new int[count];
            ExecutionContext[] capturedContexts = new ExecutionContext[count];

            // Setup the AsyncLocals; capturing ExecutionContext for each level
            await SetLocalsRecursivelyAsync(count - 1);

            ValidateCounts(thresholdIndex: 0, manualSets: 1, automaticUnsets: 1, automaticSets: 0);
            ValidateAsyncLocalsValuesNull();

            // Check Running with the contexts captured when setting the locals
            TestCapturedExecutionContexts();

            ExecutionContext.SuppressFlow();
            try
            {
                // Re-check restoring, but starting with a suppressed flow
                TestCapturedExecutionContexts();
            }
            finally
            {
                ExecutionContext.RestoreFlow();
            }

            // -- Local functions --
            void ValidateAsyncLocalsValuesNull()
            {
                // Check AsyncLocals haven't leaked
                for (int i = 0; i < asyncLocals.Length; i++)
                {
                    Assert.Null(asyncLocals[i].Value);
                }
            }

            void ValidateAsyncLocalsValues(int thresholdIndex)
            {
                for (int localsIndex = 0; localsIndex < asyncLocals.Length; localsIndex++)
                {
                    if (localsIndex >= thresholdIndex)
                    {
                        Assert.Equal(localsIndex, (int)asyncLocals[localsIndex].Value);
                    }
                    else
                    {
                        Assert.Null(asyncLocals[localsIndex].Value);
                    }
                }
            }

            void TestCapturedExecutionContexts()
            {
                for (int contextIndex = 0; contextIndex < asyncLocals.Length; contextIndex++)
                {
                    ClearCounts();
                    ExecutionContext.Run(
                        capturedContexts[contextIndex].CreateCopy(),
                        (o) => TestCapturedExecutionContext((int)o),
                        contextIndex);

                    // Validate locals have been restored to the Default context's values
                    ValidateAsyncLocalsValuesNull();
                }
            }

            void TestCapturedExecutionContext(int contextIndex)
            {
                ValidateCounts(thresholdIndex: contextIndex, manualSets: 0, automaticUnsets: 0, automaticSets: 1);

                // Validate locals have been restored to the outer context's values
                ValidateAsyncLocalsValues(thresholdIndex: contextIndex);

                // Validate locals are correctly reset Running with a Default context from a non-Default context
                ExecutionContext.Run(
                    Default.CreateCopy(),
                    _ => ValidateAsyncLocalsValuesNull(),
                    null);

                ValidateCounts(thresholdIndex: contextIndex, manualSets: 0, automaticUnsets: 1, automaticSets: 2);

                // Validate locals have been restored to the outer context's values
                ValidateAsyncLocalsValues(thresholdIndex: contextIndex);

                for (int innerContextIndex = 0; innerContextIndex < asyncLocals.Length; innerContextIndex++)
                {
                    // Validate locals are correctly restored Running with another non-Default context from a non-Default context
                    ExecutionContext.Run(
                        capturedContexts[innerContextIndex].CreateCopy(),
                        o => ValidateAsyncLocalsValues(thresholdIndex: (int)o),
                        innerContextIndex);

                    // Validate locals have been restored to the outer context's values
                    ValidateAsyncLocalsValues(thresholdIndex: contextIndex);
                }
            }

            // FIX: parameter "maunalSets" renamed to "manualSets" (typo); named
            // arguments at the call sites updated to match.
            void ValidateCounts(int thresholdIndex, int manualSets, int automaticUnsets, int automaticSets)
            {
                for (int localsIndex = 0; localsIndex < asyncLocals.Length; localsIndex++)
                {
                    Assert.Equal(localsIndex < thresholdIndex ? 0 : manualSets, manuallySetCounts[localsIndex]);
                    Assert.Equal(localsIndex < thresholdIndex ? 0 : automaticUnsets, automaticallyUnsetCounts[localsIndex]);
                    Assert.Equal(localsIndex < thresholdIndex ? 0 : automaticSets, automaticallySetCounts[localsIndex]);
                }
            }

            // Synchronous function is async to create different ExecutionContexts for each set, and check async unwinding
            async Task SetLocalsRecursivelyAsync(int index)
            {
                // Set AsyncLocal
                asyncLocals[index] = new AsyncLocal<object>(CountValueChanges)
                {
                    Value = index
                };

                // Capture context with AsyncLocal set
                capturedContexts[index] = ExecutionContext.Capture();

                if (index > 0)
                {
                    // Go deeper into async stack
                    int nextIndex = index - 1;
                    await SetLocalsRecursivelyAsync(index - 1);
                    // Set is undone by the await
                    Assert.Null(asyncLocals[nextIndex].Value);
                }
            }

            void CountValueChanges(AsyncLocalValueChangedArgs<object> args)
            {
                if (!args.ThreadContextChanged)
                {
                    // Manual create, previous should be null
                    Assert.Null(args.PreviousValue);
                    Assert.IsType<int>(args.CurrentValue);
                    manuallySetCounts[(int)args.CurrentValue]++;
                }
                else
                {
                    // Automatic change, only one value should be not null
                    if (args.CurrentValue != null)
                    {
                        Assert.Null(args.PreviousValue);
                        Assert.IsType<int>(args.CurrentValue);
                        automaticallySetCounts[(int)args.CurrentValue]++;
                    }
                    else
                    {
                        Assert.Null(args.CurrentValue);
                        Assert.NotNull(args.PreviousValue);
                        Assert.IsType<int>(args.PreviousValue);
                        automaticallyUnsetCounts[(int)args.PreviousValue]++;
                    }
                }
            }

            void ClearCounts()
            {
                Array.Clear(manuallySetCounts, 0, count);
                Array.Clear(automaticallyUnsetCounts, 0, count);
                Array.Clear(automaticallySetCounts, 0, count);
            }
        }

        // The data structure that holds AsyncLocals changes based on size;
        // so it needs to be tested at a variety of sizes
        public static IEnumerable<object[]> GetCounts() =>
            Enumerable.Range(1, 40).Select(i => new object[] { i });
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Globalization;
using System.Runtime.CompilerServices;
using System.Text;

namespace System.Numerics
{
    /// <summary>
    /// A structure encapsulating two single precision floating point values and provides hardware accelerated methods.
    /// </summary>
    [Intrinsic]
    public partial struct Vector2 : IEquatable<Vector2>, IFormattable
    {
        #region Public Static Properties

        /// <summary>
        /// Returns the vector (0,0).
        /// </summary>
        public static Vector2 Zero
        {
            [Intrinsic]
            get => new Vector2();
        }

        /// <summary>
        /// Returns the vector (1,1).
        /// </summary>
        public static Vector2 One
        {
            [Intrinsic]
            get => new Vector2(1.0f, 1.0f);
        }

        /// <summary>
        /// Returns the vector (1,0).
        /// </summary>
        public static Vector2 UnitX => new Vector2(1.0f, 0.0f);

        /// <summary>
        /// Returns the vector (0,1).
        /// </summary>
        public static Vector2 UnitY => new Vector2(0.0f, 1.0f);

        #endregion Public Static Properties

        #region Public instance methods

        /// <summary>
        /// Returns the hash code for this instance.
        /// </summary>
        /// <returns>The hash code.</returns>
        public override readonly int GetHashCode() =>
            HashCode.Combine(X.GetHashCode(), Y.GetHashCode());

        /// <summary>
        /// Returns a boolean indicating whether the given Object is equal to this Vector2 instance.
        /// </summary>
        /// <param name="obj">The Object to compare against.</param>
        /// <returns>True if the Object is equal to this Vector2; False otherwise.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public override readonly bool Equals(object? obj) =>
            obj is Vector2 other && Equals(other);

        /// <summary>
        /// Returns a String representing this Vector2 instance.
        /// </summary>
        /// <returns>The string representation.</returns>
        public override readonly string ToString() =>
            ToString("G", CultureInfo.CurrentCulture);

        /// <summary>
        /// Returns a String representing this Vector2 instance, using the specified format to format individual elements.
        /// </summary>
        /// <param name="format">The format of individual elements.</param>
        /// <returns>The string representation.</returns>
        public readonly string ToString(string? format) =>
            ToString(format, CultureInfo.CurrentCulture);

        /// <summary>
        /// Returns a String representing this Vector2 instance, using the specified format to format individual elements
        /// and the given IFormatProvider.
        /// </summary>
        /// <param name="format">The format of individual elements.</param>
        /// <param name="formatProvider">The format provider to use when formatting elements.</param>
        /// <returns>The string representation, e.g. "&lt;1, 2&gt;".</returns>
        public readonly string ToString(string? format, IFormatProvider? formatProvider)
        {
            // Elements are separated by the culture's numeric group separator.
            string separator = NumberFormatInfo.GetInstance(formatProvider).NumberGroupSeparator;
            return $"<{X.ToString(format, formatProvider)}{separator} {Y.ToString(format, formatProvider)}>";
        }

        /// <summary>
        /// Returns the length of the vector.
        /// </summary>
        /// <returns>The vector's length.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public readonly float Length()
        {
            if (Vector.IsHardwareAccelerated)
            {
                return MathF.Sqrt(Dot(this, this));
            }

            float lengthSquared = X * X + Y * Y;
            return MathF.Sqrt(lengthSquared);
        }

        /// <summary>
        /// Returns the length of the vector squared. This operation is cheaper than Length().
        /// </summary>
        /// <returns>The vector's length squared.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public readonly float LengthSquared() =>
            Vector.IsHardwareAccelerated ? Dot(this, this) : X * X + Y * Y;

        #endregion Public Instance Methods

        #region Public Static Methods

        /// <summary>
        /// Returns the Euclidean distance between the two given points.
        /// </summary>
        /// <param name="value1">The first point.</param>
        /// <param name="value2">The second point.</param>
        /// <returns>The distance.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public static float Distance(Vector2 value1, Vector2 value2)
        {
            if (Vector.IsHardwareAccelerated)
            {
                Vector2 delta = value1 - value2;
                return MathF.Sqrt(Dot(delta, delta));
            }

            float dx = value1.X - value2.X;
            float dy = value1.Y - value2.Y;
            return MathF.Sqrt(dx * dx + dy * dy);
        }

        /// <summary>
        /// Returns the Euclidean distance squared between the two given points.
        /// </summary>
        /// <param name="value1">The first point.</param>
        /// <param name="value2">The second point.</param>
        /// <returns>The distance squared.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public static float DistanceSquared(Vector2 value1, Vector2 value2)
        {
            if (Vector.IsHardwareAccelerated)
            {
                Vector2 delta = value1 - value2;
                return Dot(delta, delta);
            }

            float dx = value1.X - value2.X;
            float dy = value1.Y - value2.Y;
            return dx * dx + dy * dy;
        }

        /// <summary>
        /// Returns a vector with the same direction as the given vector, but with a length of 1.
        /// </summary>
        /// <param name="value">The vector to normalize.</param>
        /// <returns>The normalized vector.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public static Vector2 Normalize(Vector2 value)
        {
            if (Vector.IsHardwareAccelerated)
            {
                return value / value.Length();
            }

            float lengthSquared = value.X * value.X + value.Y * value.Y;
            float inverseLength = 1.0f / MathF.Sqrt(lengthSquared);
            return new Vector2(value.X * inverseLength, value.Y * inverseLength);
        }

        /// <summary>
        /// Returns the reflection of a vector off a surface that has the specified normal.
        /// </summary>
        /// <param name="vector">The source vector.</param>
        /// <param name="normal">The normal of the surface being reflected off.</param>
        /// <returns>The reflected vector.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public static Vector2 Reflect(Vector2 vector, Vector2 normal)
        {
            if (Vector.IsHardwareAccelerated)
            {
                float dot = Dot(vector, normal);
                return vector - (2 * dot * normal);
            }

            float projection = vector.X * normal.X + vector.Y * normal.Y;
            return new Vector2(
                vector.X - 2.0f * projection * normal.X,
                vector.Y - 2.0f * projection * normal.Y);
        }

        /// <summary>
        /// Restricts a vector between a min and max value.
        /// </summary>
        /// <param name="value1">The source vector.</param>
        /// <param name="min">The minimum value.</param>
        /// <param name="max">The maximum value.</param>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public static Vector2 Clamp(Vector2 value1, Vector2 min, Vector2 max)
        {
            // This compare order is very important!!!
            // We must follow HLSL behavior in the case user specified min value is bigger than max value.
            float clampedX = value1.X;
            clampedX = (min.X > clampedX) ? min.X : clampedX; // max(x, minx)
            clampedX = (max.X < clampedX) ? max.X : clampedX; // min(x, maxx)

            float clampedY = value1.Y;
            clampedY = (min.Y > clampedY) ? min.Y : clampedY; // max(y, miny)
            clampedY = (max.Y < clampedY) ? max.Y : clampedY; // min(y, maxy)

            return new Vector2(clampedX, clampedY);
        }

        /// <summary>
        /// Linearly interpolates between two vectors based on the given weighting.
        /// </summary>
        /// <param name="value1">The first source vector.</param>
        /// <param name="value2">The second source vector.</param>
        /// <param name="amount">Value between 0 and 1 indicating the weight of the second source vector.</param>
        /// <returns>The interpolated vector.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public static Vector2 Lerp(Vector2 value1, Vector2 value2, float amount) =>
            new Vector2(
                value1.X + (value2.X - value1.X) * amount,
                value1.Y + (value2.Y - value1.Y) * amount);

        /// <summary>
        /// Transforms a vector by the given matrix.
        /// </summary>
        /// <param name="position">The source vector.</param>
        /// <param name="matrix">The transformation matrix.</param>
        /// <returns>The transformed vector.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public static Vector2 Transform(Vector2 position, Matrix3x2 matrix) =>
            new Vector2(
                position.X * matrix.M11 + position.Y * matrix.M21 + matrix.M31,
                position.X * matrix.M12 + position.Y * matrix.M22 + matrix.M32);

        /// <summary>
        /// Transforms a vector by the given matrix.
        /// </summary>
        /// <param name="position">The source vector.</param>
        /// <param name="matrix">The transformation matrix.</param>
        /// <returns>The transformed vector.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public static Vector2 Transform(Vector2 position, Matrix4x4 matrix) =>
            new Vector2(
                position.X * matrix.M11 + position.Y * matrix.M21 + matrix.M41,
                position.X * matrix.M12 + position.Y * matrix.M22 + matrix.M42);

        /// <summary>
        /// Transforms a vector normal by the given matrix.
        /// </summary>
        /// <param name="normal">The source vector.</param>
        /// <param name="matrix">The transformation matrix.</param>
        /// <returns>The transformed vector.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public static Vector2 TransformNormal(Vector2 normal, Matrix3x2 matrix) =>
            new Vector2(
                normal.X * matrix.M11 + normal.Y * matrix.M21,
                normal.X * matrix.M12 + normal.Y * matrix.M22);

        /// <summary>
        /// Transforms a vector normal by the given matrix.
        /// </summary>
        /// <param name="normal">The source vector.</param>
        /// <param name="matrix">The transformation matrix.</param>
        /// <returns>The transformed vector.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public static Vector2 TransformNormal(Vector2 normal, Matrix4x4 matrix) =>
            new Vector2(
                normal.X * matrix.M11 + normal.Y * matrix.M21,
                normal.X * matrix.M12 + normal.Y * matrix.M22);

        /// <summary>
        /// Transforms a vector by the given Quaternion rotation value.
        /// </summary>
        /// <param name="value">The source vector to be rotated.</param>
        /// <param name="rotation">The rotation to apply.</param>
        /// <returns>The transformed vector.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public static Vector2 Transform(Vector2 value, Quaternion rotation)
        {
            float x2 = rotation.X + rotation.X;
            float y2 = rotation.Y + rotation.Y;
            float z2 = rotation.Z + rotation.Z;

            float wz2 = rotation.W * z2;
            float xx2 = rotation.X * x2;
            float xy2 = rotation.X * y2;
            float yy2 = rotation.Y * y2;
            float zz2 = rotation.Z * z2;

            return new Vector2(
                value.X * (1.0f - yy2 - zz2) + value.Y * (xy2 - wz2),
                value.X * (xy2 + wz2) + value.Y * (1.0f - xx2 - zz2));
        }

        #endregion Public Static Methods

        #region Public operator methods

        // all the below methods should be inlined as they are
        // implemented over JIT intrinsics

        /// <summary>
        /// Adds two vectors together.
        /// </summary>
        /// <param name="left">The first source vector.</param>
        /// <param name="right">The second source vector.</param>
        /// <returns>The summed vector.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public static Vector2 Add(Vector2 left, Vector2 right) => left + right;

        /// <summary>
        /// Subtracts the second vector from the first.
        /// </summary>
        /// <param name="left">The first source vector.</param>
        /// <param name="right">The second source vector.</param>
        /// <returns>The difference vector.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public static Vector2 Subtract(Vector2 left, Vector2 right) => left - right;

        /// <summary>
        /// Multiplies two vectors together.
        /// </summary>
        /// <param name="left">The first source vector.</param>
        /// <param name="right">The second source vector.</param>
        /// <returns>The product vector.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public static Vector2 Multiply(Vector2 left, Vector2 right) => left * right;

        /// <summary>
        /// Multiplies a vector by the given scalar.
        /// </summary>
        /// <param name="left">The source vector.</param>
        /// <param name="right">The scalar value.</param>
        /// <returns>The scaled vector.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public static Vector2 Multiply(Vector2 left, float right) => left * right;

        /// <summary>
        /// Multiplies a vector by the given scalar.
        /// </summary>
        /// <param name="left">The scalar value.</param>
        /// <param name="right">The source vector.</param>
        /// <returns>The scaled vector.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public static Vector2 Multiply(float left, Vector2 right) => left * right;

        /// <summary>
        /// Divides the first vector by the second.
        /// </summary>
        /// <param name="left">The first source vector.</param>
        /// <param name="right">The second source vector.</param>
        /// <returns>The vector resulting from the division.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public static Vector2 Divide(Vector2 left, Vector2 right) => left / right;

        /// <summary>
        /// Divides the vector by the given scalar.
        /// </summary>
        /// <param name="left">The source vector.</param>
        /// <param name="divisor">The scalar value.</param>
        /// <returns>The result of the division.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public static Vector2 Divide(Vector2 left, float divisor) => left / divisor;

        /// <summary>
        /// Negates a given vector.
        /// </summary>
        /// <param name="value">The source vector.</param>
        /// <returns>The negated vector.</returns>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        public static Vector2 Negate(Vector2 value) => -value;

        #endregion Public operator methods
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using System.Reflection;
using Hydra.Framework;
using Hydra.Framework.Extensions;

namespace Hydra.Framework.Reflection
{
    /// <summary>
    /// Invokes methods on instances of <typeparamref name="T"/> that return
    /// <typeparamref name="TResult"/> via compiled expression-tree delegates,
    /// caching the compiled delegate per method/argument signature so repeat
    /// calls avoid reflection overhead.
    /// NOTE(review): <c>MethodNameCache</c>, <c>GetArgumentHashCode</c> and
    /// <c>GetGenericMethodFromTypes</c> come from <c>FastInvokerBase</c>, and
    /// <c>MatchingArguments</c> / <c>ToSpecializedMethod</c> /
    /// <c>Retrieve</c> / <c>ToArrayIndexParameters</c> are Hydra.Framework
    /// extensions — none are visible in this file; behavior notes about them
    /// below are inferred from usage and should be confirmed at their source.
    /// </summary>
    public class FastInvoker<T, TResult> :
        FastInvokerBase,
        IFastInvoker<T, TResult>
    {
        // One invoker instance per thread; avoids locking around the delegate
        // caches at the cost of one cache per thread.
        [ThreadStatic]
        static FastInvoker<T, TResult> _current;

        // Compiled delegates for parameterless invocations, keyed by method hash.
        readonly Dictionary<int, Func<T, TResult>> _noArgs = new Dictionary<int, Func<T, TResult>>();

        // Compiled delegates for invocations with arguments, keyed by
        // method + argument-type hash.
        readonly Dictionary<int, Func<T, object[], TResult>> _withArgs = new Dictionary<int, Func<T, object[], TResult>>();

        FastInvoker()
            : base(typeof(T))
        {
        }

        /// <summary>
        /// The per-thread singleton instance (lazily created).
        /// </summary>
        public static FastInvoker<T, TResult> Current
        {
            get
            {
                if (_current == null)
                    _current = new FastInvoker<T, TResult>();

                return _current;
            }
        }

        /// <summary>
        /// Invokes the parameterless method named <paramref name="methodName"/>
        /// on <paramref name="target"/> and returns its result.
        /// </summary>
        public TResult FastInvoke(T target, string methodName)
        {
            // 97* presumably separates name-based keys from the 61* expression-based
            // keys used below — TODO confirm; collisions would be resolved to the
            // first cached delegate.
            int key = 97*methodName.GetHashCode();

            Func<T, TResult> invoker = GetInvoker(key, () =>
                {
                    return MethodNameCache[methodName]
                        .MatchingArguments()
                        .Where(x => x.ReturnType == typeof(TResult))
                        .First();
                });

            return invoker(target);
        }

        /// <summary>
        /// Invokes the method named <paramref name="methodName"/> whose parameters
        /// match <paramref name="args"/>; falls back to the no-argument overload
        /// when <paramref name="args"/> is null or empty.
        /// </summary>
        public TResult FastInvoke(T target, string methodName, params object[] args)
        {
            if (args == null || args.Length == 0)
                return FastInvoke(target, methodName);

            int key = GetArgumentHashCode(97*methodName.GetHashCode(), args);

            Func<T, object[], TResult> invoker = GetInvoker(key, () =>
                {
                    return MethodNameCache[methodName]
                        .MatchingArguments(args)
                        .Select(x => x.ToSpecializedMethod(args))
                        .Where(x => x.ReturnType == typeof(TResult))
                        .First();
                    // TODO: Need to check return type after method has been specialized
                }, args);

            return invoker(target, args);
        }

        /// <summary>
        /// Invokes the parameterless generic method named
        /// <paramref name="methodName"/>, closed over <paramref name="genericTypes"/>.
        /// </summary>
        public TResult FastInvoke(T target, Type[] genericTypes, string methodName)
        {
            int key = GetArgumentHashCode(97*methodName.GetHashCode(), genericTypes);

            Func<T, TResult> invoker = GetInvoker(key, () =>
                {
                    var empty = new object[] {};

                    return MethodNameCache[methodName]
                        .MatchingArguments()
                        .Select(x => x.ToSpecializedMethod(genericTypes, empty))
                        .Where(x => x.ReturnType == typeof(TResult))
                        .First();
                });

            return invoker(target);
        }

        /// <summary>
        /// Invokes the generic method named <paramref name="methodName"/>, closed
        /// over <paramref name="genericTypes"/>, with <paramref name="args"/>;
        /// falls back to the argumentless generic overload when args is null/empty.
        /// </summary>
        public TResult FastInvoke(T target, Type[] genericTypes, string methodName, object[] args)
        {
            if (args == null || args.Length == 0)
                return FastInvoke(target, genericTypes, methodName);

            int key = GetArgumentHashCode(97*methodName.GetHashCode(), genericTypes, args);

            Func<T, object[], TResult> invoker = GetInvoker(key, () =>
                {
                    return MethodNameCache[methodName]
                        .MatchingArguments(args)
                        .Select(x => x.ToSpecializedMethod(genericTypes, args))
                        .Where(x => x.ReturnType == typeof(TResult))
                        .First();
                }, args);

            return invoker(target, args);
        }

        /// <summary>
        /// Invokes the method identified by a method-call
        /// <paramref name="expression"/> (only the called method is used; the
        /// expression's own arguments are ignored).
        /// </summary>
        /// <exception cref="ArgumentException">Body is not a method call.</exception>
        public TResult FastInvoke(T target, Expression<Func<T, TResult>> expression)
        {
            var call = expression.Body as MethodCallExpression;
            if (call == null)
                throw new ArgumentException("Only method call expressions are supported.", "expression");

            int key = 61*call.Method.GetHashCode();

            Func<T, TResult> invoker = GetInvoker(key, () => call.Method);

            return invoker(target);
        }

        /// <summary>
        /// Invokes the method identified by <paramref name="expression"/> with
        /// <paramref name="args"/>, specializing generic method definitions from
        /// the runtime argument types.
        /// </summary>
        /// <exception cref="ArgumentException">Body is not a method call.</exception>
        public TResult FastInvoke(T target, Expression<Func<T, TResult>> expression, params object[] args)
        {
            var call = expression.Body as MethodCallExpression;
            if (call == null)
                throw new ArgumentException("Only method call expressions are supported.", "expression");

            MethodInfo method = call.Method;

            int key = GetArgumentHashCode(61*method.GetHashCode(), args);

            Func<T, object[], TResult> invoker = GetInvoker(key, () =>
                {
                    return method.IsGenericMethod
                        ? method.GetGenericMethodDefinition().ToSpecializedMethod(args)
                        : method;
                }, args);

            return invoker(target, args);
        }

        /// <summary>
        /// Invokes the (possibly generic) method identified by
        /// <paramref name="expression"/>, closed over <paramref name="genericTypes"/>,
        /// with no arguments.
        /// </summary>
        /// <exception cref="ArgumentException">Body is not a method call.</exception>
        public TResult FastInvoke(T target, Type[] genericTypes, Expression<Func<T, TResult>> expression)
        {
            var call = expression.Body as MethodCallExpression;
            if (call == null)
                throw new ArgumentException("Only method call expressions are supported.", "expression");

            MethodInfo method = call.Method;

            int key = GetArgumentHashCode(61*method.GetHashCode(), genericTypes);

            Func<T, TResult> invoker = GetInvoker(key, () =>
                {
                    if (method.IsGenericMethod)
                        return GetGenericMethodFromTypes(method.GetGenericMethodDefinition(), genericTypes);

                    return method;
                });

            return invoker(target);
        }

        /// <summary>
        /// Invokes the (possibly generic) method identified by
        /// <paramref name="expression"/>, closed over <paramref name="genericTypes"/>,
        /// with <paramref name="args"/>.
        /// </summary>
        /// <exception cref="ArgumentException">Body is not a method call.</exception>
        public TResult FastInvoke(T target, Type[] genericTypes, Expression<Func<T, TResult>> expression, object[] args)
        {
            var call = expression.Body as MethodCallExpression;
            if (call == null)
                throw new ArgumentException("Only method call expressions are supported.", "expression");

            MethodInfo method = call.Method;

            int key = GetArgumentHashCode(61*method.GetHashCode(), genericTypes, args);

            Func<T, object[], TResult> invoker = GetInvoker(key, () =>
                {
                    if (method.IsGenericMethod)
                        return method.GetGenericMethodDefinition().ToSpecializedMethod(genericTypes, args);

                    return method.ToSpecializedMethod(genericTypes, args);
                }, args);

            return invoker(target, args);
        }

        // Fetches (or compiles and caches) a no-argument delegate for the method
        // resolved by getMethodInfo. Retrieve presumably runs the factory only on
        // a cache miss — confirm in Hydra.Framework.Extensions.
        Func<T, TResult> GetInvoker(int key, Func<MethodInfo> getMethodInfo)
        {
            return _noArgs.Retrieve(key, () =>
                {
                    MethodInfo method = getMethodInfo();

                    // Builds: (T target) => target.Method()
                    ParameterExpression instanceParameter = Expression.Parameter(typeof(T), "target");

                    MethodCallExpression call = Expression.Call(instanceParameter, method);

                    return Expression.Lambda<Func<T, TResult>>(call, new[] {instanceParameter}).Compile();
                });
        }

        // Fetches (or compiles and caches) a delegate that unpacks an object[]
        // into the method's parameters via indexed array accesses.
        Func<T, object[], TResult> GetInvoker(int key, Func<MethodInfo> getMethodInfo, object[] args)
        {
            return _withArgs.Retrieve(key, () =>
                {
                    MethodInfo method = getMethodInfo();

                    // Builds: (T target, object[] args) => target.Method((P0)args[0], (P1)args[1], ...)
                    ParameterExpression instanceParameter = Expression.Parameter(typeof(T), "target");
                    ParameterExpression argsParameter = Expression.Parameter(typeof(object[]), "args");

                    Expression[] parameters = method.GetParameters().ToArrayIndexParameters(argsParameter).ToArray();

                    MethodCallExpression call = Expression.Call(instanceParameter, method, parameters);

                    return Expression.Lambda<Func<T, object[], TResult>>(call, new[] {instanceParameter, argsParameter}).Compile();
                });
        }
    }
}
using System;
using System.Linq;
using static System.Math;

namespace Qwack.Math.Matrix
{
    /// <summary>
    /// Dense linear-algebra routines over jagged arrays, where double[][] is an
    /// array of rows and m[i][j] is the element at row i, column j.
    /// </summary>
    public static class DoubleArrayFunctions
    {
        /// <summary>
        /// Inverts a square matrix by LUP-decomposing it once and then solving
        /// A·x = e_i for every column e_i of the identity matrix.
        /// </summary>
        /// <param name="a">Square input matrix (not modified).</param>
        /// <returns>The inverse of <paramref name="a"/>.</returns>
        public static double[][] InvertMatrix(double[][] a)
        {
            var n = a.Length;

            // x will hold the inverse matrix to be returned.
            var x = new double[n][];
            for (var i = 0; i < n; i++)
            {
                x[i] = new double[a[i].Length];
            }

            // Get the combined LU matrix and the permutation P (as an index array).
            var results = LupDecomposition(a);
            var lu = results.Item1;
            var p = results.Item2;

            // Solve A·X = e for each column e_i of the identity matrix.
            for (var i = 0; i < n; i++)
            {
                var e = new double[a[i].Length];
                e[i] = 1;
                var solve = LupSolve(lu, p, e);
                for (var j = 0; j < solve.Length; j++)
                {
                    x[j][i] = solve[j];
                }
            }
            return x;
        }

        /// <summary>
        /// Solves A·x = b given the combined LU matrix and pivot permutation
        /// produced by <see cref="LupDecomposition"/> (forward then back substitution).
        /// </summary>
        /// <param name="lu">L and U factors packed into one matrix.</param>
        /// <param name="pi">Pivot permutation: row i of the system reads b[pi[i]].</param>
        /// <param name="b">Right-hand side.</param>
        public static double[] LupSolve(double[][] lu, int[] pi, double[] b)
        {
            var n = lu.Length - 1;
            var x = new double[n + 1];
            var y = new double[n + 1];

            // Solve L·y = P·b using forward substitution.
            for (var i = 0; i <= n; i++)
            {
                double suml = 0;
                for (var j = 0; j <= i - 1; j++)
                {
                    // L and U share one matrix; the diagonal of L is implicitly 1,
                    // so use 1 (not lu[i][j]) when i == j.
                    var lij = i == j ? 1 : lu[i][j];
                    suml += (lij * y[j]);
                }
                y[i] = b[pi[i]] - suml;
            }

            // Solve U·x = y using back substitution.
            for (var i = n; i >= 0; i--)
            {
                var sumu = 0.0;
                for (var j = i + 1; j <= n; j++)
                {
                    sumu += (lu[i][j] * x[j]);
                }
                x[i] = (y[i] - sumu) / lu[i][i];
            }
            return x;
        }

        /// <summary>
        /// LUP decomposition with partial pivoting. Returns L and U packed into a
        /// single matrix (unit diagonal of L implicit) plus the pivot permutation
        /// as an index array.
        /// </summary>
        /// <param name="A">Square input matrix; copied, not modified.</param>
        public static Tuple<double[][], int[]> LupDecomposition(double[][] A)
        {
            // Work on a copy so the caller's matrix is untouched.
            var a = new double[A.Length][];
            for (var i = 0; i < a.Length; i++)
            {
                a[i] = new double[A[i].Length];
                Array.Copy(A[i], a[i], a[i].Length);
            }

            var n = a.Length - 1;

            // pi represents the permutation matrix as an array: pi[j] is the column
            // in which row j of P carries its 1. Pivoting avoids dividing by zero
            // or very small numbers.
            var pi = new int[n + 1];
            var kp = 0;

            // Initialize the permutation to the identity.
            for (var j = 0; j <= n; j++)
            {
                pi[j] = j;
            }

            for (var k = 0; k <= n; k++)
            {
                // For numerical stability pick the row whose entry in column k has
                // the largest absolute value as the pivot.
                double p = 0;
                for (var i = k; i <= n; i++)
                {
                    if (Abs(a[i][k]) <= p)
                        continue;
                    p = Abs(a[i][k]);
                    kp = i;
                }

                // NOTE(review): the singular-matrix check is deliberately disabled;
                // RayleighQuotient inverts near-singular matrices on purpose. A zero
                // pivot surfaces as NaN/Infinity in the result instead of throwing.
                //if (p == 0)
                //{
                //    throw new Exception("singular matrix");
                //}

                // Record the pivot by exchanging pi[k] and pi[kp].
                var pik = pi[k];
                var pikp = pi[kp];
                pi[k] = pikp;
                pi[kp] = pik;

                // Exchange rows k and kp as determined by the pivot.
                for (var i = 0; i <= n; i++)
                {
                    var aki = a[k][i];
                    var akpi = a[kp][i];
                    a[k][i] = akpi;
                    a[kp][i] = aki;
                }

                // Compute the Schur complement (eliminate column k below the diagonal).
                for (var i = k + 1; i <= n; i++)
                {
                    a[i][k] = a[i][k] / a[k][k];
                    for (var j = k + 1; j <= n; j++)
                    {
                        a[i][j] = a[i][j] - (a[i][k] * a[k][j]);
                    }
                }
            }
            return Tuple.Create(a, pi);
        }

        /// <summary>
        /// Row-vector × matrix product. Returns a vector with one entry per column
        /// of <paramref name="matrixB"/>.
        /// </summary>
        public static double[] MatrixProduct(double[] vectorA, double[][] matrixB)
        {
            var aCols = vectorA.Length;
            var bRows = matrixB.Length;
            var bCols = matrixB[0].Length;
            if (aCols != bRows)
                throw new InvalidOperationException("Non-conformable matrices");

            // FIX: the result has bCols entries, one per column of B. It was
            // previously sized by vectorA.Length, which overran (or left trailing
            // zeros) for non-square B. Identical for square B.
            var result = new double[bCols];

            for (var j = 0; j < bCols; ++j) // each col of B
            {
                for (var k = 0; k < bRows; ++k)
                {
                    result[j] += vectorA[k] * matrixB[k][j];
                }
            }
            return result;
        }

        /// <summary>Dot product of two equal-length vectors.</summary>
        public static double VectorProduct(double[] vectorA, double[] vectorB)
        {
            var aCols = vectorA.Length;
            var bRows = vectorB.Length;
            if (aCols != bRows)
                throw new InvalidOperationException("Non-conformable vectors");

            var result = 0.0;
            for (var k = 0; k < bRows; ++k)
            {
                result += vectorA[k] * vectorB[k];
            }
            return result;
        }

        /// <summary>Wraps a vector as a 1×n matrix (the vector becomes row 0, not copied).</summary>
        public static double[][] RowVectorToMatrix(double[] vectorA)
        {
            var result = new double[1][];
            result[0] = vectorA;
            return result;
        }

        /// <summary>Wraps a vector as an n×1 matrix (one row per element).</summary>
        public static double[][] ColumnVectorToMatrix(double[] vectorA)
        {
            var result = new double[vectorA.Length][];
            for (var i = 0; i < vectorA.Length; i++)
                result[i] = new double[] { vectorA[i] };
            return result;
        }

        /// <summary>Element-wise matrix subtraction a - b.</summary>
        public static double[][] MatrixSubtract(this double[][] a, double[][] b)
        {
            if (a.Length != b.Length || a[0].Length != b[0].Length)
                throw new InvalidOperationException("Non-conformable matrices");

            var o = new double[a.Length][];
            for (var i = 0; i < o.Length; i++)
            {
                o[i] = new double[a[i].Length];
                for (var j = 0; j < o[i].Length; j++)
                {
                    o[i][j] = a[i][j] - b[i][j];
                }
            }
            return o;
        }

        /// <summary>Element-wise matrix addition a + b.</summary>
        public static double[][] MatrixAdd(this double[][] a, double[][] b)
        {
            if (a.Length != b.Length || a[0].Length != b[0].Length)
                throw new InvalidOperationException("Non-conformable matrices");

            var o = new double[a.Length][];
            for (var i = 0; i < o.Length; i++)
            {
                o[i] = new double[a[i].Length];
                for (var j = 0; j < o[i].Length; j++)
                {
                    o[i][j] = a[i][j] + b[i][j];
                }
            }
            return o;
        }

        /// <summary>Matrix transpose (supports rectangular matrices).</summary>
        public static double[][] Transpose(this double[][] matrix)
        {
            var o = new double[matrix[0].Length][];
            for (var r = 0; r < matrix[0].Length; r++)
            {
                o[r] = new double[matrix.Length];
                for (var c = 0; c < matrix.Length; c++)
                {
                    o[r][c] = matrix[c][r];
                }
            }
            return o;
        }

        /// <summary>True if the matrix has as many rows as columns.</summary>
        public static bool IsSquare(this double[][] matrix)
        {
            var rows = matrix.Length;
            var cols = matrix[0].Length;
            return rows == cols;
        }

        /// <summary>Largest element in the matrix.</summary>
        public static double MaxElement(this double[][] matrix) => matrix.Max(x => x.Max());

        /// <summary>Largest absolute element in the matrix.</summary>
        public static double MaxAbsElement(this double[][] matrix) => matrix.Max(x => x.Max(y => Abs(y)));

        /// <summary>Smallest element in the matrix.</summary>
        public static double MinElement(this double[][] matrix) => matrix.Min(x => x.Min());

        /// <summary>Extracts column <paramref name="col"/> as an n×1 matrix.</summary>
        public static double[][] GetColumn(this double[][] matrix, int col)
        {
            // FIX: size by the number of rows (matrix.Length), not the number of
            // columns (matrix[0].Length); the old code truncated or overran for
            // non-square matrices. Identical for square input.
            var o = new double[matrix.Length][];
            for (var i = 0; i < o.Length; i++)
            {
                o[i] = new[] { matrix[i][col] };
            }
            return o;
        }

        /// <summary>Extracts column <paramref name="col"/> as a plain vector.</summary>
        public static double[] GetColumnVector(this double[][] matrix, int col)
        {
            // FIX: size by the number of rows, not the number of columns
            // (wrong for non-square matrices). Identical for square input.
            var o = new double[matrix.Length];
            for (var i = 0; i < o.Length; i++)
            {
                o[i] = matrix[i][col];
            }
            return o;
        }

        /// <summary>
        /// Matrix × matrix product, written with row caching to help the JIT
        /// elide bounds checks.
        /// </summary>
        public static double[][] MatrixProductBounds(double[][] matrixA, double[][] matrixB)
        {
            var aRows = matrixA.Length;
            var aCols = matrixA[0].Length;
            var bCols = matrixB[0].Length;
            if (aCols != matrixB.Length)
                throw new InvalidOperationException("Non-conformable matrices");

            var result = new double[aRows][];
            for (var i = 0; i < matrixA.Length; ++i) // each row of A
            {
                var resultRow = new double[bCols];
                var matrixARow = matrixA[i];
                for (var j = 0; j < bCols; ++j) // each col of B
                {
                    for (var k = 0; k < matrixB.Length; ++k)
                    {
                        resultRow[j] += matrixARow[k] * matrixB[k][j];
                    }
                }
                result[i] = resultRow;
            }
            return result;
        }

        /// <summary>Matrix × matrix product.</summary>
        public static double[][] MatrixProduct(double[][] matrixA, double[][] matrixB)
        {
            var aRows = matrixA.Length;
            var aCols = matrixA[0].Length;
            var bRows = matrixB.Length;
            var bCols = matrixB[0].Length;
            if (aCols != bRows)
                throw new InvalidOperationException("Non-conformable matrices");

            var result = MatrixCreate(aRows, bCols);
            for (var i = 0; i < aRows; ++i) // each row of A
            {
                for (var j = 0; j < bCols; ++j) // each col of B
                {
                    for (var k = 0; k < bRows; ++k)
                    {
                        result[i][j] += matrixA[i][k] * matrixB[k][j];
                    }
                }
            }
            return result;
        }

        /// <summary>Allocates a rows×cols jagged matrix of zeros.</summary>
        public static double[][] MatrixCreate(int rows, int cols)
        {
            var result = new double[rows][];
            for (var i = 0; i < rows; ++i)
            {
                result[i] = new double[cols];
            }
            return result;
        }

        /// <summary>Matrix × column-vector product.</summary>
        public static double[] MatrixProduct(double[][] matrixA, double[] vectorB)
        {
            var aRows = matrixA.Length;
            var aCols = matrixA[0].Length;
            var bRows = vectorB.Length;
            if (aCols != bRows)
                throw new InvalidOperationException("Non-conformable matrices in MatrixProduct");

            var result = new double[aRows];
            for (var i = 0; i < aRows; ++i) // each row of A
            {
                for (var k = 0; k < aCols; ++k)
                {
                    result[i] += matrixA[i][k] * vectorB[k];
                }
            }
            return result;
        }

        /// <summary>Matrix × column-vector product with row caching.</summary>
        public static double[] MatrixProductBounds(double[][] matrixA, double[] vectorB)
        {
            var aRows = matrixA.Length;
            var aCols = matrixA[0].Length;
            var bRows = vectorB.Length;
            if (aCols != bRows)
                throw new InvalidOperationException("Non-conformable matrices in MatrixProduct");

            var result = new double[aRows];
            for (var i = 0; i < matrixA.Length; ++i) // each row of A
            {
                var rowA = matrixA[i];
                for (var k = 0; k < aCols; ++k)
                {
                    result[i] += rowA[k] * vectorB[k];
                }
            }
            return result;
        }

        /// <summary>
        /// The Cholesky decomposition: returns the lower-triangular factor L with
        /// L·Lᵀ equal to the (assumed symmetric positive-definite) input.
        /// </summary>
        /// <param name="matrix">Symmetric positive-definite matrix.</param>
        public static double[][] Cholesky(this double[][] matrix)
        {
            if (!matrix.IsSquare())
                throw new InvalidOperationException("Matrix must be square");

            var N = matrix.Length;
            var result = new double[N][];
            for (var r = 0; r < result.Length; r++)
            {
                result[r] = new double[N];
            }

            for (var r = 0; r < N; r++) // each row of A
            {
                for (var c = 0; c < N; c++)
                {
                    var element = matrix[r][c];
                    for (var k = 0; k < r; k++)
                    {
                        element -= result[r][k] * result[c][k];
                    }
                    if (r == c)
                    {
                        result[r][c] = Sqrt(element);
                    }
                    else if (r < c)
                    {
                        // Below-diagonal entry (row c, column r) of the lower factor.
                        result[c][r] = element / result[r][r];
                    }
                }
            }
            return result;
        }

        /// <summary>
        /// Alternative Cholesky implementation (row-by-row); returns the
        /// lower-triangular factor.
        /// </summary>
        public static double[][] Cholesky2(this double[][] a)
        {
            var n = a[0].Length;
            var ret = new double[n][];
            for (var r = 0; r < n; r++)
            {
                ret[r] = new double[n];
                for (var c = 0; c <= r; c++)
                {
                    if (c == r)
                    {
                        double sum = 0;
                        for (var j = 0; j < c; j++)
                        {
                            sum += ret[c][j] * ret[c][j];
                        }
                        ret[c][c] = Sqrt(a[c][c] - sum);
                    }
                    else
                    {
                        double sum = 0;
                        for (var j = 0; j < c; j++)
                            sum += ret[r][j] * ret[c][j];
                        ret[r][c] = 1.0 / ret[c][c] * (a[r][c] - sum);
                    }
                }
            }
            return ret;
        }

        /// <summary>L1 (taxicab) norm of a vector.</summary>
        public static double Norm(this double[] vectorA) => vectorA.Select(x => Abs(x)).Sum();

        /// <summary>L2 (Euclidean) norm of a vector.</summary>
        public static double EuclidNorm(this double[] vectorA) => Sqrt(vectorA.Select(x => x * x).Sum());

        /// <summary>Scales the vector to unit L1 norm.</summary>
        public static double[] Normalize(this double[] vectorA)
        {
            // Hoisted: the norm was previously recomputed for every element (O(n²)).
            var norm = vectorA.Norm();
            return vectorA.Select(x => x / norm).ToArray();
        }

        /// <summary>Scales the vector to unit L2 norm.</summary>
        public static double[] EuclidNormalize(this double[] vectorA)
        {
            // Hoisted: the norm was previously recomputed for every element (O(n²)).
            var norm = vectorA.EuclidNorm();
            return vectorA.Select(x => x / norm).ToArray();
        }

        /// <summary>Sum of absolute values of all elements (entry-wise L1 norm).</summary>
        public static double Norm(this double[][] matrixA) => matrixA.Select(x => x.Select(y => Abs(y)).Sum()).Sum();

        /// <summary>Frobenius norm of the matrix.</summary>
        public static double EuclidNorm(this double[][] matrixA) => Sqrt(matrixA.Select(x => x.Select(y => y * y).Sum()).Sum());

        /// <summary>size×size matrix with <paramref name="element"/> on the diagonal.</summary>
        public static double[][] DiagonalMatrix(double element, int size)
        {
            var o = new double[size][];
            for (var i = 0; i < size; i++)
            {
                o[i] = new double[size];
                o[i][i] = element;
            }
            return o;
        }

        /// <summary>rows×cols matrix of zeros.</summary>
        public static double[][] EmptyMatrix(int rows, int cols)
        {
            var o = new double[rows][];
            for (var i = 0; i < rows; i++)
            {
                o[i] = new double[cols];
            }
            return o;
        }

        /// <summary>
        /// Determinant by recursive cofactor (Laplace) expansion along the first
        /// row — O(n!), intended for small matrices only.
        /// </summary>
        public static double Determinant(this double[][] a)
        {
            if (!a.IsSquare())
                throw new Exception("Cannot calculate determinant of a non-square matrix");

            var size = a.Length;
            if (size == 1)
                return a[0][0];
            if (size == 2)
                return a[0][0] * a[1][1] - a[1][0] * a[0][1];

            var total = 0.0;
            for (var i = 0; i < size; i++)
            {
                // Minor of element (0, i): drop row 0 and column i.
                var subMatrix = new double[size - 1][];
                for (var j = 0; j < subMatrix.Length; j++)
                {
                    subMatrix[j] = new double[subMatrix.Length];
                }
                var sign = i % 2 == 1 ? -1.0 : 1.0;
                for (var j = 0; j < subMatrix.Length; j++)
                {
                    var colShift = j >= i ? 1 : 0;
                    for (var k = 0; k < subMatrix.Length; k++)
                    {
                        subMatrix[k][j] = a[k + 1][j + colShift];
                    }
                }
                total += a[0][i] * Determinant(subMatrix) * sign;
            }
            return total;
        }

        /// <summary>
        /// Rayleigh-quotient inverse iteration for a single eigenpair, starting
        /// from the supplied estimates. Converges when det(A - λI) is within
        /// <paramref name="epsilon"/> of zero.
        /// </summary>
        /// <exception cref="Exception">If not converged after 10000 iterations.</exception>
        public static (double eigenValue, double[] eigenVector) RayleighQuotient(this double[][] a, double epsilon, double[] initialEigenVector, double initialEigenValue)
        {
            var size = a.Length;
            var eigenVector = initialEigenVector.Normalize();
            var eigenValue = initialEigenValue;

            var err = double.MaxValue;
            var breakkout = 0;
            while (Abs(err) > epsilon)
            {
                // Inverse-iteration step: v ← (A - λI)⁻¹·v, renormalized.
                var muI = DiagonalMatrix(eigenValue, size);
                var r = a.MatrixSubtract(muI);
                r = InvertMatrix(r);
                eigenVector = MatrixProduct(r, eigenVector);
                eigenVector = eigenVector.Normalize();

                // Rayleigh-quotient update: λ ← vᵀAv / vᵀv.
                var q = VectorProduct(MatrixProduct(eigenVector, a), eigenVector);
                q /= VectorProduct(eigenVector, eigenVector);
                eigenValue = q;

                var y = MatrixSubtract(a, DiagonalMatrix(eigenValue, size));
                err = y.Determinant();

                if (breakkout > 10000)
                    throw new Exception("Failed to find eigen values / vectors");
                breakkout++;
            }
            return (eigenValue, eigenVector);
        }

        /// <summary>Divides every element of the vector by a scalar.</summary>
        public static double[] ScalarDivide(this double[] vector, double divisor) => vector.Select(x => x / divisor).ToArray();

        /// <summary>Divides every element of the matrix by a scalar.</summary>
        public static double[][] ScalarDivide(this double[][] matrixA, double divisor) => matrixA.Select(x => x.Select(y => y / divisor).ToArray()).ToArray();

        /// <summary>Subtracts a scalar from every element of the vector.</summary>
        public static double[] ScalarSubtract(this double[] vector, double subtractor) => vector.Select(x => x - subtractor).ToArray();

        /// <summary>Subtracts a scalar from every element of the matrix.</summary>
        public static double[][] ScalarSubtract(this double[][] matrixA, double subtractor) => matrixA.Select(x => x.Select(y => y - subtractor).ToArray()).ToArray();

        /// <summary>vᵀ·v — the squared Euclidean norm.</summary>
        public static double VTV(this double[] v) => v.Select(x => x * x).Sum();

        /// <summary>Deep copy of a jagged matrix.</summary>
        public static double[][] Clone(double[][] matrix)
        {
            var o = new double[matrix.Length][];
            for (var i = 0; i < o.Length; i++)
            {
                o[i] = new double[matrix[i].Length];
                Array.Copy(matrix[i], o[i], o[i].Length);
            }
            return o;
        }

        // Minor used by the Householder QR: identity in the first d rows/cols,
        // the input matrix elsewhere.
        private static double[][] ComputeMinor(this double[][] mat, int d)
        {
            var o = EmptyMatrix(mat.Length, mat[0].Length);
            for (var i = 0; i < d; i++)
                o[i][i] = 1.0;
            for (var i = d; i < o.Length; i++)
                for (var j = d; j < o[0].Length; j++)
                    o[i][j] = mat[i][j];
            return o;
        }

        // Element-wise a + s·b.
        private static double[] Vmadd(double[] a, double[] b, double s) => a.Select((x, ix) => x + s * b[ix]).ToArray();

        // Householder reflector H = I - 2·v·vᵀ for a unit vector v.
        private static double[][] ComputeHouseholderFactor(double[] v)
        {
            var n = v.Length;
            var mat = EmptyMatrix(n, n);
            for (var i = 0; i < n; i++)
                for (var j = 0; j < n; j++)
                    mat[i][j] = -2 * v[i] * v[j];
            for (var i = 0; i < n; i++)
                mat[i][i] += 1;
            return mat;
        }

        /// <summary>
        /// QR decomposition via Householder reflections: returns orthogonal Q and
        /// upper-triangular R with Q·R = mat.
        /// </summary>
        public static (double[][] Q, double[][] R) QRHouseholder(this double[][] mat)
        {
            var m = mat.Length;
            var n = mat[0].Length;

            // Array of reflectors Q1, Q2, ... Qm.
            var qv = new double[m][][];

            // Working copy of the matrix being triangularized.
            var z = Clone(mat);
            double[][] z1;

            for (var k = 0; k < n && k < m - 1; k++)
            {
                var e = new double[m];

                // Compute the minor and extract its k-th column into x.
                z1 = z.ComputeMinor(k);
                var x = GetColumnVector(z1, k);

                var a = x.EuclidNorm();
                if (mat[k][k] > 0)
                    a = -a;

                for (var i = 0; i < e.Length; i++)
                    e[i] = (i == k) ? 1 : 0;

                // e = (x + a·e) / ||x + a·e||
                e = Vmadd(x, e, a);
                e = e.EuclidNormalize();

                // qv[k] = I - 2·e·eᵀ
                qv[k] = ComputeHouseholderFactor(e);

                // z = qv[k] * z1
                z = MatrixProduct(qv[k], z1);
            }

            var Q = qv[0];

            // Accumulate the product of reflectors (transposed below to give Q).
            // FIX: multiply by qv[i], not qv[1], on each pass — the old code applied
            // the second reflector repeatedly, yielding a non-triangular R for any
            // matrix needing more than two reflections.
            for (var i = 1; i < n && i < m - 1; i++)
            {
                Q = MatrixProduct(qv[i], Q);
            }

            var R = MatrixProduct(Q, mat);
            Q = Transpose(Q);
            return (Q, R);
        }

        /// <summary>
        /// All eigenvalues via the (unshifted) QR algorithm: iterate A ← QᵀAQ until
        /// the total below-diagonal mass falls under <paramref name="epsilon"/>.
        /// </summary>
        /// <exception cref="Exception">If not converged after 100000 iterations.</exception>
        public static double[] QREigenValues(this double[][] a, double epsilon)
        {
            var A = a;
            var err = double.MaxValue;
            var breakkout = 0;
            while (Abs(err) > epsilon)
            {
                var (Q, R) = A.QRHouseholder();
                A = MatrixProduct(Q.Transpose(), MatrixProduct(A, Q));

                // Convergence measure: sum of absolute below-diagonal entries.
                err = 0;
                for (var i = 1; i < A.Length; i++)
                    for (var j = 0; j < i; j++)
                    {
                        err += Abs(A[i][j]);
                    }

                if (breakkout > 100000)
                    throw new Exception("Failed to find eigen values / vectors");
                breakkout++;
            }

            var eigenValues = new double[A.Length];
            for (var i = 0; i < A.Length; i++)
                eigenValues[i] = A[i][i];
            return eigenValues;
        }
    }
}
using System; using System.Collections.Generic; using System.IO; using System.Runtime.Versioning; using Moq; using NuGet.Test.Mocks; using Xunit; namespace NuGet.Test { public class ProjectManagerTest { [Fact] public void AddingPackageReferenceNullOrEmptyPackageIdThrows() { // Arrange ProjectManager projectManager = CreateProjectManager(); // Act & Assert ExceptionAssert.ThrowsArgNullOrEmpty(() => projectManager.AddPackageReference((string)null), "packageId"); ExceptionAssert.ThrowsArgNullOrEmpty(() => projectManager.AddPackageReference(String.Empty), "packageId"); } [Fact] public void AddingUnknownPackageReferenceThrows() { // Arrange ProjectManager projectManager = CreateProjectManager(); // Act & Assert ExceptionAssert.Throws<InvalidOperationException>(() => projectManager.AddPackageReference("unknown"), "Unable to find package 'unknown'."); } [Fact] public void AddingPackageReferenceThrowsExceptionPackageReferenceIsAdded() { // Arrange var sourceRepository = new MockPackageRepository(); var projectSystem = new Mock<MockProjectSystem>() { CallBase = true }; projectSystem.Setup(m => m.AddFile("file", It.IsAny<Stream>())).Throws<UnauthorizedAccessException>(); projectSystem.Setup(m => m.Root).Returns("FakeRoot"); var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem.Object), projectSystem.Object, new MockPackageRepository()); IPackage packageA = PackageUtility.CreatePackage("A", "1.0", new[] { "file" }); sourceRepository.AddPackage(packageA); // Act ExceptionAssert.Throws<UnauthorizedAccessException>(() => projectManager.AddPackageReference("A")); // Assert Assert.True(projectManager.LocalRepository.Exists(packageA)); } [Fact] public void AddingPackageReferenceAddsPreprocessedFileToTargetPathWithRemovedExtension() { // Arrange var sourceRepository = new MockPackageRepository(); var projectSystem = new MockProjectSystem(); var projectManager = new ProjectManager(sourceRepository, new 
DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository()); IPackage packageA = PackageUtility.CreatePackage("A", "1.0", new[] { @"foo\bar\file.pp" }); sourceRepository.AddPackage(packageA); // Act projectManager.AddPackageReference("A"); // Assert Assert.False(projectSystem.FileExists(@"foo\bar\file.pp")); Assert.True(projectSystem.FileExists(@"foo\bar\file")); } [Fact] public void AddPackageReferenceWhenNewVersionOfPackageAlreadyReferencedThrows() { // Arrange var sourceRepository = new MockPackageRepository(); var projectSystem = new MockProjectSystem(); var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository()); IPackage packageA10 = PackageUtility.CreatePackage("A", "1.0", dependencies: new List<PackageDependency> { new PackageDependency("B") }, content: new[] { "foo" }); IPackage packageA20 = PackageUtility.CreatePackage("A", "2.0", dependencies: new List<PackageDependency> { new PackageDependency("B") }, content: new[] { "foo" }); IPackage packageB10 = PackageUtility.CreatePackage("B", "1.0", content: new[] { "foo" }); projectManager.LocalRepository.AddPackage(packageA20); projectManager.LocalRepository.AddPackage(packageB10); sourceRepository.AddPackage(packageA20); sourceRepository.AddPackage(packageB10); sourceRepository.AddPackage(packageA10); sourceRepository.AddPackage(packageA20); sourceRepository.AddPackage(packageB10); // Act & Assert ExceptionAssert.Throws<InvalidOperationException>(() => projectManager.AddPackageReference("A", SemanticVersion.Parse("1.0")), @"Already referencing a newer version of 'A'."); } [Fact] public void RemovingUnknownPackageReferenceThrows() { // Arrange var projectManager = CreateProjectManager(); // Act & Assert ExceptionAssert.Throws<InvalidOperationException>(() => projectManager.RemovePackageReference("foo"), "Unable to find package 'foo'."); } [Fact] public void 
RemovingPackageReferenceWithOtherProjectWithReferencesThatWereNotCopiedToProject() { // Arrange var sourceRepository = new MockPackageRepository(); var projectSystem = new MockProjectSystem(); var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository()); var packageA = PackageUtility.CreatePackage("A", "1.0", content: new[] { "a.file" }); var packageB = PackageUtility.CreatePackage("B", "1.0", content: null, assemblyReferences: new[] { PackageUtility.CreateAssemblyReference("foo.dll", new FrameworkName("SP", new Version("40.0"))) }, tools: null, dependencies: null, downloadCount: 0, description: null, summary: null, listed: true, tags: null); projectManager.LocalRepository.AddPackage(packageA); sourceRepository.AddPackage(packageA); projectManager.LocalRepository.AddPackage(packageB); sourceRepository.AddPackage(packageB); // Act projectManager.RemovePackageReference("A"); // Assert Assert.False(projectManager.LocalRepository.Exists(packageA)); } [Fact] public void RemovingUnknownPackageReferenceNullOrEmptyPackageIdThrows() { // Arrange var projectManager = CreateProjectManager(); // Act & Assert ExceptionAssert.ThrowsArgNullOrEmpty(() => projectManager.RemovePackageReference((string)null), "packageId"); ExceptionAssert.ThrowsArgNullOrEmpty(() => projectManager.RemovePackageReference(String.Empty), "packageId"); } [Fact] public void RemovingPackageReferenceWithNoDependents() { // Arrange var sourceRepository = new MockPackageRepository(); var projectSystem = new MockProjectSystem(); var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository()); var package = PackageUtility.CreatePackage("foo", "1.2.33", content: new[] { "file1" }); projectManager.LocalRepository.AddPackage(package); sourceRepository.AddPackage(package); // Act projectManager.RemovePackageReference("foo"); // Assert 
Assert.False(projectManager.LocalRepository.Exists(package)); } [Fact] public void AddPackageReferenceAddsContentAndReferencesProjectSystem() { // Arrange var projectSystem = new MockProjectSystem(); var localRepository = new MockPackageRepository(); var mockRepository = new MockPackageRepository(); var projectManager = new ProjectManager(mockRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, localRepository); var packageA = PackageUtility.CreatePackage("A", "1.0", new[] { "contentFile" }, new[] { "reference.dll" }, new[] { "tool" }); mockRepository.AddPackage(packageA); // Act projectManager.AddPackageReference("A"); // Assert Assert.Equal(1, projectSystem.Paths.Count); Assert.Equal(1, projectSystem.References.Count); Assert.True(projectSystem.References.ContainsKey(@"reference.dll")); Assert.True(projectSystem.FileExists(@"contentFile")); Assert.True(localRepository.Exists("A")); } [Fact] public void AddPackageReferenceAddingPackageWithDuplicateReferenceOverwritesReference() { // Arrange var projectSystem = new MockProjectSystem(); var localRepository = new MockPackageRepository(); var mockRepository = new MockPackageRepository(); var projectManager = new ProjectManager(mockRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, localRepository); var packageA = PackageUtility.CreatePackage("A", "1.0", assemblyReferences: new[] { "reference.dll" }); var packageB = PackageUtility.CreatePackage("B", "1.0", assemblyReferences: new[] { "reference.dll" }); mockRepository.AddPackage(packageA); mockRepository.AddPackage(packageB); // Act projectManager.AddPackageReference("A"); projectManager.AddPackageReference("B"); // Assert Assert.Equal(0, projectSystem.Paths.Count); Assert.Equal(1, projectSystem.References.Count); Assert.True(projectSystem.References.ContainsKey(@"reference.dll")); Assert.True(projectSystem.References.ContainsValue(@"B.1.0\reference.dll")); Assert.True(localRepository.Exists("A")); 
Assert.True(localRepository.Exists("B")); } [Fact] public void AddPackageReferenceRaisesOnBeforeInstallAndOnAfterInstall() { // Arrange var projectSystem = new MockProjectSystem(); var mockRepository = new MockPackageRepository(); var projectManager = new ProjectManager(mockRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository()); var packageA = PackageUtility.CreatePackage("A", "1.0", new[] { "contentFile" }, new[] { "reference.dll" }, new[] { "tool" }); projectManager.PackageReferenceAdding += (sender, e) => { // Assert Assert.Equal(e.InstallPath, @"C:\MockFileSystem\A.1.0"); Assert.Same(e.Package, packageA); }; projectManager.PackageReferenceAdded += (sender, e) => { // Assert Assert.Equal(e.InstallPath, @"C:\MockFileSystem\A.1.0"); Assert.Same(e.Package, packageA); }; mockRepository.AddPackage(packageA); // Act projectManager.AddPackageReference("A"); } [Fact] public void RemovePackageReferenceRaisesOnBeforeUninstallAndOnAfterUninstall() { // Arrange var mockProjectSystem = new MockProjectSystem(); var mockRepository = new MockPackageRepository(); var projectManager = new ProjectManager(mockRepository, new DefaultPackagePathResolver(new MockProjectSystem()), mockProjectSystem, new MockPackageRepository()); IPackage packageA = PackageUtility.CreatePackage("A", "1.0", new[] { @"sub\file1", @"sub\file2" }); projectManager.PackageReferenceRemoving += (sender, e) => { // Assert Assert.Equal(e.InstallPath, @"C:\MockFileSystem\A.1.0"); Assert.Same(e.Package, packageA); }; projectManager.PackageReferenceRemoved += (sender, e) => { // Assert Assert.Equal(e.InstallPath, @"C:\MockFileSystem\A.1.0"); Assert.Same(e.Package, packageA); }; mockRepository.AddPackage(packageA); projectManager.AddPackageReference("A"); // Act projectManager.RemovePackageReference("A"); } [Fact] public void RemovePackageReferenceExcludesFileIfAnotherPackageUsesThem() { // Arrange var mockProjectSystem = new MockProjectSystem(); var mockRepository = 
new MockPackageRepository();
// NOTE(review): the lines above/below up to the first closing brace are the tail of a
// test whose opening lines are in the previous chunk — left untouched.
var projectManager = new ProjectManager(mockRepository, new DefaultPackagePathResolver(new MockProjectSystem()), mockProjectSystem, new MockPackageRepository());
IPackage packageA = PackageUtility.CreatePackage("A", "1.0", new[] { "fileA", "commonFile" });
IPackage packageB = PackageUtility.CreatePackage("B", "1.0", new[] { "fileB", "commonFile" });
mockRepository.AddPackage(packageA);
mockRepository.AddPackage(packageB);
projectManager.AddPackageReference("A");
projectManager.AddPackageReference("B");

// Act
projectManager.RemovePackageReference("A");

// Assert
// Removing A deletes A's own file but keeps "commonFile", which B still owns.
Assert.True(mockProjectSystem.Deleted.Contains(@"fileA"));
Assert.True(mockProjectSystem.FileExists(@"commonFile"));
}

// Installing a package whose content includes a file the project system rejects
// (IsSupportedFile == false) must skip that file but install the rest.
[Fact]
public void AddPackageWithUnsupportedFilesSkipsUnsupportedFiles()
{
    // Arrange
    var localRepository = new MockPackageRepository();
    var sourceRepository = new MockPackageRepository();
    var projectSystem = new Mock<MockProjectSystem>() { CallBase = true };
    projectSystem.Setup(m => m.IsSupportedFile("unsupported")).Returns(false);
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem.Object), projectSystem.Object, localRepository);
    IPackage packageA = PackageUtility.CreatePackage("A", "1.0", new[] { "a", "b", "unsupported" });
    sourceRepository.AddPackage(packageA);

    // Act
    projectManager.AddPackageReference("A");

    // Assert
    Assert.Equal(2, projectSystem.Object.Paths.Count);
    Assert.True(projectSystem.Object.FileExists("a"));
    Assert.True(projectSystem.Object.FileExists("b"));
    Assert.True(localRepository.Exists("A"));
    Assert.False(projectSystem.Object.FileExists("unsupported"));
}

// Same as above, but the unsupported file is a .pp (preprocess/transform) file;
// its transformed target must not be created either.
[Fact]
public void AddPackageWithUnsupportedTransformFileSkipsUnsupportedFile()
{
    // Arrange
    var sourceRepository = new MockPackageRepository();
    var localRepository = new MockPackageRepository();
    var projectSystem = new Mock<MockProjectSystem>() { CallBase = true };
    projectSystem.Setup(m => m.IsSupportedFile("unsupported")).Returns(false);
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem.Object), projectSystem.Object, localRepository);
    IPackage packageA = PackageUtility.CreatePackage("A", "1.0", new[] { "a", "b", "unsupported.pp" });
    sourceRepository.AddPackage(packageA);

    // Act
    projectManager.AddPackageReference("A");

    // Assert
    Assert.Equal(2, projectSystem.Object.Paths.Count);
    Assert.True(projectSystem.Object.FileExists("a"));
    Assert.True(projectSystem.Object.FileExists("b"));
    Assert.True(localRepository.Exists("A"));
    Assert.False(projectSystem.Object.FileExists("unsupported"));
}

// packages.config is special: .pp/.transform files targeting it (in any casing)
// must be copied verbatim, never executed as transforms against packages.config.
[Fact]
public void AddPackageDoNotTransformPackagesConfigFile()
{
    // Arrange
    var sourceRepository = new MockPackageRepository();
    var localRepository = new MockPackageRepository();
    var projectSystem = new Mock<MockProjectSystem>() { CallBase = true };
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem.Object), projectSystem.Object, localRepository);
    IPackage packageA = PackageUtility.CreatePackage("A", "1.0", new[] { "a", "b", "packages.config.pp", "PACKAGES.config.transform" });
    sourceRepository.AddPackage(packageA);

    // Act
    projectManager.AddPackageReference("A");

    // Assert
    Assert.Equal(4, projectSystem.Object.Paths.Count);
    Assert.True(projectSystem.Object.FileExists("a"));
    Assert.True(projectSystem.Object.FileExists("b"));
    Assert.True(projectSystem.Object.FileExists("packages.config.pp"));
    Assert.True(projectSystem.Object.FileExists("packages.config.transform"));
    Assert.True(localRepository.Exists("A"));
    Assert.False(projectSystem.Object.FileExists("packages.config"));
}

// The packages.config exemption must also apply when the file sits in a subfolder.
[Fact]
public void AddPackageDoNotTransformPackagesConfigFileInNestedFolder()
{
    // Arrange
    var sourceRepository = new MockPackageRepository();
    var localRepository = new MockPackageRepository();
    var projectSystem = new Mock<MockProjectSystem>() { CallBase = true };
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem.Object), projectSystem.Object, localRepository);
    IPackage packageA = PackageUtility.CreatePackage("A", "1.0", new[] { "a", "b", "sub\\packages.config.pp", "local\\PACKAGES.config.transform" });
    sourceRepository.AddPackage(packageA);

    // Act
    projectManager.AddPackageReference("A");

    // Assert
    Assert.Equal(4, projectSystem.Object.Paths.Count);
    Assert.True(projectSystem.Object.FileExists("a"));
    Assert.True(projectSystem.Object.FileExists("b"));
    Assert.True(projectSystem.Object.FileExists("sub\\packages.config.pp"));
    Assert.True(projectSystem.Object.FileExists("local\\packages.config.transform"));
    Assert.True(localRepository.Exists("A"));
    Assert.False(projectSystem.Object.FileExists("sub\\packages.config"));
    Assert.False(projectSystem.Object.FileExists("local\\packages.config"));
}

// A web.config.transform in package content must be merged into the project's
// existing web.config on install.
[Fact]
public void AddPackageWithTransformFile()
{
    // Arrange
    var mockProjectSystem = new MockProjectSystem();
    var mockRepository = new MockPackageRepository();
    mockProjectSystem.AddFile("web.config", @"<configuration> <system.web> <compilation debug=""true"" targetFramework=""4.0"" /> </system.web> </configuration> ".AsStream());
    var projectManager = new ProjectManager(mockRepository, new DefaultPackagePathResolver(new MockProjectSystem()), mockProjectSystem, new MockPackageRepository());
    var package = new Mock<IPackage>();
    package.Setup(m => m.Id).Returns("A");
    package.Setup(m => m.Version).Returns(new SemanticVersion("1.0"));
    package.Setup(m => m.Listed).Returns(true);
    var file = new Mock<IPackageFile>();
    file.Setup(m => m.Path).Returns(@"content\web.config.transform");
    file.Setup(m => m.GetStream()).Returns(() => @"<configuration> <configSections> <add a=""n"" /> </configSections> </configuration> ".AsStream());
    package.Setup(m => m.GetFiles()).Returns(new[] { file.Object });
    mockRepository.AddPackage(package.Object);

    // Act
    projectManager.AddPackageReference("A");

    // Assert
    Assert.Equal(@"<?xml version=""1.0"" encoding=""utf-8""?> <configuration> <configSections> <add a=""n"" /> </configSections> <system.web> <compilation debug=""true"" targetFramework=""4.0"" /> </system.web> </configuration>", mockProjectSystem.OpenFile("web.config").ReadToEnd());
}

// Uninstalling a package with a web.config.transform must reverse the merge,
// leaving attributes the package did not contribute (baz="test") intact.
[Fact]
public void RemovePackageWithTransformFile()
{
    // Arrange
    var mockProjectSystem = new MockProjectSystem();
    var mockRepository = new MockPackageRepository();
    mockProjectSystem.AddFile("web.config", @"<configuration> <system.web> <compilation debug=""true"" targetFramework=""4.0"" baz=""test"" /> </system.web> </configuration> ".AsStream());
    var projectManager = new ProjectManager(mockRepository, new DefaultPackagePathResolver(new MockProjectSystem()), mockProjectSystem, new MockPackageRepository());
    var package = new Mock<IPackage>();
    package.Setup(m => m.Id).Returns("A");
    package.Setup(m => m.Version).Returns(new SemanticVersion("1.0"));
    var file = new Mock<IPackageFile>();
    file.Setup(m => m.Path).Returns(@"content\web.config.transform");
    file.Setup(m => m.GetStream()).Returns(() => @"<configuration> <system.web> <compilation debug=""true"" targetFramework=""4.0"" /> </system.web> </configuration> ".AsStream());
    package.Setup(m => m.GetFiles()).Returns(new[] { file.Object });
    mockRepository.AddPackage(package.Object);
    projectManager.LocalRepository.AddPackage(package.Object);

    // Act
    projectManager.RemovePackageReference("A");

    // Assert
    Assert.Equal(@"<?xml version=""1.0"" encoding=""utf-8""?> <configuration> <system.web> <compilation baz=""test"" /> </system.web> </configuration>", mockProjectSystem.OpenFile("web.config").ReadToEnd());
}

// If reverting a transform throws (e.g. the target file is locked/unreadable),
// uninstall must still proceed: remaining content is removed and the package is
// dropped from the local repository.
[Fact]
public void RemovePackageWithTransformFileThatThrowsContinuesRemovingPackage()
{
    // Arrange
    var mockProjectSystem = new MockProjectSystem();
    var mockRepository = new MockPackageRepository();
    var localRepository = new MockPackageRepository();
    // Opening web.config throws, simulating an inaccessible file.
    mockProjectSystem.AddFile("web.config", () => { throw new UnauthorizedAccessException(); });
    mockProjectSystem.AddFile("foo.txt");
    var projectManager = new ProjectManager(mockRepository, new DefaultPackagePathResolver(new MockProjectSystem()), mockProjectSystem, localRepository);
    var package = new Mock<IPackage>();
    package.Setup(m => m.Id).Returns("A");
    package.Setup(m => m.Version).Returns(new SemanticVersion("1.0"));
    var file = new Mock<IPackageFile>();
    var contentFile = new Mock<IPackageFile>();
    contentFile.Setup(m => m.Path).Returns(@"content\foo.txt");
    contentFile.Setup(m => m.GetStream()).Returns(new MemoryStream());
    file.Setup(m => m.Path).Returns(@"content\web.config.transform");
    file.Setup(m => m.GetStream()).Returns(() => @"<configuration> <system.web> <compilation debug=""true"" targetFramework=""4.0"" /> </system.web> </configuration> ".AsStream());
    package.Setup(m => m.GetFiles()).Returns(new[] { file.Object, contentFile.Object });
    mockRepository.AddPackage(package.Object);
    projectManager.LocalRepository.AddPackage(package.Object);

    // Act
    projectManager.RemovePackageReference("A");

    // Assert
    Assert.False(mockProjectSystem.FileExists("foo.txt"));
    Assert.False(localRepository.Exists(package.Object));
}

// A transform whose target file is unsupported by the project system must be
// ignored on uninstall (no file is touched or created).
[Fact]
public void RemovePackageWithUnsupportedTransformFileDoesNothing()
{
    // Arrange
    var mockProjectSystem = new Mock<MockProjectSystem>() { CallBase = true };
    mockProjectSystem.Setup(m => m.IsSupportedFile("web.config")).Returns(false);
    var mockRepository = new MockPackageRepository();
    var projectManager = new ProjectManager(mockRepository, new DefaultPackagePathResolver(new MockProjectSystem()), mockProjectSystem.Object, new MockPackageRepository());
    var package = new Mock<IPackage>();
    package.Setup(m => m.Id).Returns("A");
    package.Setup(m => m.Version).Returns(new SemanticVersion("1.0"));
    var file = new Mock<IPackageFile>();
    file.Setup(m => m.Path).Returns(@"content\web.config.transform");
    file.Setup(m => m.GetStream()).Returns(() => @"<configuration> <system.web> <compilation debug=""true"" targetFramework=""4.0"" /> </system.web> </configuration> ".AsStream());
    package.Setup(m => m.GetFiles()).Returns(new[] { file.Object });
    mockRepository.AddPackage(package.Object);
    projectManager.LocalRepository.AddPackage(package.Object);

    // Act
    projectManager.RemovePackageReference("A");

    // Assert
    Assert.False(mockProjectSystem.Object.FileExists("web.config"));
}

// Directories created solely for a package's content files are deleted again
// once the uninstall leaves them empty.
[Fact]
public void RemovePackageRemovesDirectoriesAddedByPackageFilesIfEmpty()
{
    // Arrange
    var mockProjectSystem = new MockProjectSystem();
    var mockRepository = new MockPackageRepository();
    var projectManager = new ProjectManager(mockRepository, new DefaultPackagePathResolver(new MockProjectSystem()), mockProjectSystem, new MockPackageRepository());
    IPackage packageA = PackageUtility.CreatePackage("A", "1.0", new[] { @"sub\file1", @"sub\file2" });
    mockRepository.AddPackage(packageA);
    projectManager.AddPackageReference("A");

    // Act
    projectManager.RemovePackageReference("A");

    // Assert
    Assert.True(mockProjectSystem.Deleted.Contains(@"sub\file1"));
    Assert.True(mockProjectSystem.Deleted.Contains(@"sub\file2"));
    Assert.True(mockProjectSystem.Deleted.Contains("sub"));
}

// AddPackageReference on an already-installed package id upgrades it (and its
// exact-version dependency) instead of failing.
[Fact]
public void AddPackageReferenceWhenOlderVersionOfPackageInstalledDoesAnUpgrade()
{
    // Arrange
    var sourceRepository = new MockPackageRepository();
    var projectSystem = new MockProjectSystem();
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository());
    IPackage packageA10 = PackageUtility.CreatePackage("A", "1.0", dependencies: new List<PackageDependency> { PackageDependency.CreateDependency("B", "[1.0]") }, content: new[] { "foo" });
    IPackage packageA20 = PackageUtility.CreatePackage("A", "2.0", dependencies: new List<PackageDependency> { PackageDependency.CreateDependency("B", "[2.0]") }, content: new[] { "bar" });
    IPackage packageB10 = PackageUtility.CreatePackage("B", "1.0", content: new[] { "foo" });
    IPackage packageB20 = PackageUtility.CreatePackage("B", "2.0", content: new[] { "foo" });
    projectManager.LocalRepository.AddPackage(packageA10);
    projectManager.LocalRepository.AddPackage(packageB10);
    sourceRepository.AddPackage(packageA10);
    sourceRepository.AddPackage(packageA20);
    sourceRepository.AddPackage(packageB10);
    sourceRepository.AddPackage(packageB20);

    // Act
    projectManager.AddPackageReference("A");

    // Assert
    Assert.False(projectManager.LocalRepository.Exists(packageA10));
    Assert.False(projectManager.LocalRepository.Exists(packageB10));
    Assert.True(projectManager.LocalRepository.Exists(packageA20));
    Assert.True(projectManager.LocalRepository.Exists(packageB20));
}

// Argument validation: null/empty package id is rejected with the parameter name.
[Fact]
public void UpdatePackageNullOrEmptyPackageIdThrows()
{
    // Arrange
    ProjectManager packageManager = CreateProjectManager();

    // Act & Assert
    ExceptionAssert.ThrowsArgNullOrEmpty(() => packageManager.UpdatePackageReference(null), "packageId");
    ExceptionAssert.ThrowsArgNullOrEmpty(() => packageManager.UpdatePackageReference(String.Empty), "packageId");
}

// Updating A brings in its new/changed dependencies (C 2.0, D 1.0), keeps the
// unchanged one (B 1.0), and removes versions no longer referenced.
[Fact]
public void UpdatePackageReferenceWithMixedDependenciesUpdatesPackageAndDependenciesIfUnused()
{
    // Arrange
    var sourceRepository = new MockPackageRepository();
    var projectSystem = new MockProjectSystem();
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository());

    // A 1.0 -> [B 1.0, C 1.0]
    IPackage packageA10 = PackageUtility.CreatePackage("A", "1.0", dependencies: new List<PackageDependency> { PackageDependency.CreateDependency("B","[1.0]"), PackageDependency.CreateDependency("C","[1.0]") }, content: new[] { "A.file" });
    IPackage packageB10 = PackageUtility.CreatePackage("B", "1.0", content: new[] { "B.fie" });
    IPackage packageC10 = PackageUtility.CreatePackage("C", "1.0", content: new[] { "C.file" });
    sourceRepository.AddPackage(packageA10);
    sourceRepository.AddPackage(packageB10);
    sourceRepository.AddPackage(packageC10);
    projectManager.LocalRepository.AddPackage(packageA10);
    projectManager.LocalRepository.AddPackage(packageB10);
    projectManager.LocalRepository.AddPackage(packageC10);
    IPackage packageA20 = PackageUtility.CreatePackage("A", "2.0", dependencies: new List<PackageDependency> { PackageDependency.CreateDependency("B", "[1.0]"), PackageDependency.CreateDependency("C", "[2.0]"), PackageDependency.CreateDependency("D", "[1.0]") }, content: new[] { "A.20.file" });
    IPackage packageC20 = PackageUtility.CreatePackage("C", "2.0", content: new[] { "C.20" });
    IPackage packageD10 = PackageUtility.CreatePackage("D", "1.0", content: new[] { "D.20" });

    // A 2.0 -> [B 1.0, C 2.0, D 1.0]
    sourceRepository.AddPackage(packageA10);
    sourceRepository.AddPackage(packageA20);
    sourceRepository.AddPackage(packageB10);
    sourceRepository.AddPackage(packageC10);
    sourceRepository.AddPackage(packageC20);
    sourceRepository.AddPackage(packageD10);

    // Act
    projectManager.UpdatePackageReference("A");

    // Assert
    Assert.True(projectManager.LocalRepository.Exists(packageA20));
    Assert.True(projectManager.LocalRepository.Exists(packageB10));
    Assert.True(projectManager.LocalRepository.Exists(packageC20));
    Assert.True(projectManager.LocalRepository.Exists(packageD10));
    Assert.False(projectManager.LocalRepository.Exists(packageA10));
    Assert.False(projectManager.LocalRepository.Exists(packageC10));
}

// Updating a package the project does not reference fails with a clear message.
[Fact]
public void UpdatePackageReferenceIfPackageNotReferencedThrows()
{
    // Arrange
    var sourceRepository = new MockPackageRepository();
    var projectSystem = new MockProjectSystem();
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository());

    // Act & Assert
    ExceptionAssert.Throws<InvalidOperationException>(() => projectManager.UpdatePackageReference("A"), @"C:\MockFileSystem\ does not reference 'A'.");
}

// "Update" to a version older than the installed one is rejected (downgrades
// are not updates).
[Fact]
public void UpdatePackageReferenceToOlderVersionThrows()
{
    // Arrange
    var sourceRepository = new MockPackageRepository();
    var projectSystem = new MockProjectSystem();
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository());

    // A 1.0 -> [B 1.0]
    IPackage packageA10 = PackageUtility.CreatePackage("A", "1.0");
    IPackage packageA20 = PackageUtility.CreatePackage("A", "2.0");
    IPackage packageA30 = PackageUtility.CreatePackage("A", "3.0");
    sourceRepository.AddPackage(packageA10);
    sourceRepository.AddPackage(packageA20);
    sourceRepository.AddPackage(packageA30);
    projectManager.LocalRepository.AddPackage(packageA20);

    // Act & Assert
    ExceptionAssert.Throws<InvalidOperationException>(() => projectManager.UpdatePackageReference("A", version: SemanticVersion.Parse("1.0")), @"Already referencing a newer version of 'A'.");
}

// If the updated package needs a dependency version the source repository does
// not have, the update fails with the unresolved-dependency message.
[Fact]
public void UpdatePackageReferenceWithUnresolvedDependencyThrows()
{
    // Arrange
    var sourceRepository = new MockPackageRepository();
    var projectSystem = new MockProjectSystem();
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository());

    // A 1.0 -> [B 1.0]
    IPackage packageA10 = PackageUtility.CreatePackage("A", "1.0", dependencies: new List<PackageDependency> { PackageDependency.CreateDependency("B", "[1.0]"), });
    IPackage packageB10 = PackageUtility.CreatePackage("B", "1.0");
    projectManager.LocalRepository.AddPackage(packageA10);
    projectManager.LocalRepository.AddPackage(packageB10);
    sourceRepository.AddPackage(packageA10);
    sourceRepository.AddPackage(packageB10);

    // A 2.0 -> [B 2.0]
    IPackage packageA20 = PackageUtility.CreatePackage("A", "2.0", dependencies: new List<PackageDependency> { PackageDependency.CreateDependency("B", "[2.0]") });
    sourceRepository.AddPackage(packageA20);

    // Act & Assert
    ExceptionAssert.Throws<InvalidOperationException>(() => projectManager.UpdatePackageReference("A"), "Unable to resolve dependency 'B (= 2.0)'.");
}

// updateDependencies: false updates only the named package; its dependencies
// are left at their installed versions.
[Fact]
public void UpdatePackageReferenceWithUpdateDependenciesSetToFalseIgnoresDependencies()
{
    // Arrange
    var sourceRepository = new MockPackageRepository();
    var projectSystem = new MockProjectSystem();
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository());

    // A 1.0 -> [B 1.0]
    IPackage packageA10 = PackageUtility.CreatePackage("A", "1.0", dependencies: new List<PackageDependency> { PackageDependency.CreateDependency("B", "[1.0]"), }, content: new[] { "A.cs" });
    IPackage packageB10 = PackageUtility.CreatePackage("B", "1.0", content: new[] { "B.fs.spp" });
    projectManager.LocalRepository.AddPackage(packageA10);
    projectManager.LocalRepository.AddPackage(packageB10);
    sourceRepository.AddPackage(packageA10);
    sourceRepository.AddPackage(packageB10);

    // A 2.0 -> [B 2.0]
    IPackage packageA20 = PackageUtility.CreatePackage("A", "2.0", dependencies: new List<PackageDependency> { PackageDependency.CreateDependency("B", "[2.0]"), }, content: new[] { "D.a" });
    IPackage packageB20 = PackageUtility.CreatePackage("B", "2.0", content: new[] { "B.s" });
    sourceRepository.AddPackage(packageA20);
    sourceRepository.AddPackage(packageB20);

    // Act
    projectManager.UpdatePackageReference("A", version: null, updateDependencies: false, allowPrereleaseVersions: false);

    // Assert
    Assert.True(projectManager.LocalRepository.Exists(packageA20));
    Assert.False(projectManager.LocalRepository.Exists(packageA10));
    Assert.True(projectManager.LocalRepository.Exists(packageB10));
    Assert.False(projectManager.LocalRepository.Exists(packageB20));
}

// A version constraint from the constraint provider ([1.0, 2.0)) pins the
// package; the update is a no-op rather than an error.
[Fact]
public void UpdatePackageHasNoEffectIfConstraintsDefinedDontAllowForUpdates()
{
    // Arrange
    var sourceRepository = new MockPackageRepository();
    var projectSystem = new MockProjectSystem();
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository());
    var constraintProvider = new Mock<IPackageConstraintProvider>();
    constraintProvider.Setup(m => m.GetConstraint("A")).Returns(VersionUtility.ParseVersionSpec("[1.0, 2.0)"));
    constraintProvider.Setup(m => m.Source).Returns("foo");
    projectManager.ConstraintProvider = constraintProvider.Object;
    IPackage packageA10 = PackageUtility.CreatePackage("A", "1.0");
    IPackage packageA20 = PackageUtility.CreatePackage("A", "2.0");
    projectManager.LocalRepository.AddPackage(packageA10);
    sourceRepository.AddPackage(packageA10);
    sourceRepository.AddPackage(packageA20);

    // Act
    projectManager.UpdatePackageReference("A");

    // Assert
    Assert.True(projectManager.LocalRepository.Exists(packageA10));
    Assert.False(projectManager.LocalRepository.Exists(packageA20));
}

// Updating a dependency (C) succeeds when every dependent's range (A: >= 1.0,
// B: <= 2.0) still admits the new version.
[Fact]
public void UpdateDependencyDependentsHaveSatisfyableDependencies()
{
    // Arrange
    var sourceRepository = new MockPackageRepository();
    var projectSystem = new MockProjectSystem();
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository());

    // A 1.0 -> [C >= 1.0]
    IPackage packageA10 = PackageUtility.CreatePackage("A", "1.0", dependencies: new List<PackageDependency> { PackageDependency.CreateDependency("C", "1.0") }, content: new[] { "A" });

    // B 1.0 -> [C <= 2.0]
    IPackage packageB10 = PackageUtility.CreatePackage("B", "1.0", dependencies: new List<PackageDependency> { PackageDependency.CreateDependency("C", "2.0") }, content: new[] { "B" });
    IPackage packageC10 = PackageUtility.CreatePackage("C", "1.0", content: new[] { "C" });
    projectManager.LocalRepository.AddPackage(packageA10);
    projectManager.LocalRepository.AddPackage(packageB10);
    projectManager.LocalRepository.AddPackage(packageC10);
    sourceRepository.AddPackage(packageA10);
    sourceRepository.AddPackage(packageB10);
    sourceRepository.AddPackage(packageC10);
    IPackage packageC20 = PackageUtility.CreatePackage("C", "2.0", content: new[] { "C2" });

    // A 2.0 -> [B 1.0, C 2.0, D 1.0]
    sourceRepository.AddPackage(packageA10);
    sourceRepository.AddPackage(packageB10);
    sourceRepository.AddPackage(packageC10);
    sourceRepository.AddPackage(packageC20);

    // Act
    projectManager.UpdatePackageReference("C");

    // Assert
    Assert.True(projectManager.LocalRepository.Exists(packageA10));
    Assert.True(projectManager.LocalRepository.Exists(packageB10));
    Assert.True(projectManager.LocalRepository.Exists(packageC20));
    Assert.False(projectManager.LocalRepository.Exists(packageC10));
}

// With no explicit version and prerelease disallowed, a stable 1.0 is not an
// "update" for an installed 2.0-alpha — nothing changes.
[Fact]
public void UpdatePackageReferenceDoesNothingIfVersionIsNotSpecifiedAndNewVersionIsLessThanOldPrereleaseVersion()
{
    // Arrange
    var sourceRepository = new MockPackageRepository();
    var projectSystem = new MockProjectSystem();
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository());
    var packageA1 = PackageUtility.CreatePackage("A", "1.0", content: new string[] { "good" });
    var packageA2 = PackageUtility.CreatePackage("A", "2.0-alpha", content: new string[] { "excellent" });

    // project has A 2.0alpha installed
    projectManager.LocalRepository.AddPackage(packageA2);
    sourceRepository.AddPackage(packageA1);

    // Act
    projectManager.UpdatePackageReference("A", version: null, updateDependencies: false, allowPrereleaseVersions: false);

    // Assert
    Assert.True(projectManager.LocalRepository.Exists("A", new SemanticVersion("2.0-alpha")));
    Assert.False(projectManager.LocalRepository.Exists("A", new SemanticVersion("1.0")));
}

// allowPrereleaseVersions: true lets the update move from stable 1.0 to the
// newer 2.0-alpha prerelease.
[Fact]
public void UpdatePackageReferenceUpdateToNewerVersionIfPrereleaseFlagIsSet()
{
    // Arrange
    var sourceRepository = new MockPackageRepository();
    var projectSystem = new MockProjectSystem();
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository());
    var packageA1 = PackageUtility.CreatePackage("A", "1.0", content: new string[] {"good"});
    var packageA2 = PackageUtility.CreatePackage("A", "2.0-alpha", content: new string[] {"excellent"});

    // project has A 1.0 installed
    projectManager.LocalRepository.AddPackage(packageA1);
    sourceRepository.AddPackage(packageA2);

    // Act
    projectManager.UpdatePackageReference("A", version: null, updateDependencies: false, allowPrereleaseVersions: true);

    // Assert
    Assert.True(projectManager.LocalRepository.Exists("A", new SemanticVersion("2.0-alpha")));
}

// Updating A upgrades its dependency C; unrelated dependent G (C >= 1.0) is
// still satisfied by C 2.0, so everything resolves.
[Fact]
public void UpdatePackageReferenceWithSatisfyableDependencies()
{
    // Arrange
    var sourceRepository = new MockPackageRepository();
    var projectSystem = new MockProjectSystem();
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository());

    // A 1.0 -> [B 1.0, C 1.0]
    IPackage packageA10 = PackageUtility.CreatePackage("A", "1.0", dependencies: new List<PackageDependency> { PackageDependency.CreateDependency("B", "[1.0]"), PackageDependency.CreateDependency("C", "[1.0]") }, content: new[] { "file" });
    IPackage packageB10 = PackageUtility.CreatePackage("B", "1.0", new[] { "Bfile" });
    IPackage packageC10 = PackageUtility.CreatePackage("C", "1.0", new[] { "Cfile" });

    // G 1.0 -> [C (>= 1.0)]
    IPackage packageG10 = PackageUtility.CreatePackage("G", "1.0", dependencies: new List<PackageDependency> { PackageDependency.CreateDependency("C", "1.0") }, content: new[] { "Gfile" });
    projectManager.LocalRepository.AddPackage(packageA10);
    projectManager.LocalRepository.AddPackage(packageB10);
    projectManager.LocalRepository.AddPackage(packageC10);
    projectManager.LocalRepository.AddPackage(packageG10);
    sourceRepository.AddPackage(packageA10);
    sourceRepository.AddPackage(packageB10);
    sourceRepository.AddPackage(packageC10);
    sourceRepository.AddPackage(packageG10);
    IPackage packageA20 = PackageUtility.CreatePackage("A", "2.0", dependencies: new List<PackageDependency> { PackageDependency.CreateDependency("B", "[1.0]"), PackageDependency.CreateDependency("C", "[2.0]"), PackageDependency.CreateDependency("D", "[1.0]") }, content: new[] { "A20file" });
    IPackage packageC20 = PackageUtility.CreatePackage("C", "2.0", new[] { "C20file" });
    IPackage packageD10 = PackageUtility.CreatePackage("D", "1.0", new[] { "D20file" });

    // A 2.0 -> [B 1.0, C 2.0, D 1.0]
    sourceRepository.AddPackage(packageA10);
    sourceRepository.AddPackage(packageA20);
    sourceRepository.AddPackage(packageB10);
    sourceRepository.AddPackage(packageC10);
    sourceRepository.AddPackage(packageC20);
    sourceRepository.AddPackage(packageD10);

    // Act
    projectManager.UpdatePackageReference("A");

    // Assert
    Assert.True(projectManager.LocalRepository.Exists(packageA20));
    Assert.True(projectManager.LocalRepository.Exists(packageB10));
    Assert.True(projectManager.LocalRepository.Exists(packageC20));
    Assert.True(projectManager.LocalRepository.Exists(packageD10));
    Assert.True(projectManager.LocalRepository.Exists(packageG10));
    Assert.False(projectManager.LocalRepository.Exists(packageC10));
    Assert.False(projectManager.LocalRepository.Exists(packageA10));
}

// Dependent G pins C to exactly 1.0, so upgrading A (which needs C 2.0) must
// fail with a conflict naming G.
[Fact]
public void UpdatePackageReferenceWithDependenciesInUseThrowsConflictError()
{
    // Arrange
    var sourceRepository = new MockPackageRepository();
    var projectSystem = new MockProjectSystem();
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository());

    // A 1.0 -> [B 1.0, C 1.0]
    IPackage packageA10 = PackageUtility.CreatePackage("A", "1.0", dependencies: new List<PackageDependency> { PackageDependency.CreateDependency("B", "[1.0]"), PackageDependency.CreateDependency("C", "[1.0]") }, content: new[] { "afile" });
    IPackage packageB10 = PackageUtility.CreatePackage("B", "1.0", content: new[] { "Bfile" });
    IPackage packageC10 = PackageUtility.CreatePackage("C", "1.0", content: new[] { "Cfile" });

    // G 1.0 -> [C 1.0]
    IPackage packageG10 = PackageUtility.CreatePackage("G", "1.0", dependencies: new List<PackageDependency> { PackageDependency.CreateDependency("C", "[1.0]") }, content: new[] { "gfile" });
    projectManager.LocalRepository.AddPackage(packageA10);
    projectManager.LocalRepository.AddPackage(packageB10);
    projectManager.LocalRepository.AddPackage(packageC10);
    projectManager.LocalRepository.AddPackage(packageG10);
    sourceRepository.AddPackage(packageA10);
    sourceRepository.AddPackage(packageB10);
    sourceRepository.AddPackage(packageC10);
    sourceRepository.AddPackage(packageG10);
    IPackage packageA20 = PackageUtility.CreatePackage("A", "2.0", dependencies: new List<PackageDependency> { PackageDependency.CreateDependency("B", "[1.0]"), PackageDependency.CreateDependency("C", "[2.0]"), PackageDependency.CreateDependency("D", "[1.0]") }, content: new[] { "a20file" });
    IPackage packageC20 = PackageUtility.CreatePackage("C", "2.0", content: new[] { "cfile" });
    IPackage packageD10 = PackageUtility.CreatePackage("D", "1.0", content: new[] { "dfile" });

    // A 2.0 -> [B 1.0, C 2.0, D 1.0]
    sourceRepository.AddPackage(packageA10);
    sourceRepository.AddPackage(packageA20);
    sourceRepository.AddPackage(packageB10);
    sourceRepository.AddPackage(packageC10);
    sourceRepository.AddPackage(packageC20);
    sourceRepository.AddPackage(packageD10);

    // Act
    ExceptionAssert.Throws<InvalidOperationException>(() => projectManager.UpdatePackageReference("A"), "Updating 'C 1.0' to 'C 2.0' failed. Unable to find a version of 'G' that is compatible with 'C 2.0'.");
}

// Updating B to 3.0 also walks up to dependent A, moving it to A 2.0 whose
// range [1.0, 3.0] admits the new B.
[Fact]
public void UpdatePackageReferenceFromRepositorySuccesfullyUpdatesDependentsIfDependentsAreResolvable()
{
    // Arrange
    var sourceRepository = new MockPackageRepository();
    var projectSystem = new MockProjectSystem();
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository());
    IPackage packageA10 = PackageUtility.CreatePackage("A", "1.0", dependencies: new List<PackageDependency> { PackageDependency.CreateDependency("B", "[1.0]") }, content: new[] { "afile" });
    IPackage packageA20 = PackageUtility.CreatePackage("A", "2.0", dependencies: new List<PackageDependency> { PackageDependency.CreateDependency("B", "[1.0, 3.0]") }, content: new[] { "a2file" });
    IPackage packageB10 = PackageUtility.CreatePackage("B", "1.0", content: new[] { "bfile" });
    IPackage packageB20 = PackageUtility.CreatePackage("B", "2.0", content: new[] { "b2file" });
    IPackage packageB30 = PackageUtility.CreatePackage("B", "3.0", content: new[] { "b3file" });
    projectManager.LocalRepository.AddPackage(packageA10);
    projectManager.LocalRepository.AddPackage(packageB10);
    sourceRepository.AddPackage(packageA10);
    sourceRepository.AddPackage(packageA20);
    sourceRepository.AddPackage(packageB10);
    sourceRepository.AddPackage(packageB20);
    sourceRepository.AddPackage(packageB30);

    // Act
    projectManager.UpdatePackageReference("B");

    // Assert
    Assert.False(projectManager.LocalRepository.Exists(packageA10));
    Assert.False(projectManager.LocalRepository.Exists(packageB10));
    Assert.True(projectManager.LocalRepository.Exists(packageA20));
    Assert.True(projectManager.LocalRepository.Exists(packageB30));
}

// No version of dependent A works with B 2.0, so updating B fails.
[Fact]
public void UpdatePackageReferenceFromRepositoryFailsIfPackageHasUnresolvableDependents()
{
    // Arrange
    var sourceRepository = new MockPackageRepository();
    var projectSystem = new MockProjectSystem();
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository());

    // A -> B 1.0
    IPackage packageA10 = PackageUtility.CreatePackage("A", "1.0", dependencies: new List<PackageDependency> { PackageDependency.CreateDependency("B", "[1.0]") }, content: new[] { "afile" });
    IPackage packageB10 = PackageUtility.CreatePackage("B", "1.0", content: new[] { "bfile" });
    IPackage packageB20 = PackageUtility.CreatePackage("B", "2.0", content: new[] { "cfile" });
    projectManager.LocalRepository.AddPackage(packageA10);
    projectManager.LocalRepository.AddPackage(packageB10);
    sourceRepository.AddPackage(packageA10);
    sourceRepository.AddPackage(packageB10);
    sourceRepository.AddPackage(packageB20);

    // Act & Assert
    ExceptionAssert.Throws<InvalidOperationException>(() => projectManager.UpdatePackageReference("B"), "Updating 'B 1.0' to 'B 2.0' failed. Unable to find a version of 'A' that is compatible with 'B 2.0'.");
}

// A can move to A 2.0 (B [2.0]) but dependent C only accepts B [1.0]; one
// unresolvable dependent is enough to fail the update.
[Fact]
public void UpdatePackageReferenceFromRepositoryFailsIfPackageHasAnyUnresolvableDependents()
{
    // Arrange
    var sourceRepository = new MockPackageRepository();
    var projectSystem = new MockProjectSystem();
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository());

    // A 1.0 -> B 1.0
    IPackage packageA10 = PackageUtility.CreatePackage("A", "1.0", dependencies: new List<PackageDependency> { PackageDependency.CreateDependency("B", "[1.0]") }, content: new[] { "afile" });

    // A 2.0 -> B [2.0]
    IPackage packageA20 = PackageUtility.CreatePackage("A", "2.0", dependencies: new List<PackageDependency> { PackageDependency.CreateDependency("B", "[2.0]") }, content: new[] { "afile" });

    // B 1.0
    IPackage packageB10 = PackageUtility.CreatePackage("B", "1.0", content: new[] { "bfile" });

    // B 2.0
    IPackage packageB20 = PackageUtility.CreatePackage("B", "2.0", content: new[] { "cfile" });

    // C 1.0 -> B [1.0]
    IPackage packageC10 = PackageUtility.CreatePackage("C", "1.0", dependencies: new List<PackageDependency> { PackageDependency.CreateDependency("B", "[1.0]") }, content: new[] { "bfile" });
    projectManager.LocalRepository.AddPackage(packageA10);
    projectManager.LocalRepository.AddPackage(packageB10);
    projectManager.LocalRepository.AddPackage(packageC10);
    sourceRepository.AddPackage(packageA10);
    sourceRepository.AddPackage(packageA20);
    sourceRepository.AddPackage(packageB10);
    sourceRepository.AddPackage(packageB20);
    sourceRepository.AddPackage(packageC10);

    // Act & Assert
    ExceptionAssert.Throws<InvalidOperationException>(() => projectManager.UpdatePackageReference("B"), "Updating 'B 1.0' to 'B 2.0' failed. Unable to find a version of 'C' that is compatible with 'B 2.0'.");
}

// Updating B pulls in both a new dependent version (A 2.0) and B 2.0's own new
// dependency (C 2.0) in one resolution pass.
[Fact]
public void UpdatePackageReferenceFromRepositoryOverlappingDependencies()
{
    // Arrange
    var sourceRepository = new MockPackageRepository();
    var projectSystem = new MockProjectSystem();
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository());

    // A 1.0 -> B 1.0
    IPackage packageA10 = PackageUtility.CreatePackage("A", "1.0", dependencies: new List<PackageDependency> { PackageDependency.CreateDependency("B", "[1.0]") }, content: new[] { "afile" });

    // A 2.0 -> B [2.0]
    IPackage packageA20 = PackageUtility.CreatePackage("A", "2.0", dependencies: new List<PackageDependency> { PackageDependency.CreateDependency("B", "[2.0]") }, content: new[] { "afile" });

    // B 1.0
    IPackage packageB10 = PackageUtility.CreatePackage("B", "1.0", content: new[] { "b1file" });

    // B 2.0 -> C 2.0
    IPackage packageB20 = PackageUtility.CreatePackage("B", "2.0", dependencies: new List<PackageDependency> { PackageDependency.CreateDependency("C", "2.0") }, content: new[] { "afile" });

    // C 2.0
    IPackage packageC20 = PackageUtility.CreatePackage("C", "2.0", content: new[] { "c2file" });
    projectManager.LocalRepository.AddPackage(packageA10);
    projectManager.LocalRepository.AddPackage(packageB10);
    sourceRepository.AddPackage(packageA10);
    sourceRepository.AddPackage(packageA20);
    sourceRepository.AddPackage(packageB10);
    sourceRepository.AddPackage(packageB20);
    sourceRepository.AddPackage(packageC20);

    // Act
    projectManager.UpdatePackageReference("B");

    // Assert
    Assert.False(projectManager.LocalRepository.Exists(packageA10));
    Assert.False(projectManager.LocalRepository.Exists(packageB10));
    Assert.True(projectManager.LocalRepository.Exists(packageA20));
    Assert.True(projectManager.LocalRepository.Exists(packageB20));
    Assert.True(projectManager.LocalRepository.Exists(packageC20));
}

// Updating C requires B 2.0, but A's only candidates cap B below 2.0 — a
// conflict two levels up the dependency chain must still be detected.
[Fact]
public void UpdatePackageReferenceFromRepositoryChainedIncompatibleDependents()
{
    // Arrange
    var sourceRepository = new MockPackageRepository();
    var projectSystem = new MockProjectSystem();
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository());

    // A 1.0 -> B [1.0]
    IPackage packageA10 = PackageUtility.CreatePackage("A", "1.0", dependencies: new List<PackageDependency> { PackageDependency.CreateDependency("B", "[1.0]") }, content: new[] { "afile" });

    // B 1.0 -> C [1.0]
    IPackage packageB10 = PackageUtility.CreatePackage("B", "1.0", dependencies: new List<PackageDependency> { PackageDependency.CreateDependency("C", "[1.0]") }, content: new[] { "bfile" });

    // C 1.0
    IPackage packageC10 = PackageUtility.CreatePackage("C", "1.0", content: new[] { "c1file" });

    // A 2.0 -> B [1.0, 2.0)
    IPackage packageA20 = PackageUtility.CreatePackage("A", "2.0", dependencies: new List<PackageDependency> { PackageDependency.CreateDependency("B", "[1.0, 2.0)") }, content: new[] { "afile" });

    // B 2.0 -> C [2.0]
    IPackage packageB20 = PackageUtility.CreatePackage("B", "2.0", dependencies: new List<PackageDependency> { PackageDependency.CreateDependency("C", "[2.0]") }, content: new[] { "cfile" });

    // C 2.0
    IPackage packageC20 = PackageUtility.CreatePackage("C", "2.0", content: new[] { "c2file" });
    projectManager.LocalRepository.AddPackage(packageA10);
    projectManager.LocalRepository.AddPackage(packageB10);
    projectManager.LocalRepository.AddPackage(packageC10);
    sourceRepository.AddPackage(packageA10);
    sourceRepository.AddPackage(packageA20);
    sourceRepository.AddPackage(packageB10);
    sourceRepository.AddPackage(packageB20);
    sourceRepository.AddPackage(packageC10);
    sourceRepository.AddPackage(packageC20);

    // Act & Assert
    ExceptionAssert.Throws<InvalidOperationException>(() => projectManager.UpdatePackageReference("C"), "Updating 'C 1.0' to 'C 2.0' failed. Unable to find a version of 'B' that is compatible with 'C 2.0'.");
}

// With no version argument, update jumps straight to the highest available
// version (3.5), not just the next one.
[Fact]
public void UpdatePackageReferenceNoVersionSpecifiedShouldUpdateToLatest()
{
    // Arrange
    var sourceRepository = new MockPackageRepository();
    var projectSystem = new MockProjectSystem();
    var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository());
    IPackage package10 = PackageUtility.CreatePackage("NetFramework", "1.0", content: new[] { "1.0f" });
    projectManager.LocalRepository.AddPackage(package10);
    sourceRepository.AddPackage(package10);
    IPackage package11 = PackageUtility.CreatePackage("NetFramework", "1.1", content: new[] { "1.1f" });
    sourceRepository.AddPackage(package11);
    IPackage package20 = PackageUtility.CreatePackage("NetFramework", "2.0", content: new[] { "2.0f" });
    sourceRepository.AddPackage(package20);
    IPackage package35 = PackageUtility.CreatePackage("NetFramework", "3.5", content: new[] { "3.5f" });
    sourceRepository.AddPackage(package35);

    // Act
    projectManager.UpdatePackageReference("NetFramework");

    // Assert
    Assert.False(projectManager.LocalRepository.Exists(package10));
    Assert.True(projectManager.LocalRepository.Exists(package35));
}

// NOTE(review): this test continues past the end of this chunk.
[Fact]
public void UpdatePackageReferenceVersionSpeciedShouldUpdateToSpecifiedVersion()
{
    // Arrange
    var sourceRepository = new
MockPackageRepository(); var projectSystem = new MockProjectSystem(); var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository()); var package10 = PackageUtility.CreatePackage("NetFramework", "1.0", new[] { "file.dll" }); projectManager.LocalRepository.AddPackage(package10); sourceRepository.AddPackage(package10); var package11 = PackageUtility.CreatePackage("NetFramework", "1.1", new[] { "file.dll" }); sourceRepository.AddPackage(package11); var package20 = PackageUtility.CreatePackage("NetFramework", "2.0", new[] { "file.dll" }); sourceRepository.AddPackage(package20); // Act projectManager.UpdatePackageReference("NetFramework", new SemanticVersion("1.1")); // Assert Assert.False(projectManager.LocalRepository.Exists(package10)); Assert.True(projectManager.LocalRepository.Exists(package11)); } [Fact] public void RemovingPackageReferenceRemovesPackageButNotDependencies() { // Arrange var sourceRepository = new MockPackageRepository(); var projectSystem = new MockProjectSystem(); var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository()); IPackage packageA = PackageUtility.CreatePackage("A", "1.0", dependencies: new List<PackageDependency> { new PackageDependency("B") }, content: new[] { "A" }); IPackage packageB = PackageUtility.CreatePackage("B", "1.0", content: new[] { "B" }); projectManager.LocalRepository.AddPackage(packageA); projectManager.LocalRepository.AddPackage(packageB); sourceRepository.AddPackage(packageA); sourceRepository.AddPackage(packageB); // Act projectManager.RemovePackageReference("A"); // Assert Assert.False(projectManager.LocalRepository.Exists(packageA)); Assert.True(projectManager.LocalRepository.Exists(packageB)); } [Fact] public void RemovePackageReferenceOnlyRemovedAssembliesFromTheTargetFramework() { // Arrange var net20 = new FrameworkName(".NETFramework", 
new Version("2.0")); var sourceRepository = new MockPackageRepository(); var projectSystem = new MockProjectSystem(net20); var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository()); IPackageAssemblyReference net20Reference = PackageUtility.CreateAssemblyReference("foo.dll", net20); IPackageAssemblyReference net40Reference = PackageUtility.CreateAssemblyReference("bar.dll", new FrameworkName(".NETFramework", new Version("4.0"))); IPackage packageA = PackageUtility.CreatePackage("A", "1.0", content: null, assemblyReferences: new[] { net20Reference, net40Reference }, tools: null, dependencies: null, downloadCount: 0, description: null, summary: null, listed: true, tags: null); projectManager.LocalRepository.AddPackage(packageA); sourceRepository.AddPackage(packageA); projectManager.AddPackageReference("A"); // Act projectManager.RemovePackageReference("A"); // Assert Assert.False(projectManager.LocalRepository.Exists(packageA)); Assert.Equal(1, projectSystem.Deleted.Count); Assert.True(projectSystem.Deleted.Contains("foo.dll")); } [Fact] public void ReAddingAPackageReferenceAfterRemovingADependencyShouldReReferenceAllDependencies() { // Arrange var sourceRepository = new MockPackageRepository(); var projectSystem = new MockProjectSystem(); var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository()); IPackage packageA = PackageUtility.CreatePackage("A", "1.0", dependencies: new List<PackageDependency> { new PackageDependency("B") }, content: new[] { "foo" }); IPackage packageB = PackageUtility.CreatePackage("B", "1.0", dependencies: new List<PackageDependency> { new PackageDependency("C") }, content: new[] { "bar" }); var packageC = PackageUtility.CreatePackage("C", "1.0", content: new[] { "baz" }); projectManager.LocalRepository.AddPackage(packageA); 
projectManager.LocalRepository.AddPackage(packageB); sourceRepository.AddPackage(packageA); sourceRepository.AddPackage(packageB); sourceRepository.AddPackage(packageC); // Act projectManager.AddPackageReference("A"); // Assert Assert.True(projectManager.LocalRepository.Exists(packageA)); Assert.True(projectManager.LocalRepository.Exists(packageB)); Assert.True(projectManager.LocalRepository.Exists(packageC)); } [Fact] public void AddPackageReferenceWithAnyNonCompatibleReferenceThrowsAndPackageIsNotReferenced() { // Arrange var mockProjectSystem = new Mock<MockProjectSystem>() { CallBase = true }; var localRepository = new MockPackageRepository(); var sourceRepository = new MockPackageRepository(); var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(mockProjectSystem.Object), mockProjectSystem.Object, localRepository); mockProjectSystem.Setup(m => m.TargetFramework).Returns(new FrameworkName(".NETFramework", new Version("2.0"))); var mockPackage = new Mock<IPackage>(); mockPackage.Setup(m => m.Id).Returns("A"); mockPackage.Setup(m => m.Version).Returns(new SemanticVersion("1.0")); mockPackage.Setup(m => m.Listed).Returns(true); var assemblyReference = PackageUtility.CreateAssemblyReference("foo.dll", new FrameworkName(".NETFramework", new Version("5.0"))); mockPackage.Setup(m => m.AssemblyReferences).Returns(new[] { assemblyReference }); sourceRepository.AddPackage(mockPackage.Object); // Act & Assert ExceptionAssert.Throws<InvalidOperationException>(() => projectManager.AddPackageReference("A"), "Could not install package 'A 1.0'. You are trying to install this package into a project that targets '.NETFramework,Version=v2.0', but the package does not contain any assembly references that are compatible with that framework. 
For more information, contact the package author."); Assert.False(localRepository.Exists(mockPackage.Object)); } [Fact] public void AddPackageReferenceWithAnyNonCompatibleFrameworkReferenceDoesNotThrow() { // Arrange var mockProjectSystem = new Mock<MockProjectSystem>() { CallBase = true }; var localRepository = new MockPackageRepository(); var sourceRepository = new MockPackageRepository(); var projectManager = new ProjectManager(sourceRepository, new DefaultPackagePathResolver(mockProjectSystem.Object), mockProjectSystem.Object, localRepository); mockProjectSystem.Setup(m => m.TargetFramework).Returns(VersionUtility.ParseFrameworkName("net20")); var mockPackage = new Mock<IPackage>(); mockPackage.Setup(m => m.Id).Returns("A"); mockPackage.Setup(m => m.Version).Returns(new SemanticVersion("1.0")); mockPackage.Setup(m => m.Listed).Returns(true); var frameworkReference = new FrameworkAssemblyReference("System.Web", new[] { VersionUtility.ParseFrameworkName("net50") }); mockPackage.Setup(m => m.FrameworkAssemblies).Returns(new[] { frameworkReference }); sourceRepository.AddPackage(mockPackage.Object); // Act & Assert projectManager.AddPackageReference("A"); Assert.True(localRepository.Exists(mockPackage.Object)); } private ProjectManager CreateProjectManager() { var projectSystem = new MockProjectSystem(); return new ProjectManager(new MockPackageRepository(), new DefaultPackagePathResolver(projectSystem), projectSystem, new MockPackageRepository()); } } }
// Copyright (c) The Avalonia Project. All rights reserved.
// Licensed under the MIT license. See licence.md file in the project root for full license information.

using Avalonia.Controls.Presenters;
using Avalonia.Controls.Primitives;
using Avalonia.Controls.Templates;
using Avalonia.LogicalTree;
using Avalonia.Styling;
using Avalonia.UnitTests;
using Avalonia.VisualTree;
using Moq;
using System;
using System.Linq;
using Xunit;
using Avalonia.Rendering;

namespace Avalonia.Controls.UnitTests.Presenters
{
    /// <summary>
    /// Tests for ContentControls that aren't hosted in a control template.
    /// </summary>
    public class ContentPresenterTests_Standalone
    {
        // UpdateChild must make the presenter itself the logical parent of direct (control) content.
        [Fact]
        public void Should_Set_Childs_Parent_To_Itself_Standalone()
        {
            var content = new Border();
            var target = new ContentPresenter { Content = content };

            target.UpdateChild();

            Assert.Same(target, content.Parent);
        }

        // The realized child must appear as the presenter's single logical child.
        [Fact]
        public void Should_Add_Child_To_Own_LogicalChildren_Standalone()
        {
            var content = new Border();
            var target = new ContentPresenter { Content = content };

            target.UpdateChild();

            var logicalChildren = target.GetLogicalChildren();

            Assert.Single(logicalChildren);
            Assert.Equal(content, logicalChildren.First());
        }

        // Replacing Content must detach the old templated child from the logical tree and
        // raise DetachedFromLogicalTree on it. The mock parent doubles as render/style root
        // so the presenter counts as attached.
        [Fact]
        public void Should_Raise_DetachedFromLogicalTree_On_Content_Changed_Standalone()
        {
            var target = new ContentPresenter
            {
                ContentTemplate = new FuncDataTemplate<string>(t => new ContentControl() { Content = t }, false)
            };

            var parentMock = new Mock<Control>();
            parentMock.As<IContentPresenterHost>();
            parentMock.As<IRenderRoot>();
            parentMock.As<IStyleRoot>();
            (target as ISetLogicalParent).SetParent(parentMock.Object);

            target.Content = "foo";
            target.UpdateChild();

            var foo = target.Child as ContentControl;
            bool foodetached = false;

            Assert.NotNull(foo);
            Assert.Equal("foo", foo.Content);

            foo.DetachedFromLogicalTree += delegate { foodetached = true; };

            target.Content = "bar";
            target.UpdateChild();

            var bar = target.Child as ContentControl;

            Assert.NotNull(bar);
            Assert.True(bar != foo);
            Assert.False((foo as IControl).IsAttachedToLogicalTree);
            Assert.True(foodetached);
        }

        // Same as above, but with the presenter hosted inside a templated ContentControl:
        // changing ContentControl.Content must detach the old child created by the data template.
        [Fact]
        public void Should_Raise_DetachedFromLogicalTree_In_ContentControl_On_Content_Changed_Standalone()
        {
            var contentControl = new ContentControl
            {
                Template = new FuncControlTemplate<ContentControl>(c => new ContentPresenter()
                {
                    Name = "PART_ContentPresenter",
                    [~ContentPresenter.ContentProperty] = c[~ContentControl.ContentProperty],
                    [~ContentPresenter.ContentTemplateProperty] = c[~ContentControl.ContentTemplateProperty]
                }),
                ContentTemplate = new FuncDataTemplate<string>(t => new ContentControl() { Content = t }, false)
            };

            var parentMock = new Mock<Control>();
            parentMock.As<IRenderRoot>();
            parentMock.As<IStyleRoot>();
            parentMock.As<ILogical>().SetupGet(l => l.IsAttachedToLogicalTree).Returns(true);
            (contentControl as ISetLogicalParent).SetParent(parentMock.Object);

            contentControl.ApplyTemplate();

            var target = contentControl.Presenter as ContentPresenter;

            contentControl.Content = "foo";
            target.UpdateChild();

            var tbfoo = target.Child as ContentControl;
            bool foodetached = false;

            Assert.NotNull(tbfoo);
            Assert.Equal("foo", tbfoo.Content);

            tbfoo.DetachedFromLogicalTree += delegate { foodetached = true; };

            contentControl.Content = "bar";
            target.UpdateChild();

            var tbbar = target.Child as ContentControl;

            Assert.NotNull(tbbar);
            Assert.True(tbbar != tbfoo);
            Assert.False((tbfoo as IControl).IsAttachedToLogicalTree);
            Assert.True(foodetached);
        }

        // Detaching the presenter itself (SetParent(null)) must also detach its child and raise the event.
        [Fact]
        public void Should_Raise_DetachedFromLogicalTree_On_Detached_Standalone()
        {
            var target = new ContentPresenter
            {
                ContentTemplate = new FuncDataTemplate<string>(t => new ContentControl() { Content = t }, false)
            };

            var parentMock = new Mock<Control>();
            parentMock.As<IContentPresenterHost>();
            parentMock.As<IRenderRoot>();
            parentMock.As<IStyleRoot>();
            (target as ISetLogicalParent).SetParent(parentMock.Object);

            target.Content = "foo";
            target.UpdateChild();

            var foo = target.Child as ContentControl;
            bool foodetached = false;

            Assert.NotNull(foo);
            Assert.Equal("foo", foo.Content);

            foo.DetachedFromLogicalTree += delegate { foodetached = true; };

            (target as ISetLogicalParent).SetParent(null);

            Assert.False((foo as IControl).IsAttachedToLogicalTree);
            Assert.True(foodetached);
        }

        // After a content change the old child must be unparented and replaced (not appended)
        // in the presenter's logical children.
        [Fact]
        public void Should_Remove_Old_Child_From_LogicalChildren_On_ContentChanged_Standalone()
        {
            var target = new ContentPresenter
            {
                ContentTemplate = new FuncDataTemplate<string>(t => new ContentControl() { Content = t }, false)
            };

            target.Content = "foo";
            target.UpdateChild();

            var foo = target.Child as ContentControl;

            Assert.NotNull(foo);

            var logicalChildren = target.GetLogicalChildren();

            Assert.Single(logicalChildren);

            target.Content = "bar";
            target.UpdateChild();

            Assert.Null(foo.Parent);

            logicalChildren = target.GetLogicalChildren();

            Assert.Single(logicalChildren);
            Assert.NotEqual(foo, logicalChildren.First());
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ namespace Apache.Ignite.Core.Impl { using System; using System.Collections.Generic; using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.IO; using System.Threading.Tasks; using Apache.Ignite.Core.Impl.Binary; using Apache.Ignite.Core.Impl.Binary.IO; using Apache.Ignite.Core.Impl.Binary.Metadata; using Apache.Ignite.Core.Impl.Common; using Apache.Ignite.Core.Impl.Memory; using Apache.Ignite.Core.Impl.Unmanaged; using BinaryReader = Apache.Ignite.Core.Impl.Binary.BinaryReader; using BinaryWriter = Apache.Ignite.Core.Impl.Binary.BinaryWriter; using UU = Apache.Ignite.Core.Impl.Unmanaged.UnmanagedUtils; /// <summary> /// Base class for interop targets. 
/// </summary> [SuppressMessage("ReSharper", "LocalVariableHidesMember")] internal abstract class PlatformTarget { /** */ protected const int True = 1; /** */ private const int OpMeta = -1; /** */ public const int OpNone = -2; /** */ private static readonly Dictionary<Type, FutureType> IgniteFutureTypeMap = new Dictionary<Type, FutureType> { {typeof(bool), FutureType.Bool}, {typeof(byte), FutureType.Byte}, {typeof(char), FutureType.Char}, {typeof(double), FutureType.Double}, {typeof(float), FutureType.Float}, {typeof(int), FutureType.Int}, {typeof(long), FutureType.Long}, {typeof(short), FutureType.Short} }; /** Unmanaged target. */ private readonly IUnmanagedTarget _target; /** Marshaller. */ private readonly Marshaller _marsh; /// <summary> /// Constructor. /// </summary> /// <param name="target">Target.</param> /// <param name="marsh">Marshaller.</param> protected PlatformTarget(IUnmanagedTarget target, Marshaller marsh) { Debug.Assert(target != null); Debug.Assert(marsh != null); _target = target; _marsh = marsh; } /// <summary> /// Unmanaged target. /// </summary> internal IUnmanagedTarget Target { get { return _target; } } /// <summary> /// Marshaller. /// </summary> internal Marshaller Marshaller { get { return _marsh; } } #region Static Helpers /// <summary> /// Write collection. /// </summary> /// <param name="writer">Writer.</param> /// <param name="vals">Values.</param> /// <returns>The same writer for chaining.</returns> protected static BinaryWriter WriteCollection<T>(BinaryWriter writer, ICollection<T> vals) { return WriteCollection<T, T>(writer, vals, null); } /// <summary> /// Write nullable collection. /// </summary> /// <param name="writer">Writer.</param> /// <param name="vals">Values.</param> /// <returns>The same writer for chaining.</returns> protected static BinaryWriter WriteNullableCollection<T>(BinaryWriter writer, ICollection<T> vals) { return WriteNullable(writer, vals, WriteCollection); } /// <summary> /// Write collection. 
/// </summary> /// <param name="writer">Writer.</param> /// <param name="vals">Values.</param> /// <param name="selector">A transform function to apply to each element.</param> /// <returns>The same writer for chaining.</returns> protected static BinaryWriter WriteCollection<T1, T2>(BinaryWriter writer, ICollection<T1> vals, Func<T1, T2> selector) { writer.WriteInt(vals.Count); if (selector == null) { foreach (var val in vals) writer.Write(val); } else { foreach (var val in vals) writer.Write(selector(val)); } return writer; } /// <summary> /// Write enumerable. /// </summary> /// <param name="writer">Writer.</param> /// <param name="vals">Values.</param> /// <returns>The same writer for chaining.</returns> protected static BinaryWriter WriteEnumerable<T>(BinaryWriter writer, IEnumerable<T> vals) { return WriteEnumerable<T, T>(writer, vals, null); } /// <summary> /// Write enumerable. /// </summary> /// <param name="writer">Writer.</param> /// <param name="vals">Values.</param> /// <param name="selector">A transform function to apply to each element.</param> /// <returns>The same writer for chaining.</returns> protected static BinaryWriter WriteEnumerable<T1, T2>(BinaryWriter writer, IEnumerable<T1> vals, Func<T1, T2> selector) { var col = vals as ICollection<T1>; if (col != null) return WriteCollection(writer, col, selector); var stream = writer.Stream; var pos = stream.Position; stream.Seek(4, SeekOrigin.Current); var size = 0; if (selector == null) { foreach (var val in vals) { writer.Write(val); size++; } } else { foreach (var val in vals) { writer.Write(selector(val)); size++; } } stream.WriteInt(pos, size); return writer; } /// <summary> /// Write dictionary. 
/// </summary> /// <param name="writer">Writer.</param> /// <param name="vals">Values.</param> /// <returns>The same writer.</returns> protected static BinaryWriter WriteDictionary<T1, T2>(BinaryWriter writer, IDictionary<T1, T2> vals) { writer.WriteInt(vals.Count); foreach (KeyValuePair<T1, T2> pair in vals) { writer.Write(pair.Key); writer.Write(pair.Value); } return writer; } /// <summary> /// Write a nullable item. /// </summary> /// <param name="writer">Writer.</param> /// <param name="item">Item.</param> /// <param name="writeItem">Write action to perform on item when it is not null.</param> /// <returns>The same writer for chaining.</returns> protected static BinaryWriter WriteNullable<T>(BinaryWriter writer, T item, Func<BinaryWriter, T, BinaryWriter> writeItem) where T : class { if (item == null) { writer.WriteBoolean(false); return writer; } writer.WriteBoolean(true); return writeItem(writer, item); } #endregion #region OUT operations /// <summary> /// Perform out operation. /// </summary> /// <param name="type">Operation type.</param> /// <param name="action">Action to be performed on the stream.</param> /// <returns></returns> protected long DoOutOp(int type, Action<IBinaryStream> action) { using (var stream = IgniteManager.Memory.Allocate().GetStream()) { action(stream); return UU.TargetInStreamOutLong(_target, type, stream.SynchronizeOutput()); } } /// <summary> /// Perform out operation. /// </summary> /// <param name="type">Operation type.</param> /// <param name="action">Action to be performed on the stream.</param> /// <returns></returns> protected long DoOutOp(int type, Action<BinaryWriter> action) { using (var stream = IgniteManager.Memory.Allocate().GetStream()) { var writer = _marsh.StartMarshal(stream); action(writer); FinishMarshal(writer); return UU.TargetInStreamOutLong(_target, type, stream.SynchronizeOutput()); } } /// <summary> /// Perform out operation. 
/// </summary> /// <param name="type">Operation type.</param> /// <param name="action">Action to be performed on the stream.</param> /// <returns></returns> protected IUnmanagedTarget DoOutOpObject(int type, Action<BinaryWriter> action) { using (var stream = IgniteManager.Memory.Allocate().GetStream()) { var writer = _marsh.StartMarshal(stream); action(writer); FinishMarshal(writer); return UU.TargetInStreamOutObject(_target, type, stream.SynchronizeOutput()); } } /// <summary> /// Perform simple output operation accepting single argument. /// </summary> /// <param name="type">Operation type.</param> /// <param name="val1">Value.</param> /// <returns>Result.</returns> protected long DoOutOp<T1>(int type, T1 val1) { return DoOutOp(type, writer => { writer.Write(val1); }); } /// <summary> /// Perform simple output operation accepting two arguments. /// </summary> /// <param name="type">Operation type.</param> /// <param name="val1">Value 1.</param> /// <param name="val2">Value 2.</param> /// <returns>Result.</returns> protected long DoOutOp<T1, T2>(int type, T1 val1, T2 val2) { return DoOutOp(type, writer => { writer.Write(val1); writer.Write(val2); }); } /// <summary> /// Perform simple output operation accepting three arguments. /// </summary> /// <param name="type">Operation type.</param> /// <param name="val1">Value 1.</param> /// <param name="val2">Value 2.</param> /// <param name="val3">Value 3.</param> /// <returns>Result.</returns> protected long DoOutOp<T1, T2, T3>(int type, T1 val1, T2 val2, T3 val3) { return DoOutOp(type, writer => { writer.Write(val1); writer.Write(val2); writer.Write(val3); }); } #endregion #region IN operations /// <summary> /// Perform in operation. 
/// </summary> /// <param name="type">Type.</param> /// <param name="action">Action.</param> protected void DoInOp(int type, Action<IBinaryStream> action) { using (var stream = IgniteManager.Memory.Allocate().GetStream()) { UU.TargetOutStream(_target, type, stream.MemoryPointer); stream.SynchronizeInput(); action(stream); } } /// <summary> /// Perform in operation. /// </summary> /// <param name="type">Type.</param> /// <param name="action">Action.</param> /// <returns>Result.</returns> protected T DoInOp<T>(int type, Func<IBinaryStream, T> action) { using (var stream = IgniteManager.Memory.Allocate().GetStream()) { UU.TargetOutStream(_target, type, stream.MemoryPointer); stream.SynchronizeInput(); return action(stream); } } /// <summary> /// Perform simple in operation returning immediate result. /// </summary> /// <param name="type">Type.</param> /// <returns>Result.</returns> protected T DoInOp<T>(int type) { using (var stream = IgniteManager.Memory.Allocate().GetStream()) { UU.TargetOutStream(_target, type, stream.MemoryPointer); stream.SynchronizeInput(); return Unmarshal<T>(stream); } } #endregion #region OUT-IN operations /// <summary> /// Perform out-in operation. /// </summary> /// <param name="type">Operation type.</param> /// <param name="outAction">Out action.</param> /// <param name="inAction">In action.</param> protected void DoOutInOp(int type, Action<BinaryWriter> outAction, Action<IBinaryStream> inAction) { using (PlatformMemoryStream outStream = IgniteManager.Memory.Allocate().GetStream()) { using (PlatformMemoryStream inStream = IgniteManager.Memory.Allocate().GetStream()) { BinaryWriter writer = _marsh.StartMarshal(outStream); outAction(writer); FinishMarshal(writer); UU.TargetInStreamOutStream(_target, type, outStream.SynchronizeOutput(), inStream.MemoryPointer); inStream.SynchronizeInput(); inAction(inStream); } } } /// <summary> /// Perform out-in operation. 
/// </summary> /// <param name="type">Operation type.</param> /// <param name="outAction">Out action.</param> /// <param name="inAction">In action.</param> /// <returns>Result.</returns> protected TR DoOutInOp<TR>(int type, Action<BinaryWriter> outAction, Func<IBinaryStream, TR> inAction) { using (PlatformMemoryStream outStream = IgniteManager.Memory.Allocate().GetStream()) { using (PlatformMemoryStream inStream = IgniteManager.Memory.Allocate().GetStream()) { BinaryWriter writer = _marsh.StartMarshal(outStream); outAction(writer); FinishMarshal(writer); UU.TargetInStreamOutStream(_target, type, outStream.SynchronizeOutput(), inStream.MemoryPointer); inStream.SynchronizeInput(); return inAction(inStream); } } } /// <summary> /// Perform out-in operation. /// </summary> /// <param name="type">Operation type.</param> /// <param name="outAction">Out action.</param> /// <param name="inAction">In action.</param> /// <param name="arg">Argument.</param> /// <returns>Result.</returns> protected unsafe TR DoOutInOp<TR>(int type, Action<BinaryWriter> outAction, Func<IBinaryStream, TR> inAction, void* arg) { using (PlatformMemoryStream outStream = IgniteManager.Memory.Allocate().GetStream()) { using (PlatformMemoryStream inStream = IgniteManager.Memory.Allocate().GetStream()) { BinaryWriter writer = _marsh.StartMarshal(outStream); outAction(writer); FinishMarshal(writer); UU.TargetInObjectStreamOutStream(_target, type, arg, outStream.SynchronizeOutput(), inStream.MemoryPointer); inStream.SynchronizeInput(); return inAction(inStream); } } } /// <summary> /// Perform out-in operation. 
/// </summary> /// <param name="type">Operation type.</param> /// <param name="outAction">Out action.</param> /// <returns>Result.</returns> protected TR DoOutInOp<TR>(int type, Action<BinaryWriter> outAction) { using (PlatformMemoryStream outStream = IgniteManager.Memory.Allocate().GetStream()) { using (PlatformMemoryStream inStream = IgniteManager.Memory.Allocate().GetStream()) { BinaryWriter writer = _marsh.StartMarshal(outStream); outAction(writer); FinishMarshal(writer); UU.TargetInStreamOutStream(_target, type, outStream.SynchronizeOutput(), inStream.MemoryPointer); inStream.SynchronizeInput(); return Unmarshal<TR>(inStream); } } } /// <summary> /// Perform simple out-in operation accepting single argument. /// </summary> /// <param name="type">Operation type.</param> /// <param name="val">Value.</param> /// <returns>Result.</returns> protected TR DoOutInOp<T1, TR>(int type, T1 val) { using (PlatformMemoryStream outStream = IgniteManager.Memory.Allocate().GetStream()) { using (PlatformMemoryStream inStream = IgniteManager.Memory.Allocate().GetStream()) { BinaryWriter writer = _marsh.StartMarshal(outStream); writer.WriteObject(val); FinishMarshal(writer); UU.TargetInStreamOutStream(_target, type, outStream.SynchronizeOutput(), inStream.MemoryPointer); inStream.SynchronizeInput(); return Unmarshal<TR>(inStream); } } } /// <summary> /// Perform simple out-in operation accepting two arguments. 
/// </summary> /// <param name="type">Operation type.</param> /// <param name="val1">Value.</param> /// <param name="val2">Value.</param> /// <returns>Result.</returns> protected TR DoOutInOp<T1, T2, TR>(int type, T1 val1, T2 val2) { using (PlatformMemoryStream outStream = IgniteManager.Memory.Allocate().GetStream()) { using (PlatformMemoryStream inStream = IgniteManager.Memory.Allocate().GetStream()) { BinaryWriter writer = _marsh.StartMarshal(outStream); writer.WriteObject(val1); writer.WriteObject(val2); FinishMarshal(writer); UU.TargetInStreamOutStream(_target, type, outStream.SynchronizeOutput(), inStream.MemoryPointer); inStream.SynchronizeInput(); return Unmarshal<TR>(inStream); } } } #endregion #region Miscelanneous /// <summary> /// Finish marshaling. /// </summary> /// <param name="writer">Writer.</param> internal void FinishMarshal(BinaryWriter writer) { _marsh.FinishMarshal(writer); } /// <summary> /// Put binary types to Grid. /// </summary> /// <param name="types">Binary types.</param> internal void PutBinaryTypes(ICollection<BinaryType> types) { DoOutOp(OpMeta, stream => { BinaryWriter w = _marsh.StartMarshal(stream); w.WriteInt(types.Count); foreach (var meta in types) { w.WriteInt(meta.TypeId); w.WriteString(meta.TypeName); w.WriteString(meta.AffinityKeyFieldName); IDictionary<string, int> fields = meta.GetFieldsMap(); w.WriteInt(fields.Count); foreach (var field in fields) { w.WriteString(field.Key); w.WriteInt(field.Value); } w.WriteBoolean(meta.IsEnum); // Send schemas var desc = meta.Descriptor; Debug.Assert(desc != null); var count = 0; var countPos = stream.Position; w.WriteInt(0); // Reserve for count foreach (var schema in desc.Schema.GetAll()) { w.WriteInt(schema.Key); var ids = schema.Value; w.WriteInt(ids.Length); foreach (var id in ids) w.WriteInt(id); count++; } stream.WriteInt(countPos, count); } _marsh.FinishMarshal(w); }); _marsh.OnBinaryTypesSent(types); } /// <summary> /// Unmarshal object using the given stream. 
/// </summary>
/// <param name="stream">Stream.</param>
/// <returns>Unmarshalled object.</returns>
protected virtual T Unmarshal<T>(IBinaryStream stream)
{
    return _marsh.Unmarshal<T>(stream);
}

/// <summary>
/// Creates a future and starts listening.
/// </summary>
/// <typeparam name="T">Future result type</typeparam>
/// <param name="listenAction">The listen action; receives the future handle and type id and
/// returns the unmanaged target that performs the listening.</param>
/// <param name="keepBinary">Keep binary flag, only applicable to object futures. False by default.</param>
/// <param name="convertFunc">The function to read future result from stream.</param>
/// <returns>Created future.</returns>
protected Future<T> GetFuture<T>(Func<long, int, IUnmanagedTarget> listenAction, bool keepBinary = false,
    Func<BinaryReader, T> convertFunc = null)
{
    // Primitives map to a dedicated future type; everything else is an object future.
    var futType = FutureType.Object;

    var type = typeof(T);

    if (type.IsPrimitive)
        IgniteFutureTypeMap.TryGetValue(type, out futType);

    // A converter is only required when the result cannot be read as a raw primitive.
    var fut = convertFunc == null && futType != FutureType.Object
        ? new Future<T>()
        : new Future<T>(new FutureConverter<T>(_marsh, keepBinary, convertFunc));

    // Register the future so the native callback can locate it by handle.
    var futHnd = _marsh.Ignite.HandleRegistry.Allocate(fut);

    var futTarget = listenAction(futHnd, (int) futType);

    fut.SetTarget(futTarget);

    return fut;
}

/// <summary>
/// Creates a future and starts listening.
/// </summary>
/// <typeparam name="T">Future result type</typeparam>
/// <param name="listenAction">The listen action; receives the future handle and type id.</param>
/// <param name="keepBinary">Keep binary flag, only applicable to object futures. False by default.</param>
/// <param name="convertFunc">The function to read future result from stream.</param>
/// <returns>Created future.</returns>
protected Future<T> GetFuture<T>(Action<long, int> listenAction, bool keepBinary = false,
    Func<BinaryReader, T> convertFunc = null)
{
    // Primitives map to a dedicated future type; everything else is an object future.
    var futType = FutureType.Object;

    var type = typeof(T);

    if (type.IsPrimitive)
        IgniteFutureTypeMap.TryGetValue(type, out futType);

    var fut = convertFunc == null && futType != FutureType.Object
        ? new Future<T>()
        : new Future<T>(new FutureConverter<T>(_marsh, keepBinary, convertFunc));

    var futHnd = _marsh.Ignite.HandleRegistry.Allocate(fut);

    listenAction(futHnd, (int) futType);

    return fut;
}

/// <summary>
/// Creates a task to listen for the last async op.
/// </summary>
protected Task GetTask()
{
    return GetTask<object>();
}

/// <summary>
/// Creates a task to listen for the last async op.
/// </summary>
protected Task<T> GetTask<T>()
{
    return GetFuture<T>((futId, futTyp) => UU.TargetListenFuture(Target, futId, futTyp)).Task;
}

#endregion
}

/// <summary>
/// PlatformTarget with IDisposable pattern.
/// </summary>
internal abstract class PlatformDisposableTarget : PlatformTarget, IDisposable
{
    /** Disposed flag. */
    private volatile bool _disposed;

    /** Guards the dispose transition. A dedicated gate is used instead of lock(this):
     * locking on a publicly reachable instance lets external code take the same lock
     * and interfere with (or deadlock) disposal — see CA2002. */
    private readonly object _disposeGate = new object();

    /// <summary>
    /// Constructor.
    /// </summary>
    /// <param name="target">Target.</param>
    /// <param name="marsh">Marshaller.</param>
    protected PlatformDisposableTarget(IUnmanagedTarget target, Marshaller marsh) : base(target, marsh)
    {
        // No-op.
    }

    /** <inheritdoc /> */
    public void Dispose()
    {
        lock (_disposeGate)
        {
            // Idempotent: subsequent calls are no-ops.
            if (_disposed)
                return;

            Dispose(true);

            GC.SuppressFinalize(this);

            _disposed = true;
        }
    }

    /// <summary>
    /// Releases unmanaged and - optionally - managed resources.
    /// </summary>
    /// <param name="disposing">
    /// <c>true</c> when called from Dispose; <c>false</c> when called from finalizer.
    /// </param>
    protected virtual void Dispose(bool disposing)
    {
        Target.Dispose();
    }

    /// <summary>
    /// Throws <see cref="ObjectDisposedException"/> if this instance has been disposed.
    /// </summary>
    protected void ThrowIfDisposed()
    {
        if (_disposed)
            throw new ObjectDisposedException(GetType().Name, "Object has been disposed.");
    }

    /// <summary>
    /// Gets a value indicating whether this instance is disposed.
    /// </summary>
    protected bool IsDisposed
    {
        get { return _disposed; }
    }
}
}
#region License

//L
// 2007 - 2013 Copyright Northwestern University
//
// Distributed under the OSI-approved BSD 3-Clause License.
// See http://ncip.github.com/annotation-and-image-markup/LICENSE.txt for details.
//L

#endregion

namespace AIM.Annotation.View.WinForms
{
    // Designer-generated half of the AimAnnotationDetailsComponentControl partial class.
    // Layout/control wiring lives in InitializeComponent; do not hand-edit that method.
    partial class AimAnnotationDetailsComponentControl
    {
        /// <summary>
        /// Required designer variable.
        /// </summary>
        private System.ComponentModel.IContainer components = null;

        /// <summary>
        /// Clean up any resources being used.
        /// </summary>
        /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
        protected override void Dispose(bool disposing)
        {
            if (disposing && (components != null))
            {
                components.Dispose();
            }
            base.Dispose(disposing);
        }

        #region Component Designer generated code

        /// <summary>
        /// Required method for Designer support - do not modify
        /// the contents of this method with the code editor.
        /// </summary>
        private void InitializeComponent()
        {
            this.components = new System.ComponentModel.Container();
            System.Windows.Forms.Panel _panelCalcualtions;
            System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(AimAnnotationDetailsComponentControl));
            this._tboxCalculations = new System.Windows.Forms.TextBox();
            this._infoTabs = new System.Windows.Forms.TabControl();
            this._tabAnatomicEnities = new System.Windows.Forms.TabPage();
            this._tabImagingObservations = new System.Windows.Forms.TabPage();
            this._tabCalculations = new System.Windows.Forms.TabPage();
            this._tabWebBrowser = new System.Windows.Forms.TabPage();
            this._wbDetails = new System.Windows.Forms.WebBrowser();
            this.contextMenuWb = new System.Windows.Forms.ContextMenuStrip(this.components);
            this.copyToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem();
            this.toolStripSeparator4 = new System.Windows.Forms.ToolStripSeparator();
            this.selectAllToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem();
            _panelCalcualtions = new System.Windows.Forms.Panel();
            _panelCalcualtions.SuspendLayout();
            this._infoTabs.SuspendLayout();
            this._tabCalculations.SuspendLayout();
            this._tabWebBrowser.SuspendLayout();
            this.contextMenuWb.SuspendLayout();
            this.SuspendLayout();
            // 
            // _panelCalcualtions
            // 
            _panelCalcualtions.AutoScroll = true;
            _panelCalcualtions.AutoSizeMode = System.Windows.Forms.AutoSizeMode.GrowAndShrink;
            _panelCalcualtions.Controls.Add(this._tboxCalculations);
            _panelCalcualtions.Dock = System.Windows.Forms.DockStyle.Fill;
            _panelCalcualtions.Location = new System.Drawing.Point(3, 3);
            _panelCalcualtions.Name = "_panelCalcualtions";
            _panelCalcualtions.Size = new System.Drawing.Size(543, 458);
            _panelCalcualtions.TabIndex = 0;
            // 
            // _tboxCalculations
            // 
            this._tboxCalculations.Dock = System.Windows.Forms.DockStyle.Fill;
            this._tboxCalculations.Location = new System.Drawing.Point(0, 0);
            this._tboxCalculations.Multiline = true;
            this._tboxCalculations.Name = "_tboxCalculations";
            this._tboxCalculations.ReadOnly = true;
            this._tboxCalculations.Size = new System.Drawing.Size(543, 458);
            this._tboxCalculations.TabIndex = 0;
            // 
            // _infoTabs
            // 
            this._infoTabs.Controls.Add(this._tabAnatomicEnities);
            this._infoTabs.Controls.Add(this._tabImagingObservations);
            this._infoTabs.Controls.Add(this._tabCalculations);
            this._infoTabs.Controls.Add(this._tabWebBrowser);
            this._infoTabs.Dock = System.Windows.Forms.DockStyle.Fill;
            this._infoTabs.Location = new System.Drawing.Point(0, 0);
            this._infoTabs.Name = "_infoTabs";
            this._infoTabs.SelectedIndex = 0;
            this._infoTabs.Size = new System.Drawing.Size(557, 493);
            this._infoTabs.TabIndex = 0;
            // 
            // _tabAnatomicEnities
            // 
            this._tabAnatomicEnities.Location = new System.Drawing.Point(4, 25);
            this._tabAnatomicEnities.Name = "_tabAnatomicEnities";
            this._tabAnatomicEnities.Padding = new System.Windows.Forms.Padding(3);
            this._tabAnatomicEnities.Size = new System.Drawing.Size(549, 464);
            this._tabAnatomicEnities.TabIndex = 0;
            this._tabAnatomicEnities.Text = "Anatomic Enities";
            this._tabAnatomicEnities.UseVisualStyleBackColor = true;
            // 
            // _tabImagingObservations
            // 
            this._tabImagingObservations.Location = new System.Drawing.Point(4, 25);
            this._tabImagingObservations.Name = "_tabImagingObservations";
            this._tabImagingObservations.Padding = new System.Windows.Forms.Padding(3);
            this._tabImagingObservations.Size = new System.Drawing.Size(549, 464);
            this._tabImagingObservations.TabIndex = 1;
            this._tabImagingObservations.Text = "Imaging Observartions";
            this._tabImagingObservations.UseVisualStyleBackColor = true;
            // 
            // _tabCalculations
            // 
            this._tabCalculations.Controls.Add(_panelCalcualtions);
            this._tabCalculations.Location = new System.Drawing.Point(4, 25);
            this._tabCalculations.Name = "_tabCalculations";
            this._tabCalculations.Padding = new System.Windows.Forms.Padding(3);
            this._tabCalculations.Size = new System.Drawing.Size(549, 464);
            this._tabCalculations.TabIndex = 2;
            this._tabCalculations.Text = "Calculations";
            this._tabCalculations.UseVisualStyleBackColor = true;
            // 
            // _tabWebBrowser
            // 
            this._tabWebBrowser.Controls.Add(this._wbDetails);
            this._tabWebBrowser.Location = new System.Drawing.Point(4, 25);
            this._tabWebBrowser.Name = "_tabWebBrowser";
            this._tabWebBrowser.Padding = new System.Windows.Forms.Padding(3);
            this._tabWebBrowser.Size = new System.Drawing.Size(549, 464);
            this._tabWebBrowser.TabIndex = 3;
            this._tabWebBrowser.Text = "All Details";
            this._tabWebBrowser.UseVisualStyleBackColor = true;
            // 
            // _wbDetails
            // 
            this._wbDetails.AllowNavigation = true;
            this._wbDetails.AllowWebBrowserDrop = false;
            this._wbDetails.ContextMenuStrip = this.contextMenuWb;
            this._wbDetails.Dock = System.Windows.Forms.DockStyle.Fill;
            this._wbDetails.IsWebBrowserContextMenuEnabled = false;
            this._wbDetails.Location = new System.Drawing.Point(3, 3);
            this._wbDetails.MinimumSize = new System.Drawing.Size(20, 20);
            this._wbDetails.Name = "_wbDetails";
            this._wbDetails.Size = new System.Drawing.Size(543, 458);
            this._wbDetails.TabIndex = 0;
            this._wbDetails.WebBrowserShortcutsEnabled = false;
            // 
            // contextMenuWb
            // 
            this.contextMenuWb.Items.AddRange(new System.Windows.Forms.ToolStripItem[] {
            this.copyToolStripMenuItem,
            this.toolStripSeparator4,
            this.selectAllToolStripMenuItem});
            this.contextMenuWb.Name = "contextMenuWb";
            this.contextMenuWb.Size = new System.Drawing.Size(153, 76);
            this.contextMenuWb.Opening += new System.ComponentModel.CancelEventHandler(this.ContextMenuWbOpening);
            // 
            // copyToolStripMenuItem
            // 
            this.copyToolStripMenuItem.Image = ((System.Drawing.Image)(resources.GetObject("copyToolStripMenuItem.Image")));
            this.copyToolStripMenuItem.ImageTransparentColor = System.Drawing.Color.Magenta;
            this.copyToolStripMenuItem.Name = "copyToolStripMenuItem";
            this.copyToolStripMenuItem.Size = new System.Drawing.Size(152, 22);
            this.copyToolStripMenuItem.Text = "&Copy";
            this.copyToolStripMenuItem.Click += new System.EventHandler(this.CopyToolStripMenuItemClick);
            // 
            // toolStripSeparator4
            // 
            this.toolStripSeparator4.Name = "toolStripSeparator4";
            this.toolStripSeparator4.Size = new System.Drawing.Size(149, 6);
            // 
            // selectAllToolStripMenuItem
            // 
            this.selectAllToolStripMenuItem.Name = "selectAllToolStripMenuItem";
            this.selectAllToolStripMenuItem.Size = new System.Drawing.Size(152, 22);
            this.selectAllToolStripMenuItem.Text = "Select &All";
            this.selectAllToolStripMenuItem.Click += new System.EventHandler(this.SelectAllToolStripMenuItemClick);
            // 
            // AimAnnotationDetailsComponentControl
            // 
            this.AutoScaleDimensions = new System.Drawing.SizeF(8F, 16F);
            this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
            this.Controls.Add(this._infoTabs);
            this.Name = "AimAnnotationDetailsComponentControl";
            this.Size = new System.Drawing.Size(557, 493);
            _panelCalcualtions.ResumeLayout(false);
            _panelCalcualtions.PerformLayout();
            this._infoTabs.ResumeLayout(false);
            this._tabCalculations.ResumeLayout(false);
            this._tabWebBrowser.ResumeLayout(false);
            this.contextMenuWb.ResumeLayout(false);
            this.ResumeLayout(false);

        }

        #endregion

        // Control fields instantiated and wired in InitializeComponent.
        private System.Windows.Forms.TabControl _infoTabs;
        private System.Windows.Forms.TabPage _tabAnatomicEnities;
        private System.Windows.Forms.TabPage _tabImagingObservations;
        private System.Windows.Forms.TabPage _tabCalculations;
        private System.Windows.Forms.TextBox _tboxCalculations;
        private System.Windows.Forms.TabPage _tabWebBrowser;
        private System.Windows.Forms.WebBrowser _wbDetails;
        private System.Windows.Forms.ToolStripMenuItem copyToolStripMenuItem;
        private System.Windows.Forms.ToolStripSeparator toolStripSeparator4;
        private System.Windows.Forms.ToolStripMenuItem selectAllToolStripMenuItem;
        private System.Windows.Forms.ContextMenuStrip contextMenuWb;
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Buffers;
using System.IO;
using Internal.Cryptography;
using Microsoft.Win32.SafeHandles;

namespace System.Security.Cryptography
{
#if INTERNAL_ASYMMETRIC_IMPLEMENTATIONS
    internal static partial class ECDsaImplementation
    {
#endif
        public sealed partial class ECDsaOpenSsl : ECDsa
        {
            internal const string ECDSA_P256_OID_VALUE = "1.2.840.10045.3.1.7"; // Also called nistP256 or secP256r1
            internal const string ECDSA_P384_OID_VALUE = "1.3.132.0.34"; // Also called nistP384 or secP384r1
            internal const string ECDSA_P521_OID_VALUE = "1.3.132.0.35"; // Also called nistP521 or secP521r1

            // Lazily generated so that constructing the object does not touch OpenSSL
            // until a key is actually needed.
            private Lazy<SafeEcKeyHandle> _key;

            /// <summary>
            /// Create an ECDsaOpenSsl algorithm with a named curve.
            /// </summary>
            /// <param name="curve">The <see cref="ECCurve"/> representing the curve.</param>
            /// <exception cref="ArgumentNullException">if <paramref name="curve" /> is null.</exception>
            public ECDsaOpenSsl(ECCurve curve)
            {
                GenerateKey(curve);
            }

            /// <summary>
            /// Create an ECDsaOpenSsl algorithm with a random 521 bit key pair.
            /// </summary>
            public ECDsaOpenSsl()
                : this(521)
            {
            }

            /// <summary>
            /// Creates a new ECDsaOpenSsl object that will use a randomly generated key of the specified size.
            /// </summary>
            /// <param name="keySize">Size of the key to generate, in bits.</param>
            public ECDsaOpenSsl(int keySize)
            {
                KeySize = keySize;
            }

            /// <summary>
            /// Set the KeySize without validating against LegalKeySizes.
            /// </summary>
            /// <param name="newKeySize">The value to set the KeySize to.</param>
            private void ForceSetKeySize(int newKeySize)
            {
                // In the event that a key was loaded via ImportParameters, curve name, or an IntPtr/SafeHandle
                // it could be outside of the bounds that we currently represent as "legal key sizes".
                // Since that is our view into the underlying component it can be detached from the
                // component's understanding. If it said it has opened a key, and this is the size, trust it.
                KeySizeValue = newKeySize;
            }

            public override KeySizes[] LegalKeySizes
            {
                get
                {
                    // Return the three sizes that can be explicitly set (for backwards compatibility)
                    return new[]
                    {
                        new KeySizes(minSize: 256, maxSize: 384, skipSize: 128),
                        new KeySizes(minSize: 521, maxSize: 521, skipSize: 0),
                    };
                }
            }

            /// <summary>
            /// Signs the hash with the current key, returning the signature in the
            /// .NET r||s (IEEE P1363) format converted from OpenSSL's DER encoding.
            /// </summary>
            public override byte[] SignHash(byte[] hash)
            {
                if (hash == null)
                    throw new ArgumentNullException(nameof(hash));

                SafeEcKeyHandle key = _key.Value;
                // EcDsaSize reports the maximum DER signature size for this key.
                int signatureLength = Interop.Crypto.EcDsaSize(key);
                byte[] signature = new byte[signatureLength];
                if (!Interop.Crypto.EcDsaSign(hash, hash.Length, signature, ref signatureLength, key))
                    throw Interop.Crypto.CreateOpenSslCryptographicException();

                byte[] converted = AsymmetricAlgorithmHelpers.ConvertDerToIeee1363(signature, 0, signatureLength, KeySize);
                return converted;
            }

            public override bool TrySignHash(ReadOnlySpan<byte> source, Span<byte> destination, out int bytesWritten)
            {
                SafeEcKeyHandle key = _key.Value;
                byte[] converted;

                int signatureLength = Interop.Crypto.EcDsaSize(key);
                // Rent a scratch buffer for the DER signature; cleared before return
                // since it holds signature material.
                byte[] signature = ArrayPool<byte>.Shared.Rent(signatureLength);
                try
                {
                    // EcDsaSign updates signatureLength to the actual DER length written.
                    if (!Interop.Crypto.EcDsaSign(source, source.Length, new Span<byte>(signature, 0, signatureLength), ref signatureLength, key))
                    {
                        throw Interop.Crypto.CreateOpenSslCryptographicException();
                    }

                    converted = AsymmetricAlgorithmHelpers.ConvertDerToIeee1363(signature, 0, signatureLength, KeySize);
                }
                finally
                {
                    Array.Clear(signature, 0, signatureLength);
                    ArrayPool<byte>.Shared.Return(signature);
                }

                if (converted.Length <= destination.Length)
                {
                    new ReadOnlySpan<byte>(converted).CopyTo(destination);
                    bytesWritten = converted.Length;
                    return true;
                }
                else
                {
                    bytesWritten = 0;
                    return false;
                }
            }

            public override bool VerifyHash(byte[] hash, byte[] signature)
            {
                if (hash == null)
                    throw new ArgumentNullException(nameof(hash));
                if (signature == null)
                    throw new ArgumentNullException(nameof(signature));

                return VerifyHash((ReadOnlySpan<byte>)hash, (ReadOnlySpan<byte>)signature);
            }

            public override bool VerifyHash(ReadOnlySpan<byte> hash, ReadOnlySpan<byte> signature)
            {
                // The signature format for .NET is r.Concat(s). Each of r and s are of length BitsToBytes(KeySize), even
                // when they would have leading zeroes.  If it's the correct size, then we need to encode it from
                // r.Concat(s) to SEQUENCE(INTEGER(r), INTEGER(s)), because that's the format that OpenSSL expects.
                int expectedBytes = 2 * AsymmetricAlgorithmHelpers.BitsToBytes(KeySize);
                if (signature.Length != expectedBytes)
                {
                    // The input isn't of the right length, so we can't sensibly re-encode it.
                    return false;
                }

                byte[] openSslFormat = AsymmetricAlgorithmHelpers.ConvertIeee1363ToDer(signature);

                SafeEcKeyHandle key = _key.Value;
                int verifyResult = Interop.Crypto.EcDsaVerify(hash, hash.Length, openSslFormat, openSslFormat.Length, key);
                return verifyResult == 1;
            }

            protected override byte[] HashData(byte[] data, int offset, int count, HashAlgorithmName hashAlgorithm) =>
                AsymmetricAlgorithmHelpers.HashData(data, offset, count, hashAlgorithm);

            protected override byte[] HashData(Stream data, HashAlgorithmName hashAlgorithm) =>
                AsymmetricAlgorithmHelpers.HashData(data, hashAlgorithm);

            protected override bool TryHashData(ReadOnlySpan<byte> source, Span<byte> destination, HashAlgorithmName hashAlgorithm, out int bytesWritten) =>
                AsymmetricAlgorithmHelpers.TryHashData(source, destination, hashAlgorithm, out bytesWritten);

            protected override void Dispose(bool disposing)
            {
                if (disposing)
                {
                    FreeKey();
                }

                base.Dispose(disposing);
            }

            public override int KeySize
            {
                get
                {
                    return base.KeySize;
                }
                set
                {
                    if (KeySize == value)
                        return;

                    // Set the KeySize before FreeKey so that an invalid value doesn't throw away the key
                    base.KeySize = value;

                    FreeKey();
                    // Defer generation of the replacement key until it is first used.
                    _key = new Lazy<SafeEcKeyHandle>(GenerateKeyLazy);
                }
            }

            public override void GenerateKey(ECCurve curve)
            {
                curve.Validate();
                FreeKey();

                if (curve.IsNamed)
                {
                    string oid = null;
                    // Use oid Value first if present, otherwise FriendlyName because Oid maintains a hard-coded
                    // cache that may have different casing for FriendlyNames than OpenSsl
                    oid = !string.IsNullOrEmpty(curve.Oid.Value) ? curve.Oid.Value : curve.Oid.FriendlyName;

                    SafeEcKeyHandle key = Interop.Crypto.EcKeyCreateByOid(oid);

                    if (key == null || key.IsInvalid)
                        throw new PlatformNotSupportedException(string.Format(SR.Cryptography_CurveNotSupported, oid));

                    if (!Interop.Crypto.EcKeyGenerateKey(key))
                        throw Interop.Crypto.CreateOpenSslCryptographicException();

                    SetKey(key);
                }
                else if (curve.IsExplicit)
                {
                    SafeEcKeyHandle key = Interop.Crypto.EcKeyCreateByExplicitCurve(curve);

                    if (!Interop.Crypto.EcKeyGenerateKey(key))
                        throw Interop.Crypto.CreateOpenSslCryptographicException();

                    SetKey(key);
                }
                else
                {
                    throw new PlatformNotSupportedException(string.Format(SR.Cryptography_CurveNotSupported, curve.CurveType.ToString()));
                }
            }

            // Generates a key for the current KeySize; used as the Lazy<T> factory from the KeySize setter.
            private SafeEcKeyHandle GenerateKeyLazy()
            {
                string oid = null;
                switch (KeySize)
                {
                    case 256: oid = ECDSA_P256_OID_VALUE; break;
                    case 384: oid = ECDSA_P384_OID_VALUE; break;
                    case 521: oid = ECDSA_P521_OID_VALUE; break;
                    default:
                        // Only above three sizes supported for backwards compatibility; named curves should be used instead
                        throw new InvalidOperationException(SR.Cryptography_InvalidKeySize);
                }

                SafeEcKeyHandle key = Interop.Crypto.EcKeyCreateByOid(oid);

                if (key == null || key.IsInvalid)
                    throw new PlatformNotSupportedException(string.Format(SR.Cryptography_CurveNotSupported, oid));

                if (!Interop.Crypto.EcKeyGenerateKey(key))
                    throw Interop.Crypto.CreateOpenSslCryptographicException();

                return key;
            }

            // Disposes the current key handle (if one was materialized) and clears the Lazy wrapper.
            private void FreeKey()
            {
                if (_key != null)
                {
                    if (_key.IsValueCreated)
                    {
                        SafeEcKeyHandle handle = _key.Value;

                        if (handle != null)
                            handle.Dispose();
                    }

                    _key = null;
                }
            }

            // Adopts an already-created key handle, syncing KeySize to the handle's actual size.
            private void SetKey(SafeEcKeyHandle newKey)
            {
                // Use ForceSet instead of the property setter to ensure that LegalKeySizes doesn't interfere
                // with the already loaded key.
                ForceSetKeySize(Interop.Crypto.EcKeyGetSize(newKey));

                // Lazy<T>(T value) wraps the already-materialized handle.
                _key = new Lazy<SafeEcKeyHandle>(newKey);
            }
        }
#if INTERNAL_ASYMMETRIC_IMPLEMENTATIONS
    }
#endif
}
//--------------------------------------------------------------------------- // // Copyright (C) Microsoft Corporation. All rights reserved. // //--------------------------------------------------------------------------- using System; using System.Collections.ObjectModel; using System.ComponentModel; using System.Diagnostics; using System.Globalization; using System.Windows; using System.Windows.Controls.Primitives; using System.Windows.Data; using System.Windows.Input; using System.Windows.Media; using System.Windows.Threading; using System.Windows.Automation; namespace System.Windows.Controls { /// <summary> /// A control for displaying a cell of the DataGrid. /// </summary> public class DataGridCell : ContentControl, IProvideDataGridColumn { #region Constructors /// <summary> /// Instantiates global information. /// </summary> static DataGridCell() { DefaultStyleKeyProperty.OverrideMetadata(typeof(DataGridCell), new FrameworkPropertyMetadata(typeof(DataGridCell))); StyleProperty.OverrideMetadata(typeof(DataGridCell), new FrameworkPropertyMetadata(null, OnNotifyPropertyChanged, OnCoerceStyle)); ClipProperty.OverrideMetadata(typeof(DataGridCell), new FrameworkPropertyMetadata(null, new CoerceValueCallback(OnCoerceClip))); KeyboardNavigation.TabNavigationProperty.OverrideMetadata(typeof(DataGridCell), new FrameworkPropertyMetadata(KeyboardNavigationMode.Local)); AutomationProperties.IsOffscreenBehaviorProperty.OverrideMetadata(typeof(DataGridCell), new FrameworkPropertyMetadata(IsOffscreenBehavior.FromClip)); // Set SnapsToDevicePixels to true so that this element can draw grid lines. The metadata options are so that the property value doesn't inherit down the tree from here. 
SnapsToDevicePixelsProperty.OverrideMetadata(typeof(DataGridCell), new FrameworkPropertyMetadata(true, FrameworkPropertyMetadataOptions.AffectsArrange)); EventManager.RegisterClassHandler(typeof(DataGridCell), MouseLeftButtonDownEvent, new MouseButtonEventHandler(OnAnyMouseLeftButtonDownThunk), true); IsMouseOverPropertyKey.OverrideMetadata(typeof(DataGridCell), new UIPropertyMetadata(new PropertyChangedCallback(OnVisualStatePropertyChanged))); EventManager.RegisterClassHandler(typeof(DataGridCell), LostFocusEvent, new RoutedEventHandler(OnAnyLostFocus), true); EventManager.RegisterClassHandler(typeof(DataGridCell), GotFocusEvent, new RoutedEventHandler(OnAnyGotFocus), true); } /// <summary> /// Instantiates a new instance of this class. /// </summary> public DataGridCell() { _tracker = new ContainerTracking<DataGridCell>(this); } #endregion #region Automation protected override System.Windows.Automation.Peers.AutomationPeer OnCreateAutomationPeer() { return new System.Windows.Automation.Peers.DataGridCellAutomationPeer(this); } #endregion #region Cell Generation /// <summary> /// Prepares a cell for use. /// </summary> /// <remarks> /// Updates the column reference. /// This overload computes the column index from the ItemContainerGenerator. /// </remarks> internal void PrepareCell(object item, ItemsControl cellsPresenter, DataGridRow ownerRow) { PrepareCell(item, ownerRow, cellsPresenter.ItemContainerGenerator.IndexFromContainer(this)); } /// <summary> /// Prepares a cell for use. /// </summary> /// <remarks> /// Updates the column reference. 
/// </remarks> internal void PrepareCell(object item, DataGridRow ownerRow, int index) { Debug.Assert(_owner == null || _owner == ownerRow, "_owner should be null before PrepareCell is called or the same value as the ownerRow."); _owner = ownerRow; DataGrid dataGrid = _owner.DataGridOwner; if (dataGrid != null) { // The index of the container should correspond to the index of the column if ((index >= 0) && (index < dataGrid.Columns.Count)) { // Retrieve the column definition and pass it to the cell container DataGridColumn column = dataGrid.Columns[index]; Column = column; TabIndex = column.DisplayIndex; } if (IsEditing) { // If IsEditing was left on and this container was recycled, reset it here. // Setting this property will result in BuildVisualTree being called. IsEditing = false; } else if ((Content as FrameworkElement) == null) { // If there isn't already a visual tree, then create one. BuildVisualTree(); if (!NeedsVisualTree) { Content = item; } } // Update cell Selection bool isSelected = dataGrid.SelectedCellsInternal.Contains(this); SyncIsSelected(isSelected); } DataGridHelper.TransferProperty(this, StyleProperty); DataGridHelper.TransferProperty(this, IsReadOnlyProperty); CoerceValue(ClipProperty); } /// <summary> /// Clears the cell of references. /// </summary> internal void ClearCell(DataGridRow ownerRow) { Debug.Assert(_owner == ownerRow, "_owner should be the same as the DataGridRow that is clearing the cell."); _owner = null; } /// <summary> /// Used by the DataGridRowGenerator owner to send notifications to the cell container. /// </summary> internal ContainerTracking<DataGridCell> Tracker { get { return _tracker; } } #endregion #region Column Information /// <summary> /// The column that defines how this cell should appear. 
/// </summary> public DataGridColumn Column { get { return (DataGridColumn)GetValue(ColumnProperty); } internal set { SetValue(ColumnPropertyKey, value); } } /// <summary> /// The DependencyPropertyKey that allows writing the Column property value. /// </summary> private static readonly DependencyPropertyKey ColumnPropertyKey = DependencyProperty.RegisterReadOnly("Column", typeof(DataGridColumn), typeof(DataGridCell), new FrameworkPropertyMetadata(null, new PropertyChangedCallback(OnColumnChanged))); /// <summary> /// The DependencyProperty for the Columns property. /// </summary> public static readonly DependencyProperty ColumnProperty = ColumnPropertyKey.DependencyProperty; /// <summary> /// Called when the Column property changes. /// Calls the protected virtual OnColumnChanged. /// </summary> private static void OnColumnChanged(object sender, DependencyPropertyChangedEventArgs e) { DataGridCell cell = sender as DataGridCell; if (cell != null) { cell.OnColumnChanged((DataGridColumn)e.OldValue, (DataGridColumn)e.NewValue); } } /// <summary> /// Called due to the cell's column definition changing. /// Not called due to changes within the current column definition. /// </summary> /// <remarks> /// Coerces ContentTemplate and ContentTemplateSelector. /// </remarks> /// <param name="oldColumn">The old column definition.</param> /// <param name="newColumn">The new column definition.</param> protected virtual void OnColumnChanged(DataGridColumn oldColumn, DataGridColumn newColumn) { // We need to call BuildVisualTree after changing the column (PrepareCell does this). Content = null; DataGridHelper.TransferProperty(this, StyleProperty); DataGridHelper.TransferProperty(this, IsReadOnlyProperty); } #endregion #region Notification Propagation /// <summary> /// Notifies the Cell of a property change. 
/// </summary> private static void OnNotifyPropertyChanged(DependencyObject d, DependencyPropertyChangedEventArgs e) { ((DataGridCell)d).NotifyPropertyChanged(d, string.Empty, e, DataGridNotificationTarget.Cells); } /// <summary> /// Cancels editing the current cell & notifies the cell of a change to IsReadOnly. /// </summary> private static void OnNotifyIsReadOnlyChanged(DependencyObject d, DependencyPropertyChangedEventArgs e) { var cell = (DataGridCell)d; var dataGrid = cell.DataGridOwner; if ((bool)e.NewValue && dataGrid != null) { dataGrid.CancelEdit(cell); } // re-evalutate the BeginEdit command's CanExecute. CommandManager.InvalidateRequerySuggested(); cell.NotifyPropertyChanged(d, string.Empty, e, DataGridNotificationTarget.Cells); } /// <summary> /// General notification for DependencyProperty changes from the grid or from columns. /// </summary> internal void NotifyPropertyChanged(DependencyObject d, string propertyName, DependencyPropertyChangedEventArgs e, DataGridNotificationTarget target) { DataGridColumn column = d as DataGridColumn; if ((column != null) && (column != Column)) { // This notification does not apply to this cell return; } // All the notifications which are to be handled by the cell if (DataGridHelper.ShouldNotifyCells(target)) { if (e.Property == DataGridColumn.WidthProperty) { DataGridHelper.OnColumnWidthChanged(this, e); } else if (e.Property == DataGrid.CellStyleProperty || e.Property == DataGridColumn.CellStyleProperty || e.Property == StyleProperty) { DataGridHelper.TransferProperty(this, StyleProperty); } else if (e.Property == DataGrid.IsReadOnlyProperty || e.Property == DataGridColumn.IsReadOnlyProperty || e.Property == IsReadOnlyProperty) { DataGridHelper.TransferProperty(this, IsReadOnlyProperty); } else if (e.Property == DataGridColumn.DisplayIndexProperty) { TabIndex = column.DisplayIndex; } else if (e.Property == DataGrid.IsKeyboardFocusWithinProperty) { UpdateVisualState(); } } // All the notifications which needs forward 
// to columns
// NOTE(review): the line above and the statements immediately below continue a
// method whose beginning lies outside this chunk; code is reproduced unchanged.
if (DataGridHelper.ShouldRefreshCellContent(target))
{
    if (column != null && NeedsVisualTree)
    {
        // Prefer the explicitly supplied property name; otherwise fall back to
        // the dependency property name carried by the change event args.
        if (!string.IsNullOrEmpty(propertyName))
        {
            column.RefreshCellContent(this, propertyName);
        }
        else if (e != null && e.Property != null)
        {
            column.RefreshCellContent(this, e.Property.Name);
        }
    }
}
}

#endregion

#region Style

// Coerces the cell's Style from, in precedence order: the local value, the owning
// column's CellStyle, then the owning DataGrid's CellStyle.
private static object OnCoerceStyle(DependencyObject d, object baseValue)
{
    var cell = d as DataGridCell;
    return DataGridHelper.GetCoercedTransferPropertyValue(
        cell,
        baseValue,
        StyleProperty,
        cell.Column,
        DataGridColumn.CellStyleProperty,
        cell.DataGridOwner,
        DataGrid.CellStyleProperty);
}

#endregion

#region Template

// Drives the VisualStateManager state groups (Common, Selection, Focus, Current,
// Interaction) from the cell's current flags.
// NOTE(review): the "active" branches go through the VisualStates helper (which
// takes a fallback state) while the "inactive" branches call VisualStateManager
// directly — presumably intentional, since only the active states need a
// fallback; confirm against the VisualStates helper's contract.
internal override void ChangeVisualState(bool useTransitions)
{
    if (DataGridOwner == null)
    {
        return;
    }

    // CommonStates
    if (IsMouseOver)
    {
        VisualStates.GoToState(this, useTransitions, VisualStates.StateMouseOver, VisualStates.StateNormal);
    }
    else
    {
        VisualStateManager.GoToState(this, VisualStates.StateNormal, useTransitions);
    }

    // SelectionStates
    if (IsSelected)
    {
        VisualStates.GoToState(this, useTransitions, VisualStates.StateSelected, VisualStates.StateUnselected);
    }
    else
    {
        VisualStates.GoToState(this, useTransitions, VisualStates.StateUnselected);
    }

    // FocusStates
    if (DataGridOwner.IsKeyboardFocusWithin)
    {
        VisualStates.GoToState(this, useTransitions, VisualStates.StateFocused, VisualStates.StateUnfocused);
    }
    else
    {
        VisualStateManager.GoToState(this, VisualStates.StateUnfocused, useTransitions);
    }

    // CurrentStates
    if (IsCurrent)
    {
        VisualStates.GoToState(this, useTransitions, VisualStates.StateCurrent, VisualStates.StateRegular);
    }
    else
    {
        VisualStateManager.GoToState(this, VisualStates.StateRegular, useTransitions);
    }

    // Interaction states
    if (IsEditing)
    {
        VisualStates.GoToState(this, useTransitions, VisualStates.StateEditing, VisualStates.StateDisplay);
    }
    else
    {
        VisualStateManager.GoToState(this, VisualStates.StateDisplay, useTransitions);
    }

    base.ChangeVisualState(useTransitions);
}

/// <summary>
/// Builds a column's visual tree if not using templates.
/// </summary>
internal void BuildVisualTree()
{
    if (NeedsVisualTree)
    {
        var column = Column;
        if (column != null)
        {
            // Work around a problem with BindingGroup not removing BindingExpressions.
            var row = RowOwner;
            if (row != null)
            {
                var bindingGroup = row.BindingGroup;
                if (bindingGroup != null)
                {
                    RemoveBindingExpressions(bindingGroup, Content as DependencyObject);
                }
            }

            // Ask the column to build a visual tree and
            // hook the visual tree up through the Content property.
            Content = column.BuildVisualTree(IsEditing, RowDataItem, this);
        }
    }
}

// Detaches from the row's BindingGroup every binding whose target lives inside
// this cell's old content, so the old tree can be collected.
private void RemoveBindingExpressions(BindingGroup bindingGroup, DependencyObject element)
{
    if (element == null)
        return; // no content, hence no bindings to remove

    var bindingExpressions = bindingGroup.BindingExpressions;
    BindingExpressionBase[] bindingExpressionsCopy = new BindingExpressionBase[bindingExpressions.Count];
    bindingExpressions.CopyTo(bindingExpressionsCopy, 0);
    for (int i = 0; i < bindingExpressionsCopy.Length; i++)
    {
        // Check the binding's target element - it might have been GC'd
        // (this can happen when column-virtualization is enabled, see Dev11 131232).
        // If so, fetching TargetElement will detach the binding and remove it
        // from the binding group's collection. This side-effect is why we
        // loop through a copy of the original collection, and don't rely
        // on i to be a valid index into the original collection.
        DependencyObject targetElement = bindingExpressionsCopy[i].TargetElement;
        if (targetElement != null &&
            VisualTreeHelper.IsAncestorOf(element, targetElement, typeof(DataGridCell)))
        {
            bindingExpressions.Remove(bindingExpressionsCopy[i]);
        }
    }
}

#endregion

#region Editing

/// <summary>
/// Whether the cell is in editing mode.
/// </summary>
public bool IsEditing
{
    get { return (bool)GetValue(IsEditingProperty); }
    set { SetValue(IsEditingProperty, value); }
}

/// <summary>
/// Represents the IsEditing property.
/// </summary>
public static readonly DependencyProperty IsEditingProperty =
    DependencyProperty.Register("IsEditing", typeof(bool), typeof(DataGridCell), new FrameworkPropertyMetadata(false, new PropertyChangedCallback(OnIsEditingChanged)));

private static void OnIsEditingChanged(object sender, DependencyPropertyChangedEventArgs e)
{
    ((DataGridCell)sender).OnIsEditingChanged((bool)e.NewValue);
}

/// <summary>
/// Called when the value of IsEditing changes.
/// </summary>
/// <remarks>
/// Coerces the value of ContentTemplate.
/// </remarks>
/// <param name="isEditing">The new value of IsEditing.</param>
protected virtual void OnIsEditingChanged(bool isEditing)
{
    if (IsKeyboardFocusWithin && !IsKeyboardFocused)
    {
        // Keep focus on the cell when flipping modes
        Focus();
    }

    // If templates aren't being used, then a new visual tree needs to be built.
    BuildVisualTree();

    UpdateVisualState();
}

internal void NotifyCurrentCellContainerChanged()
{
    UpdateVisualState();
}

/// <summary>
/// Whether the cell is the current cell.
/// </summary>
private bool IsCurrent
{
    get
    {
        var row = RowOwner;
        var column = Column;
        if (row != null && column != null)
        {
            var dataGrid = row.DataGridOwner;
            if (dataGrid != null)
            {
                return dataGrid.IsCurrent(row, column);
            }
        }

        return false;
    }
}

/// <summary>
/// Whether the cell can be placed in edit mode.
/// </summary>
public bool IsReadOnly
{
    get { return (bool)GetValue(IsReadOnlyProperty); }
}

private static readonly DependencyPropertyKey IsReadOnlyPropertyKey =
    DependencyProperty.RegisterReadOnly("IsReadOnly", typeof(bool), typeof(DataGridCell), new FrameworkPropertyMetadata(false, OnNotifyIsReadOnlyChanged, OnCoerceIsReadOnly));

/// <summary>
/// The DependencyProperty for IsReadOnly.
/// </summary>
public static readonly DependencyProperty IsReadOnlyProperty = IsReadOnlyPropertyKey.DependencyProperty;

private static object OnCoerceIsReadOnly(DependencyObject d, object baseValue)
{
    var cell = d as DataGridCell;
    var column = cell.Column;
    var dataGrid = cell.DataGridOwner;

    // We dont use the cell & 'baseValue' here because this property is read only on cell.
    // the column may coerce a default value to 'true', so we'll use it's effective value for IsReadOnly
    // as the baseValue.
    return DataGridHelper.GetCoercedTransferPropertyValue(
        column,
        column.IsReadOnly,
        DataGridColumn.IsReadOnlyProperty,
        dataGrid,
        DataGrid.IsReadOnlyProperty);
}

private static void OnAnyLostFocus(object sender, RoutedEventArgs e)
{
    // Get the ancestor cell of old focused element.
    // Set DataGrid.FocusedCell to null, if the cell doesn't
    // have keyboard focus.
    DataGridCell cell = DataGridHelper.FindVisualParent<DataGridCell>(e.OriginalSource as UIElement);
    if (cell != null && cell == sender)
    {
        DataGrid owner = cell.DataGridOwner;
        if (owner != null && !cell.IsKeyboardFocusWithin && owner.FocusedCell == cell)
        {
            owner.FocusedCell = null;
        }
    }
}

private static void OnAnyGotFocus(object sender, RoutedEventArgs e)
{
    // Track the newly focused cell on the owning DataGrid.
    DataGridCell cell = DataGridHelper.FindVisualParent<DataGridCell>(e.OriginalSource as UIElement);
    if (cell != null && cell == sender)
    {
        DataGrid owner = cell.DataGridOwner;
        if (owner != null)
        {
            owner.FocusedCell = cell;
        }
    }
}

internal void BeginEdit(RoutedEventArgs e)
{
    Debug.Assert(!IsEditing, "Should not call BeginEdit when IsEditing is true.");

    IsEditing = true;

    DataGridColumn column = Column;
    if (column != null)
    {
        // Ask the column to store the original value
        column.BeginEdit(Content as FrameworkElement, e);
    }

    RaisePreparingCellForEdit(e);
}

internal void CancelEdit()
{
    Debug.Assert(IsEditing, "Should not call CancelEdit when IsEditing is false.");

    DataGridColumn column = Column;
    if (column != null)
    {
        // Ask the column to restore the original value
        column.CancelEdit(Content as FrameworkElement);
    }

    IsEditing = false;
}

internal bool CommitEdit()
{
    Debug.Assert(IsEditing, "Should not call CommitEdit when IsEditing is false.");

    bool validationPassed = true;
    DataGridColumn column = Column;
    if (column != null)
    {
        // Ask the column to access the binding and update the data source
        // If validation fails, then remain in editing mode
        validationPassed = column.CommitEdit(Content as FrameworkElement);
    }

    if (validationPassed)
    {
        IsEditing = false;
    }

    return validationPassed;
}

private void RaisePreparingCellForEdit(RoutedEventArgs editingEventArgs)
{
    DataGrid dataGridOwner = DataGridOwner;
    if (dataGridOwner != null)
    {
        FrameworkElement currentEditingElement = EditingElement;
        DataGridPreparingCellForEditEventArgs preparingCellForEditEventArgs = new DataGridPreparingCellForEditEventArgs(Column, RowOwner, editingEventArgs, currentEditingElement);
        dataGridOwner.OnPreparingCellForEdit(preparingCellForEditEventArgs);
    }
}

internal FrameworkElement EditingElement
{
    get
    {
        // The editing element was stored in the Content property.
        return Content as FrameworkElement;
    }
}

#endregion

#region Selection

/// <summary>
/// Whether the cell is selected or not.
/// </summary>
public bool IsSelected
{
    get { return (bool)GetValue(IsSelectedProperty); }
    set { SetValue(IsSelectedProperty, value); }
}

/// <summary>
/// Represents the IsSelected property.
/// </summary>
public static readonly DependencyProperty IsSelectedProperty =
    DependencyProperty.Register("IsSelected", typeof(bool), typeof(DataGridCell), new FrameworkPropertyMetadata(false, new PropertyChangedCallback(OnIsSelectedChanged)));

private static void OnIsSelectedChanged(object sender, DependencyPropertyChangedEventArgs e)
{
    DataGridCell cell = (DataGridCell)sender;
    bool isSelected = (bool)e.NewValue;

    // There is no reason to notify the DataGrid if IsSelected's value came
    // from the DataGrid.
    if (!cell._syncingIsSelected)
    {
        DataGrid dataGrid = cell.DataGridOwner;
        if (dataGrid != null)
        {
            // Notify the DataGrid that a cell's IsSelected property changed
            // in case it was done programmatically instead of by the
            // DataGrid itself.
            dataGrid.CellIsSelectedChanged(cell, isSelected);
        }
    }

    cell.RaiseSelectionChangedEvent(isSelected);
    cell.UpdateVisualState();
}

/// <summary>
/// Used to synchronize IsSelected with the DataGrid.
/// Prevents unncessary notification back to the DataGrid.
/// </summary>
internal void SyncIsSelected(bool isSelected)
{
    bool originalValue = _syncingIsSelected;
    _syncingIsSelected = true;
    try
    {
        IsSelected = isSelected;
    }
    finally
    {
        // Restore (rather than clear) in case of re-entrancy.
        _syncingIsSelected = originalValue;
    }
}

private void RaiseSelectionChangedEvent(bool isSelected)
{
    if (isSelected)
    {
        OnSelected(new RoutedEventArgs(SelectedEvent, this));
    }
    else
    {
        OnUnselected(new RoutedEventArgs(UnselectedEvent, this));
    }
}

/// <summary>
/// Raised when the item's IsSelected property becomes true.
/// </summary>
public static readonly RoutedEvent SelectedEvent = EventManager.RegisterRoutedEvent("Selected", RoutingStrategy.Bubble, typeof(RoutedEventHandler), typeof(DataGridCell));

/// <summary>
/// Raised when the item's IsSelected property becomes true.
/// </summary>
public event RoutedEventHandler Selected
{
    add { AddHandler(SelectedEvent, value); }
    remove { RemoveHandler(SelectedEvent, value); }
}

/// <summary>
/// Called when IsSelected becomes true. Raises the Selected event.
/// </summary>
/// <param name="e">Empty event arguments.</param>
protected virtual void OnSelected(RoutedEventArgs e)
{
    RaiseEvent(e);
}

/// <summary>
/// Raised when the item's IsSelected property becomes false.
/// </summary>
public static readonly RoutedEvent UnselectedEvent = EventManager.RegisterRoutedEvent("Unselected", RoutingStrategy.Bubble, typeof(RoutedEventHandler), typeof(DataGridCell));

/// <summary>
/// Raised when the item's IsSelected property becomes false.
/// </summary>
public event RoutedEventHandler Unselected
{
    add { AddHandler(UnselectedEvent, value); }
    remove { RemoveHandler(UnselectedEvent, value); }
}

/// <summary>
/// Called when IsSelected becomes false. Raises the Unselected event.
/// </summary>
/// <param name="e">Empty event arguments.</param>
protected virtual void OnUnselected(RoutedEventArgs e)
{
    RaiseEvent(e);
}

#endregion

#region GridLines

// Different parts of the DataGrid draw different pieces of the GridLines.
// Cells draw a single line on their right side.

/// <summary>
/// Measure. This is overridden so that the cell can extend its size to account for a grid line on the right.
/// </summary>
protected override Size MeasureOverride(Size constraint)
{
    // Make space for the GridLine on the right and bottom:
    // Remove space from the constraint (since it implicitly includes the GridLine's thickness),
    // call the base implementation, and add the thickness back for the returned size.
    DataGrid dataGridOwner = DataGridOwner;
    bool horizontalLinesVisible = DataGridHelper.IsGridLineVisible(dataGridOwner, /*isHorizontal = */ true);
    bool verticalLinesVisible = DataGridHelper.IsGridLineVisible(dataGridOwner, /*isHorizontal = */ false);
    double horizontalLineThickness = 0;
    double verticalLineThickness = 0;

    if (horizontalLinesVisible)
    {
        horizontalLineThickness = dataGridOwner.HorizontalGridLineThickness;
        constraint = DataGridHelper.SubtractFromSize(constraint, horizontalLineThickness, /*height = */ true);
    }

    if (verticalLinesVisible)
    {
        verticalLineThickness = dataGridOwner.VerticalGridLineThickness;
        constraint = DataGridHelper.SubtractFromSize(constraint, verticalLineThickness, /*height = */ false);
    }

    Size desiredSize = base.MeasureOverride(constraint);

    if (horizontalLinesVisible)
    {
        desiredSize.Height += horizontalLineThickness;
    }

    if (verticalLinesVisible)
    {
        desiredSize.Width += verticalLineThickness;
    }

    return desiredSize;
}

/// <summary>
/// Arrange. This is overriden so that the cell can position its content to account for a grid line on the right.
/// </summary>
/// <param name="arrangeSize">Arrange size</param>
protected override Size ArrangeOverride(Size arrangeSize)
{
    // We don't need to adjust the Arrange position of the content. By default it is arranged at 0,0 and we're
    // adding a line to the right and bottom. All we have to do is compress and extend the size, just like Measure.
    DataGrid dataGridOwner = DataGridOwner;
    bool horizontalLinesVisible = DataGridHelper.IsGridLineVisible(dataGridOwner, /*isHorizontal = */ true);
    bool verticalLinesVisible = DataGridHelper.IsGridLineVisible(dataGridOwner, /*isHorizontal = */ false);
    double horizontalLineThickness = 0;
    double verticalLineThickness = 0;

    if (horizontalLinesVisible)
    {
        horizontalLineThickness = dataGridOwner.HorizontalGridLineThickness;
        arrangeSize = DataGridHelper.SubtractFromSize(arrangeSize, horizontalLineThickness, /*height = */ true);
    }

    if (verticalLinesVisible)
    {
        verticalLineThickness = dataGridOwner.VerticalGridLineThickness;
        arrangeSize = DataGridHelper.SubtractFromSize(arrangeSize, verticalLineThickness, /*height = */ false);
    }

    Size returnSize = base.ArrangeOverride(arrangeSize);

    if (horizontalLinesVisible)
    {
        returnSize.Height += horizontalLineThickness;
    }

    if (verticalLinesVisible)
    {
        returnSize.Width += verticalLineThickness;
    }

    return returnSize;
}

/// <summary>
/// OnRender. Overriden to draw a vertical line on the right.
/// </summary>
/// <param name="drawingContext"></param>
protected override void OnRender(DrawingContext drawingContext)
{
    base.OnRender(drawingContext);

    DataGrid dataGrid = DataGridOwner;
    if (DataGridHelper.IsGridLineVisible(dataGrid, /*isHorizontal = */ false))
    {
        // Vertical line along the right edge of the cell.
        double thickness = DataGridOwner.VerticalGridLineThickness;
        Rect rect = new Rect(new Size(thickness, RenderSize.Height));
        rect.X = RenderSize.Width - thickness;

        drawingContext.DrawRectangle(DataGridOwner.VerticalGridLinesBrush, null, rect);
    }

    if (DataGridHelper.IsGridLineVisible(dataGrid, /*isHorizontal = */ true))
    {
        // Horizontal line along the bottom edge of the cell.
        double thickness = dataGrid.HorizontalGridLineThickness;
        Rect rect = new Rect(new Size(RenderSize.Width, thickness));
        rect.Y = RenderSize.Height - thickness;

        drawingContext.DrawRectangle(dataGrid.HorizontalGridLinesBrush, null, rect);
    }
}

#endregion

#region Input

private static void OnAnyMouseLeftButtonDownThunk(object sender, MouseButtonEventArgs e)
{
    ((DataGridCell)sender).OnAnyMouseLeftButtonDown(e);
}

/// <summary>
/// The left mouse button was pressed
/// </summary>
private void OnAnyMouseLeftButtonDown(MouseButtonEventArgs e)
{
    bool focusWithin = IsKeyboardFocusWithin;
    bool isCtrlKeyPressed = (Keyboard.Modifiers & ModifierKeys.Control) == ModifierKeys.Control;

    if (focusWithin && !isCtrlKeyPressed && !e.Handled && IsSelected)
    {
        // The cell is focused and there are no other special selection gestures,
        // enter edit mode.
        DataGrid dataGridOwner = DataGridOwner;
        if (dataGridOwner != null)
        {
            // The cell was clicked, which means that other cells may
            // need to be de-selected, let the DataGrid handle that.
            dataGridOwner.HandleSelectionForCellInput(this, /* startDragging = */ false, /* allowsExtendSelect = */ true, /* allowsMinimalSelect = */ false);

            if (!IsEditing && !IsReadOnly)
            {
                // Enter edit mode
                dataGridOwner.BeginEdit(e);
                e.Handled = true;
            }

            // Please note that unselecting rows is not really considered a
            // handlable operation. Please see Dev11 bug#245510 about
            // this issue.
            //e.Handled = true;
        }
    }
    else if (!focusWithin || !IsSelected || isCtrlKeyPressed)
    {
        if (!focusWithin)
        {
            // The cell should receive focus on click
            Focus();
        }

        DataGrid dataGridOwner = DataGridOwner;
        if (dataGridOwner != null)
        {
            // Let the DataGrid process selection
            dataGridOwner.HandleSelectionForCellInput(this, /* startDragging = */ Mouse.Captured == null, /* allowsExtendSelect = */ true, /* allowsMinimalSelect = */ true);
        }

        e.Handled = true;
    }
#if PUBLIC_ONINPUT
    else
    {
        SendInputToColumn(e);
    }
#endif
}

/// <summary>
/// Reporting text composition.
/// </summary>
protected override void OnTextInput(TextCompositionEventArgs e)
{
    SendInputToColumn(e);
}

protected override void OnPreviewKeyDown(KeyEventArgs e)
{
    SendInputToColumn(e);
}

/// <summary>
/// Reporting a key was pressed.
/// </summary>
protected override void OnKeyDown(KeyEventArgs e)
{
    SendInputToColumn(e);
}

#if PUBLIC_ONINPUT
//
/// <summary>
/// Reporting a key was released
/// </summary>
protected override void OnKeyUp(KeyEventArgs e)
{
    SendInputToColumn(e);
}

/// <summary>
/// Reporting the mouse button was released
/// </summary>
protected override void OnMouseUp(MouseButtonEventArgs e)
{
    SendInputToColumn(e);
}
#endif

// Forwards raw input to the owning column so that column types can implement
// custom input handling.
private void SendInputToColumn(InputEventArgs e)
{
    var column = Column;
    if (column != null)
    {
        column.OnInput(e);
    }
}

#endregion

#region Frozen Columns

/// <summary>
/// Coercion call back for clip property which ensures that the cell overlapping with frozen
/// column gets clipped appropriately.
/// </summary>
/// <param name="d"></param>
/// <param name="baseValue"></param>
/// <returns></returns>
private static object OnCoerceClip(DependencyObject d, object baseValue)
{
    DataGridCell cell = (DataGridCell)d;
    Geometry geometry = baseValue as Geometry;
    Geometry frozenGeometry = DataGridHelper.GetFrozenClipForCell(cell);
    if (frozenGeometry != null)
    {
        if (geometry == null)
        {
            return frozenGeometry;
        }

        // Combine the externally supplied clip with the frozen-column clip.
        geometry = new CombinedGeometry(GeometryCombineMode.Intersect, geometry, frozenGeometry);
    }

    return geometry;
}

#endregion

#region Helpers

internal DataGrid DataGridOwner
{
    get
    {
        if (_owner != null)
        {
            // Prefer the row's cached owner; fall back to the ItemsControl lookup
            // when the row hasn't been attached yet.
            DataGrid dataGridOwner = _owner.DataGridOwner;
            if (dataGridOwner == null)
            {
                dataGridOwner = ItemsControl.ItemsControlFromItemContainer(_owner) as DataGrid;
            }

            return dataGridOwner;
        }

        return null;
    }
}

private Panel ParentPanel
{
    get
    {
        return VisualParent as Panel;
    }
}

internal DataGridRow RowOwner
{
    get { return _owner; }
}

internal object RowDataItem
{
    get
    {
        DataGridRow row = RowOwner;
        if (row != null)
        {
            return row.Item;
        }

        return DataContext;
    }
}

private DataGridCellsPresenter CellsPresenter
{
    get
    {
        return ItemsControl.ItemsControlFromItemContainer(this) as DataGridCellsPresenter;
    }
}

// True when neither a ContentTemplate nor a ContentTemplateSelector is set,
// i.e. the cell must build its own visual tree via the column.
private bool NeedsVisualTree
{
    get
    {
        return (ContentTemplate == null) && (ContentTemplateSelector == null);
    }
}

#endregion

#region Data

private DataGridRow _owner;
private ContainerTracking<DataGridCell> _tracker;
private bool _syncingIsSelected; // Used to prevent unnecessary notifications

#endregion
}
}
// SF API version v50.0
// Custom fields included: False
// Relationship objects included: True
//
// NOTE: This is a generated Salesforce model class; manual edits will be lost
// if the model is regenerated against the org's metadata.

using System;
using NetCoreForce.Client.Models;
using NetCoreForce.Client.Attributes;
using Newtonsoft.Json;

namespace NetCoreForce.Models
{
    ///<summary>
    /// Prompt Action
    ///<para>SObject Name: PromptAction</para>
    ///<para>Custom Object: False</para>
    ///</summary>
    public class SfPromptAction : SObject
    {
        // API name of the underlying SObject, used when building SOQL/REST requests.
        [JsonIgnore]
        public static string SObjectTypeName
        {
            get { return "PromptAction"; }
        }

        ///<summary>
        /// Prompt Action ID
        /// <para>Name: Id</para>
        /// <para>SF Type: id</para>
        /// <para>Nillable: False</para>
        ///</summary>
        [JsonProperty(PropertyName = "id")]
        [Updateable(false), Createable(false)]
        public string Id { get; set; }

        ///<summary>
        /// Owner ID
        /// <para>Name: OwnerId</para>
        /// <para>SF Type: reference</para>
        /// <para>Nillable: False</para>
        ///</summary>
        [JsonProperty(PropertyName = "ownerId")]
        public string OwnerId { get; set; }

        ///<summary>
        /// Deleted
        /// <para>Name: IsDeleted</para>
        /// <para>SF Type: boolean</para>
        /// <para>Nillable: False</para>
        ///</summary>
        [JsonProperty(PropertyName = "isDeleted")]
        [Updateable(false), Createable(false)]
        public bool? IsDeleted { get; set; }

        ///<summary>
        /// Name
        /// <para>Name: Name</para>
        /// <para>SF Type: string</para>
        /// <para>Nillable: False</para>
        ///</summary>
        [JsonProperty(PropertyName = "name")]
        public string Name { get; set; }

        ///<summary>
        /// Created Date
        /// <para>Name: CreatedDate</para>
        /// <para>SF Type: datetime</para>
        /// <para>Nillable: False</para>
        ///</summary>
        [JsonProperty(PropertyName = "createdDate")]
        [Updateable(false), Createable(false)]
        public DateTimeOffset? CreatedDate { get; set; }

        ///<summary>
        /// Created By ID
        /// <para>Name: CreatedById</para>
        /// <para>SF Type: reference</para>
        /// <para>Nillable: False</para>
        ///</summary>
        [JsonProperty(PropertyName = "createdById")]
        [Updateable(false), Createable(false)]
        public string CreatedById { get; set; }

        ///<summary>
        /// ReferenceTo: User
        /// <para>RelationshipName: CreatedBy</para>
        ///</summary>
        [JsonProperty(PropertyName = "createdBy")]
        [Updateable(false), Createable(false)]
        public SfUser CreatedBy { get; set; }

        ///<summary>
        /// Last Modified Date
        /// <para>Name: LastModifiedDate</para>
        /// <para>SF Type: datetime</para>
        /// <para>Nillable: False</para>
        ///</summary>
        [JsonProperty(PropertyName = "lastModifiedDate")]
        [Updateable(false), Createable(false)]
        public DateTimeOffset? LastModifiedDate { get; set; }

        ///<summary>
        /// Last Modified By ID
        /// <para>Name: LastModifiedById</para>
        /// <para>SF Type: reference</para>
        /// <para>Nillable: False</para>
        ///</summary>
        [JsonProperty(PropertyName = "lastModifiedById")]
        [Updateable(false), Createable(false)]
        public string LastModifiedById { get; set; }

        ///<summary>
        /// ReferenceTo: User
        /// <para>RelationshipName: LastModifiedBy</para>
        ///</summary>
        [JsonProperty(PropertyName = "lastModifiedBy")]
        [Updateable(false), Createable(false)]
        public SfUser LastModifiedBy { get; set; }

        ///<summary>
        /// System Modstamp
        /// <para>Name: SystemModstamp</para>
        /// <para>SF Type: datetime</para>
        /// <para>Nillable: False</para>
        ///</summary>
        [JsonProperty(PropertyName = "systemModstamp")]
        [Updateable(false), Createable(false)]
        public DateTimeOffset? SystemModstamp { get; set; }

        ///<summary>
        /// Prompt Version ID
        /// <para>Name: PromptVersionId</para>
        /// <para>SF Type: reference</para>
        /// <para>Nillable: False</para>
        ///</summary>
        [JsonProperty(PropertyName = "promptVersionId")]
        public string PromptVersionId { get; set; }

        ///<summary>
        /// ReferenceTo: PromptVersion
        /// <para>RelationshipName: PromptVersion</para>
        ///</summary>
        [JsonProperty(PropertyName = "promptVersion")]
        [Updateable(false), Createable(false)]
        public SfPromptVersion PromptVersion { get; set; }

        ///<summary>
        /// User ID
        /// <para>Name: UserId</para>
        /// <para>SF Type: reference</para>
        /// <para>Nillable: False</para>
        ///</summary>
        [JsonProperty(PropertyName = "userId")]
        public string UserId { get; set; }

        ///<summary>
        /// ReferenceTo: User
        /// <para>RelationshipName: User</para>
        ///</summary>
        [JsonProperty(PropertyName = "user")]
        [Updateable(false), Createable(false)]
        public SfUser User { get; set; }

        ///<summary>
        /// Times Displayed
        /// <para>Name: TimesDisplayed</para>
        /// <para>SF Type: int</para>
        /// <para>Nillable: True</para>
        ///</summary>
        [JsonProperty(PropertyName = "timesDisplayed")]
        public int? TimesDisplayed { get; set; }

        ///<summary>
        /// Times Action Taken
        /// <para>Name: TimesActionTaken</para>
        /// <para>SF Type: int</para>
        /// <para>Nillable: True</para>
        ///</summary>
        [JsonProperty(PropertyName = "timesActionTaken")]
        public int? TimesActionTaken { get; set; }

        ///<summary>
        /// Times Dismissed
        /// <para>Name: TimesDismissed</para>
        /// <para>SF Type: int</para>
        /// <para>Nillable: True</para>
        ///</summary>
        [JsonProperty(PropertyName = "timesDismissed")]
        public int? TimesDismissed { get; set; }

        ///<summary>
        /// Last Display Date
        /// <para>Name: LastDisplayDate</para>
        /// <para>SF Type: datetime</para>
        /// <para>Nillable: True</para>
        ///</summary>
        [JsonProperty(PropertyName = "lastDisplayDate")]
        public DateTimeOffset? LastDisplayDate { get; set; }

        ///<summary>
        /// Last Result
        /// <para>Name: LastResult</para>
        /// <para>SF Type: picklist</para>
        /// <para>Nillable: True</para>
        ///</summary>
        [JsonProperty(PropertyName = "lastResult")]
        public string LastResult { get; set; }

        ///<summary>
        /// Last Result Date
        /// <para>Name: LastResultDate</para>
        /// <para>SF Type: datetime</para>
        /// <para>Nillable: True</para>
        ///</summary>
        [JsonProperty(PropertyName = "lastResultDate")]
        public DateTimeOffset? LastResultDate { get; set; }

        ///<summary>
        /// Step Number
        /// <para>Name: StepNumber</para>
        /// <para>SF Type: int</para>
        /// <para>Nillable: True</para>
        ///</summary>
        [JsonProperty(PropertyName = "stepNumber")]
        public int? StepNumber { get; set; }

        ///<summary>
        /// Step Count
        /// <para>Name: StepCount</para>
        /// <para>SF Type: int</para>
        /// <para>Nillable: True</para>
        ///</summary>
        [JsonProperty(PropertyName = "stepCount")]
        public int? StepCount { get; set; }
    }
}
//-----------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//-----------------------------------------------------------------------------

namespace System.Activities.Tracking
{
    using System;
    using System.Collections.Generic;
    using System.Collections.ObjectModel;
    using System.Globalization;
    using System.Runtime;
    using System.Runtime.Serialization;
    using System.Collections;
    using System.Reflection;
    using System.Runtime.Diagnostics;

    // Tracking record emitted when an activity instance transitions state
    // (Executing/Closed/Canceled/Faulted). Variable and argument values are
    // extracted lazily on first access.
    [Fx.Tag.XamlVisible(false)]
    [DataContract]
    public sealed class ActivityStateRecord : TrackingRecord
    {
        IDictionary<string, object> variables;
        IDictionary<string, object> arguments;
        ActivityInfo activity;
        string state;

        // Shared "match everything" name collection used by the lazy
        // Variables/Arguments property getters.
        static ReadOnlyCollection<string> wildcardCollection = new ReadOnlyCollection<string>(new List<string>(1) { "*" });

        internal ActivityStateRecord(Guid instanceId, ActivityInstance instance, ActivityInstanceState state)
            : this(instanceId, new ActivityInfo(instance), state)
        {
        }

        internal ActivityStateRecord(Guid instanceId, ActivityInfo activity, ActivityInstanceState state)
            : base(instanceId)
        {
            this.Activity = activity;

            // Map the enum to the corresponding ActivityStates string constant.
            switch (state)
            {
                case ActivityInstanceState.Executing:
                    this.State = ActivityStates.Executing;
                    break;
                case ActivityInstanceState.Closed:
                    this.State = ActivityStates.Closed;
                    break;
                case ActivityInstanceState.Canceled:
                    this.State = ActivityStates.Canceled;
                    break;
                case ActivityInstanceState.Faulted:
                    this.State = ActivityStates.Faulted;
                    break;
                default:
                    throw Fx.AssertAndThrow("Invalid state value");
            }
        }

        public ActivityStateRecord(
            Guid instanceId,
            long recordNumber,
            ActivityInfo activity,
            string state)
            : base(instanceId, recordNumber)
        {
            if (activity == null)
            {
                throw FxTrace.Exception.ArgumentNull("activity");
            }

            if (string.IsNullOrEmpty(state))
            {
                throw FxTrace.Exception.ArgumentNullOrEmpty("state");
            }

            this.Activity = activity;
            this.State = state;
        }

        // Copy constructor used by Clone(). The EmptyParameters sentinel is shared
        // (reference-compared), not copied, so the "no data" case stays allocation free.
        ActivityStateRecord(ActivityStateRecord record)
            : base(record)
        {
            this.Activity = record.Activity;
            this.State = record.State;
            if (record.variables != null)
            {
                if (record.variables == ActivityUtilities.EmptyParameters)
                {
                    this.variables = ActivityUtilities.EmptyParameters;
                }
                else
                {
                    this.variables = new Dictionary<string, object>(record.variables);
                }
            }
            if (record.arguments != null)
            {
                if (record.arguments == ActivityUtilities.EmptyParameters)
                {
                    this.arguments = ActivityUtilities.EmptyParameters;
                }
                else
                {
                    this.arguments = new Dictionary<string, object>(record.arguments);
                }
            }
        }

        public ActivityInfo Activity
        {
            get { return this.activity; }
            private set { this.activity = value; }
        }

        public string State
        {
            get { return this.state; }
            private set { this.state = value; }
        }

        // Lazily extracts all of the activity's public variables on first access.
        public IDictionary<string, object> Variables
        {
            get
            {
                if (this.variables == null)
                {
                    this.variables = GetVariables(wildcardCollection);
                    Fx.Assert(this.variables.IsReadOnly, "only readonly dictionary can be set for variables");
                }
                return this.variables;
            }
            internal set
            {
                Fx.Assert(value.IsReadOnly, "only readonly dictionary can be set for variables");
                this.variables = value;
            }
        }

        // Lazily extracts all of the activity's arguments on first access.
        public IDictionary<string, object> Arguments
        {
            get
            {
                if (this.arguments == null)
                {
                    this.arguments = GetArguments(wildcardCollection);
                    Fx.Assert(this.arguments.IsReadOnly, "only readonly dictionary can be set for arguments");
                }
                return this.arguments;
            }
            internal set
            {
                Fx.Assert(value.IsReadOnly, "only readonly dictionary can be set for arguments");
                this.arguments = value;
            }
        }

        // Serialization surrogates: persist the backing fields directly so the
        // lazy extraction above is not triggered during serialization.
        [DataMember(EmitDefaultValue = false, Name = "variables")]
        internal IDictionary<string, object> SerializedVariables
        {
            get { return this.variables; }
            set { this.variables = value; }
        }

        [DataMember(EmitDefaultValue = false, Name = "arguments")]
        internal IDictionary<string, object> SerializedArguments
        {
            get { return this.arguments; }
            set { this.arguments = value; }
        }

        [DataMember(Name = "Activity")]
        internal ActivityInfo SerializedActivity
        {
            get { return this.Activity; }
            set { this.Activity = value; }
        }

        [DataMember(Name = "State")]
        internal string SerializedState
        {
            get { return this.State; }
            set { this.State = value; }
        }

        protected internal override TrackingRecord Clone()
        {
            return new ActivityStateRecord(this);
        }

        public override string ToString()
        {
            return string.Format(CultureInfo.CurrentCulture,
                "ActivityStateRecord {{ {0}, Activity {{ {1} }}, State = {2} }}",
                base.ToString(),
                this.Activity.ToString(),
                this.State);
        }

        // Extracts the requested variable values, walking up the parent chain
        // (staying within the activity's IdSpace) to resolve names in scope.
        internal IDictionary<string, object> GetVariables(ICollection<string> variables)
        {
            Dictionary<string, object> trackedVariables = null; // delay allocated through TrackData

            ActivityInstance currentInstance = this.Activity.Instance;
            if (currentInstance != null)
            {
                Activity currentElement = currentInstance.Activity;
                Activity startActivity = currentInstance.Activity;
                bool containsWildcard = variables.Contains("*");

                //count defines how many items we can get in this lookup. It represents the maximum number of items that can be extracted,
                //if * is specified, any other names specified are expected to be variables defined in scope, not in the activity itself.
                //if a variable name in the activity is specified, the lookup continues through the variables in scope.
                int count = containsWildcard ? currentElement.RuntimeVariables.Count + variables.Count - 1 : variables.Count;
                IdSpace activityIdSpace = currentElement.MemberOf;

                while (currentInstance != null)
                {
                    //* only extracts variables of the current Activity and not variables in scope.
                    bool useWildCard = containsWildcard && startActivity == currentElement;

                    // we only track public Variables, not ImplementationVariables
                    for (int i = 0; i < currentElement.RuntimeVariables.Count; i++)
                    {
                        Variable variable = currentElement.RuntimeVariables[i];
                        if (TrackData(variable.Name, variable.Id, currentInstance, variables, useWildCard, ref trackedVariables))
                        {
                            if (trackedVariables.Count == count)
                            {
                                return new ReadOnlyDictionaryInternal<string, object>(trackedVariables);
                            }
                        }
                    }

                    // Advance to the nearest ancestor in the same IdSpace (skip
                    // ancestors that belong to other, e.g. implementation, IdSpaces).
                    bool foundNext = false;
                    while (!foundNext)
                    {
                        currentInstance = currentInstance.Parent;
                        if (currentInstance != null)
                        {
                            currentElement = currentInstance.Activity;
                            foundNext = currentElement.MemberOf.Equals(activityIdSpace);
                        }
                        else
                        {
                            // We set foundNext to true to get out of our loop.
                            foundNext = true;
                        }
                    }
                }
            }

            if (trackedVariables == null)
            {
                return ActivityUtilities.EmptyParameters;
            }
            else
            {
                Fx.Assert(trackedVariables.Count > 0, "we should only allocate the dictionary if we're putting data in it");
                return new ReadOnlyDictionaryInternal<string, object>(trackedVariables);
            }
        }

        // Extracts the requested argument values from the current activity only
        // (arguments, unlike variables, are not resolved up the parent chain).
        internal IDictionary<string, object> GetArguments(ICollection<string> arguments)
        {
            Dictionary<string, object> trackedArguments = null; // delay allocated through TrackData

            ActivityInstance currentInstance = this.Activity.Instance;
            if (currentInstance != null)
            {
                Activity currentElement = currentInstance.Activity;
                bool containsWildcard = arguments.Contains("*");
                int count = containsWildcard ? currentElement.RuntimeArguments.Count : arguments.Count;
                bool isActivityStateExecuting = ActivityStates.Executing.Equals(this.State, StringComparison.Ordinal);

                //look at arguments for this element.
                for (int i = 0; i < currentElement.RuntimeArguments.Count; i++)
                {
                    RuntimeArgument argument = currentElement.RuntimeArguments[i];

                    // OutArguments will always start with default(T), so there is no need to track them when state == Executing
                    if (isActivityStateExecuting && argument.Direction == ArgumentDirection.Out)
                    {
                        continue;
                    }

                    if (TrackData(argument.Name, argument.Id, currentInstance, arguments, containsWildcard, ref trackedArguments))
                    {
                        if (trackedArguments.Count == count)
                        {
                            break;
                        }
                    }
                }
            }

            if (trackedArguments == null)
            {
                return ActivityUtilities.EmptyParameters;
            }
            else
            {
                Fx.Assert(trackedArguments.Count > 0, "we should only allocate the dictionary if we're putting data in it");
                return new ReadOnlyDictionaryInternal<string, object>(trackedArguments);
            }
        }

        // Captures one value into trackedData (allocating the dictionary on first
        // use) when the name matches, and emits an ETW event if enabled.
        // Returns true only when a value was actually captured.
        bool TrackData(string name, int id, ActivityInstance currentInstance, ICollection<string> data, bool wildcard, ref Dictionary<string, object> trackedData)
        {
            if (wildcard || data.Contains(name))
            {
                Location location = currentInstance.Environment.GetSpecificLocation(id);
                if (location != null)
                {
                    if (trackedData == null)
                    {
                        trackedData = new Dictionary<string, object>(10);
                    }

                    string dataName = name ?? NameGenerator.Next();
                    trackedData[dataName] = location.Value;
                    if (TD.TrackingDataExtractedIsEnabled())
                    {
                        TD.TrackingDataExtracted(dataName, this.Activity.Name);
                    }
                    return true;
                }
            }
            return false;
        }
    }
}
/*
   Copyright 2014 David Bordoley
   Copyright 2014 Zumero, LLC

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
*/

using System;
using System.Collections.Generic;

namespace SQLitePCL.pretty
{
    /// <summary>
    /// Static methods for opening instances of <see cref="IDatabaseConnection"/>
    /// and for accessing static SQLite3 properties, and functions.
    /// </summary>
    public static class SQLite3
    {
        /// <summary>
        /// Walks the native library's compile-option list by probing
        /// <c>sqlite3_compileoption_get</c> with increasing indexes until it
        /// returns <see langword="null"/> (end of list).
        /// </summary>
        private static IEnumerator<string> CompilerOptionsEnumerator()
        {
            for (int i = 0; ; i++)
            {
                var option = raw.sqlite3_compileoption_get(i);
                if (option == null)
                {
                    break;
                }

                yield return option;
            }
        }

        // Wrapped in a DelegatingEnumerable so each enumeration re-queries the
        // native library instead of caching a snapshot.
        private static readonly IEnumerable<string> compilerOptions =
            new DelegatingEnumerable<string>(CompilerOptionsEnumerator);

        /// <summary>
        /// The SQLite compiler options that were defined at compile time.
        /// </summary>
        /// <seealso href="https://sqlite.org/c3ref/compileoption_get.html"/>
        public static IEnumerable<string> CompilerOptions
        {
            get { return compilerOptions; }
        }

        private static readonly SQLiteVersion version =
            SQLiteVersion.Of(raw.sqlite3_libversion_number());

        /// <summary>
        /// Enables or disables the sharing of the database cache and schema data structures
        /// between connections to the same database.
        /// </summary>
        /// <seealso href="https://www.sqlite.org/c3ref/enable_shared_cache.html"/>
        public static bool EnableSharedCache
        {
            set
            {
                int rc = raw.sqlite3_enable_shared_cache(value ? 1 : 0);
                SQLiteException.CheckOk(rc);
            }
        }

        /// <summary>
        /// The SQLite version.
        /// </summary>
        /// <seealso href="https://sqlite.org/c3ref/libversion.html"/>
        public static SQLiteVersion Version
        {
            get { return version; }
        }

        /// <summary>
        /// The SQLite source id.
        /// </summary>
        /// <seealso href="https://sqlite.org/c3ref/libversion.html"/>
        public static string SourceId
        {
            get { return raw.sqlite3_sourceid(); }
        }

        /// <summary>
        /// Returns the number of bytes of memory currently outstanding (malloced but not freed) by SQLite.
        /// </summary>
        /// <seealso href="https://sqlite.org/c3ref/memory_highwater.html"/>
        public static long MemoryUsed
        {
            get { return raw.sqlite3_memory_used(); }
        }

        /// <summary>
        /// Returns the maximum value of <see cref="MemoryUsed"/> since the high-water mark was last reset.
        /// </summary>
        /// <seealso href="https://sqlite.org/c3ref/memory_highwater.html"/>
        public static long MemoryHighWater
        {
            // resetFlag = 0: query the high-water mark without resetting it.
            get { return raw.sqlite3_memory_highwater(0); }
        }

        /// <summary>
        /// Indicates whether the specified option was defined at compile time.
        /// </summary>
        /// <param name="option">The SQLite compile option. The SQLITE_ prefix may be omitted.</param>
        /// <returns><see langword="true"/> if the compile option is use, otherwise <see langword="false"/></returns>
        /// <seealso href="https://sqlite.org/c3ref/compileoption_get.html"/>
        public static bool CompileOptionUsed(string option)
        {
            Contract.Requires(option != null);
            return raw.sqlite3_compileoption_used(option) != 0;
        }

        /// <summary>
        /// Opens a SQLite database.
        /// </summary>
        /// <param name="filename">The database filename.</param>
        /// <returns>A <see cref="SQLiteDatabaseConnection"/> instance.</returns>
        /// <seealso href="https://sqlite.org/c3ref/open.html"/>
        public static SQLiteDatabaseConnection Open(string filename)
        {
            Contract.Requires(filename != null);

            sqlite3 db;
            int rc = raw.sqlite3_open(filename, out db);
            SQLiteException.CheckOk(db, rc);

            return new SQLiteDatabaseConnection(db);
        }

        /// <summary>
        /// Opens an in memory SQLite database. This is useful for testing.
        /// </summary>
        /// <returns>A <see cref="SQLiteDatabaseConnection"/> instance.</returns>
        /// <seealso href="https://sqlite.org/c3ref/open.html"/>
        public static SQLiteDatabaseConnection OpenInMemory() =>
            SQLite3.Open(":memory:");

        /// <summary>
        /// Opens a SQLite database.
        /// </summary>
        /// <param name="filename">The database filename.</param>
        /// <param name="flags"><see cref="ConnectionFlags"/> used to defined if the database is readonly,
        /// read/write and whether a new database file should be created if it does not already exist.</param>
        /// <param name="vfs">
        /// The name of the sqlite3_vfs object that defines the operating system interface
        /// that the new database connection should use. If <see langword="null"/>, then
        /// the default sqlite3_vfs object is used.</param>
        /// <returns>A <see cref="SQLiteDatabaseConnection"/> instance.</returns>
        /// <seealso href="https://sqlite.org/c3ref/open.html"/>
        public static SQLiteDatabaseConnection Open(string filename, ConnectionFlags flags, string vfs)
        {
            Contract.Requires(filename != null);

            sqlite3 db;
            int rc = raw.sqlite3_open_v2(filename, out db, (int)flags, vfs);

            // FIX: pass the connection handle to CheckOk, matching the
            // Open(string) overload above. Per the sqlite3_open_v2 docs a
            // handle is usually returned even on failure, so this lets the
            // raised SQLiteException carry the per-connection error message
            // (sqlite3_errmsg) instead of only the bare result code.
            SQLiteException.CheckOk(db, rc);

            return new SQLiteDatabaseConnection(db);
        }

        /// <summary>
        /// Reset the memory high-water mark to the current value of <see cref="MemoryUsed"/>.
        /// </summary>
        /// <seealso href="https://sqlite.org/c3ref/memory_highwater.html"/>
        public static void ResetMemoryHighWater() =>
            raw.sqlite3_memory_highwater(1);

        /// <summary>
        /// Determines if the text provided forms a complete SQL statement.
        /// </summary>
        /// <param name="sql">The text to evaluate.</param>
        /// <returns><see langword="true"/> if the text forms a complete SQL
        /// statement, otherwise <see langword="false"/>.</returns>
        public static bool IsCompleteStatement(string sql)
        {
            Contract.Requires(sql != null);
            return raw.sqlite3_complete(sql) != 0;
        }

        /// <summary>
        /// Retrieve runtime status information about the
        /// performance of SQLite, and optionally to reset various highwater marks.
        /// </summary>
        /// <seealso href="https://www.sqlite.org/c3ref/status.html"/>
        /// <param name="statusCode">The specific parameter to measure.</param>
        /// <param name="current">The current value of the performance metric.</param>
        /// <param name="highwater">The highwater value of the performance metric.</param>
        /// <param name="reset">If <see langword="true"/>, then the highest record value is reset.</param>
        public static void Status(SQLiteStatusCode statusCode, out int current, out int highwater, bool reset)
        {
            int rc = raw.sqlite3_status((int)statusCode, out current, out highwater, reset ? 1 : 0);
            SQLiteException.CheckOk(rc);
        }
    }
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.InteropServices;
using Microsoft.CodeAnalysis.ExpressionEvaluator;
using Microsoft.VisualStudio.Debugger.Clr;
using Microsoft.VisualStudio.Debugger.Evaluation;
using Roslyn.Test.Utilities;
using Xunit;

namespace Microsoft.CodeAnalysis.CSharp.UnitTests
{
    // Result-provider tests for how [DebuggerDisplay] (its display string and the
    // Name/Type named arguments) shapes the name/value/type columns that the
    // debugger windows show for an object, and how that interacts with expansion
    // (members, pointers, arrays, type proxies).
    public class DebuggerDisplayAttributeTests : CSharpResultProviderTestBase
    {
        // Display strings with no {expression} holes are used verbatim for the
        // value column; Name/Type override the respective columns when present.
        [Fact]
        public void WithoutExpressionHoles()
        {
            var source = @" using System.Diagnostics; class C0 { } [DebuggerDisplay(""Value"")] class C1 { } [DebuggerDisplay(""Value"", Name=""Name"")] class C2 { } [DebuggerDisplay(""Value"", Type=""Type"")] class C3 { } [DebuggerDisplay(""Value"", Name=""Name"", Type=""Type"")] class C4 { } class Wrapper { C0 c0 = new C0(); C1 c1 = new C1(); C2 c2 = new C2(); C3 c3 = new C3(); C4 c4 = new C4(); } ";
            var assembly = GetAssembly(source);
            var type = assembly.GetType("Wrapper");
            var value = CreateDkmClrValue(type.Instantiate(), type, evalFlags: DkmEvaluationResultFlags.None);
            Verify(GetChildren(FormatResult("w", value)),
                EvalResult("c0", "{C0}", "C0", "w.c0", DkmEvaluationResultFlags.None),
                EvalResult("c1", "Value", "C1", "w.c1", DkmEvaluationResultFlags.None),
                EvalResult("Name", "Value", "C2", "w.c2", DkmEvaluationResultFlags.None),
                EvalResult("c3", "Value", "Type", "w.c3", DkmEvaluationResultFlags.None),
                EvalResult("Name", "Value", "Type", "w.c4", DkmEvaluationResultFlags.None));
        }

        // Display strings that are nothing but a {hole} evaluate the member and
        // show its formatted value (strings keep their quotes).
        [Fact]
        public void OnlyExpressionHoles()
        {
            var source = @" using System.Diagnostics; [DebuggerDisplay(""{value}"", Name=""{name}"", Type=""{type}"")] class C { string name = ""Name""; string value = ""Value""; string type = ""Type""; } class Wrapper { C c = new C(); } ";
            var assembly = GetAssembly(source);
            var type = assembly.GetType("Wrapper");
            var value = CreateDkmClrValue(type.Instantiate(), type, evalFlags: DkmEvaluationResultFlags.None);
            Verify(GetChildren(FormatResult("c", value)),
                EvalResult("\"Name\"", "\"Value\"", "\"Type\"", "c.c", DkmEvaluationResultFlags.Expandable));
        }

        // Literal text around a {hole} is preserved around the evaluated value.
        [Fact]
        public void FormatStrings()
        {
            var source = @" using System.Diagnostics; [DebuggerDisplay(""<{value}>"", Name=""<{name}>"", Type=""<{type}>"")] class C { string name = ""Name""; string value = ""Value""; string type = ""Type""; } class Wrapper { C c = new C(); } ";
            var assembly = GetAssembly(source);
            var type = assembly.GetType("Wrapper");
            var value = CreateDkmClrValue(type.Instantiate(), type, evalFlags: DkmEvaluationResultFlags.None);
            Verify(GetChildren(FormatResult("w", value)),
                EvalResult("<\"Name\">", "<\"Value\">", "<\"Type\">", "w.c", DkmEvaluationResultFlags.Expandable));
        }

        // A {hole} that does not bind produces the standard problem message in
        // the value column rather than failing the whole result.
        [Fact]
        public void BindingError()
        {
            var source = @" using System.Diagnostics; [DebuggerDisplay(""<{missing}>"")] class C { } ";
            const string rootExpr = "c"; // Note that this is the full name in all cases - DebuggerDisplayAttribute does not affect it.
            var assembly = GetAssembly(source);
            var type = assembly.GetType("C");
            var value = CreateDkmClrValue(type.Instantiate(), type, evalFlags: DkmEvaluationResultFlags.None);
            Verify(FormatResult(rootExpr, value),
                EvalResult(rootExpr, "<Problem evaluating expression>", "C", rootExpr, DkmEvaluationResultFlags.None)); // Message inlined without quotation marks.
        }

        // A {hole} that evaluates to an object of the same attributed type must
        // not recurse into that object's own DebuggerDisplay.
        [Fact]
        public void RecursiveDebuggerDisplay()
        {
            var source = @" using System.Diagnostics; [DebuggerDisplay(""{value}"")] class C { C value; C() { this.value = this; } } ";
            const string rootExpr = "c";
            var assembly = GetAssembly(source);
            var type = assembly.GetType("C");
            var value = CreateDkmClrValue(type.Instantiate(), type, evalFlags: DkmEvaluationResultFlags.None);
            // No stack overflow, since attribute on computed value is ignored.
            Verify(FormatResult(rootExpr, value),
                EvalResult(rootExpr, "{C}", "C", rootExpr, DkmEvaluationResultFlags.Expandable));
        }

        // Multiple [DebuggerDisplay] attributes: only the first is honored.
        [Fact]
        public void MultipleAttributes()
        {
            var source = @" using System.Diagnostics; [DebuggerDisplay(""V1"")] [DebuggerDisplay(""V2"")] class C { } ";
            const string rootExpr = "c";
            var assembly = GetAssembly(source);
            var type = assembly.GetType("C");
            var value = CreateDkmClrValue(type.Instantiate(), type, evalFlags: DkmEvaluationResultFlags.None);
            // First attribute wins, as in dev12.
            Verify(FormatResult(rootExpr, value),
                EvalResult(rootExpr, "V1", "C", rootExpr));
        }

        // Null attribute arguments fall back to the default display.
        [Fact]
        public void NullValues()
        {
            var source = @" using System.Diagnostics; [DebuggerDisplay(null, Name=null, Type=null)] class C { } ";
            const string rootExpr = "c";
            var assembly = GetAssembly(source);
            var type = assembly.GetType("C");
            var value = CreateDkmClrValue(type.Instantiate(), type, evalFlags: DkmEvaluationResultFlags.None);
            Verify(FormatResult(rootExpr, value),
                EvalResult(rootExpr, "{C}", "C", rootExpr));
        }

        // Empty strings are honored literally, yielding empty columns.
        [Fact]
        public void EmptyStringValues()
        {
            var source = @" using System.Diagnostics; [DebuggerDisplay("""", Name="""", Type="""")] class C { } class Wrapper { C c = new C(); } ";
            var assembly = GetAssembly(source);
            var type = assembly.GetType("Wrapper");
            var value = CreateDkmClrValue(type.Instantiate(), type, evalFlags: DkmEvaluationResultFlags.None);
            Verify(GetChildren(FormatResult("w", value)),
                EvalResult("", "", "", "w.c"));
        }

        // DebuggerDisplay on an open generic applies to constructed instances;
        // the type column still shows the constructed type name.
        [Fact]
        public void ConstructedGenericType()
        {
            var source = @" using System.Diagnostics; [DebuggerDisplay(""Name"")] class C<T> { } ";
            const string rootExpr = "c";
            var assembly = GetAssembly(source);
            var type = assembly.GetType("C`1").MakeGenericType(typeof(int));
            var value = CreateDkmClrValue(type.Instantiate(), type, evalFlags: DkmEvaluationResultFlags.None);
            Verify(FormatResult(rootExpr, value),
                EvalResult("c", "Name", "C<int>", rootExpr));
        }

        // Members whose type carries DebuggerDisplay use the attribute's Name
        // in the children list (even for explicit interface implementations).
        [Fact]
        public void MemberExpansion()
        {
            var source = @" using System.Diagnostics; interface I { D P { get; 
} } class C : I { D I.P { get { return new D(); } } D Q { get { return new D(); } } } [DebuggerDisplay(""Value"", Name=""Name"")] class D { } ";
            const string rootExpr = "c";
            var assembly = GetAssembly(source);
            var type = assembly.GetType("C");
            var value = CreateDkmClrValue(type.Instantiate(), type, evalFlags: DkmEvaluationResultFlags.None);
            var root = FormatResult(rootExpr, value);
            Verify(root,
                EvalResult(rootExpr, "{C}", "C", rootExpr, DkmEvaluationResultFlags.Expandable));
            Verify(GetChildren(root),
                EvalResult("Name", "Value", "D", "((I)c).P", DkmEvaluationResultFlags.ReadOnly), // Not "I.Name".
                EvalResult("Name", "Value", "D", "c.Q", DkmEvaluationResultFlags.ReadOnly));
        }

        // Null pointers to an attributed struct: the pointer fields display
        // their address and are not expandable.
        [Fact]
        public void PointerDereferenceExpansion_Null()
        {
            var source = @" using System.Diagnostics; [DebuggerDisplay(""Value"", Name=""Name"", Type=""Type"")] unsafe struct Display { Display* DisplayPointer; NoDisplay* NoDisplayPointer; } unsafe struct NoDisplay { Display* DisplayPointer; NoDisplay* NoDisplayPointer; } class Wrapper { Display display = new Display(); } ";
            var assembly = GetUnsafeAssembly(source);
            var type = assembly.GetType("Wrapper");
            var value = CreateDkmClrValue(type.Instantiate(), type, evalFlags: DkmEvaluationResultFlags.None);
            var root = FormatResult("wrapper", value);
            Verify(DepthFirstSearch(GetChildren(root).Single(), maxDepth: 3),
                EvalResult("Name", "Value", "Type", "wrapper.display", DkmEvaluationResultFlags.Expandable),
                EvalResult("DisplayPointer", PointerToString(IntPtr.Zero), "Display*", "wrapper.display.DisplayPointer"),
                EvalResult("NoDisplayPointer", PointerToString(IntPtr.Zero), "NoDisplay*", "wrapper.display.NoDisplayPointer"));
        }

        // Non-null pointers: dereferenced values honor DebuggerDisplay of the
        // pointee type. Struct instances are pinned so stable addresses can be
        // asserted; handles are freed at the end.
        [Fact]
        public void PointerDereferenceExpansion_NonNull()
        {
            var source = @" using System; using System.Diagnostics; [DebuggerDisplay(""Value"", Name=""Name"", Type=""Type"")] unsafe struct Display { public Display* DisplayPointer; public NoDisplay* NoDisplayPointer; } unsafe struct NoDisplay { public Display* DisplayPointer; public 
NoDisplay* NoDisplayPointer; } unsafe class C { Display* DisplayPointer; NoDisplay* NoDisplayPointer; public C(IntPtr d, IntPtr nd) { this.DisplayPointer = (Display*)d; this.NoDisplayPointer = (NoDisplay*)nd; this.DisplayPointer->DisplayPointer = this.DisplayPointer; this.DisplayPointer->NoDisplayPointer = this.NoDisplayPointer; this.NoDisplayPointer->DisplayPointer = this.DisplayPointer; this.NoDisplayPointer->NoDisplayPointer = this.NoDisplayPointer; } } ";
            var assembly = GetUnsafeAssembly(source);
            unsafe
            {
                var displayType = assembly.GetType("Display");
                var displayInstance = displayType.Instantiate();
                var displayHandle = GCHandle.Alloc(displayInstance, GCHandleType.Pinned);
                var displayPtr = displayHandle.AddrOfPinnedObject();
                var noDisplayType = assembly.GetType("NoDisplay");
                var noDisplayInstance = noDisplayType.Instantiate();
                var noDisplayHandle = GCHandle.Alloc(noDisplayInstance, GCHandleType.Pinned);
                var noDisplayPtr = noDisplayHandle.AddrOfPinnedObject();
                var testType = assembly.GetType("C");
                var testInstance = ReflectionUtilities.Instantiate(testType, displayPtr, noDisplayPtr);
                var testValue = CreateDkmClrValue(testInstance, testType, evalFlags: DkmEvaluationResultFlags.None);
                var displayPtrString = PointerToString(displayPtr);
                var noDisplayPtrString = PointerToString(noDisplayPtr);
                Verify(DepthFirstSearch(FormatResult("c", testValue), maxDepth: 3),
                    EvalResult("c", "{C}", "C", "c", DkmEvaluationResultFlags.Expandable),
                    EvalResult("DisplayPointer", displayPtrString, "Display*", "c.DisplayPointer", DkmEvaluationResultFlags.Expandable),
                    EvalResult("Name", "Value", "Type", "*c.DisplayPointer", DkmEvaluationResultFlags.Expandable),
                    EvalResult("DisplayPointer", displayPtrString, "Display*", "(*c.DisplayPointer).DisplayPointer", DkmEvaluationResultFlags.Expandable),
                    EvalResult("NoDisplayPointer", noDisplayPtrString, "NoDisplay*", "(*c.DisplayPointer).NoDisplayPointer", DkmEvaluationResultFlags.Expandable),
                    EvalResult("NoDisplayPointer", noDisplayPtrString, "NoDisplay*", "c.NoDisplayPointer", DkmEvaluationResultFlags.Expandable),
                    EvalResult("*c.NoDisplayPointer", "{NoDisplay}", "NoDisplay", "*c.NoDisplayPointer", DkmEvaluationResultFlags.Expandable),
                    EvalResult("DisplayPointer", displayPtrString, "Display*", "(*c.NoDisplayPointer).DisplayPointer", DkmEvaluationResultFlags.Expandable),
                    EvalResult("NoDisplayPointer", noDisplayPtrString, "NoDisplay*", "(*c.NoDisplayPointer).NoDisplayPointer", DkmEvaluationResultFlags.Expandable));
                displayHandle.Free();
                noDisplayHandle.Free();
            }
        }

        // Array elements whose type carries DebuggerDisplay are named/valued by
        // the attribute; elements of a plain type keep the "[index]" name.
        [Fact]
        public void ArrayExpansion()
        {
            var source = @" using System.Diagnostics; [DebuggerDisplay(""Value"", Name=""Name"", Type=""Type"")] struct Display { public Display[] DisplayArray; public NoDisplay[] NoDisplayArray; } struct NoDisplay { public Display[] DisplayArray; public NoDisplay[] NoDisplayArray; } class C { public Display[] DisplayArray; public NoDisplay[] NoDisplayArray; public C() { this.DisplayArray = new[] { new Display() }; this.NoDisplayArray = new[] { new NoDisplay() }; this.DisplayArray[0].DisplayArray = this.DisplayArray; this.DisplayArray[0].NoDisplayArray = this.NoDisplayArray; this.NoDisplayArray[0].DisplayArray = this.DisplayArray; this.NoDisplayArray[0].NoDisplayArray = this.NoDisplayArray; } } ";
            var assembly = GetUnsafeAssembly(source);
            var type = assembly.GetType("C");
            var value = CreateDkmClrValue(type.Instantiate(), type, evalFlags: DkmEvaluationResultFlags.None);
            var root = FormatResult("c", value);
            Verify(DepthFirstSearch(root, maxDepth: 4),
                EvalResult("c", "{C}", "C", "c", DkmEvaluationResultFlags.Expandable),
                EvalResult("DisplayArray", "{Display[1]}", "Display[]", "c.DisplayArray", DkmEvaluationResultFlags.Expandable),
                EvalResult("Name", "Value", "Type", "c.DisplayArray[0]", DkmEvaluationResultFlags.Expandable),
                EvalResult("DisplayArray", "{Display[1]}", "Display[]", "c.DisplayArray[0].DisplayArray", DkmEvaluationResultFlags.Expandable),
                EvalResult("Name", "Value", "Type", "c.DisplayArray[0].DisplayArray[0]", DkmEvaluationResultFlags.Expandable),
                EvalResult("NoDisplayArray", "{NoDisplay[1]}", "NoDisplay[]", "c.DisplayArray[0].NoDisplayArray", DkmEvaluationResultFlags.Expandable),
                EvalResult("[0]", "{NoDisplay}", "NoDisplay", "c.DisplayArray[0].NoDisplayArray[0]", DkmEvaluationResultFlags.Expandable),
                EvalResult("NoDisplayArray", "{NoDisplay[1]}", "NoDisplay[]", "c.NoDisplayArray", DkmEvaluationResultFlags.Expandable),
                EvalResult("[0]", "{NoDisplay}", "NoDisplay", "c.NoDisplayArray[0]", DkmEvaluationResultFlags.Expandable),
                EvalResult("DisplayArray", "{Display[1]}", "Display[]", "c.NoDisplayArray[0].DisplayArray", DkmEvaluationResultFlags.Expandable),
                EvalResult("Name", "Value", "Type", "c.NoDisplayArray[0].DisplayArray[0]", DkmEvaluationResultFlags.Expandable),
                EvalResult("NoDisplayArray", "{NoDisplay[1]}", "NoDisplay[]", "c.NoDisplayArray[0].NoDisplayArray", DkmEvaluationResultFlags.Expandable),
                EvalResult("[0]", "{NoDisplay}", "NoDisplay", "c.NoDisplayArray[0].NoDisplayArray[0]", DkmEvaluationResultFlags.Expandable));
        }

        // DebuggerTypeProxy members are shown through the proxy ("new P(c)..."),
        // with the real members available under "Raw View".
        [Fact]
        public void DebuggerTypeProxyExpansion()
        {
            var source = @" using System.Diagnostics; [DebuggerDisplay(""Value"", Name=""Name"", Type=""Type"")] public struct Display { } public struct NoDisplay { } [DebuggerTypeProxy(typeof(P))] public class C { public Display DisplayC = new Display(); public NoDisplay NoDisplayC = new NoDisplay(); } public class P { public Display DisplayP = new Display(); public NoDisplay NoDisplayP = new NoDisplay(); public P(C c) { } } ";
            var assembly = GetUnsafeAssembly(source);
            var type = assembly.GetType("C");
            var value = CreateDkmClrValue(type.Instantiate(), type, evalFlags: DkmEvaluationResultFlags.None);
            var root = FormatResult("c", value);
            Verify(DepthFirstSearch(root, maxDepth: 4),
                EvalResult("c", "{C}", "C", "c", DkmEvaluationResultFlags.Expandable),
                EvalResult("Name", "Value", "Type", "new P(c).DisplayP"),
                EvalResult("NoDisplayP", "{NoDisplay}", "NoDisplay", "new P(c).NoDisplayP"),
                EvalResult("Raw View", null, "", "c, raw", DkmEvaluationResultFlags.Expandable | DkmEvaluationResultFlags.ReadOnly, DkmEvaluationResultCategory.Data),
                EvalResult("Name", "Value", "Type", "c.DisplayC"),
                EvalResult("NoDisplayC", "{NoDisplay}", "NoDisplay", "c.NoDisplayC"));
        }

        // A null instance of an attributed type shows "null", not the display string.
        [Fact]
        public void NullInstance()
        {
            var source = @" using System.Diagnostics; [DebuggerDisplay(""Hello"")] class C { } ";
            const string rootExpr = "c";
            var assembly = GetAssembly(source);
            var type = assembly.GetType("C");
            var value = CreateDkmClrValue(null, type, evalFlags: DkmEvaluationResultFlags.None);
            Verify(FormatResult(rootExpr, value),
                EvalResult(rootExpr, "null", "C", rootExpr));
        }

        // The attribute is inherited from a generic base; {holes} are evaluated
        // against the runtime (derived) instance.
        [Fact]
        public void NonGenericDisplayAttributeOnGenericBase()
        {
            var source = @" using System.Diagnostics; [DebuggerDisplay(""Type={GetType()}"")] class A<T> { } class B : A<int> { } ";
            var assembly = GetAssembly(source);
            var type = assembly.GetType("B");
            var value = CreateDkmClrValue(type.Instantiate(), type, evalFlags: DkmEvaluationResultFlags.None);
            var result = FormatResult("b", value);
            Verify(result,
                EvalResult("b", "Type={B}", "B", "b", DkmEvaluationResultFlags.None));
        }

        // Name= is ignored for a root (watch/locals) expression but honored
        // when the same object appears as a child.
        [WorkItem(1016895)]
        [Fact]
        public void RootVersusInternal()
        {
            var source = @" using System.Diagnostics; [DebuggerDisplay(""Value"", Name = ""Name"")] class A { } class B { A a; public B(A a) { this.a = a; } } ";
            var assembly = GetAssembly(source);
            var typeA = assembly.GetType("A");
            var typeB = assembly.GetType("B");
            var instanceA = typeA.Instantiate();
            var instanceB = typeB.Instantiate(instanceA);
            var result = FormatResult("a", CreateDkmClrValue(instanceA));
            Verify(result,
                EvalResult("a", "Value", "A", "a", DkmEvaluationResultFlags.None));
            result = FormatResult("b", CreateDkmClrValue(instanceB));
            Verify(GetChildren(result),
                EvalResult("Name", "Value", "A", "b.a", DkmEvaluationResultFlags.None));
        }

        // A member whose evaluation fails (simulated timeout on Q via the
        // getMemberValue hook) yields an EvalFailedResult; the others still work.
        [Fact]
        public void Error()
        {
            var source = @"using System.Diagnostics; [DebuggerDisplay(""Value"", Name=""Name"", Type=""Type"")] class A { } class B { bool f; internal A P { get 
{ return new A(); } } internal A Q { get { while(f) { } return new A(); } } }";
            DkmClrRuntimeInstance runtime = null;
            GetMemberValueDelegate getMemberValue = (v, m) => (m == "Q") ? CreateErrorValue(runtime.GetType("A"), "Function evaluation timed out") : null;
            runtime = new DkmClrRuntimeInstance(ReflectionUtilities.GetMscorlibAndSystemCore(GetAssembly(source)), getMemberValue: getMemberValue);
            using (runtime.Load())
            {
                var type = runtime.GetType("B");
                var value = CreateDkmClrValue(type.Instantiate(), type: type);
                var evalResult = FormatResult("o", value);
                var children = GetChildren(evalResult);
                Verify(children,
                    EvalResult("Name", "Value", "Type", "o.P", DkmEvaluationResultFlags.ReadOnly),
                    EvalFailedResult("Q", "Function evaluation timed out", "A", "o.Q"),
                    EvalResult("f", "false", "bool", "o.f", DkmEvaluationResultFlags.Boolean));
            }
        }

        // Flattens the result tree (pre-order) down to maxDepth so the tests
        // above can assert the whole expansion in one Verify call.
        private IReadOnlyList<DkmEvaluationResult> DepthFirstSearch(DkmEvaluationResult root, int maxDepth)
        {
            var builder = ArrayBuilder<DkmEvaluationResult>.GetInstance();
            DepthFirstSearchInternal(builder, root, 0, maxDepth);
            return builder.ToImmutableAndFree();
        }

        // Recursive worker for DepthFirstSearch; children are only fetched while
        // childDepth <= maxDepth, so nodes at maxDepth are leaves here.
        private void DepthFirstSearchInternal(ArrayBuilder<DkmEvaluationResult> builder, DkmEvaluationResult curr, int depth, int maxDepth)
        {
            Assert.InRange(depth, 0, maxDepth);
            builder.Add(curr);
            var childDepth = depth + 1;
            if (childDepth <= maxDepth)
            {
                foreach (var child in GetChildren(curr))
                {
                    DepthFirstSearchInternal(builder, child, childDepth, maxDepth);
                }
            }
        }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using NUnit.Framework;
using Mono.Cecil.Rocks;

// Fixture namespace: each member below documents the XML doc-comment ID string
// ("T:", "M:", "F:", "P:", "E:" prefixes) that Cecil's DocCommentId is expected
// to generate for it; the tests in Mono.Cecil.Tests assert exactly those IDs.
namespace N
{
    /// <summary>
    /// ID string generated is "T:N.X".
    /// </summary>
    public class X : IX<KVP<string, int>>
    {
        /// <summary>
        /// ID string generated is "M:N.X.#ctor".
        /// </summary>
        public X() { }

        /// <summary>
        /// ID string generated is "M:N.X.#ctor(System.Int32)".
        /// </summary>
        /// <param name="i">Describe parameter.</param>
        public X(int i) { }

        /// <summary>
        /// ID string generated is "F:N.X.q".
        /// </summary>
        public string q;

        /// <summary>
        /// ID string generated is "F:N.X.PI".
        /// </summary>
        public const double PI = 3.14;

        /// <summary>
        /// ID string generated is "M:N.X.f".
        /// </summary>
        public int f() { return 1; }

        /// <summary>
        /// ID string generated is "M:N.X.bb(System.String,System.Int32@)".
        /// </summary>
        public int bb(string s, ref int y) { return 1; }

        /// <summary>
        /// ID string generated is "M:N.X.gg(System.Int16[],System.Int32[0:,0:])".
        /// </summary>
        public int gg(short[] array1, int[,] array) { return 0; }

        /// <summary>
        /// ID string generated is "M:N.X.op_Addition(N.X,N.X)".
        /// </summary>
        public static X operator +(X x, X xx) { return x; }

        /// <summary>
        /// ID string generated is "P:N.X.prop".
        /// </summary>
        public int prop { get { return 1; } set { } }

        /// <summary>
        /// ID string generated is "E:N.X.d".
        /// </summary>
#pragma warning disable 67
        public event D d;
#pragma warning restore 67

        /// <summary>
        /// ID string generated is "P:N.X.Item(System.String)".
        /// </summary>
        public int this[string s] { get { return 1; } }

        /// <summary>
        /// ID string generated is "T:N.X.Nested".
        /// </summary>
        public class Nested { }

        /// <summary>
        /// ID string generated is "T:N.X.D".
        /// </summary>
        public delegate void D(int i);

        /// <summary>
        /// ID string generated is "M:N.X.op_Explicit(N.X)~System.Int32".
        /// </summary>
        public static explicit operator int(X x) { return 1; }

        /// <summary>
        /// ID string generated is "M:N.X.Linq(System.Collections.Generic.IEnumerable{System.String},System.Func{System.String})".
        /// </summary>
        public static void Linq (IEnumerable<string> enumerable, Func<string> selector)
        {
        }

        /// <summary>
        /// ID string generated is "M:N.X.N#IX{N#KVP{System#String,System#Int32}}#IXA(N.KVP{System.String,System.Int32})"
        /// </summary>
        void IX<KVP<string, int>>.IXA (KVP<string, int> k)
        {
        }
    }

    public interface IX<K>
    {
        void IXA (K k);
    }

    public class KVP<K, T>
    {
    }
}

namespace Mono.Cecil.Tests {

    // Verifies DocCommentId.GetDocCommentId against the expected ID strings
    // declared in the N.X fixture above.
    [TestFixture]
    public class DocCommentIdTests {

        [Test]
        public void TypeDef ()
        {
            AssertDocumentID ("T:N.X", GetTestType ());
        }

        [Test]
        public void ParameterlessCtor ()
        {
            var type = GetTestType ();
            var ctor = type.GetConstructors ().Single (m => m.Parameters.Count == 0);
            AssertDocumentID ("M:N.X.#ctor", ctor);
        }

        [Test]
        public void CtorWithParameters ()
        {
            var type = GetTestType ();
            var ctor = type.GetConstructors ().Single (m => m.Parameters.Count == 1);
            AssertDocumentID ("M:N.X.#ctor(System.Int32)", ctor);
        }

        [Test]
        public void Field ()
        {
            var type = GetTestType ();
            var field = type.Fields.Single (m => m.Name == "q");
            AssertDocumentID ("F:N.X.q", field);
        }

        [Test]
        public void ConstField ()
        {
            var type = GetTestType ();
            var field = type.Fields.Single (m => m.Name == "PI");
            AssertDocumentID ("F:N.X.PI", field);
        }

        [Test]
        public void ParameterlessMethod ()
        {
            var type = GetTestType ();
            var method = type.Methods.Single (m => m.Name == "f");
            AssertDocumentID ("M:N.X.f", method);
        }

        [Test]
        public void MethodWithByRefParameters ()
        {
            var type = GetTestType ();
            var method = type.Methods.Single (m => m.Name == "bb");
            AssertDocumentID ("M:N.X.bb(System.String,System.Int32@)", method);
        }

        [Test]
        public void MethodWithArrayParameters ()
        {
            var type = GetTestType ();
            var method = type.Methods.Single (m => m.Name == "gg");
            AssertDocumentID ("M:N.X.gg(System.Int16[],System.Int32[0:,0:])", method);
        }

        [Test]
        public void OpAddition ()
        {
            var type = GetTestType ();
            var op = type.Methods.Single (m => m.Name == "op_Addition");
            AssertDocumentID ("M:N.X.op_Addition(N.X,N.X)", op);
        }

        [Test]
        public void OpExplicit ()
        {
            var type = GetTestType ();
            var op = type.Methods.Single (m => m.Name == "op_Explicit");
            // Conversion operators append "~" plus the return type.
            AssertDocumentID ("M:N.X.op_Explicit(N.X)~System.Int32", op);
        }

        [Test]
        public void Property ()
        {
            var type = GetTestType ();
            var property = type.Properties.Single (p => p.Name == "prop");
            AssertDocumentID ("P:N.X.prop", property);
        }

        [Test]
        public void Indexer ()
        {
            var type = GetTestType ();
            var indexer = type.Properties.Single (p => p.Name == "Item");
            AssertDocumentID ("P:N.X.Item(System.String)", indexer);
        }

        [Test]
        public void Event ()
        {
            var type = GetTestType ();
            var @event = type.Events.Single (e => e.Name == "d");
            AssertDocumentID ("E:N.X.d", @event);
        }

        [Test]
        public void Delegate ()
        {
            var type = GetTestType ();
            var @delegate = type.NestedTypes.Single (t => t.Name == "D");
            AssertDocumentID ("T:N.X.D", @delegate);
        }

        [Test]
        public void NestedType ()
        {
            var type = GetTestType ();
            var nestedType = type.NestedTypes.Single (t => t.Name == "Nested");
            AssertDocumentID ("T:N.X.Nested", nestedType);
        }

        [Test]
        public void Linq ()
        {
            var type = GetTestType ();
            var method = type.GetMethod ("Linq");
            // Generic instantiations in parameters use "{...}" instead of "<...>".
            AssertDocumentID ("M:N.X.Linq(System.Collections.Generic.IEnumerable{System.String},System.Func{System.String})", method);
        }

        [Test]
        public void EII ()
        {
            var type = GetTestType ();
            var method = type.Methods.Where (m => m.Name.Contains("IXA")).First ();
            // Explicit interface implementations encode '.' and ',' of the
            // interface name as '#' in the member-name portion.
            AssertDocumentID ("M:N.X.N#IX{N#KVP{System#String,System#Int32}}#IXA(N.KVP{System.String,System.Int32})", method);
        }

        // Resolves the N.X fixture declared above to a Cecil TypeDefinition.
        TypeDefinition GetTestType ()
        {
            return typeof (N.X).ToDefinition ();
        }

        static void AssertDocumentID (string docId, IMemberDefinition member)
        {
            Assert.AreEqual (docId, DocCommentId.GetDocCommentId (member));
        }
    }
}
// <copyright file="PausableFtpService.cs" company="Fubar Development Junker">
// Copyright (c) Fubar Development Junker. All rights reserved.
// </copyright>

using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;

namespace FubarDev.FtpServer.Networking
{
    /// <summary>
    /// Base class for communication services.
    /// </summary>
    /// <remarks>
    /// Lifecycle: ReadyToRun -> (StartAsync) Running -> (PauseAsync) Paused
    /// -> (ContinueAsync) Running -> (StopAsync) Stopped. The derived class
    /// supplies the actual work via <c>ExecuteAsync</c>; this base wraps it in
    /// <c>RunAsync</c>, which reports status transitions and invokes the
    /// On*Async callbacks.
    /// </remarks>
    public abstract class PausableFtpService : IPausableFtpService
    {
        // Cancelled exactly once when a stop is requested; never recreated.
        private readonly CancellationTokenSource _jobStopped = new CancellationTokenSource();

        private readonly CancellationToken _connectionClosed;

        // Recreated on every start/continue so an earlier pause request does
        // not immediately cancel the next run.
        private CancellationTokenSource _jobPaused = new CancellationTokenSource();

        // The task wrapping the current RunAsync invocation; completed while idle.
        private Task _task = Task.CompletedTask;

        // volatile: read/written from the service task and from callers of
        // Start/Stop/Pause/Continue.
        private volatile FtpServiceStatus _status = FtpServiceStatus.ReadyToRun;

        /// <summary>
        /// Initializes a new instance of the <see cref="PausableFtpService"/> class.
        /// </summary>
        /// <param name="logger">The logger.</param>
        /// <param name="connectionClosed">Cancellation token source for a closed connection.</param>
        protected PausableFtpService(
            CancellationToken connectionClosed,
            ILogger? logger = null)
        {
            Logger = logger;
            _connectionClosed = connectionClosed;
        }

        /// <inheritdoc />
        public FtpServiceStatus Status
        {
            get => _status;
            private set => _status = value;
        }

        // Optional logger; all callbacks below tolerate its absence.
        protected ILogger? Logger { get; }

        // True once the owning connection has been closed.
        protected bool IsConnectionClosed => _connectionClosed.IsCancellationRequested;

        // True once StopAsync has requested a stop (one-shot).
        protected bool IsStopRequested => _jobStopped.IsCancellationRequested;

        // True while the *current* pause token source is cancelled; reset by
        // Start/Continue creating a fresh token source.
        protected bool IsPauseRequested => _jobPaused.IsCancellationRequested;

        /// <summary>
        /// Gets a value indicating whether the service is running.
        /// </summary>
        protected bool IsRunning
        {
            get
            {
                var status = Status;
                return status == FtpServiceStatus.Running;
            }
        }

        /// <inheritdoc />
        public virtual async Task StartAsync(CancellationToken cancellationToken)
        {
            if (Status != FtpServiceStatus.ReadyToRun)
            {
                throw new InvalidOperationException($"Status must be {FtpServiceStatus.ReadyToRun}, but was {Status}.");
            }

            // Handshake: RunAsync releases the semaphore once it has reported
            // Running, so StartAsync does not return before the job is live.
            using var semaphore = new SemaphoreSlim(0, 1);
            _jobPaused = new CancellationTokenSource();
            _task = RunAsync(
                new Progress<FtpServiceStatus>(
                    status =>
                    {
                        Status = status;
                        if (status == FtpServiceStatus.Running)
                        {
                            // ReSharper disable once AccessToDisposedClosure
                            semaphore.Release();
                        }
                    }));

            await semaphore.WaitAsync(cancellationToken);
        }

        /// <inheritdoc />
        public virtual async Task StopAsync(CancellationToken cancellationToken)
        {
            var wasRunning = IsRunning;

            await OnStopRequestingAsync(cancellationToken)
               .ConfigureAwait(false);

            _jobStopped.Cancel();

            await OnStopRequestedAsync(cancellationToken)
               .ConfigureAwait(false);

            // Wait for RunAsync (and thus ExecuteAsync) to wind down.
            await _task
               .ConfigureAwait(false);

            Status = FtpServiceStatus.Stopped;

            // RunAsync raises OnStoppedAsync itself when it ends a live run;
            // raise it here only when there was no live run to do so (service
            // not running, or it had already exited through the paused branch).
            if (!wasRunning || IsPauseRequested)
            {
                await OnStoppedAsync(cancellationToken)
                   .ConfigureAwait(false);
            }
        }

        /// <inheritdoc />
        public virtual async Task PauseAsync(CancellationToken cancellationToken)
        {
            if (Status == FtpServiceStatus.Paused)
            {
                return;
            }

            if (!IsRunning)
            {
                throw new InvalidOperationException($"Status must be {FtpServiceStatus.Running}, but was {Status}.");
            }

            await OnPauseRequestingAsync(cancellationToken)
               .ConfigureAwait(false);

            _jobPaused.Cancel();

            await OnPauseRequestedAsync(cancellationToken)
               .ConfigureAwait(false);

            // Wait for the running job to observe the pause request.
            await _task
               .ConfigureAwait(false);

            Status = FtpServiceStatus.Paused;

            // NOTE(review): asymmetric with StopAsync's "!wasRunning ||
            // IsPauseRequested" guard. RunAsync raises OnPausedAsync in its
            // paused branch; this extra invocation fires only when a stop was
            // requested concurrently — confirm this is the intended contract.
            if (IsStopRequested)
            {
                await OnPausedAsync(cancellationToken)
                   .ConfigureAwait(false);
            }
        }

        /// <inheritdoc />
        public virtual async Task ContinueAsync(CancellationToken cancellationToken)
        {
            if (Status == FtpServiceStatus.Stopped)
            {
                // Stay stopped!
                return;
            }

            if (Status == FtpServiceStatus.Running)
            {
                // Already running!
                return;
            }

            if (Status != FtpServiceStatus.Paused)
            {
                throw new InvalidOperationException($"Status must be {FtpServiceStatus.Paused}, but was {Status}.");
            }

            // Fresh pause token; the old one is still cancelled from PauseAsync.
            _jobPaused = new CancellationTokenSource();

            await OnContinueRequestingAsync(cancellationToken)
               .ConfigureAwait(false);

            // Same handshake as StartAsync: wait until RunAsync reports Running.
            using var semaphore = new SemaphoreSlim(0, 1);
            _task = RunAsync(new Progress<FtpServiceStatus>(status =>
            {
                Status = status;
                if (status == FtpServiceStatus.Running)
                {
                    // ReSharper disable once AccessToDisposedClosure
                    semaphore.Release();
                }
            }));

            await semaphore.WaitAsync(cancellationToken)
               .ConfigureAwait(false);

            await OnContinuedAsync(cancellationToken)
               .ConfigureAwait(false);
        }

        // The actual work; cancelled when the connection closes, a stop is
        // requested, or a pause is requested (see RunAsync's linked token).
        protected abstract Task ExecuteAsync(
            CancellationToken cancellationToken);

        protected virtual Task OnStopRequestingAsync(
            CancellationToken cancellationToken)
        {
            Logger?.LogTrace("STOP requesting");
            return Task.CompletedTask;
        }

        protected virtual Task OnStopRequestedAsync(
            CancellationToken cancellationToken)
        {
            Logger?.LogTrace("STOP requested");
            return Task.CompletedTask;
        }

        protected virtual Task OnPauseRequestingAsync(
            CancellationToken cancellationToken)
        {
            Logger?.LogTrace("PAUSE requesting");
            return Task.CompletedTask;
        }

        protected virtual Task OnPauseRequestedAsync(
            CancellationToken cancellationToken)
        {
            Logger?.LogTrace("PAUSE requested");
            return Task.CompletedTask;
        }

        protected virtual Task OnContinueRequestingAsync(
            CancellationToken cancellationToken)
        {
            Logger?.LogTrace("CONTINUE requesting");
            return Task.CompletedTask;
        }

        protected virtual Task OnPausedAsync(
            CancellationToken cancellationToken)
        {
            Logger?.LogTrace("PAUSED");
            return Task.CompletedTask;
        }

        protected virtual Task OnStoppedAsync(
            CancellationToken cancellationToken)
        {
            Logger?.LogTrace("STOPPED");
            return Task.CompletedTask;
        }

        protected virtual Task OnContinuedAsync(
            CancellationToken cancellationToken)
        {
            Logger?.LogTrace("CONTINUED");
            return Task.CompletedTask;
        }

        // Return true to mark the exception handled; false makes RunAsync rethrow.
        protected virtual Task<bool> OnFailedAsync(
            Exception exception,
            CancellationToken cancellationToken)
        {
            Logger?.LogCritical(exception, "{ErrorMessage}", exception.Message);
            return Task.FromResult(false);
        }

        // Wrapper around ExecuteAsync: reports Running, maps the three cancel
        // sources into one token, swallows expected cancellation/IO errors, and
        // reports Paused or Stopped (with the matching callback) on exit.
        private async Task RunAsync(
            IProgress<FtpServiceStatus> statusProgress)
        {
            using (var globalCts = CancellationTokenSource.CreateLinkedTokenSource(
                _connectionClosed,
                _jobStopped.Token,
                _jobPaused.Token))
            {
                statusProgress.Report(FtpServiceStatus.Running);

                try
                {
                    await ExecuteAsync(globalCts.Token)
                       .ConfigureAwait(false);
                }
                catch (Exception ex) when (ex.Is<OperationCanceledException>())
                {
                    // Ignore - everything is fine
                    // Logger?.LogTrace("Operation cancelled");
                }
                catch (Exception ex) when (ex.Is<IOException>())
                {
                    // Ignore - everything is fine
                    // Logger?.LogTrace(0, ex, "I/O exception: {message}", ex.Message);
                }
                catch (Exception ex)
                {
                    Logger?.LogTrace(0, ex, "Failed: {message}", ex.Message);
                    var isHandled = await OnFailedAsync(ex, _connectionClosed)
                       .ConfigureAwait(false);
                    if (!isHandled)
                    {
                        throw;
                    }
                }
            }

            // Don't call Complete() when the job was just paused.
            if (IsPauseRequested)
            {
                statusProgress.Report(FtpServiceStatus.Paused);
                await OnPausedAsync(_connectionClosed)
                   .ConfigureAwait(false);
                return;
            }

            // Change the status
            statusProgress.Report(FtpServiceStatus.Stopped);

            await OnStoppedAsync(CancellationToken.None)
               .ConfigureAwait(false);
        }
    }
}
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.

using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.TeamFoundation.TestClient.PublishTestResults;
using Microsoft.TeamFoundation.TestManagement.WebApi;
using Microsoft.VisualStudio.Services.WebApi;
using Microsoft.TeamFoundation.Core.WebApi;
using Newtonsoft.Json;
using Newtonsoft.Json.Serialization;

namespace Microsoft.VisualStudio.Services.Agent.Worker.TestResults.Utils
{
    /// <summary>
    /// Helpers for publishing a test-run summary to the evidence store by stashing
    /// the serialized metadata in a uniquely named METADATA_* task variable.
    /// </summary>
    internal static class TestResultUtils
    {
        /// <summary>
        /// Serializes <paramref name="testRunSummary"/> as evidence-store metadata and stores it
        /// in a METADATA_&lt;guid&gt; task variable. Never throws; failures are logged at debug level.
        /// </summary>
        /// <param name="executionContext">Task execution context used for variables and logging.</param>
        /// <param name="testRunSummary">Aggregated pass/fail/skip counts for the run.</param>
        /// <param name="testRunner">Name of the test runner that produced the results.</param>
        /// <param name="name">Attestation identifier.</param>
        /// <param name="description">Optional human-readable description of the attestation.</param>
        public static void StoreTestRunSummaryInEnvVar(IExecutionContext executionContext, TestRunSummary testRunSummary, string testRunner, string name, string description = "")
        {
            try
            {
                string metadata = GetEvidenceStoreMetadata(executionContext, testRunSummary, testRunner, name, description);

                // This variable will be read by the PublishPipelineMetadataTask and published to the evidence store.
                string taskVariableName = "METADATA_" + Guid.NewGuid().ToString();
                executionContext.SetVariable(taskVariableName, metadata);
                executionContext.Debug($"Setting task variable {taskVariableName}: {metadata} ");
            }
            catch (Exception ex)
            {
                // Best-effort: evidence publishing must never fail the task.
                executionContext.Debug($"Unable to set the METADATA_* env variable, error details: {ex}");
            }
        }

        /// <summary>
        /// Builds the camelCase-serialized <see cref="EvidenceStoreMetadata"/> JSON payload,
        /// or an empty string if construction fails.
        /// </summary>
        private static string GetEvidenceStoreMetadata(IExecutionContext executionContext, TestRunSummary testRunSummary, string testRunner, string name, string description)
        {
            string evidenceStoreMetadataString = string.Empty;
            try
            {
                // Need these settings for converting the property name to camelCase, that's what is honored in the tasks.
                var camelCaseJsonSerializerSettings = new JsonSerializerSettings
                {
                    ContractResolver = new CamelCasePropertyNamesContractResolver()
                };

                TestAttestation testAttestation = new TestAttestation(name, testRunner, testRunSummary);
                TestMetadata testMetadata = new TestMetadata()
                {
                    Description = description,
                    HumanReadableName = "Test Results from Publish Test Results utility"
                };

                // Attach a pipeline link to both the metadata and the attestation when available.
                string pipelinesUrl = GetPipelinesUrl(executionContext);
                if (!string.IsNullOrEmpty(pipelinesUrl))
                {
                    var relatedUrls = new[] { new RelatedUrl() { Label = "pipeline-url", Url = pipelinesUrl } };
                    testMetadata.RelatedUrls = relatedUrls;
                    testAttestation.RelatedUrls = relatedUrls;
                }

                // The attestation itself travels as a nested JSON string inside the metadata.
                testMetadata.SerializedPayload = JsonConvert.SerializeObject(testAttestation, camelCaseJsonSerializerSettings);

                EvidenceStoreMetadata evidenceStoreMetadata = new EvidenceStoreMetadata()
                {
                    Name = Guid.NewGuid().ToString(),
                    ResourceUris = GetResourceUris(executionContext),
                    Metadata = testMetadata
                };

                evidenceStoreMetadataString = JsonConvert.SerializeObject(evidenceStoreMetadata, camelCaseJsonSerializerSettings);
            }
            catch (Exception ex)
            {
                executionContext.Debug($"Unable to construct evidence store metadata, error details: {ex}");
            }

            return evidenceStoreMetadataString;
        }

        /// <summary>
        /// Reads the comma-separated RESOURCE_URIS variable; returns an empty array when unset
        /// or on any failure.
        /// </summary>
        private static string[] GetResourceUris(IExecutionContext executionContext)
        {
            string[] resourceUris = { };
            try
            {
                var resourceUrisEnvVar = executionContext.GetVariableValueOrDefault("RESOURCE_URIS");
                executionContext.Debug("RESOURCE_URIS:" + resourceUrisEnvVar);
                if (!string.IsNullOrEmpty(resourceUrisEnvVar))
                {
                    resourceUris = resourceUrisEnvVar.Split(',');
                }
            }
            catch (Exception ex)
            {
                executionContext.Debug($"RESOURCE_URIS is not set or unable to get the variable, error details: {ex}");
            }

            return resourceUris;
        }

        /// <summary>
        /// Composes the web URL of the current build or release from pipeline variables;
        /// returns an empty string when any required variable is missing or on failure.
        /// </summary>
        private static string GetPipelinesUrl(IExecutionContext executionContext)
        {
            try
            {
                string hostType = executionContext.Variables.System_HostType.ToString();
                if (string.IsNullOrEmpty(hostType))
                {
                    return string.Empty;
                }

                bool isBuild = string.Equals(hostType, "build", StringComparison.OrdinalIgnoreCase);
                string pipeLineId = isBuild
                    ? executionContext.Variables.Build_BuildId.Value.ToString()
                    : executionContext.Variables.Release_ReleaseId;
                if (string.IsNullOrEmpty(pipeLineId))
                {
                    return string.Empty;
                }

                string baseUri = executionContext.Variables.System_TFCollectionUrl;
                string project = executionContext.Variables.System_TeamProject;
                if (string.IsNullOrEmpty(baseUri) || string.IsNullOrEmpty(project))
                {
                    return string.Empty;
                }

                string pipelineUri;
                if (isBuild)
                {
                    pipelineUri = $"{baseUri.TrimEnd('/')}/{project}/_build/results?buildId={pipeLineId}";
                }
                else
                {
                    pipelineUri = $"{baseUri.TrimEnd('/')}/{project}/_releaseProgress?releaseId={pipeLineId}";
                }

                return pipelineUri;
            }
            catch (Exception ex)
            {
                executionContext.Debug($"Unable to get pipelines url, error details: {ex}");
            }

            return string.Empty;
        }
    }

    /// <summary>Aggregated result counts for a test run. Public fields are JSON-serialized.</summary>
    internal class TestRunSummary
    {
        public TestRunSummary()
        {
            Total = 0;
            Failed = 0;
            Passed = 0;
            Skipped = 0;
        }

        public int Total;
        public int Failed;
        public int Passed;
        public int Skipped;
    }

    /// <summary>Attestation payload describing a test run; serialized into TestMetadata.SerializedPayload.</summary>
    internal class TestAttestation
    {
        public string TestId;
        public string TestTool;
        public TestRunSummary TestResultAttestation;
        public double TestDurationSeconds;
        public string TestPassPercentage;
        public RelatedUrl[] RelatedUrls;

        public TestAttestation(string testId, string testTool, TestRunSummary testRunSummary)
        {
            this.TestId = testId;
            this.TestTool = testTool;
            this.TestResultAttestation = testRunSummary;

            // Pass percentage over the non-skipped tests; 0 when nothing ran.
            // Formatted with the invariant culture so the serialized payload always uses
            // a '.' decimal separator regardless of the agent's locale.
            this.TestPassPercentage = (testRunSummary.Total > 0 && testRunSummary.Total - testRunSummary.Skipped > 0
                ? ((double)testRunSummary.Passed / (testRunSummary.Total - testRunSummary.Skipped)) * 100
                : 0).ToString(CultureInfo.InvariantCulture);

            // Will populate this in separate PR. As it required change in logic at client side.
            this.TestDurationSeconds = 0.0;
        }
    }

    /// <summary>Labelled hyperlink attached to metadata/attestations (e.g. the pipeline URL).</summary>
    internal class RelatedUrl
    {
        public string Url;
        public string Label;
    }

    /// <summary>Evidence-store metadata envelope for a single attestation.</summary>
    internal class TestMetadata
    {
        public string Description;
        public RelatedUrl[] RelatedUrls;
        public string HumanReadableName;
        public string SerializedPayload;
    }

    /// <summary>Top-level evidence-store record: name, covered resources, and metadata.</summary>
    internal class EvidenceStoreMetadata
    {
        public string Name;
        public string[] ResourceUris;
        public TestMetadata Metadata;
    }
}
// 
// Copyright (c) Microsoft and contributors.  All rights reserved.
// 
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//   http://www.apache.org/licenses/LICENSE-2.0
// 
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// 
// See the License for the specific language governing permissions and
// limitations under the License.
// 

// Warning: This code was generated by a tool.
// 
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.

using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Azure;
using Microsoft.WindowsAzure.Management.Network.Models;

namespace Microsoft.WindowsAzure.Management.Network
{
    /// <summary>
    /// The Network Management API includes operations for managing the routes
    /// for your subscription.
    /// </summary>
    public partial interface IRouteOperations
    {
        /// <summary>
        /// Validates and aborts the given route table's IaaS Classic to ARM
        /// migration, then waits for the long-running operation to finish.
        /// </summary>
        /// <param name='routeTableName'>
        /// Name of the routetable to be migrated.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// The final status of the asynchronous operation (succeeded, in
        /// progress, or failed) — distinct from the HTTP status of the Get
        /// Operation Status call itself — including the HTTP status code of
        /// the underlying request and error details on failure.
        /// </returns>
        Task<OperationStatusResponse> AbortMigrationAsync(string routeTableName, CancellationToken cancellationToken);

        /// <summary>
        /// Set the specified route table for the provided subnet in the
        /// provided virtual network in this subscription, then wait for the
        /// long-running operation to finish.
        /// </summary>
        /// <param name='vnetName'>
        /// The name of the virtual network that contains the provided subnet.
        /// </param>
        /// <param name='subnetName'>
        /// The name of the subnet that the route table will be added to.
        /// </param>
        /// <param name='parameters'>
        /// The parameters necessary to add a route table to the provided
        /// subnet.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// The final status of the asynchronous operation (succeeded, in
        /// progress, or failed), including the HTTP status code of the
        /// underlying request and error details on failure.
        /// </returns>
        Task<OperationStatusResponse> AddRouteTableToSubnetAsync(string vnetName, string subnetName, AddRouteTableToSubnetParameters parameters, CancellationToken cancellationToken);

        /// <summary>
        /// Begins aborting the given route table's IaaS Classic to ARM
        /// migration without waiting for completion.
        /// </summary>
        /// <param name='routeTableName'>
        /// Name of the routetable to be migrated.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// A standard service response including an HTTP status code and
        /// request ID.
        /// </returns>
        Task<AzureOperationResponse> BeginAbortMigrationAsync(string routeTableName, CancellationToken cancellationToken);

        /// <summary>
        /// Begins setting the specified route table for the provided subnet in
        /// the provided virtual network in this subscription.
        /// </summary>
        /// <param name='vnetName'>
        /// The name of the virtual network that contains the provided subnet.
        /// </param>
        /// <param name='subnetName'>
        /// The name of the subnet that the route table will be added to.
        /// </param>
        /// <param name='parameters'>
        /// The parameters necessary to add a route table to the provided
        /// subnet.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// A standard service response including an HTTP status code and
        /// request ID.
        /// </returns>
        Task<AzureOperationResponse> BeginAddRouteTableToSubnetAsync(string vnetName, string subnetName, AddRouteTableToSubnetParameters parameters, CancellationToken cancellationToken);

        /// <summary>
        /// Begins validating and committing the given route table's IaaS
        /// Classic to ARM migration.
        /// </summary>
        /// <param name='routeTableName'>
        /// Name of the routetable to be migrated.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// A standard service response including an HTTP status code and
        /// request ID.
        /// </returns>
        Task<AzureOperationResponse> BeginCommitMigrationAsync(string routeTableName, CancellationToken cancellationToken);

        /// <summary>
        /// Begins creating the specified route table for this subscription.
        /// </summary>
        /// <param name='parameters'>
        /// The parameters necessary to create a new route table.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// A standard service response including an HTTP status code and
        /// request ID.
        /// </returns>
        Task<AzureOperationResponse> BeginCreateRouteTableAsync(CreateRouteTableParameters parameters, CancellationToken cancellationToken);

        /// <summary>
        /// Begins deleting the specified route from the provided route table
        /// in this subscription.
        /// </summary>
        /// <param name='routeTableName'>
        /// The name of the route table that contains the route to delete.
        /// </param>
        /// <param name='routeName'>
        /// The name of the route to delete from the provided route table.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// A standard service response including an HTTP status code and
        /// request ID.
        /// </returns>
        Task<AzureOperationResponse> BeginDeleteRouteAsync(string routeTableName, string routeName, CancellationToken cancellationToken);

        /// <summary>
        /// Begins deleting the specified route table for this subscription.
        /// </summary>
        /// <param name='routeTableName'>
        /// The name of the route table to delete.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// A standard service response including an HTTP status code and
        /// request ID.
        /// </returns>
        Task<AzureOperationResponse> BeginDeleteRouteTableAsync(string routeTableName, CancellationToken cancellationToken);

        /// <summary>
        /// Begins validating and preparing the given route table for IaaS
        /// Classic to ARM migration.
        /// </summary>
        /// <param name='routeTableName'>
        /// Name of the routetable to be migrated.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// A standard service response including an HTTP status code and
        /// request ID.
        /// </returns>
        Task<AzureOperationResponse> BeginPrepareMigrationAsync(string routeTableName, CancellationToken cancellationToken);

        /// <summary>
        /// Begins removing the route table from the provided subnet in the
        /// provided virtual network in this subscription.
        /// </summary>
        /// <param name='vnetName'>
        /// The name of the virtual network that contains the provided subnet.
        /// </param>
        /// <param name='subnetName'>
        /// The name of the subnet that the route table will be removed from.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// A standard service response including an HTTP status code and
        /// request ID.
        /// </returns>
        Task<AzureOperationResponse> BeginRemoveRouteTableFromSubnetAsync(string vnetName, string subnetName, CancellationToken cancellationToken);

        /// <summary>
        /// Begins setting the specified route for the provided table in this
        /// subscription.
        /// </summary>
        /// <param name='routeTableName'>
        /// The name of the route table where the provided route will be set.
        /// </param>
        /// <param name='routeName'>
        /// The name of the route that will be set on the provided route table.
        /// </param>
        /// <param name='parameters'>
        /// The parameters necessary to set the route.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// A standard service response including an HTTP status code and
        /// request ID.
        /// </returns>
        Task<AzureOperationResponse> BeginSetRouteAsync(string routeTableName, string routeName, SetRouteParameters parameters, CancellationToken cancellationToken);

        /// <summary>
        /// Validates and commits the given route table's IaaS Classic to ARM
        /// migration, then waits for the long-running operation to finish.
        /// </summary>
        /// <param name='routeTableName'>
        /// Name of the routetable to be migrated.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// The final status of the asynchronous operation (succeeded, in
        /// progress, or failed), including the HTTP status code of the
        /// underlying request and error details on failure.
        /// </returns>
        Task<OperationStatusResponse> CommitMigrationAsync(string routeTableName, CancellationToken cancellationToken);

        /// <summary>
        /// Create the specified route table for this subscription and wait for
        /// the long-running operation to finish.
        /// </summary>
        /// <param name='parameters'>
        /// The parameters necessary to create a new route table.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// The final status of the asynchronous operation (succeeded, in
        /// progress, or failed), including the HTTP status code of the
        /// underlying request and error details on failure.
        /// </returns>
        Task<OperationStatusResponse> CreateRouteTableAsync(CreateRouteTableParameters parameters, CancellationToken cancellationToken);

        /// <summary>
        /// Delete the specified route from the provided route table in this
        /// subscription and wait for the long-running operation to finish.
        /// </summary>
        /// <param name='routeTableName'>
        /// The name of the route table that contains the route to delete.
        /// </param>
        /// <param name='routeName'>
        /// The name of the route to delete from the provided route table.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// The final status of the asynchronous operation (succeeded, in
        /// progress, or failed), including the HTTP status code of the
        /// underlying request and error details on failure.
        /// </returns>
        Task<OperationStatusResponse> DeleteRouteAsync(string routeTableName, string routeName, CancellationToken cancellationToken);

        /// <summary>
        /// Delete the specified route table for this subscription and wait for
        /// the long-running operation to finish.
        /// </summary>
        /// <param name='routeTableName'>
        /// The name of the route table to delete.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// The final status of the asynchronous operation (succeeded, in
        /// progress, or failed), including the HTTP status code of the
        /// underlying request and error details on failure.
        /// </returns>
        Task<OperationStatusResponse> DeleteRouteTableAsync(string routeTableName, CancellationToken cancellationToken);

        /// <summary>
        /// Get the effective route table for the provided network interface in
        /// this subscription.
        /// </summary>
        /// <param name='serviceName'>
        /// The name of the cloud service.
        /// </param>
        /// <param name='deploymentName'>
        /// The name of the deployment.
        /// </param>
        /// <param name='roleinstanceName'>
        /// The name of the role instance.
        /// </param>
        /// <param name='networkInterfaceName'>
        /// The name of the network interface.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// A standard service response including an HTTP status code and
        /// request ID.
        /// </returns>
        Task<GetEffectiveRouteTableResponse> GetEffectiveRouteTableForNetworkInterfaceAsync(string serviceName, string deploymentName, string roleinstanceName, string networkInterfaceName, CancellationToken cancellationToken);

        /// <summary>
        /// Get the effective route table for the provided role instance in
        /// this subscription.
        /// </summary>
        /// <param name='serviceName'>
        /// The name of the cloud service.
        /// </param>
        /// <param name='deploymentName'>
        /// The name of the deployment.
        /// </param>
        /// <param name='roleinstanceName'>
        /// The name of the role instance.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// A standard service response including an HTTP status code and
        /// request ID.
        /// </returns>
        Task<GetEffectiveRouteTableResponse> GetEffectiveRouteTableForRoleInstanceAsync(string serviceName, string deploymentName, string roleinstanceName, CancellationToken cancellationToken);

        /// <summary>
        /// Get the specified route table for this subscription.
        /// </summary>
        /// <param name='routeTableName'>
        /// The name of the route table in this subscription to retrieve.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// A standard service response including an HTTP status code and
        /// request ID.
        /// </returns>
        Task<GetRouteTableResponse> GetRouteTableAsync(string routeTableName, CancellationToken cancellationToken);

        /// <summary>
        /// Get the specified route table for the provided subnet in the
        /// provided virtual network in this subscription.
        /// </summary>
        /// <param name='vnetName'>
        /// The name of the virtual network that contains the provided subnet.
        /// </param>
        /// <param name='subnetName'>
        /// The name of the subnet.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// A standard service response including an HTTP status code and
        /// request ID.
        /// </returns>
        Task<GetRouteTableForSubnetResponse> GetRouteTableForSubnetAsync(string vnetName, string subnetName, CancellationToken cancellationToken);

        /// <summary>
        /// Get the specified route table for this subscription with the
        /// requested level of detail.
        /// </summary>
        /// <param name='routeTableName'>
        /// The name of the route table in this subscription to retrieve.
        /// </param>
        /// <param name='detailLevel'>
        /// The amount of detail about the requested route table that will be
        /// returned.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// A standard service response including an HTTP status code and
        /// request ID.
        /// </returns>
        Task<GetRouteTableResponse> GetRouteTableWithDetailsAsync(string routeTableName, string detailLevel, CancellationToken cancellationToken);

        /// <summary>
        /// List the existing route tables for this subscription.
        /// </summary>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// A standard service response including an HTTP status code and
        /// request ID.
        /// </returns>
        Task<ListRouteTablesResponse> ListRouteTablesAsync(CancellationToken cancellationToken);

        /// <summary>
        /// Validates and prepares the given route table for IaaS Classic to
        /// ARM migration, then waits for the long-running operation to finish.
        /// </summary>
        /// <param name='routeTableName'>
        /// Name of the routetable to be migrated.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// The final status of the asynchronous operation (succeeded, in
        /// progress, or failed), including the HTTP status code of the
        /// underlying request and error details on failure.
        /// </returns>
        Task<OperationStatusResponse> PrepareMigrationAsync(string routeTableName, CancellationToken cancellationToken);

        /// <summary>
        /// Remove the route table from the provided subnet in the provided
        /// virtual network in this subscription and wait for the long-running
        /// operation to finish.
        /// </summary>
        /// <param name='vnetName'>
        /// The name of the virtual network that contains the provided subnet.
        /// </param>
        /// <param name='subnetName'>
        /// The name of the subnet that the route table will be removed from.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// The final status of the asynchronous operation (succeeded, in
        /// progress, or failed), including the HTTP status code of the
        /// underlying request and error details on failure.
        /// </returns>
        Task<OperationStatusResponse> RemoveRouteTableFromSubnetAsync(string vnetName, string subnetName, CancellationToken cancellationToken);

        /// <summary>
        /// Set the specified route for the provided table in this subscription
        /// and wait for the long-running operation to finish.
        /// </summary>
        /// <param name='routeTableName'>
        /// The name of the route table where the provided route will be set.
        /// </param>
        /// <param name='routeName'>
        /// The name of the route that will be set on the provided route table.
        /// </param>
        /// <param name='parameters'>
        /// The parameters necessary to set the route.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// The final status of the asynchronous operation (succeeded, in
        /// progress, or failed), including the HTTP status code of the
        /// underlying request and error details on failure.
        /// </returns>
        Task<OperationStatusResponse> SetRouteAsync(string routeTableName, string routeName, SetRouteParameters parameters, CancellationToken cancellationToken);

        /// <summary>
        /// Validate routetable migration api validates the given routetable
        /// for IaaS Classic to ARM migration.
        /// </summary>
        /// <param name='routeTableName'>
        /// Name of the routetable to be migrated.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// The Validate Network Migration operation response.
        /// </returns>
        Task<NetworkMigrationValidationResponse> ValidateMigrationAsync(string routeTableName, CancellationToken cancellationToken);
    }
}
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using Autofac.Features.Metadata;
using Orchard.Environment.Extensions.Models;

namespace Orchard.UI.Resources {
    /// <summary>
    /// Tracks the scripts, stylesheets, links, and meta tags required during a request
    /// and resolves them against the registered resource manifests.
    /// Registered per unit of work (see IUnitOfWorkDependency), so state is request-scoped.
    /// </summary>
    public class ResourceManager : IResourceManager, IUnitOfWorkDependency {
        // Requirements keyed by (resourceType, resourceName).
        private readonly Dictionary<Tuple<String, String>, RequireSettings> _required = new Dictionary<Tuple<String, String>, RequireSettings>();
        private readonly List<LinkEntry> _links = new List<LinkEntry>();
        // Meta tags keyed by name; a default "generator" tag is always present.
        private readonly Dictionary<string, MetaEntry> _metas = new Dictionary<string, MetaEntry> {
            { "generator", new MetaEntry { Content = "Orchard", Name = "generator" } }
        };
        // Cache of resolved resource lists per type; an entry set to null marks the type dirty.
        private readonly Dictionary<string, IList<ResourceRequiredContext>> _builtResources = new Dictionary<string, IList<ResourceRequiredContext>>(StringComparer.OrdinalIgnoreCase);
        private readonly IEnumerable<Meta<IResourceManifestProvider>> _providers;
        // Manifest for resources defined on the fly via RequireSettings.Define().
        private ResourceManifest _dynamicManifest;
        private List<String> _headScripts;
        private List<String> _footScripts;
        // Lazily built from _providers; built at most once per instance.
        private IEnumerable<IResourceManifest> _manifests;
        // Special conditional-comment marker meaning "everything except IE".
        private const string NotIE = "!IE";

        // Converts a virtual path to app-relative ("~/...") form; absolute URIs pass through.
        private static string ToAppRelativePath(string resourcePath) {
            if (!String.IsNullOrEmpty(resourcePath) && !Uri.IsWellFormedUriString(resourcePath, UriKind.Absolute)) {
                resourcePath = VirtualPathUtility.ToAppRelative(resourcePath);
            }
            return resourcePath;
        }

        // Resolves a relative path (e.g. 'foo.js' or '../foo.js') against relativeFromPath;
        // absolute paths ("/foo.js") and absolute URIs ("http://..") pass through unchanged.
        private static string FixPath(string resourcePath, string relativeFromPath) {
            if (!String.IsNullOrEmpty(resourcePath) && !VirtualPathUtility.IsAbsolute(resourcePath) && !Uri.IsWellFormedUriString(resourcePath, UriKind.Absolute)) {
                // appears to be a relative path (e.g. 'foo.js' or '../foo.js', not "/foo.js" or "http://..")
                if (String.IsNullOrEmpty(relativeFromPath)) {
                    throw new InvalidOperationException("ResourcePath cannot be relative unless a base relative path is also provided.");
                }
                resourcePath = VirtualPathUtility.ToAbsolute(VirtualPathUtility.Combine(relativeFromPath, resourcePath));
            }
            return resourcePath;
        }

        // Builds the HTML tag for a resource, copying the resource's attributes and
        // setting its file-path attribute (e.g. src/href) to the resolved url.
        private static TagBuilder GetTagBuilder(ResourceDefinition resource, string url) {
            var tagBuilder = new TagBuilder(resource.TagName);
            tagBuilder.MergeAttributes(resource.TagBuilder.Attributes);
            if (!String.IsNullOrEmpty(resource.FilePathAttributeName)) {
                if (!String.IsNullOrEmpty(url)) {
                    if (VirtualPathUtility.IsAppRelative(url)) {
                        url = VirtualPathUtility.ToAbsolute(url);
                    }
                    tagBuilder.MergeAttribute(resource.FilePathAttributeName, url, true);
                }
            }
            return tagBuilder;
        }

        /// <summary>
        /// Writes the resource's tag to <paramref name="writer"/>, optionally wrapped in an
        /// IE conditional comment. The "!IE" condition uses the downlevel-revealed form so
        /// non-IE browsers still see the tag.
        /// </summary>
        public static void WriteResource(TextWriter writer, ResourceDefinition resource, string url, string condition, Dictionary<string, string> attributes) {
            if (!string.IsNullOrEmpty(condition)) {
                if (condition == NotIE) {
                    writer.WriteLine("<!--[if " + condition + "]>-->");
                }
                else {
                    writer.WriteLine("<!--[if " + condition + "]>");
                }
            }
            var tagBuilder = GetTagBuilder(resource, url);
            if (attributes != null) {
                // todo: try null value
                tagBuilder.MergeAttributes(attributes, true);
            }
            writer.WriteLine(tagBuilder.ToString(resource.TagRenderMode));
            if (!string.IsNullOrEmpty(condition)) {
                if (condition == NotIE) {
                    writer.WriteLine("<!--<![endif]-->");
                }
                else {
                    writer.WriteLine("<![endif]-->");
                }
            }
        }

        public ResourceManager(IEnumerable<Meta<IResourceManifestProvider>> resourceProviders) {
            _providers = resourceProviders;
        }

        /// <summary>
        /// All resource manifests contributed by the registered providers, built lazily
        /// on first access and cached for the lifetime of this instance.
        /// </summary>
        public IEnumerable<IResourceManifest> ResourceProviders {
            get {
                if (_manifests == null) {
                    var builder = new ResourceManifestBuilder();
                    foreach (var provider in _providers) {
                        // The owning feature is carried in Autofac metadata when available.
                        builder.Feature = provider.Metadata.ContainsKey("Feature") ? (Feature)provider.Metadata["Feature"] : null;
                        provider.Value.BuildManifests(builder);
                    }
                    _manifests = builder.ResourceManifests;
                }
                return _manifests;
            }
        }

        /// <summary>Manifest that holds resources defined on the fly (lazily created).</summary>
        public virtual ResourceManifest DynamicResources {
            get {
                return _dynamicManifest ?? (_dynamicManifest = new ResourceManifest());
            }
        }

        /// <summary>
        /// Declares that the named resource of the given type is required for this request,
        /// returning its (possibly pre-existing) settings for further configuration.
        /// Invalidates the built-resource cache for that type.
        /// </summary>
        public virtual RequireSettings Require(string resourceType, string resourceName) {
            if (resourceType == null) {
                throw new ArgumentNullException("resourceType");
            }
            if (resourceName == null) {
                throw new ArgumentNullException("resourceName");
            }
            RequireSettings settings;
            var key = new Tuple<string, string>(resourceType, resourceName);
            if (!_required.TryGetValue(key, out settings)) {
                settings = new RequireSettings { Type = resourceType, Name = resourceName };
                _required[key] = settings;
            }
            _builtResources[resourceType] = null;
            return settings;
        }

        /// <summary>Includes a resource by path, with an optional debug-mode path.</summary>
        public virtual RequireSettings Include(string resourceType, string resourcePath, string resourceDebugPath) {
            return Include(resourceType, resourcePath, resourceDebugPath, null);
        }

        /// <summary>
        /// Includes a resource by path, resolving app-relative and relative paths, and
        /// registers it as a dynamically defined requirement keyed by its app-relative path.
        /// </summary>
        public virtual RequireSettings Include(string resourceType, string resourcePath, string resourceDebugPath, string relativeFromPath) {
            if (resourceType == null) {
                throw new ArgumentNullException("resourceType");
            }
            if (resourcePath == null) {
                throw new ArgumentNullException("resourcePath");
            }

            // ~/ ==> convert to absolute path (e.g. /orchard/..)
            if (VirtualPathUtility.IsAppRelative(resourcePath)) {
                resourcePath = VirtualPathUtility.ToAbsolute(resourcePath);
            }
            if (resourceDebugPath != null && VirtualPathUtility.IsAppRelative(resourceDebugPath)) {
                resourceDebugPath = VirtualPathUtility.ToAbsolute(resourceDebugPath);
            }
            resourcePath = FixPath(resourcePath, relativeFromPath);
            resourceDebugPath = FixPath(resourceDebugPath, relativeFromPath);
            return Require(resourceType, ToAppRelativePath(resourcePath)).Define(d => d.SetUrl(resourcePath, resourceDebugPath));
        }

        /// <summary>Registers a raw script block to be emitted in the head.</summary>
        public virtual void RegisterHeadScript(string script) {
            if (_headScripts == null) {
                _headScripts = new List<string>();
            }
            _headScripts.Add(script);
        }

        /// <summary>Registers a raw script block to be emitted at the end of the body.</summary>
        public virtual void RegisterFootScript(string script) {
            if (_footScripts == null) {
                _footScripts = new List<string>();
            }
            _footScripts.Add(script);
        }

        /// <summary>
        /// Removes a previously declared requirement and invalidates the built-resource
        /// cache for that type.
        /// </summary>
        public virtual void NotRequired(string resourceType, string resourceName) {
            if (resourceType == null) {
                throw new ArgumentNullException("resourceType");
            }
            if (resourceName == null) {
                throw new ArgumentNullException("resourceName");
            }
            var key = new Tuple<string, string>(resourceType, resourceName);
            _builtResources[resourceType] = null;
            _required.Remove(key);
        }

        /// <summary>Finds the resource definition matching the given settings, resolving inline definitions.</summary>
        public virtual ResourceDefinition FindResource(RequireSettings settings) {
            return FindResource(settings, true);
        }

        private ResourceDefinition FindResource(RequireSettings settings, bool resolveInlineDefinitions) {
            // find the resource with the given type and name
            // that has at least the given version number. If multiple, return the resource
            // with the greatest version number.
            // If not found and an inlineDefinition is given, define the resource on the fly
            // using the action.
            var name = settings.Name ?? "";
            var type = settings.Type;
            // Search the provider manifests first (case-insensitive name match, highest version wins).
            var resource = (from p in ResourceProviders
                            from r in p.GetResources(type)
                            where name.Equals(r.Key, StringComparison.OrdinalIgnoreCase)
                            orderby r.Value.Version descending
                            select r.Value).FirstOrDefault();
            // Fall back to resources defined on the fly during this request.
            if (resource == null && _dynamicManifest != null) {
                resource = (from r in _dynamicManifest.GetResources(type)
                            where name.Equals(r.Key, StringComparison.OrdinalIgnoreCase)
                            orderby r.Value.Version descending
                            select r.Value).FirstOrDefault();
            }
            if (resolveInlineDefinitions && resource == null) {
                // Does not seem to exist, but it's possible it is being
                // defined by a Define() from a RequireSettings somewhere.
                if (ResolveInlineDefinitions(settings.Type)) {
                    // if any were defined, now try to find it
                    resource = FindResource(settings, false);
                }
            }
            return resource;
        }

        // Executes any pending inline definitions for the type; returns true when at least
        // one resource was newly defined (so the caller should retry the lookup).
        private bool ResolveInlineDefinitions(string resourceType) {
            bool anyWereDefined = false;
            foreach (var settings in GetRequiredResources(resourceType).Where(settings => settings.InlineDefinition != null)) {
                // defining it on the fly
                var resource = FindResource(settings, false);
                if (resource == null) {
                    // does not already exist, so define it
                    resource = DynamicResources.DefineResource(resourceType, settings.Name).SetBasePath(settings.BasePath);
                    anyWereDefined = true;
                }
                settings.InlineDefinition(resource);
                // Clear so the definition runs only once per settings object.
                settings.InlineDefinition = null;
            }
            return anyWereDefined;
        }

        /// <summary>All requirement settings declared for the given resource type.</summary>
        public virtual IEnumerable<RequireSettings> GetRequiredResources(string type) {
            return _required.Where(r => r.Key.Item1 == type).Select(r => r.Value);
        }

        public virtual IList<LinkEntry> GetRegisteredLinks() {
            return _links.AsReadOnly();
        }

        public virtual IList<MetaEntry> GetRegisteredMetas() {
            return _metas.Values.ToList().AsReadOnly();
        }

        // NOTE(review): returns null (not an empty list) when nothing was registered —
        // callers appear to rely on that; confirm before changing.
        public virtual IList<String> GetRegisteredHeadScripts() {
            return _headScripts == null ? null : _headScripts.AsReadOnly();
        }

        // NOTE(review): same null-when-empty contract as GetRegisteredHeadScripts.
        public virtual IList<String> GetRegisteredFootScripts() {
            return _footScripts == null ? null : _footScripts.AsReadOnly();
        }

        /// <summary>
        /// Resolves every required resource of the given type (expanding dependencies,
        /// preserving dependency-before-dependent order) and caches the result until the
        /// requirement set for the type changes.
        /// </summary>
        public virtual IList<ResourceRequiredContext> BuildRequiredResources(string resourceType) {
            IList<ResourceRequiredContext> requiredResources;
            if (_builtResources.TryGetValue(resourceType, out requiredResources) && requiredResources != null) {
                return requiredResources;
            }
            // OrderedDictionary keeps insertion order: dependencies are inserted before dependents.
            var allResources = new OrderedDictionary();
            foreach (var settings in GetRequiredResources(resourceType)) {
                var resource = FindResource(settings);
                if (resource == null) {
                    throw new InvalidOperationException(String.Format(CultureInfo.CurrentCulture, "A '{1}' named '{0}' could not be found.", settings.Name, settings.Type));
                }
                ExpandDependencies(resource, settings, allResources);
            }
            requiredResources = (from DictionaryEntry entry in allResources
                                 select new ResourceRequiredContext { Resource = (ResourceDefinition)entry.Key, Settings = (RequireSettings)entry.Value }).ToList();
            _builtResources[resourceType] = requiredResources;
            return requiredResources;
        }

        // Recursively adds the resource and its dependencies to allResources, merging
        // require settings so constraints (e.g. location) cascade into dependencies.
        protected virtual void ExpandDependencies(ResourceDefinition resource, RequireSettings settings, OrderedDictionary allResources) {
            if (resource == null) {
                return;
            }
            // Settings is given so they can cascade down into dependencies. For example,
            // if Foo depends on Bar, and Foo's required location is Head, so too should Bar's location.
            // forge the effective require settings for this resource
            // (1) If a require exists for the resource, combine with it. Last settings in gets preference for its specified values.
            // (2) If no require already exists, form a new settings object based on the given one but with its own type/name.
            settings = allResources.Contains(resource) ?
((RequireSettings)allResources[resource]).Combine(settings) : new RequireSettings { Type = resource.Type, Name = resource.Name }.Combine(settings); if (resource.Dependencies != null) { var dependencies = from d in resource.Dependencies select FindResource(new RequireSettings { Type = resource.Type, Name = d }); foreach (var dependency in dependencies) { if (dependency == null) { continue; } ExpandDependencies(dependency, settings, allResources); } } allResources[resource] = settings; } public void RegisterLink(LinkEntry link) { _links.Add(link); } public void SetMeta(MetaEntry meta) { if (meta == null) { return; } var index = meta.Name ?? meta.HttpEquiv ?? "charset"; _metas[index] = meta; } public void AppendMeta(MetaEntry meta, string contentSeparator) { if (meta == null) { return; } var index = meta.Name ?? meta.HttpEquiv; if (String.IsNullOrEmpty(index)) { return; } MetaEntry existingMeta; if (_metas.TryGetValue(index, out existingMeta)) { meta = MetaEntry.Combine(existingMeta, meta, contentSeparator); } _metas[index] = meta; } } }
using System;
using System.Collections.Generic;
using NUnit.Framework;
using NFluent;

namespace FileHelpers.Tests.CommonTests
{
    /// <summary>
    /// Tests for array-typed record fields: fixed-length and delimited layouts,
    /// min/max length validation, null handling and round-tripping.
    /// </summary>
    [TestFixture]
    public class ArrayFields
    {
        [Test]
        public void ArrayFields1()
        {
            var records = FileTest.Good.ArrayFields.ReadWithEngine<ArrayType1>();

            SimpleComparer(records);
        }

        [Test]
        public void ArrayFields2()
        {
            var records = FileTest.Good.ArrayFields.ReadWithEngine<ArrayType3>();

            SimpleComparer2(records);
        }

        [Test]
        public void ArrayFieldsString()
        {
            var records = FileTest.Good.ArrayFields.ReadWithEngine<ArrayTypeStrings>();

            SimpleComparerStrings(records);
        }

        // Verifies the string-typed fixed-length records (untrimmed CustomerID, trimmed arts).
        private static void SimpleComparer2(ArrayType3[] records)
        {
            Assert.AreEqual(3, records.Length);

            var first = records[0];
            Assert.AreEqual("58745", first.CustomerID);
            Assert.AreEqual("13", first.BuyedArts[0]);
            Assert.AreEqual("+8", first.BuyedArts[1]);
            Assert.AreEqual("+3", first.BuyedArts[2]);
            Assert.AreEqual("-7", first.BuyedArts[3]);
            Assert.AreEqual(20, first.BuyedArts.Length);

            var second = records[1];
            Assert.AreEqual("31245", second.CustomerID);
            Assert.AreEqual("6", second.BuyedArts[0]);
            Assert.AreEqual(17, second.BuyedArts.Length);

            var third = records[2];
            Assert.AreEqual(" 1245", third.CustomerID);
            Assert.AreEqual(0, third.BuyedArts.Length);
        }

        // Verifies the int-typed fixed-length records.
        private static void SimpleComparer(ArrayType1[] records)
        {
            Assert.AreEqual(3, records.Length);

            var first = records[0];
            Assert.AreEqual(58745, first.CustomerID);
            Assert.AreEqual(13, first.BuyedArts[0]);
            Assert.AreEqual(8, first.BuyedArts[1]);
            Assert.AreEqual(3, first.BuyedArts[2]);
            Assert.AreEqual(-7, first.BuyedArts[3]);
            Assert.AreEqual(20, first.BuyedArts.Length);

            var second = records[1];
            Assert.AreEqual(31245, second.CustomerID);
            Assert.AreEqual(6, second.BuyedArts[0]);
            Assert.AreEqual(17, second.BuyedArts.Length);

            var third = records[2];
            Assert.AreEqual(1245, third.CustomerID);
            Assert.AreEqual(0, third.BuyedArts.Length);
        }

        [Test]
        public void ArrayFieldsDelimited()
        {
            var records = FileTest.Good.ArrayFieldsDelimited.ReadWithEngine<ArrayTypeDelimited>();

            Assert.AreEqual(10, records.Length);
        }

        // Same data as SimpleComparer but with a string-typed arts array.
        private static void SimpleComparerStrings(ArrayTypeStrings[] records)
        {
            Assert.AreEqual(3, records.Length);

            var first = records[0];
            Assert.AreEqual(58745, first.CustomerID);
            Assert.AreEqual("13", first.BuyedArts[0]);
            Assert.AreEqual("+8", first.BuyedArts[1]);
            Assert.AreEqual("+3", first.BuyedArts[2]);
            Assert.AreEqual("-7", first.BuyedArts[3]);
            Assert.AreEqual(20, first.BuyedArts.Length);

            var second = records[1];
            Assert.AreEqual(31245, second.CustomerID);
            Assert.AreEqual("6", second.BuyedArts[0]);
            Assert.AreEqual(17, second.BuyedArts.Length);

            var third = records[2];
            Assert.AreEqual(1245, third.CustomerID);
            Assert.AreEqual(0, third.BuyedArts.Length);
        }

        [Test]
        public void ArrayFieldsRW()
        {
            var engine = new FileHelperEngine<ArrayType1>();

            // Read from disk, then write and re-read to verify a lossless round-trip.
            var records = engine.ReadFile(FileTest.Good.ArrayFields.Path);
            SimpleComparer(records);

            records = engine.ReadString(engine.WriteString(records));
            SimpleComparer(records);
        }

        /// <summary>
        /// TODO: Implement layout engine to handle this ArrayFieldsComplex test case.
        /// Test a class containing an array of other objects
        /// Objects are delimitted and finite so in theory should be parsable.
        /// </summary>
        [Test]
        [Ignore("Class containing an array in turn containing many fields is not yet supported")]
        public void ArrayFieldsComplex()
        {
            var engine = new FileHelperEngine<ArrayComplexType>();
            engine.ReadString("");
        }

        [Test]
        public void ArrayFieldsBad01()
        {
            // Non-array field decorated with FieldArrayLength must be rejected.
            Assert.Throws<BadUsageException>(() => new FileHelperEngine<ArrayTypeBad1>());
        }

        [Test]
        public void ArrayFieldsBad02()
        {
            Assert.Throws<BadUsageException>(() => new FileHelperEngine<ArrayTypeBad2>());
        }

        [Test]
        public void ArrayFieldsBad03()
        {
            Assert.Throws<BadUsageException>(() => new FileHelperEngine<ArrayTypeBad3>());
        }

        [Test]
        public void ArrayFieldsBad04()
        {
            Assert.Throws<BadUsageException>(() => new FileHelperEngine<ArrayTypeBad4>());
        }

        [Test]
        public void ArrayFieldsBad05()
        {
            Assert.Throws<BadUsageException>(() => new FileHelperEngine<ArrayTypeBad5>());
        }

        [Test]
        public void ArrayFieldsBad06()
        {
            Assert.Throws<BadUsageException>(() => new FileHelperEngine<ArrayTypeBad6>());
        }

        [Test]
        public void ArrayFieldsBad10()
        {
            // Records violating the declared exact array length are collected as errors.
            var engine = new FileHelperEngine<ArrayType2>();
            engine.ErrorManager.ErrorMode = ErrorMode.SaveAndContinue;

            var records = engine.ReadFile(FileTest.Good.ArrayFields2.Path);

            Assert.AreEqual(0, records.Length);
            Assert.AreEqual(2, engine.ErrorManager.ErrorCount);
            Assert.AreEqual(
                "Line: 1 Column: 33 Field: BuyedArts. The array has only 4 values, less than the minimum length of 5",
                engine.ErrorManager.Errors[0].ExceptionInfo.Message);
            Assert.AreEqual(
                "Line: 2 Column: 40 Field: BuyedArts. The array has more values than the maximum length of 5",
                engine.ErrorManager.Errors[1].ExceptionInfo.Message);
        }

        [Test]
        public void ArrayWriteMinErrorNull()
        {
            // Writing a null array where at least 2 elements are required must fail.
            Assert.Throws<InvalidOperationException>(() =>
            {
                var engine = new DelimitedFileEngine<ArrayModel2To4>();
                engine.WriteString(new[]
                {
                    new ArrayModel2To4 { Id = 1, Name = "name1", Weighting = null }
                });
            });
        }

        [Test]
        public void ArrayWriteMinError0()
        {
            Assert.Throws<InvalidOperationException>(() =>
            {
                var engine = new DelimitedFileEngine<ArrayModel2To4>();
                engine.WriteString(new[]
                {
                    new ArrayModel2To4 { Id = 1, Name = "name1", Weighting = new float[] {} }
                });
            });
        }

        [Test]
        public void ArrayWriteMinError1()
        {
            Assert.Throws<InvalidOperationException>(() =>
            {
                var engine = new DelimitedFileEngine<ArrayModel2To4>();
                engine.WriteString(new[]
                {
                    new ArrayModel2To4 { Id = 1, Name = "name1", Weighting = new float[] {10.2f} }
                });
            });
        }

        [Test]
        public void ArrayWriteMaxError5()
        {
            // Five elements exceeds the declared maximum of 4.
            Assert.Throws<InvalidOperationException>(() =>
            {
                var engine = new DelimitedFileEngine<ArrayModel2To4>();
                engine.WriteString(new[]
                {
                    new ArrayModel2To4 { Id = 1, Name = "name1", Weighting = new float[] {10.2f, 1, 2, 3, 4} }
                });
            });
        }

        [Test]
        public void ArrayWriteFloatFieldsNull()
        {
            var rows = new List<ArrayModel1>
            {
                new ArrayModel1 { Id = 1, Name = "name1", Weighting = null }
            };
            var engine = new DelimitedFileEngine<ArrayModel1>();

            var output = engine.WriteString(rows);
            Assert.AreEqual("1,name1," + Environment.NewLine, output);

            // A null array is written as empty and read back as a zero-length array.
            var readBack = engine.ReadString(output);
            Check.That(readBack.Length).IsEqualTo(1);
            Check.That(readBack[0].Weighting.Length).IsEqualTo(0);
        }

        [Test]
        public void ArrayReadFieldsNull()
        {
            var engine = new DelimitedFileEngine<ArrayModel1>();

            var records = engine.ReadString("1,name1,10.2,,30.5");

            Check.That(records.Length).IsEqualTo(1);
            Check.That(records[0].Weighting.Length).IsEqualTo(3);
            // The empty value picks up the FieldNullValue(-5f) declared on ArrayModel1.
            Check.That(records[0].Weighting[1]).IsEqualTo(-5f);
        }

        [Test]
        public void ArrayReadFieldsNullAndNullable()
        {
            var engine = new DelimitedFileEngine<ArrayModelNullable>();

            var records = engine.ReadString("1,name1,10.2,,30.5");

            Check.That(records.Length).IsEqualTo(1);
            Check.That(records[0].Weighting.Length).IsEqualTo(3);
            // With a nullable element type the empty value stays null.
            Check.That(records[0].Weighting[1]).IsEqualTo(null);
        }

        [Test]
        public void ArrayWriteFloatFields0()
        {
            var rows = new List<ArrayModel1>
            {
                new ArrayModel1 { Id = 1, Name = "name1", Weighting = new float[] {} }
            };
            var engine = new DelimitedFileEngine<ArrayModel1>();

            var output = engine.WriteString(rows);
            Assert.AreEqual("1,name1," + Environment.NewLine, output);

            var readBack = engine.ReadString(output);
            Check.That(readBack.Length).IsEqualTo(1);
            Check.That(readBack[0].Weighting.Length).IsEqualTo(0);
        }

        [Test]
        public void ArrayWriteFloatFieldsNullable()
        {
            var rows = new List<ArrayModelNullable>
            {
                new ArrayModelNullable { Id = 1, Name = "name1", Weighting = new float?[] {} }
            };
            var engine = new DelimitedFileEngine<ArrayModelNullable>();

            var output = engine.WriteString(rows);
            Assert.AreEqual("1,name1," + Environment.NewLine, output);

            var readBack = engine.ReadString(output);
            Check.That(readBack.Length).IsEqualTo(1);
            Check.That(readBack[0].Weighting.Length).IsEqualTo(0);
        }

        [Test]
        public void ArrayWriteFloatFields1()
        {
            var rows = new List<ArrayModel1>
            {
                new ArrayModel1 { Id = 1, Name = "name1", Weighting = new float[] {10.2f} }
            };
            var engine = new DelimitedFileEngine<ArrayModel1>();

            var output = engine.WriteString(rows);

            Assert.AreEqual("1,name1,10.2" + Environment.NewLine, output);
        }

        [Test]
        public void ArrayWriteFloatFields2()
        {
            var rows = new List<ArrayModel1>
            {
                new ArrayModel1 { Id = 1, Name = "name1", Weighting = new float[] {10.2f, 30.5f} }
            };
            var engine = new DelimitedFileEngine<ArrayModel1>();

            var output = engine.WriteString(rows);

            Assert.AreEqual("1,name1,10.2,30.5" + Environment.NewLine, output);
        }

        [Test]
        public void ArrayWriteFloatFields3()
        {
            var rows = new List<ArrayModel1>
            {
                new ArrayModel1 { Id = 1, Name = "name1", Weighting = new float[] {10.2f, 30.5f, 11f} }
            };
            var engine = new DelimitedFileEngine<ArrayModel1>();

            var output = engine.WriteString(rows);

            Assert.AreEqual("1,name1,10.2,30.5,11" + Environment.NewLine, output);
        }

        // Variable-length array (0..15) with a null substitution value.
        [DelimitedRecord(",")]
        public class ArrayModel1
        {
            public int Id;
            public string Name;

            [FieldNullValue(-5f)]
            [FieldArrayLength(0, 15)]
            public float[] Weighting;
        }

        // Same layout but with nullable elements (missing values stay null).
        [DelimitedRecord(",")]
        public class ArrayModelNullable
        {
            public int Id;
            public string Name;

            [FieldArrayLength(0, 15)]
            public float?[] Weighting;
        }

        // Array constrained to between 2 and 4 elements.
        [DelimitedRecord(",")]
        public class ArrayModel2To4
        {
            public int Id;
            public string Name;

            [FieldArrayLength(2, 4)]
            public float[] Weighting;
        }

        [FixedLengthRecord(FixedMode.ExactLength)]
        public class ArrayType1
        {
            [FieldFixedLength(5)]
            public int CustomerID;

            [FieldFixedLength(7)]
            public int[] BuyedArts;
        }

        [FixedLengthRecord(FixedMode.ExactLength)]
        public class ArrayTypeStrings
        {
            [FieldFixedLength(5)]
            public int CustomerID;

            [FieldFixedLength(7)]
            [FieldTrim(TrimMode.Both)]
            public string[] BuyedArts;
        }

        // Array with an exact required length of 5 (used by ArrayFieldsBad10).
        [FixedLengthRecord(FixedMode.ExactLength)]
        public class ArrayType2
        {
            [FieldFixedLength(5)]
            public int CustomerID;

            [FieldFixedLength(7)]
            [FieldArrayLength(5)]
            public int[] BuyedArts;
        }

        [FixedLengthRecord(FixedMode.ExactLength)]
        public class ArrayType3
        {
            [FieldFixedLength(5)]
            public string CustomerID;

            [FieldFixedLength(7)]
            [FieldTrim(TrimMode.Both)]
            public string[] BuyedArts;
        }

        // Invalid: FieldArrayLength on a non-array field.
        [DelimitedRecord("|")]
        public class ArrayTypeBad1
        {
            [FieldArrayLength(2, 30)]
            public int CustomerID;
        }
        // Invalid: jagged arrays are not supported (ArrayFieldsBad02 expects BadUsageException).
        [DelimitedRecord("|")]
        public class ArrayTypeBad2 {
            public int[][] JaggedArray;
        }

        // Invalid: multi-dimensional arrays are not supported (ArrayFieldsBad03).
        [DelimitedRecord("|")]
        public class ArrayTypeBad3 {
            public int[,] TableArray;
        }

        // Invalid: minimum length (20) greater than maximum (10) (ArrayFieldsBad04).
        [DelimitedRecord("|")]
        public class ArrayTypeBad4 {
            [FieldArrayLength(20, 10)]
            public int[] ArrayField;
        }

        // Invalid: array field followed by another field (ArrayFieldsBad05).
        [DelimitedRecord("|")]
        public class ArrayTypeBad5 {
            public int[] Customers;
            public int CustomerID;
        }

        // Invalid: combines the bad length range with a trailing field (ArrayFieldsBad06).
        [DelimitedRecord("|")]
        public class ArrayTypeBad6 {
            [FieldArrayLength(20, 10)]
            public int[] ArrayField;
            public int CustomerID;
        }
    }

    // Record consisting solely of a tab-delimited string array (ArrayFieldsDelimited).
    [DelimitedRecord("\t")]
    public class ArrayTypeDelimited {
        public string[] Values;
    }

    // Element type for the (currently unsupported) complex-array scenario.
    public class ArrayComplexSubClass {
        public string A;
        public string B;
        public DateTime C;
    }

    // Record containing an array of complex objects; see the ignored ArrayFieldsComplex test.
    [DelimitedRecord("\t")]
    public class ArrayComplexType {
        public string someOtherInfo;
        public string maybeAnotherThing;

        [FieldArrayLength(10)]
        public ArrayComplexSubClass[] internalClasses;
    }
}
/* *************************************************************************** * This file is part of SharpNEAT - Evolution of Neural Networks. * * Copyright 2004-2006, 2009-2010 Colin Green (sharpneat@gmail.com) * * SharpNEAT is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * SharpNEAT is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with SharpNEAT. If not, see <http://www.gnu.org/licenses/>. * * * * Changed (well hacked ...) by Christoph Kutza for Unity support. * */ #if NET4 using System; using System.Collections.Generic; using System.Diagnostics; using SharpNeat.Core; using SharpNeat.DistanceMetrics; using SharpNeat.EvolutionAlgorithms.ComplexityRegulation; using SharpNeat.SpeciationStrategies; using SharpNeat.Utility; namespace SharpNeat.EvolutionAlgorithms { /// <summary> /// Implementation of the NEAT evolution algorithm. /// Incorporates: /// - Speciation with fitness sharing. /// - Creating offspring via both sexual and asexual reproduction. 
/// </summary>
    /// <typeparam name="TGenome">The genome type that the algorithm will operate on.</typeparam>
    public class NeatEvolutionAlgorithm<TGenome> : AbstractGenerationalAlgorithm<TGenome>
        where TGenome : class, IGenome<TGenome>
    {
        // Active parameter set; swapped between the complexifying and simplifying
        // sets below as the complexity regulation mode changes.
        NeatEvolutionAlgorithmParameters _eaParams;
        readonly NeatEvolutionAlgorithmParameters _eaParamsComplexifying;
        readonly NeatEvolutionAlgorithmParameters _eaParamsSimplifying;

        // Strategy used to divide the population into species.
        readonly ISpeciationStrategy<TGenome> _speciationStrategy;
        IList<Specie<TGenome>> _specieList;
        /// <summary>Index of the specie that contains _currentBestGenome.</summary>
        int _bestSpecieIdx;
        readonly FastRandom _rng = new FastRandom();
        readonly NeatAlgorithmStats _stats;

        // Current complexity regulation mode and the strategy that determines it.
        ComplexityRegulationMode _complexityRegulationMode;
        readonly IComplexityRegulationStrategy _complexityRegulationStrategy;

        #region Constructors

        /// <summary>
        /// Constructs with the default NeatEvolutionAlgorithmParameters and speciation strategy
        /// (KMeansClusteringStrategy with ManhattanDistanceMetric).
        /// </summary>
        public NeatEvolutionAlgorithm()
        {
            _eaParams = new NeatEvolutionAlgorithmParameters();
            _eaParamsComplexifying = _eaParams;
            _eaParamsSimplifying = _eaParams.CreateSimplifyingParameters();
            _stats = new NeatAlgorithmStats(_eaParams);
            _speciationStrategy = new KMeansClusteringStrategy<TGenome>(new ManhattanDistanceMetric());

            _complexityRegulationMode = ComplexityRegulationMode.Complexifying;
            _complexityRegulationStrategy = new NullComplexityRegulationStrategy();
        }

        /// <summary>
        /// Constructs with the provided NeatEvolutionAlgorithmParameters and ISpeciationStrategy.
/// </summary>
        public NeatEvolutionAlgorithm(NeatEvolutionAlgorithmParameters eaParams,
                                      ISpeciationStrategy<TGenome> speciationStrategy,
                                      IComplexityRegulationStrategy complexityRegulationStrategy)
        {
            _eaParams = eaParams;
            _eaParamsComplexifying = _eaParams;
            _eaParamsSimplifying = _eaParams.CreateSimplifyingParameters();
            _stats = new NeatAlgorithmStats(_eaParams);
            _speciationStrategy = speciationStrategy;

            _complexityRegulationMode = ComplexityRegulationMode.Complexifying;
            _complexityRegulationStrategy = complexityRegulationStrategy;
        }

        #endregion

        #region Properties

        /// <summary>
        /// Gets a list of all current genomes. The current population of genomes. These genomes
        /// are also divided into the species available through the SpeciesList property.
        /// </summary>
        public IList<TGenome> GenomeList
        {
            get { return _genomeList; }
        }

        /// <summary>
        /// Gets a list of all current species. The genomes contained within the species are the same genomes
        /// available through the GenomeList property.
        /// </summary>
        public IList<Specie<TGenome>> SpecieList
        {
            get { return _specieList; }
        }

        /// <summary>
        /// Gets the algorithm statistics object.
        /// </summary>
        public NeatAlgorithmStats Statistics
        {
            get { return _stats; }
        }

        /// <summary>
        /// Gets the current complexity regulation mode.
        /// </summary>
        public ComplexityRegulationMode ComplexityRegulationMode
        {
            get { return _complexityRegulationMode; }
        }

        #endregion

        #region Public Methods [Initialization]

        /// <summary>
        /// Initializes the evolution algorithm with the provided IGenomeListEvaluator, IGenomeFactory
        /// and an initial population of genomes.
/// </summary>
        /// <param name="genomeListEvaluator">The genome evaluation scheme for the evolution algorithm.</param>
        /// <param name="genomeFactory">The factory that was used to create the genomeList and which is therefore referenced by the genomes.</param>
        /// <param name="genomeList">An initial genome population.</param>
        public override void Initialize(IGenomeListEvaluator<TGenome> genomeListEvaluator,
                                        IGenomeFactory<TGenome> genomeFactory,
                                        List<TGenome> genomeList)
        {
            base.Initialize(genomeListEvaluator, genomeFactory, genomeList);
            Initialize();
        }

        /// <summary>
        /// Initializes the evolution algorithm with the provided IGenomeListEvaluator
        /// and an IGenomeFactory that can be used to create an initial population of genomes.
        /// </summary>
        /// <param name="genomeListEvaluator">The genome evaluation scheme for the evolution algorithm.</param>
        /// <param name="genomeFactory">The factory that was used to create the genomeList and which is therefore referenced by the genomes.</param>
        /// <param name="populationSize">The number of genomes to create for the initial population.</param>
        public override void Initialize(IGenomeListEvaluator<TGenome> genomeListEvaluator,
                                        IGenomeFactory<TGenome> genomeFactory,
                                        int populationSize)
        {
            base.Initialize(genomeListEvaluator, genomeFactory, populationSize);
            Initialize();
        }

        /// <summary>
        /// Code common to both public Initialize methods.
        /// </summary>
        private void Initialize()
        {
            // Evaluate the genomes.
            _genomeListEvaluator.Evaluate(_genomeList);

            // Speciate the genomes.
            _specieList = _speciationStrategy.InitializeSpeciation(_genomeList, _eaParams.SpecieCount);
            Debug.Assert(!TestForEmptySpecies(_specieList), "Speciation resulted in one or more empty species.");

            // Sort the genomes in each specie fittest first, secondary sort youngest first.
            SortSpecieGenomes();

            // Store ref to best genome.
UpdateBestGenome();
        }

        #endregion

        #region Evolution Algorithm Main Method [PerformOneGeneration]

        /// <summary>
        /// Progress forward by one generation. Perform one generation/iteration of the evolution algorithm.
        /// </summary>
        protected override void PerformOneGeneration()
        {
            // Calculate statistics for each specie (mean fitness, target size, number of offspring to produce etc.)
            int offspringCount;
            SpecieStats[] specieStatsArr = CalcSpecieStats(out offspringCount);

            // Create offspring.
            List<TGenome> offspringList = CreateOffspring(specieStatsArr, offspringCount);

            // Trim species back to their elite genomes.
            bool emptySpeciesFlag = TrimSpeciesBackToElite(specieStatsArr);

            // Rebuild _genomeList. It will now contain just the elite genomes.
            RebuildGenomeList();

            // Append offspring genomes to the elite genomes in _genomeList. We do this before calling the
            // _genomeListEvaluator.Evaluate because some evaluation schemes re-evaluate the elite genomes
            // (otherwise we could just evaluate offspringList).
            _genomeList.AddRange(offspringList);

            // Evaluate genomes.
            _genomeListEvaluator.Evaluate(_genomeList);

            // Integrate offspring into species.
            if(emptySpeciesFlag)
            {
                // We have one or more terminated species. Therefore we need to fully re-speciate all genomes to divide them
                // evenly between the required number of species.

                // Clear all genomes from species (we still have the elite genomes in _genomeList).
                ClearAllSpecies();

                // Speciate genomeList.
                _speciationStrategy.SpeciateGenomes(_genomeList, _specieList);
            }
            else
            {
                // Integrate offspring into the existing species.
                _speciationStrategy.SpeciateOffspring(offspringList, _specieList);
            }
            Debug.Assert(!TestForEmptySpecies(_specieList), "Speciation resulted in one or more empty species.");

            // Sort the genomes in each specie. Fittest first (secondary sort - youngest first).
            SortSpecieGenomes();

            // Update stats and store reference to best genome.
UpdateBestGenome();
            UpdateStats();

            // Determine the complexity regulation mode and switch over to the appropriate set of evolution
            // algorithm parameters. Also notify the genome factory to allow it to modify how it creates genomes
            // (e.g. reduce or disable additive mutations).
            _complexityRegulationMode = _complexityRegulationStrategy.DetermineMode(_stats);
            _genomeFactory.SearchMode = (int)_complexityRegulationMode;
            switch(_complexityRegulationMode)
            {
                case ComplexityRegulationMode.Complexifying:
                    _eaParams = _eaParamsComplexifying;
                    break;
                case ComplexityRegulationMode.Simplifying:
                    _eaParams = _eaParamsSimplifying;
                    break;
            }

            // TODO: More checks.
            Debug.Assert(_genomeList.Count == _populationSize);
        }

        #endregion

        #region Private Methods [High Level Algorithm Methods. CalcSpecieStats/CreateOffspring]

        /// <summary>
        /// Calculate statistics for each specie. This method is at the heart of the evolutionary algorithm,
        /// the key things that are achieved in this method are - for each specie we calculate:
        ///  1) The target size based on fitness of the specie's member genomes.
        ///  2) The elite size based on the current size. Potentially this could be higher than the target
        ///     size, so a target size is taken to be a hard limit.
        ///  3) Following (1) and (2) we can calculate the total number offspring that need to be generated
        ///     for the current generation.
        /// </summary>
        private SpecieStats[] CalcSpecieStats(out int offspringCount)
        {
            double totalMeanFitness = 0.0;

            // Build stats array and get the mean fitness of each specie.
            int specieCount = _specieList.Count;
            SpecieStats[] specieStatsArr = new SpecieStats[specieCount];
            for(int i=0; i<specieCount; i++)
            {
                SpecieStats inst = new SpecieStats();
                specieStatsArr[i] = inst;
                inst._meanFitness = _specieList[i].CalcMeanFitness();
                totalMeanFitness += inst._meanFitness;
            }

            // Calculate the new target size of each specie using fitness sharing.
            // Keep a total of all allocated target sizes, typically this will vary slightly from the
            // overall target population size due to rounding of each real/fractional target size.
            int totalTargetSizeInt = 0;

            if(0.0 == totalMeanFitness)
            {
                // Handle specific case where all genomes/species have a zero fitness.
                // Assign all species an equal targetSize.
                double targetSizeReal = (double)_populationSize / (double)specieCount;

                for(int i=0; i<specieCount; i++)
                {
                    SpecieStats inst = specieStatsArr[i];
                    inst._targetSizeReal = targetSizeReal;

                    // Stochastic rounding will result in equal allocation if targetSizeReal is a whole
                    // number, otherwise it will help to distribute allocations evenly.
                    inst._targetSizeInt = (int)Utilities.ProbabilisticRound(targetSizeReal, _rng);

                    // Total up discretized target sizes.
                    totalTargetSizeInt += inst._targetSizeInt;
                }
            }
            else
            {
                // The size of each specie is based on its fitness relative to the other species.
                for(int i=0; i<specieCount; i++)
                {
                    SpecieStats inst = specieStatsArr[i];
                    inst._targetSizeReal = (inst._meanFitness / totalMeanFitness) * (double)_populationSize;

                    // Discretize targetSize (stochastic rounding).
                    inst._targetSizeInt = (int)Utilities.ProbabilisticRound(inst._targetSizeReal, _rng);

                    // Total up discretized target sizes.
                    totalTargetSizeInt += inst._targetSizeInt;
                }
            }

            // Discretized target sizes may total up to a value that is not equal to the required overall population
            // size. Here we check this and if there is a difference then we adjust the specie's targetSizeInt values
            // to compensate for the difference.
            //
            // E.g. If we are short of the required populationSize then we add the required additional allocation to
            // selected species based on the difference between each specie's targetSizeReal and targetSizeInt values.
            // What we're effectively doing here is assigning the additional required target allocation to species based
            // on their real target size in relation to their actual (integer) target size.
            // Those species that have an actual allocation below there real allocation (the difference will often
            // be a fractional amount) will be assigned extra allocation probabilistically, where the probability is
            // based on the differences between real and actual target values.
            //
            // Where the actual target allocation is higher than the required target (due to rounding up), we use the same
            // method but we adjust specie target sizes down rather than up.
            int targetSizeDeltaInt = totalTargetSizeInt - _populationSize;

            if(targetSizeDeltaInt < 0)
            {
                // Check for special case. If we are short by just 1 then increment targetSizeInt for the specie containing
                // the best genome. We always ensure that this specie has a minimum target size of 1 with a final test (below),
                // by incrementing here we avoid the probabilistic allocation below followed by a further correction if
                // the champ specie ended up with a zero target size.
                if(-1 == targetSizeDeltaInt)
                {
                    specieStatsArr[_bestSpecieIdx]._targetSizeInt++;
                }
                else
                {
                    // We are short of the required populationSize. Add the required additional allocations.
                    // Determine each specie's relative probability of receiving additional allocation.
                    double[] probabilities = new double[specieCount];
                    for(int i=0; i<specieCount; i++)
                    {
                        SpecieStats inst = specieStatsArr[i];
                        probabilities[i] = Math.Max(0.0, inst._targetSizeReal - (double)inst._targetSizeInt);
                    }

                    // Use a built in class for choosing an item based on a list of relative probabilities.
                    RouletteWheelLayout rwl = new RouletteWheelLayout(probabilities);

                    // Probabilistically assign the required number of additional allocations.
                    // ENHANCEMENT: We can improve the allocation fairness by updating the RouletteWheelLayout
                    // after each allocation (to reflect that allocation).
                    // targetSizeDeltaInt is negative, so flip the sign for code clarity.
                    targetSizeDeltaInt *= -1;
                    for(int i=0; i<targetSizeDeltaInt; i++)
                    {
                        int specieIdx = RouletteWheel.SingleThrow(rwl, _rng);
                        specieStatsArr[specieIdx]._targetSizeInt++;
                    }
                }
            }
            else if(targetSizeDeltaInt > 0)
            {
                // We have overshot the required populationSize. Adjust target sizes down to compensate.
                // Determine each specie's relative probability of target size downward adjustment.
                double[] probabilities = new double[specieCount];
                for(int i=0; i<specieCount; i++)
                {
                    SpecieStats inst = specieStatsArr[i];
                    probabilities[i] = Math.Max(0.0, (double)inst._targetSizeInt - inst._targetSizeReal);
                }

                // Use a built in class for choosing an item based on a list of relative probabilities.
                RouletteWheelLayout rwl = new RouletteWheelLayout(probabilities);

                // Probabilistically decrement specie target sizes.
                // ENHANCEMENT: We can improve the selection fairness by updating the RouletteWheelLayout
                // after each decrement (to reflect that decrement).
                for(int i=0; i<targetSizeDeltaInt;)
                {
                    int specieIdx = RouletteWheel.SingleThrow(rwl, _rng);

                    // Skip empty species. This can happen because the same species can be selected more than once.
                    if(0 != specieStatsArr[specieIdx]._targetSizeInt)
                    {
                        specieStatsArr[specieIdx]._targetSizeInt--;
                        i++;
                    }
                }
            }

            // We now have Sum(_targetSizeInt) == _populationSize.
            Debug.Assert(SumTargetSizeInt(specieStatsArr) == _populationSize);

            // TODO: Better way of ensuring champ species has non-zero target size?
            // However we need to check that the specie with the best genome has a non-zero targetSizeInt in order
            // to ensure that the best genome is preserved. A zero size may have been allocated in some pathological cases.
            if(0 == specieStatsArr[_bestSpecieIdx]._targetSizeInt)
            {
                specieStatsArr[_bestSpecieIdx]._targetSizeInt++;

                // Adjust down the target size of one of the other species to compensate.
                // Pick a specie at random (but not the champ specie). Note that this may result in a specie with a zero
                // target size, this is OK at this stage. We handle allocations of zero in PerformOneGeneration().
                int idx = RouletteWheel.SingleThrowEven(specieCount-1, _rng);
                idx = idx==_bestSpecieIdx ? idx+1 : idx;

                if(specieStatsArr[idx]._targetSizeInt > 0)
                {
                    specieStatsArr[idx]._targetSizeInt--;
                }
                else
                {
                    // Scan forward from this specie to find a suitable one.
                    bool done = false;
                    idx++;
                    for(; idx<specieCount; idx++)
                    {
                        if(idx != _bestSpecieIdx && specieStatsArr[idx]._targetSizeInt > 0)
                        {
                            specieStatsArr[idx]._targetSizeInt--;
                            done = true;
                            break;
                        }
                    }

                    // Scan forward from start of species list.
                    if(!done)
                    {
                        for(int i=0; i<specieCount; i++)
                        {
                            if(i != _bestSpecieIdx && specieStatsArr[i]._targetSizeInt > 0)
                            {
                                specieStatsArr[i]._targetSizeInt--;
                                done = true;
                                break;
                            }
                        }
                        if(!done)
                        {
                            throw new SharpNeatException("CalcSpecieStats(). Error adjusting target population size down. Is the population size less than or equal to the number of species?");
                        }
                    }
                }
            }

            // Now determine the eliteSize for each specie. This is the number of genomes that will remain in a
            // specie from the current generation and is a proportion of the specie's current size.
            // Also here we calculate the total number of offspring that will need to be generated.
            offspringCount = 0;
            for(int i=0; i<specieCount; i++)
            {
                // Special case - zero target size.
                if(0 == specieStatsArr[i]._targetSizeInt)
                {
                    specieStatsArr[i]._eliteSizeInt = 0;
                    continue;
                }

                // Discretize the real size with a probabilistic handling of the fractional part.
                double eliteSizeReal = _specieList[i].GenomeList.Count * _eaParams.ElitismProportion;
                int eliteSizeInt = (int)Utilities.ProbabilisticRound(eliteSizeReal, _rng);

                // Ensure eliteSizeInt is no larger than the current target size (remember it was calculated
                // against the current size of the specie not its new target size).
                SpecieStats inst = specieStatsArr[i];
                inst._eliteSizeInt = Math.Min(eliteSizeInt, inst._targetSizeInt);

                // Ensure the champ specie preserves the champ genome. We do this even if the targetsize is just 1
                // - which means the champ genome will remain and no offspring will be produced from it, apart from
                // the (usually small) chance of a cross-species mating.
                if(i == _bestSpecieIdx && inst._eliteSizeInt==0)
                {
                    Debug.Assert(inst._targetSizeInt !=0, "Zero target size assigned to champ specie.");
                    inst._eliteSizeInt = 1;
                }

                // Now we can determine how many offspring to produce for the specie.
                inst._offspringCount = inst._targetSizeInt - inst._eliteSizeInt;
                offspringCount += inst._offspringCount;

                // While we're here we determine the split between asexual and sexual reproduction. Again using
                // some probabilistic logic to compensate for any rounding bias.
                double offspringAsexualCountReal = (double)inst._offspringCount * _eaParams.OffspringAsexualProportion;
                inst._offspringAsexualCount = (int)Utilities.ProbabilisticRound(offspringAsexualCountReal, _rng);
                inst._offspringSexualCount = inst._offspringCount - inst._offspringAsexualCount;

                // Also while we're here we calculate the selectionSize. The number of the specie's fittest genomes
                // that are selected from to create offspring. This should always be at least 1.
                double selectionSizeReal = _specieList[i].GenomeList.Count * _eaParams.SelectionProportion;
                inst._selectionSizeInt = Math.Max(1, (int)Utilities.ProbabilisticRound(selectionSizeReal, _rng));
            }

            return specieStatsArr;
        }

        /// <summary>
        /// Create the required number of offspring genomes, using specieStatsArr as the basis for selecting how
        /// many offspring are produced from each species.
        /// </summary>
        private List<TGenome> CreateOffspring(SpecieStats[] specieStatsArr, int offspringCount)
        {
            // Build a RouletteWheelLayout for selecting species for cross-species reproduction.
            // While we're in the loop we also pre-build a RouletteWheelLayout for each specie;
            // Doing this before the main loop means we have RouletteWheelLayouts available for
            // all species when performing cross-specie matings.
int specieCount = specieStatsArr.Length; double[] specieFitnessArr = new double[specieCount]; RouletteWheelLayout[] rwlArr = new RouletteWheelLayout[specieCount]; // Count of species with non-zero selection size. // If this is exactly 1 then we skip inter-species mating. One is a special case because for 0 the // species all get an even chance of selection, and for >1 we can just select normally. int nonZeroSpecieCount = 0; for(int i=0; i<specieCount; i++) { // Array of probabilities for specie selection. Note that some of these probabilites can be zero, but at least one of them won't be. SpecieStats inst = specieStatsArr[i]; specieFitnessArr[i] = inst._selectionSizeInt; if(0 != inst._selectionSizeInt) { nonZeroSpecieCount++; } // For each specie we build a RouletteWheelLayout for genome selection within // that specie. Fitter genomes have higher probability of selection. List<TGenome> genomeList = _specieList[i].GenomeList; double[] probabilities = new double[inst._selectionSizeInt]; for(int j=0; j<inst._selectionSizeInt; j++) { probabilities[j] = genomeList[j].EvaluationInfo.Fitness; } rwlArr[i] = new RouletteWheelLayout(probabilities); } // Complete construction of RouletteWheelLayout for specie selection. RouletteWheelLayout rwlSpecies = new RouletteWheelLayout(specieFitnessArr); // Produce offspring from each specie in turn and store them in offspringList. List<TGenome> offspringList = new List<TGenome>(offspringCount); for(int specieIdx=0; specieIdx<specieCount; specieIdx++) { SpecieStats inst = specieStatsArr[specieIdx]; List<TGenome> genomeList = _specieList[specieIdx].GenomeList; // Get RouletteWheelLayout for genome selection. RouletteWheelLayout rwl = rwlArr[specieIdx]; // --- Produce the required number of offspring from asexual reproduction. 
for(int i=0; i<inst._offspringAsexualCount; i++) { int genomeIdx = RouletteWheel.SingleThrow(rwl, _rng); TGenome offspring = genomeList[genomeIdx].CreateOffspring(_currentGeneration); offspringList.Add(offspring); } _stats._asexualOffspringCount += (ulong)inst._offspringAsexualCount; // --- Produce the required number of offspring from sexual reproduction. // Cross-specie mating. // If nonZeroSpecieCount is exactly 1 then we skip inter-species mating. One is a special case because // for 0 the species all get an even chance of selection, and for >1 we can just select species normally. int crossSpecieMatings = nonZeroSpecieCount==1 ? 0 : (int)Utilities.ProbabilisticRound(_eaParams.InterspeciesMatingProportion * inst._offspringSexualCount, _rng); _stats._sexualOffspringCount += (ulong)(inst._offspringSexualCount - crossSpecieMatings); _stats._interspeciesOffspringCount += (ulong)crossSpecieMatings; // An index that keeps track of how many offspring have been produced in total. int matingsCount = 0; for(; matingsCount<crossSpecieMatings; matingsCount++) { TGenome offspring = CreateOffspring_CrossSpecieMating(rwl, rwlArr, rwlSpecies, specieIdx, genomeList); offspringList.Add(offspring); } // For the remainder we use normal intra-specie mating. // Test for special case - we only have one genome to select from in the current specie. if(1 == inst._selectionSizeInt) { // Fall-back to asexual reproduction. for(; matingsCount<inst._offspringSexualCount; matingsCount++) { int genomeIdx = RouletteWheel.SingleThrow(rwl, _rng); TGenome offspring = genomeList[genomeIdx].CreateOffspring(_currentGeneration); offspringList.Add(offspring); } } else { // Remainder of matings are normal within-specie. for(; matingsCount<inst._offspringSexualCount; matingsCount++) { // Select parents. 
SelectRouletteWheelItem() guarantees parent2Idx!=parent1Idx int parent1Idx = RouletteWheel.SingleThrow(rwl, _rng); TGenome parent1 = genomeList[parent1Idx]; // Remove selected parent from set of possible outcomes. RouletteWheelLayout rwlTmp = rwl.RemoveOutcome(parent1Idx); if(0.0 != rwlTmp.ProbabilitiesTotal) { // Get the two parents to mate. int parent2Idx = RouletteWheel.SingleThrow(rwlTmp, _rng); TGenome parent2 = genomeList[parent2Idx]; TGenome offspring = parent1.CreateOffspring(parent2, _currentGeneration); offspringList.Add(offspring); } else { // No other parent has a non-zero selection probability (they all have zero fitness). // Fall back to asexual reproduction of the single genome with a non-zero fitness. TGenome offspring = parent1.CreateOffspring(_currentGeneration); offspringList.Add(offspring); } } } } _stats._totalOffspringCount += (ulong)offspringCount; return offspringList; } /// <summary> /// Cross specie mating. /// </summary> /// <param name="rwl">RouletteWheelLayout for selectign genomes in teh current specie.</param> /// <param name="rwlArr">Array of RouletteWheelLayout objects for genome selection. One for each specie.</param> /// <param name="rwlSpecies">RouletteWheelLayout for selecting species. Based on relative fitness of species.</param> /// <param name="currentSpecieIdx">Current specie's index in _specieList</param> /// <param name="genomeList">Current specie's genome list.</param> private TGenome CreateOffspring_CrossSpecieMating(RouletteWheelLayout rwl, RouletteWheelLayout[] rwlArr, RouletteWheelLayout rwlSpecies, int currentSpecieIdx, IList<TGenome> genomeList) { // Select parent from current specie. int parent1Idx = RouletteWheel.SingleThrow(rwl, _rng); // Select specie other than current one for 2nd parent genome. RouletteWheelLayout rwlSpeciesTmp = rwlSpecies.RemoveOutcome(currentSpecieIdx); int specie2Idx = RouletteWheel.SingleThrow(rwlSpeciesTmp, _rng); // Select a parent genome from the second specie. 
int parent2Idx = RouletteWheel.SingleThrow(rwlArr[specie2Idx], _rng); // Get the two parents to mate. TGenome parent1 = genomeList[parent1Idx]; TGenome parent2 = _specieList[specie2Idx].GenomeList[parent2Idx]; return parent1.CreateOffspring(parent2, _currentGeneration); } #endregion #region Private Methods [Low Level Helper Methods] /// <summary> /// Updates _currentBestGenome and _bestSpecieIdx, these are the fittest genome and index of the specie /// containing the fittest genome respectively. /// /// This method assumes that all specie genomes are sorted fittest first and can therefore save much work /// by not having to scan all genomes. /// Note. We may have several genomes with equal best fitness, we just select one of them in that case. /// </summary> protected void UpdateBestGenome() { // If all genomes have the same fitness (including zero) then we simply return the first genome. TGenome bestGenome = null; double bestFitness = -1.0; int bestSpecieIdx = -1; int count = _specieList.Count; for(int i=0; i<count; i++) { // Get the specie's first genome. Genomes are sorted, therefore this is also the fittest // genome in the specie. TGenome genome = _specieList[i].GenomeList[0]; if(genome.EvaluationInfo.Fitness > bestFitness) { bestGenome = genome; bestFitness = genome.EvaluationInfo.Fitness; bestSpecieIdx = i; } } _currentBestGenome = bestGenome; _bestSpecieIdx = bestSpecieIdx; } /// <summary> /// Updates the NeatAlgorithmStats object. /// </summary> private void UpdateStats() { _stats._generation = _currentGeneration; _stats._totalEvaluationCount = _genomeListEvaluator.EvaluationCount; // Evaluation per second. DateTime now = DateTime.Now; TimeSpan duration = now - _stats._evalsPerSecLastSampleTime; // To smooth out the evals per sec statistic we only update if at least 1 second has elapsed // since it was last updated. 
if(duration.Ticks > 9999) { long evalsSinceLastUpdate = (long)(_genomeListEvaluator.EvaluationCount - _stats._evalsCountAtLastUpdate); _stats._evaluationsPerSec = (int)((evalsSinceLastUpdate*1e7) / duration.Ticks); // Reset working variables. _stats._evalsCountAtLastUpdate = _genomeListEvaluator.EvaluationCount; _stats._evalsPerSecLastSampleTime = now; } // Fitness and complexity stats. double totalFitness = _genomeList[0].EvaluationInfo.Fitness; double totalComplexity = _genomeList[0].Complexity; double maxComplexity = totalComplexity; int count = _genomeList.Count; for(int i=1; i<count; i++) { totalFitness += _genomeList[i].EvaluationInfo.Fitness; totalComplexity += _genomeList[i].Complexity; maxComplexity = Math.Max(maxComplexity, _genomeList[i].Complexity); } _stats._maxFitness = _currentBestGenome.EvaluationInfo.Fitness; _stats._meanFitness = totalFitness / count; _stats._maxComplexity = maxComplexity; _stats._meanComplexity = totalComplexity / count; // Specie champs mean fitness. double totalSpecieChampFitness = _specieList[0].GenomeList[0].EvaluationInfo.Fitness; int specieCount = _specieList.Count; for(int i=1; i<specieCount; i++) { totalSpecieChampFitness += _specieList[i].GenomeList[0].EvaluationInfo.Fitness; } _stats._meanSpecieChampFitness = totalSpecieChampFitness / specieCount; // Moving averages. _stats._prevBestFitnessMA = _stats._bestFitnessMA.Mean; _stats._bestFitnessMA.Enqueue(_stats._maxFitness); _stats._prevMeanSpecieChampFitnessMA = _stats._meanSpecieChampFitnessMA.Mean; _stats._meanSpecieChampFitnessMA.Enqueue(_stats._meanSpecieChampFitness); _stats._prevComplexityMA = _stats._complexityMA.Mean; _stats._complexityMA.Enqueue(_stats._meanComplexity); } /// <summary> /// Sorts the genomes within each species fittest first, secondary sorts on age. 
/// </summary> private void SortSpecieGenomes() { int minSize = _specieList[0].GenomeList.Count; int maxSize = minSize; int specieCount = _specieList.Count; for(int i=0; i<specieCount; i++) { _specieList[i].GenomeList.Sort(GenomeFitnessComparer<TGenome>.Singleton); minSize = Math.Min(minSize, _specieList[i].GenomeList.Count); maxSize = Math.Max(maxSize, _specieList[i].GenomeList.Count); } // Update stats. _stats._minSpecieSize = minSize; _stats._maxSpecieSize = maxSize; } /// <summary> /// Clear the genome list within each specie. /// </summary> private void ClearAllSpecies() { foreach(Specie<TGenome> specie in _specieList) { specie.GenomeList.Clear(); } } /// <summary> /// Rebuild _genomeList from genomes held within the species. /// </summary> private void RebuildGenomeList() { _genomeList.Clear(); foreach(Specie<TGenome> specie in _specieList) { _genomeList.AddRange(specie.GenomeList); } } /// <summary> /// Trims the genomeList in each specie back to the number of elite genomes specified in /// specieStatsArr. Returns true if there are empty species following trimming. /// </summary> private bool TrimSpeciesBackToElite(SpecieStats[] specieStatsArr) { bool emptySpeciesFlag = false; int count = _specieList.Count; for(int i=0; i<count; i++) { Specie<TGenome> specie = _specieList[i]; SpecieStats stats = specieStatsArr[i]; int removeCount = specie.GenomeList.Count - stats._eliteSizeInt; specie.GenomeList.RemoveRange(stats._eliteSizeInt, removeCount); if(0 == stats._eliteSizeInt) { emptySpeciesFlag = true; } } return emptySpeciesFlag; } #endregion #region Private Methods [Debugging] /// <summary> /// Returns true if there is one or more empty species. 
/// </summary> private bool TestForEmptySpecies(IList<Specie<TGenome>> specieList) { foreach(Specie<TGenome> specie in specieList) { if(specie.GenomeList.Count == 0) { return true; } } return false; } private void DumpSpecieCounts(SpecieStats[] specieStatsArr) { int count = specieStatsArr.Length; for(int i=0; i<count; i++) { Debug.Write("[" + _specieList[i].GenomeList.Count.ToString() + "," + specieStatsArr[i]._targetSizeInt + "] " ); } Debug.WriteLine(String.Empty); } private static int SumTargetSizeInt(SpecieStats[] specieStatsArr) { int total = 0; foreach(SpecieStats inst in specieStatsArr) { total += inst._targetSizeInt; } return total; } #endregion #region InnerClass [SpecieStats] class SpecieStats { // Real/continuous stats. public double _meanFitness; public double _targetSizeReal; // Integer stats. public int _targetSizeInt; public int _eliteSizeInt; public int _offspringCount; public int _offspringAsexualCount; public int _offspringSexualCount; // Selection data. public int _selectionSizeInt; } #endregion } } #endif
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

//------------------------------------------------------------------------------

using System.Diagnostics;
using System.Data.SqlTypes;
using System.Globalization;
using System.Runtime.InteropServices;

namespace System.Data.SqlClient
{
    // Discriminated-union style holder for a single SQL column value. The active member is
    // tracked by _type; value-typed data lives in the overlapped Storage struct (no boxing),
    // reference-typed data lives in _object.
    internal sealed class SqlBuffer
    {
        internal enum StorageType : byte
        {
            Empty = 0,
            Boolean,
            Byte,
            DateTime,
            Decimal,
            Double,
            Int16,
            Int32,
            Int64,
            Money,
            Single,
            String,
            SqlBinary,
            SqlCachedBuffer,
            SqlGuid,
            SqlXml,
            Date,
            DateTime2,
            DateTimeOffset,
            Time,
        }

        internal struct DateTimeInfo
        {
            // This is used to store DateTime
            internal Int32 daypart;
            internal Int32 timepart;
        }

        internal struct NumericInfo
        {
            // This is used to store Decimal data
            internal Int32 data1;
            internal Int32 data2;
            internal Int32 data3;
            internal Int32 data4;
            internal Byte precision;
            internal Byte scale;
            internal Boolean positive;
        }

        internal struct TimeInfo
        {
            internal Int64 ticks;
            internal byte scale;
        }

        internal struct DateTime2Info
        {
            internal Int32 date;
            internal TimeInfo timeInfo;
        }

        internal struct DateTimeOffsetInfo
        {
            internal DateTime2Info dateTime2Info;
            internal Int16 offset;
        }

        // C-style union: every field shares offset 0, so only the member matching _type is valid.
        [StructLayout(LayoutKind.Explicit)]
        internal struct Storage
        {
            [FieldOffset(0)]
            internal Boolean _boolean;
            [FieldOffset(0)]
            internal Byte _byte;
            [FieldOffset(0)]
            internal DateTimeInfo _dateTimeInfo;
            [FieldOffset(0)]
            internal Double _double;
            [FieldOffset(0)]
            internal NumericInfo _numericInfo;
            [FieldOffset(0)]
            internal Int16 _int16;
            [FieldOffset(0)]
            internal Int32 _int32;
            [FieldOffset(0)]
            internal Int64 _int64;    // also used to store Money, UtcDateTime, Date , and Time
            [FieldOffset(0)]
            internal Single _single;
            [FieldOffset(0)]
            internal TimeInfo _timeInfo;
            [FieldOffset(0)]
            internal DateTime2Info _dateTime2Info;
            [FieldOffset(0)]
            internal DateTimeOffsetInfo _dateTimeOffsetInfo;
        }

        private bool _isNull;
        private StorageType _type;
        private Storage _value;
        private object _object;    // String, SqlBinary, SqlCachedBuffer, SqlGuid, SqlString, SqlXml

        internal SqlBuffer()
        {
        }

        private SqlBuffer(SqlBuffer value)
        {   // Clone
            // value types
            _isNull = value._isNull;
            _type = value._type;
            // ref types - should also be read only unless at some point we allow this data
            // to be mutable, then we will need to copy
            _value = value._value;
            _object = value._object;
        }

        internal bool IsEmpty
        {
            get
            {
                return (StorageType.Empty == _type);
            }
        }

        internal bool IsNull
        {
            get
            {
                return _isNull;
            }
        }

        internal StorageType VariantInternalStorageType
        {
            get
            {
                return _type;
            }
        }

        internal Boolean Boolean
        {
            get
            {
                ThrowIfNull();
                if (StorageType.Boolean == _type)
                {
                    return _value._boolean;
                }
                return (Boolean)this.Value; // anything else we haven't thought of goes through boxing.
            }
            set
            {
                Debug.Assert(IsEmpty, "setting value a second time?");
                _value._boolean = value;
                _type = StorageType.Boolean;
                _isNull = false;
            }
        }

        internal Byte Byte
        {
            get
            {
                ThrowIfNull();
                if (StorageType.Byte == _type)
                {
                    return _value._byte;
                }
                return (Byte)this.Value; // anything else we haven't thought of goes through boxing.
            }
            set
            {
                Debug.Assert(IsEmpty, "setting value a second time?");
                _value._byte = value;
                _type = StorageType.Byte;
                _isNull = false;
            }
        }

        internal Byte[] ByteArray
        {
            get
            {
                ThrowIfNull();
                return this.SqlBinary.Value;
            }
        }

        internal DateTime DateTime
        {
            get
            {
                ThrowIfNull();
                if (StorageType.Date == _type)
                {
                    // Date is stored as the day count offset from DateTime.MinValue.
                    return DateTime.MinValue.AddDays(_value._int32);
                }
                if (StorageType.DateTime2 == _type)
                {
                    return new DateTime(GetTicksFromDateTime2Info(_value._dateTime2Info));
                }
                if (StorageType.DateTime == _type)
                {
                    return SqlDateTime.ToDateTime(_value._dateTimeInfo.daypart, _value._dateTimeInfo.timepart);
                }
                return (DateTime)this.Value; // anything else we haven't thought of goes through boxing.
            }
        }

        internal Decimal Decimal
        {
            get
            {
                ThrowIfNull();
                if (StorageType.Decimal == _type)
                {
                    // System.Decimal has only 96 bits of mantissa and max scale 28; a SQL decimal
                    // using the 4th data word or a larger scale cannot be represented.
                    if (_value._numericInfo.data4 != 0 || _value._numericInfo.scale > 28)
                    {
                        throw new OverflowException(SQLResource.ConversionOverflowMessage);
                    }
                    return new Decimal(_value._numericInfo.data1, _value._numericInfo.data2, _value._numericInfo.data3, !_value._numericInfo.positive, _value._numericInfo.scale);
                }
                if (StorageType.Money == _type)
                {
                    // Money is a scaled Int64 (4 decimal digits); decode sign then split into lo/mid words.
                    long l = _value._int64;
                    bool isNegative = false;
                    if (l < 0)
                    {
                        isNegative = true;
                        l = -l;
                    }
                    return new Decimal((int)(l & 0xffffffff), (int)(l >> 32), 0, isNegative, 4);
                }
                return (Decimal)this.Value; // anything else we haven't thought of goes through boxing.
            }
        }

        internal Double Double
        {
            get
            {
                ThrowIfNull();
                if (StorageType.Double == _type)
                {
                    return _value._double;
                }
                return (Double)this.Value; // anything else we haven't thought of goes through boxing.
            }
            set
            {
                Debug.Assert(IsEmpty, "setting value a second time?");
                _value._double = value;
                _type = StorageType.Double;
                _isNull = false;
            }
        }

        internal Guid Guid
        {
            get
            {
                ThrowIfNull();
                return this.SqlGuid.Value;
            }
        }

        internal Int16 Int16
        {
            get
            {
                ThrowIfNull();
                if (StorageType.Int16 == _type)
                {
                    return _value._int16;
                }
                return (Int16)this.Value; // anything else we haven't thought of goes through boxing.
            }
            set
            {
                Debug.Assert(IsEmpty, "setting value a second time?");
                _value._int16 = value;
                _type = StorageType.Int16;
                _isNull = false;
            }
        }

        internal Int32 Int32
        {
            get
            {
                ThrowIfNull();
                if (StorageType.Int32 == _type)
                {
                    return _value._int32;
                }
                return (Int32)this.Value; // anything else we haven't thought of goes through boxing.
            }
            set
            {
                Debug.Assert(IsEmpty, "setting value a second time?");
                _value._int32 = value;
                _type = StorageType.Int32;
                _isNull = false;
            }
        }

        internal Int64 Int64
        {
            get
            {
                ThrowIfNull();
                if (StorageType.Int64 == _type)
                {
                    return _value._int64;
                }
                return (Int64)this.Value; // anything else we haven't thought of goes through boxing.
            }
            set
            {
                Debug.Assert(IsEmpty, "setting value a second time?");
                _value._int64 = value;
                _type = StorageType.Int64;
                _isNull = false;
            }
        }

        internal Single Single
        {
            get
            {
                ThrowIfNull();
                if (StorageType.Single == _type)
                {
                    return _value._single;
                }
                return (Single)this.Value; // anything else we haven't thought of goes through boxing.
            }
            set
            {
                Debug.Assert(IsEmpty, "setting value a second time?");
                _value._single = value;
                _type = StorageType.Single;
                _isNull = false;
            }
        }

        internal String String
        {
            get
            {
                ThrowIfNull();
                if (StorageType.String == _type)
                {
                    return (String)_object;
                }
                else if (StorageType.SqlCachedBuffer == _type)
                {
                    return ((SqlCachedBuffer)(_object)).ToString();
                }
                return (String)this.Value; // anything else we haven't thought of goes through boxing.
            }
        }

        // use static list of format strings indexed by scale for perf
        private static readonly string[] s_katmaiDateTimeOffsetFormatByScale = new string[] {
                "yyyy-MM-dd HH:mm:ss zzz",
                "yyyy-MM-dd HH:mm:ss.f zzz",
                "yyyy-MM-dd HH:mm:ss.ff zzz",
                "yyyy-MM-dd HH:mm:ss.fff zzz",
                "yyyy-MM-dd HH:mm:ss.ffff zzz",
                "yyyy-MM-dd HH:mm:ss.fffff zzz",
                "yyyy-MM-dd HH:mm:ss.ffffff zzz",
                "yyyy-MM-dd HH:mm:ss.fffffff zzz",
        };

        private static readonly string[] s_katmaiDateTime2FormatByScale = new string[] {
                "yyyy-MM-dd HH:mm:ss",
                "yyyy-MM-dd HH:mm:ss.f",
                "yyyy-MM-dd HH:mm:ss.ff",
                "yyyy-MM-dd HH:mm:ss.fff",
                "yyyy-MM-dd HH:mm:ss.ffff",
                "yyyy-MM-dd HH:mm:ss.fffff",
                "yyyy-MM-dd HH:mm:ss.ffffff",
                "yyyy-MM-dd HH:mm:ss.fffffff",
        };

        private static readonly string[] s_katmaiTimeFormatByScale = new string[] {
                "HH:mm:ss",
                "HH:mm:ss.f",
                "HH:mm:ss.ff",
                "HH:mm:ss.fff",
                "HH:mm:ss.ffff",
                "HH:mm:ss.fffff",
                "HH:mm:ss.ffffff",
                "HH:mm:ss.fffffff",
        };

        internal string KatmaiDateTimeString
        {
            get
            {
                ThrowIfNull();
                if (StorageType.Date == _type)
                {
                    return this.DateTime.ToString("yyyy-MM-dd", DateTimeFormatInfo.InvariantInfo);
                }
                if (StorageType.Time == _type)
                {
                    byte scale = _value._timeInfo.scale;
                    return new DateTime(_value._timeInfo.ticks).ToString(s_katmaiTimeFormatByScale[scale], DateTimeFormatInfo.InvariantInfo);
                }
                if (StorageType.DateTime2 == _type)
                {
                    byte scale = _value._dateTime2Info.timeInfo.scale;
                    return this.DateTime.ToString(s_katmaiDateTime2FormatByScale[scale], DateTimeFormatInfo.InvariantInfo);
                }
                if (StorageType.DateTimeOffset == _type)
                {
                    DateTimeOffset dto = this.DateTimeOffset;
                    byte scale = _value._dateTimeOffsetInfo.dateTime2Info.timeInfo.scale;
                    return dto.ToString(s_katmaiDateTimeOffsetFormatByScale[scale], DateTimeFormatInfo.InvariantInfo);
                }
                return (String)this.Value; // anything else we haven't thought of goes through boxing.
            }
        }

        internal SqlString KatmaiDateTimeSqlString
        {
            get
            {
                if (StorageType.Date == _type ||
                    StorageType.Time == _type ||
                    StorageType.DateTime2 == _type ||
                    StorageType.DateTimeOffset == _type)
                {
                    if (IsNull)
                    {
                        return SqlString.Null;
                    }
                    return new SqlString(KatmaiDateTimeString);
                }
                return (SqlString)this.SqlValue; // anything else we haven't thought of goes through boxing.
            }
        }

        internal TimeSpan Time
        {
            get
            {
                ThrowIfNull();
                if (StorageType.Time == _type)
                {
                    return new TimeSpan(_value._timeInfo.ticks);
                }
                return (TimeSpan)this.Value; // anything else we haven't thought of goes through boxing.
            }
        }

        internal DateTimeOffset DateTimeOffset
        {
            get
            {
                ThrowIfNull();
                if (StorageType.DateTimeOffset == _type)
                {
                    TimeSpan offset = new TimeSpan(0, _value._dateTimeOffsetInfo.offset, 0);
                    // datetime part presents time in UTC
                    return new DateTimeOffset(GetTicksFromDateTime2Info(_value._dateTimeOffsetInfo.dateTime2Info) + offset.Ticks, offset);
                }
                return (DateTimeOffset)this.Value; // anything else we haven't thought of goes through boxing.
            }
        }

        private static long GetTicksFromDateTime2Info(DateTime2Info dateTime2Info)
        {
            return (dateTime2Info.date * TimeSpan.TicksPerDay + dateTime2Info.timeInfo.ticks);
        }

        internal SqlBinary SqlBinary
        {
            get
            {
                if (StorageType.SqlBinary == _type)
                {
                    return (SqlBinary)_object;
                }
                return (SqlBinary)this.SqlValue; // anything else we haven't thought of goes through boxing.
            }
            set
            {
                Debug.Assert(IsEmpty, "setting value a second time?");
                _object = value;
                _type = StorageType.SqlBinary;
                _isNull = value.IsNull;
            }
        }

        internal SqlBoolean SqlBoolean
        {
            get
            {
                if (StorageType.Boolean == _type)
                {
                    if (IsNull)
                    {
                        return SqlBoolean.Null;
                    }
                    return new SqlBoolean(_value._boolean);
                }
                return (SqlBoolean)this.SqlValue; // anything else we haven't thought of goes through boxing.
            }
        }

        internal SqlByte SqlByte
        {
            get
            {
                if (StorageType.Byte == _type)
                {
                    if (IsNull)
                    {
                        return SqlByte.Null;
                    }
                    return new SqlByte(_value._byte);
                }
                return (SqlByte)this.SqlValue; // anything else we haven't thought of goes through boxing.
            }
        }

        internal SqlCachedBuffer SqlCachedBuffer
        {
            get
            {
                if (StorageType.SqlCachedBuffer == _type)
                {
                    if (IsNull)
                    {
                        return SqlCachedBuffer.Null;
                    }
                    return (SqlCachedBuffer)_object;
                }
                return (SqlCachedBuffer)this.SqlValue; // anything else we haven't thought of goes through boxing.
            }
            set
            {
                Debug.Assert(IsEmpty, "setting value a second time?");
                _object = value;
                _type = StorageType.SqlCachedBuffer;
                _isNull = value.IsNull;
            }
        }

        internal SqlXml SqlXml
        {
            get
            {
                if (StorageType.SqlXml == _type)
                {
                    if (IsNull)
                    {
                        return SqlXml.Null;
                    }
                    return (SqlXml)_object;
                }
                return (SqlXml)this.SqlValue; // anything else we haven't thought of goes through boxing.
            }
            set
            {
                Debug.Assert(IsEmpty, "setting value a second time?");
                _object = value;
                _type = StorageType.SqlXml;
                _isNull = value.IsNull;
            }
        }

        internal SqlDateTime SqlDateTime
        {
            get
            {
                if (StorageType.DateTime == _type)
                {
                    if (IsNull)
                    {
                        return SqlDateTime.Null;
                    }
                    return new SqlDateTime(_value._dateTimeInfo.daypart, _value._dateTimeInfo.timepart);
                }
                return (SqlDateTime)SqlValue; // anything else we haven't thought of goes through boxing.
            }
        }

        internal SqlDecimal SqlDecimal
        {
            get
            {
                if (StorageType.Decimal == _type)
                {
                    if (IsNull)
                    {
                        return SqlDecimal.Null;
                    }
                    return new SqlDecimal(_value._numericInfo.precision,
                                          _value._numericInfo.scale,
                                          _value._numericInfo.positive,
                                          _value._numericInfo.data1,
                                          _value._numericInfo.data2,
                                          _value._numericInfo.data3,
                                          _value._numericInfo.data4
                                          );
                }
                return (SqlDecimal)this.SqlValue; // anything else we haven't thought of goes through boxing.
            }
        }

        internal SqlDouble SqlDouble
        {
            get
            {
                if (StorageType.Double == _type)
                {
                    if (IsNull)
                    {
                        return SqlDouble.Null;
                    }
                    return new SqlDouble(_value._double);
                }
                return (SqlDouble)this.SqlValue; // anything else we haven't thought of goes through boxing.
            }
        }

        internal SqlGuid SqlGuid
        {
            get
            {
                if (StorageType.SqlGuid == _type)
                {
                    return (SqlGuid)_object;
                }
                return (SqlGuid)this.SqlValue; // anything else we haven't thought of goes through boxing.
            }
            set
            {
                Debug.Assert(IsEmpty, "setting value a second time?");
                _object = value;
                _type = StorageType.SqlGuid;
                _isNull = value.IsNull;
            }
        }

        internal SqlInt16 SqlInt16
        {
            get
            {
                if (StorageType.Int16 == _type)
                {
                    if (IsNull)
                    {
                        return SqlInt16.Null;
                    }
                    return new SqlInt16(_value._int16);
                }
                return (SqlInt16)this.SqlValue; // anything else we haven't thought of goes through boxing.
            }
        }

        internal SqlInt32 SqlInt32
        {
            get
            {
                if (StorageType.Int32 == _type)
                {
                    if (IsNull)
                    {
                        return SqlInt32.Null;
                    }
                    return new SqlInt32(_value._int32);
                }
                return (SqlInt32)this.SqlValue; // anything else we haven't thought of goes through boxing.
            }
        }

        internal SqlInt64 SqlInt64
        {
            get
            {
                if (StorageType.Int64 == _type)
                {
                    if (IsNull)
                    {
                        return SqlInt64.Null;
                    }
                    return new SqlInt64(_value._int64);
                }
                return (SqlInt64)this.SqlValue; // anything else we haven't thought of goes through boxing.
            }
        }

        internal SqlMoney SqlMoney
        {
            get
            {
                if (StorageType.Money == _type)
                {
                    if (IsNull)
                    {
                        return SqlMoney.Null;
                    }
                    return new SqlMoney(_value._int64, 1/*ignored*/);
                }
                return (SqlMoney)this.SqlValue; // anything else we haven't thought of goes through boxing.
            }
        }

        internal SqlSingle SqlSingle
        {
            get
            {
                if (StorageType.Single == _type)
                {
                    if (IsNull)
                    {
                        return SqlSingle.Null;
                    }
                    return new SqlSingle(_value._single);
                }
                return (SqlSingle)this.SqlValue; // anything else we haven't thought of goes through boxing.
            }
        }

        internal SqlString SqlString
        {
            get
            {
                if (StorageType.String == _type)
                {
                    if (IsNull)
                    {
                        return SqlString.Null;
                    }
                    return new SqlString((String)_object);
                }
                else if (StorageType.SqlCachedBuffer == _type)
                {
                    SqlCachedBuffer data = (SqlCachedBuffer)(_object);
                    if (data.IsNull)
                    {
                        return SqlString.Null;
                    }
                    return data.ToSqlString();
                }
                return (SqlString)this.SqlValue; // anything else we haven't thought of goes through boxing.
            }
        }

        // The value boxed as its System.Data.SqlTypes representation (SqlInt32, SqlString, ...).
        internal object SqlValue
        {
            get
            {
                switch (_type)
                {
                    case StorageType.Empty:
                        return DBNull.Value;
                    case StorageType.Boolean:
                        return SqlBoolean;
                    case StorageType.Byte:
                        return SqlByte;
                    case StorageType.DateTime:
                        return SqlDateTime;
                    case StorageType.Decimal:
                        return SqlDecimal;
                    case StorageType.Double:
                        return SqlDouble;
                    case StorageType.Int16:
                        return SqlInt16;
                    case StorageType.Int32:
                        return SqlInt32;
                    case StorageType.Int64:
                        return SqlInt64;
                    case StorageType.Money:
                        return SqlMoney;
                    case StorageType.Single:
                        return SqlSingle;
                    case StorageType.String:
                        return SqlString;

                    case StorageType.SqlCachedBuffer:
                        {
                            // NOTE(review): a cached buffer is surfaced as SqlXml here — presumably
                            // it always holds XML column data; confirm against the callers.
                            SqlCachedBuffer data = (SqlCachedBuffer)(_object);
                            if (data.IsNull)
                            {
                                return SqlXml.Null;
                            }
                            return data.ToSqlXml();
                        }

                    case StorageType.SqlBinary:
                    case StorageType.SqlGuid:
                        return _object;

                    case StorageType.SqlXml:
                        {
                            if (_isNull)
                            {
                                return SqlXml.Null;
                            }
                            Debug.Assert(null != _object);
                            return (SqlXml)_object;
                        }

                    case StorageType.Date:
                    case StorageType.DateTime2:
                        if (_isNull)
                        {
                            return DBNull.Value;
                        }
                        return DateTime;

                    case StorageType.DateTimeOffset:
                        if (_isNull)
                        {
                            return DBNull.Value;
                        }
                        return DateTimeOffset;

                    case StorageType.Time:
                        if (_isNull)
                        {
                            return DBNull.Value;
                        }
                        return Time;
                }
                return null; // need to return the value as an object of some SQL type
            }
        }

        // The value boxed as its CLS representation (int, string, DateTime, ...); DBNull for nulls.
        internal object Value
        {
            get
            {
                if (IsNull)
                {
                    return DBNull.Value;
                }
                switch (_type)
                {
                    case StorageType.Empty:
                        return DBNull.Value;
                    case StorageType.Boolean:
                        return Boolean;
                    case StorageType.Byte:
                        return Byte;
                    case StorageType.DateTime:
                        return DateTime;
                    case StorageType.Decimal:
                        return Decimal;
                    case StorageType.Double:
                        return Double;
                    case StorageType.Int16:
                        return Int16;
                    case StorageType.Int32:
                        return Int32;
                    case StorageType.Int64:
                        return Int64;
                    case StorageType.Money:
                        return Decimal;
                    case StorageType.Single:
                        return Single;
                    case StorageType.String:
                        return String;
                    case StorageType.SqlBinary:
                        return ByteArray;
                    case StorageType.SqlCachedBuffer:
                        {
                            // If we have a CachedBuffer, it's because it's an XMLTYPE column
                            // and we have to return a string when they're asking for the CLS
                            // value of the column.
                            return ((SqlCachedBuffer)(_object)).ToString();
                        }
                    case StorageType.SqlGuid:
                        return Guid;
                    case StorageType.SqlXml:
                        {
                            // XMLTYPE columns must be returned as string when asking for the CLS value
                            SqlXml data = (SqlXml)_object;
                            string s = data.Value;
                            return s;
                        }
                    case StorageType.Date:
                        return DateTime;
                    case StorageType.DateTime2:
                        return DateTime;
                    case StorageType.DateTimeOffset:
                        return DateTimeOffset;
                    case StorageType.Time:
                        return Time;
                }
                return null; // need to return the value as an object of some CLS type
            }
        }

        // Maps the active storage type to the Type that SqlValue (isSqlType=true) or Value
        // (isSqlType=false) would return.
        internal Type GetTypeFromStorageType(bool isSqlType)
        {
            if (isSqlType)
            {
                switch (_type)
                {
                    case SqlBuffer.StorageType.Empty:
                        return null;
                    case SqlBuffer.StorageType.Boolean:
                        return typeof(SqlBoolean);
                    case SqlBuffer.StorageType.Byte:
                        return typeof(SqlByte);
                    case SqlBuffer.StorageType.DateTime:
                        return typeof(SqlDateTime);
                    case SqlBuffer.StorageType.Decimal:
                        return typeof(SqlDecimal);
                    case SqlBuffer.StorageType.Double:
                        return typeof(SqlDouble);
                    case SqlBuffer.StorageType.Int16:
                        return typeof(SqlInt16);
                    case SqlBuffer.StorageType.Int32:
                        return typeof(SqlInt32);
                    case SqlBuffer.StorageType.Int64:
                        return typeof(SqlInt64);
                    case SqlBuffer.StorageType.Money:
                        return typeof(SqlMoney);
                    case SqlBuffer.StorageType.Single:
                        return typeof(SqlSingle);
                    case SqlBuffer.StorageType.String:
                        return typeof(SqlString);
                    case SqlBuffer.StorageType.SqlCachedBuffer:
                        return typeof(SqlString);
                    case SqlBuffer.StorageType.SqlBinary:
                        return typeof(object);
                    case SqlBuffer.StorageType.SqlGuid:
                        return typeof(object);
                    case SqlBuffer.StorageType.SqlXml:
                        return typeof(SqlXml);
                }
            }
            else
            { //Is CLR Type
                switch (_type)
                {
                    case SqlBuffer.StorageType.Empty:
                        return null;
                    case SqlBuffer.StorageType.Boolean:
                        return typeof(Boolean);
                    case SqlBuffer.StorageType.Byte:
                        return typeof(Byte);
                    case SqlBuffer.StorageType.DateTime:
                        return typeof(DateTime);
                    case SqlBuffer.StorageType.Decimal:
                        return typeof(Decimal);
                    case SqlBuffer.StorageType.Double:
                        return typeof(Double);
                    case SqlBuffer.StorageType.Int16:
                        return typeof(Int16);
                    case SqlBuffer.StorageType.Int32:
                        return typeof(Int32);
                    case SqlBuffer.StorageType.Int64:
                        return typeof(Int64);
                    case SqlBuffer.StorageType.Money:
                        return typeof(Decimal);
                    case SqlBuffer.StorageType.Single:
                        return typeof(Single);
                    case SqlBuffer.StorageType.String:
                        return typeof(String);
                    case SqlBuffer.StorageType.SqlBinary:
                        return typeof(Byte[]);
                    case SqlBuffer.StorageType.SqlCachedBuffer:
                        return typeof(string);
                    case SqlBuffer.StorageType.SqlGuid:
                        return typeof(Guid);
                    case SqlBuffer.StorageType.SqlXml:
                        return typeof(string);
                }
            }
            return null; // need to return the value as an object of some CLS type
        }

        internal static SqlBuffer[] CreateBufferArray(int length)
        {
            SqlBuffer[] buffers = new SqlBuffer[length];
            for (int i = 0; i < buffers.Length; ++i)
            {
                buffers[i] = new SqlBuffer();
            }
            return buffers;
        }

        internal static void Clear(SqlBuffer[] values)
        {
            if (null != values)
            {
                for (int i = 0; i < values.Length; ++i)
                {
                    values[i].Clear();
                }
            }
        }

        // Resets the buffer to the Empty state so a new value can be assigned.
        internal void Clear()
        {
            _isNull = false;
            _type = StorageType.Empty;
            _object = null;
        }

        internal void SetToDateTime(int daypart, int timepart)
        {
            Debug.Assert(IsEmpty, "setting value a second time?");
            _value._dateTimeInfo.daypart = daypart;
            _value._dateTimeInfo.timepart = timepart;
            _type = StorageType.DateTime;
            _isNull = false;
        }

        internal void SetToDecimal(byte precision, byte scale, bool positive, int[] bits)
        {
            Debug.Assert(IsEmpty, "setting value a second time?");
            _value._numericInfo.precision = precision;
            _value._numericInfo.scale = scale;
            _value._numericInfo.positive = positive;
            _value._numericInfo.data1 = bits[0];
            _value._numericInfo.data2 = bits[1];
            _value._numericInfo.data3 = bits[2];
            _value._numericInfo.data4 = bits[3];
            _type = StorageType.Decimal;
            _isNull = false;
        }

        internal void SetToMoney(long value)
        {
            Debug.Assert(IsEmpty, "setting value a second time?");
            _value._int64 = value;
            _type = StorageType.Money;
            _isNull = false;
        }

        internal void SetToNullOfType(StorageType storageType)
        {
            Debug.Assert(IsEmpty, "setting value a second time?");
            _type = storageType;
            _isNull = true;
            _object = null;
        }

        internal void SetToString(string value)
        {
            Debug.Assert(IsEmpty, "setting value a second time?");
            _object = value;
            _type = StorageType.String;
            _isNull = false;
        }

        internal void SetToDate(byte[] bytes)
        {
            Debug.Assert(IsEmpty, "setting value a second time?");

            _type = StorageType.Date;
            _value._int32 = GetDateFromByteArray(bytes, 0);
            _isNull = false;
        }

        internal void SetToTime(byte[] bytes, int length, byte scale)
        {
            Debug.Assert(IsEmpty, "setting value a second time?");

            _type = StorageType.Time;
            FillInTimeInfo(ref _value._timeInfo, bytes, length, scale);
            _isNull = false;
        }

        internal void SetToDateTime2(byte[] bytes, int length, byte scale)
        {
            Debug.Assert(IsEmpty, "setting value a second time?");

            _type = StorageType.DateTime2;
            FillInTimeInfo(ref _value._dateTime2Info.timeInfo, bytes, length - 3, scale); // remaining 3 bytes is for date
            _value._dateTime2Info.date = GetDateFromByteArray(bytes, length - 3); // 3 bytes for date
            _isNull = false;
        }

        internal void SetToDateTimeOffset(byte[] bytes, int length, byte scale)
        {
            Debug.Assert(IsEmpty, "setting value a second time?");

            _type = StorageType.DateTimeOffset;
            FillInTimeInfo(ref _value._dateTimeOffsetInfo.dateTime2Info.timeInfo, bytes, length - 5, scale); // remaining 5 bytes are for date and offset
            _value._dateTimeOffsetInfo.dateTime2Info.date = GetDateFromByteArray(bytes, length - 5); // 3 bytes for date
            _value._dateTimeOffsetInfo.offset = (Int16)(bytes[length - 2] + (bytes[length - 1] << 8)); // 2 bytes for offset (Int16)
            _isNull = false;
        }

        // Decodes a 3-5 byte little-endian tick-unit count and converts it to 100ns ticks
        // using the scale-dependent multiplier table.
        private static void FillInTimeInfo(ref TimeInfo timeInfo, byte[] timeBytes, int length, byte scale)
        {
            Debug.Assert(3 <= length && length <= 5, "invalid data length for timeInfo: " + length);
            Debug.Assert(0 <= scale && scale <= 7, "invalid scale: " + scale);

            Int64 tickUnits = (Int64)timeBytes[0] + ((Int64)timeBytes[1] << 8) + ((Int64)timeBytes[2] << 16);
            if (length > 3)
            {
                tickUnits += ((Int64)timeBytes[3] << 24);
            }
            if (length > 4)
            {
                tickUnits += ((Int64)timeBytes[4] << 32);
            }
            timeInfo.ticks = tickUnits * TdsEnums.TICKS_FROM_SCALE[scale];
            timeInfo.scale = scale;
        }

        // Decodes a 3-byte little-endian day count.
        private static Int32 GetDateFromByteArray(byte[] buf, int offset)
        {
            return buf[offset] + (buf[offset + 1] << 8) + (buf[offset + 2] << 16);
        }

        private void ThrowIfNull()
        {
            if (IsNull)
            {
                throw new SqlNullValueException();
            }
        }
    }
}// namespace
/*
 * REST API Documentation for Schoolbus
 *
 * API Sample
 *
 * OpenAPI spec version: v1
 *
 */

using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.TestHost;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Threading.Tasks;
using Xunit;
using SchoolBusAPI;
using System.Text;
using Newtonsoft.Json;
using SchoolBusAPI.Models;
using System.Net;
using Microsoft.AspNetCore.WebUtilities;

namespace SchoolBusAPI.Test
{
    /// <summary>
    /// Integration tests for the SchoolBus API, run against an in-process TestServer.
    /// Fixes applied: test methods are now <c>async Task</c> instead of <c>async void</c>
    /// (xUnit cannot await or observe exceptions from async void tests), and xUnit
    /// assertions use the (expected, actual) argument order.
    /// </summary>
    public class SchoolBusApiIntegrationTest
    {
        private readonly TestServer _server;
        private readonly HttpClient _client;

        /// <summary>
        /// Setup the test: boots the API in-process and creates a client bound to it.
        /// </summary>
        public SchoolBusApiIntegrationTest()
        {
            _server = new TestServer(new WebHostBuilder()
                .UseEnvironment("Development")
                .UseContentRoot(Directory.GetCurrentDirectory())
                .UseStartup<Startup>());
            _client = _server.CreateClient();
        }

        /// <summary>
        /// POSTs <paramref name="json"/> to <paramref name="uri"/>, asserts success,
        /// and deserializes the JSON response body into <typeparamref name="T"/>.
        /// </summary>
        private async Task<T> PostAndParseAsync<T>(string uri, string json)
        {
            var request = new HttpRequestMessage(HttpMethod.Post, uri);
            request.Content = new StringContent(json, Encoding.UTF8, "application/json");
            var response = await _client.SendAsync(request);
            response.EnsureSuccessStatusCode();
            var body = await response.Content.ReadAsStringAsync();
            return JsonConvert.DeserializeObject<T>(body);
        }

        /// <summary>
        /// GETs <paramref name="uri"/> and asserts a success status code.
        /// </summary>
        private async Task AssertGetSucceeds(string uri)
        {
            var response = await _client.SendAsync(new HttpRequestMessage(HttpMethod.Get, uri));
            response.EnsureSuccessStatusCode();
        }

        /// <summary>
        /// Deletes the record via its "/delete" action and verifies a subsequent GET
        /// of <paramref name="itemUri"/> returns 404 Not Found.
        /// </summary>
        private async Task DeleteAndVerifyGone(string itemUri)
        {
            var response = await _client.SendAsync(new HttpRequestMessage(HttpMethod.Post, itemUri + "/delete"));
            response.EnsureSuccessStatusCode();

            response = await _client.SendAsync(new HttpRequestMessage(HttpMethod.Get, itemUri));
            Assert.Equal(HttpStatusCode.NotFound, response.StatusCode);
        }

        /// <summary>
        /// Integration test for AddBus: the bus collection endpoint responds successfully.
        /// </summary>
        [Fact]
        public async Task TestAddBus()
        {
            var response = await _client.GetAsync("/api/schoolbuses");
            response.EnsureSuccessStatusCode();
        }

        /// <summary>
        /// Integration test for AddSchoolBusBulk: posting an empty collection succeeds.
        /// </summary>
        [Fact]
        public async Task TestAddSchoolBusBulk()
        {
            var request = new HttpRequestMessage(HttpMethod.Post, "api/schoolbuses/bulk");
            request.Content = new StringContent("[]", Encoding.UTF8, "application/json");
            var response = await _client.SendAsync(request);
            response.EnsureSuccessStatusCode();
        }

        /// <summary>
        /// Integration test for FindBusById: full create / update / read /
        /// related-collection / delete cycle for a school bus.
        /// </summary>
        [Fact]
        public async Task TestSchoolBuses()
        {
            // create a service area.
            ServiceArea servicearea = await PostAndParseAsync<ServiceArea>("/api/serviceareas", new ServiceArea().ToJson());
            var servicearea_id = servicearea.Id;

            // create a schoolbus owner.
            SchoolBusOwner schoolBusOwner = await PostAndParseAsync<SchoolBusOwner>("/api/schoolbusowners", new SchoolBusOwner().ToJson());
            var schoolBusOwner_id = schoolBusOwner.Id;

            // create a new schoolbus.
            SchoolBus schoolbus = new SchoolBus();
            schoolbus.Status = "0";
            schoolbus.ServiceArea = servicearea;
            schoolbus.SchoolBusOwner = schoolBusOwner;
            schoolbus = await PostAndParseAsync<SchoolBus>("/api/schoolbuses", schoolbus.ToJson());
            var id = schoolbus.Id;

            // make a change and update.
            string testStatus = "1";
            schoolbus.Status = testStatus;
            var request = new HttpRequestMessage(HttpMethod.Put, "/api/schoolbuses/" + id);
            request.Content = new StringContent(schoolbus.ToJson(), Encoding.UTF8, "application/json");
            var response = await _client.SendAsync(request);
            response.EnsureSuccessStatusCode();

            // read it back; the change should have persisted.
            response = await _client.SendAsync(new HttpRequestMessage(HttpMethod.Get, "/api/schoolbuses/" + id));
            response.EnsureSuccessStatusCode();
            schoolbus = JsonConvert.DeserializeObject<SchoolBus>(await response.Content.ReadAsStringAsync());
            Assert.Equal(testStatus, schoolbus.Status);

            // related collections should all be reachable.
            await AssertGetSucceeds("/api/schoolbuses/" + id + "/attachments");
            await AssertGetSucceeds("/api/schoolbuses/" + id + "/ccwdata");
            await AssertGetSucceeds("/api/schoolbuses/" + id + "/history");
            await AssertGetSucceeds("/api/schoolbuses/" + id + "/notes");
            await AssertGetSucceeds("/api/schoolbuses/" + id + "/inspections");

            // teardown: delete the bus, the service area and the owner, verifying each is gone.
            await DeleteAndVerifyGone("/api/schoolbuses/" + id);
            await DeleteAndVerifyGone("/api/serviceareas/" + servicearea_id);
            await DeleteAndVerifyGone("/api/schoolbusowners/" + schoolBusOwner_id);
        }

        /// <summary>
        /// Runs the school bus search endpoint with the given query parameters and
        /// returns the deserialized result array.
        /// </summary>
        private async Task<SchoolBus[]> SearchHelper(Dictionary<string, string> parametersToAdd)
        {
            var targetUrl = "/api/schoolbuses/search";
            var newUri = QueryHelpers.AddQueryString(targetUrl, parametersToAdd);
            var request = new HttpRequestMessage(HttpMethod.Get, newUri);
            var response = await _client.SendAsync(request);
            // parse as JSON; should be an array of schoolbus records.
            var jsonString = await response.Content.ReadAsStringAsync();
            return JsonConvert.DeserializeObject<SchoolBus[]>(jsonString);
        }

        /// <summary>
        /// Runs a search with the given query parameters and asserts that the
        /// expected bus appears in the (non-empty) results.
        /// </summary>
        private async Task AssertSearchFinds(Dictionary<string, string> parametersToAdd, SchoolBus expected)
        {
            SchoolBus[] searchresults = await SearchHelper(parametersToAdd);
            Assert.NotNull(searchresults);
            Assert.NotEmpty(searchresults);
            Assert.Contains(searchresults, item => item.Id == expected.Id);
        }

        /// <summary>
        /// Integration test for GetAllBuses: the search endpoint finds a bus by
        /// service area, owner, VIN and plate.
        /// </summary>
        [Fact]
        public async Task TestBusSearch()
        {
            // setup: a service area, an owner, and a bus with a known VIN / plate.
            ServiceArea servicearea = await PostAndParseAsync<ServiceArea>("/api/serviceareas", new ServiceArea().ToJson());
            var servicearea_id = servicearea.Id;

            SchoolBusOwner schoolBusOwner = await PostAndParseAsync<SchoolBusOwner>("/api/schoolbusowners", new SchoolBusOwner().ToJson());
            var schoolBusOwner_id = schoolBusOwner.Id;

            SchoolBus schoolbus = new SchoolBus();
            schoolbus.Status = "Active";
            schoolbus.ServiceArea = servicearea;
            schoolbus.SchoolBusOwner = schoolBusOwner;
            schoolbus.VIN = "1234";
            schoolbus.Plate = "12345";
            schoolbus = await PostAndParseAsync<SchoolBus>("/api/schoolbuses", schoolbus.ToJson());
            var id = schoolbus.Id;

            // round-trip an update, then re-read the record before searching.
            schoolbus.Status = "Active";
            var request = new HttpRequestMessage(HttpMethod.Put, "/api/schoolbuses/" + id);
            request.Content = new StringContent(schoolbus.ToJson(), Encoding.UTF8, "application/json");
            var response = await _client.SendAsync(request);
            response.EnsureSuccessStatusCode();

            response = await _client.SendAsync(new HttpRequestMessage(HttpMethod.Get, "/api/schoolbuses/" + id));
            response.EnsureSuccessStatusCode();
            schoolbus = JsonConvert.DeserializeObject<SchoolBus>(await response.Content.ReadAsStringAsync());

            // the bus should be found by each supported search criterion.
            await AssertSearchFinds(new Dictionary<string, string> { { "servicearea", "[" + servicearea_id + "]" } }, schoolbus);
            await AssertSearchFinds(new Dictionary<string, string> { { "owner", "" + schoolBusOwner_id } }, schoolbus);
            await AssertSearchFinds(new Dictionary<string, string> { { "vin", "1234" } }, schoolbus);
            await AssertSearchFinds(new Dictionary<string, string> { { "plate", "12345" } }, schoolbus);

            // teardown: remove everything and verify each record is gone.
            await DeleteAndVerifyGone("/api/schoolbuses/" + id);
            await DeleteAndVerifyGone("/api/serviceareas/" + servicearea_id);
            await DeleteAndVerifyGone("/api/schoolbusowners/" + schoolBusOwner_id);
        }
    }
}
using System;
using System.Text;
using System.Data;
using System.Data.SqlClient;
using System.Data.Common;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Configuration;
using System.Xml;
using System.Xml.Serialization;
using SubSonic;
using SubSonic.Utilities;

namespace DalSic
{
    /// <summary>
    /// Controller class for LAB_ResultadoDetalle.
    /// SubSonic-generated ObjectDataSource controller; edit with care, as changes
    /// are typically overwritten when the DAL is regenerated.
    /// </summary>
    [System.ComponentModel.DataObject]
    public partial class LabResultadoDetalleController
    {
        // Preload our schema..
        LabResultadoDetalle thisSchemaLoad = new LabResultadoDetalle();

        // Cached identity name used for audit columns on Save().
        private string userName = String.Empty;

        /// <summary>
        /// Resolves the current user's identity name, preferring the web request
        /// context and falling back to the thread principal outside of ASP.NET.
        /// </summary>
        protected string UserName
        {
            get
            {
                if (userName.Length == 0)
                {
                    if (System.Web.HttpContext.Current != null)
                    {
                        userName=System.Web.HttpContext.Current.User.Identity.Name;
                    }
                    else
                    {
                        userName=System.Threading.Thread.CurrentPrincipal.Identity.Name;
                    }
                }
                return userName;
            }
        }

        /// <summary>
        /// Loads every LAB_ResultadoDetalle row.
        /// </summary>
        [DataObjectMethod(DataObjectMethodType.Select, true)]
        public LabResultadoDetalleCollection FetchAll()
        {
            LabResultadoDetalleCollection coll = new LabResultadoDetalleCollection();
            Query qry = new Query(LabResultadoDetalle.Schema);
            coll.LoadAndCloseReader(qry.ExecuteReader());
            return coll;
        }

        /// <summary>
        /// Loads the rows matching the given idProtocolo value.
        /// </summary>
        [DataObjectMethod(DataObjectMethodType.Select, false)]
        public LabResultadoDetalleCollection FetchByID(object IdProtocolo)
        {
            LabResultadoDetalleCollection coll = new LabResultadoDetalleCollection().Where("idProtocolo", IdProtocolo).Load();
            return coll;
        }

        /// <summary>
        /// Loads the rows returned by an arbitrary SubSonic query.
        /// </summary>
        [DataObjectMethod(DataObjectMethodType.Select, false)]
        public LabResultadoDetalleCollection FetchByQuery(Query qry)
        {
            LabResultadoDetalleCollection coll = new LabResultadoDetalleCollection();
            coll.LoadAndCloseReader(qry.ExecuteReader());
            return coll;
        }

        /// <summary>
        /// Logical delete by key; returns true when exactly one row was affected.
        /// </summary>
        [DataObjectMethod(DataObjectMethodType.Delete, true)]
        public bool Delete(object IdProtocolo)
        {
            return (LabResultadoDetalle.Delete(IdProtocolo) == 1);
        }

        /// <summary>
        /// Physical delete by key; returns true when exactly one row was affected.
        /// </summary>
        [DataObjectMethod(DataObjectMethodType.Delete, false)]
        public bool Destroy(object IdProtocolo)
        {
            return (LabResultadoDetalle.Destroy(IdProtocolo) == 1);
        }

        /// <summary>
        /// Deletes by the full composite key (protocolo / efector / detalle).
        /// NOTE(review): always returns true regardless of rows affected.
        /// </summary>
        [DataObjectMethod(DataObjectMethodType.Delete, true)]
        public bool Delete(int IdProtocolo,int IdEfector,int IdDetalleProtocolo)
        {
            Query qry = new Query(LabResultadoDetalle.Schema);
            qry.QueryType = QueryType.Delete;
            qry.AddWhere("IdProtocolo", IdProtocolo).AND("IdEfector", IdEfector).AND("IdDetalleProtocolo", IdDetalleProtocolo);
            qry.Execute();
            return (true);
        }

        /// <summary>
        /// Inserts a record, can be used with the Object Data Source
        /// </summary>
        [DataObjectMethod(DataObjectMethodType.Insert, true)]
        public void Insert(int IdProtocolo,int IdEfector,int IdDetalleProtocolo,string Codigo,int OrdenArea,int Orden,string Area,string Grupo,string Item,string Observaciones,string EsTitulo,string Derivado,string Unidad,bool? Hiv,string Metodo,string ValorReferencia,int Orden1,string Muestra,int Conresultado,string Resultado,string Codigo2,string CodigoNomenclador,string ProfesionalVal)
        {
            LabResultadoDetalle item = new LabResultadoDetalle();

            item.IdProtocolo = IdProtocolo;

            item.IdEfector = IdEfector;

            item.IdDetalleProtocolo = IdDetalleProtocolo;

            item.Codigo = Codigo;

            item.OrdenArea = OrdenArea;

            item.Orden = Orden;

            item.Area = Area;

            item.Grupo = Grupo;

            item.Item = Item;

            item.Observaciones = Observaciones;

            item.EsTitulo = EsTitulo;

            item.Derivado = Derivado;

            item.Unidad = Unidad;

            item.Hiv = Hiv;

            item.Metodo = Metodo;

            item.ValorReferencia = ValorReferencia;

            item.Orden1 = Orden1;

            item.Muestra = Muestra;

            item.Conresultado = Conresultado;

            item.Resultado = Resultado;

            item.Codigo2 = Codigo2;

            item.CodigoNomenclador = CodigoNomenclador;

            item.ProfesionalVal = ProfesionalVal;

            // Save stamps audit columns with the resolved UserName.
            item.Save(UserName);
        }

        /// <summary>
        /// Updates a record, can be used with the Object Data Source
        /// </summary>
        [DataObjectMethod(DataObjectMethodType.Update, true)]
        public void Update(int IdProtocolo,int IdEfector,int IdDetalleProtocolo,string Codigo,int OrdenArea,int Orden,string Area,string Grupo,string Item,string Observaciones,string EsTitulo,string Derivado,string Unidad,bool? Hiv,string Metodo,string ValorReferencia,int Orden1,string Muestra,int Conresultado,string Resultado,string Codigo2,string CodigoNomenclador,string ProfesionalVal)
        {
            LabResultadoDetalle item = new LabResultadoDetalle();
            // MarkOld/IsLoaded make Save() issue an UPDATE instead of an INSERT.
            item.MarkOld();
            item.IsLoaded = true;

            item.IdProtocolo = IdProtocolo;

            item.IdEfector = IdEfector;

            item.IdDetalleProtocolo = IdDetalleProtocolo;

            item.Codigo = Codigo;

            item.OrdenArea = OrdenArea;

            item.Orden = Orden;

            item.Area = Area;

            item.Grupo = Grupo;

            item.Item = Item;

            item.Observaciones = Observaciones;

            item.EsTitulo = EsTitulo;

            item.Derivado = Derivado;

            item.Unidad = Unidad;

            item.Hiv = Hiv;

            item.Metodo = Metodo;

            item.ValorReferencia = ValorReferencia;

            item.Orden1 = Orden1;

            item.Muestra = Muestra;

            item.Conresultado = Conresultado;

            item.Resultado = Resultado;

            item.Codigo2 = Codigo2;

            item.CodigoNomenclador = CodigoNomenclador;

            item.ProfesionalVal = ProfesionalVal;

            item.Save(UserName);
        }
    }
}
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Orleans;
using Orleans.Runtime;
using Orleans.TestingHost;
using TestExtensions;
using UnitTests.GrainInterfaces;
using UnitTests.TestHelper;
using Xunit;
using Xunit.Abstractions;
using System.Linq;
using Orleans.Runtime.Configuration;
using Orleans.Hosting;
using Orleans.Configuration;
using Microsoft.Extensions.Configuration;

namespace UnitTests.ActivationsLifeCycleTests
{
    /// <summary>
    /// Tests around DeactivateSelf / activation collection: deactivating grains
    /// under load, and recovery when grain-directory registrations go missing.
    /// Each test deploys its own in-process TestCluster via Initialize().
    /// </summary>
    [TestCategory("ActivationCollector")]
    public class DeactivateOnIdleTests : OrleansTestingBase, IDisposable
    {
        private readonly ITestOutputHelper output;
        private TestCluster testCluster;

        public DeactivateOnIdleTests(ITestOutputHelper output)
        {
            this.output = output;
        }

        // Builds and deploys the cluster; defaults to a single silo.
        private void Initialize(TestClusterBuilder builder = null)
        {
            if (builder == null)
            {
                builder = new TestClusterBuilder(1);
            }
            testCluster = builder.Build();
            testCluster.Deploy();
        }

        // Tear the cluster down after every test so tests stay isolated.
        public void Dispose()
        {
            testCluster?.StopAllSilos();
            testCluster = null;
        }

        /// <summary>
        /// After grain b deactivates itself, a call routed through a's cached
        /// reference must reach a freshly activated b (age below 2 seconds).
        /// </summary>
        [Fact, TestCategory("Functional")]
        public async Task DeactivateOnIdleTestInside_Basic()
        {
            Initialize();
            var a = this.testCluster.GrainFactory.GetGrain<ICollectionTestGrain>(1);
            var b = this.testCluster.GrainFactory.GetGrain<ICollectionTestGrain>(2);
            await a.SetOther(b);
            await a.GetOtherAge(); // prime a's routing cache
            await b.DeactivateSelf();
            // NOTE(review): blocking Thread.Sleep inside an async test; await Task.Delay
            // would be preferable, though behavior here is equivalent.
            Thread.Sleep(5000);
            var age = a.GetOtherAge().WaitForResultWithThrow(TimeSpan.FromMilliseconds(2000));
            Assert.True(age.TotalMilliseconds < 2000, "Should be newly activated grain");
        }

        /// <summary>
        /// A grain must be callable repeatedly after deactivating itself.
        /// </summary>
        [Fact, TestCategory("SlowBVT"), TestCategory("Functional")]
        public async Task DeactivateOnIdleTest_Stress_1()
        {
            Initialize();
            var a = this.testCluster.GrainFactory.GetGrain<ICollectionTestGrain>(1);
            await a.GetAge();
            await a.DeactivateSelf();
            for (int i = 0; i < 30; i++)
            {
                await a.GetAge();
            }
        }

        /// <summary>
        /// Races 100 concurrent calls against DeactivateSelf on a non-reentrant grain.
        /// </summary>
        [Fact, TestCategory("Functional")]
        public async Task DeactivateOnIdleTest_Stress_2_NonReentrant()
        {
            Initialize();
            var a = this.testCluster.GrainFactory.GetGrain<ICollectionTestGrain>(1, "UnitTests.Grains.CollectionTestGrain");
            await a.IncrCounter();

            Task t1 = Task.Run(async () =>
            {
                List<Task> tasks = new List<Task>();
                for (int i = 0; i < 100; i++)
                {
                    tasks.Add(a.IncrCounter());
                }
                await Task.WhenAll(tasks);
            });
            await Task.Delay(1);
            Task t2 = a.DeactivateSelf();
            await Task.WhenAll(t1, t2);
        }

        /// <summary>
        /// Same race as Stress_2, but against the reentrant grain implementation.
        /// </summary>
        [Fact, TestCategory("Functional")]
        public async Task DeactivateOnIdleTest_Stress_3_Reentrant()
        {
            Initialize();
            var a = this.testCluster.GrainFactory.GetGrain<ICollectionTestGrain>(1, "UnitTests.Grains.ReentrantCollectionTestGrain");
            await a.IncrCounter();

            Task t1 = Task.Run(async () =>
            {
                List<Task> tasks = new List<Task>();
                for (int i = 0; i < 100; i++)
                {
                    tasks.Add(a.IncrCounter());
                }
                await Task.WhenAll(tasks);
            });
            await Task.Delay(TimeSpan.FromMilliseconds(1));
            Task t2 = a.DeactivateSelf();
            await Task.WhenAll(t1, t2);
        }

        /// <summary>
        /// Deactivation while grain timers are active must not prevent later calls.
        /// </summary>
        [Fact, TestCategory("Functional")]
        public async Task DeactivateOnIdleTest_Stress_4_Timer()
        {
            Initialize();
            var a = this.testCluster.GrainFactory.GetGrain<ICollectionTestGrain>(1, "UnitTests.Grains.ReentrantCollectionTestGrain");
            for (int i = 0; i < 10; i++)
            {
                await a.StartTimer(TimeSpan.FromMilliseconds(5), TimeSpan.FromMilliseconds(100));
            }
            await a.DeactivateSelf();
            await a.IncrCounter();
        }

        /// <summary>
        /// Concurrent increments racing a delayed DeactivateSelf from a second task.
        /// </summary>
        [Fact, TestCategory("Functional")]
        public async Task DeactivateOnIdleTest_Stress_5()
        {
            Initialize();
            var a = this.testCluster.GrainFactory.GetGrain<ICollectionTestGrain>(1);
            await a.IncrCounter();

            Task t1 = Task.Run(async () =>
            {
                List<Task> tasks = new List<Task>();
                for (int i = 0; i < 100; i++)
                {
                    tasks.Add(a.IncrCounter());
                }
                await Task.WhenAll(tasks);
            });
            Task t2 = Task.Run(async () =>
            {
                List<Task> tasks = new List<Task>();
                for (int i = 0; i < 1; i++)
                {
                    await Task.Delay(1);
                    tasks.Add(a.DeactivateSelf());
                }
                await Task.WhenAll(tasks);
            });
            await Task.WhenAll(t1, t2);
        }

        /// <summary>
        /// Plain concurrent-call smoke test (no deactivation involved).
        /// </summary>
        [Fact, TestCategory("Stress")]
        public async Task DeactivateOnIdleTest_Stress_11()
        {
            Initialize();
            var a = this.testCluster.GrainFactory.GetGrain<ICollectionTestGrain>(1);
            List<Task> tasks = new List<Task>();
            for (int i = 0; i < 100; i++)
            {
                tasks.Add(a.IncrCounter());
            }
            await Task.WhenAll(tasks);
        }

        [Fact, TestCategory("Functional")]
        public async Task DeactivateOnIdle_NonExistentActivation_1()
        {
            // forwardCount = 0: expect the stale-reference call to be rejected.
            await DeactivateOnIdle_NonExistentActivation_Runner(0);
        }

        [Fact, TestCategory("Functional")]
        public async Task DeactivateOnIdle_NonExistentActivation_2()
        {
            // forwardCount = 1: expect the runtime to forward and succeed.
            await DeactivateOnIdle_NonExistentActivation_Runner(1);
        }

        // Restricts the client to a single gateway so routing goes through one silo.
        public class ClientConfigurator : IClientBuilderConfigurator
        {
            public void Configure(IConfiguration configuration, IClientBuilder clientBuilder)
            {
                clientBuilder.Configure<StaticGatewayListProviderOptions>(options =>
                {
                    options.Gateways = options.Gateways.Take(1).ToList();
                });
            }
        }

        // Reads "MaxForwardCount" from test-cluster configuration and applies it
        // to silo messaging options.
        public class SiloConfigurator : ISiloBuilderConfigurator
        {
            public void Configure(ISiloHostBuilder hostBuilder)
            {
                var cfg = hostBuilder.GetConfiguration();
                var maxForwardCount = int.Parse(cfg["MaxForwardCount"]);
                hostBuilder.Configure<SiloMessagingOptions>(options => options.MaxForwardCount = maxForwardCount);
            }
        }

        /// <summary>
        /// Deactivates a grain hosted on a non-primary silo, then calls it again
        /// through the (now stale) cached route. With forwarding enabled the call
        /// succeeds; with forwarding disabled it must be rejected with
        /// "Non-existent activation".
        /// </summary>
        private async Task DeactivateOnIdle_NonExistentActivation_Runner(int forwardCount)
        {
            var builder = new TestClusterBuilder(2);
            builder.AddClientBuilderConfigurator<ClientConfigurator>();
            builder.AddSiloBuilderConfigurator<SiloConfigurator>();
            builder.Properties["MaxForwardCount"] = forwardCount.ToString();
            Initialize(builder);

            ICollectionTestGrain grain = await PickGrainInNonPrimary();

            output.WriteLine("About to make a 1st GetAge() call.");
            TimeSpan age = await grain.GetAge();
            output.WriteLine(age.ToString());

            await grain.DeactivateSelf();
            await Task.Delay(3000);

            // ReSharper disable once PossibleNullReferenceException
            var thrownException = await Record.ExceptionAsync(() => grain.GetAge());
            if (forwardCount != 0)
            {
                Assert.Null(thrownException);
                output.WriteLine("\nThe 1st call after DeactivateSelf has NOT thrown any exception as expected, since forwardCount is {0}.\n", forwardCount);
            }
            else
            {
                Assert.NotNull(thrownException);
                Assert.IsType<OrleansMessageRejectionException>(thrownException);
                Assert.Contains("Non-existent activation", thrownException.Message);
                output.WriteLine("\nThe 1st call after DeactivateSelf has thrown Non-existent activation exception as expected, since forwardCount is {0}.\n", forwardCount);

                // Try sending again now and see it was fixed.
                await grain.GetAge();
            }
        }

        /// <summary>
        /// Probes grain keys until it finds one whose directory owner AND
        /// activation are both on a non-primary silo; fails the test after 500 tries.
        /// </summary>
        private async Task<ICollectionTestGrain> PickGrainInNonPrimary()
        {
            for (int i = 0; i < 500; i++)
            {
                if (i % 30 == 29) await Task.Delay(1000); // give some extra time to stabilize if it can't find a suitable grain

                // Create grain such that:
                // Its directory owner is not the Gateway silo. This way Gateway will use its directory cache.
                // Its activation is located on the non Gateway silo as well.
                ICollectionTestGrain grain = this.testCluster.GrainFactory.GetGrain<ICollectionTestGrain>(i);
                GrainId grainId = ((GrainReference)await grain.GetGrainReference()).GrainId;
                SiloAddress primaryForGrain = (await TestUtils.GetDetailedGrainReport(this.testCluster.InternalGrainFactory, grainId, this.testCluster.Primary)).PrimaryForGrain;
                if (primaryForGrain.Equals(this.testCluster.Primary.SiloAddress))
                {
                    continue;
                }
                string siloHostingActivation = await grain.GetRuntimeInstanceId();
                if (this.testCluster.Primary.SiloAddress.ToLongString().Equals(siloHostingActivation))
                {
                    continue;
                }
                this.output.WriteLine("\nCreated grain with key {0} whose primary directory owner is silo {1} and which was activated on silo {2}\n", i, primaryForGrain.ToLongString(), siloHostingActivation);
                return grain;
            }
            Assert.True(testCluster.GetActiveSilos().Count() > 1, "This logic requires at least 1 non-primary active silo");
            Assert.True(false, "Could not find a grain that activates on a non-primary silo, and has the partition be also managed by a non-primary silo");
            return null;
        }

        /// <summary>
        /// Missing-directory-entry recovery, two silos, lazy deregistration disabled.
        /// </summary>
        [Fact, TestCategory("Functional")]
        public async Task MissingActivation_WithoutDirectoryLazyDeregistration_MultiSilo()
        {
            // Negative delay means lazy deregistration is disabled.
            var directoryLazyDeregistrationDelay = TimeSpan.FromMilliseconds(-1);
            var builder = new TestClusterBuilder(2);
            builder.AddSiloBuilderConfigurator<NoForwardingSiloConfigurator>();
            Initialize(builder);
            for (int i = 0; i < 10; i++)
            {
                await MissingActivation_Runner(i, directoryLazyDeregistrationDelay);
            }
        }

        public class NoForwardingSiloConfigurator : ISiloBuilderConfigurator
        {
            public void Configure(ISiloHostBuilder hostBuilder)
            {
                // Disable retries in this case, to make test more predictable.
                hostBuilder.Configure<SiloMessagingOptions>(options => options.MaxForwardCount = 0);
            }
        }

        /// <summary>
        /// Missing-directory-entry recovery, single silo, lazy deregistration enabled.
        /// </summary>
        [Fact, TestCategory("Functional")]
        public async Task MissingActivation_WithDirectoryLazyDeregistration_SingleSilo()
        {
            // Must match the delay configured by LazyDeregistrationDelaySiloConfigurator.
            var lazyDeregistrationDelay = TimeSpan.FromMilliseconds(5000);
            var builder = new TestClusterBuilder(1);
            builder.AddSiloBuilderConfigurator<NoForwardingSiloConfigurator>();
            builder.AddSiloBuilderConfigurator<LazyDeregistrationDelaySiloConfigurator>();
            Initialize(builder);

            for (int i = 0; i < 10; i++)
            {
                await MissingActivation_Runner(i, lazyDeregistrationDelay);
            }
        }

        public class LazyDeregistrationDelaySiloConfigurator : ISiloBuilderConfigurator
        {
            public void Configure(ISiloHostBuilder hostBuilder)
            {
                hostBuilder.Configure<GrainDirectoryOptions>(options => options.LazyDeregistrationDelay = TimeSpan.FromMilliseconds(5000));
            }
        }

        [Fact(Skip = "Needs investigation"), TestCategory("Functional")]
        public async Task MissingActivation_WithoutDirectoryLazyDeregistration_MultiSilo_SecondaryFirst()
        {
            var lazyDeregistrationDelay = TimeSpan.FromMilliseconds(-1);
            var builder = new TestClusterBuilder(2);
            builder.Properties["MaxForwardCount"] = "0";
            builder.AddClientBuilderConfigurator<ClientConfigurator>();
            builder.AddSiloBuilderConfigurator<SiloConfigurator>();
            Initialize(builder);
            await MissingActivation_Runner(1, lazyDeregistrationDelay, true);
        }

        /// <summary>
        /// Core scenario: activate a grain, forcibly unregister its directory entry
        /// via test hooks, then verify calls fail with "Non-existent activation" and
        /// (when lazy deregistration or secondary creation applies) later recover.
        /// </summary>
        private async Task MissingActivation_Runner(
            int grainId,
            TimeSpan lazyDeregistrationDelay,
            bool forceCreationInSecondary = false)
        {
            output.WriteLine("\n\n\n SMissingActivation_Runner.\n\n\n");

            var realGrainId = grainId;
            ITestGrain grain;
            var isMultipleSilosPresent = testCluster.SecondarySilos != null && testCluster.SecondarySilos.Count > 0;
            if (!isMultipleSilosPresent && forceCreationInSecondary)
            {
                throw new InvalidOperationException(
                    "If 'forceCreationInSecondary' is true multiple silos must be present, check the test!");
            }
            var grainSiloAddress = String.Empty;
            var primarySiloAddress = testCluster.Primary.SiloAddress.ToString();
            var secondarySiloAddress = isMultipleSilosPresent
                ? testCluster.SecondarySilos[0].SiloAddress.ToString()
                : String.Empty;

            //
            // We only doing this for multiple silos.
            //
            if (isMultipleSilosPresent && forceCreationInSecondary)
            {
                //
                // Make sure that we proceeding with a grain which is created in the secondary silo for first!
                //
                while (true)
                {
                    this.output.WriteLine($"GetGrain: {realGrainId}");
                    grain = this.testCluster.GrainFactory.GetGrain<ITestGrain>(realGrainId);
                    grainSiloAddress = await grain.GetRuntimeInstanceId();
                    if (grainSiloAddress != secondarySiloAddress)
                    {
                        this.output.WriteLine($"GetGrain: {realGrainId} Primary, skipping.");
                        realGrainId++;
                    }
                    else
                    {
                        this.output.WriteLine($"GetGrain: {realGrainId} Secondary, proceeding.");
                        break;
                    }
                }
            }
            else
            {
                grain = this.testCluster.GrainFactory.GetGrain<ITestGrain>(realGrainId);
            }

            await grain.SetLabel("hello_" + grainId);
            // NOTE(review): despite the name, this variable holds the GrainId, not a GrainReference.
            var grainReference = ((GrainReference)await grain.GetGrainReference()).GrainId;

            // Call again to make sure the grain is in all silo caches
            for (int i = 0; i < 10; i++)
            {
                var label = await grain.GetLabel();
            }

            // Now we know that there's an activation; try both silos and deactivate it incorrectly
            int primaryActivation = await this.testCluster.Client.GetTestHooks(testCluster.Primary)
                .UnregisterGrainForTesting(grainReference);
            int secondaryActivation = 0;
            if (isMultipleSilosPresent)
            {
                secondaryActivation = await this.testCluster.Client.GetTestHooks(testCluster.SecondarySilos[0])
                    .UnregisterGrainForTesting(grainReference);
            }
            Assert.Equal(1, primaryActivation + secondaryActivation);

            // If we try again, we shouldn't find any
            primaryActivation = await this.testCluster.Client.GetTestHooks(testCluster.Primary)
                .UnregisterGrainForTesting(grainReference);
            secondaryActivation = 0;
            if (isMultipleSilosPresent)
            {
                secondaryActivation = await this.testCluster.Client.GetTestHooks(testCluster.SecondarySilos[0])
                    .UnregisterGrainForTesting(grainReference);
            }
            Assert.Equal(0, primaryActivation + secondaryActivation);

            if (lazyDeregistrationDelay > TimeSpan.Zero)
            {
                // Wait a bit
                TimeSpan pause = lazyDeregistrationDelay.Multiply(2);
                // NOTE(review): the $-interpolated string renders "{0}" as literal 0,
                // so 'pause' is never printed; should be either $"...{pause}..." or a
                // non-interpolated format string.
                output.WriteLine($"Pausing for {0} because we are using lazy deregistration", pause);
                await Task.Delay(pause);
            }

            // Now send a message again; it should fail);
            var firstEx = await Assert.ThrowsAsync<OrleansMessageRejectionException>(() => grain.GetLabel());
            Assert.Contains("Non-existent activation", firstEx.Message);
            output.WriteLine("Got 1st Non-existent activation Exception, as expected.");

            // Try again; it should succeed or fail, based on doLazyDeregistration
            if (lazyDeregistrationDelay > TimeSpan.Zero || forceCreationInSecondary)
            {
                var newLabel = "";
                newLabel = await grain.GetLabel();
                // Since a new instance returned, we've to check that the label is no more prefixed with "hello_"
                Assert.Equal(grainId.ToString(), newLabel);
                output.WriteLine($"After 2nd call. newLabel = '{newLabel}'");

                if (forceCreationInSecondary)
                {
                    grainSiloAddress = await grain.GetRuntimeInstanceId();
                    // NOTE(review): this line is duplicated below; the second WriteLine
                    // appears to be an accidental copy.
                    output.WriteLine(
                        grainSiloAddress == primarySiloAddress ? "Recreated in Primary" : "Recreated in Secondary");
                    output.WriteLine(
                        grainSiloAddress == primarySiloAddress ? "Recreated in Primary" : "Recreated in Secondary");
                }
            }
            else
            {
                var secondEx = await Assert.ThrowsAsync<OrleansMessageRejectionException>(() => grain.GetLabel());
                output.WriteLine("Got 2nd exception - " + secondEx.GetBaseException().Message);
                Assert.True(
                    secondEx.Message.Contains("duplicate activation")
                    || secondEx.Message.Contains("Non-existent activation")
                    || secondEx.Message.Contains("Forwarding failed"),
                    "2nd exception message: " + secondEx);
                output.WriteLine("Got 2nd exception, as expected.");
            }
        }
    }
}
using Orleans;
using Orleans.Runtime;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using Microsoft.Extensions.Logging;

namespace OrleansTelemetryConsumers.Counters
{
    /// <summary>
    /// Telemetry consumer that writes metrics to predefined performance counters.
    /// Counter definitions are built lazily on first write; grain-type counters are
    /// added once grain types become available (see <see cref="OrleansPerfCounterTelemetryConsumer()"/> and Initialize).
    /// </summary>
    public class OrleansPerfCounterTelemetryConsumer : IMetricTelemetryConsumer
    {
        internal const string CATEGORY_NAME = "OrleansRuntime";
        internal const string CATEGORY_DESCRIPTION = "Orleans Runtime Counters";

        private const string ExplainHowToCreateOrleansPerfCounters = "Run 'InstallUtil.exe OrleansTelemetryConsumers.Counters.dll' as Administrator to create perf counters for Orleans.";

        private readonly ILogger logger;

        // Registered counter definitions; rebuilt wholesale by Initialize() under initializationLock.
        private readonly List<PerfCounterConfigData> perfCounterData = new List<PerfCounterConfigData>();

        // Set by InstallCounters(); while true, Initialize() skips creating PerformanceCounter instances
        // (only the definitions are needed for registration).
        private bool isInstalling = false;

        // Guards the swap of perfCounterData contents in Initialize().
        private readonly object initializationLock = new object();

        // Thread-safe one-time initialization; evaluated on first metric write (or install).
        private readonly Lazy<bool> isInitialized;

        // True once per-grain-type activation counters have been added; until then, each
        // WriteMetric call re-attempts Initialize() to pick them up.
        private bool initializedGrainCounters;

        /// <summary>
        /// Default constructor
        /// </summary>
        /// <param name="loggerFactory">Factory used to create this consumer's logger.</param>
        public OrleansPerfCounterTelemetryConsumer(ILoggerFactory loggerFactory)
        {
            this.logger = loggerFactory.CreateLogger<OrleansPerfCounterTelemetryConsumer>();
            this.isInitialized = new Lazy<bool>(this.Initialize, true);

            if (!AreWindowsPerfCountersAvailable(this.logger))
            {
                this.logger.Warn(ErrorCode.PerfCounterNotFound, "Windows perf counters not found -- defaulting to in-memory counters. " + ExplainHowToCreateOrleansPerfCounters);
            }
        }

        #region Counter Management methods

        /// <summary>
        /// Checks to see if windows perf counters are supported by the OS and the
        /// Orleans category has been registered.
        /// </summary>
        /// <returns>True when running on Windows and the category exists; false otherwise (including on any error).</returns>
        public static bool AreWindowsPerfCountersAvailable(ILogger logger)
        {
            try
            {
                if (Environment.OSVersion.ToString().StartsWith("unix", StringComparison.InvariantCultureIgnoreCase))
                {
                    logger.Warn(ErrorCode.PerfCounterNotFound, "Windows perf counters are only available on Windows :) -- defaulting to in-memory counters.");
                    return false;
                }

                return PerformanceCounterCategory.Exists(CATEGORY_NAME);
            }
            catch (Exception exc)
            {
                // Best-effort probe: any failure is treated as "not available".
                logger.Warn(ErrorCode.PerfCounterCategoryCheckError, string.Format("Ignoring error checking for {0} perf counter category", CATEGORY_NAME), exc);
            }
            return false;
        }

        // Creates a writable (readOnly: false) counter instance in the Orleans category.
        private PerformanceCounter CreatePerfCounter(string perfCounterName)
        {
            this.logger.Debug(ErrorCode.PerfCounterRegistering, "Creating perf counter {0}", perfCounterName);
            return new PerformanceCounter(CATEGORY_NAME, perfCounterName, false);
        }

        // Windows counter name: "<statistic>.Delta" or "<statistic>.Current" depending on config.
        private static string GetPerfCounterName(PerfCounterConfigData cd)
        {
            return cd.Name.Name + "." + (cd.UseDeltaValue ? "Delta" : "Current");
        }

        /// <summary>
        /// Register Orleans perf counters with Windows
        /// </summary>
        /// <remarks>Note: Program needs to be running as Administrator to be able to register Windows perf counters.</remarks>
        internal void InstallCounters()
        {
            // Re-create from scratch if the category already exists.
            if (PerformanceCounterCategory.Exists(CATEGORY_NAME))
                DeleteCounters();

            this.isInstalling = true;
            if (!this.isInitialized.Value)
            {
                var msg = "Unable to install Windows Performance counters";
                this.logger.Warn(ErrorCode.PerfCounterNotFound, msg);
                throw new InvalidOperationException(msg);
            }

            var collection = new CounterCreationDataCollection();

            foreach (PerfCounterConfigData cd in this.perfCounterData)
            {
                var perfCounterName = GetPerfCounterName(cd);
                var description = cd.Name.Name;

                var msg = string.Format("Registering perf counter {0}", perfCounterName);
                Console.WriteLine(msg);

                collection.Add(new CounterCreationData(perfCounterName, description, PerformanceCounterType.NumberOfItems32));
            }

            PerformanceCounterCategory.Create(
                CATEGORY_NAME,
                CATEGORY_DESCRIPTION,
                PerformanceCounterCategoryType.SingleInstance,
                collection);
        }

        /// <summary>
        /// Delete any existing perf counters registered with Windows
        /// </summary>
        /// <remarks>Note: Program needs to be running as Administrator to be able to delete Windows perf counters.</remarks>
        internal void DeleteCounters()
        {
            PerformanceCounterCategory.Delete(CATEGORY_NAME);
        }

        // Linear lookup of a counter definition by its Windows counter name; null if not registered.
        private PerfCounterConfigData GetCounter(string counterName)
        {
            return this.perfCounterData.Where(pcd => GetPerfCounterName(pcd) == counterName).SingleOrDefault();
        }

        #endregion

        #region IMetricTelemetryConsumer Methods

        /// <summary>
        /// Increment metric.
        /// </summary>
        /// <param name="name">metric name</param>
        public void IncrementMetric(string name) => WriteMetric(name, UpdateMode.Increment);

        /// <summary>
        /// Increment metric by value.
        /// </summary>
        /// <param name="name">metric name</param>
        /// <param name="value">metric value</param>
        public void IncrementMetric(string name, double value) => WriteMetric(name, UpdateMode.Increment, value);

        /// <summary>
        /// Track metric value
        /// </summary>
        /// <param name="name">metric name</param>
        /// <param name="value">metric value</param>
        /// <param name="properties">related properties (ignored by this consumer)</param>
        public void TrackMetric(string name, double value, IDictionary<string, string> properties = null) => WriteMetric(name, UpdateMode.Set, value);

        /// <summary>
        /// Track metric value
        /// </summary>
        /// <param name="name">metric name</param>
        /// <param name="value">metric value (stored as ticks)</param>
        /// <param name="properties">related properties (ignored by this consumer)</param>
        public void TrackMetric(string name, TimeSpan value, IDictionary<string, string> properties = null) => WriteMetric(name, UpdateMode.Set, value.Ticks);

        /// <summary>
        /// Decrement metric
        /// </summary>
        /// <param name="name">metric name</param>
        public void DecrementMetric(string name) => WriteMetric(name, UpdateMode.Decrement);

        /// <summary>
        /// Decrement metric by value
        /// </summary>
        /// <param name="name">metric name</param>
        /// <param name="value">metric value</param>
        public void DecrementMetric(string name, double value) => WriteMetric(name, UpdateMode.Decrement, value);

        /// <summary>
        /// Write all pending metrics. No-op: writes go straight to the counters.
        /// </summary>
        public void Flush() { }

        /// <summary>
        /// Close telemetry consumer. No-op: there is nothing to release here.
        /// </summary>
        public void Close() { }

        // Builds the full counter list (static + per-grain-type) and swaps it in.
        // Returns false on any failure; never throws.
        private bool Initialize()
        {
            try
            {
                // (1) Start with list of static counters
                var newPerfCounterData = new List<PerfCounterConfigData>(PerfCounterConfigData.StaticPerfCounters);

                // TODO: get rid of this static access. Telemetry consumers now allow being injected with dependencies, so extract it as such
                var grainTypes = CrashUtils.GrainTypes;
                if (grainTypes != null)
                {
                    // (2) Then search for grain DLLs and pre-create activation counters for any grain types found
                    foreach (var grainType in grainTypes)
                    {
                        var counterName = new StatisticName(StatisticNames.GRAIN_COUNTS_PER_GRAIN, grainType);
                        newPerfCounterData.Add(new PerfCounterConfigData { Name = counterName, UseDeltaValue = false });
                    }

                    this.initializedGrainCounters = true;
                }

                // During installation only the definitions are needed; skip creating live counters.
                if (!this.isInstalling)
                {
                    foreach (var cd in newPerfCounterData)
                    {
                        var perfCounterName = GetPerfCounterName(cd);
                        cd.PerfCounter = CreatePerfCounter(perfCounterName);
                    }
                }

                // Atomically replace the published list.
                lock (this.initializationLock)
                {
                    this.perfCounterData.Clear();
                    this.perfCounterData.AddRange(newPerfCounterData);
                }

                return true;
            }
            catch
            {
                // Deliberate best-effort: initialization failure disables counter writes.
                return false;
            }
        }

        // Applies a metric update to the matching Windows counter, if one is registered.
        private void WriteMetric(string name, UpdateMode mode = UpdateMode.Increment, double? value = null)
        {
            if (!this.isInitialized.Value)
                return;

            // Attempt to initialize grain-specific counters if they haven't been initialized yet.
            if (!this.initializedGrainCounters)
            {
                this.Initialize();
            }

            PerfCounterConfigData cd = GetCounter(name);
            if (cd == null || cd.PerfCounter == null)
                return;

            StatisticName statsName = cd.Name;
            string perfCounterName = GetPerfCounterName(cd);

            try
            {
                if (this.logger.IsEnabled(LogLevel.Trace))
                    this.logger.Trace(ErrorCode.PerfCounterWriting, "Writing perf counter {0}", perfCounterName);

                switch (mode)
                {
                    case UpdateMode.Increment:
                        if (value.HasValue)
                        {
                            cd.PerfCounter.IncrementBy((long)value.Value);
                        }
                        else
                        {
                            cd.PerfCounter.Increment();
                        }
                        break;
                    case UpdateMode.Decrement:
                        if (value.HasValue)
                        {
                            // PerformanceCounter has no DecrementBy; subtract via RawValue.
                            cd.PerfCounter.RawValue = cd.PerfCounter.RawValue - (long)value.Value;
                        }
                        else
                        {
                            cd.PerfCounter.Decrement();
                        }
                        break;
                    case UpdateMode.Set:
                        cd.PerfCounter.RawValue = (long)value.Value;
                        break;
                }
            }
            catch (Exception ex)
            {
                // Counter writes are best-effort; log and continue.
                this.logger.Error(ErrorCode.PerfCounterUnableToWrite, string.Format("Unable to write to Windows perf counter '{0}'", statsName), ex);
            }
        }

        // How WriteMetric applies the supplied value to the counter.
        private enum UpdateMode
        {
            Increment = 0,
            Decrement,
            Set
        }

        #endregion
    }
}
/**
 * @copyright
 *
 * Copyright 2013-2015 Splunk, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"): you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.Net.Http;
using System.Threading.Tasks;

namespace Splunk.Logging
{
    /// <summary>
    /// Trace listener implementation for Splunk HTTP event collector.
    /// Usage example:
    /// <code>
    /// var trace = new TraceSource("logger");
    /// trace.listeners.Add(new HttpEventCollectorTraceListener(
    ///     uri: new Uri("https://localhost:8088"),
    ///     token: "E6099437-3E1F-4793-90AB-0E5D9438A918"));
    /// trace.TraceEvent(TraceEventType.Information, 1, "hello world");
    /// </code>
    ///
    /// Trace listener supports events batching (off by default) that allows to
    /// decrease number of HTTP requests to Splunk server. The batching is
    /// controlled by three parameters: "batch size count", "batch size bytes"
    /// and "batch interval". If batch size parameters are specified then
    /// events are accumulated and sent together once the batch exceeds one of
    /// its limits. Batch interval controls a timer that forcefully sends the
    /// events batch regardless of its size.
    /// <code>
    /// var trace = new TraceSource("logger");
    /// trace.listeners.Add(new HttpEventCollectorTraceListener(
    ///     uri: new Uri("https://localhost:8088"),
    ///     token: "E6099437-3E1F-4793-90AB-0E5D9438A918",
    ///     batchInterval: 1000, // send events at least every second
    ///     batchSizeBytes: 1024, // 1KB
    ///     batchSizeCount: 10) // events batch contains at most 10 individual events
    /// );
    /// trace.TraceEvent(TraceEventType.Information, 1, "hello batching");
    /// </code>
    ///
    /// To improve system performance tracing events are sent asynchronously and
    /// events with the same timestamp (that has 1 millisecond resolution) may
    /// be indexed out of order by Splunk. sendMode parameter triggers
    /// "sequential mode" that guarantees preserving events order. In
    /// "sequential mode" performance of sending events to the server is lower.
    ///
    /// There is an ability to plug middleware components that act before and
    /// after posting data.
    /// For example:
    /// <code>
    /// new HttpEventCollectorTraceListener(
    ///     uri: new Uri("https://localhost:8088"),
    ///     token: "E6099437-3E1F-4793-90AB-0E5D9438A918",
    ///     middleware: (request, next) => {
    ///         // preprocess request
    ///         var response = next(request); // post data
    ///         // process response
    ///         return response;
    ///     }
    ///     ...
    /// )
    /// </code>
    /// Middleware components can apply additional logic before and after posting
    /// the data to Splunk server. See HttpEventCollectorResendMiddleware.
    ///
    /// A user application code can register an error handler that is invoked
    /// when HTTP event collector isn't able to send data.
    /// <code>
    /// var listener = new HttpEventCollectorTraceListener(
    ///     uri: new Uri("https://localhost:8088"),
    ///     token: "E6099437-3E1F-4793-90AB-0E5D9438A918")
    /// );
    /// listener.AddLoggingFailureHandler((sender, HttpEventCollectorException e) =>
    /// {
    ///     // do something
    /// });
    /// trace.listeners.Add(listener);
    /// </code>
    /// HttpEventCollectorException contains information about the error and the list of
    /// events caused the problem.
    /// </summary>
    public class HttpEventCollectorTraceListener : TraceListener
    {
        // Performs the actual (possibly batched) HTTP posting of events.
        private HttpEventCollectorSender sender;

        // Optional custom event formatter, forwarded to the sender.
        public HttpEventCollectorSender.HttpEventCollectorFormatter formatter;

        /// <summary>
        /// HttpEventCollectorTraceListener constructor.
        /// </summary>
        /// <param name="uri">Splunk server uri, for example https://localhost:8088.</param>
        /// <param name="token">HTTP event collector authorization token.</param>
        /// <param name="metadata">Logger metadata.</param>
        /// <param name="sendMode">Send mode of the events.</param>
        /// <param name="batchInterval">Batch interval in milliseconds.</param>
        /// <param name="batchSizeBytes">Batch max size.</param>
        /// <param name="batchSizeCount">Max number of individual events in batch.</param>
        /// <param name="middleware">
        /// HTTP client middleware. This allows to plug an HttpClient handler that
        /// intercepts logging HTTP traffic.
        /// </param>
        /// <param name="formatter">Optional custom event formatter.</param>
        public HttpEventCollectorTraceListener(
            Uri uri, string token,
            HttpEventCollectorEventInfo.Metadata metadata = null,
            HttpEventCollectorSender.SendMode sendMode = HttpEventCollectorSender.SendMode.Sequential,
            int batchInterval = HttpEventCollectorSender.DefaultBatchInterval,
            int batchSizeBytes = HttpEventCollectorSender.DefaultBatchSize,
            int batchSizeCount = HttpEventCollectorSender.DefaultBatchCount,
            HttpEventCollectorSender.HttpEventCollectorMiddleware middleware = null,
            HttpEventCollectorSender.HttpEventCollectorFormatter formatter = null)
        {
            this.formatter = formatter;
            sender = new HttpEventCollectorSender(
                uri, token, metadata,
                sendMode,
                batchInterval, batchSizeBytes, batchSizeCount,
                middleware,
                formatter);
        }

        /// <summary>
        /// HttpEventCollectorTraceListener constructor. Instantiates HttpEventCollectorTraceListener
        /// when retriesOnError parameter is specified; wires in the resend middleware.
        /// </summary>
        /// <param name="uri">Splunk server uri, for example https://localhost:8088.</param>
        /// <param name="token">HTTP event collector authorization token.</param>
        /// <param name="retriesOnError">Number of retries when network problem is detected</param>
        /// <param name="metadata">Logger metadata.</param>
        /// <param name="sendMode">Send mode of the events.</param>
        /// <param name="batchInterval">Batch interval in milliseconds.</param>
        /// <param name="batchSizeBytes">Batch max size.</param>
        /// <param name="batchSizeCount">Max number of individual events in batch.</param>
        public HttpEventCollectorTraceListener(
            Uri uri, string token,
            int retriesOnError,
            HttpEventCollectorEventInfo.Metadata metadata = null,
            HttpEventCollectorSender.SendMode sendMode = HttpEventCollectorSender.SendMode.Sequential,
            int batchInterval = HttpEventCollectorSender.DefaultBatchInterval,
            int batchSizeBytes = HttpEventCollectorSender.DefaultBatchSize,
            int batchSizeCount = HttpEventCollectorSender.DefaultBatchCount)
            : this(uri, token, metadata,
                   sendMode,
                   batchInterval, batchSizeBytes, batchSizeCount,
                   (new HttpEventCollectorResendMiddleware(retriesOnError)).Plugin)
        {
        }

        /// <summary>
        /// Add a handler to be invoked when some problem is detected during the
        /// operation of HTTP event collector and it cannot be fixed by resending the data.
        /// </summary>
        /// <param name="handler">A function to handle the exception.</param>
        public void AddLoggingFailureHandler(Action<HttpEventCollectorException> handler)
        {
            sender.OnError += handler;
        }

        #region TraceListener output callbacks

        public override void Write(string message)
        {
            sender.Send(message: message);
        }

        public override void WriteLine(string message)
        {
            sender.Send(message: message);
        }

        public override void TraceData(
            TraceEventCache eventCache,
            string source,
            TraceEventType eventType,
            int id,
            params object[] data)
        {
            sender.Send(
                id: id.ToString(),
                severity: eventType.ToString(),
                data: data
            );
        }

        public override void TraceEvent(
            TraceEventCache eventCache,
            string source,
            TraceEventType eventType,
            int id)
        {
            sender.Send(
                id: id.ToString(),
                severity: eventType.ToString()
            );
        }

        public override void TraceEvent(
            TraceEventCache eventCache,
            string source,
            TraceEventType eventType,
            int id,
            string message)
        {
            sender.Send(
                id: id.ToString(),
                severity: eventType.ToString(),
                message: message
            );
        }

        public override void TraceEvent(
            TraceEventCache eventCache,
            string source,
            TraceEventType eventType,
            int id,
            string format,
            params object[] args)
        {
            // args may legitimately be null when the caller passes a plain message as format.
            string message = args != null ?
                string.Format(CultureInfo.InvariantCulture, format, args) :
                format;
            sender.Send(
                id: id.ToString(),
                severity: eventType.ToString(),
                message: message
            );
        }

        public override void TraceTransfer(
            TraceEventCache eventCache,
            string source,
            int id,
            string message,
            Guid relatedActivityId)
        {
            sender.Send(
                id: id.ToString(),
                message: message,
                data: relatedActivityId
            );
        }

        #endregion

        /// <summary>
        /// Flush all events.
        /// </summary>
        public Task FlushAsync()
        {
            return sender.FlushAsync();
        }

        public override void Close()
        {
            sender.FlushSync();
        }

        protected override void Dispose(bool disposing)
        {
            // Fix: only flush on explicit disposal. When invoked from the finalizer
            // (disposing == false), the managed sender may already be finalized, so
            // touching it is unsafe. Also forward to base.Dispose as required by the
            // standard dispose pattern.
            if (disposing)
            {
                Close();
            }
            base.Dispose(disposing);
        }

        ~HttpEventCollectorTraceListener()
        {
            Dispose(false);
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

/******************************************************************************
 * This file is auto-generated from a template file by the GenerateTests.csx  *
 * script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make    *
 * changes, please update the corresponding template and run according to the *
 * directions listed in the file.                                             *
 ******************************************************************************/

using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;

namespace JIT.HardwareIntrinsics.X86
{
    public static partial class Program
    {
        // Exercises Sse41.TestC over Vector128<Int16> through every calling-convention
        // scenario (direct, reflection, static field, local, instance field) and throws
        // if any scenario produced an unexpected result.
        private static void TestCInt16()
        {
            var test = new BooleanBinaryOpTest__TestCInt16();

            if (test.IsSupported)
            {
                // Validates basic functionality works, using Unsafe.Read
                test.RunBasicScenario_UnsafeRead();

                if (Sse2.IsSupported)
                {
                    // Validates basic functionality works, using Load
                    test.RunBasicScenario_Load();

                    // Validates basic functionality works, using LoadAligned
                    test.RunBasicScenario_LoadAligned();
                }

                // Validates calling via reflection works, using Unsafe.Read
                test.RunReflectionScenario_UnsafeRead();

                if (Sse2.IsSupported)
                {
                    // Validates calling via reflection works, using Load
                    test.RunReflectionScenario_Load();

                    // Validates calling via reflection works, using LoadAligned
                    test.RunReflectionScenario_LoadAligned();
                }

                // Validates passing a static member works
                test.RunClsVarScenario();

                // Validates passing a local works, using Unsafe.Read
                test.RunLclVarScenario_UnsafeRead();

                if (Sse2.IsSupported)
                {
                    // Validates passing a local works, using Load
                    test.RunLclVarScenario_Load();

                    // Validates passing a local works, using LoadAligned
                    test.RunLclVarScenario_LoadAligned();
                }

                // Validates passing the field of a local works
                test.RunLclFldScenario();

                // Validates passing an instance member works
                test.RunFldScenario();
            }
            else
            {
                // Validates we throw on unsupported hardware
                test.RunUnsupportedScenario();
            }

            if (!test.Succeeded)
            {
                throw new Exception("One or more scenarios did not complete as expected.");
            }
        }
    }

    // Test harness for Sse41.TestC(Vector128<Int16>, Vector128<Int16>).
    // TestC returns true iff (~left & right) is all zero bits (the CF semantics of PTEST),
    // which is what ValidateResult recomputes element-wise.
    public sealed unsafe class BooleanBinaryOpTest__TestCInt16
    {
        private const int VectorSize = 16;

        private const int Op1ElementCount = VectorSize / sizeof(Int16);
        private const int Op2ElementCount = VectorSize / sizeof(Int16);

        private static Int16[] _data1 = new Int16[Op1ElementCount];
        private static Int16[] _data2 = new Int16[Op2ElementCount];

        // Static operands, filled once by the static constructor.
        private static Vector128<Int16> _clsVar1;
        private static Vector128<Int16> _clsVar2;

        // Instance operands, filled by the instance constructor.
        private Vector128<Int16> _fld1;
        private Vector128<Int16> _fld2;

        // Pointer-accessible copies of the operand data (aligned and unaligned views).
        private BooleanBinaryOpTest__DataTable<Int16, Int16> _dataTable;

        static BooleanBinaryOpTest__TestCInt16()
        {
            var random = new Random();

            for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = (short)(random.Next(short.MinValue, short.MaxValue)); }
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int16>, byte>(ref _clsVar1), ref Unsafe.As<Int16, byte>(ref _data1[0]), VectorSize);
            for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = (short)(random.Next(short.MinValue, short.MaxValue)); }
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int16>, byte>(ref _clsVar2), ref Unsafe.As<Int16, byte>(ref _data2[0]), VectorSize);
        }

        public BooleanBinaryOpTest__TestCInt16()
        {
            Succeeded = true;

            var random = new Random();

            for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = (short)(random.Next(short.MinValue, short.MaxValue)); }
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int16>, byte>(ref _fld1), ref Unsafe.As<Int16, byte>(ref _data1[0]), VectorSize);
            for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = (short)(random.Next(short.MinValue, short.MaxValue)); }
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Int16>, byte>(ref _fld2), ref Unsafe.As<Int16, byte>(ref _data2[0]), VectorSize);

            // Re-randomize so the data table exercises values distinct from the fields above.
            for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = (short)(random.Next(short.MinValue, short.MaxValue)); }
            for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = (short)(random.Next(short.MinValue, short.MaxValue)); }
            _dataTable = new BooleanBinaryOpTest__DataTable<Int16, Int16>(_data1, _data2, VectorSize);
        }

        public bool IsSupported => Sse41.IsSupported;

        public bool Succeeded { get; set; }

        public void RunBasicScenario_UnsafeRead()
        {
            var result = Sse41.TestC(
                Unsafe.Read<Vector128<Int16>>(_dataTable.inArray1Ptr),
                Unsafe.Read<Vector128<Int16>>(_dataTable.inArray2Ptr)
            );

            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, result);
        }

        public void RunBasicScenario_Load()
        {
            var result = Sse41.TestC(
                Sse2.LoadVector128((Int16*)(_dataTable.inArray1Ptr)),
                Sse2.LoadVector128((Int16*)(_dataTable.inArray2Ptr))
            );

            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, result);
        }

        public void RunBasicScenario_LoadAligned()
        {
            var result = Sse41.TestC(
                Sse2.LoadAlignedVector128((Int16*)(_dataTable.inArray1Ptr)),
                Sse2.LoadAlignedVector128((Int16*)(_dataTable.inArray2Ptr))
            );

            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, result);
        }

        public void RunReflectionScenario_UnsafeRead()
        {
            var method = typeof(Sse41).GetMethod(nameof(Sse41.TestC), new Type[] { typeof(Vector128<Int16>), typeof(Vector128<Int16>) });

            if (method != null)
            {
                var result = method.Invoke(null, new object[] {
                                        Unsafe.Read<Vector128<Int16>>(_dataTable.inArray1Ptr),
                                        Unsafe.Read<Vector128<Int16>>(_dataTable.inArray2Ptr)
                                     });

                ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, (bool)(result));
            }
        }

        public void RunReflectionScenario_Load()
        {
            var method = typeof(Sse41).GetMethod(nameof(Sse41.TestC), new Type[] { typeof(Vector128<Int16>), typeof(Vector128<Int16>) });

            if (method != null)
            {
                var result = method.Invoke(null, new object[] {
                                        Sse2.LoadVector128((Int16*)(_dataTable.inArray1Ptr)),
                                        Sse2.LoadVector128((Int16*)(_dataTable.inArray2Ptr))
                                     });

                ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, (bool)(result));
            }
        }

        public void RunReflectionScenario_LoadAligned()
        {
            var method = typeof(Sse41).GetMethod(nameof(Sse41.TestC), new Type[] { typeof(Vector128<Int16>), typeof(Vector128<Int16>) });

            if (method != null)
            {
                var result = method.Invoke(null, new object[] {
                                        Sse2.LoadAlignedVector128((Int16*)(_dataTable.inArray1Ptr)),
                                        Sse2.LoadAlignedVector128((Int16*)(_dataTable.inArray2Ptr))
                                     });

                ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, (bool)(result));
            }
        }

        public void RunClsVarScenario()
        {
            var result = Sse41.TestC(
                _clsVar1,
                _clsVar2
            );

            ValidateResult(_clsVar1, _clsVar2, result);
        }

        public void RunLclVarScenario_UnsafeRead()
        {
            var left = Unsafe.Read<Vector128<Int16>>(_dataTable.inArray1Ptr);
            var right = Unsafe.Read<Vector128<Int16>>(_dataTable.inArray2Ptr);
            var result = Sse41.TestC(left, right);

            ValidateResult(left, right, result);
        }

        public void RunLclVarScenario_Load()
        {
            var left = Sse2.LoadVector128((Int16*)(_dataTable.inArray1Ptr));
            var right = Sse2.LoadVector128((Int16*)(_dataTable.inArray2Ptr));
            var result = Sse41.TestC(left, right);

            ValidateResult(left, right, result);
        }

        public void RunLclVarScenario_LoadAligned()
        {
            var left = Sse2.LoadAlignedVector128((Int16*)(_dataTable.inArray1Ptr));
            var right = Sse2.LoadAlignedVector128((Int16*)(_dataTable.inArray2Ptr));
            var result = Sse41.TestC(left, right);

            ValidateResult(left, right, result);
        }

        public void RunLclFldScenario()
        {
            var test = new BooleanBinaryOpTest__TestCInt16();
            var result = Sse41.TestC(test._fld1, test._fld2);

            ValidateResult(test._fld1, test._fld2, result);
        }

        public void RunFldScenario()
        {
            var result = Sse41.TestC(_fld1, _fld2);

            ValidateResult(_fld1, _fld2, result);
        }

        public void RunUnsupportedScenario()
        {
            // On unsupported hardware the intrinsic must throw PlatformNotSupportedException.
            Succeeded = false;

            try
            {
                RunBasicScenario_UnsafeRead();
            }
            catch (PlatformNotSupportedException)
            {
                Succeeded = true;
            }
        }

        private void ValidateResult(Vector128<Int16> left, Vector128<Int16> right, bool result, [CallerMemberName] string method = "")
        {
            Int16[] inArray1 = new Int16[Op1ElementCount];
            Int16[] inArray2 = new Int16[Op2ElementCount];

            Unsafe.Write(Unsafe.AsPointer(ref inArray1[0]), left);
            Unsafe.Write(Unsafe.AsPointer(ref inArray2[0]), right);

            ValidateResult(inArray1, inArray2, result, method);
        }

        private void ValidateResult(void* left, void* right, bool result, [CallerMemberName] string method = "")
        {
            Int16[] inArray1 = new Int16[Op1ElementCount];
            Int16[] inArray2 = new Int16[Op2ElementCount];

            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int16, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(left), VectorSize);
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Int16, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(right), VectorSize);

            ValidateResult(inArray1, inArray2, result, method);
        }

        private void ValidateResult(Int16[] left, Int16[] right, bool result, [CallerMemberName] string method = "")
        {
            // Software model of PTEST's CF: true iff every bit set in right is also set in left.
            var expectedResult = true;

            for (var i = 0; i < Op1ElementCount; i++)
            {
                expectedResult &= ((~left[i] & right[i]) == 0);
            }

            if (expectedResult != result)
            {
                Succeeded = false;

                Console.WriteLine($"{nameof(Sse41)}.{nameof(Sse41.TestC)}<Int16>(Vector128<Int16>, Vector128<Int16>): {method} failed:");
                Console.WriteLine($"    left: ({string.Join(", ", left)})");
                Console.WriteLine($"   right: ({string.Join(", ", right)})");
                Console.WriteLine($"  result: ({string.Join(", ", result)})");
                Console.WriteLine();
            }
        }
    }
}
#pragma warning disable 0168 using System; using System.Collections.Generic; using System.Linq; using nHydrate.Generator.Common.Util; using System.ComponentModel; using nHydrate.Generator.Common.GeneratorFramework; using DslModeling = global::Microsoft.VisualStudio.Modeling; namespace nHydrate.Dsl { partial class Field : nHydrate.Dsl.IContainerParent, nHydrate.Dsl.IField, IDirtyable { #region Constructors /// <summary> /// Constructor /// </summary> /// <param name="store">Store where new element is to be created.</param> /// <param name="propertyAssignments">List of domain property id/value pairs to set once the element is created.</param> public Field(DslModeling::Store store, params DslModeling::PropertyAssignment[] propertyAssignments) : this(store != null ? store.DefaultPartitionForClass(DomainClassId) : null, propertyAssignments) { } /// <summary> /// Constructor /// </summary> /// <param name="partition">Partition where new element is to be created.</param> /// <param name="propertyAssignments">List of domain property id/value pairs to set once the element is created.</param> public Field(DslModeling::Partition partition, params DslModeling::PropertyAssignment[] propertyAssignments) : base(partition, propertyAssignments) { } #endregion /// <summary> /// This is the image to use on the diagram. 
If null one will be calculated /// </summary> internal System.Drawing.Bitmap CachedImage { get; set; } #region Names public string PascalName { get { if (!string.IsNullOrEmpty(this.CodeFacade)) return StringHelper.DatabaseNameToPascalCase(this.CodeFacade); else return StringHelper.DatabaseNameToPascalCase(this.Name); } } public string DatabaseName => this.Name; #endregion #region Methods public virtual bool CanHaveDefault() { switch (this.DataType) { case DataTypeConstants.BigInt: case DataTypeConstants.Binary: case DataTypeConstants.Bit: case DataTypeConstants.Char: case DataTypeConstants.Date: case DataTypeConstants.DateTime: case DataTypeConstants.DateTime2: case DataTypeConstants.Decimal: case DataTypeConstants.Float: case DataTypeConstants.Image: case DataTypeConstants.Int: case DataTypeConstants.Money: case DataTypeConstants.NChar: case DataTypeConstants.NText: case DataTypeConstants.NVarChar: case DataTypeConstants.Real: case DataTypeConstants.SmallDateTime: case DataTypeConstants.SmallInt: case DataTypeConstants.SmallMoney: case DataTypeConstants.Text: case DataTypeConstants.Time: case DataTypeConstants.TinyInt: case DataTypeConstants.UniqueIdentifier: case DataTypeConstants.VarBinary: case DataTypeConstants.VarChar: return true; case DataTypeConstants.Structured: case DataTypeConstants.Timestamp: case DataTypeConstants.Udt: case DataTypeConstants.Variant: case DataTypeConstants.Xml: case DataTypeConstants.DateTimeOffset: return false; default: return false; } } /// <summary> /// Gets the C# code equivalent for this default value /// </summary> /// <returns></returns> public virtual string GetCodeDefault() { var defaultValue = string.Empty; var userValue = this.Default + string.Empty; if ((this.DataType == DataTypeConstants.DateTime) || (this.DataType == DataTypeConstants.SmallDateTime)) { if (StringHelper.Match(userValue, "getdate", true) || StringHelper.Match(userValue, "getdate()", true) || StringHelper.Match(userValue, "sysdatetime", true) || 
StringHelper.Match(userValue, "sysdatetime()", true)) { defaultValue = String.Format("DateTime.Now", this.PascalName); } else if (StringHelper.Match(userValue, "getutcdate", true) || StringHelper.Match(userValue, "getutcdate()", true)) { defaultValue = String.Format("DateTime.UtcNow", this.PascalName); } else if (userValue.ToLower().StartsWith("getdate+") || userValue.ToLower().StartsWith("sysdatetime+")) { var br = userValue.IndexOf("+") + 1; var t = userValue.Substring(br, userValue.Length - br); var tarr = t.Split('-'); if (tarr.Length == 2) { if (tarr[1] == "year") defaultValue = String.Format("DateTime.Now.AddYears(" + tarr[0] + ")", this.PascalName); else if (tarr[1] == "month") defaultValue = String.Format("DateTime.Now.AddMonths(" + tarr[0] + ")", this.PascalName); else if (tarr[1] == "day") defaultValue = String.Format("DateTime.Now.AddDays(" + tarr[0] + ")", this.PascalName); } } //else if (this.DataType == DataTypeConstants.SmallDateTime) //{ // defaultValue = String.Format("new DateTime(1900, 1, 1)", this.PascalName); //} //else //{ // defaultValue = String.Format("new DateTime(1753, 1, 1)", this.PascalName); //} } else if (this.DataType == DataTypeConstants.Char) { defaultValue = "\" \""; if (userValue.Length == 1) defaultValue = "@\"" + userValue.First().ToString().Replace("\"", @"""") + "\""; } else if (this.DataType.IsBinaryType()) { defaultValue = "new System.Byte[] { " + userValue.ConvertToHexArrayString() + " }"; } //else if (this.DataType == DataTypeConstants.DateTimeOffset) //{ // defaultValue = "DateTimeOffset.MinValue"; //} //else if (this.IsDateType) //{ // defaultValue = "System.DateTime.MinValue"; //} //else if (this.DataType == DataTypeConstants.Time) //{ // defaultValue = "0"; //} else if (this.DataType == DataTypeConstants.UniqueIdentifier) { if ((StringHelper.Match(userValue, "newid", true)) || (StringHelper.Match(userValue, "newid()", true))) defaultValue = "Guid.NewGuid()"; else if (string.IsNullOrEmpty(userValue)) defaultValue = 
"System.Guid.Empty"; else { var gv = userValue.Replace("'", string.Empty); if (Guid.TryParse(gv, out _)) defaultValue = "new Guid(\"" + gv + "\")"; } } else if (this.DataType.IsIntegerType()) { defaultValue = "0"; int i; if (int.TryParse(userValue, out i)) defaultValue = userValue; if (this.DataType == DataTypeConstants.BigInt) defaultValue += "L"; } else if (this.DataType.IsNumericType()) { defaultValue = "0"; double d; if (double.TryParse(userValue, out d)) { defaultValue = userValue; if (this.GetCodeType(false) == "decimal") defaultValue += "M"; } } else if (this.DataType == DataTypeConstants.Bit) { if (userValue == "0" || StringHelper.Match(userValue, "false", true)) defaultValue = "false"; else if (userValue == "1" || StringHelper.Match(userValue, "true", true)) defaultValue = "true"; } else { if (this.DataType.IsTextType()) defaultValue = "\"" + userValue.Replace("''", "") + "\""; else defaultValue = "\"" + userValue + "\""; } return defaultValue; } [Browsable(false)] public virtual bool IsValidDefault() { if (!this.CanHaveDefault()) return string.IsNullOrEmpty(this.Default); return IsValidDefault(this.Default); } [Browsable(false)] public virtual bool IsValidDefault(string value) { //No default is valid for everything if (string.IsNullOrEmpty(value)) return true; //There is a default and one is not valid so always false if (!this.CanHaveDefault()) return false; switch (this.DataType) { case DataTypeConstants.BigInt: return long.TryParse(value, out _); case DataTypeConstants.Binary: case DataTypeConstants.Image: case DataTypeConstants.VarBinary: if (string.IsNullOrEmpty(value)) return false; if (value.Length <= 2) return false; if ((value.Substring(0, 2) == "0x") && (value.Length % 2 == 0) && value.Substring(2, value.Length - 2).IsHex()) return true; return false; case DataTypeConstants.Bit: { var q = value.ToLower(); if (q == "1" || q == "0") return true; return bool.TryParse(value, out _); } case DataTypeConstants.Date: return 
!string.IsNullOrEmpty(this.GetCodeDefault()); case DataTypeConstants.DateTime: return !string.IsNullOrEmpty(this.GetCodeDefault()); case DataTypeConstants.DateTime2: return !string.IsNullOrEmpty(this.GetCodeDefault()); case DataTypeConstants.Decimal: return decimal.TryParse(value, out _); case DataTypeConstants.Float: return decimal.TryParse(value, out _); case DataTypeConstants.Int: return int.TryParse(value, out _); case DataTypeConstants.Money: return long.TryParse(value, out _); case DataTypeConstants.Char: case DataTypeConstants.NChar: case DataTypeConstants.NText: case DataTypeConstants.NVarChar: case DataTypeConstants.Text: case DataTypeConstants.VarChar: return true; case DataTypeConstants.Real: return decimal.TryParse(value, out _); case DataTypeConstants.SmallDateTime: return !string.IsNullOrEmpty(this.GetCodeDefault()); case DataTypeConstants.SmallInt: return Int16.TryParse(value, out _); case DataTypeConstants.SmallMoney: return int.TryParse(value, out _); case DataTypeConstants.Time: return !string.IsNullOrEmpty(this.GetCodeDefault()); case DataTypeConstants.TinyInt: return byte.TryParse(value, out byte _); case DataTypeConstants.UniqueIdentifier: { if (value.ToLower() == "newid") return true; try { var v = new Guid(value); return true; } catch { return false; } } case DataTypeConstants.Timestamp: case DataTypeConstants.Structured: case DataTypeConstants.DateTimeOffset: case DataTypeConstants.Udt: case DataTypeConstants.Variant: case DataTypeConstants.Xml: return false; } return false; } public virtual string GetCodeType(bool allowNullable) { return GetCodeType(allowNullable, false); } public virtual string GetCodeType(bool allowNullable, bool forceNull) { var retval = string.Empty; if (StringHelper.Match(this.DataType.ToString(), "bigint", true)) retval = "long"; else if (StringHelper.Match(this.DataType.ToString(), "binary", true)) return "System.Byte[]"; else if (StringHelper.Match(this.DataType.ToString(), "bit", true)) retval = "bool"; else if 
    // (continuation of the SQL-type → C# type mapping chain)
    // NOTE(review): "datetime" is tested before "datetime2" and "date" — this only
    // maps correctly if StringHelper.Match is an exact (not prefix) comparison; verify.
    (StringHelper.Match(this.DataType.ToString(), "char", true)) return "string";
    else if (StringHelper.Match(this.DataType.ToString(), "datetime", true)) retval = "DateTime";
    else if (StringHelper.Match(this.DataType.ToString(), "datetime2", true)) retval = "DateTime";
    else if (StringHelper.Match(this.DataType.ToString(), "date", true)) retval = "DateTime";
    else if (StringHelper.Match(this.DataType.ToString(), "time", true)) retval = "TimeSpan";
    else if (StringHelper.Match(this.DataType.ToString(), "datetimeoffset", true)) retval = "DateTimeOffset";
    else if (StringHelper.Match(this.DataType.ToString(), "decimal", true)) retval = "decimal";
    else if (StringHelper.Match(this.DataType.ToString(), "float", true)) retval = "double";
    else if (StringHelper.Match(this.DataType.ToString(), "image", true)) return "System.Byte[]";
    else if (StringHelper.Match(this.DataType.ToString(), "int", true)) retval = "int";
    else if (StringHelper.Match(this.DataType.ToString(), "money", true)) retval = "decimal";
    else if (StringHelper.Match(this.DataType.ToString(), "nchar", true)) return "string";
    else if (StringHelper.Match(this.DataType.ToString(), "ntext", true)) return "string";
    else if (StringHelper.Match(this.DataType.ToString(), "numeric", true)) retval = "decimal";
    else if (StringHelper.Match(this.DataType.ToString(), "nvarchar", true)) return "string";
    else if (StringHelper.Match(this.DataType.ToString(), "real", true)) retval = "System.Single";
    else if (StringHelper.Match(this.DataType.ToString(), "smalldatetime", true)) retval = "DateTime";
    else if (StringHelper.Match(this.DataType.ToString(), "smallint", true)) retval = "short";
    else if (StringHelper.Match(this.DataType.ToString(), "smallmoney", true)) retval = "decimal";
    else if (StringHelper.Match(this.DataType.ToString(), "variant", true)) retval = "object";
    else if (StringHelper.Match(this.DataType.ToString(), "text", true)) return "string";
    else if (StringHelper.Match(this.DataType.ToString(), "tinyint", true)) retval =
    "byte";
    else if (StringHelper.Match(this.DataType.ToString(), "uniqueidentifier", true)) retval = "System.Guid";
    else if (StringHelper.Match(this.DataType.ToString(), "varbinary", true)) return "System.Byte[]";
    else if (StringHelper.Match(this.DataType.ToString(), "varchar", true)) return "string";
    else if (StringHelper.Match(this.DataType.ToString(), "timestamp", true)) return "System.Byte[]";
    else if (StringHelper.Match(this.DataType.ToString(), "xml", true)) return "string";
    else throw new Exception("Cannot Map Sql Value To C# Value");

    // Only value types reach this point; reference types returned above
    if (allowNullable && (this.Nullable || forceNull)) retval += "?";
    return retval;
}

public override string ToString() => this.Name;

#endregion

#region Properties

// A field is considered indexed when explicitly indexed OR a primary key — unless calculated.
public override bool IsIndexed
{
    get { return (base.IsIndexed || base.IsPrimaryKey) && !base.IsCalculated; }
    set { base.IsIndexed = value; }
}

// Primary keys are implicitly unique; calculated columns never are.
public override bool IsUnique
{
    get { return (base.IsUnique || base.IsPrimaryKey) && !base.IsCalculated; }
    set { base.IsUnique = value; }
}

// Calculated columns cannot carry a default value.
public override string Default
{
    get
    {
        if (this.IsCalculated) return string.Empty;
        return base.Default;
    }
    set { base.Default = value; }
}

// Identity is suppressed for data types that do not support it and for calculated columns.
public override IdentityTypeConstants Identity
{
    get
    {
        if (!this.DataType.SupportsIdentity()) return IdentityTypeConstants.None;
        else if (this.IsCalculated) return IdentityTypeConstants.None;
        else return base.Identity;
    }
    set { base.Identity = value; }
}

// Calculated columns can never participate in the primary key.
public override bool IsPrimaryKey
{
    get
    {
        if (this.IsCalculated) return false;
        else return base.IsPrimaryKey;
    }
    set { base.IsPrimaryKey = value; }
}

// Nullability rules: calculated → always nullable; unique or variant → never;
// otherwise the stored flag, but primary keys are forced non-nullable.
public override bool Nullable
{
    get
    {
        if (this.IsCalculated) return true;
        else if (this.IsUnique) return false;
        else if (this.DataType == DataTypeConstants.Variant) return false;
        else return base.Nullable && !this.IsPrimaryKey;
    }
    set { base.Nullable = value; }
}

// Types with a predefined size (GetPredefinedSize != -1) override the stored length.
public override int Length
{
    get
    {
        var retval = this.DataType.GetPredefinedSize();
        if (retval == -1) retval = base.Length;
        return retval;
    }
    set { base.Length = value; }
}

// Types with a predefined scale override the stored scale.
public override int Scale
{
    get
    {
        var retval = this.DataType.GetPredefinedScale();
        if (retval == -1) retval = base.Scale;
        return retval;
    }
    set { base.Scale = value; }
}

// Setting the data type re-defaults the field length when a model is not loading.
public override DataTypeConstants DataType
{
    get { return base.DataType; }
    set
    {
        try
        {
            base.DataType = value;
            if (this.Entity != null && this.Entity.nHydrateModel != null)
            {
                //Reset length if necessary
                if (!this.Entity.nHydrateModel.IsLoading)
                {
                    base.Length = value.GetDefaultSize(base.Length);
                }
            }
        }
        catch (Exception ex)
        {
            // NOTE(review): 'ex' is unused; rethrow preserves the stack trace,
            // so this try/catch adds nothing — consider removing it
            throw;
        }
    }
}

#endregion

#region OnDeleting

/// <summary>
/// Cleans up before this field is deleted: removes its managed index and
/// deletes any relations that reference this field on either side.
/// </summary>
protected override void OnDeleting()
{
    //This will remove the managed index before removing this field
    this.IsIndexed = false;

    //Remove from relation mapped collection
    if (this.Entity != null && this.Entity.nHydrateModel != null && this.Entity.nHydrateModel.RelationFields != null)
    {
        var relationFieldList = this.Entity.nHydrateModel.RelationFields.Where(x => x.SourceFieldId == this.Id || x.TargetFieldId == this.Id).ToList();
        foreach (var item in relationFieldList)
        {
            var relation = this.Entity.nHydrateModel.AllRelations.FirstOrDefault(x => x.Id == item.RelationID);
            if (relation != null) relation.Delete();
        }
        //This is redundant. Should be zero. Used for debugging.
        var count1 = this.Entity.nHydrateModel.RelationFields.Remove(x => x.SourceFieldId == this.Id || x.TargetFieldId == this.Id);
    }
    base.OnDeleting();
}

#endregion

#region IContainerParent Members

// The owning entity acts as this field's container in the DSL model.
DslModeling.ModelElement IContainerParent.ContainerParent => this.Entity;

#endregion

}

partial class FieldBase
{
    partial class NamePropertyHandler
    {
        /// <summary>
        /// Parses an inline data-type specification out of a newly-typed field name.
        /// "Name:Datatype" or "Name:Datatype length" sets DataType (and Length),
        /// then re-assigns the bare name.
        /// </summary>
        protected override void OnValueChanged(FieldBase element, string oldValue, string newValue)
        {
            //If not loading then parse the name for the data type
            var hasChanged = false;
            if (element.Entity != null && !element.Entity.nHydrateModel.IsLoading)
            {
                if (!string.IsNullOrEmpty(newValue))
                {
                    var arr = newValue.Split(':');
                    if (arr.Length == 2)
                    {
                        // arr[1] is "Datatype" or "Datatype length"
                        var typearr = arr[1].Split(' ');
                        var d = Extensions.GetDataTypeFromName(typearr[0]);
                        if (d != null)
                        {
                            if (typearr.Length == 2)
                            {
                                if (int.TryParse(typearr[1], out var len))
                                {
                                    element.DataType = d.Value;
                                    element.Length = len;
                                    newValue = arr[0];
                                    hasChanged = true;
                                }
                                else
                                {
                                    throw new Exception("Unrecognized data type! Valid format is 'Name:Datatype length'");
                                }
                            }
                            else
                            {
                                element.DataType = d.Value;
                                newValue = arr[0];
                                hasChanged = true;
                            }
                        }
                        else
                        {
                            throw new Exception("Unrecognized data type! Valid format is 'Name:Datatype length'");
                        }
                    }
                }
            }

            base.OnValueChanged(element, oldValue, newValue);

            //Reset after we set datatype
            // NOTE(review): when hasChanged is false, base.OnValueChanged has already been
            // called above, so the else-branch invokes it a second time — confirm intended
            if (hasChanged)
                element.Name = newValue;
            else
                base.OnValueChanged(element, oldValue, newValue);
        }
    }

    partial class LengthPropertyHandler
    {
        // Clamps a newly-set length to [0, type maximum]; re-assigning Length
        // below will trigger this handler again with the corrected value.
        protected override void OnValueChanged(FieldBase element, int oldValue, int newValue)
        {
            base.OnValueChanged(element, oldValue, newValue);
            //this will trigger another event
            var v = newValue;
            if (v < 0) v = 0;
            v = element.DataType.ValidateDataTypeMax(v);
            if (newValue != v)
                element.Length = element.DataType.ValidateDataTypeMax(v);
        }
    }

    partial class ScalePropertyHandler
    {
        // Scale can never be negative; re-assigning triggers this handler again.
        protected override void OnValueChanged(FieldBase element, int oldValue, int newValue)
        {
            base.OnValueChanged(element, oldValue, newValue);
            //this will trigger another event
            if (newValue < 0)
                element.Scale = 0;
        }
    }

    partial class IsPrimaryKeyPropertyHandler
    {
        /// <summary>
        /// Keeps the entity's managed primary-key index in sync when a field's
        /// IsPrimaryKey flag changes: creates/extends the PK index on set,
        /// removes this field from it (or deletes it) on clear.
        /// </summary>
        protected override void OnValueChanged(FieldBase element, bool oldValue, bool newValue)
        {
            base.OnValueChanged(element, oldValue, newValue);
            try
            {
                if (element.Entity != null && element.Entity.nHydrateModel != null && !element.Entity.nHydrateModel.IsLoading)
                {
                    //Processes Index list
                    if (element.IsPrimaryKey) //Must use real property since there is logic there
                    {
                        //This is a PK so determine if there is a key for this and add this field to a new or the existing index
                        var existing = element.Entity.Indexes.FirstOrDefault(x => x.IndexType == IndexTypeConstants.PrimaryKey);
                        if (existing == null)
                        {
                            //The PK index does not exist so create one
                            using (var transaction = element.Store.TransactionManager.BeginTransaction(Guid.NewGuid().ToString()))
                            {
                                var newIndex = new Index(element.Partition);
                                newIndex.ParentEntityID = element.Entity.Id;
                                newIndex.Clustered = true;
                                element.Entity.Indexes.Add(newIndex);
                                var newColumn = new IndexColumn(element.Partition);
                                newColumn.FieldID = element.Id;
                                newIndex.IndexColumns.Add(newColumn);
                                newColumn.IsInternal = true;
                                newIndex.IndexType = IndexTypeConstants.PrimaryKey;
                                //Do this last
                                transaction.Commit();
                            }
                        }
                        else
                        {
                            //The PK does exist so add this field to it
                            using (var transaction = element.Store.TransactionManager.BeginTransaction(Guid.NewGuid().ToString()))
                            {
                                if (existing.IndexColumns.Count(x => x.FieldID == element.Id) == 0)
                                {
                                    // Temporarily downgrade to User so the column edit is permitted,
                                    // then restore PrimaryKey below
                                    existing.IndexType = IndexTypeConstants.User;
                                    var newColumn = new IndexColumn(element.Partition);
                                    newColumn.FieldID = element.Id;
                                    newColumn.IsInternal = true;
                                    existing.IndexColumns.Add(newColumn);
                                    //Just in case there are invalid fields
                                    existing.IndexColumns.Remove(x => x.GetField() == null);
                                    existing.IndexType = IndexTypeConstants.PrimaryKey;
                                    //Do this last
                                    transaction.Commit();
                                }
                            }
                        }

                        //Remove the IsIndex ones if exist
                        // NOTE(review): the IsIndexed condition appears twice in this predicate — harmless but redundant
                        Func<Index, bool> where = x => x.IndexType == IndexTypeConstants.IsIndexed && x.IndexColumns.Count == 1 && x.IndexColumns.First().FieldID == element.Id && x.IndexType == IndexTypeConstants.IsIndexed;
                        element.Entity.Indexes.Where(where).ToList().ForEach(x => x.IndexType = IndexTypeConstants.User);
                        element.Entity.Indexes.Remove(where);
                    }
                    else //Remove Index
                    {
                        var existing = element.Entity.Indexes.FirstOrDefault(x => x.IndexType == IndexTypeConstants.PrimaryKey);
                        if (existing != null)
                        {
                            using (var transaction = element.Store.TransactionManager.BeginTransaction(Guid.NewGuid().ToString()))
                            {
                                // Downgrade to User while editing, restore afterwards unless the PK is now empty
                                existing.IndexType = IndexTypeConstants.User;
                                existing.IndexColumns.Remove(x => x.FieldID == element.Id);
                                if (element.Entity.Fields.Count(x => x.IsPrimaryKey) == 0) //No more primary keys
                                    element.Entity.Indexes.Remove(existing);
                                else
                                    existing.IndexType = IndexTypeConstants.PrimaryKey;
                                transaction.Commit();
                            }
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                // NOTE(review): 'ex' is unused; the bare rethrow makes this try/catch a no-op
                throw;
            }
        }
    }

    partial class IsIndexedPropertyHandler
    {
        /// <summary>
        /// Keeps the managed single-field ascending index in sync with the
        /// IsIndexed flag: adds one when set, removes them when cleared.
        /// </summary>
        protected override void OnValueChanged(FieldBase element, bool oldValue, bool newValue)
        {
            base.OnValueChanged(element, oldValue, newValue);
            if (element.Entity != null && element.Entity.nHydrateModel != null && !element.Entity.nHydrateModel.IsLoading)
            {
                //Processes Index list
                if (newValue) //element.IsIndexed //Must use real property since there is logic there
                {
                    //Add an Asc single field index
                    var existing = element.Entity.Indexes.FirstOrDefault(x => x.IndexColumns.Count == 1 && x.IndexColumns.First().FieldID == element.Id && x.IndexColumns.First().Ascending && x.IndexType == IndexTypeConstants.IsIndexed);
                    if (existing == null)
                    {
                        using (var transaction = element.Store.TransactionManager.BeginTransaction(Guid.NewGuid().ToString()))
                        {
                            var newIndex = new Index(element.Partition);
                            newIndex.ParentEntityID = element.Entity.Id;
                            newIndex.IndexType = IndexTypeConstants.IsIndexed;
                            newIndex.Clustered = false;
                            element.Entity.Indexes.Add(newIndex);
                            var newColumn = new IndexColumn(element.Partition);
                            newColumn.FieldID = element.Id;
                            newColumn.IsInternal = true;
                            newIndex.IndexColumns.Add(newColumn);
                            transaction.Commit();
                        }
                    }
                }
                else //Remove Index
                {
                    var existingList = element.Entity.Indexes
                        .Where(x => x.IndexType == IndexTypeConstants.IsIndexed && x.IndexColumns.Count == 1 && x.IndexColumns.First().FieldID == element.Id && x.IndexType == IndexTypeConstants.IsIndexed)
                        .ToList();
                    using (var transaction = element.Store.TransactionManager.BeginTransaction(Guid.NewGuid().ToString()))
                    {
                        while (existingList.Count > 0)
                        {
                            var item = existingList.First();
                            // Downgrade to User so the managed index may be deleted
                            item.IndexType = IndexTypeConstants.User;
                            element.Entity.Indexes.Remove(item);
                            existingList.Remove(item);
                        }
                        transaction.Commit();
                    }
                }
            }
        }
    }

    partial class DataTypePropertyHandler
    {
        // When the data type changes (and we are not loading), reset the field
        // length to the new type's default size.
        protected override void OnValueChanged(FieldBase element, DataTypeConstants oldValue, DataTypeConstants newValue)
        {
            base.OnValueChanged(element, oldValue, newValue);
            if (element.Entity != null && element.Entity.nHydrateModel != null)
            {
                if (!element.Entity.nHydrateModel.IsLoading)
                {
                    element.Length = newValue.GetDefaultSize(element.Length);
                }
            }
        }
    }
}
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Linq.Expressions;
using System.Reflection;
using Microsoft.CSharp.RuntimeBinder.Semantics;

namespace Microsoft.CSharp.RuntimeBinder
{
    /// <summary>
    /// Rewrites the bound-tree (EXPR) form of an expression-tree factory call
    /// into an actual System.Linq.Expressions tree.
    /// </summary>
    internal class ExpressionTreeCallRewriter : ExprVisitorBase
    {
        /////////////////////////////////////////////////////////////////////////////////
        // Members

        // Adapter that lets a built Expression ride through the EXPR visitor pipeline.
        private class ExpressionEXPR : EXPR
        {
            public Expression Expression;
            public ExpressionEXPR(Expression e) { Expression = e; }
        }

        private Dictionary<EXPRCALL, Expression> _DictionaryOfParameters;
        private IEnumerable<Expression> _ListOfParameters;
        private TypeManager _typeManager;

        // Counts how many EXPRSAVEs we've encountered so we know which index into the
        // parameter list we should be taking.
        private int _currentParameterIndex;

        /////////////////////////////////////////////////////////////////////////////////

        protected ExpressionTreeCallRewriter(TypeManager typeManager, IEnumerable<Expression> listOfParameters)
        {
            _typeManager = typeManager;
            _DictionaryOfParameters = new Dictionary<EXPRCALL, Expression>();
            _ListOfParameters = listOfParameters;
        }

        /////////////////////////////////////////////////////////////////////////////////

        /// <summary>
        /// Entry point: converts the given bound expression (an EK_SEQUENCE whose RHS is
        /// a PM_EXPRESSION_LAMBDA call) into a LINQ Expression, binding the supplied
        /// parameter expressions in declaration order.
        /// </summary>
        public static Expression Rewrite(TypeManager typeManager, EXPR pExpr, IEnumerable<Expression> listOfParameters)
        {
            ExpressionTreeCallRewriter rewriter = new ExpressionTreeCallRewriter(typeManager, listOfParameters);

            // We should have a EXPRBINOP thats an EK_SEQUENCE. The RHS of our sequence
            // should be a call to PM_EXPRESSION_LAMBDA. The LHS of our sequence is the
            // set of declarations for the parameters that we'll need.
            // Assert all of these first, and then unwrap them.
            Debug.Assert(pExpr != null);
            Debug.Assert(pExpr.isBIN());
            Debug.Assert(pExpr.kind == ExpressionKind.EK_SEQUENCE);
            Debug.Assert(pExpr.asBIN().GetOptionalRightChild() != null);
            Debug.Assert(pExpr.asBIN().GetOptionalRightChild().isCALL());
            Debug.Assert(pExpr.asBIN().GetOptionalRightChild().asCALL().PredefinedMethod == PREDEFMETH.PM_EXPRESSION_LAMBDA);
            Debug.Assert(pExpr.asBIN().GetOptionalLeftChild() != null);

            // Visit the left to generate the parameter construction.
            rewriter.Visit(pExpr.asBIN().GetOptionalLeftChild());
            EXPRCALL call = pExpr.asBIN().GetOptionalRightChild().asCALL();

            ExpressionEXPR e = rewriter.Visit(call) as ExpressionEXPR;
            return e.Expression;
        }

        /////////////////////////////////////////////////////////////////////////////////

        /// <summary>
        /// Records a parameter declaration: maps the PM_EXPRESSION_PARAMETER call on the
        /// LHS of the save to the next caller-supplied parameter Expression.
        /// </summary>
        protected override EXPR VisitSAVE(EXPRBINOP pExpr)
        {
            // Saves should have a LHS that is a CALL to PM_EXPRESSION_PARAMETER
            // and a RHS that is a WRAP of that call.
            Debug.Assert(pExpr.GetOptionalLeftChild() != null);
            Debug.Assert(pExpr.GetOptionalLeftChild().isCALL());
            Debug.Assert(pExpr.GetOptionalLeftChild().asCALL().PredefinedMethod == PREDEFMETH.PM_EXPRESSION_PARAMETER);
            Debug.Assert(pExpr.GetOptionalRightChild() != null);
            Debug.Assert(pExpr.GetOptionalRightChild().isWRAP());

            EXPRCALL call = pExpr.GetOptionalLeftChild().asCALL();
            // NOTE(review): 'TypeOf' is computed but never used here — confirm it can be dropped
            EXPRTYPEOF TypeOf = call.GetOptionalArguments().asLIST().GetOptionalElement().asTYPEOF();
            Expression parameter = _ListOfParameters.ElementAt(_currentParameterIndex++);
            _DictionaryOfParameters.Add(call, parameter);

            return null;
        }

        /////////////////////////////////////////////////////////////////////////////////

        protected override EXPR VisitCAST(EXPRCAST pExpr)
        {
            return base.VisitCAST(pExpr);
        }

        /////////////////////////////////////////////////////////////////////////////////

        /// <summary>
        /// Dispatches a predefined expression-factory call to the matching generator.
        /// Non-factory calls (PM_FIRST) pass through unchanged.
        /// </summary>
        protected override EXPR VisitCALL(EXPRCALL pExpr)
        {
            if (pExpr.PredefinedMethod != PREDEFMETH.PM_FIRST)
            {
                switch (pExpr.PredefinedMethod)
                {
                    case PREDEFMETH.PM_EXPRESSION_LAMBDA:
                        return GenerateLambda(pExpr);
                    case
                    PREDEFMETH.PM_EXPRESSION_CALL:
                        return GenerateCall(pExpr);
                    case PREDEFMETH.PM_EXPRESSION_ARRAYINDEX:
                    case PREDEFMETH.PM_EXPRESSION_ARRAYINDEX2:
                        return GenerateArrayIndex(pExpr);
                    case PREDEFMETH.PM_EXPRESSION_CONVERT:
                    case PREDEFMETH.PM_EXPRESSION_CONVERT_USER_DEFINED:
                    case PREDEFMETH.PM_EXPRESSION_CONVERTCHECKED:
                    case PREDEFMETH.PM_EXPRESSION_CONVERTCHECKED_USER_DEFINED:
                        return GenerateConvert(pExpr);
                    case PREDEFMETH.PM_EXPRESSION_PROPERTY:
                        return GenerateProperty(pExpr);
                    case PREDEFMETH.PM_EXPRESSION_FIELD:
                        return GenerateField(pExpr);
                    case PREDEFMETH.PM_EXPRESSION_INVOKE:
                        return GenerateInvoke(pExpr);
                    case PREDEFMETH.PM_EXPRESSION_NEW:
                        return GenerateNew(pExpr);
                    case PREDEFMETH.PM_EXPRESSION_ADD:
                    case PREDEFMETH.PM_EXPRESSION_AND:
                    case PREDEFMETH.PM_EXPRESSION_DIVIDE:
                    case PREDEFMETH.PM_EXPRESSION_EQUAL:
                    case PREDEFMETH.PM_EXPRESSION_EXCLUSIVEOR:
                    case PREDEFMETH.PM_EXPRESSION_GREATERTHAN:
                    case PREDEFMETH.PM_EXPRESSION_GREATERTHANOREQUAL:
                    case PREDEFMETH.PM_EXPRESSION_LEFTSHIFT:
                    case PREDEFMETH.PM_EXPRESSION_LESSTHAN:
                    case PREDEFMETH.PM_EXPRESSION_LESSTHANOREQUAL:
                    case PREDEFMETH.PM_EXPRESSION_MODULO:
                    case PREDEFMETH.PM_EXPRESSION_MULTIPLY:
                    case PREDEFMETH.PM_EXPRESSION_NOTEQUAL:
                    case PREDEFMETH.PM_EXPRESSION_OR:
                    case PREDEFMETH.PM_EXPRESSION_RIGHTSHIFT:
                    case PREDEFMETH.PM_EXPRESSION_SUBTRACT:
                    case PREDEFMETH.PM_EXPRESSION_ORELSE:
                    case PREDEFMETH.PM_EXPRESSION_ANDALSO:
                    // Checked
                    case PREDEFMETH.PM_EXPRESSION_ADDCHECKED:
                    case PREDEFMETH.PM_EXPRESSION_MULTIPLYCHECKED:
                    case PREDEFMETH.PM_EXPRESSION_SUBTRACTCHECKED:
                        return GenerateBinaryOperator(pExpr);
                    case PREDEFMETH.PM_EXPRESSION_ADD_USER_DEFINED:
                    case PREDEFMETH.PM_EXPRESSION_AND_USER_DEFINED:
                    case PREDEFMETH.PM_EXPRESSION_DIVIDE_USER_DEFINED:
                    case PREDEFMETH.PM_EXPRESSION_EQUAL_USER_DEFINED:
                    case PREDEFMETH.PM_EXPRESSION_EXCLUSIVEOR_USER_DEFINED:
                    case PREDEFMETH.PM_EXPRESSION_GREATERTHAN_USER_DEFINED:
                    case PREDEFMETH.PM_EXPRESSION_GREATERTHANOREQUAL_USER_DEFINED:
                    case PREDEFMETH.PM_EXPRESSION_LEFTSHIFT_USER_DEFINED:
                    case PREDEFMETH.PM_EXPRESSION_LESSTHAN_USER_DEFINED:
                    case PREDEFMETH.PM_EXPRESSION_LESSTHANOREQUAL_USER_DEFINED:
                    case PREDEFMETH.PM_EXPRESSION_MODULO_USER_DEFINED:
                    case PREDEFMETH.PM_EXPRESSION_MULTIPLY_USER_DEFINED:
                    case PREDEFMETH.PM_EXPRESSION_NOTEQUAL_USER_DEFINED:
                    case PREDEFMETH.PM_EXPRESSION_OR_USER_DEFINED:
                    case PREDEFMETH.PM_EXPRESSION_RIGHTSHIFT_USER_DEFINED:
                    case PREDEFMETH.PM_EXPRESSION_SUBTRACT_USER_DEFINED:
                    case PREDEFMETH.PM_EXPRESSION_ORELSE_USER_DEFINED:
                    case PREDEFMETH.PM_EXPRESSION_ANDALSO_USER_DEFINED:
                    // Checked
                    case PREDEFMETH.PM_EXPRESSION_ADDCHECKED_USER_DEFINED:
                    case PREDEFMETH.PM_EXPRESSION_MULTIPLYCHECKED_USER_DEFINED:
                    case PREDEFMETH.PM_EXPRESSION_SUBTRACTCHECKED_USER_DEFINED:
                        return GenerateUserDefinedBinaryOperator(pExpr);
                    case PREDEFMETH.PM_EXPRESSION_NEGATE:
                    case PREDEFMETH.PM_EXPRESSION_NOT:
                    case PREDEFMETH.PM_EXPRESSION_NEGATECHECKED:
                        return GenerateUnaryOperator(pExpr);
                    case PREDEFMETH.PM_EXPRESSION_UNARYPLUS_USER_DEFINED:
                    case PREDEFMETH.PM_EXPRESSION_NEGATE_USER_DEFINED:
                    case PREDEFMETH.PM_EXPRESSION_NOT_USER_DEFINED:
                    case PREDEFMETH.PM_EXPRESSION_NEGATECHECKED_USER_DEFINED:
                        return GenerateUserDefinedUnaryOperator(pExpr);
                    case PREDEFMETH.PM_EXPRESSION_CONSTANT_OBJECT_TYPE:
                        return GenerateConstantType(pExpr);
                    case PREDEFMETH.PM_EXPRESSION_ASSIGN:
                        return GenerateAssignment(pExpr);
                    default:
                        Debug.Assert(false, "Invalid Predefined Method in VisitCALL");
                        throw Error.InternalCompilerError();
                }
            }
            return pExpr;
        }

        #region Generators

        /////////////////////////////////////////////////////////////////////////////////

        /// <summary>
        /// Unwraps a PM_EXPRESSION_LAMBDA call to the Expression built for its body.
        /// </summary>
        private ExpressionEXPR GenerateLambda(EXPRCALL pExpr)
        {
            // We always call Lambda(body, arrayinit) where the arrayinit
            // is the initialization of the parameters.
            ExpressionEXPR body = Visit(pExpr.GetOptionalArguments().asLIST().GetOptionalElement()) as ExpressionEXPR;
            Expression e = body.Expression;

            /*
             * // Do we need to do this?
            if (e.Type.IsValueType)
            {
                // If we have a value type, convert it to object so that boxing
                // can happen.

                e = Expression.Convert(body.Expression, typeof(object));
            }
             * */
            return new ExpressionEXPR(e);
        }

        /////////////////////////////////////////////////////////////////////////////////

        /// <summary>
        /// Builds an Expression.Call from a PM_EXPRESSION_CALL node
        /// (arguments: object, methodinfo, parameter array).
        /// </summary>
        private ExpressionEXPR GenerateCall(EXPRCALL pExpr)
        {
            // Our arguments are: object, methodinfo, parameters.
            // The object is either an EXPRWRAP of a CALL, or a CALL that is a PM_CONVERT, whose
            // argument is the WRAP of a CALL. Deal with that first.
            EXPRMETHODINFO methinfo;
            EXPRARRINIT arrinit;

            EXPRLIST list = pExpr.GetOptionalArguments().asLIST();
            if (list.GetOptionalNextListNode().isLIST())
            {
                methinfo = list.GetOptionalNextListNode().asLIST().GetOptionalElement().asMETHODINFO();
                arrinit = list.GetOptionalNextListNode().asLIST().GetOptionalNextListNode().asARRINIT();
            }
            else
            {
                methinfo = list.GetOptionalNextListNode().asMETHODINFO();
                arrinit = null;
            }

            Expression obj = null;
            MethodInfo m = GetMethodInfoFromExpr(methinfo);
            Expression[] arguments = GetArgumentsFromArrayInit(arrinit);

            if (m == null)
            {
                Debug.Assert(false, "How did we get a call that doesn't have a methodinfo?");
                throw Error.InternalCompilerError();
            }

            // The DLR is expecting the instance for a static invocation to be null. If we have
            // an instance method, fetch the object.
            if (!m.IsStatic)
            {
                obj = GetExpression(pExpr.GetOptionalArguments().asLIST().GetOptionalElement());
            }

            return new ExpressionEXPR(Expression.Call(obj, m, arguments));
        }

        /////////////////////////////////////////////////////////////////////////////////

        /// <summary>
        /// Builds an Expression.ArrayAccess for single-dimension (ARRAYINDEX) or
        /// multi-dimension (ARRAYINDEX2) indexing.
        /// </summary>
        private ExpressionEXPR GenerateArrayIndex(EXPRCALL pExpr)
        {
            // We have two possibilities here - we're either a single index array, in which
            // case we'll be PM_EXPRESSION_ARRAYINDEX, or we have multiple dimensions,
            // in which case we are PM_EXPRESSION_ARRAYINDEX2.
            //
            // Our arguments then, are: object, index or object, indices.
            EXPRLIST list = pExpr.GetOptionalArguments().asLIST();
            Expression obj = GetExpression(list.GetOptionalElement());
            Expression[] indicies;

            if (pExpr.PredefinedMethod == PREDEFMETH.PM_EXPRESSION_ARRAYINDEX)
            {
                indicies = new Expression[] { GetExpression(list.GetOptionalNextListNode()) };
            }
            else
            {
                Debug.Assert(pExpr.PredefinedMethod == PREDEFMETH.PM_EXPRESSION_ARRAYINDEX2);
                indicies = GetArgumentsFromArrayInit(list.GetOptionalNextListNode().asARRINIT());
            }
            return new ExpressionEXPR(Expression.ArrayAccess(obj, indicies));
        }

        /////////////////////////////////////////////////////////////////////////////////

        /// <summary>
        /// Builds Convert/ConvertChecked (or Unbox) from the four conversion factory calls,
        /// handling both standard and user-defined conversions.
        /// </summary>
        private ExpressionEXPR GenerateConvert(EXPRCALL pExpr)
        {
            PREDEFMETH pm = pExpr.PredefinedMethod;
            Expression e;
            Type t;

            if (pm == PREDEFMETH.PM_EXPRESSION_CONVERT_USER_DEFINED ||
                pm == PREDEFMETH.PM_EXPRESSION_CONVERTCHECKED_USER_DEFINED)
            {
                // If we have a user defined conversion, then we'll have the object
                // as the first element, and another list as a second element. This list
                // contains a TYPEOF as the first element, and the METHODINFO for the call
                // as the second.

                EXPRLIST list = pExpr.asCALL().GetOptionalArguments().asLIST();
                EXPRLIST list2 = list.GetOptionalNextListNode().asLIST();
                e = GetExpression(list.GetOptionalElement());
                t = list2.GetOptionalElement().asTYPEOF().SourceType.type.AssociatedSystemType;

                if (e.Type.MakeByRefType() == t)
                {
                    // We're trying to convert from a type to its by ref type. Dont do that.
                    return new ExpressionEXPR(e);
                }
                Debug.Assert((pExpr.flags & EXPRFLAG.EXF_UNBOXRUNTIME) == 0);

                MethodInfo m = GetMethodInfoFromExpr(list2.GetOptionalNextListNode().asMETHODINFO());

                if (pm == PREDEFMETH.PM_EXPRESSION_CONVERT_USER_DEFINED)
                {
                    return new ExpressionEXPR(Expression.Convert(e, t, m));
                }
                return new ExpressionEXPR(Expression.ConvertChecked(e, t, m));
            }
            else
            {
                Debug.Assert(pm == PREDEFMETH.PM_EXPRESSION_CONVERT ||
                    pm == PREDEFMETH.PM_EXPRESSION_CONVERTCHECKED);

                // If we have a standard conversion, then we'll have some object as
                // the first list element (ie a WRAP or a CALL), and then a TYPEOF
                // as the second list element.

                EXPRLIST list = pExpr.asCALL().GetOptionalArguments().asLIST();

                e = GetExpression(list.GetOptionalElement());
                t = list.GetOptionalNextListNode().asTYPEOF().SourceType.type.AssociatedSystemType;

                if (e.Type.MakeByRefType() == t)
                {
                    // We're trying to convert from a type to its by ref type. Dont do that.
                    return new ExpressionEXPR(e);
                }

                if ((pExpr.flags & EXPRFLAG.EXF_UNBOXRUNTIME) != 0)
                {
                    // If we want to unbox this thing, return that instead of the convert.
                    return new ExpressionEXPR(Expression.Unbox(e, t));
                }

                if (pm == PREDEFMETH.PM_EXPRESSION_CONVERT)
                {
                    return new ExpressionEXPR(Expression.Convert(e, t));
                }
                return new ExpressionEXPR(Expression.ConvertChecked(e, t));
            }
        }

        /////////////////////////////////////////////////////////////////////////////////

        /// <summary>
        /// Builds Expression.Property from a PM_EXPRESSION_PROPERTY node; handles both
        /// simple properties and indexed properties (propinfo + argument array).
        /// </summary>
        private ExpressionEXPR GenerateProperty(EXPRCALL pExpr)
        {
            EXPRLIST list = pExpr.asCALL().GetOptionalArguments().asLIST();

            EXPR instance = list.GetOptionalElement();
            EXPRPropertyInfo propinfo = list.GetOptionalNextListNode().isLIST() ?
                list.GetOptionalNextListNode().asLIST().GetOptionalElement().asPropertyInfo() :
                list.GetOptionalNextListNode().asPropertyInfo();
            EXPRARRINIT arguments = list.GetOptionalNextListNode().isLIST() ?
                list.GetOptionalNextListNode().asLIST().GetOptionalNextListNode().asARRINIT() : null;

            PropertyInfo p = GetPropertyInfoFromExpr(propinfo);

            if (p == null)
            {
                Debug.Assert(false, "How did we get a prop that doesn't have a propinfo?");
                throw Error.InternalCompilerError();
            }

            if (arguments == null)
            {
                return new ExpressionEXPR(Expression.Property(GetExpression(instance), p));
            }

            return new ExpressionEXPR(Expression.Property(GetExpression(instance), p, GetArgumentsFromArrayInit(arguments)));
        }

        /////////////////////////////////////////////////////////////////////////////////

        /// <summary>
        /// Builds Expression.Field, re-resolving the FieldInfo against the declaring
        /// or constructed generic type where needed (no-PIA / generics handling).
        /// </summary>
        private ExpressionEXPR GenerateField(EXPRCALL pExpr)
        {
            EXPRLIST list = pExpr.asCALL().GetOptionalArguments().asLIST();
            Type t = list.GetOptionalNextListNode().asFIELDINFO().FieldType().AssociatedSystemType;
            FieldInfo f = list.GetOptionalNextListNode().asFIELDINFO().Field().AssociatedFieldInfo;

            // This is to ensure that for embedded nopia types, we have the
            // appropriate local type from the member itself; this is possible
            // because nopia types are not generic or nested.
            if (!t.GetTypeInfo().IsGenericType && !t.GetTypeInfo().IsNested)
            {
                t = f.DeclaringType;
            }

            // Now find the generic'ed one if we're generic.
            if (t.GetTypeInfo().IsGenericType)
            {
                f = t.GetField(f.Name, BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance | BindingFlags.Static);
            }

            return new ExpressionEXPR(Expression.Field(GetExpression(list.GetOptionalElement()), f));
        }

        /////////////////////////////////////////////////////////////////////////////////

        /// <summary>
        /// Builds Expression.Invoke (delegate invocation) from delegate + argument array.
        /// </summary>
        private ExpressionEXPR GenerateInvoke(EXPRCALL pExpr)
        {
            EXPRLIST list = pExpr.asCALL().GetOptionalArguments().asLIST();

            return new ExpressionEXPR(Expression.Invoke(
                GetExpression(list.GetOptionalElement()),
                GetArgumentsFromArrayInit(list.GetOptionalNextListNode().asARRINIT())));
        }

        /////////////////////////////////////////////////////////////////////////////////

        /// <summary>
        /// Builds Expression.New from constructor info + argument array.
        /// </summary>
        private ExpressionEXPR GenerateNew(EXPRCALL pExpr)
        {
            EXPRLIST list = pExpr.asCALL().GetOptionalArguments().asLIST();

            var constructor = GetConstructorInfoFromExpr(list.GetOptionalElement().asMETHODINFO());
            var arguments = GetArgumentsFromArrayInit(list.GetOptionalNextListNode().asARRINIT());

            return new ExpressionEXPR(Expression.New(constructor, arguments));
        }

        /////////////////////////////////////////////////////////////////////////////////

        /// <summary>
        /// Builds a typed Expression.Constant from a boxed value and its declared type.
        /// </summary>
        private ExpressionEXPR GenerateConstantType(EXPRCALL pExpr)
        {
            EXPRLIST list = pExpr.GetOptionalArguments().asLIST();

            return new ExpressionEXPR(
                Expression.Constant(
                    GetObject(list.GetOptionalElement()),
                    list.GetOptionalNextListNode().asTYPEOF().SourceType.type.AssociatedSystemType));
        }

        /////////////////////////////////////////////////////////////////////////////////

        /// <summary>
        /// Builds Expression.Assign(target, value).
        /// </summary>
        private ExpressionEXPR GenerateAssignment(EXPRCALL pExpr)
        {
            EXPRLIST list = pExpr.GetOptionalArguments().asLIST();

            return new ExpressionEXPR(Expression.Assign(
                GetExpression(list.GetOptionalElement()),
                GetExpression(list.GetOptionalNextListNode())));
        }

        /////////////////////////////////////////////////////////////////////////////////

        /// <summary>
        /// Maps a built-in binary-operator factory call to the corresponding
        /// Expression factory (Add, And, Divide, ... including checked variants).
        /// </summary>
        private ExpressionEXPR GenerateBinaryOperator(EXPRCALL pExpr)
        {
            Expression arg1 = GetExpression(pExpr.GetOptionalArguments().asLIST().GetOptionalElement());
            Expression arg2 = GetExpression(pExpr.GetOptionalArguments().asLIST().GetOptionalNextListNode());

            switch (pExpr.PredefinedMethod)
            {
                case PREDEFMETH.PM_EXPRESSION_ADD:
                    return new ExpressionEXPR(Expression.Add(arg1, arg2));
                case PREDEFMETH.PM_EXPRESSION_AND:
                    return new ExpressionEXPR(Expression.And(arg1, arg2));
                case PREDEFMETH.PM_EXPRESSION_DIVIDE:
                    return new ExpressionEXPR(Expression.Divide(arg1, arg2));
                case PREDEFMETH.PM_EXPRESSION_EQUAL:
                    return new ExpressionEXPR(Expression.Equal(arg1, arg2));
                case PREDEFMETH.PM_EXPRESSION_EXCLUSIVEOR:
                    return new ExpressionEXPR(Expression.ExclusiveOr(arg1, arg2));
                case PREDEFMETH.PM_EXPRESSION_GREATERTHAN:
                    return new ExpressionEXPR(Expression.GreaterThan(arg1, arg2));
                case PREDEFMETH.PM_EXPRESSION_GREATERTHANOREQUAL:
                    return new ExpressionEXPR(Expression.GreaterThanOrEqual(arg1, arg2));
                case PREDEFMETH.PM_EXPRESSION_LEFTSHIFT:
                    return new ExpressionEXPR(Expression.LeftShift(arg1, arg2));
                case PREDEFMETH.PM_EXPRESSION_LESSTHAN:
                    return new ExpressionEXPR(Expression.LessThan(arg1, arg2));
                case PREDEFMETH.PM_EXPRESSION_LESSTHANOREQUAL:
                    return new ExpressionEXPR(Expression.LessThanOrEqual(arg1, arg2));
                case PREDEFMETH.PM_EXPRESSION_MODULO:
                    return new ExpressionEXPR(Expression.Modulo(arg1, arg2));
                case PREDEFMETH.PM_EXPRESSION_MULTIPLY:
                    return new ExpressionEXPR(Expression.Multiply(arg1, arg2));
                case PREDEFMETH.PM_EXPRESSION_NOTEQUAL:
                    return new ExpressionEXPR(Expression.NotEqual(arg1, arg2));
                case PREDEFMETH.PM_EXPRESSION_OR:
                    return new ExpressionEXPR(Expression.Or(arg1, arg2));
                case PREDEFMETH.PM_EXPRESSION_RIGHTSHIFT:
                    return new ExpressionEXPR(Expression.RightShift(arg1, arg2));
                case PREDEFMETH.PM_EXPRESSION_SUBTRACT:
                    return new ExpressionEXPR(Expression.Subtract(arg1, arg2));
                case PREDEFMETH.PM_EXPRESSION_ORELSE:
                    return new ExpressionEXPR(Expression.OrElse(arg1, arg2));
                case PREDEFMETH.PM_EXPRESSION_ANDALSO:
                    return new ExpressionEXPR(Expression.AndAlso(arg1, arg2));

                // Checked
                case PREDEFMETH.PM_EXPRESSION_ADDCHECKED:
                    return new
                    ExpressionEXPR(Expression.AddChecked(arg1, arg2));
                case PREDEFMETH.PM_EXPRESSION_MULTIPLYCHECKED:
                    return new ExpressionEXPR(Expression.MultiplyChecked(arg1, arg2));
                case PREDEFMETH.PM_EXPRESSION_SUBTRACTCHECKED:
                    return new ExpressionEXPR(Expression.SubtractChecked(arg1, arg2));

                default:
                    Debug.Assert(false, "Invalid Predefined Method in GenerateBinaryOperator");
                    throw Error.InternalCompilerError();
            }
        }

        /////////////////////////////////////////////////////////////////////////////////

        /// <summary>
        /// Maps a user-defined binary-operator factory call to the corresponding
        /// Expression factory, forwarding the operator MethodInfo and — for the
        /// comparison operators — the lifted-to-null flag.
        /// </summary>
        private ExpressionEXPR GenerateUserDefinedBinaryOperator(EXPRCALL pExpr)
        {
            EXPRLIST list = pExpr.GetOptionalArguments().asLIST();
            Expression arg1 = GetExpression(list.GetOptionalElement());
            Expression arg2 = GetExpression(list.GetOptionalNextListNode().asLIST().GetOptionalElement());

            list = list.GetOptionalNextListNode().asLIST();
            MethodInfo methodInfo;
            bool bIsLifted = false;
            if (list.GetOptionalNextListNode().isLIST())
            {
                // Comparison operators carry an extra "isLifted" constant before the methodinfo
                EXPRCONSTANT isLifted = list.GetOptionalNextListNode().asLIST().GetOptionalElement().asCONSTANT();
                bIsLifted = isLifted.getVal().iVal == 1;
                methodInfo = GetMethodInfoFromExpr(list.GetOptionalNextListNode().asLIST().GetOptionalNextListNode().asMETHODINFO());
            }
            else
            {
                methodInfo = GetMethodInfoFromExpr(list.GetOptionalNextListNode().asMETHODINFO());
            }

            switch (pExpr.PredefinedMethod)
            {
                case PREDEFMETH.PM_EXPRESSION_ADD_USER_DEFINED:
                    return new ExpressionEXPR(Expression.Add(arg1, arg2, methodInfo));
                case PREDEFMETH.PM_EXPRESSION_AND_USER_DEFINED:
                    return new ExpressionEXPR(Expression.And(arg1, arg2, methodInfo));
                case PREDEFMETH.PM_EXPRESSION_DIVIDE_USER_DEFINED:
                    return new ExpressionEXPR(Expression.Divide(arg1, arg2, methodInfo));
                case PREDEFMETH.PM_EXPRESSION_EQUAL_USER_DEFINED:
                    return new ExpressionEXPR(Expression.Equal(arg1, arg2, bIsLifted, methodInfo));
                case PREDEFMETH.PM_EXPRESSION_EXCLUSIVEOR_USER_DEFINED:
                    return new ExpressionEXPR(Expression.ExclusiveOr(arg1, arg2, methodInfo));
                case PREDEFMETH.PM_EXPRESSION_GREATERTHAN_USER_DEFINED:
                    return new ExpressionEXPR(Expression.GreaterThan(arg1, arg2, bIsLifted, methodInfo));
                case PREDEFMETH.PM_EXPRESSION_GREATERTHANOREQUAL_USER_DEFINED:
                    return new ExpressionEXPR(Expression.GreaterThanOrEqual(arg1, arg2, bIsLifted, methodInfo));
                case PREDEFMETH.PM_EXPRESSION_LEFTSHIFT_USER_DEFINED:
                    return new ExpressionEXPR(Expression.LeftShift(arg1, arg2, methodInfo));
                case PREDEFMETH.PM_EXPRESSION_LESSTHAN_USER_DEFINED:
                    return new ExpressionEXPR(Expression.LessThan(arg1, arg2, bIsLifted, methodInfo));
                case PREDEFMETH.PM_EXPRESSION_LESSTHANOREQUAL_USER_DEFINED:
                    return new ExpressionEXPR(Expression.LessThanOrEqual(arg1, arg2, bIsLifted, methodInfo));
                case PREDEFMETH.PM_EXPRESSION_MODULO_USER_DEFINED:
                    return new ExpressionEXPR(Expression.Modulo(arg1, arg2, methodInfo));
                case PREDEFMETH.PM_EXPRESSION_MULTIPLY_USER_DEFINED:
                    return new ExpressionEXPR(Expression.Multiply(arg1, arg2, methodInfo));
                case PREDEFMETH.PM_EXPRESSION_NOTEQUAL_USER_DEFINED:
                    return new ExpressionEXPR(Expression.NotEqual(arg1, arg2, bIsLifted, methodInfo));
                case PREDEFMETH.PM_EXPRESSION_OR_USER_DEFINED:
                    return new ExpressionEXPR(Expression.Or(arg1, arg2, methodInfo));
                case PREDEFMETH.PM_EXPRESSION_RIGHTSHIFT_USER_DEFINED:
                    return new ExpressionEXPR(Expression.RightShift(arg1, arg2, methodInfo));
                case PREDEFMETH.PM_EXPRESSION_SUBTRACT_USER_DEFINED:
                    return new ExpressionEXPR(Expression.Subtract(arg1, arg2, methodInfo));
                case PREDEFMETH.PM_EXPRESSION_ORELSE_USER_DEFINED:
                    return new ExpressionEXPR(Expression.OrElse(arg1, arg2, methodInfo));
                case PREDEFMETH.PM_EXPRESSION_ANDALSO_USER_DEFINED:
                    return new ExpressionEXPR(Expression.AndAlso(arg1, arg2, methodInfo));

                // Checked
                case PREDEFMETH.PM_EXPRESSION_ADDCHECKED_USER_DEFINED:
                    return new ExpressionEXPR(Expression.AddChecked(arg1, arg2, methodInfo));
                case PREDEFMETH.PM_EXPRESSION_MULTIPLYCHECKED_USER_DEFINED:
                    return new ExpressionEXPR(Expression.MultiplyChecked(arg1, arg2, methodInfo));
                case PREDEFMETH.PM_EXPRESSION_SUBTRACTCHECKED_USER_DEFINED:
                    return new ExpressionEXPR(Expression.SubtractChecked(arg1, arg2, methodInfo));

                default:
                    Debug.Assert(false, "Invalid Predefined Method in GenerateUserDefinedBinaryOperator");
                    throw Error.InternalCompilerError();
            }
        }

        /////////////////////////////////////////////////////////////////////////////////

        /// <summary>
        /// Maps a built-in unary-operator factory call (Not, Negate, NegateChecked).
        /// </summary>
        private ExpressionEXPR GenerateUnaryOperator(EXPRCALL pExpr)
        {
            PREDEFMETH pm = pExpr.PredefinedMethod;
            Expression arg = GetExpression(pExpr.GetOptionalArguments());

            switch (pm)
            {
                case PREDEFMETH.PM_EXPRESSION_NOT:
                    return new ExpressionEXPR(Expression.Not(arg));
                case PREDEFMETH.PM_EXPRESSION_NEGATE:
                    return new ExpressionEXPR(Expression.Negate(arg));
                case PREDEFMETH.PM_EXPRESSION_NEGATECHECKED:
                    return new ExpressionEXPR(Expression.NegateChecked(arg));

                default:
                    Debug.Assert(false, "Invalid Predefined Method in GenerateUnaryOperator");
                    throw Error.InternalCompilerError();
            }
        }

        /////////////////////////////////////////////////////////////////////////////////

        /// <summary>
        /// Maps a user-defined unary-operator factory call, forwarding the operator MethodInfo.
        /// </summary>
        private ExpressionEXPR GenerateUserDefinedUnaryOperator(EXPRCALL pExpr)
        {
            PREDEFMETH pm = pExpr.PredefinedMethod;
            EXPRLIST list = pExpr.GetOptionalArguments().asLIST();
            Expression arg = GetExpression(list.GetOptionalElement());
            MethodInfo methodInfo = GetMethodInfoFromExpr(list.GetOptionalNextListNode().asMETHODINFO());

            switch (pm)
            {
                case PREDEFMETH.PM_EXPRESSION_NOT_USER_DEFINED:
                    return new ExpressionEXPR(Expression.Not(arg, methodInfo));
                case PREDEFMETH.PM_EXPRESSION_NEGATE_USER_DEFINED:
                    return new ExpressionEXPR(Expression.Negate(arg, methodInfo));
                case PREDEFMETH.PM_EXPRESSION_UNARYPLUS_USER_DEFINED:
                    return new ExpressionEXPR(Expression.UnaryPlus(arg, methodInfo));
                case PREDEFMETH.PM_EXPRESSION_NEGATECHECKED_USER_DEFINED:
                    return new ExpressionEXPR(Expression.NegateChecked(arg, methodInfo));

                default:
                    Debug.Assert(false, "Invalid Predefined Method in GenerateUserDefinedUnaryOperator");
                    throw Error.InternalCompilerError();
            }
        }

        #endregion

        #region Helpers

        /////////////////////////////////////////////////////////////////////////////////
// Recursively translates an already-rewritten EXPR tree node into the
// System.Linq.Expressions node it represents. Handles parameter WRAPs,
// null constants, and every predefined Expression factory call we generate.
private Expression GetExpression(EXPR pExpr)
{
    if (pExpr.isWRAP())
    {
        // WRAP nodes stand for lambda parameters created earlier; look them up.
        return _DictionaryOfParameters[pExpr.asWRAP().GetOptionalExpression().asCALL()];
    }
    else if (pExpr.isCONSTANT())
    {
        // The only constant that can appear here is the null literal.
        Debug.Assert(pExpr.type.IsNullType());
        return null;
    }
    else
    {
        // We can have a convert node or a call of a user defined conversion.
        Debug.Assert(pExpr.isCALL());
        EXPRCALL call = pExpr.asCALL();
        PREDEFMETH pm = call.PredefinedMethod;
        Debug.Assert(pm == PREDEFMETH.PM_EXPRESSION_CONVERT ||
            pm == PREDEFMETH.PM_EXPRESSION_CONVERT_USER_DEFINED ||
            pm == PREDEFMETH.PM_EXPRESSION_NEWARRAYINIT ||
            pm == PREDEFMETH.PM_EXPRESSION_CALL ||
            pm == PREDEFMETH.PM_EXPRESSION_PROPERTY ||
            pm == PREDEFMETH.PM_EXPRESSION_FIELD ||
            pm == PREDEFMETH.PM_EXPRESSION_ARRAYINDEX ||
            pm == PREDEFMETH.PM_EXPRESSION_ARRAYINDEX2 ||
            pm == PREDEFMETH.PM_EXPRESSION_CONSTANT_OBJECT_TYPE ||
            pm == PREDEFMETH.PM_EXPRESSION_NEW ||

            // Binary operators.
            pm == PREDEFMETH.PM_EXPRESSION_ASSIGN ||
            pm == PREDEFMETH.PM_EXPRESSION_ADD ||
            pm == PREDEFMETH.PM_EXPRESSION_AND ||
            pm == PREDEFMETH.PM_EXPRESSION_DIVIDE ||
            pm == PREDEFMETH.PM_EXPRESSION_EQUAL ||
            pm == PREDEFMETH.PM_EXPRESSION_EXCLUSIVEOR ||
            pm == PREDEFMETH.PM_EXPRESSION_GREATERTHAN ||
            pm == PREDEFMETH.PM_EXPRESSION_GREATERTHANOREQUAL ||
            pm == PREDEFMETH.PM_EXPRESSION_LEFTSHIFT ||
            pm == PREDEFMETH.PM_EXPRESSION_LESSTHAN ||
            pm == PREDEFMETH.PM_EXPRESSION_LESSTHANOREQUAL ||
            pm == PREDEFMETH.PM_EXPRESSION_MODULO ||
            pm == PREDEFMETH.PM_EXPRESSION_MULTIPLY ||
            pm == PREDEFMETH.PM_EXPRESSION_NOTEQUAL ||
            pm == PREDEFMETH.PM_EXPRESSION_OR ||
            pm == PREDEFMETH.PM_EXPRESSION_RIGHTSHIFT ||
            pm == PREDEFMETH.PM_EXPRESSION_SUBTRACT ||
            pm == PREDEFMETH.PM_EXPRESSION_ORELSE ||
            pm == PREDEFMETH.PM_EXPRESSION_ANDALSO ||
            pm == PREDEFMETH.PM_EXPRESSION_ADD_USER_DEFINED ||
            pm == PREDEFMETH.PM_EXPRESSION_AND_USER_DEFINED ||
            pm == PREDEFMETH.PM_EXPRESSION_DIVIDE_USER_DEFINED ||
            pm == PREDEFMETH.PM_EXPRESSION_EQUAL_USER_DEFINED ||
            pm == PREDEFMETH.PM_EXPRESSION_EXCLUSIVEOR_USER_DEFINED ||
            pm == PREDEFMETH.PM_EXPRESSION_GREATERTHAN_USER_DEFINED ||
            pm == PREDEFMETH.PM_EXPRESSION_GREATERTHANOREQUAL_USER_DEFINED ||
            pm == PREDEFMETH.PM_EXPRESSION_LEFTSHIFT_USER_DEFINED ||
            pm == PREDEFMETH.PM_EXPRESSION_LESSTHAN_USER_DEFINED ||
            pm == PREDEFMETH.PM_EXPRESSION_LESSTHANOREQUAL_USER_DEFINED ||
            pm == PREDEFMETH.PM_EXPRESSION_MODULO_USER_DEFINED ||
            pm == PREDEFMETH.PM_EXPRESSION_MULTIPLY_USER_DEFINED ||
            pm == PREDEFMETH.PM_EXPRESSION_NOTEQUAL_USER_DEFINED ||
            pm == PREDEFMETH.PM_EXPRESSION_OR_USER_DEFINED ||
            pm == PREDEFMETH.PM_EXPRESSION_RIGHTSHIFT_USER_DEFINED ||
            pm == PREDEFMETH.PM_EXPRESSION_SUBTRACT_USER_DEFINED ||
            pm == PREDEFMETH.PM_EXPRESSION_ORELSE_USER_DEFINED ||
            pm == PREDEFMETH.PM_EXPRESSION_ANDALSO_USER_DEFINED ||

            // Checked binary
            pm == PREDEFMETH.PM_EXPRESSION_ADDCHECKED ||
            pm == PREDEFMETH.PM_EXPRESSION_MULTIPLYCHECKED ||
            pm == PREDEFMETH.PM_EXPRESSION_SUBTRACTCHECKED ||
            pm == PREDEFMETH.PM_EXPRESSION_ADDCHECKED_USER_DEFINED ||
            pm == PREDEFMETH.PM_EXPRESSION_MULTIPLYCHECKED_USER_DEFINED ||
            pm == PREDEFMETH.PM_EXPRESSION_SUBTRACTCHECKED_USER_DEFINED ||

            // Unary operators.
            pm == PREDEFMETH.PM_EXPRESSION_NOT ||
            pm == PREDEFMETH.PM_EXPRESSION_NEGATE ||
            pm == PREDEFMETH.PM_EXPRESSION_NOT_USER_DEFINED ||
            pm == PREDEFMETH.PM_EXPRESSION_NEGATE_USER_DEFINED ||
            pm == PREDEFMETH.PM_EXPRESSION_UNARYPLUS_USER_DEFINED ||

            // Checked unary
            pm == PREDEFMETH.PM_EXPRESSION_NEGATECHECKED ||
            pm == PREDEFMETH.PM_EXPRESSION_CONVERTCHECKED ||
            pm == PREDEFMETH.PM_EXPRESSION_NEGATECHECKED_USER_DEFINED ||
            pm == PREDEFMETH.PM_EXPRESSION_CONVERTCHECKED_USER_DEFINED
            );

        switch (pm)
        {
            case PREDEFMETH.PM_EXPRESSION_CALL:
                return GenerateCall(call).Expression;

            case PREDEFMETH.PM_EXPRESSION_CONVERT:
            case PREDEFMETH.PM_EXPRESSION_CONVERT_USER_DEFINED:
            case PREDEFMETH.PM_EXPRESSION_CONVERTCHECKED:
            case PREDEFMETH.PM_EXPRESSION_CONVERTCHECKED_USER_DEFINED:
                return GenerateConvert(call).Expression;

            case PREDEFMETH.PM_EXPRESSION_NEWARRAYINIT:
                {
                    // Arguments are (typeof(T), { initializers }).
                    EXPRLIST list = call.GetOptionalArguments().asLIST();
                    return Expression.NewArrayInit(
                        list.GetOptionalElement().asTYPEOF().SourceType.type.AssociatedSystemType,
                        GetArgumentsFromArrayInit(list.GetOptionalNextListNode().asARRINIT()));
                }

            case PREDEFMETH.PM_EXPRESSION_ARRAYINDEX:
            case PREDEFMETH.PM_EXPRESSION_ARRAYINDEX2:
                return GenerateArrayIndex(call).Expression;

            case PREDEFMETH.PM_EXPRESSION_NEW:
                return GenerateNew(call).Expression;

            case PREDEFMETH.PM_EXPRESSION_PROPERTY:
                return GenerateProperty(call).Expression;

            case PREDEFMETH.PM_EXPRESSION_FIELD:
                return GenerateField(call).Expression;

            case PREDEFMETH.PM_EXPRESSION_CONSTANT_OBJECT_TYPE:
                return GenerateConstantType(call).Expression;

            case PREDEFMETH.PM_EXPRESSION_ASSIGN:
                return GenerateAssignment(call).Expression;

            case PREDEFMETH.PM_EXPRESSION_ADD:
            case PREDEFMETH.PM_EXPRESSION_AND:
            case PREDEFMETH.PM_EXPRESSION_DIVIDE:
            case PREDEFMETH.PM_EXPRESSION_EQUAL:
            case PREDEFMETH.PM_EXPRESSION_EXCLUSIVEOR:
            case PREDEFMETH.PM_EXPRESSION_GREATERTHAN:
            case PREDEFMETH.PM_EXPRESSION_GREATERTHANOREQUAL:
            case PREDEFMETH.PM_EXPRESSION_LEFTSHIFT:
            case PREDEFMETH.PM_EXPRESSION_LESSTHAN:
            case PREDEFMETH.PM_EXPRESSION_LESSTHANOREQUAL:
            case PREDEFMETH.PM_EXPRESSION_MODULO:
            case PREDEFMETH.PM_EXPRESSION_MULTIPLY:
            case PREDEFMETH.PM_EXPRESSION_NOTEQUAL:
            case PREDEFMETH.PM_EXPRESSION_OR:
            case PREDEFMETH.PM_EXPRESSION_RIGHTSHIFT:
            case PREDEFMETH.PM_EXPRESSION_SUBTRACT:
            case PREDEFMETH.PM_EXPRESSION_ORELSE:
            case PREDEFMETH.PM_EXPRESSION_ANDALSO:
            // Checked
            case PREDEFMETH.PM_EXPRESSION_ADDCHECKED:
            case PREDEFMETH.PM_EXPRESSION_MULTIPLYCHECKED:
            case PREDEFMETH.PM_EXPRESSION_SUBTRACTCHECKED:
                return GenerateBinaryOperator(call).Expression;

            case PREDEFMETH.PM_EXPRESSION_ADD_USER_DEFINED:
            case PREDEFMETH.PM_EXPRESSION_AND_USER_DEFINED:
            case PREDEFMETH.PM_EXPRESSION_DIVIDE_USER_DEFINED:
            case PREDEFMETH.PM_EXPRESSION_EQUAL_USER_DEFINED:
            case PREDEFMETH.PM_EXPRESSION_EXCLUSIVEOR_USER_DEFINED:
            case PREDEFMETH.PM_EXPRESSION_GREATERTHAN_USER_DEFINED:
            case PREDEFMETH.PM_EXPRESSION_GREATERTHANOREQUAL_USER_DEFINED:
            case PREDEFMETH.PM_EXPRESSION_LEFTSHIFT_USER_DEFINED:
            case PREDEFMETH.PM_EXPRESSION_LESSTHAN_USER_DEFINED:
            case PREDEFMETH.PM_EXPRESSION_LESSTHANOREQUAL_USER_DEFINED:
            case PREDEFMETH.PM_EXPRESSION_MODULO_USER_DEFINED:
            case PREDEFMETH.PM_EXPRESSION_MULTIPLY_USER_DEFINED:
            case PREDEFMETH.PM_EXPRESSION_NOTEQUAL_USER_DEFINED:
            case PREDEFMETH.PM_EXPRESSION_OR_USER_DEFINED:
            case PREDEFMETH.PM_EXPRESSION_RIGHTSHIFT_USER_DEFINED:
            case PREDEFMETH.PM_EXPRESSION_SUBTRACT_USER_DEFINED:
            case PREDEFMETH.PM_EXPRESSION_ORELSE_USER_DEFINED:
            case PREDEFMETH.PM_EXPRESSION_ANDALSO_USER_DEFINED:
            // Checked
            case PREDEFMETH.PM_EXPRESSION_ADDCHECKED_USER_DEFINED:
            case PREDEFMETH.PM_EXPRESSION_MULTIPLYCHECKED_USER_DEFINED:
            case PREDEFMETH.PM_EXPRESSION_SUBTRACTCHECKED_USER_DEFINED:
                return GenerateUserDefinedBinaryOperator(call).Expression;

            case PREDEFMETH.PM_EXPRESSION_NOT:
            case PREDEFMETH.PM_EXPRESSION_NEGATE:
            case PREDEFMETH.PM_EXPRESSION_NEGATECHECKED:
                return GenerateUnaryOperator(call).Expression;

            case PREDEFMETH.PM_EXPRESSION_NOT_USER_DEFINED:
            case PREDEFMETH.PM_EXPRESSION_NEGATE_USER_DEFINED:
            case PREDEFMETH.PM_EXPRESSION_UNARYPLUS_USER_DEFINED:
            case PREDEFMETH.PM_EXPRESSION_NEGATECHECKED_USER_DEFINED:
                return GenerateUserDefinedUnaryOperator(call).Expression;

            default:
                Debug.Assert(false, "Invalid Predefined Method in GetExpression");
                throw Error.InternalCompilerError();
        }
    }
}

/////////////////////////////////////////////////////////////////////////////////

// Materializes the CLR object a compile-time EXPR constant represents:
// unwraps casts, maps typeof/methodinfo nodes, boxes primitive constants
// (re-wrapping enum values), and instantiates default values for ZEROINIT.
private object GetObject(EXPR pExpr)
{
    if (pExpr.isCAST())
    {
        return GetObject(pExpr.asCAST().GetArgument());
    }
    else if (pExpr.isTYPEOF())
    {
        return pExpr.asTYPEOF().SourceType.type.AssociatedSystemType;
    }
    else if (pExpr.isMETHODINFO())
    {
        return GetMethodInfoFromExpr(pExpr.asMETHODINFO());
    }
    else if (pExpr.isCONSTANT())
    {
        CONSTVAL val = pExpr.asCONSTANT().Val;
        CType underlyingType = pExpr.type;
        object objval;

        if (pExpr.type.IsNullType())
        {
            return null;
        }

        if (pExpr.type.isEnumType())
        {
            // Read the raw value through the enum's underlying primitive type.
            underlyingType = underlyingType.getAggregate().GetUnderlyingType();
        }

        switch (underlyingType.AssociatedSystemType.GetTypeCode())
        {
            case TypeCode.Boolean: objval = val.boolVal; break;
            case TypeCode.SByte: objval = val.sbyteVal; break;
            case TypeCode.Byte: objval = val.byteVal; break;
            case TypeCode.Int16: objval = val.shortVal; break;
            case TypeCode.UInt16: objval = val.ushortVal; break;
            case TypeCode.Int32: objval = val.iVal; break;
            case TypeCode.UInt32: objval = val.uiVal; break;
            case TypeCode.Int64: objval = val.longVal; break;
            case TypeCode.UInt64: objval = val.ulongVal; break;
            case TypeCode.Single: objval = val.floatVal; break;
            case TypeCode.Double: objval = val.doubleVal; break;
            case TypeCode.Decimal: objval = val.decVal; break;
            case TypeCode.Char: objval = val.cVal; break;
            case TypeCode.String: objval = val.strVal; break;
            default: objval = val.objectVal; break;
        }

        if (pExpr.type.isEnumType())
        {
            // Re-box the primitive as an instance of the enum type itself.
            objval = Enum.ToObject(pExpr.type.AssociatedSystemType, objval);
        }

        return objval;
    }
    else if (pExpr.isZEROINIT())
    {
        if (pExpr.asZEROINIT().OptionalArgument != null)
        {
            return GetObject(pExpr.asZEROINIT().OptionalArgument);
        }
        return System.Activator.CreateInstance(pExpr.type.AssociatedSystemType);
    }

    Debug.Assert(false, "Invalid EXPR in GetObject");
    throw Error.InternalCompilerError();
}

/////////////////////////////////////////////////////////////////////////////////

// Flattens an array-initializer's argument list into the corresponding
// Expression nodes; the list may be a single element or an EXPRLIST chain.
private Expression[] GetArgumentsFromArrayInit(EXPRARRINIT arrinit)
{
    List<Expression> expressions = new List<Expression>();

    if (arrinit != null)
    {
        EXPR list = arrinit.GetOptionalArguments();
        EXPR p = list;
        while (list != null)
        {
            if (list.isLIST())
            {
                p = list.asLIST().GetOptionalElement();
                list = list.asLIST().GetOptionalNextListNode();
            }
            else
            {
                p = list;
                list = null;
            }
            expressions.Add(GetExpression(p));
        }

        Debug.Assert(expressions.Count == arrinit.dimSizes[0]);
    }
    return expressions.ToArray();
}

/////////////////////////////////////////////////////////////////////////////////

// Resolves the runtime MethodInfo for a method symbol on its constructed
// declaring type, instantiating generic methods with their type arguments.
private MethodInfo GetMethodInfoFromExpr(EXPRMETHODINFO methinfo)
{
    // To do this, we need to construct a type array of the parameter types,
    // get the parent constructed type, and get the method from it.
    AggregateType aggType = methinfo.Method.Ats;
    MethodSymbol methSym = methinfo.Method.Meth();

    TypeArray genericParams = _typeManager.SubstTypeArray(methSym.Params, aggType, methSym.typeVars);
    CType genericReturn = _typeManager.SubstType(methSym.RetType, aggType, methSym.typeVars);

    Type type = aggType.AssociatedSystemType;
    MethodInfo methodInfo = methSym.AssociatedMemberInfo as MethodInfo;

    // This is to ensure that for embedded nopia types, we have the
    // appropriate local type from the member itself; this is possible
    // because nopia types are not generic or nested.
    if (!type.GetTypeInfo().IsGenericType && !type.IsNested)
    {
        type = methodInfo.DeclaringType;
    }

    // We need to find the associated methodinfo on the instantiated type.
    foreach (MethodInfo m in type.GetRuntimeMethods())
    {
#if UNSUPPORTEDAPI
        if ((m.MetadataToken != methodInfo.MetadataToken) || (m.Module != methodInfo.Module))
#else
        if (!m.HasSameMetadataDefinitionAs(methodInfo))
#endif
        {
            continue;
        }

        Debug.Assert((m.Name == methodInfo.Name) &&
            (m.GetParameters().Length == genericParams.size) &&
            (TypesAreEqual(m.ReturnType, genericReturn.AssociatedSystemType)));

        bool bMatch = true;
        ParameterInfo[] parameters = m.GetParameters();
        for (int i = 0; i < genericParams.size; i++)
        {
            if (!TypesAreEqual(parameters[i].ParameterType, genericParams.Item(i).AssociatedSystemType))
            {
                bMatch = false;
                break;
            }
        }
        if (bMatch)
        {
            if (m.IsGenericMethod)
            {
                // Close the generic method over the call's type arguments.
                int size = methinfo.Method.TypeArgs != null ? methinfo.Method.TypeArgs.size : 0;
                Type[] typeArgs = new Type[size];
                if (size > 0)
                {
                    for (int i = 0; i < methinfo.Method.TypeArgs.size; i++)
                    {
                        typeArgs[i] = methinfo.Method.TypeArgs[i].AssociatedSystemType;
                    }
                }
                return m.MakeGenericMethod(typeArgs);
            }

            return m;
        }
    }

    Debug.Assert(false, "Could not find matching method");
    throw Error.InternalCompilerError();
}

/////////////////////////////////////////////////////////////////////////////////

// Resolves the runtime ConstructorInfo for a constructor symbol on its
// constructed declaring type by matching metadata and parameter types.
private ConstructorInfo GetConstructorInfoFromExpr(EXPRMETHODINFO methinfo)
{
    // To do this, we need to construct a type array of the parameter types,
    // get the parent constructed type, and get the method from it.
    AggregateType aggType = methinfo.Method.Ats;
    MethodSymbol methSym = methinfo.Method.Meth();

    TypeArray genericInstanceParams = _typeManager.SubstTypeArray(methSym.Params, aggType);
    Type type = aggType.AssociatedSystemType;
    ConstructorInfo ctorInfo = (ConstructorInfo)methSym.AssociatedMemberInfo;

    // This is to ensure that for embedded nopia types, we have the
    // appropriate local type from the member itself; this is possible
    // because nopia types are not generic or nested.
    if (!type.GetTypeInfo().IsGenericType && !type.IsNested)
    {
        type = ctorInfo.DeclaringType;
    }

    foreach (ConstructorInfo c in type.GetConstructors(BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance | BindingFlags.Static))
    {
#if UNSUPPORTEDAPI
        if ((c.MetadataToken != ctorInfo.MetadataToken) || (c.Module != ctorInfo.Module))
#else
        if (!c.HasSameMetadataDefinitionAs(ctorInfo))
#endif
        {
            continue;
        }
        Debug.Assert(c.GetParameters() == null || c.GetParameters().Length == genericInstanceParams.size);

        bool bMatch = true;
        ParameterInfo[] parameters = c.GetParameters();
        for (int i = 0; i < genericInstanceParams.size; i++)
        {
            if (!TypesAreEqual(parameters[i].ParameterType, genericInstanceParams.Item(i).AssociatedSystemType))
            {
                bMatch = false;
                break;
            }
        }
        if (bMatch)
        {
            return c;
        }
    }

    Debug.Assert(false, "Could not find matching constructor");
    throw Error.InternalCompilerError();
}

/////////////////////////////////////////////////////////////////////////////////

// Resolves the runtime PropertyInfo for a property symbol on its constructed
// declaring type. FIX: the '{' that opens the continue-block was previously
// placed inside the #else branch (before #endif), which left an unmatched '}'
// when compiling with UNSUPPORTEDAPI defined; it now sits after #endif,
// matching GetMethodInfoFromExpr/GetConstructorInfoFromExpr.
private PropertyInfo GetPropertyInfoFromExpr(EXPRPropertyInfo propinfo)
{
    // To do this, we need to construct a type array of the parameter types,
    // get the parent constructed type, and get the property from it.
    AggregateType aggType = propinfo.Property.Ats;
    PropertySymbol propSym = propinfo.Property.Prop();

    TypeArray genericInstanceParams = _typeManager.SubstTypeArray(propSym.Params, aggType, null);
    CType genericInstanceReturn = _typeManager.SubstType(propSym.RetType, aggType, null);

    Type type = aggType.AssociatedSystemType;
    PropertyInfo propertyInfo = propSym.AssociatedPropertyInfo;

    // This is to ensure that for embedded nopia types, we have the
    // appropriate local type from the member itself; this is possible
    // because nopia types are not generic or nested.
    if (!type.GetTypeInfo().IsGenericType && !type.IsNested)
    {
        type = propertyInfo.DeclaringType;
    }

    foreach (PropertyInfo p in type.GetProperties(BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance | BindingFlags.Static))
    {
#if UNSUPPORTEDAPI
        if ((p.MetadataToken != propertyInfo.MetadataToken) || (p.Module != propertyInfo.Module))
#else
        if (!p.HasSameMetadataDefinitionAs(propertyInfo))
#endif
        {
            continue;
        }
        Debug.Assert((p.Name == propertyInfo.Name) &&
            (p.GetIndexParameters() == null || p.GetIndexParameters().Length == genericInstanceParams.size));

        bool bMatch = true;
        // Prefer the setter's parameter list; an indexer's getter lacks the value parameter.
        ParameterInfo[] parameters = p.GetSetMethod(true) != null ?
            p.GetSetMethod(true).GetParameters() : p.GetGetMethod(true).GetParameters();
        for (int i = 0; i < genericInstanceParams.size; i++)
        {
            if (!TypesAreEqual(parameters[i].ParameterType, genericInstanceParams.Item(i).AssociatedSystemType))
            {
                bMatch = false;
                break;
            }
        }
        if (bMatch)
        {
            return p;
        }
    }

    Debug.Assert(false, "Could not find matching property");
    throw Error.InternalCompilerError();
}

/////////////////////////////////////////////////////////////////////////////////

// Reference equality or type equivalence (covers embedded interop types).
private bool TypesAreEqual(Type t1, Type t2)
{
    if (t1 == t2)
    {
        return true;
    }

    return t1.IsEquivalentTo(t2);
}
#endregion
    }
}
// Copyright (c) Microsoft.  All Rights Reserved.  Licensed under the Apache License, Version 2.0.  See License.txt in the project root for license information.

using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using Microsoft.Isam.Esent.Interop;
using Roslyn.Utilities;

namespace Microsoft.CodeAnalysis.Esent
{
    // Owns a single ESENT instance/database pair plus a small cache of
    // per-thread sessions, and exposes typed accessors for each table.
    internal partial class EsentStorage
    {
        // set db page size to 8K and version store page size to 16K
        private const int DatabasePageSize = 2 * 4 * 1024;
        private const int VersionStorePageSize = 4 * 4 * 1024;

        // JET parameter consts
        // (raw ids for parameters the managed wrapper does not surface)
        private const int JET_paramIOPriority = 152;
        private const int JET_paramCheckpointIOMax = 135;
        private const int JET_paramVerPageSize = 128;
        private const int JET_paramDisablePerfmon = 107;
        private const int JET_paramPageHintCacheSize = 101;
        private const int JET_paramLogFileCreateAsynch = 69;
        private const int JET_paramOutstandingIOMax = 30;
        private const int JET_paramLRUKHistoryMax = 26;
        private const int JET_paramCommitDefault = 16;

        private readonly string _databaseFile;
        private readonly bool _enablePerformanceMonitor;

        private readonly Dictionary<TableKinds, AbstractTable> _tables;
        private readonly ConcurrentStack<OpenSession> _sessionCache;
        private readonly CancellationTokenSource _shutdownCancellationTokenSource;

        private Instance _instance;
        private Session _primarySessionId;
        private JET_DBID _primaryDatabaseId;

        public EsentStorage(string databaseFile, bool enablePerformanceMonitor = false)
        {
            Contract.Requires(!string.IsNullOrWhiteSpace(databaseFile));

            _databaseFile = databaseFile;
            _enablePerformanceMonitor = enablePerformanceMonitor;

            // order of tables are fixed. don't change it
            _tables = new Dictionary<TableKinds, AbstractTable>()
            {
                { TableKinds.Name, new NameTable() },
                { TableKinds.Solution, new SolutionTable() },
                { TableKinds.Project, new ProjectTable() },
                { TableKinds.Document, new DocumentTable() },
                { TableKinds.Identifier, new IdentifierNameTable() },
                { TableKinds.IdentifierLocations, new IdentifierLocationTable() },
            };

            _sessionCache = new ConcurrentStack<OpenSession>();
            _shutdownCancellationTokenSource = new CancellationTokenSource();
        }

        // Creates the ESENT instance/primary session and attaches or creates the database.
        public void Initialize()
        {
            _instance = CreateEsentInstance();
            _primarySessionId = new Session(_instance);

            InitializeDatabaseAndTables();
        }

        public bool IsClosed
        {
            get { return _instance == null; }
        }

        // Shuts the instance down; cached sessions are cancelled and the
        // instance dispose tears down everything associated with it.
        public void Close()
        {
            if (_instance != null)
            {
                var handle = _instance;
                _instance = null;

                // try to free all allocated session - if succeeded we can try to do a clean shutdown
                _shutdownCancellationTokenSource.Cancel();

                try
                {
                    // just close the instance - all associated objects will be closed as well
                    _primarySessionId.Dispose();
                    handle.Dispose();
                }
                catch
                {
                    // ignore exception if whatever reason esent throws an exception.
                }

                _shutdownCancellationTokenSource.Dispose();
            }
        }

        public int GetUniqueId(string value)
        {
            return GetUniqueId(value, TableKinds.Name);
        }

        public int GetUniqueIdentifierId(string value)
        {
            return GetUniqueId(value, TableKinds.Identifier);
        }

        // Maps a string to its stable integer id in the given name table.
        private int GetUniqueId(string value, TableKinds tableKind)
        {
            Contract.ThrowIfTrue(string.IsNullOrWhiteSpace(value));

            using (var accessor = (StringNameTableAccessor)GetTableAccessor(tableKind))
            {
                return accessor.GetUniqueId(value);
            }
        }

        public SolutionTableAccessor GetSolutionTableAccessor()
        {
            return (SolutionTableAccessor)GetTableAccessor(TableKinds.Solution);
        }

        public ProjectTableAccessor GetProjectTableAccessor()
        {
            return (ProjectTableAccessor)GetTableAccessor(TableKinds.Project);
        }

        public DocumentTableAccessor GetDocumentTableAccessor()
        {
            return (DocumentTableAccessor)GetTableAccessor(TableKinds.Document);
        }

        public IdentifierLocationTableAccessor GetIdentifierLocationTableAccessor()
        {
            return (IdentifierLocationTableAccessor)GetTableAccessor(TableKinds.IdentifierLocations);
        }

        private AbstractTableAccessor GetTableAccessor(TableKinds tableKind)
        {
            return _tables[tableKind].GetTableAccessor(GetOpenSession());
        }

        // Pops a cached session when available; otherwise opens a new one.
        private OpenSession GetOpenSession()
        {
            if (_sessionCache.TryPop(out var session))
            {
                return session;
            }

            return new OpenSession(this, _databaseFile, _shutdownCancellationTokenSource.Token);
        }

        // Returns a session to the cache, or closes it during shutdown or
        // when the cache already holds more than 5 sessions. NOTE(review):
        // the Count check and Push are not atomic, so the cache may briefly
        // exceed the cap under concurrency — presumably acceptable here.
        private void CloseSession(OpenSession session)
        {
            if (_shutdownCancellationTokenSource.IsCancellationRequested)
            {
                session.Close();
                return;
            }

            if (_sessionCache.Count > 5)
            {
                session.Close();
                return;
            }

            _sessionCache.Push(session);
        }

        // Configures and initializes the per-database ESENT instance.
        private Instance CreateEsentInstance()
        {
            var instanceDataFolder = Path.GetDirectoryName(_databaseFile);

            TryInitializeGlobalParameters();

            var instance = new Instance(Path.GetFileName(_databaseFile), _databaseFile, TermGrbit.Complete);

            // create log file preemptively
            Api.JetSetSystemParameter(instance.JetInstance, JET_SESID.Nil, (JET_param)JET_paramLogFileCreateAsynch, /* true */ 1, null);

            // set default commit mode
            Api.JetSetSystemParameter(instance.JetInstance, JET_SESID.Nil, (JET_param)JET_paramCommitDefault, /* lazy */ 1, null);

            // remove transaction log file that is not needed anymore
            instance.Parameters.CircularLog = true;

            // transaction log file buffer 1M (1024 * 2 * 512 bytes)
            instance.Parameters.LogBuffers = 2 * 1024;

            // transaction log file is 2M (2 * 1024 * 1024 bytes)
            instance.Parameters.LogFileSize = 2 * 1024;

            // db directories
            instance.Parameters.LogFileDirectory = instanceDataFolder;
            instance.Parameters.SystemDirectory = instanceDataFolder;
            instance.Parameters.TempDirectory = instanceDataFolder;

            // Esent uses version pages to store intermediate non-committed data during transactions
            // smaller values may cause VersionStoreOutOfMemory error when dealing with multiple transactions\writing lot's of data in transaction or both
            // it is about 16MB - this is okay to be big since most of it is temporary memory that will be released once the last transaction goes away
            instance.Parameters.MaxVerPages = 16 * 1024 * 1024 / VersionStorePageSize;

            // set the size of max transaction log size (in bytes) that should be replayed after the crash
            // small values: smaller log files but potentially longer transaction flushes if there was a crash (6M)
            instance.Parameters.CheckpointDepthMax = 6 * 1024 * 1024;

            // how much db grows when it finds db is full (1M)
            // (less I/O as value gets bigger)
            instance.Parameters.DbExtensionSize = 1024 * 1024 / DatabasePageSize;

            // fail fast if log file is wrong. we will recover from it by creating db from scratch
            instance.Parameters.CleanupMismatchedLogFiles = true;
            instance.Parameters.EnableIndexChecking = true;

            // now, actually initialize instance
            instance.Init();

            return instance;
        }

        // Sets process-wide ESENT parameters; these can only be changed
        // before the first instance exists, hence the early-out and the
        // EsentAlreadyInitializedException swallow below.
        private void TryInitializeGlobalParameters()
        {
            Api.JetGetInstanceInfo(out var instances, out var infos);

            // already initialized nothing we can do.
            if (instances != 0)
            {
                return;
            }

            try
            {
                // use small configuration so that esent use process heap and windows file cache
                SystemParameters.Configuration = 0;

                // allow many esent instances
                SystemParameters.MaxInstances = 1024;

                // enable perf monitor if requested
                Api.JetSetSystemParameter(JET_INSTANCE.Nil, JET_SESID.Nil, (JET_param)JET_paramDisablePerfmon, _enablePerformanceMonitor ? 0 : 1, null);

                // set max IO queue (bigger value better IO perf)
                Api.JetSetSystemParameter(JET_INSTANCE.Nil, JET_SESID.Nil, (JET_param)JET_paramOutstandingIOMax, 1024, null);

                // set max current write to db
                Api.JetSetSystemParameter(JET_INSTANCE.Nil, JET_SESID.Nil, (JET_param)JET_paramCheckpointIOMax, 32, null);

                // better cache management (4M)
                Api.JetSetSystemParameter(JET_INSTANCE.Nil, JET_SESID.Nil, (JET_param)JET_paramLRUKHistoryMax, 4 * 1024 * 1024 / DatabasePageSize, null);

                // better db performance (100K bytes)
                Api.JetSetSystemParameter(JET_INSTANCE.Nil, JET_SESID.Nil, (JET_param)JET_paramPageHintCacheSize, 100 * 1024, null);

                // set version page size to normal 16K
                Api.JetSetSystemParameter(JET_INSTANCE.Nil, JET_SESID.Nil, (JET_param)JET_paramVerPageSize, VersionStorePageSize, null);

                // use windows file system cache
                SystemParameters.EnableFileCache = true;

                // don't use mapped file for database. this will waste more VM.
                SystemParameters.EnableViewCache = false;

                // this is the unit where chunks are loaded into memory/locked and etc
                SystemParameters.DatabasePageSize = DatabasePageSize;

                // set max cache size - don't use too much memory for cache (8MB)
                SystemParameters.CacheSizeMax = 8 * 1024 * 1024 / DatabasePageSize;

                // set min cache size - Esent tries to adjust this value automatically but often it is better to help him.
                // small cache sizes => more I\O during random seeks
                // currently set to 2MB
                SystemParameters.CacheSizeMin = 2 * 1024 * 1024 / DatabasePageSize;

                // set box of when cache eviction starts (1% - 2% of max cache size)
                SystemParameters.StartFlushThreshold = 20;
                SystemParameters.StopFlushThreshold = 40;
            }
            catch (EsentAlreadyInitializedException)
            {
                // can't change global status
            }
        }

        // Attaches an existing database or creates a new one, then creates
        // or opens all tables; races on file deletion are handled by
        // falling back to creation.
        private void InitializeDatabaseAndTables()
        {
            // open database for the first time: database file will be created if necessary
            // first quick check whether file exist
            if (!File.Exists(_databaseFile))
            {
                Api.JetCreateDatabase(_primarySessionId, _databaseFile, null, out _primaryDatabaseId, CreateDatabaseGrbit.None);
                CreateTables();
                return;
            }

            // file exist, just attach the db.
            try
            {
                // if this succeed, it will lock the file.
                Api.JetAttachDatabase(_primarySessionId, _databaseFile, AttachDatabaseGrbit.None);
            }
            catch (EsentFileNotFoundException)
            {
                // if someone has deleted the file, while we are attaching.
                Api.JetCreateDatabase(_primarySessionId, _databaseFile, null, out _primaryDatabaseId, CreateDatabaseGrbit.None);
                CreateTables();
                return;
            }

            Api.JetOpenDatabase(_primarySessionId, _databaseFile, null, out _primaryDatabaseId, OpenDatabaseGrbit.None);
            InitializeTables();
        }

        private void CreateTables()
        {
            foreach (var table in _tables.Values)
            {
                table.Create(_primarySessionId, _primaryDatabaseId);
            }
        }

        private void InitializeTables()
        {
            foreach (var table in _tables.Values)
            {
                table.Initialize(_primarySessionId, _primaryDatabaseId);
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Reflection;
using System.Linq.Expressions;
using Microsoft.Internal;

namespace System.Composition.Convention
{
    /// <summary>
    /// Configures a type as a MEF part.
    /// </summary>
    /// <typeparam name="T">The type of the part, or a type to which the part is assignable.</typeparam>
    public class PartConventionBuilder<T> : PartConventionBuilder
    {
        // Adapts a strongly-typed method-selector lambda into the MethodInfo
        // filter predicate the non-generic builder expects.
        private class MethodExpressionAdapter
        {
            private readonly MethodInfo _methodInfo;

            public MethodExpressionAdapter(Expression<Action<T>> methodSelector)
            {
                _methodInfo = SelectMethods(methodSelector);
            }

            public bool VerifyMethodInfo(MethodInfo mi)
            {
                return mi == _methodInfo;
            }

            // Extracts the MethodInfo from a "p => p.Method()" style lambda.
            private static MethodInfo SelectMethods(Expression<Action<T>> methodSelector)
            {
                Requires.NotNull(methodSelector, nameof(methodSelector));

                var expr = Reduce(methodSelector).Body;
                if (expr.NodeType == ExpressionType.Call)
                {
                    var memberInfo = ((MethodCallExpression)expr).Method as MethodInfo;
                    if (memberInfo != null)
                    {
                        return memberInfo;
                    }
                }

                // An error occurred the expression must be a void Method() Member Expression
                throw ExceptionBuilder.Argument_ExpressionMustBeVoidMethodWithNoArguments(nameof(methodSelector));
            }

            protected static Expression<Func<T, object>> Reduce(Expression<Func<T, object>> expr)
            {
                while (expr.CanReduce)
                {
                    expr = (Expression<Func<T, object>>)expr.Reduce();
                }
                return expr;
            }

            protected static Expression<Action<T>> Reduce(Expression<Action<T>> expr)
            {
                while (expr.CanReduce)
                {
                    expr = (Expression<Action<T>>)expr.Reduce();
                }
                return expr;
            }
        }

        // Adapts a strongly-typed property-selector lambda into the
        // PropertyInfo filter plus import/export configuration callbacks.
        private class PropertyExpressionAdapter
        {
            private readonly PropertyInfo _propertyInfo;
            private readonly Action<ImportConventionBuilder> _configureImport;
            private readonly Action<ExportConventionBuilder> _configureExport;

            public PropertyExpressionAdapter(
                Expression<Func<T, object>> propertySelector,
                Action<ImportConventionBuilder> configureImport = null,
                Action<ExportConventionBuilder> configureExport = null)
            {
                _propertyInfo = SelectProperties(propertySelector);
                _configureImport = configureImport;
                _configureExport = configureExport;
            }

            public bool VerifyPropertyInfo(PropertyInfo pi)
            {
                return pi == _propertyInfo;
            }

            public void ConfigureImport(PropertyInfo propertyInfo, ImportConventionBuilder importBuilder)
            {
                if (_configureImport != null)
                {
                    _configureImport(importBuilder);
                }
            }

            public void ConfigureExport(PropertyInfo propertyInfo, ExportConventionBuilder exportBuilder)
            {
                if (_configureExport != null)
                {
                    _configureExport(exportBuilder);
                }
            }

            // Extracts the PropertyInfo from a "p => p.Property" style lambda.
            private static PropertyInfo SelectProperties(Expression<Func<T, object>> propertySelector)
            {
                Requires.NotNull(propertySelector, nameof(propertySelector));

                var expr = Reduce(propertySelector).Body;
                if (expr.NodeType == ExpressionType.MemberAccess)
                {
                    var memberInfo = ((MemberExpression)expr).Member as PropertyInfo;
                    if (memberInfo != null)
                    {
                        return memberInfo;
                    }
                }

                // An error occurred the expression must be a Property Member Expression
                throw ExceptionBuilder.Argument_ExpressionMustBePropertyMember(nameof(propertySelector));
            }

            protected static Expression<Func<T, object>> Reduce(Expression<Func<T, object>> expr)
            {
                while (expr.CanReduce)
                {
                    expr = (Expression<Func<T, object>>)expr.Reduce();
                }
                return expr;
            }
        }

        // Adapts a "b => new Part(b.Import<...>(...))" constructor-selector
        // lambda into the constructor filter plus per-parameter import
        // configuration callbacks the non-generic builder expects.
        private class ConstructorExpressionAdapter
        {
            private ConstructorInfo _constructorInfo = null;
            private Dictionary<ParameterInfo, Action<ImportConventionBuilder>> _importBuilders = null;

            public ConstructorExpressionAdapter(Expression<Func<ParameterImportConventionBuilder, T>> selectConstructor)
            {
                ParseSelectConstructor(selectConstructor);
            }

            public ConstructorInfo SelectConstructor(IEnumerable<ConstructorInfo> constructorInfos)
            {
                return _constructorInfo;
            }

            public void ConfigureConstructorImports(ParameterInfo parameterInfo, ImportConventionBuilder importBuilder)
            {
                if (_importBuilders != null)
                {
                    Action<ImportConventionBuilder> parameterImportBuilder;
                    if (_importBuilders.TryGetValue(parameterInfo, out parameterImportBuilder))
                    {
                        parameterImportBuilder(importBuilder);
                    }
                }

                return;
            }

            // Records the selected constructor and, for every Import(...) call
            // among its arguments, the import-configuration delegate keyed by
            // the corresponding ParameterInfo.
            private void ParseSelectConstructor(Expression<Func<ParameterImportConventionBuilder, T>> constructorSelector)
            {
                Requires.NotNull(constructorSelector, nameof(constructorSelector));

                var expr = Reduce(constructorSelector).Body;
                if (expr.NodeType != ExpressionType.New)
                {
                    throw ExceptionBuilder.Argument_ExpressionMustBeNew(nameof(constructorSelector));
                }

                var newExpression = (NewExpression)expr;
                _constructorInfo = newExpression.Constructor;

                int index = 0;
                var parameterInfos = _constructorInfo.GetParameters();
                foreach (var argument in newExpression.Arguments)
                {
                    if (argument.NodeType == ExpressionType.Call)
                    {
                        var methodCallExpression = (MethodCallExpression)argument;
                        if (methodCallExpression.Arguments.Count == 1)
                        {
                            var parameter = methodCallExpression.Arguments[0];
                            if (parameter.NodeType == ExpressionType.Lambda)
                            {
                                var lambdaExpression = (LambdaExpression)parameter;
                                var importDelegate = lambdaExpression.Compile();
                                if (_importBuilders == null)
                                {
                                    _importBuilders = new Dictionary<ParameterInfo, Action<ImportConventionBuilder>>();
                                }

                                _importBuilders.Add(parameterInfos[index], (Action<ImportConventionBuilder>)importDelegate);
                                // NOTE(review): index only advances for arguments that
                                // match the Import(...) pattern; if configured imports
                                // are mixed with other argument forms the builder is
                                // keyed to the wrong parameter — confirm intended usage.
                                ++index;
                            }
                        }
                    }
                }
            }

            // FIX: the previous implementation discarded the result of
            // expr.Reduce() ("expr.Reduce();"), which looped forever whenever
            // CanReduce was true, and was inconsistent with the sibling
            // Reduce implementations above. Expression.Reduce returns the
            // reduced node rather than mutating in place, so the result must
            // be assigned back.
            private static Expression<Func<ParameterImportConventionBuilder, T>> Reduce(Expression<Func<ParameterImportConventionBuilder, T>> expr)
            {
                while (expr.CanReduce)
                {
                    expr = (Expression<Func<ParameterImportConventionBuilder, T>>)expr.Reduce();
                }
                return expr;
            }
        }

        internal PartConventionBuilder(Predicate<Type> selectType) : base(selectType)
        {
        }

        /// <summary>
        /// Select which of the available constructors will be used to instantiate the part.
/// </summary>
        /// <param name="constructorSelector">Expression that selects a single constructor.</param>
        /// <returns>A part builder allowing further configuration of the part.</returns>
        public PartConventionBuilder<T> SelectConstructor(Expression<Func<ParameterImportConventionBuilder, T>> constructorSelector)
        {
            Requires.NotNull(constructorSelector, nameof(constructorSelector));

            // Wrap the "new T(...)" expression in an adapter that yields both the
            // constructor and the per-parameter import configuration callbacks.
            ConstructorExpressionAdapter constructorAdapter = new ConstructorExpressionAdapter(constructorSelector);
            base.SelectConstructor(constructorAdapter.SelectConstructor, constructorAdapter.ConfigureConstructorImports);

            return this;
        }

        /// <summary>
        /// Select a property on the part to export.
        /// </summary>
        /// <param name="propertySelector">Expression that selects the exported property.</param>
        /// <returns>A part builder allowing further configuration of the part.</returns>
        public PartConventionBuilder<T> ExportProperty(Expression<Func<T, object>> propertySelector)
        {
            // Delegate to the configuring overload with no export configuration.
            return ExportProperty(propertySelector, null);
        }

        /// <summary>
        /// Select a property on the part to export and configure the resulting export.
        /// </summary>
        /// <param name="propertySelector">Expression that selects the exported property.</param>
        /// <param name="exportConfiguration">Action to configure selected properties.</param>
        /// <returns>A part builder allowing further configuration of the part.</returns>
        public PartConventionBuilder<T> ExportProperty(
            Expression<Func<T, object>> propertySelector,
            Action<ExportConventionBuilder> exportConfiguration)
        {
            Requires.NotNull(propertySelector, nameof(propertySelector));

            PropertyExpressionAdapter propertyAdapter = new PropertyExpressionAdapter(propertySelector, null, exportConfiguration);
            base.ExportProperties(propertyAdapter.VerifyPropertyInfo, propertyAdapter.ConfigureExport);

            return this;
        }

        /// <summary>
        /// Select a property to export from the part under a specific contract.
        /// </summary>
        /// <typeparam name="TContract">Contract type to export.</typeparam>
        /// <param name="propertySelector">Expression to select the matching property.</param>
        /// <returns>A part builder allowing further configuration of the part.</returns>
        public PartConventionBuilder<T> ExportProperty<TContract>(Expression<Func<T, object>> propertySelector)
        {
            // Delegate to the configuring overload with no export configuration.
            return ExportProperty<TContract>(propertySelector, null);
        }

        /// <summary>
        /// Select a property to export from the part under a specific contract and configure the export.
        /// </summary>
        /// <typeparam name="TContract">Contract type to export.</typeparam>
        /// <param name="propertySelector">Expression to select the matching property.</param>
        /// <param name="exportConfiguration">Action to configure selected properties.</param>
        /// <returns>A part builder allowing further configuration of the part.</returns>
        public PartConventionBuilder<T> ExportProperty<TContract>(
            Expression<Func<T, object>> propertySelector,
            Action<ExportConventionBuilder> exportConfiguration)
        {
            Requires.NotNull(propertySelector, nameof(propertySelector));

            PropertyExpressionAdapter propertyAdapter = new PropertyExpressionAdapter(propertySelector, null, exportConfiguration);
            base.ExportProperties<TContract>(propertyAdapter.VerifyPropertyInfo, propertyAdapter.ConfigureExport);

            return this;
        }

        /// <summary>
        /// Select a property on the part to import.
        /// </summary>
        /// <param name="propertySelector">Expression selecting the property.</param>
        /// <returns>A part builder allowing further configuration of the part.</returns>
        public PartConventionBuilder<T> ImportProperty(Expression<Func<T, object>> propertySelector)
        {
            // Delegate to the configuring overload with no import configuration.
            return ImportProperty(propertySelector, null);
        }

        /// <summary>
        /// Select a property on the part to import and configure the resulting import.
        /// </summary>
        /// <param name="propertySelector">Expression selecting the property.</param>
        /// <param name="importConfiguration">Action configuring the imported property.</param>
        /// <returns>A part builder allowing further configuration of the part.</returns>
        public PartConventionBuilder<T> ImportProperty(
            Expression<Func<T, object>> propertySelector,
            Action<ImportConventionBuilder> importConfiguration)
        {
            Requires.NotNull(propertySelector, nameof(propertySelector));

            PropertyExpressionAdapter propertyAdapter = new PropertyExpressionAdapter(propertySelector, importConfiguration, null);
            base.ImportProperties(propertyAdapter.VerifyPropertyInfo, propertyAdapter.ConfigureImport);

            return this;
        }

        /// <summary>
        /// Select a property on the part to import under a specific contract.
        /// </summary>
        /// <typeparam name="TContract">Contract type to import.</typeparam>
        /// <param name="propertySelector">Expression selecting the property.</param>
        /// <returns>A part builder allowing further configuration of the part.</returns>
        public PartConventionBuilder<T> ImportProperty<TContract>(Expression<Func<T, object>> propertySelector)
        {
            // Delegate to the configuring overload with no import configuration.
            return ImportProperty<TContract>(propertySelector, null);
        }

        /// <summary>
        /// Select a property on the part to import under a specific contract and configure the import.
        /// </summary>
        /// <typeparam name="TContract">Contract type to import.</typeparam>
        /// <param name="propertySelector">Expression selecting the property.</param>
        /// <param name="importConfiguration">Action configuring the imported property.</param>
        /// <returns>A part builder allowing further configuration of the part.</returns>
        public PartConventionBuilder<T> ImportProperty<TContract>(
            Expression<Func<T, object>> propertySelector,
            Action<ImportConventionBuilder> importConfiguration)
        {
            Requires.NotNull(propertySelector, nameof(propertySelector));

            PropertyExpressionAdapter propertyAdapter = new PropertyExpressionAdapter(propertySelector, importConfiguration, null);
            base.ImportProperties<TContract>(propertyAdapter.VerifyPropertyInfo, propertyAdapter.ConfigureImport);

            return this;
        }

        /// <summary>
        /// Specify a method of the part to call when composition of its imports is complete.
        /// </summary>
        /// <param name="methodSelector">Expression selecting the method to call.</param>
        /// <returns>A part builder allowing further configuration of the part.</returns>
        public PartConventionBuilder<T> NotifyImportsSatisfied(Expression<Action<T>> methodSelector)
        {
            Requires.NotNull(methodSelector, nameof(methodSelector));

            MethodExpressionAdapter methodAdapter = new MethodExpressionAdapter(methodSelector);
            base.NotifyImportsSatisfied(methodAdapter.VerifyMethodInfo);

            return this;
        }
    }
}
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Data.Common;
using System.Data.SqlClient;
using System.Linq;
using Umbraco.Core.Events;
using Umbraco.Core.Logging;
using Umbraco.Core.Models.Membership;
using Umbraco.Core.Persistence;
using Umbraco.Core.Persistence.Querying;
using Umbraco.Core.Persistence.UnitOfWork;
using Umbraco.Core.Security;

namespace Umbraco.Core.Services
{
    /// <summary>
    /// Represents the UserService, which is an easy access to operations involving <see cref="IProfile"/>, <see cref="IMembershipUser"/> and eventually Backoffice Users.
    /// </summary>
    public class UserService : RepositoryService, IUserService
    {
        //TODO: We need to change the isUpgrading flag to use an app state enum as described here: http://issues.umbraco.org/issue/U4-6816
        // in the meantime, we will use a boolean which we are currently using during upgrades to ensure that a user object is not persisted during this phase, otherwise
        // exceptions can occur if the db is not in it's correct state.
        internal bool IsUpgrading { get; set; }

        public UserService(IDatabaseUnitOfWorkProvider provider, RepositoryFactory repositoryFactory, ILogger logger, IEventMessagesFactory eventMessagesFactory)
            : base(provider, repositoryFactory, logger, eventMessagesFactory)
        {
            IsUpgrading = false;
        }

        #region Implementation of IMembershipUserService

        /// <summary>
        /// Gets the default MemberType alias
        /// </summary>
        /// <remarks>By default we'll return the 'writer', but we need to check it exists. If it doesn't we'll
        /// return the first type that is not an admin, otherwise if there's only one we will return that one.</remarks>
        /// <returns>Alias of the default MemberType</returns>
        public string GetDefaultMemberType()
        {
            using (var repository = RepositoryFactory.CreateUserTypeRepository(UowProvider.GetUnitOfWork()))
            {
                var types = repository.GetAll().Select(x => x.Alias).ToArray();

                if (types.Any() == false)
                {
                    throw new EntityNotFoundException("No member types could be resolved");
                }

                // Preferred default: the built-in 'writer' type, when present.
                if (types.InvariantContains("writer"))
                {
                    return types.First(x => x.InvariantEquals("writer"));
                }

                if (types.Length == 1)
                {
                    return types.First();
                }

                //first that is not admin
                return types.First(x => x.InvariantEquals("admin") == false);
            }
        }

        /// <summary>
        /// Checks if a User with the username exists
        /// </summary>
        /// <param name="username">Username to check</param>
        /// <returns><c>True</c> if the User exists otherwise <c>False</c></returns>
        public bool Exists(string username)
        {
            using (var repository = RepositoryFactory.CreateUserRepository(UowProvider.GetUnitOfWork()))
            {
                return repository.Exists(username);
            }
        }

        /// <summary>
        /// Creates a new User
        /// </summary>
        /// <remarks>The user will be saved in the database and returned with an Id</remarks>
        /// <param name="username">Username of the user to create</param>
        /// <param name="email">Email of the user to create</param>
        /// <param name="userType"><see cref="IUserType"/> which the User should be based on</param>
        /// <returns><see cref="IUser"/></returns>
        public IUser CreateUserWithIdentity(string username, string email, IUserType userType)
        {
            // Delegates with an empty password value; the stored value is a raw
            // (already encoded/hashed) value, not a clear-text password.
            return CreateUserWithIdentity(username, email, "", userType);
        }

        /// <summary>
        /// Creates and persists a new <see cref="IUser"/>
        /// </summary>
        /// <param name="username">Username of the <see cref="IUser"/> to create</param>
        /// <param name="email">Email of the <see cref="IUser"/> to create</param>
        /// <param name="passwordValue">This value should be the encoded/encrypted/hashed value for the
        /// password that will be stored in the database</param>
        /// <param name="memberTypeAlias">Alias of the Type</param>
        /// <returns><see cref="IUser"/></returns>
        IUser IMembershipMemberService<IUser>.CreateWithIdentity(string username, string email, string passwordValue, string memberTypeAlias)
        {
            var userType = GetUserTypeByAlias(memberTypeAlias);
            if (userType == null)
            {
                throw new EntityNotFoundException("The user type " + memberTypeAlias + " could not be resolved");
            }

            return CreateUserWithIdentity(username, email, passwordValue, userType);
        }

        /// <summary>
        /// Creates and persists a Member
        /// </summary>
        /// <remarks>Using this method will persist the Member object before its returned
        /// meaning that it will have an Id available (unlike the CreateMember method)</remarks>
        /// <param name="username">Username of the Member to create</param>
        /// <param name="email">Email of the Member to create</param>
        /// <param name="passwordValue">This value should be the encoded/encrypted/hashed value for the password that will be stored in the database</param>
        /// <param name="userType">MemberType the Member should be based on</param>
        /// <returns><see cref="IUser"/></returns>
        private IUser CreateUserWithIdentity(string username, string email, string passwordValue, IUserType userType)
        {
            if (userType == null) throw new ArgumentNullException("userType");

            //TODO: PUT lock here!!
var uow = UowProvider.GetUnitOfWork();
            using (var repository = RepositoryFactory.CreateUserRepository(uow))
            {
                // Enforce username uniqueness at creation time (parameterized query).
                var loginExists = uow.Database.ExecuteScalar<int>("SELECT COUNT(id) FROM umbracoUser WHERE userLogin = @Login", new { Login = username }) != 0;
                if (loginExists)
                    throw new ArgumentException("Login already exists");

                var user = new User(userType)
                {
                    DefaultToLiveEditing = false,
                    Email = email,
                    Language = Configuration.GlobalSettings.DefaultUILanguage,
                    Name = username,
                    RawPasswordValue = passwordValue,
                    Username = username,
                    StartContentId = -1,
                    StartMediaId = -1,
                    IsLockedOut = false,
                    IsApproved = true
                };

                //adding default sections content and media
                user.AddAllowedSection("content");
                user.AddAllowedSection("media");

                // Cancelable event: note the (unsaved) user is still returned when cancelled.
                if (SavingUser.IsRaisedEventCancelled(new SaveEventArgs<IUser>(user), this))
                    return user;

                repository.AddOrUpdate(user);
                uow.Commit();

                SavedUser.RaiseEvent(new SaveEventArgs<IUser>(user, false), this);

                return user;
            }
        }

        /// <summary>
        /// Gets a User by its integer id
        /// </summary>
        /// <param name="id"><see cref="System.int"/> Id</param>
        /// <returns><see cref="IUser"/></returns>
        public IUser GetById(int id)
        {
            using (var repository = RepositoryFactory.CreateUserRepository(UowProvider.GetUnitOfWork()))
            {
                // FIX: removed the redundant (int) cast - id is already an int.
                var user = repository.Get(id);

                return user;
            }
        }

        /// <summary>
        /// Gets an <see cref="IUser"/> by its provider key
        /// </summary>
        /// <param name="id">Id to use for retrieval</param>
        /// <returns><see cref="IUser"/>, or null when the key cannot be converted to an integer id</returns>
        public IUser GetByProviderKey(object id)
        {
            var asInt = id.TryConvertTo<int>();
            if (asInt.Success)
            {
                // FIX: use the converted result rather than "(int)id". The direct
                // unbox cast throws InvalidCastException whenever the provider key
                // is convertible but not a boxed int (e.g. the string "1234"),
                // which is exactly the case TryConvertTo succeeded for.
                return GetById(asInt.Result);
            }

            return null;
        }

        /// <summary>
        /// Get an <see cref="IUser"/> by email
        /// </summary>
        /// <param name="email">Email to use for retrieval</param>
        /// <returns><see cref="IUser"/></returns>
        public IUser GetByEmail(string email)
        {
            using (var repository = RepositoryFactory.CreateUserRepository(UowProvider.GetUnitOfWork()))
            {
                var query = Query<IUser>.Builder.Where(x => x.Email.Equals(email));
                var user =
repository.GetByQuery(query).FirstOrDefault();

                return user;
            }
        }

        /// <summary>
        /// Get an <see cref="IUser"/> by username
        /// </summary>
        /// <param name="username">Username to use for retrieval</param>
        /// <returns><see cref="IUser"/>, or null when no user matches</returns>
        public IUser GetByUsername(string username)
        {
            using (var repository = RepositoryFactory.CreateUserRepository(UowProvider.GetUnitOfWork()))
            {
                var query = Query<IUser>.Builder.Where(x => x.Username.Equals(username));
                var user = repository.GetByQuery(query).FirstOrDefault();

                return user;
            }
        }

        /// <summary>
        /// Deletes an <see cref="IUser"/>
        /// </summary>
        /// <remarks>This does not permanently delete the user: it disables the account and
        /// renames the username with a date prefix so the original login becomes reusable.</remarks>
        /// <param name="membershipUser"><see cref="IUser"/> to Delete</param>
        public void Delete(IUser membershipUser)
        {
            //disable
            membershipUser.IsApproved = false;

            //can't rename if it's going to take up too many chars
            // (the "yyyyMMdd_" prefix adds 9 characters; 125 appears to be the column limit - TODO confirm)
            if (membershipUser.Username.Length + 9 <= 125)
            {
                membershipUser.Username = DateTime.Now.ToString("yyyyMMdd") + "_" + membershipUser.Username;
            }

            Save(membershipUser);
        }

        /// <summary>
        /// This is simply a helper method which essentially just wraps the MembershipProvider's ChangePassword method
        /// </summary>
        /// <remarks>
        /// This method exists so that Umbraco developers can use one entry point to create/update users if they choose to.
/// </remarks>
        /// <param name="user">The user to save the password for</param>
        /// <param name="password">The password to save</param>
        public void SavePassword(IUser user, string password)
        {
            if (user == null) throw new ArgumentNullException("user");

            var provider = MembershipProviderExtensions.GetUsersMembershipProvider();
            if (provider.IsUmbracoMembershipProvider() == false)
                throw new NotSupportedException("When using a non-Umbraco membership provider you must change the user password by using the MembershipProvider.ChangePassword method");

            // The Umbraco provider allows changing without supplying the old password.
            provider.ChangePassword(user.Username, "", password);

            //go re-fetch the member and update the properties that may have changed
            var result = GetByUsername(user.Username);
            if (result != null)
            {
                //should never be null but it could have been deleted by another thread.
                user.RawPasswordValue = result.RawPasswordValue;
                user.LastPasswordChangeDate = result.LastPasswordChangeDate;
                // FIX: was "user.UpdateDate = user.UpdateDate;" - a self-assignment
                // that silently dropped the refreshed value. Copy it from the
                // re-fetched user, the same as the two lines above.
                user.UpdateDate = result.UpdateDate;
            }
        }

        /// <summary>
        /// Deletes or disables a User
        /// </summary>
        /// <param name="user"><see cref="IUser"/> to delete</param>
        /// <param name="deletePermanently"><c>True</c> to permanently delete the user, <c>False</c> to disable the user</param>
        public void Delete(IUser user, bool deletePermanently)
        {
            if (deletePermanently == false)
            {
                // Soft delete: disable + rename via the single-argument overload.
                Delete(user);
            }
            else
            {
                if (DeletingUser.IsRaisedEventCancelled(new DeleteEventArgs<IUser>(user), this))
                    return;

                var uow = UowProvider.GetUnitOfWork();
                using (var repository = RepositoryFactory.CreateUserRepository(uow))
                {
                    repository.Delete(user);
                    uow.Commit();
                }

                DeletedUser.RaiseEvent(new DeleteEventArgs<IUser>(user, false), this);
            }
        }

        /// <summary>
        /// Saves an <see cref="IUser"/>
        /// </summary>
        /// <param name="entity"><see cref="IUser"/> to Save</param>
        /// <param name="raiseEvents">Optional parameter to raise events.
/// Default is <c>True</c> otherwise set to <c>False</c> to not raise events</param>
        public void Save(IUser entity, bool raiseEvents = true)
        {
            if (raiseEvents)
            {
                if (SavingUser.IsRaisedEventCancelled(new SaveEventArgs<IUser>(entity), this))
                    return;
            }

            var uow = UowProvider.GetUnitOfWork();
            using (var repository = RepositoryFactory.CreateUserRepository(uow))
            {
                repository.AddOrUpdate(entity);
                try
                {
                    uow.Commit();
                }
                catch (DbException ex)
                {
                    //Special case, if we are upgrading and an exception occurs, just continue
                    // (the db schema may not be in its final state yet - see the IsUpgrading TODO above)
                    if (IsUpgrading == false) throw;

                    Logger.WarnWithException<UserService>("An error occurred attempting to save a user instance during upgrade, normally this warning can be ignored", ex);
                    return;
                }
            }

            if (raiseEvents)
                SavedUser.RaiseEvent(new SaveEventArgs<IUser>(entity, false), this);
        }

        /// <summary>
        /// Saves a list of <see cref="IUser"/> objects
        /// </summary>
        /// <param name="entities"><see cref="IEnumerable{IUser}"/> to save</param>
        /// <param name="raiseEvents">Optional parameter to raise events.
        /// Default is <c>True</c> otherwise set to <c>False</c> to not raise events</param>
        public void Save(IEnumerable<IUser> entities, bool raiseEvents = true)
        {
            if (raiseEvents)
            {
                if (SavingUser.IsRaisedEventCancelled(new SaveEventArgs<IUser>(entities), this))
                    return;
            }

            var uow = UowProvider.GetUnitOfWork();
            using (var repository = RepositoryFactory.CreateUserRepository(uow))
            {
                foreach (var member in entities)
                {
                    repository.AddOrUpdate(member);
                }

                //commit the whole lot in one go
                uow.Commit();
            }

            if (raiseEvents)
                SavedUser.RaiseEvent(new SaveEventArgs<IUser>(entities, false), this);
        }

        /// <summary>
        /// Finds a list of <see cref="IUser"/> objects by a partial email string
        /// </summary>
        /// <param name="emailStringToMatch">Partial email string to match</param>
        /// <param name="pageIndex">Current page index</param>
        /// <param name="pageSize">Size of the page</param>
        /// <param name="totalRecords">Total number of records found (out)</param>
        /// <param name="matchType">The type of match to make as <see cref="StringPropertyMatchType"/>.
        /// Default is <see cref="StringPropertyMatchType.StartsWith"/></param>
        /// <returns><see cref="IEnumerable{IUser}"/></returns>
        public IEnumerable<IUser> FindByEmail(string emailStringToMatch, int pageIndex, int pageSize, out int totalRecords, StringPropertyMatchType matchType = StringPropertyMatchType.StartsWith)
        {
            var uow = UowProvider.GetUnitOfWork();
            using (var repository = RepositoryFactory.CreateUserRepository(uow))
            {
                var query = new Query<IUser>();

                // Translate the requested match type into the corresponding query predicate.
                switch (matchType)
                {
                    case StringPropertyMatchType.Exact:
                        query.Where(member => member.Email.Equals(emailStringToMatch));
                        break;
                    case StringPropertyMatchType.Contains:
                        query.Where(member => member.Email.Contains(emailStringToMatch));
                        break;
                    case StringPropertyMatchType.StartsWith:
                        query.Where(member => member.Email.StartsWith(emailStringToMatch));
                        break;
                    case StringPropertyMatchType.EndsWith:
                        query.Where(member => member.Email.EndsWith(emailStringToMatch));
                        break;
                    case StringPropertyMatchType.Wildcard:
                        query.Where(member => member.Email.SqlWildcard(emailStringToMatch, TextColumnType.NVarchar));
                        break;
                    default:
                        throw new ArgumentOutOfRangeException("matchType");
                }

                return repository.GetPagedResultsByQuery(query, pageIndex, pageSize, out totalRecords, dto => dto.Email);
            }
        }

        /// <summary>
        /// Finds a list of <see cref="IUser"/> objects by a partial username
        /// </summary>
        /// <param name="login">Partial username to match</param>
        /// <param name="pageIndex">Current page index</param>
        /// <param name="pageSize">Size of the page</param>
        /// <param name="totalRecords">Total number of records found (out)</param>
        /// <param name="matchType">The type of match to make as <see cref="StringPropertyMatchType"/>.
        /// 
Default is <see cref="StringPropertyMatchType.StartsWith"/></param>
        /// <returns><see cref="IEnumerable{IUser}"/></returns>
        public IEnumerable<IUser> FindByUsername(string login, int pageIndex, int pageSize, out int totalRecords, StringPropertyMatchType matchType = StringPropertyMatchType.StartsWith)
        {
            var uow = UowProvider.GetUnitOfWork();
            using (var repository = RepositoryFactory.CreateUserRepository(uow))
            {
                var query = new Query<IUser>();

                // Translate the requested match type into the corresponding query predicate.
                switch (matchType)
                {
                    case StringPropertyMatchType.Exact:
                        query.Where(member => member.Username.Equals(login));
                        break;
                    case StringPropertyMatchType.Contains:
                        query.Where(member => member.Username.Contains(login));
                        break;
                    case StringPropertyMatchType.StartsWith:
                        query.Where(member => member.Username.StartsWith(login));
                        break;
                    case StringPropertyMatchType.EndsWith:
                        query.Where(member => member.Username.EndsWith(login));
                        break;
                    case StringPropertyMatchType.Wildcard:
                        // FIX: was "member.Email.SqlWildcard(...)" - a copy/paste error
                        // from FindByEmail that made wildcard username searches match
                        // against the email column instead of the username column.
                        query.Where(member => member.Username.SqlWildcard(login, TextColumnType.NVarchar));
                        break;
                    default:
                        throw new ArgumentOutOfRangeException("matchType");
                }

                return repository.GetPagedResultsByQuery(query, pageIndex, pageSize, out totalRecords, dto => dto.Username);
            }
        }

        /// <summary>
        /// Gets the total number of Users based on the count type
        /// </summary>
        /// <remarks>
        /// The way the Online count is done is the same way that it is done in the MS SqlMembershipProvider - We query for any members
        /// that have their last active date within the Membership.UserIsOnlineTimeWindow (which is in minutes). It isn't exact science
        /// but that is how MS have made theirs so we'll follow that principal.
/// </remarks>
        /// <param name="countType"><see cref="MemberCountType"/> to count by</param>
        /// <returns><see cref="System.int"/> with number of Users for passed in type</returns>
        public int GetCount(MemberCountType countType)
        {
            using (var repository = RepositoryFactory.CreateUserRepository(UowProvider.GetUnitOfWork()))
            {
                IQuery<IUser> query;
                switch (countType)
                {
                    case MemberCountType.All:
                        query = new Query<IUser>();
                        return repository.Count(query);
                    case MemberCountType.Online:
                        // Online counting is not supported for backoffice users yet;
                        // the commented code below mirrors how the member service does it.
                        throw new NotImplementedException();
                        //var fromDate = DateTime.Now.AddMinutes(-Membership.UserIsOnlineTimeWindow);
                        //query =
                        //    Query<IMember>.Builder.Where(
                        //        x =>
                        //        ((Member)x).PropertyTypeAlias == Constants.Conventions.Member.LastLoginDate &&
                        //        ((Member)x).DateTimePropertyValue > fromDate);
                        //return repository.GetCountByQuery(query);
                    case MemberCountType.LockedOut:
                        query =
                            Query<IUser>.Builder.Where(
                                x => x.IsLockedOut);
                        return repository.GetCountByQuery(query);
                    case MemberCountType.Approved:
                        query =
                            Query<IUser>.Builder.Where(
                                x => x.IsApproved);
                        return repository.GetCountByQuery(query);
                    default:
                        throw new ArgumentOutOfRangeException("countType");
                }
            }
        }

        /// <summary>
        /// Gets a list of paged <see cref="IUser"/> objects
        /// </summary>
        /// <param name="pageIndex">Current page index</param>
        /// <param name="pageSize">Size of the page</param>
        /// <param name="totalRecords">Total number of records found (out)</param>
        /// <returns><see cref="IEnumerable{IMember}"/></returns>
        public IEnumerable<IUser> GetAll(int pageIndex, int pageSize, out int totalRecords)
        {
            var uow = UowProvider.GetUnitOfWork();
            using (var repository = RepositoryFactory.CreateUserRepository(uow))
            {
                // null query = no filter; results are ordered by username for stable paging.
                return repository.GetPagedResultsByQuery(null, pageIndex, pageSize, out totalRecords, member => member.Username);
            }
        }

        #endregion

        #region Implementation of IUserService

        /// <summary>
        /// Gets an IProfile by User Id.
        /// </summary>
        /// <param name="id">Id of the User to retrieve</param>
        /// <returns><see cref="IProfile"/></returns>
        public IProfile GetProfileById(int id)
        {
            // NOTE(review): throws NullReferenceException when no user has this id
            // (GetUserById returns null) - confirm whether callers rely on that.
            var user = GetUserById(id);
            return user.ProfileData;
        }

        /// <summary>
        /// Gets a profile by username
        /// </summary>
        /// <param name="username">Username</param>
        /// <returns><see cref="IProfile"/></returns>
        public IProfile GetProfileByUserName(string username)
        {
            // NOTE(review): same null-dereference risk as GetProfileById when the
            // username does not exist - confirm against callers.
            var user = GetByUsername(username);
            return user.ProfileData;
        }

        /// <summary>
        /// Gets a user by Id
        /// </summary>
        /// <param name="id">Id of the user to retrieve</param>
        /// <returns><see cref="IUser"/></returns>
        public IUser GetUserById(int id)
        {
            using (var repository = RepositoryFactory.CreateUserRepository(UowProvider.GetUnitOfWork()))
            {
                return repository.Get(id);
            }
        }

        /// <summary>
        /// Replaces the same permission set for a single user to any number of entities
        /// </summary>
        /// <remarks>If no 'entityIds' are specified all permissions will be removed for the specified user.</remarks>
        /// <param name="userId">Id of the user</param>
        /// <param name="permissions">Permissions as enumerable list of <see cref="char"/></param>
        /// <param name="entityIds">Specify the nodes to replace permissions for. If nothing is specified all permissions are removed.</param>
        public void ReplaceUserPermissions(int userId, IEnumerable<char> permissions, params int[] entityIds)
        {
            var uow = UowProvider.GetUnitOfWork();
            using (var repository = RepositoryFactory.CreateUserRepository(uow))
            {
                repository.ReplaceUserPermissions(userId, permissions, entityIds);
            }
        }

        /// <summary>
        /// Assigns the same permission set for a single user to any number of entities
        /// </summary>
        /// <param name="userId">Id of the user</param>
        /// <param name="permission">Permission to assign as a <see cref="char"/></param>
        /// <param name="entityIds">Specify the nodes to replace permissions for</param>
        public void AssignUserPermission(int userId, char permission, params int[] entityIds)
        {
            var uow = UowProvider.GetUnitOfWork();
            using (var repository = RepositoryFactory.CreateUserRepository(uow))
            {
                repository.AssignUserPermission(userId, permission, entityIds);
            }
        }

        /// <summary>
        /// Gets all UserTypes or those specified as parameters
        /// </summary>
        /// <param name="ids">Optional Ids of UserTypes to retrieve</param>
        /// <returns>An enumerable list of <see cref="IUserType"/></returns>
        public IEnumerable<IUserType> GetAllUserTypes(params int[] ids)
        {
            var uow = UowProvider.GetUnitOfWork();
            using (var repository = RepositoryFactory.CreateUserTypeRepository(uow))
            {
                return repository.GetAll(ids);
            }
        }

        /// <summary>
        /// Gets a UserType by its Alias
        /// </summary>
        /// <param name="alias">Alias of the UserType to retrieve</param>
        /// <returns><see cref="IUserType"/></returns>
        public IUserType GetUserTypeByAlias(string alias)
        {
            using (var repository = RepositoryFactory.CreateUserTypeRepository(UowProvider.GetUnitOfWork()))
            {
                var query = Query<IUserType>.Builder.Where(x => x.Alias == alias);
                var contents = repository.GetByQuery(query);
                return contents.SingleOrDefault();
            }
        }

        /// <summary>
        /// Gets a UserType by its Id
        /// </summary>
        /// <param name="id">Id of the UserType to retrieve</param>
        /// <returns><see cref="IUserType"/></returns>
        public IUserType GetUserTypeById(int id)
        {
            using (var repository = RepositoryFactory.CreateUserTypeRepository(UowProvider.GetUnitOfWork()))
            {
                return repository.Get(id);
            }
        }

        /// <summary>
        /// Gets a UserType by its Name
        /// </summary>
        /// <param name="name">Name of the UserType to retrieve</param>
        /// <returns><see cref="IUserType"/></returns>
        public IUserType GetUserTypeByName(string name)
        {
            using (var repository = RepositoryFactory.CreateUserTypeRepository(UowProvider.GetUnitOfWork()))
            {
                var query = Query<IUserType>.Builder.Where(x => x.Name == name);
                var contents = repository.GetByQuery(query);
                return contents.SingleOrDefault();
            }
        }

        /// <summary>
        /// Saves a UserType
        /// </summary>
        /// <param name="userType">UserType to save</param>
        /// <param name="raiseEvents">Optional parameter to raise events.
        /// Default is <c>True</c> otherwise set to <c>False</c> to not raise events</param>
        public void SaveUserType(IUserType userType, bool raiseEvents = true)
        {
            if (raiseEvents)
            {
                if (SavingUserType.IsRaisedEventCancelled(new SaveEventArgs<IUserType>(userType), this))
                    return;
            }

            var uow = UowProvider.GetUnitOfWork();
            using (var repository = RepositoryFactory.CreateUserTypeRepository(uow))
            {
                repository.AddOrUpdate(userType);
                uow.Commit();
            }

            if (raiseEvents)
                SavedUserType.RaiseEvent(new SaveEventArgs<IUserType>(userType, false), this);
        }

        /// <summary>
        /// Deletes a UserType
        /// </summary>
        /// <param name="userType">UserType to delete</param>
        public void DeleteUserType(IUserType userType)
        {
            if (DeletingUserType.IsRaisedEventCancelled(new DeleteEventArgs<IUserType>(userType), this))
                return;

            var uow = UowProvider.GetUnitOfWork();
            using (var repository = RepositoryFactory.CreateUserTypeRepository(uow))
            {
                repository.Delete(userType);
                uow.Commit();
            }

            DeletedUserType.RaiseEvent(new DeleteEventArgs<IUserType>(userType, false), this);
        }

        /// <summary>
        /// Removes a specific section from all users
        /// </summary>
        /// <remarks>This is useful when an entire section is removed from config</remarks>
        /// <param name="sectionAlias">Alias of the section to remove</param>
        public void DeleteSectionFromAllUsers(string sectionAlias)
        {
            var uow = UowProvider.GetUnitOfWork();
            using (var repository = RepositoryFactory.CreateUserRepository(uow))
            {
                var assignedUsers = repository.GetUsersAssignedToSection(sectionAlias);
                foreach (var user in assignedUsers)
                {
                    //now remove the section for each user and commit
                    user.RemoveAllowedSection(sectionAlias);
                    repository.AddOrUpdate(user);
                }

                uow.Commit();
            }
        }

        /// <summary>
        /// Add a specific section to all users or those specified as parameters
        /// </summary>
        /// <remarks>This is useful when a new section is created to allow specific users accessing it</remarks>
        /// <param name="sectionAlias">Alias of the section to add</param>
        /// <param name="userIds">Specifying nothing will add the section to all users</param>
        public void AddSectionToAllUsers(string sectionAlias, params int[] userIds)
        {
            var uow = UowProvider.GetUnitOfWork();
            using (var repository = RepositoryFactory.CreateUserRepository(uow))
            {
                IEnumerable<IUser> users;
                if (userIds.Any())
                {
                    users = repository.GetAll(userIds);
                }
                else
                {
                    users = repository.GetAll();
                }

                // Only touch users that don't already have the section.
                foreach (var user in users.Where(u => !u.AllowedSections.InvariantContains(sectionAlias)))
                {
                    //now add the section for each user and commit
                    user.AddAllowedSection(sectionAlias);
                    repository.AddOrUpdate(user);
                }

                uow.Commit();
            }
        }

        /// <summary>
        /// Get permissions set for a user and optional node ids
        /// </summary>
        /// <remarks>If no permissions are found for a particular entity then the user's default permissions will be applied</remarks>
        /// <param name="user">User to retrieve permissions for</param>
        /// <param name="nodeIds">Specifying nothing will return all user permissions for all nodes</param>
        /// <returns>An enumerable list of <see cref="EntityPermission"/></returns>
        public IEnumerable<EntityPermission> GetPermissions(IUser user, params int[] nodeIds)
        {
            var uow = UowProvider.GetUnitOfWork();
            using (var repository = RepositoryFactory.CreateUserRepository(uow))
            {
                var explicitPermissions = repository.GetUserPermissionsForEntities(user.Id, nodeIds);

                //if no permissions are assigned to a particular node then we will fill in those permissions with the user's defaults
                var result = new List<EntityPermission>(explicitPermissions);
                var missingIds = nodeIds.Except(result.Select(x => x.EntityId));
                foreach (var id in missingIds)
                {
                    result.Add(
                        new EntityPermission(
                            user.Id,
                            id,
                            user.DefaultPermissions.ToArray()));
                }

                return result;
            }
        }

        #endregion

        /// <summary>
        /// Occurs before Save
        /// </summary>
        public static event TypedEventHandler<IUserService, SaveEventArgs<IUser>> SavingUser;

        /// <summary>
        /// Occurs after Save
        /// </summary>
        public static event TypedEventHandler<IUserService, SaveEventArgs<IUser>> SavedUser;

        /// <summary>
        /// Occurs before Delete
        /// </summary>
        public static event TypedEventHandler<IUserService, DeleteEventArgs<IUser>> DeletingUser;

        /// <summary>
        /// Occurs after Delete
        /// </summary>
        public static event TypedEventHandler<IUserService, DeleteEventArgs<IUser>> DeletedUser;

        /// <summary>
        /// Occurs before Save
        /// </summary>
        public static event TypedEventHandler<IUserService, SaveEventArgs<IUserType>> SavingUserType;

        /// <summary>
        /// Occurs after Save
        /// </summary>
        public static event TypedEventHandler<IUserService, SaveEventArgs<IUserType>> SavedUserType;

        /// <summary>
        /// Occurs before Delete
        /// </summary>
        public static event TypedEventHandler<IUserService, DeleteEventArgs<IUserType>> DeletingUserType;

        /// <summary>
        /// Occurs after Delete
        /// </summary>
        public static event TypedEventHandler<IUserService, DeleteEventArgs<IUserType>> DeletedUserType;
    }
}
// // Copyright (c) 2004-2017 Jaroslaw Kowalski <jaak@jkowalski.net>, Kim Christensen, Julian Verdurmen // // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions // are met: // // * Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // // * Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // // * Neither the name of Jaroslaw Kowalski nor the names of its // contributors may be used to endorse or promote products derived from this // software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE // LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF // SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF // THE POSSIBILITY OF SUCH DAMAGE. 
// 
namespace NLog.UnitTests.Config
{
    using System.IO;
    using System.Text;
    using NLog.Config;
    using NLog.Filters;
    using Xunit;

    /// <summary>
    /// Tests for parsing the &lt;rules&gt; section of an NLog XML configuration:
    /// level selection (level / minLevel / maxLevel / levels attributes), target
    /// wiring (writeTo / appendTo), nested child rules, filters (Ignore,
    /// IgnoreFinal, LogFinal) and the 'final' flag, plus internal-logger
    /// warnings for targets no rule references.
    /// </summary>
    public class RuleConfigurationTests : NLogTestBase
    {
        [Fact]
        public void NoRulesTest()
        {
            // An empty <rules> element must yield zero logging rules.
            LoggingConfiguration c = CreateConfigurationFromString(@" <nlog> <targets> <target name='d1' type='Debug' /> </targets> <rules> </rules> </nlog>");

            Assert.Equal(0, c.LoggingRules.Count);
        }

        [Fact]
        public void SimpleRuleTest()
        {
            // minLevel='Info' enables Info and every level above it (Warn, Error, Fatal).
            LoggingConfiguration c = CreateConfigurationFromString(@" <nlog> <targets> <target name='d1' type='Debug' /> </targets> <rules> <logger name='*' minLevel='Info' writeTo='d1' /> </rules> </nlog>");

            Assert.Equal(1, c.LoggingRules.Count);
            var rule = c.LoggingRules[0];
            Assert.Equal("*", rule.LoggerNamePattern);
            Assert.Equal(4, rule.Levels.Count);
            Assert.Contains(LogLevel.Info, rule.Levels);
            Assert.Contains(LogLevel.Warn, rule.Levels);
            Assert.Contains(LogLevel.Error, rule.Levels);
            Assert.Contains(LogLevel.Fatal, rule.Levels);
            Assert.Equal(1, rule.Targets.Count);
            Assert.Same(c.FindTargetByName("d1"), rule.Targets[0]);
            Assert.False(rule.Final);
            Assert.Equal(0, rule.Filters.Count);
        }

        [Fact]
        public void SingleLevelTest()
        {
            // level='Warn' enables exactly one level.
            LoggingConfiguration c = CreateConfigurationFromString(@" <nlog> <targets> <target name='d1' type='Debug' /> </targets> <rules> <logger name='*' level='Warn' writeTo='d1' /> </rules> </nlog>");

            Assert.Equal(1, c.LoggingRules.Count);
            var rule = c.LoggingRules[0];
            Assert.Single(rule.Levels);
            Assert.Contains(LogLevel.Warn, rule.Levels);
        }

        [Fact]
        public void MinMaxLevelTest()
        {
            // minLevel/maxLevel define an inclusive range: Info..Warn => two levels.
            LoggingConfiguration c = CreateConfigurationFromString(@" <nlog> <targets> <target name='d1' type='Debug' /> </targets> <rules> <logger name='*' minLevel='Info' maxLevel='Warn' writeTo='d1' /> </rules> </nlog>");

            Assert.Equal(1, c.LoggingRules.Count);
            var rule = c.LoggingRules[0];
            Assert.Equal(2, rule.Levels.Count);
            Assert.Contains(LogLevel.Info, rule.Levels);
            Assert.Contains(LogLevel.Warn, rule.Levels);
        }

        [Fact]
        public void NoLevelsTest()
        {
            // No level attributes at all => all six standard levels are enabled.
            LoggingConfiguration c = CreateConfigurationFromString(@" <nlog> <targets> <target name='d1' type='Debug' /> </targets> <rules> <logger name='*' writeTo='d1' /> </rules> </nlog>");

            Assert.Equal(1, c.LoggingRules.Count);
            var rule = c.LoggingRules[0];
            Assert.Equal(6, rule.Levels.Count);
            Assert.Contains(LogLevel.Trace, rule.Levels);
            Assert.Contains(LogLevel.Debug, rule.Levels);
            Assert.Contains(LogLevel.Info, rule.Levels);
            Assert.Contains(LogLevel.Warn, rule.Levels);
            Assert.Contains(LogLevel.Error, rule.Levels);
            Assert.Contains(LogLevel.Fatal, rule.Levels);
        }

        [Fact]
        public void ExplicitLevelsTest()
        {
            // levels='...' lists discrete (non-contiguous) levels explicitly.
            LoggingConfiguration c = CreateConfigurationFromString(@" <nlog> <targets> <target name='d1' type='Debug' /> </targets> <rules> <logger name='*' levels='Trace,Info,Warn' writeTo='d1' /> </rules> </nlog>");

            Assert.Equal(1, c.LoggingRules.Count);
            var rule = c.LoggingRules[0];
            Assert.Equal(3, rule.Levels.Count);
            Assert.Contains(LogLevel.Trace, rule.Levels);
            Assert.Contains(LogLevel.Info, rule.Levels);
            Assert.Contains(LogLevel.Warn, rule.Levels);
        }

        [Fact]
        public void MultipleTargetsTest()
        {
            // A comma-separated writeTo list resolves to targets in declaration order.
            LoggingConfiguration c = CreateConfigurationFromString(@" <nlog> <targets> <target name='d1' type='Debug' /> <target name='d2' type='Debug' /> <target name='d3' type='Debug' /> <target name='d4' type='Debug' /> </targets> <rules> <logger name='*' level='Warn' writeTo='d1,d2,d3' /> </rules> </nlog>");

            Assert.Equal(1, c.LoggingRules.Count);
            var rule = c.LoggingRules[0];
            Assert.Equal(3, rule.Targets.Count);
            Assert.Same(c.FindTargetByName("d1"), rule.Targets[0]);
            Assert.Same(c.FindTargetByName("d2"), rule.Targets[1]);
            Assert.Same(c.FindTargetByName("d3"), rule.Targets[2]);
        }

        [Fact]
        public void MultipleRulesSameTargetTest()
        {
            // Several matching rules chain their targets (d1 -> d2 -> d3) for a
            // given level; d4 is never written to.
            LoggingConfiguration c = CreateConfigurationFromString(@" <nlog> <targets> <target name='d1' type='Debug' layout='${message}' /> <target name='d2' type='Debug' layout='${message}' /> <target name='d3' type='Debug' layout='${message}' /> <target name='d4' type='Debug' layout='${message}' /> </targets> <rules> <logger name='*' level='Warn' writeTo='d1' /> <logger name='*' level='Warn' writeTo='d2' /> <logger name='*' level='Warn' writeTo='d3' /> </rules> </nlog>");

            LogFactory factory = new LogFactory(c);
            var loggerConfig = factory.GetConfigurationForLogger("AAA", c);
            var targets = loggerConfig.GetTargetsForLevel(LogLevel.Warn);

            // Verify the target chain order matches rule order.
            Assert.Equal("d1", targets.Target.Name);
            Assert.Equal("d2", targets.NextInChain.Target.Name);
            Assert.Equal("d3", targets.NextInChain.NextInChain.Target.Name);
            Assert.Null(targets.NextInChain.NextInChain.NextInChain);

            LogManager.Configuration = c;

            var logger = LogManager.GetLogger("BBB");
            logger.Warn("test1234");

            // All chained targets receive the message; the unreferenced one stays empty.
            AssertDebugLastMessage("d1", "test1234");
            AssertDebugLastMessage("d2", "test1234");
            AssertDebugLastMessage("d3", "test1234");
            AssertDebugLastMessage("d4", string.Empty);
        }

        [Fact]
        public void ChildRulesTest()
        {
            // <logger> elements nested inside a rule are exposed as ChildRules.
            LoggingConfiguration c = CreateConfigurationFromString(@" <nlog> <targets> <target name='d1' type='Debug' /> <target name='d2' type='Debug' /> <target name='d3' type='Debug' /> <target name='d4' type='Debug' /> </targets> <rules> <logger name='*' level='Warn' writeTo='d1,d2,d3'> <logger name='Foo*' writeTo='d4' /> <logger name='Bar*' writeTo='d4' /> </logger> </rules> </nlog>");

            Assert.Equal(1, c.LoggingRules.Count);
            var rule = c.LoggingRules[0];
            Assert.Equal(2, rule.ChildRules.Count);
            Assert.Equal("Foo*", rule.ChildRules[0].LoggerNamePattern);
            Assert.Equal("Bar*", rule.ChildRules[1].LoggerNamePattern);
        }

        [Fact]
        public void FiltersTest()
        {
            // <when> elements parse into ConditionBasedFilter instances, in order.
            LoggingConfiguration c = CreateConfigurationFromString(@" <nlog> <targets> <target name='d1' type='Debug' /> <target name='d2' type='Debug' /> <target name='d3' type='Debug' /> <target name='d4' type='Debug' /> </targets> <rules> <logger name='*' level='Warn' writeTo='d1,d2,d3'> <filters> <when condition=""starts-with(message, 'x')"" action='Ignore' /> <when condition=""starts-with(message, 'z')"" action='Ignore' /> </filters> </logger> </rules> </nlog>");

            Assert.Equal(1, c.LoggingRules.Count);
            var rule = c.LoggingRules[0];
            Assert.Equal(2, rule.Filters.Count);
            var conditionBasedFilter = rule.Filters[0] as ConditionBasedFilter;
            Assert.NotNull(conditionBasedFilter);
            Assert.Equal("starts-with(message, 'x')", conditionBasedFilter.Condition.ToString());
            Assert.Equal(FilterResult.Ignore, conditionBasedFilter.Action);
            conditionBasedFilter = rule.Filters[1] as ConditionBasedFilter;
            Assert.NotNull(conditionBasedFilter);
            Assert.Equal("starts-with(message, 'z')", conditionBasedFilter.Condition.ToString());
            Assert.Equal(FilterResult.Ignore, conditionBasedFilter.Action);
        }

        [Fact]
        public void FiltersTest_ignoreFinal()
        {
            // IgnoreFinal drops the matching event for this rule AND stops
            // processing of later rules (neither d1 nor d2 receives "x-mass").
            LoggingConfiguration c = CreateConfigurationFromString(@" <nlog> <targets> <target name='d1' type='Debug' layout='${message}' /> <target name='d2' type='Debug' layout='${message}' /> </targets> <rules> <logger name='*' level='Warn' writeTo='d1'> <filters> <when condition=""starts-with(message, 'x')"" action='IgnoreFinal' /> </filters> </logger> <logger name='*' level='Warn' writeTo='d2'> </logger> </rules> </nlog>");

            LogManager.Configuration = c;
            var logger = LogManager.GetLogger("logger1");
            logger.Warn("test 1");
            AssertDebugLastMessage("d1", "test 1");
            AssertDebugLastMessage("d2", "test 1");

            logger.Warn("x-mass");
            AssertDebugLastMessage("d1", "test 1");
            AssertDebugLastMessage("d2", "test 1");
        }

        [Fact]
        public void FiltersTest_logFinal()
        {
            // LogFinal writes the matching event to this rule's target but stops
            // later rules (d1 gets "x-mass", d2 keeps its previous message).
            LoggingConfiguration c = CreateConfigurationFromString(@" <nlog> <targets> <target name='d1' type='Debug' layout='${message}' /> <target name='d2' type='Debug' layout='${message}' /> </targets> <rules> <logger name='*' level='Warn' writeTo='d1'> <filters> <when condition=""starts-with(message, 'x')"" action='LogFinal' /> </filters> </logger> <logger name='*' level='Warn' writeTo='d2'> </logger> </rules> </nlog>");

            LogManager.Configuration = c;
            var logger = LogManager.GetLogger("logger1");
            logger.Warn("test 1");
            AssertDebugLastMessage("d1", "test 1");
            AssertDebugLastMessage("d2", "test 1");

            logger.Warn("x-mass");
            AssertDebugLastMessage("d1", "x-mass");
            AssertDebugLastMessage("d2", "test 1");
        }

        [Fact]
        public void FiltersTest_ignore()
        {
            // Plain Ignore drops the event only for this rule; later rules still
            // run (d1 keeps its previous message, d2 receives "x-mass").
            LoggingConfiguration c = CreateConfigurationFromString(@" <nlog> <targets> <target name='d1' type='Debug' layout='${message}' /> <target name='d2' type='Debug' layout='${message}' /> </targets> <rules> <logger name='*' level='Warn' writeTo='d1'> <filters> <when condition=""starts-with(message, 'x')"" action='Ignore' /> </filters> </logger> <logger name='*' level='Warn' writeTo='d2'> </logger> </rules> </nlog>");

            LogManager.Configuration = c;
            var logger = LogManager.GetLogger("logger1");
            logger.Warn("test 1");
            AssertDebugLastMessage("d1", "test 1");
            AssertDebugLastMessage("d2", "test 1");

            logger.Warn("x-mass");
            AssertDebugLastMessage("d1", "test 1");
            AssertDebugLastMessage("d2", "x-mass");
        }

        [Fact]
        public void LoggingRule_Final_SuppressesOnlyMatchingLevels()
        {
            // final='true' on a level='Debug' rule for logger 'a' suppresses only
            // Debug for 'a'; Info still flows, and logger 'b' is unaffected.
            LoggingConfiguration c = CreateConfigurationFromString(@" <nlog> <targets> <target name='d1' type='Debug' layout='${message}' /> </targets> <rules> <logger name='a' level='Debug' final='true' /> <logger name='*' minlevel='Debug' writeTo='d1' /> </rules> </nlog>");

            LogManager.Configuration = c;
            Logger a = LogManager.GetLogger("a");
            Assert.False(a.IsDebugEnabled);
            Assert.True(a.IsInfoEnabled);
            a.Info("testInfo");
            a.Debug("suppressedDebug");
            AssertDebugLastMessage("d1", "testInfo");

            Logger b = LogManager.GetLogger("b");
            b.Debug("testDebug");
            AssertDebugLastMessage("d1", "testDebug");
        }

        [Fact]
        public void UnusedTargetsShouldBeLoggedToInternalLogger()
        {
            // Targets referenced by no rule (d4, d5) are reported as warnings in
            // the internal log file.
            string tempFileName = Path.GetTempFileName();
            try
            {
                CreateConfigurationFromString("<nlog internalLogFile='" + tempFileName + @"' internalLogLevel='Warn'> <targets> <target name='d1' type='Debug' /> <target name='d2' type='Debug' /> <target name='d3' type='Debug' /> <target name='d4' type='Debug' /> <target name='d5' type='Debug' /> </targets> <rules> <logger name='*' level='Debug' writeTo='d1' /> <logger name='*' level='Debug' writeTo='d1,d2,d3' /> </rules> </nlog>");

                AssertFileContains(tempFileName, "Unused target detected. Add a rule for this target to the configuration. TargetName: d4", Encoding.UTF8);
                AssertFileContains(tempFileName, "Unused target detected. Add a rule for this target to the configuration. TargetName: d5", Encoding.UTF8);
            }
            finally
            {
                // Release the internal log file so it can be deleted.
                NLog.Common.InternalLogger.Reset();
                if (File.Exists(tempFileName))
                {
                    File.Delete(tempFileName);
                }
            }
        }

        [Fact]
        public void UnusedTargetsShouldBeLoggedToInternalLogger_PermitWrapped()
        {
            // A target used only as the inner target of a wrapper (d3 inside the
            // MockWrapper d2) must NOT be reported as unused; only the truly
            // unreferenced d5 is reported.
            string tempFileName = Path.GetTempFileName();
            try
            {
                CreateConfigurationFromString("<nlog internalLogFile='" + tempFileName + @"' internalLogLevel='Warn'> <extensions> <add assembly='NLog.UnitTests'/> </extensions> <targets> <target name='d1' type='Debug' /> <target name='d2' type='MockWrapper'> <target name='d3' type='Debug' /> </target> <target name='d4' type='Debug' /> <target name='d5' type='Debug' /> </targets> <rules> <logger name='*' level='Debug' writeTo='d1' /> <logger name='*' level='Debug' writeTo='d1,d2,d4' /> </rules> </nlog>");

                AssertFileNotContains(tempFileName, "Unused target detected. Add a rule for this target to the configuration. TargetName: d2", Encoding.UTF8);
                AssertFileNotContains(tempFileName, "Unused target detected. Add a rule for this target to the configuration. TargetName: d3", Encoding.UTF8);
                AssertFileNotContains(tempFileName, "Unused target detected. Add a rule for this target to the configuration. TargetName: d4", Encoding.UTF8);
                AssertFileContains(tempFileName, "Unused target detected. Add a rule for this target to the configuration. TargetName: d5", Encoding.UTF8);
            }
            finally
            {
                // Release the internal log file so it can be deleted.
                NLog.Common.InternalLogger.Reset();
                if (File.Exists(tempFileName))
                {
                    File.Delete(tempFileName);
                }
            }
        }

        [Fact]
        public void LoggingRule_LevelOff_NotSetAsActualLogLevel()
        {
            // level='Off' parses into a rule but never enables logging, and
            // enabling/disabling levels on it must not throw.
            LoggingConfiguration c = CreateConfigurationFromString(@" <nlog> <targets> <target name='l1' type='Debug' layout='${message}' /> <target name='l2' type='Debug' layout='${message}' /> </targets> <rules> <logger name='a' level='Off' appendTo='l1' /> <logger name='a' minlevel='Debug' appendTo='l2' /> </rules> </nlog>");

            LogManager.Configuration = c;
            Logger a = LogManager.GetLogger("a");

            Assert.True(c.LoggingRules.Count == 2, "All rules should have been loaded.");
            Assert.False(c.LoggingRules[0].IsLoggingEnabledForLevel(LogLevel.Off), "Log level Off should always return false.");
            // The two functions below should not throw an exception.
            c.LoggingRules[0].EnableLoggingForLevel(LogLevel.Debug);
            c.LoggingRules[0].DisableLoggingForLevel(LogLevel.Debug);
        }
    }
}
using Lucene.Net.Support;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Runtime.CompilerServices;

namespace Lucene.Net.Index
{
    /*
     * Licensed to the Apache Software Foundation (ASF) under one or more
     * contributor license agreements.  See the NOTICE file distributed with
     * this work for additional information regarding copyright ownership.
     * The ASF licenses this file to You under the Apache License, Version 2.0
     * (the "License"); you may not use this file except in compliance with
     * the License.  You may obtain a copy of the License at
     *
     *     http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */

    using ArrayUtil = Lucene.Net.Util.ArrayUtil;
    using Codec = Lucene.Net.Codecs.Codec;
    using Counter = Lucene.Net.Util.Counter;
    using FieldInfosWriter = Lucene.Net.Codecs.FieldInfosWriter;
    using IOContext = Lucene.Net.Store.IOContext;

    /// <summary>
    /// This is a <see cref="DocConsumer"/> that gathers all fields under the
    /// same name, and calls per-field consumers to process field
    /// by field.  This class doesn't do any "real" work
    /// of its own: it just forwards the fields to a
    /// <see cref="DocFieldConsumer"/>.
    /// </summary>
    internal sealed class DocFieldProcessor : DocConsumer
    {
        // Downstream consumer chain for indexed fields.
        internal readonly DocFieldConsumer consumer;

        // Consumer that writes stored fields.
        internal readonly StoredFieldsConsumer storedConsumer;

        // Codec used to write the per-segment FieldInfos on flush.
        internal readonly Codec codec;

        // Holds all fields seen in current doc
        internal DocFieldProcessorPerField[] fields = new DocFieldProcessorPerField[1];
        internal int fieldCount;

        // Hash table for all fields ever seen (chained buckets; size is a
        // power of two, hashMask = size - 1).
        internal DocFieldProcessorPerField[] fieldHash = new DocFieldProcessorPerField[2];
        internal int hashMask = 1;
        internal int totalFieldCount;

        // Incremented per document; used to detect the first occurrence of a
        // field within the current document (see ProcessDocument).
        internal int fieldGen;

        internal readonly DocumentsWriterPerThread.DocState docState;
        internal readonly Counter bytesUsed;

        public DocFieldProcessor(DocumentsWriterPerThread docWriter, DocFieldConsumer consumer, StoredFieldsConsumer storedConsumer)
        {
            this.docState = docWriter.docState;
            this.codec = docWriter.codec;
            this.bytesUsed = docWriter.bytesUsed;
            this.consumer = consumer;
            this.storedConsumer = storedConsumer;
        }

        /// <summary>
        /// Flushes stored fields and the per-field consumer chain for the
        /// segment, then writes the (possibly consumer-modified) FieldInfos.
        /// </summary>
        [MethodImpl(MethodImplOptions.NoInlining)]
        public override void Flush(SegmentWriteState state)
        {
            // Map field name -> per-field consumer for everything ever seen.
            IDictionary<string, DocFieldConsumerPerField> childFields = new Dictionary<string, DocFieldConsumerPerField>();
            ICollection<DocFieldConsumerPerField> fields = Fields();
            foreach (DocFieldConsumerPerField f in fields)
            {
                childFields[f.FieldInfo.Name] = f;
            }

            Debug.Assert(fields.Count == totalFieldCount);

            storedConsumer.Flush(state);
            consumer.Flush(childFields, state);

            // Important to save after asking consumer to flush so
            // consumer can alter the FieldInfo* if necessary.  EG,
            // FreqProxTermsWriter does this with
            // FieldInfo.storePayload.
            FieldInfosWriter infosWriter = codec.FieldInfosFormat.FieldInfosWriter;
            infosWriter.Write(state.Directory, state.SegmentInfo.Name, "", state.FieldInfos, IOContext.DEFAULT);
        }

        /// <summary>
        /// Aborts every per-field consumer, the stored-fields consumer and the
        /// field consumer chain, recording the first exception seen and
        /// rethrowing it only after all abort calls have been attempted.
        /// </summary>
        [MethodImpl(MethodImplOptions.NoInlining)]
        public override void Abort()
        {
            // First exception encountered; subsequent ones are swallowed so
            // that every consumer still gets its Abort() call.
            Exception th = null;
            foreach (DocFieldProcessorPerField field in fieldHash)
            {
                // Walk the hash-bucket chain.
                DocFieldProcessorPerField fieldNext = field;
                while (fieldNext != null)
                {
                    DocFieldProcessorPerField next = fieldNext.next;
                    try
                    {
                        fieldNext.Abort();
                    }
                    catch (Exception t)
                    {
                        if (th == null)
                        {
                            th = t;
                        }
                    }
                    fieldNext = next;
                }
            }

            try
            {
                storedConsumer.Abort();
            }
            catch (Exception t)
            {
                if (th == null)
                {
                    th = t;
                }
            }

            try
            {
                consumer.Abort();
            }
            catch (Exception t)
            {
                if (th == null)
                {
                    th = t;
                }
            }

            // If any errors occurred, throw it.
            if (th != null)
            {
                // NOTE(review): 'th' is declared as Exception, so this check is
                // always true; it mirrors the Java original where 'th' was a
                // Throwable that might not be an Exception.
                if (th is Exception)
                {
                    throw (Exception)th;
                }
                // defensive code - we should not hit unchecked exceptions
                throw new Exception(th.Message, th);
            }
        }

        /// <summary>
        /// Returns the per-field consumers for every field ever seen, gathered
        /// by walking all hash buckets. The result size must equal
        /// <see cref="totalFieldCount"/>.
        /// </summary>
        public ICollection<DocFieldConsumerPerField> Fields()
        {
            ICollection<DocFieldConsumerPerField> fields = new HashSet<DocFieldConsumerPerField>();
            for (int i = 0; i < fieldHash.Length; i++)
            {
                DocFieldProcessorPerField field = fieldHash[i];
                while (field != null)
                {
                    fields.Add(field.consumer);
                    field = field.next;
                }
            }
            Debug.Assert(fields.Count == totalFieldCount);
            return fields;
        }

        /// <summary>
        /// Doubles the field hash table and redistributes all chained entries
        /// into the new buckets. Called when the table is half full.
        /// </summary>
        private void Rehash()
        {
            int newHashSize = (fieldHash.Length * 2);
            Debug.Assert(newHashSize > fieldHash.Length);

            DocFieldProcessorPerField[] newHashArray = new DocFieldProcessorPerField[newHashSize];

            // Rehash
            int newHashMask = newHashSize - 1;
            for (int j = 0; j < fieldHash.Length; j++)
            {
                DocFieldProcessorPerField fp0 = fieldHash[j];
                while (fp0 != null)
                {
                    int hashPos2 = fp0.fieldInfo.Name.GetHashCode() & newHashMask;
                    DocFieldProcessorPerField nextFP0 = fp0.next;
                    // Prepend into the new bucket.
                    fp0.next = newHashArray[hashPos2];
                    newHashArray[hashPos2] = fp0;
                    fp0 = nextFP0;
                }
            }

            fieldHash = newHashArray;
            hashMask = newHashMask;
        }

        /// <summary>
        /// Processes one document: registers/updates a per-field slot for each
        /// field in the document, forwards field values to the per-field and
        /// stored-fields consumers, then processes fields in sorted-name order.
        /// </summary>
        public override void ProcessDocument(FieldInfos.Builder fieldInfos)
        {
            consumer.StartDocument();
            storedConsumer.StartDocument();

            fieldCount = 0;

            // Unique generation for this document; lets us detect the first
            // time a field appears within this doc.
            int thisFieldGen = fieldGen++;

            // Absorb any new fields first seen in this document.
            // Also absorb any changes to fields we had already
            // seen before (eg suddenly turning on norms or
            // vectors, etc.):

            foreach (IIndexableField field in docState.doc)
            {
                string fieldName = field.Name;

                // Make sure we have a PerField allocated
                int hashPos = fieldName.GetHashCode() & hashMask;
                DocFieldProcessorPerField fp = fieldHash[hashPos];
                while (fp != null && !fp.fieldInfo.Name.Equals(fieldName, StringComparison.Ordinal))
                {
                    fp = fp.next;
                }

                if (fp == null)
                {
                    // TODO FI: we need to genericize the "flags" that a
                    // field holds, and, how these flags are merged; it
                    // needs to be more "pluggable" such that if I want
                    // to have a new "thing" my Fields can do, I can
                    // easily add it
                    FieldInfo fi = fieldInfos.AddOrUpdate(fieldName, field.IndexableFieldType);

                    fp = new DocFieldProcessorPerField(this, fi);
                    fp.next = fieldHash[hashPos];
                    fieldHash[hashPos] = fp;
                    totalFieldCount++;

                    // Grow once the table is half full to keep chains short.
                    if (totalFieldCount >= fieldHash.Length / 2)
                    {
                        Rehash();
                    }
                }
                else
                {
                    // need to addOrUpdate so that FieldInfos can update globalFieldNumbers
                    // with the correct DocValue type (LUCENE-5192)
                    FieldInfo fi = fieldInfos.AddOrUpdate(fieldName, field.IndexableFieldType);
                    Debug.Assert(fi == fp.fieldInfo, "should only have updated an existing FieldInfo instance");
                }

                if (thisFieldGen != fp.lastGen)
                {
                    // First time we're seeing this field for this doc
                    fp.fieldCount = 0;

                    // Grow the per-doc fields array if needed.
                    if (fieldCount == fields.Length)
                    {
                        int newSize = fields.Length * 2;
                        DocFieldProcessorPerField[] newArray = new DocFieldProcessorPerField[newSize];
                        Array.Copy(fields, 0, newArray, 0, fieldCount);
                        fields = newArray;
                    }

                    fields[fieldCount++] = fp;
                    fp.lastGen = thisFieldGen;
                }

                fp.AddField(field);
                storedConsumer.AddField(docState.docID, field, fp.fieldInfo);
            }

            // If we are writing vectors then we must visit
            // fields in sorted order so they are written in
            // sorted order.  TODO: we actually only need to
            // sort the subset of fields that have vectors
            // enabled; we could save [small amount of] CPU
            // here.
            ArrayUtil.IntroSort(fields, 0, fieldCount, fieldsComp);
            for (int i = 0; i < fieldCount; i++)
            {
                DocFieldProcessorPerField perField = fields[i];
                perField.consumer.ProcessFields(perField.fields, perField.fieldCount);
            }
        }

        // Orders per-field slots by field name (ordinal comparison) for the
        // sorted-order processing above.
        private static readonly IComparer<DocFieldProcessorPerField> fieldsComp = new ComparerAnonymousInnerClassHelper();

        private class ComparerAnonymousInnerClassHelper : IComparer<DocFieldProcessorPerField>
        {
            public ComparerAnonymousInnerClassHelper()
            {
            }

            public virtual int Compare(DocFieldProcessorPerField o1, DocFieldProcessorPerField o2)
            {
                return o1.fieldInfo.Name.CompareToOrdinal(o2.fieldInfo.Name);
            }
        }

        /// <summary>
        /// Finishes the current document on both consumers; the field-consumer
        /// chain is finished even if the stored-fields consumer throws.
        /// </summary>
        [MethodImpl(MethodImplOptions.NoInlining)]
        internal override void FinishDocument()
        {
            try
            {
                storedConsumer.FinishDocument();
            }
            finally
            {
                consumer.FinishDocument();
            }
        }
    }
}
/* Shellify Copyright (c) 2010-2021 Sebastien Lebreton Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/

using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using Shellify.Core;
using Shellify.ExtraData;
using Shellify.IO;

namespace Shellify
{
    /// <summary>
    /// In-memory representation of a Windows shell link (.lnk) file.
    /// Setting the optional string properties or <see cref="LinkInfo"/> keeps
    /// the corresponding <see cref="Header"/> link flags in sync (flag set when
    /// a value is present, cleared when it is null or empty).
    /// </summary>
    public class ShellLinkFile : IHasIDList
    {
        /// <summary>Fixed shell link header (flags, attributes, times, sizes, show command).</summary>
        public ShellLinkHeader Header { get; set; }

        /// <summary>Optional extra-data blocks stored after the link body.</summary>
        public IList<ExtraDataBlock> ExtraDataBlocks { get; set; }

        /// <summary>Item ID list identifying the link target.</summary>
        public IList<ShItemID> ShItemIDs { get; set; }

        private LinkInfo _linkInfo;

        /// <summary>Link info (volume/path data); toggles <c>LinkFlags.HasLinkInfo</c>.</summary>
        public LinkInfo LinkInfo
        {
            get => _linkInfo;
            set
            {
                _linkInfo = value;
                UpdateHeaderFlags(value, LinkFlags.HasLinkInfo);
            }
        }

        private string _name;

        /// <summary>Link description; toggles <c>LinkFlags.HasName</c>.</summary>
        public string Name
        {
            get => _name;
            set
            {
                _name = value;
                UpdateHeaderFlags(value, LinkFlags.HasName);
            }
        }

        private string _relativePath;

        /// <summary>Relative path to the target; toggles <c>LinkFlags.HasRelativePath</c>.</summary>
        public string RelativePath
        {
            get => _relativePath;
            set
            {
                _relativePath = value;
                UpdateHeaderFlags(value, LinkFlags.HasRelativePath);
            }
        }

        private string _workingDirectory;

        /// <summary>Working directory for the target; toggles <c>LinkFlags.HasWorkingDir</c>.</summary>
        public string WorkingDirectory
        {
            get => _workingDirectory;
            set
            {
                _workingDirectory = value;
                UpdateHeaderFlags(value, LinkFlags.HasWorkingDir);
            }
        }

        private string _arguments;

        /// <summary>Command-line arguments; toggles <c>LinkFlags.HasArguments</c>.</summary>
        public string Arguments
        {
            get => _arguments;
            set
            {
                _arguments = value;
                UpdateHeaderFlags(value, LinkFlags.HasArguments);
            }
        }

        private string _iconLocation;

        /// <summary>Icon location; toggles <c>LinkFlags.HasIconLocation</c>.</summary>
        public string IconLocation
        {
            get => _iconLocation;
            set
            {
                _iconLocation = value;
                UpdateHeaderFlags(value, LinkFlags.HasIconLocation);
            }
        }

        /// <summary>Creates an empty shell link with a fresh header and empty lists.</summary>
        public ShellLinkFile()
        {
            Header = new ShellLinkHeader();
            ExtraDataBlocks = new List<ExtraDataBlock>();
            ShItemIDs = new List<ShItemID>();
        }

        // Sets the given flag when 'item' carries a value, clears it when the
        // value is absent (null, or an empty string).
        private void UpdateHeaderFlags(object item, LinkFlags flag)
        {
            if (item == null || (item is string s && string.IsNullOrEmpty(s)))
            {
                Header.LinkFlags &= ~flag;
            }
            else
            {
                Header.LinkFlags |= flag;
            }
        }

        /// <summary>Dumps the header, link info, blocks, item IDs and properties for debugging.</summary>
        public override string ToString()
        {
            var builder = new StringBuilder();
            if (Header != null) builder.AppendLine(Header.ToString());
            if (LinkInfo != null) builder.AppendLine(LinkInfo.ToString());
            if (ExtraDataBlocks != null)
            {
                foreach (var block in ExtraDataBlocks) builder.AppendLine(block.ToString());
            }
            if (ShItemIDs != null)
            {
                foreach (var shitem in ShItemIDs) builder.AppendLine(shitem.ToString());
            }
            builder.AppendLine(">>File");
            builder.AppendFormat("Name: {0}", Name);
            builder.AppendLine();
            builder.AppendFormat("RelativePath: {0}", RelativePath);
            builder.AppendLine();
            builder.AppendFormat("WorkingDirectory: {0}", WorkingDirectory);
            builder.AppendLine();
            builder.AppendFormat("Arguments: {0}", Arguments);
            builder.AppendLine();
            builder.AppendFormat("IconLocation: {0}", IconLocation);
            builder.AppendLine();
            return builder.ToString();
        }

        /// <summary>
        /// Loads and parses a .lnk file from disk.
        /// </summary>
        /// <param name="filename">Path of the shell link file to read.</param>
        /// <returns>The parsed <see cref="ShellLinkFile"/>.</returns>
        public static ShellLinkFile Load(string filename)
        {
            var result = new ShellLinkFile();
            // FIX: open read-only. The previous FileMode.Open overload defaulted
            // to FileAccess.ReadWrite, which fails for read-only files and for
            // files other processes have open for reading.
            using var stream = new FileStream(filename, FileMode.Open, FileAccess.Read, FileShare.Read);
            using var binaryReader = new BinaryReader(stream);
            var reader = new ShellLinkFileHandler(result);
            reader.ReadFrom(binaryReader);
            return result;
        }

        // Copies attributes/timestamps/size of an existing target into the
        // header; returns the FileSystemInfo for the target either way.
        private static FileSystemInfo SetFileSystemInfo(ShellLinkFile slf, string target)
        {
            var targetInfo = Directory.Exists(target) ? (FileSystemInfo)new DirectoryInfo(target) : new FileInfo(target);

            if (!targetInfo.Exists) return targetInfo;

            slf.Header.FileAttributes = targetInfo.Attributes;
            slf.Header.AccessTime = targetInfo.LastAccessTime;
            slf.Header.CreationTime = targetInfo.CreationTime;
            slf.Header.WriteTime = targetInfo.LastWriteTime;
            if (targetInfo is FileInfo info) slf.Header.FileSize = Convert.ToInt32(info.Length);

            return targetInfo;
        }

        /// <summary>
        /// Creates a link whose target is stored relative to a base directory.
        /// </summary>
        /// <param name="baseDirectory">Directory the relative target is resolved against.</param>
        /// <param name="relativeTarget">Target path; must not be rooted.</param>
        /// <exception cref="ArgumentException">If <paramref name="relativeTarget"/> is an absolute path.</exception>
        public static ShellLinkFile CreateRelative(string baseDirectory, string relativeTarget)
        {
            if (Path.IsPathRooted(relativeTarget)) throw new ArgumentException("Target must be relative to base directory !!!");

            var result = new ShellLinkFile();
            SetFileSystemInfo(result, Path.Combine(baseDirectory, relativeTarget));
            result.Header.ShowCommand = ShowCommand.Normal;
            result.RelativePath = relativeTarget;
            result.WorkingDirectory = ".";
            return result;
        }

        /// <summary>
        /// Creates a link with an absolute target path; the working directory is
        /// the target's directory (or the target itself for directories).
        /// </summary>
        public static ShellLinkFile CreateAbsolute(string target)
        {
            var result = new ShellLinkFile();
            var targetInfo = SetFileSystemInfo(result, target);
            result.Header.ShowCommand = ShowCommand.Normal;
            result.RelativePath = targetInfo.FullName;
            result.WorkingDirectory = targetInfo is FileInfo info ? info.DirectoryName : targetInfo.FullName;
            return result;
        }

        /// <summary>
        /// Serializes this link to disk, overwriting any existing file.
        /// </summary>
        /// <param name="filename">Destination path for the .lnk file.</param>
        public void SaveAs(string filename)
        {
            using var stream = new FileStream(filename, FileMode.Create);
            using var binaryWriter = new BinaryWriter(stream);
            var writer = new ShellLinkFileHandler(this);
            writer.WriteTo(binaryWriter);
        }
    }
}
// // AggregateFixture.cs // // Author: // John Moore <jcwmoore@gmail.com> // // Copyright (c) 2013 John Moore // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
using NUnit.Framework;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Data.SQLite;
using System.Data.Entity;

namespace System.Data.SQLite.Tests.Entity
{
    /// <summary>
    /// LINQ aggregate-operator tests (Count/Sum/Min/Max/Average, simple and grouped)
    /// against an EF model over a throwaway SQLite database.
    /// http://code.msdn.microsoft.com/LINQ-Aggregate-Operators-c51b3869
    /// </summary>
    [TestFixture()]
    public class AggregateFixture
    {
        // Connection string pointing at a unique per-test SQLite database file.
        string _db;

        [SetUp]
        public void Setup()
        {
            _db = string.Format("Data Source={0}.db3", Guid.NewGuid());
        }

        [TearDown]
        public void TearDown()
        {
            // The file name is the part after "Data Source=".
            System.IO.File.Delete(_db.Split('=')[1]);
        }

        /// <summary>Builds one dinner with the fixed title/date used by every test.</summary>
        private static Dinner NewDinner(string address, double value)
        {
            return new Dinner
            {
                Address = address,
                EventDate = DateTime.Today,
                Title = "John's dinner",
                Identifier = Guid.NewGuid(),
                DoubleValue = value
            };
        }

        /// <summary>
        /// Adds the five standard dinners (DoubleValue 1.1 .. 1.5) to <paramref name="ctx"/>
        /// WITHOUT saving, and returns the first one so callers can attach related rows.
        /// </summary>
        private static Dinner SeedDinners(NerdDinners ctx)
        {
            var first = NewDinner("test1", 1.1);
            ctx.Dinners.Add(first);
            ctx.Dinners.Add(NewDinner("test2", 1.2));
            ctx.Dinners.Add(NewDinner("test3", 1.3));
            ctx.Dinners.Add(NewDinner("test4", 1.4));
            ctx.Dinners.Add(NewDinner("test5", 1.5));
            return first;
        }

        [Test()]
        public void SimpleCountTest()
        {
            using (var ctx = new NerdDinners(new SQLiteConnection(_db)))
            {
                SeedDinners(ctx);
                ctx.SaveChanges();
            }

            using (var ctx = new NerdDinners(new SQLiteConnection(_db)))
            {
                var res = ctx.Dinners.Count();
                Assert.That(res, Is.EqualTo(5));
            }
        }

        // Name (including the "Contidional" typo) is kept: test names are public API
        // for runners and CI filters.
        [Test()]
        public void CountContidionalTest()
        {
            using (var ctx = new NerdDinners(new SQLiteConnection(_db)))
            {
                SeedDinners(ctx);
                ctx.SaveChanges();
            }

            using (var ctx = new NerdDinners(new SQLiteConnection(_db)))
            {
                // Three of the five seeded values (1.3, 1.4, 1.5) exceed 1.2.
                var res = ctx.Dinners.Count(x => x.DoubleValue > 1.2);
                Assert.That(res, Is.EqualTo(3));
            }
        }

        [Test()]
        public void CountNestedTest()
        {
            using (var ctx = new NerdDinners(new SQLiteConnection(_db)))
            {
                var dinner = SeedDinners(ctx);
                // Only the first dinner gets RSVPs, so exactly one group has Ct > 0.
                ctx.Rsvps.Add(new Rsvp { Dinner = dinner, Email = "testemail1" });
                ctx.Rsvps.Add(new Rsvp { Dinner = dinner, Email = "testemail2" });
                ctx.Rsvps.Add(new Rsvp { Dinner = dinner, Email = "testemail3" });
                ctx.Rsvps.Add(new Rsvp { Dinner = dinner, Email = "testemail4" });
                ctx.SaveChanges();
            }

            using (var ctx = new NerdDinners(new SQLiteConnection(_db)))
            {
                var res = (from d in ctx.Dinners
                           select new { Title = d.Title, Ct = d.Rsvps.Count() }).ToList();
                Assert.That(res.Count, Is.EqualTo(5));
                Assert.That(res.Count(r => r.Ct > 0), Is.EqualTo(1));
            }
        }

        [Test()]
        public void SimpleSumTest()
        {
            using (var ctx = new NerdDinners(new SQLiteConnection(_db)))
            {
                SeedDinners(ctx);
                ctx.SaveChanges();
            }

            using (var ctx = new NerdDinners(new SQLiteConnection(_db)))
            {
                var res = ctx.Dinners.Select(d => d.DoubleValue).Sum();
                Assert.That(res, Is.GreaterThanOrEqualTo(6.5));
            }
        }

        [Test()]
        public void GroupedSumTest()
        {
            using (var ctx = new NerdDinners(new SQLiteConnection(_db)))
            {
                SeedDinners(ctx);
                ctx.SaveChanges();
            }

            using (var ctx = new NerdDinners(new SQLiteConnection(_db)))
            {
                // All dinners share one title, so there is a single group.
                var res = (from d in ctx.Dinners
                           group d by d.Title into g
                           select new { g.Key, Value = g.Sum(x => x.DoubleValue) }).ToList();
                Assert.That(res.First().Value, Is.GreaterThanOrEqualTo(6.5));
            }
        }

        [Test()]
        public void SimpleMinTest()
        {
            using (var ctx = new NerdDinners(new SQLiteConnection(_db)))
            {
                SeedDinners(ctx);
                ctx.SaveChanges();
            }

            using (var ctx = new NerdDinners(new SQLiteConnection(_db)))
            {
                // Min returns a stored value unchanged, so exact equality is safe here.
                var res = ctx.Dinners.Select(d => d.DoubleValue).Min();
                Assert.That(res, Is.EqualTo(1.1));
            }
        }

        [Test()]
        public void GroupedMinTest()
        {
            using (var ctx = new NerdDinners(new SQLiteConnection(_db)))
            {
                SeedDinners(ctx);
                ctx.SaveChanges();
            }

            using (var ctx = new NerdDinners(new SQLiteConnection(_db)))
            {
                var res = (from d in ctx.Dinners
                           group d by d.Title into g
                           select new { g.Key, Value = g.Min(x => x.DoubleValue) }).ToList();
                Assert.That(res.First().Value, Is.EqualTo(1.1));
            }
        }

        [Test()]
        public void SimpleMaxTest()
        {
            using (var ctx = new NerdDinners(new SQLiteConnection(_db)))
            {
                SeedDinners(ctx);
                ctx.SaveChanges();
            }

            using (var ctx = new NerdDinners(new SQLiteConnection(_db)))
            {
                var res = ctx.Dinners.Select(d => d.DoubleValue).Max();
                Assert.That(res, Is.EqualTo(1.5));
            }
        }

        [Test()]
        public void GroupedMaxTest()
        {
            using (var ctx = new NerdDinners(new SQLiteConnection(_db)))
            {
                SeedDinners(ctx);
                ctx.SaveChanges();
            }

            using (var ctx = new NerdDinners(new SQLiteConnection(_db)))
            {
                var res = (from d in ctx.Dinners
                           group d by d.Title into g
                           select new { g.Key, Value = g.Max(x => x.DoubleValue) }).ToList();
                Assert.That(res.First().Value, Is.EqualTo(1.5));
            }
        }

        [Test()]
        public void SimpleAvgTest()
        {
            using (var ctx = new NerdDinners(new SQLiteConnection(_db)))
            {
                SeedDinners(ctx);
                ctx.SaveChanges();
            }

            using (var ctx = new NerdDinners(new SQLiteConnection(_db)))
            {
                var res = ctx.Dinners.Select(d => d.DoubleValue).Average();
                // The average is computed in SQL over binary doubles; use a tolerance
                // instead of exact floating-point equality.
                Assert.That(res, Is.EqualTo(1.3).Within(1e-9));
            }
        }

        [Test()]
        public void GroupedAvgTest()
        {
            using (var ctx = new NerdDinners(new SQLiteConnection(_db)))
            {
                SeedDinners(ctx);
                ctx.SaveChanges();
            }

            using (var ctx = new NerdDinners(new SQLiteConnection(_db)))
            {
                var res = (from d in ctx.Dinners
                           group d by d.Title into g
                           select new { g.Key, Value = g.Average(x => x.DoubleValue) }).ToList();
                // Tolerance for the same floating-point reason as SimpleAvgTest.
                Assert.That(res.First().Value, Is.EqualTo(1.3).Within(1e-9));
            }
        }
    }
}
using System;
using System.Collections;

namespace BenTools.Data
{
    /// <summary>
    /// Contract for a priority queue: Push/Pop/Peek plus Update(i) to restore heap
    /// order after an element at a known index has been mutated.
    /// </summary>
    public interface IPriorityQueue : ICollection, ICloneable, IList
    {
        int Push(object O);
        object Pop();
        object Peek();
        void Update(int i);
    }

    /// <summary>
    /// Min-heap priority queue backed by an <see cref="ArrayList"/>; ordering is
    /// delegated to an <see cref="IComparer"/> (default: <see cref="Comparer.Default"/>).
    /// </summary>
    public class BinaryPriorityQueue : IPriorityQueue, ICollection, ICloneable, IList
    {
        protected ArrayList InnerList = new ArrayList();
        protected IComparer Comparer;

        #region contructors
        public BinaryPriorityQueue() : this(System.Collections.Comparer.Default)
        {
        }

        public BinaryPriorityQueue(IComparer c)
        {
            Comparer = c;
        }

        public BinaryPriorityQueue(int C) : this(System.Collections.Comparer.Default, C)
        {
        }

        public BinaryPriorityQueue(IComparer c, int Capacity)
        {
            Comparer = c;
            InnerList.Capacity = Capacity;
        }

        protected BinaryPriorityQueue(ArrayList Core, IComparer Comp, bool Copy)
        {
            // Either wrap the caller's list directly or take a defensive copy of it.
            InnerList = Copy ? Core.Clone() as ArrayList : Core;
            Comparer = Comp;
        }
        #endregion

        /// <summary>Swaps the elements at indices <paramref name="i"/> and <paramref name="j"/>.</summary>
        protected void SwitchElements(int i, int j)
        {
            object temp = InnerList[i];
            InnerList[i] = InnerList[j];
            InnerList[j] = temp;
        }

        /// <summary>Compares the elements at two heap indices via the configured comparer.</summary>
        protected virtual int OnCompare(int i, int j)
        {
            return Comparer.Compare(InnerList[i], InnerList[j]);
        }

        #region public methods
        /// <summary>
        /// Push an object onto the PQ.
        /// </summary>
        /// <param name="O">The new object</param>
        /// <returns>The index in the list where the object is _now_. This will change when objects are taken from or put onto the PQ.</returns>
        public int Push(object O)
        {
            int index = InnerList.Count;
            InnerList.Add(O);
            // Sift the new element up until its parent is no larger.
            while (index > 0)
            {
                int parent = (index - 1) / 2;
                if (OnCompare(index, parent) >= 0)
                    break;
                SwitchElements(index, parent);
                index = parent;
            }
            return index;
        }

        /// <summary>
        /// Get the smallest object and remove it.
        /// </summary>
        /// <returns>The smallest object</returns>
        public object Pop()
        {
            object result = InnerList[0];
            // Move the last element to the root, then sift it down.
            int last = InnerList.Count - 1;
            InnerList[0] = InnerList[last];
            InnerList.RemoveAt(last);

            int index = 0;
            while (true)
            {
                int smallest = index;
                int left = 2 * index + 1;
                int right = 2 * index + 2;
                // Pick whichever child (if any) is smaller than the current candidate.
                if (left < InnerList.Count && OnCompare(smallest, left) > 0)
                    smallest = left;
                if (right < InnerList.Count && OnCompare(smallest, right) > 0)
                    smallest = right;
                if (smallest == index)
                    break;
                SwitchElements(smallest, index);
                index = smallest;
            }
            return result;
        }

        /// <summary>
        /// Notify the PQ that the object at position i has changed
        /// and the PQ needs to restore order.
        /// Since you dont have access to any indexes (except by using the
        /// explicit IList.this) you should not call this function without knowing exactly
        /// what you do.
        /// </summary>
        /// <param name="i">The index of the changed object.</param>
        public void Update(int i)
        {
            int index = i;
            // First try to sift the element up towards the root.
            while (index > 0)
            {
                int parent = (index - 1) / 2;
                if (OnCompare(index, parent) >= 0)
                    break;
                SwitchElements(index, parent);
                index = parent;
            }
            // If it moved up, the subtree below position i is untouched — done.
            if (index < i)
                return;
            // Otherwise sift it down, exactly as in Pop().
            while (true)
            {
                int smallest = index;
                int left = 2 * index + 1;
                int right = 2 * index + 2;
                if (left < InnerList.Count && OnCompare(smallest, left) > 0)
                    smallest = left;
                if (right < InnerList.Count && OnCompare(smallest, right) > 0)
                    smallest = right;
                if (smallest == index)
                    break;
                SwitchElements(smallest, index);
                index = smallest;
            }
        }

        /// <summary>
        /// Get the smallest object without removing it.
        /// </summary>
        /// <returns>The smallest object, or null when the queue is empty.</returns>
        public object Peek()
        {
            return InnerList.Count > 0 ? InnerList[0] : null;
        }

        public bool Contains(object value)
        {
            return InnerList.Contains(value);
        }

        public void Clear()
        {
            InnerList.Clear();
        }

        public int Count
        {
            get { return InnerList.Count; }
        }

        IEnumerator IEnumerable.GetEnumerator()
        {
            return InnerList.GetEnumerator();
        }

        public void CopyTo(Array array, int index)
        {
            InnerList.CopyTo(array, index);
        }

        public object Clone()
        {
            // Clone copies the backing list but shares the comparer.
            return new BinaryPriorityQueue(InnerList, Comparer, true);
        }

        public bool IsSynchronized
        {
            get { return InnerList.IsSynchronized; }
        }

        public object SyncRoot
        {
            get { return this; }
        }
        #endregion

        #region explicit implementation
        bool IList.IsReadOnly
        {
            get { return false; }
        }

        object IList.this[int index]
        {
            get { return InnerList[index]; }
            set
            {
                // Replacing an element may violate heap order; restore it.
                InnerList[index] = value;
                Update(index);
            }
        }

        int IList.Add(object o)
        {
            return Push(o);
        }

        // Arbitrary positional mutation would break the heap invariant, so the
        // index-based IList mutators are unsupported.
        void IList.RemoveAt(int index)
        {
            throw new NotSupportedException();
        }

        void IList.Insert(int index, object value)
        {
            throw new NotSupportedException();
        }

        void IList.Remove(object value)
        {
            throw new NotSupportedException();
        }

        int IList.IndexOf(object value)
        {
            throw new NotSupportedException();
        }

        bool IList.IsFixedSize
        {
            get { return false; }
        }

        /// <summary>Returns a queue sharing a synchronized wrapper of <paramref name="P"/>'s storage.</summary>
        public static BinaryPriorityQueue Syncronized(BinaryPriorityQueue P)
        {
            return new BinaryPriorityQueue(ArrayList.Synchronized(P.InnerList), P.Comparer, false);
        }

        /// <summary>Returns a queue sharing a read-only wrapper of <paramref name="P"/>'s storage.</summary>
        public static BinaryPriorityQueue ReadOnly(BinaryPriorityQueue P)
        {
            return new BinaryPriorityQueue(ArrayList.ReadOnly(P.InnerList), P.Comparer, false);
        }
        #endregion
    }
}
using Signum.Engine.Files;
using Signum.Engine.Authorization;
using Signum.Engine.Basics;
using Signum.Engine.DynamicQuery;
using Signum.Engine.Maps;
using Signum.Engine.Operations;
using Signum.Engine.Processes;
using Signum.Entities.Files;
using Signum.Entities.Processes;
using Signum.Utilities;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using System.Reflection;
using Signum.Entities.Printing;
using Signum.Entities;
using System.IO;
using Signum.Engine.Scheduler;
using Signum.Entities.Basics;

namespace Signum.Engine.Printing
{
    /// <summary>
    /// Business logic for the printing queue: registers the print-line/print-package
    /// entities, the background process that prints enqueued lines, and the scheduled
    /// task that deletes the files of already-printed lines.
    /// </summary>
    public static class PrintingLogic
    {
        /// <summary>Minutes a printed file is kept before RemoveOldFiles deletes it.</summary>
        public static int DeleteFilesAfter = 24 * 60; //Minutes

        /// <summary>
        /// Hook that performs the actual printing of a line; the host application
        /// must replace this default (which only throws).
        /// </summary>
        public static Action<PrintLineEntity> Print = e => throw new NotImplementedException("PrintingLogic.Print is not defined");

        [AutoExpressionField]
        public static IQueryable<PrintLineEntity> Lines(this PrintPackageEntity e) =>
            As.Expression(() => Database.Query<PrintLineEntity>().Where(a => a.Package.Is(e)));

        /// <summary>File type used by the CreateTest operation; set in <see cref="Start"/>.</summary>
        public static FileTypeSymbol? TestFileType;

        /// <summary>
        /// Registers entities, queries, the print-package process, permissions, the
        /// print-line operation graph and the old-file cleanup task. Idempotent via
        /// <c>sb.NotDefined</c>.
        /// </summary>
        public static void Start(SchemaBuilder sb, FileTypeSymbol? testFileType = null)
        {
            if (sb.NotDefined(MethodInfo.GetCurrentMethod()))
            {
                TestFileType = testFileType;

                sb.Include<PrintLineEntity>()
                    .WithQuery(() => p => new
                    {
                        Entity = p,
                        p.CreationDate,
                        p.File,
                        p.State,
                        p.Package,
                        p.PrintedOn,
                        p.Referred,
                    });

                sb.Include<PrintPackageEntity>()
                    .WithQuery(() => e => new
                    {
                        Entity = e,
                        e.Id,
                        e.Name
                    });

                ProcessLogic.AssertStarted(sb);
                ProcessLogic.Register(PrintPackageProcess.PrintPackage, new PrintPackageAlgorithm());
                PermissionAuthLogic.RegisterPermissions(PrintPermission.ViewPrintPanel);
                PrintLineGraph.Register();

                SimpleTaskLogic.Register(PrintTask.RemoveOldFiles, (ScheduledTaskContext ctx) =>
                {
                    // NOTE(review): CreationDate is compared against DateTime.Now while the
                    // rest of this module stamps times with TimeZoneManager.Now — confirm
                    // both clocks agree in deployments that override the time zone.
                    var lines = Database.Query<PrintLineEntity>()
                        .Where(a => a.State == PrintLineState.Printed)
                        .Where(b => b.CreationDate <= DateTime.Now.AddMinutes(-DeleteFilesAfter));

                    foreach (var line in lines)
                    {
                        try
                        {
                            using (var tr = new Transaction())
                            {
                                line.File.DeleteFileOnCommit();
                                line.State = PrintLineState.PrintedAndDeleted;
                                // BUG FIX: was AllowSave<PackageLineEntity>(), which does not
                                // authorize saving the PrintLineEntity modified here and made
                                // line.Save() fail.
                                using (OperationLogic.AllowSave<PrintLineEntity>())
                                    line.Save();
                                tr.Commit();
                            }
                        }
                        catch (Exception e)
                        {
                            // Best-effort cleanup: log the failure and continue with the next line.
                            e.LogException();
                        }
                    }
                    return null;
                });
            }
        }

        /// <summary>Process algorithm that prints every not-yet-printed line of a package.</summary>
        public class PrintPackageAlgorithm : IProcessAlgorithm
        {
            public void Execute(ExecutingProcess executingProcess)
            {
                PrintPackageEntity package = (PrintPackageEntity)executingProcess.Data!;
                executingProcess.ForEachLine(package.Lines().Where(a => a.State != PrintLineState.Printed), line =>
                {
                    PrintLineGraph.Print(line);
                });
            }
        }

        /// <summary>Creates and saves a ReadyToPrint line from raw file content.</summary>
        public static PrintLineEntity CreateLine(Entity referred, FileTypeSymbol fileType, string fileName, byte[] content)
        {
            return CreateLine(referred, new FilePathEmbedded(fileType, fileName, content));
        }

        /// <summary>Creates and saves a ReadyToPrint line for an existing embedded file.</summary>
        public static PrintLineEntity CreateLine(Entity referred, FilePathEmbedded file)
        {
            return new PrintLineEntity
            {
                Referred = referred.ToLite(),
                State = PrintLineState.ReadyToPrint,
                File = file,
            }.Save();
        }

        /// <summary>
        /// Bundles the pending ReadyToPrint lines (optionally filtered by file type)
        /// into a new package and enqueues a PrintPackage process for it.
        /// </summary>
        /// <returns>The created process, or null when there is nothing to print.</returns>
        public static ProcessEntity? CreateProcess(FileTypeSymbol? fileType = null)
        {
            using (var tr = new Transaction())
            {
                var query = Database.Query<PrintLineEntity>()
                    .Where(a => a.State == PrintLineState.ReadyToPrint);

                if (fileType != null)
                    query = query.Where(a => a.File.FileType == fileType);

                // Count once: the previous code issued two identical COUNT queries.
                var count = query.Count();
                if (count == 0)
                    return null;

                var package = new PrintPackageEntity()
                {
                    Name = fileType?.ToString() + " (" + count + ")"
                }.Save();

                query.UnsafeUpdate()
                    .Set(a => a.Package, a => package.ToLite())
                    .Set(a => a.State, a => PrintLineState.Enqueued)
                    .Execute();

                var result = ProcessLogic.Create(PrintPackageProcess.PrintPackage, package).Save();

                return tr.Commit(result);
            }
        }

        /// <summary>Pending-print counts grouped by file type, for the print panel.</summary>
        public static List<PrintStat> GetReadyToPrintStats()
        {
            return Database.Query<PrintLineEntity>()
                .Where(a => a.State == PrintLineState.ReadyToPrint)
                .GroupBy(a => a.File.FileType)
                .Select(gr => new PrintStat(gr.Key, gr.Count()))
                .ToList();
        }

        /// <summary>
        /// Cancels every ReadyToPrint line of <paramref name="entity"/> with the given
        /// file type, scheduling deletion of their files.
        /// </summary>
        public static void CancelPrinting(Entity entity, FileTypeSymbol fileType)
        {
            var list = ReadyToPrint(entity, fileType).ToList();
            list.ForEach(a =>
            {
                a.State = PrintLineState.Cancelled;
                a.File.DeleteFileOnCommit();
            });
            list.SaveList();
        }

        /// <summary>
        /// Replaces any pending print of <paramref name="entity"/> with a new line for
        /// <paramref name="file"/>; returns the file for fluent chaining.
        /// </summary>
        public static FileContent SavePrintLine(this FileContent file, Entity entity, FileTypeSymbol fileTypeForPrinting)
        {
            CancelPrinting(entity, fileTypeForPrinting);
            CreateLine(entity, fileTypeForPrinting, Path.GetFileName(file.FileName)!, file.Bytes);
            return file;
        }

        /// <summary>Lines of <paramref name="entity"/> that are still waiting to be printed.</summary>
        public static IQueryable<PrintLineEntity> ReadyToPrint(Entity entity, FileTypeSymbol fileType)
        {
            return Database.Query<PrintLineEntity>()
                .Where(a => a.Referred.Is(entity) && a.File.FileType == fileType && a.State == PrintLineState.ReadyToPrint);
        }
    }

    /// <summary>Simple (fileType, count) pair returned by GetReadyToPrintStats.</summary>
    public class PrintStat
    {
        public FileTypeSymbol fileType;
        public int count;

        public PrintStat(FileTypeSymbol fileType, int count)
        {
            this.fileType = fileType;
            this.count = count;
        }
    }

    /// <summary>State-machine operations (create-test/save/print/retry/cancel) for print lines.</summary>
    public class PrintLineGraph : Graph<PrintLineEntity, PrintLineState>
    {
        public static void Register()
        {
            GetState = e => e.State;

            new Construct(PrintLineOperation.CreateTest)
            {
                ToStates = { PrintLineState.NewTest },
                Construct = (args) => new PrintLineEntity
                {
                    State = PrintLineState.NewTest,
                    TestFileType = PrintingLogic.TestFileType!,
                }
            }.Register();

            new Execute(PrintLineOperation.SaveTest)
            {
                CanBeNew = true,
                CanBeModified = true,
                FromStates = { PrintLineState.NewTest },
                ToStates = { PrintLineState.ReadyToPrint },
                Execute = (e, _) =>
                {
                    e.State = PrintLineState.ReadyToPrint;
                }
            }.Register();

            new Execute(PrintLineOperation.Print)
            {
                FromStates = { PrintLineState.ReadyToPrint },
                ToStates = { PrintLineState.Printed, PrintLineState.Error },
                Execute = (e, _) =>
                {
                    Print(e);
                }
            }.Register();

            new Execute(PrintLineOperation.Retry)
            {
                FromStates = { PrintLineState.Error, PrintLineState.Cancelled },
                ToStates = { PrintLineState.ReadyToPrint },
                Execute = (e, _) =>
                {
                    // Back to the queue, detached from any failed package.
                    e.State = PrintLineState.ReadyToPrint;
                    e.Package = null;
                }
            }.Register();

            new Execute(PrintLineOperation.Cancel)
            {
                FromStates = { PrintLineState.ReadyToPrint, PrintLineState.Error },
                ToStates = { PrintLineState.Cancelled },
                Execute = (e, _) =>
                {
                    e.State = PrintLineState.Cancelled;
                    e.Package = null;
                    e.PrintedOn = null;
                    e.File.DeleteFileOnCommit();
                }
            }.Register();
        }

        /// <summary>
        /// Prints one line via the <see cref="PrintingLogic.Print"/> hook, marking it
        /// Printed on success; on failure the error is logged (in a fresh transaction,
        /// so the Error state survives the rollback) and rethrown.
        /// </summary>
        public static void Print(PrintLineEntity line)
        {
            using (OperationLogic.AllowSave<PrintLineEntity>())
            {
                try
                {
                    PrintingLogic.Print?.Invoke(line);

                    line.State = PrintLineState.Printed;
                    line.PrintedOn = TimeZoneManager.Now;
                    line.Save();
                }
                catch (Exception ex)
                {
                    if (Transaction.InTestTransaction) //Transaction.IsTestTransaction
                        throw;

                    var exLog = ex.LogException().ToLite();

                    try
                    {
                        using (Transaction tr = Transaction.ForceNew())
                        {
                            line.State = PrintLineState.Error;
                            line.Save();

                            tr.Commit();
                        }
                    }
                    catch { } //error could be in database

                    throw;
                }
            }
        }
    }
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.ComponentModel.Composition.Hosting;
using System.Linq;
using System.Threading.Tasks;
using System.Xml.Linq;
using Microsoft.CodeAnalysis.Editor.Commands;
using Microsoft.CodeAnalysis.Editor.CSharp.CallHierarchy;
using Microsoft.CodeAnalysis.Editor.Host;
using Microsoft.CodeAnalysis.Editor.Implementation.CallHierarchy;
using Microsoft.CodeAnalysis.Editor.Implementation.Notification;
using Microsoft.CodeAnalysis.Editor.SymbolMapping;
using Microsoft.CodeAnalysis.Editor.UnitTests.Utilities;
using Microsoft.CodeAnalysis.Editor.UnitTests.Workspaces;
using Microsoft.CodeAnalysis.Notification;
using Microsoft.VisualStudio.Language.CallHierarchy;
using Microsoft.VisualStudio.Text;
using Microsoft.VisualStudio.Text.Editor;
using Roslyn.Utilities;
using Xunit;

namespace Microsoft.CodeAnalysis.Editor.UnitTests.CallHierarchy
{
    /// <summary>
    /// Test harness around <see cref="CallHierarchyCommandHandler"/>: builds a test
    /// workspace from markup, invokes the View Call Hierarchy command, and offers
    /// helpers to run/verify call-hierarchy searches synchronously.
    /// </summary>
    public class CallHierarchyTestState
    {
        private readonly CallHierarchyCommandHandler _commandHandler;
        private readonly MockCallHierarchyPresenter _presenter;
        internal TestWorkspace Workspace;
        private readonly ITextBuffer _subjectBuffer;
        private readonly IWpfTextView _textView;

        private class MockCallHierarchyPresenter : ICallHierarchyPresenter
        {
            // Last root handed to the presenter; read back by GetRoot().
            public CallHierarchyItem PresentedRoot;

            public void PresentRoot(CallHierarchyItem root)
            {
                this.PresentedRoot = root;
            }
        }

        private class MockSearchCallback : ICallHierarchySearchCallback
        {
            private readonly Action<CallHierarchyItem> _verifyMemberItem;
            // BUG FIX: created with RunContinuationsAsynchronously so the searching
            // thread that calls SetResult/SetException never inlines continuations
            // while the test thread blocks in WaitForCompletion().
            private readonly TaskCompletionSource<object> _completionSource = new TaskCompletionSource<object>(TaskCreationOptions.RunContinuationsAsynchronously);
            private readonly Action<ICallHierarchyNameItem> _verifyNameItem;

            // Exactly one verifier is set per instance; a search is expected to report
            // only the matching result flavor (member items vs name items).
            public MockSearchCallback(Action<CallHierarchyItem> verify)
            {
                _verifyMemberItem = verify;
            }

            public MockSearchCallback(Action<ICallHierarchyNameItem> verify)
            {
                _verifyNameItem = verify;
            }

            public void AddResult(ICallHierarchyNameItem item)
            {
                _verifyNameItem(item);
            }

            public void AddResult(ICallHierarchyMemberItem item)
            {
                _verifyMemberItem((CallHierarchyItem)item);
            }

            public void InvalidateResults()
            {
            }

            public void ReportProgress(int current, int maximum)
            {
            }

            public void SearchFailed(string message)
            {
                _completionSource.SetException(new Exception(message));
            }

            public void SearchSucceeded()
            {
                _completionSource.SetResult(null);
            }

            // Blocks the test thread until the search reports success or failure.
            internal void WaitForCompletion()
            {
                _completionSource.Task.Wait();
            }
        }

        /// <summary>Creates a state over an XML multi-project workspace definition.</summary>
        public static async Task<CallHierarchyTestState> CreateAsync(XElement markup, params Type[] additionalTypes)
        {
            var exportProvider = CreateExportProvider(additionalTypes);
            var workspace = await TestWorkspace.CreateAsync(markup, exportProvider: exportProvider);
            return new CallHierarchyTestState(workspace);
        }

        private CallHierarchyTestState(TestWorkspace workspace)
        {
            this.Workspace = workspace;
            // The document carrying the caret ($$) is the one the command targets.
            var testDocument = Workspace.Documents.Single(d => d.CursorPosition.HasValue);

            _textView = testDocument.GetTextView();
            _subjectBuffer = testDocument.GetTextBuffer();

            var provider = Workspace.GetService<CallHierarchyProvider>();

            // Capture notification messages so tests can assert on them.
            var notificationService = Workspace.Services.GetService<INotificationService>() as INotificationServiceCallback;
            var callback = new Action<string, string, NotificationSeverity>((message, title, severity) => NotificationMessage = message);
            notificationService.NotificationCallback = callback;

            _presenter = new MockCallHierarchyPresenter();
            _commandHandler = new CallHierarchyCommandHandler(new[] { _presenter }, provider, TestWaitIndicator.Default);
        }

        private static VisualStudio.Composition.ExportProvider CreateExportProvider(Type[] additionalTypes)
        {
            var catalog = TestExportProvider.MinimumCatalogWithCSharpAndVisualBasic
                .WithPart(typeof(CallHierarchyProvider))
                .WithPart(typeof(SymbolMappingServiceFactory))
                .WithPart(typeof(EditorNotificationServiceFactory))
                .WithParts(additionalTypes);
            return MinimalTestExportProvider.CreateExportProvider(catalog);
        }

        /// <summary>Creates a state over a single C# document given as markup.</summary>
        public static async Task<CallHierarchyTestState> CreateAsync(string markup, params Type[] additionalTypes)
        {
            var exportProvider = CreateExportProvider(additionalTypes);
            var workspace = await TestWorkspace.CreateCSharpAsync(markup, exportProvider: exportProvider);
            return new CallHierarchyTestState(markup, workspace);
        }

        // BUG FIX: this constructor previously duplicated the (TestWorkspace) constructor
        // verbatim and never used 'markup'; it now delegates. The parameter is retained
        // so the signature (and its callers) remain unchanged.
        private CallHierarchyTestState(string markup, TestWorkspace workspace)
            : this(workspace)
        {
        }

        internal string NotificationMessage
        {
            get;
            private set;
        }

        /// <summary>Runs the View Call Hierarchy command and returns the presented root.</summary>
        internal CallHierarchyItem GetRoot()
        {
            var args = new ViewCallHierarchyCommandArgs(_textView, _subjectBuffer);
            _commandHandler.ExecuteCommand(args, () => { });
            return _presenter.PresentedRoot;
        }

        /// <summary>Collects all solution documents whose names appear in <paramref name="documentNames"/>.</summary>
        internal IImmutableSet<Document> GetDocuments(string[] documentNames)
        {
            var selectedDocuments = new List<Document>();
            this.Workspace.CurrentSolution.Projects.Do(p => p.Documents.Where(d => documentNames.Contains(d.Name)).Do(d => selectedDocuments.Add(d)));
            return ImmutableHashSet.CreateRange<Document>(selectedDocuments);
        }

        /// <summary>Runs a search for member items, invoking <paramref name="verify"/> per result.</summary>
        internal void SearchRoot(CallHierarchyItem root, string displayName, Action<CallHierarchyItem> verify, CallHierarchySearchScope scope, IImmutableSet<Document> documents = null)
        {
            RunSearch(root, displayName, new MockSearchCallback(verify), scope, documents);
        }

        /// <summary>Runs a search for name items, invoking <paramref name="verify"/> per result.</summary>
        internal void SearchRoot(CallHierarchyItem root, string displayName, Action<ICallHierarchyNameItem> verify, CallHierarchySearchScope scope, IImmutableSet<Document> documents = null)
        {
            RunSearch(root, displayName, new MockSearchCallback(verify), scope, documents);
        }

        // Shared body of both SearchRoot overloads (previously duplicated): resolves the
        // search category by display name, starts the search and blocks until it finishes.
        private static void RunSearch(CallHierarchyItem root, string displayName, MockSearchCallback callback, CallHierarchySearchScope scope, IImmutableSet<Document> documents)
        {
            var category = root.SupportedSearchCategories.First(c => c.DisplayName == displayName).Name;
            if (documents != null)
            {
                root.StartSearchWithDocuments(category, scope, callback, documents);
            }
            else
            {
                root.StartSearch(category, scope, callback);
            }

            callback.WaitForCompletion();
        }

        /// <summary>Formats an item as "namespace.type.member" (parts omitted when empty).</summary>
        internal string ConvertToName(ICallHierarchyMemberItem root)
        {
            var name = root.MemberName;

            if (!string.IsNullOrEmpty(root.ContainingTypeName))
            {
                name = root.ContainingTypeName + "." + name;
            }

            if (!string.IsNullOrEmpty(root.ContainingNamespaceName))
            {
                name = root.ContainingNamespaceName + "." + name;
            }

            return name;
        }

        internal string ConvertToName(ICallHierarchyNameItem root)
        {
            return root.Name;
        }

        /// <summary>Asserts the root's qualified name and (optionally) its search categories.</summary>
        internal void VerifyRoot(CallHierarchyItem root, string name = "", string[] expectedCategories = null)
        {
            Assert.Equal(name, ConvertToName(root));

            if (expectedCategories != null)
            {
                var categories = root.SupportedSearchCategories.Select(s => s.DisplayName);
                foreach (var category in expectedCategories)
                {
                    Assert.Contains(category, categories);
                }
            }
        }

        /// <summary>Asserts every name-item result is one of <paramref name="expectedCallers"/>.</summary>
        internal void VerifyResultName(CallHierarchyItem root, string searchCategory, string[] expectedCallers, CallHierarchySearchScope scope = CallHierarchySearchScope.EntireSolution, IImmutableSet<Document> documents = null)
        {
            this.SearchRoot(root, searchCategory, (ICallHierarchyNameItem c) =>
                {
                    Assert.True(expectedCallers.Any());
                    Assert.True(expectedCallers.Contains(ConvertToName(c)));
                },
                scope,
                documents);
        }

        /// <summary>Asserts every member-item result is one of <paramref name="expectedCallers"/>.</summary>
        internal void VerifyResult(CallHierarchyItem root, string searchCategory, string[] expectedCallers, CallHierarchySearchScope scope = CallHierarchySearchScope.EntireSolution, IImmutableSet<Document> documents = null)
        {
            this.SearchRoot(root, searchCategory, (CallHierarchyItem c) =>
                {
                    Assert.True(expectedCallers.Any());
                    Assert.True(expectedCallers.Contains(ConvertToName(c)));
                },
                scope,
                documents);
        }

        /// <summary>
        /// Searches and, when the (last) result matches <paramref name="callSite"/>,
        /// navigates to its first detail or to the item itself.
        /// </summary>
        internal void Navigate(CallHierarchyItem root, string searchCategory, string callSite, CallHierarchySearchScope scope = CallHierarchySearchScope.EntireSolution, IImmutableSet<Document> documents = null)
        {
            CallHierarchyItem item = null;
            this.SearchRoot(root, searchCategory, (CallHierarchyItem c) => item = c, scope, documents);

            if (callSite == ConvertToName(item))
            {
                var detail = item.Details.FirstOrDefault();
                if (detail != null)
                {
                    detail.NavigateTo();
                }
                else
                {
                    item.NavigateTo();
                }
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.

using System;
using System.IO;
using System.Threading.Channels;
using System.Threading.Tasks;
using Microsoft.AspNetCore.SignalR.Tests;
using Microsoft.AspNetCore.Testing;
using Xunit;

namespace Microsoft.AspNetCore.SignalR.Client.Tests
{
    // This includes tests that verify HubConnection conforms to the Hub Protocol, without setting up a full server (even TestServer).
    // We can also have more control over the messages we send to HubConnection in order to ensure that protocol errors and other quirks
    // don't cause problems.
    public partial class HubConnectionTests
    {
        public class Protocol : VerifiableLoggedTest
        {
            [Fact]
            public async Task SendAsyncSendsANonBlockingInvocationMessage()
            {
                var connection = new TestConnection();
                var hubConnection = CreateHubConnection(connection);
                try
                {
                    await hubConnection.StartAsync().DefaultTimeout();

                    var invokeTask = hubConnection.SendAsync("Foo").DefaultTimeout();

                    var invokeMessage = await connection.ReadSentTextMessageAsync().DefaultTimeout();

                    // ReadSentTextMessageAsync strips off the record separator (because it has use it as a separator now that we use Pipelines)
                    // A fire-and-forget send carries no invocationId.
                    Assert.Equal("{\"type\":1,\"target\":\"Foo\",\"arguments\":[]}", invokeMessage);

                    await invokeTask.DefaultTimeout();
                }
                finally
                {
                    await hubConnection.DisposeAsync().DefaultTimeout();
                    await connection.DisposeAsync().DefaultTimeout();
                }
            }

            [Fact]
            public async Task ClientSendsHandshakeMessageWhenStartingConnection()
            {
                var connection = new TestConnection(autoHandshake: false);
                var hubConnection = CreateHubConnection(connection);
                try
                {
                    // We can't await StartAsync because it depends on the negotiate process!
                    var startTask = hubConnection.StartAsync();

                    var handshakeMessage = await connection.ReadHandshakeAndSendResponseAsync().DefaultTimeout();

                    // ReadSentTextMessageAsync strips off the record separator (because it has use it as a separator now that we use Pipelines)
                    Assert.Equal("{\"protocol\":\"json\",\"version\":1}", handshakeMessage);

                    await startTask.DefaultTimeout();
                }
                finally
                {
                    await hubConnection.DisposeAsync().DefaultTimeout();
                    await connection.DisposeAsync().DefaultTimeout();
                }
            }

            [Fact]
            public async Task InvalidHandshakeResponseCausesStartToFail()
            {
                using (StartVerifiableLog())
                {
                    var connection = new TestConnection(autoHandshake: false);
                    var hubConnection = CreateHubConnection(connection);
                    try
                    {
                        // We can't await StartAsync because it depends on the negotiate process!
                        var startTask = hubConnection.StartAsync();

                        await connection.ReadSentTextMessageAsync().DefaultTimeout();

                        // The client expects the first message to be a handshake response, but a handshake response doesn't have a "type".
                        await connection.ReceiveJsonMessage(new { type = "foo" }).DefaultTimeout();

                        var ex = await Assert.ThrowsAsync<InvalidDataException>(() => startTask).DefaultTimeout();

                        Assert.Equal("Expected a handshake response from the server.", ex.Message);
                    }
                    finally
                    {
                        await hubConnection.DisposeAsync().DefaultTimeout();
                        await connection.DisposeAsync().DefaultTimeout();
                    }
                }
            }

            [Fact]
            public async Task ClientIsOkayReceivingMinorVersionInHandshake()
            {
                // We're just testing that the client doesn't fail when a minor version is added to the handshake
                // The client doesn't actually use that version anywhere yet so there's nothing else to test at this time
                var connection = new TestConnection(autoHandshake: false);
                var hubConnection = CreateHubConnection(connection);
                try
                {
                    var startTask = hubConnection.StartAsync();
                    await connection.ReadHandshakeAndSendResponseAsync(56).DefaultTimeout();

                    await startTask.DefaultTimeout();
                }
                finally
                {
                    await hubConnection.DisposeAsync().DefaultTimeout();
                    await connection.DisposeAsync().DefaultTimeout();
                }
            }

            [Fact]
            public async Task InvokeSendsAnInvocationMessage()
            {
                var connection = new TestConnection();
                var hubConnection = CreateHubConnection(connection);
                try
                {
                    await hubConnection.StartAsync().DefaultTimeout();

                    var invokeTask = hubConnection.InvokeAsync("Foo");

                    var invokeMessage = await connection.ReadSentTextMessageAsync().DefaultTimeout();

                    // ReadSentTextMessageAsync strips off the record separator (because it has use it as a separator now that we use Pipelines)
                    Assert.Equal("{\"type\":1,\"invocationId\":\"1\",\"target\":\"Foo\",\"arguments\":[]}", invokeMessage);

                    // The invocation is not complete until a completion message arrives.
                    Assert.Equal(TaskStatus.WaitingForActivation, invokeTask.Status);
                }
                finally
                {
                    await hubConnection.DisposeAsync().DefaultTimeout();
                    await connection.DisposeAsync().DefaultTimeout();
                }
            }

            [Fact]
            public async Task ReceiveCloseMessageWithoutErrorWillCloseHubConnection()
            {
                var closedTcs = new TaskCompletionSource<Exception>();

                var connection = new TestConnection();
                var hubConnection = CreateHubConnection(connection);
                hubConnection.Closed += e =>
                {
                    closedTcs.SetResult(e);
                    return Task.CompletedTask;
                };

                try
                {
                    await hubConnection.StartAsync().DefaultTimeout();

                    // A close message (type 7) without an error closes the connection cleanly.
                    await connection.ReceiveJsonMessage(new { type = 7 }).DefaultTimeout();

                    var closeException = await closedTcs.Task.DefaultTimeout();
                    Assert.Null(closeException);
                }
                finally
                {
                    await hubConnection.DisposeAsync().DefaultTimeout();
                    await connection.DisposeAsync().DefaultTimeout();
                }
            }

            [Fact]
            public async Task ReceiveCloseMessageWithErrorWillCloseHubConnection()
            {
                var closedTcs = new TaskCompletionSource<Exception>();

                var connection = new TestConnection();
                var hubConnection = CreateHubConnection(connection);
                hubConnection.Closed += e =>
                {
                    closedTcs.SetResult(e);
                    return Task.CompletedTask;
                };

                try
                {
                    await hubConnection.StartAsync().DefaultTimeout();

                    await connection.ReceiveJsonMessage(new { type = 7, error = "Error!" }).DefaultTimeout();

                    var closeException = await closedTcs.Task.DefaultTimeout();
                    Assert.NotNull(closeException);
                    Assert.Equal("The server closed the connection with the following error: Error!", closeException.Message);
                }
                finally
                {
                    await hubConnection.DisposeAsync().DefaultTimeout();
                    // FIX: the connection was previously leaked here; every other test disposes it.
                    await connection.DisposeAsync().DefaultTimeout();
                }
            }

            [Fact]
            public async Task StreamSendsAnInvocationMessage()
            {
                var connection = new TestConnection();
                var hubConnection = CreateHubConnection(connection);
                try
                {
                    await hubConnection.StartAsync().DefaultTimeout();

                    var channel = await hubConnection.StreamAsChannelAsync<object>("Foo").DefaultTimeout();

                    var invokeMessage = await connection.ReadSentTextMessageAsync().DefaultTimeout();

                    // ReadSentTextMessageAsync strips off the record separator (because it has use it as a separator now that we use Pipelines)
                    // Stream invocations use message type 4.
                    Assert.Equal("{\"type\":4,\"invocationId\":\"1\",\"target\":\"Foo\",\"arguments\":[]}", invokeMessage);

                    // Complete the channel
                    await connection.ReceiveJsonMessage(new { invocationId = "1", type = 3 }).DefaultTimeout();
                    await channel.Completion.DefaultTimeout();
                }
                finally
                {
                    await hubConnection.DisposeAsync().DefaultTimeout();
                    await connection.DisposeAsync().DefaultTimeout();
                }
            }

            [Fact]
            public async Task InvokeCompletedWhenCompletionMessageReceived()
            {
                var connection = new TestConnection();
                var hubConnection = CreateHubConnection(connection);
                try
                {
                    await hubConnection.StartAsync().DefaultTimeout();

                    var invokeTask = hubConnection.InvokeAsync("Foo");

                    await connection.ReceiveJsonMessage(new { invocationId = "1", type = 3 }).DefaultTimeout();

                    await invokeTask.DefaultTimeout();
                }
                finally
                {
                    await hubConnection.DisposeAsync().DefaultTimeout();
                    await connection.DisposeAsync().DefaultTimeout();
                }
            }

            [Fact]
            public async Task StreamCompletesWhenCompletionMessageIsReceived()
            {
                var connection = new TestConnection();
                var hubConnection = CreateHubConnection(connection);
                try
                {
                    await hubConnection.StartAsync().DefaultTimeout();

                    var channel = await hubConnection.StreamAsChannelAsync<int>("Foo").DefaultTimeout();

                    await connection.ReceiveJsonMessage(new { invocationId = "1", type = 3 }).DefaultTimeout();

                    // Completion with no stream items yields an empty stream.
                    Assert.Empty(await channel.ReadAndCollectAllAsync().DefaultTimeout());
                }
                finally
                {
                    await hubConnection.DisposeAsync().DefaultTimeout();
                    await connection.DisposeAsync().DefaultTimeout();
                }
            }

            [Fact]
            public async Task InvokeYieldsResultWhenCompletionMessageReceived()
            {
                var connection = new TestConnection();
                var hubConnection = CreateHubConnection(connection);
                try
                {
                    await hubConnection.StartAsync().DefaultTimeout();

                    var invokeTask = hubConnection.InvokeAsync<int>("Foo");

                    await connection.ReceiveJsonMessage(new { invocationId = "1", type = 3, result = 42 }).DefaultTimeout();

                    Assert.Equal(42, await invokeTask.DefaultTimeout());
                }
                finally
                {
                    await hubConnection.DisposeAsync().DefaultTimeout();
                    await connection.DisposeAsync().DefaultTimeout();
                }
            }

            [Fact]
            public async Task InvokeFailsWithExceptionWhenCompletionWithErrorReceived()
            {
                var connection = new TestConnection();
                var hubConnection = CreateHubConnection(connection);
                try
                {
                    await hubConnection.StartAsync().DefaultTimeout();

                    var invokeTask = hubConnection.InvokeAsync<int>("Foo");

                    await connection.ReceiveJsonMessage(new { invocationId = "1", type = 3, error = "An error occurred" }).DefaultTimeout();

                    var ex = await Assert.ThrowsAsync<HubException>(() => invokeTask).DefaultTimeout();
                    Assert.Equal("An error occurred", ex.Message);
                }
                finally
                {
                    await hubConnection.DisposeAsync().DefaultTimeout();
                    await connection.DisposeAsync().DefaultTimeout();
                }
            }

            [Fact]
            public async Task StreamFailsIfCompletionMessageHasPayload()
            {
                var connection = new TestConnection();
                var hubConnection = CreateHubConnection(connection);
                try
                {
                    await hubConnection.StartAsync().DefaultTimeout();

                    var channel = await hubConnection.StreamAsChannelAsync<string>("Foo").DefaultTimeout();

                    // A streamed invocation must not carry a result in its completion message.
                    await connection.ReceiveJsonMessage(new { invocationId = "1", type = 3, result = "Oops" }).DefaultTimeout();

                    var ex = await Assert.ThrowsAsync<InvalidOperationException>(() => channel.ReadAndCollectAllAsync()).DefaultTimeout();
                    Assert.Equal("Server provided a result in a completion response to a streamed invocation.", ex.Message);
                }
                finally
                {
                    await hubConnection.DisposeAsync().DefaultTimeout();
                    await connection.DisposeAsync().DefaultTimeout();
                }
            }

            [Fact]
            public async Task StreamFailsWithExceptionWhenCompletionWithErrorReceived()
            {
                var connection = new TestConnection();
                var hubConnection = CreateHubConnection(connection);
                try
                {
                    await hubConnection.StartAsync().DefaultTimeout();

                    var channel = await hubConnection.StreamAsChannelAsync<int>("Foo").DefaultTimeout();

                    await connection.ReceiveJsonMessage(new { invocationId = "1", type = 3, error = "An error occurred" }).DefaultTimeout();

                    var ex = await Assert.ThrowsAsync<HubException>(async () => await channel.ReadAndCollectAllAsync()).DefaultTimeout();
                    Assert.Equal("An error occurred", ex.Message);
                }
                finally
                {
                    await hubConnection.DisposeAsync().DefaultTimeout();
                    await connection.DisposeAsync().DefaultTimeout();
                }
            }

            [Fact]
            public async Task InvokeFailsWithErrorWhenStreamingItemReceived()
            {
                var connection = new TestConnection();
                var hubConnection = CreateHubConnection(connection);
                try
                {
                    await hubConnection.StartAsync().DefaultTimeout();

                    var invokeTask = hubConnection.InvokeAsync<int>("Foo");

                    // A stream item (type 2) is not valid for a non-streaming invocation.
                    await connection.ReceiveJsonMessage(new { invocationId = "1", type = 2, item = 42 }).DefaultTimeout();

                    var ex = await Assert.ThrowsAsync<InvalidOperationException>(() => invokeTask).DefaultTimeout();
                    Assert.Equal("Streaming hub methods must be invoked with the 'HubConnection.StreamAsChannelAsync' method.", ex.Message);
                }
                finally
                {
                    await hubConnection.DisposeAsync().DefaultTimeout();
                    await connection.DisposeAsync().DefaultTimeout();
                }
            }

            [Fact]
            public async Task StreamYieldsItemsAsTheyArrive()
            {
                var connection = new TestConnection();
                var hubConnection = CreateHubConnection(connection);
                try
                {
                    await hubConnection.StartAsync().DefaultTimeout();

                    var channel = await hubConnection.StreamAsChannelAsync<string>("Foo").DefaultTimeout();

                    await connection.ReceiveJsonMessage(new { invocationId = "1", type = 2, item = "1" }).DefaultTimeout();
                    await connection.ReceiveJsonMessage(new { invocationId = "1", type = 2, item = "2" }).DefaultTimeout();
                    await connection.ReceiveJsonMessage(new { invocationId = "1", type = 2, item = "3" }).DefaultTimeout();
                    await connection.ReceiveJsonMessage(new { invocationId = "1", type = 3 }).DefaultTimeout();

                    var notifications = await channel.ReadAndCollectAllAsync().DefaultTimeout();

                    Assert.Equal(new[] { "1", "2", "3", }, notifications.ToArray());
                }
                finally
                {
                    await hubConnection.DisposeAsync().DefaultTimeout();
                    await connection.DisposeAsync().DefaultTimeout();
                }
            }

            [Fact]
            public async Task HandlerRegisteredWithOnIsFiredWhenInvocationReceived()
            {
                var connection = new TestConnection();
                var hubConnection = CreateHubConnection(connection);
                var handlerCalled = new TaskCompletionSource<object[]>();
                try
                {
                    await hubConnection.StartAsync().DefaultTimeout();

                    hubConnection.On<int, string, float>("Foo", (r1, r2, r3) => handlerCalled.TrySetResult(new object[] { r1, r2, r3 }));

                    var args = new object[] { 1, "Foo", 2.0f };
                    await connection.ReceiveJsonMessage(new { invocationId = "1", type = 1, target = "Foo", arguments = args }).DefaultTimeout();

                    Assert.Equal(args, await handlerCalled.Task.DefaultTimeout());
                }
                finally
                {
                    await hubConnection.DisposeAsync().DefaultTimeout();
                    await connection.DisposeAsync().DefaultTimeout();
                }
            }

            [Fact]
            public async Task HandlerIsRemovedProperlyWithOff()
            {
                var connection = new TestConnection();
                var hubConnection = CreateHubConnection(connection);
                var handlerCalled = new TaskCompletionSource<int>();
                try
                {
                    await hubConnection.StartAsync().DefaultTimeout();

                    hubConnection.On<int>("Foo", (val) =>
                    {
                        handlerCalled.TrySetResult(val);
                    });

                    hubConnection.Remove("Foo");
                    await connection.ReceiveJsonMessage(new { invocationId = "1", type = 1, target = "Foo", arguments = 1 }).DefaultTimeout();
                    var handlerTask = handlerCalled.Task;

                    // We expect the handler task to timeout since the handler has been removed with the call to Remove("Foo")
                    // FIX: Assert.ThrowsAsync returns a Task that must be awaited, otherwise the assertion never runs.
                    await Assert.ThrowsAsync<TimeoutException>(async () => await handlerTask.DefaultTimeout(2000));

                    // Ensure that the task from the WhenAny is not the handler task
                    Assert.False(handlerCalled.Task.IsCompleted);
                }
                finally
                {
                    await hubConnection.DisposeAsync().DefaultTimeout();
                    await connection.DisposeAsync().DefaultTimeout();
                }
            }

            [Fact]
            public async Task DisposingSubscriptionAfterCallingRemoveHandlerDoesntFail()
            {
                var connection = new TestConnection();
                var hubConnection = CreateHubConnection(connection);
                var handlerCalled = new TaskCompletionSource<int>();
                try
                {
                    await hubConnection.StartAsync().DefaultTimeout();

                    var subscription = hubConnection.On<int>("Foo", (val) =>
                    {
                        handlerCalled.TrySetResult(val);
                    });

                    hubConnection.Remove("Foo");
                    await connection.ReceiveJsonMessage(new { invocationId = "1", type = 1, target = "Foo", arguments = 1 }).DefaultTimeout();
                    var handlerTask = handlerCalled.Task;

                    // Disposing the subscription after Remove must not throw.
                    subscription.Dispose();

                    // We expect the handler task to timeout since the handler has been removed with the call to Remove("Foo")
                    // FIX: Assert.ThrowsAsync returns a Task that must be awaited, otherwise the assertion never runs.
                    await Assert.ThrowsAsync<TimeoutException>(async () => await handlerTask.DefaultTimeout(2000));

                    // Ensure that the task from the WhenAny is not the handler task
                    Assert.False(handlerCalled.Task.IsCompleted);
                }
                finally
                {
                    await hubConnection.DisposeAsync().DefaultTimeout();
                    await connection.DisposeAsync().DefaultTimeout();
                }
            }

            [Fact]
            public async Task AcceptsPingMessages()
            {
                var connection = new TestConnection();
                var hubConnection = CreateHubConnection(connection);

                try
                {
                    await hubConnection.StartAsync().DefaultTimeout();

                    // Send an invocation
                    var invokeTask = hubConnection.InvokeAsync("Foo");

                    // Receive the ping mid-invocation so we can see that the rest of the flow works fine
                    await connection.ReceiveJsonMessage(new { type = 6 }).DefaultTimeout();

                    // Receive a completion
                    await connection.ReceiveJsonMessage(new { invocationId = "1", type = 3 }).DefaultTimeout();

                    // Ensure the invokeTask completes properly
                    await invokeTask.DefaultTimeout();
                }
                finally
                {
                    await hubConnection.DisposeAsync().DefaultTimeout();
                    await connection.DisposeAsync().DefaultTimeout();
                }
            }

            [Fact]
            public async Task PartialHandshakeResponseWorks()
            {
                var connection = new TestConnection(autoHandshake: false);
                var hubConnection = CreateHubConnection(connection);
                try
                {
                    var task = hubConnection.StartAsync();

                    // Drip the handshake response in three fragments; start must not complete until
                    // the record separator (0x1e) arrives.
                    await connection.ReceiveTextAsync("{").DefaultTimeout();

                    Assert.False(task.IsCompleted);

                    await connection.ReceiveTextAsync("}").DefaultTimeout();

                    Assert.False(task.IsCompleted);

                    await connection.ReceiveTextAsync("\u001e").DefaultTimeout();

                    await task.DefaultTimeout();
                }
                finally
                {
                    await hubConnection.DisposeAsync().DefaultTimeout();
                    await connection.DisposeAsync().DefaultTimeout();
                }
            }

            [Fact]
            public async Task HandshakeAndInvocationInSameBufferWorks()
            {
                var payload = "{}\u001e{\"type\":1, \"target\": \"Echo\", \"arguments\":[\"hello\"]}\u001e";
                var connection = new TestConnection(autoHandshake: false);
                var hubConnection = CreateHubConnection(connection);
                try
                {
                    var tcs = new TaskCompletionSource<string>();
                    hubConnection.On<string>("Echo", data =>
                    {
                        tcs.TrySetResult(data);
                    });

                    // Queue the handshake response and an invocation in a single buffer before starting.
                    await connection.ReceiveTextAsync(payload).DefaultTimeout();

                    await hubConnection.StartAsync().DefaultTimeout();

                    var response = await tcs.Task.DefaultTimeout();

                    Assert.Equal("hello", response);
                }
                finally
                {
                    await hubConnection.DisposeAsync().DefaultTimeout();
                    await connection.DisposeAsync().DefaultTimeout();
                }
            }

            [Fact]
            public async Task PartialInvocationWorks()
            {
                var connection = new TestConnection();
                var hubConnection = CreateHubConnection(connection);
                try
                {
                    var tcs = new TaskCompletionSource<string>();
                    hubConnection.On<string>("Echo", data =>
                    {
                        tcs.TrySetResult(data);
                    });

                    await hubConnection.StartAsync().DefaultTimeout();

                    // Drip the invocation JSON in fragments; the handler must not fire until the
                    // record separator completes the frame.
                    await connection.ReceiveTextAsync("{\"type\":1, ").DefaultTimeout();

                    Assert.False(tcs.Task.IsCompleted);

                    await connection.ReceiveTextAsync("\"target\": \"Echo\", \"arguments\"").DefaultTimeout();

                    Assert.False(tcs.Task.IsCompleted);

                    await connection.ReceiveTextAsync(":[\"hello\"]}\u001e").DefaultTimeout();

                    var response = await tcs.Task.DefaultTimeout();

                    Assert.Equal("hello", response);
                }
                finally
                {
                    await hubConnection.DisposeAsync().DefaultTimeout();
                    await connection.DisposeAsync().DefaultTimeout();
                }
            }

            [Fact]
            public async Task ClientPingsMultipleTimes()
            {
                var connection = new TestConnection();
                var hubConnection = CreateHubConnection(connection);
                hubConnection.TickRate = TimeSpan.FromMilliseconds(30);
                hubConnection.KeepAliveInterval = TimeSpan.FromMilliseconds(80);

                try
                {
                    await hubConnection.StartAsync().DefaultTimeout();

                    var firstPing = await connection.ReadSentTextMessageAsync(ignorePings: false).DefaultTimeout();
                    Assert.Equal("{\"type\":6}", firstPing);

                    var secondPing = await connection.ReadSentTextMessageAsync(ignorePings: false).DefaultTimeout();
                    Assert.Equal("{\"type\":6}", secondPing);
                }
                finally
                {
                    await hubConnection.DisposeAsync().DefaultTimeout();
                    await connection.DisposeAsync().DefaultTimeout();
                }
            }

            [Fact]
            public async Task ClientWithInherentKeepAliveDoesNotPing()
            {
                var connection = new TestConnection(hasInherentKeepAlive: true);
                var hubConnection = CreateHubConnection(connection);
                hubConnection.TickRate = TimeSpan.FromMilliseconds(30);
                hubConnection.KeepAliveInterval = TimeSpan.FromMilliseconds(80);

                try
                {
                    await hubConnection.StartAsync().DefaultTimeout();

                    await Task.Delay(1000);

                    await hubConnection.DisposeAsync().DefaultTimeout();
                    await connection.DisposeAsync().DefaultTimeout();

                    // When the transport has an inherent keep-alive, the client sends no pings at all.
                    Assert.Equal(0, (await connection.ReadAllSentMessagesAsync(ignorePings: false).DefaultTimeout()).Count);
                }
                finally
                {
                    await hubConnection.DisposeAsync().DefaultTimeout();
                    await connection.DisposeAsync().DefaultTimeout();
                }
            }
        }
    }
}
// // System.Web.Services.Description.HttpSimpleProtocolImporter.cs // // Author: // Lluis Sanchez Gual (lluis@ximian.com) // // Copyright (C) 2003 Ximian, Inc. // // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
//

using System.CodeDom;
using System.Web.Services;
using System.Web.Services.Protocols;
using System.Web.Services.Configuration;
using System.Xml;
using System.Xml.Serialization;
using System.Configuration;
using System.Collections;

namespace System.Web.Services.Description
{
	// Base importer for the HTTP GET/POST (non-SOAP) WSDL bindings. Subclasses decide which
	// binding they support and which MIME formatters to use; this class generates the client
	// proxy class and its synchronous + Begin/End async methods via CodeDom.
	internal abstract class HttpSimpleProtocolImporter : ProtocolImporter
	{
		#region Fields

		HttpBinding httpBinding;            // the <http:binding> extension of the current Binding
		SoapCodeExporter soapExporter;      // NOTE(review): assigned nowhere in this class — appears unused here
		SoapSchemaImporter soapImporter;    // imports encoded-schema input messages
		XmlCodeExporter xmlExporter;        // exports mappings (and Include metadata) into the generated code
		XmlSchemaImporter xmlImporter;      // imports literal-schema output messages
		CodeIdentifiers memberIds;          // keeps generated member names unique within the proxy class
		XmlReflectionImporter xmlReflectionImporter;

		#endregion // Fields

		#region Constructors

		public HttpSimpleProtocolImporter ()
		{
		}

		#endregion // Constructors

		#region Methods

		// Creates the proxy class declaration and its constructor, which initializes the
		// service Url from the port's <http:address location="...">, when a port is present.
		protected override CodeTypeDeclaration BeginClass ()
		{
			httpBinding = (HttpBinding) Binding.Extensions.Find (typeof(HttpBinding));

			CodeTypeDeclaration codeClass = new CodeTypeDeclaration (ClassName);

			string location = null;
			if (Port != null) {
				HttpAddressBinding sab = (HttpAddressBinding) Port.Extensions.Find (typeof(HttpAddressBinding));
				if (sab != null) location = sab.Location;
			}

			CodeConstructor cc = new CodeConstructor ();
			cc.Attributes = MemberAttributes.Public;
			GenerateServiceUrl (location, cc.Statements);
			codeClass.Members.Add (cc);

			memberIds = new CodeIdentifiers ();
			return codeClass;
		}

		// Fresh importer/exporter state per namespace.
		protected override void BeginNamespace ()
		{
			xmlImporter = new XmlSchemaImporter (LiteralSchemas, ClassNames);
			soapImporter = new SoapSchemaImporter (EncodedSchemas, ClassNames);
			xmlExporter = new XmlCodeExporter (CodeNamespace, null);
			xmlReflectionImporter = new XmlReflectionImporter ();
		}

		// Copies any [XmlInclude]-style metadata collected during export onto the proxy class.
		protected override void EndClass ()
		{
			if (xmlExporter.IncludeMetadata.Count > 0)
			{
				if (CodeTypeDeclaration.CustomAttributes == null)
					CodeTypeDeclaration.CustomAttributes = new CodeAttributeDeclarationCollection ();
				CodeTypeDeclaration.CustomAttributes.AddRange (xmlExporter.IncludeMetadata);
			}
		}

		protected override void EndNamespace ()
		{
		}

		// Subclasses (GET/POST importers) must decide support; the base deliberately has no answer.
		protected override bool IsBindingSupported ()
		{
			throw new NotImplementedException ();
		}

		[MonoTODO]
		protected override bool IsOperationFlowSupported (OperationFlow flow)
		{
			throw new NotImplementedException ();
		}

		// Imports the in/out message mappings for the current operation and emits the proxy
		// method. Any failure is reported as an unsupported-operation warning (returns null)
		// rather than aborting the whole import.
		protected override CodeMemberMethod GenerateMethod ()
		{
			try
			{
				HttpOperationBinding httpOper = OperationBinding.Extensions.Find (typeof (HttpOperationBinding)) as HttpOperationBinding;
				if (httpOper == null) throw new Exception ("Http operation binding not found");

				XmlMembersMapping inputMembers = ImportInMembersMapping (InputMessage);
				XmlTypeMapping outputMember = ImportOutMembersMapping (OutputMessage);

				CodeMemberMethod met = GenerateMethod (memberIds, httpOper, inputMembers, outputMember);

				xmlExporter.ExportMembersMapping (inputMembers);
				if (outputMember != null) xmlExporter.ExportTypeMapping (outputMember);

				return met;
			}
			catch (Exception ex)
			{
				UnsupportedOperationBindingWarning (ex.Message);
				return null;
			}
		}

		// Maps each part of the input message to a SoapSchemaMember, then imports them as the
		// operation's parameter list.
		XmlMembersMapping ImportInMembersMapping (Message msg)
		{
			SoapSchemaMember[] mems = new SoapSchemaMember [msg.Parts.Count];
			for (int n=0; n<mems.Length; n++)
			{
				SoapSchemaMember mem = new SoapSchemaMember();
				mem.MemberName = msg.Parts[n].Name;
				mem.MemberType = msg.Parts[n].Type;
				mems[n] = mem;
			}
			return soapImporter.ImportMembersMapping (Operation.Name, "", mems);
		}

		// Imports the mapping for the (single) output part; returns null for a void operation,
		// and XmlNode for a raw "Body" part with no element.
		XmlTypeMapping ImportOutMembersMapping (Message msg)
		{
			if (msg.Parts.Count == 0) return null;
			if (msg.Parts[0].Name == "Body" && msg.Parts[0].Element == XmlQualifiedName.Empty)
				return xmlReflectionImporter.ImportTypeMapping (typeof(XmlNode));
			else {
				// This is a bit hacky. The issue is that types such as string[] are to be imported
				// as such, not as ArrayOfString class. ImportTypeMapping will return a
				// class if the type has not been imported as an array before, hence the
				// call to ImportMembersMapping.
				xmlImporter.ImportMembersMapping (new XmlQualifiedName[] {msg.Parts[0].Element});
				return xmlImporter.ImportTypeMapping (msg.Parts[0].Element);
			}
		}

		// Builds three CodeDom methods for one operation — the synchronous method plus the
		// Begin/End async pair — all delegating to the base Invoke/BeginInvoke/EndInvoke with
		// the method's URL (this.Url + operation location) and the boxed parameter array.
		CodeMemberMethod GenerateMethod (CodeIdentifiers memberIds, HttpOperationBinding httpOper, XmlMembersMapping inputMembers, XmlTypeMapping outputMember)
		{
			CodeIdentifiers pids = new CodeIdentifiers ();
			CodeMemberMethod method = new CodeMemberMethod ();
			CodeMemberMethod methodBegin = new CodeMemberMethod ();
			CodeMemberMethod methodEnd = new CodeMemberMethod ();
			method.Attributes = MemberAttributes.Public;
			methodBegin.Attributes = MemberAttributes.Public;
			methodEnd.Attributes = MemberAttributes.Public;

			// Find unique names for temporary variables
			for (int n=0; n<inputMembers.Count; n++)
				pids.AddUnique (inputMembers[n].MemberName, inputMembers[n]);

			string varAsyncResult = pids.AddUnique ("asyncResult","asyncResult");
			string varCallback = pids.AddUnique ("callback","callback");
			string varAsyncState = pids.AddUnique ("asyncState","asyncState");

			string messageName = memberIds.AddUnique(CodeIdentifier.MakeValid(Operation.Name),method);

			method.Name = Operation.Name;
			methodBegin.Name = memberIds.AddUnique(CodeIdentifier.MakeValid("Begin" + Operation.Name),method);
			methodEnd.Name = memberIds.AddUnique(CodeIdentifier.MakeValid("End" + Operation.Name),method);

			method.ReturnType = new CodeTypeReference (typeof(void));
			methodEnd.ReturnType = new CodeTypeReference (typeof(void));
			methodEnd.Parameters.Add (new CodeParameterDeclarationExpression (typeof (IAsyncResult),varAsyncResult));

			CodeExpression[] paramArray = new CodeExpression [inputMembers.Count];

			// Declare one input parameter per message part on both the sync and Begin methods.
			for (int n=0; n<inputMembers.Count; n++)
			{
				string ptype = GetSimpleType (inputMembers[n]);
				CodeParameterDeclarationExpression param = new CodeParameterDeclarationExpression (ptype, inputMembers[n].MemberName);
				param.Direction = FieldDirection.In;
				method.Parameters.Add (param);
				methodBegin.Parameters.Add (param);
				paramArray [n] = new CodeVariableReferenceExpression (param.Name);
			}

			bool isVoid = true;
			if (outputMember != null)
			{
				method.ReturnType = new CodeTypeReference (outputMember.TypeFullName);
				methodEnd.ReturnType = new CodeTypeReference (outputMember.TypeFullName);
				xmlExporter.AddMappingMetadata (method.ReturnTypeCustomAttributes, outputMember, "");
				isVoid = false;
			}

			methodBegin.Parameters.Add (new CodeParameterDeclarationExpression (typeof (AsyncCallback),varCallback));
			methodBegin.Parameters.Add (new CodeParameterDeclarationExpression (typeof (object),varAsyncState));
			methodBegin.ReturnType = new CodeTypeReference (typeof(IAsyncResult));

			// Array of input parameters
			CodeArrayCreateExpression methodParams;
			if (paramArray.Length > 0)
				methodParams = new CodeArrayCreateExpression (typeof(object), paramArray);
			else
				methodParams = new CodeArrayCreateExpression (typeof(object), 0);

			// Generate method url
			CodeThisReferenceExpression ethis = new CodeThisReferenceExpression();
			CodeExpression thisURlExp = new CodeFieldReferenceExpression (ethis, "Url");
			CodePrimitiveExpression metUrl = new CodePrimitiveExpression (httpOper.Location);
			CodeBinaryOperatorExpression expMethodLocation = new CodeBinaryOperatorExpression (thisURlExp, CodeBinaryOperatorType.Add, metUrl);

			// Invoke call
			CodePrimitiveExpression varMsgName = new CodePrimitiveExpression (messageName);
			CodeMethodInvokeExpression inv;

			inv = new CodeMethodInvokeExpression (ethis, "Invoke", varMsgName, expMethodLocation, methodParams);
			if (!isVoid)
				method.Statements.Add (new CodeMethodReturnStatement (new CodeCastExpression (method.ReturnType, inv)));
			else
				method.Statements.Add (inv);

			// Begin Invoke Call
			CodeExpression expCallb = new CodeVariableReferenceExpression (varCallback);
			CodeExpression expAsyncs = new CodeVariableReferenceExpression (varAsyncState);
			inv = new CodeMethodInvokeExpression (ethis, "BeginInvoke", varMsgName, expMethodLocation, methodParams, expCallb, expAsyncs);
			methodBegin.Statements.Add (new CodeMethodReturnStatement (inv));

			// End Invoke call
			CodeExpression varAsyncr = new CodeVariableReferenceExpression (varAsyncResult);
			inv = new CodeMethodInvokeExpression (ethis, "EndInvoke", varAsyncr);
			if (!isVoid)
				methodEnd.Statements.Add (new CodeMethodReturnStatement (new CodeCastExpression (methodEnd.ReturnType, inv)));
			else
				methodEnd.Statements.Add (inv);

			// Attributes
			// [HttpMethod(returnFormatter, parameterFormatter)] — formatters supplied by the subclass.
			CodeAttributeDeclaration att = new CodeAttributeDeclaration ("System.Web.Services.Protocols.HttpMethodAttribute");
			att.Arguments.Add (new CodeAttributeArgument (new CodeTypeOfExpression(GetOutMimeFormatter ())));
			att.Arguments.Add (new CodeAttributeArgument (new CodeTypeOfExpression(GetInMimeFormatter ())));
			AddCustomAttribute (method, att, true);

			CodeTypeDeclaration.Members.Add (method);
			CodeTypeDeclaration.Members.Add (methodBegin);
			CodeTypeDeclaration.Members.Add (methodEnd);

			return method;
		}

#if NET_2_0
		// Builds the this.InvokeAsync(...) call used by the 2.0 event-based async pattern,
		// with the same URL-concatenation scheme as the synchronous method.
		internal override CodeExpression BuildInvokeAsync (string messageName, CodeArrayCreateExpression paramsArray, CodeExpression delegateField, CodeExpression userStateVar)
		{
			HttpOperationBinding httpOper = OperationBinding.Extensions.Find (typeof (HttpOperationBinding)) as HttpOperationBinding;

			CodeThisReferenceExpression ethis = new CodeThisReferenceExpression();
			CodeExpression thisURlExp = new CodeFieldReferenceExpression (ethis, "Url");
			CodePrimitiveExpression metUrl = new CodePrimitiveExpression (httpOper.Location);
			CodeBinaryOperatorExpression expMethodLocation = new CodeBinaryOperatorExpression (thisURlExp, CodeBinaryOperatorType.Add, metUrl);

			CodeMethodInvokeExpression inv2 = new CodeMethodInvokeExpression (ethis, "InvokeAsync");
			inv2.Parameters.Add (new CodePrimitiveExpression (messageName));
			inv2.Parameters.Add (expMethodLocation);
			inv2.Parameters.Add (paramsArray);
			inv2.Parameters.Add (delegateField);
			inv2.Parameters.Add (userStateVar);

			return inv2;
		}
#endif

		// Subclasses provide the writer used to serialize parameters; null means none.
		protected virtual Type GetInMimeFormatter ()
		{
			return null;
		}

		// Chooses the reader for the response: XmlReturnReader when the output is declared as
		// mime:mimeXml or mime:content with type "text/xml"; otherwise a no-op reader.
		protected virtual Type GetOutMimeFormatter ()
		{
			if (OperationBinding.Output.Extensions.Find (typeof(MimeXmlBinding)) != null)
				return typeof (XmlReturnReader);

			MimeContentBinding bin = (MimeContentBinding) OperationBinding.Output.Extensions.Find (typeof(MimeContentBinding));
			if (bin != null && bin.Type == "text/xml")
				return typeof (XmlReturnReader);

			return typeof(NopReturnReader);
		}

		// MS seems to always use System.String for input parameters, except for byte[]
		// (hexBinary/base64Binary map to a string here; array types keep their rank as string[]).
		string GetSimpleType (XmlMemberMapping member)
		{
			switch (member.TypeName)
			{
				case "hexBinary":
				case "base64Binary":
					return "System.String";

				default:
					string ptype = member.TypeFullName;
					int i = ptype.IndexOf ('[');
					if (i == -1)
						return "System.String";
					else
						return "System.String" + ptype.Substring (i);
			}
		}

		#endregion
	}
}
using System;
using System.Globalization;
using System.Text;

namespace Org.BouncyCastle.Asn1
{
	/**
	 * UTC time object.
	 */
	public class DerUtcTime
		: Asn1Object
	{
		// Raw UTCTime string as read/constructed, e.g. "991231235959Z".
		private readonly string time;

		/**
		 * return an UTC Time from the passed in object.
		 *
		 * @exception ArgumentException if the object cannot be converted.
		 */
		public static DerUtcTime GetInstance(
			object obj)
		{
			// null and DerUtcTime instances pass through unchanged ((DerUtcTime)null is null).
			if (obj == null || obj is DerUtcTime)
			{
				return (DerUtcTime)obj;
			}

			if (obj is Asn1OctetString)
			{
				return new DerUtcTime(((Asn1OctetString)obj).GetOctets());
			}

			throw new ArgumentException("illegal object in GetInstance: " + obj.GetType().Name);
		}

		/**
		 * return an UTC Time from a tagged object.
		 *
		 * @param obj the tagged object holding the object we want
		 * @param explicitly true if the object is meant to be explicitly
		 *              tagged false otherwise.
		 * @exception ArgumentException if the tagged object cannot
		 *              be converted.
		 */
		public static DerUtcTime GetInstance(
			Asn1TaggedObject	obj,
			bool				explicitly)
		{
			return GetInstance(obj.GetObject());
		}

		/**
		 * The correct format for this is YYMMDDHHMMSSZ (it used to be that seconds were
		 * never encoded. When you're creating one of these objects from scratch, that's
		 * what you want to use, otherwise we'll try to deal with whatever Gets read from
		 * the input stream... (this is why the input format is different from the GetTime()
		 * method output).
		 * <p>
		 * @param time the time string.</p>
		 */
		public DerUtcTime(
			string time)
		{
			if (time == null)
				throw new ArgumentNullException("time");

			this.time = time;

			try
			{
				// Validate eagerly: ToDateTime throws FormatException on malformed input.
				ToDateTime();
			}
			catch (FormatException e)
			{
				// FIX: keep the original failure as the inner exception for diagnostics;
				// the message text is unchanged.
				throw new ArgumentException("invalid date string: " + e.Message, e);
			}
		}

		/**
		 * base constructor from a DateTime object
		 */
		public DerUtcTime(
			DateTime time)
		{
			// FIX: format with the invariant culture so the encoding does not depend on the
			// thread's current culture/calendar (a non-Gregorian default calendar would
			// otherwise produce an invalid UTCTime string).
			// NOTE(review): the value is assumed to already be in UTC; no conversion to
			// universal time is performed here — confirm at call sites.
			this.time = time.ToString("yyMMddHHmmss", CultureInfo.InvariantCulture) + "Z";
		}

		internal DerUtcTime(
			byte[] bytes)
		{
			//
			// explicitly convert to characters
			//
			this.time = Encoding.ASCII.GetString(bytes, 0, bytes.Length);
		}

		/**
		 * return the time as a date based on whatever a 2 digit year will return. For
		 * standardised processing use ToAdjustedDateTime().
		 *
		 * @return the resulting date
		 * @exception ParseException if the date string cannot be parsed.
		 */
		public DateTime ToDateTime()
		{
			return ParseDateString(TimeString, @"yyMMddHHmmss'GMT'zzz");
		}

		/**
		 * return the time as an adjusted date
		 * in the range of 1950 - 2049.
		 *
		 * @return a date in the range of 1950 to 2049.
		 * @exception ParseException if the date string cannot be parsed.
		 */
		public DateTime ToAdjustedDateTime()
		{
			return ParseDateString(AdjustedTimeString, @"yyyyMMddHHmmss'GMT'zzz");
		}

		// Parse with the invariant format info and normalise the result to UTC.
		private DateTime ParseDateString(
			string	dateStr,
			string	formatStr)
		{
			DateTime dt = DateTime.ParseExact(
				dateStr,
				formatStr,
				DateTimeFormatInfo.InvariantInfo);

			return dt.ToUniversalTime();
		}

		/**
		 * return the time - always in the form of
		 * YYMMDDhhmmssGMT(+hh:mm|-hh:mm).
		 * <p>
		 * Normally in a certificate we would expect "Z" rather than "GMT",
		 * however adding the "GMT" means we can just use:
		 * <pre>
		 *     dateF = new SimpleDateFormat("yyMMddHHmmssz");
		 * </pre>
		 * To read in the time and Get a date which is compatible with our local
		 * time zone.</p>
		 * <p>
		 * <b>Note:</b> In some cases, due to the local date processing, this
		 * may lead to unexpected results. If you want to stick the normal
		 * convention of 1950 to 2049 use the GetAdjustedTime() method.</p>
		 */
		public string TimeString
		{
			get
			{
				//
				// standardise the format.
				//
				if (time.IndexOf('-') < 0 && time.IndexOf('+') < 0)
				{
					// No explicit offset: "YYMMDDhhmm[ss]Z". Length 11 means seconds were
					// omitted — pad with "00".
					if (time.Length == 11)
					{
						return time.Substring(0, 10) + "00GMT+00:00";
					}
					else
					{
						return time.Substring(0, 12) + "GMT+00:00";
					}
				}
				else
				{
					// Explicit +hhmm / -hhmm offset; rewrite it as GMT+hh:mm / GMT-hh:mm,
					// padding missing minutes ("+hh" at the end) with "00".
					int index = time.IndexOf('-');
					if (index < 0)
					{
						index = time.IndexOf('+');
					}
					string d = time;
					if (index == time.Length - 3)
					{
						d += "00";
					}

					// Offset at position 10 means seconds were omitted — insert "00".
					if (index == 10)
					{
						return d.Substring(0, 10) + "00GMT" + d.Substring(10, 3) + ":" + d.Substring(13, 2);
					}
					else
					{
						return d.Substring(0, 12) + "GMT" + d.Substring(12, 3) + ":" + d.Substring(15, 2);
					}
				}
			}
		}

		[Obsolete("Use 'AdjustedTimeString' property instead")]
		public string AdjustedTime
		{
			get { return AdjustedTimeString; }
		}

		/// <summary>
		/// Return a time string as an adjusted date with a 4 digit year.
		/// This goes in the range of 1950 - 2049.
		/// </summary>
		public string AdjustedTimeString
		{
			get
			{
				string d = TimeString;
				// Two-digit years 00-49 map to 20xx, 50-99 to 19xx (RFC 5280 convention).
				string c = d[0] < '5' ? "20" : "19";

				return c + d;
			}
		}

		// The raw time string as ASCII octets — the encoded content of the UTCTime value.
		private byte[] GetOctets()
		{
			return Encoding.ASCII.GetBytes(time);
		}

		internal override void Encode(
			DerOutputStream derOut)
		{
			derOut.WriteEncoded(Asn1Tags.UtcTime, GetOctets());
		}

		// Equality is on the raw (unnormalised) time string.
		protected override bool Asn1Equals(
			Asn1Object asn1Object)
		{
			DerUtcTime other = asn1Object as DerUtcTime;

			if (other == null)
				return false;

			return this.time.Equals(other.time);
		}

		protected override int Asn1GetHashCode()
		{
			return time.GetHashCode();
		}

		public override string ToString()
		{
			return time;
		}
	}
}
// Copyright (c) Microsoft Open Technologies, Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System;
using Microsoft.Win32;
using System.Runtime.InteropServices;
using ComTypes = System.Runtime.InteropServices.ComTypes;
using System.Collections;
using System.IO;
using System.Windows.Forms;
using System.Diagnostics;
using System.Globalization;
using System.Reflection;
using System.Text;
using System.Threading;
using Microsoft.VisualStudio.Shell.Interop;
using System.Diagnostics.CodeAnalysis;

namespace Microsoft.VisualStudio.FSharp.ProjectSystem
{
    /// <summary>
    /// This type of node is used for references to COM components.
    /// </summary>
    [CLSCompliant(false)]
    [ComVisible(true)]
    public class ComReferenceNode : ReferenceNode
    {
        // Registration mode passed to LoadTypeLibEx; RegKind_None loads the type library
        // without touching the registry.
        private enum RegKind
        {
            RegKind_Default = 0,
            RegKind_Register = 1,
            RegKind_None = 2
        }

        [DllImport("oleaut32.dll", CharSet = CharSet.Unicode, PreserveSig = false)]
        private static extern void LoadTypeLibEx(string strTypeLibName, RegKind regKind,
            [MarshalAs(UnmanagedType.Interface)] out object typeLib);

        private string typeName;
        private Guid typeGuid;
        private string projectRelativeFilePath;
        private string installedFilePath;
        // Version numbers are stored as strings written with the invariant culture
        // (see the selector-data and TLB constructors).
        private string minorVersionNumber;
        private string majorVersionNumber;
        private readonly int lcid;

        public override string Caption
        {
            get { return this.typeName; }
        }

        public override string Url
        {
            get { return this.projectRelativeFilePath; }
        }

        /// <summary>
        /// Returns the Guid of the COM object.
        /// </summary>
        public Guid TypeGuid
        {
            get { return this.typeGuid; }
        }

        /// <summary>
        /// Returns the path where the COM object is installed.
        /// </summary>
        public string InstalledFilePath
        {
            get { return this.installedFilePath; }
        }

        [SuppressMessage("Microsoft.Naming", "CA1709:IdentifiersShouldBeCasedCorrectly", MessageId = "LCID")]
        public int LCID
        {
            get { return lcid; }
        }

        public int MajorVersionNumber
        {
            get
            {
                if (string.IsNullOrEmpty(majorVersionNumber))
                {
                    return 0;
                }
                // FIX: the stored string is written with the invariant culture, so parse
                // with the invariant culture too (was CurrentCulture — CA1305).
                return int.Parse(majorVersionNumber, CultureInfo.InvariantCulture);
            }
        }

        public int MinorVersionNumber
        {
            get
            {
                if (string.IsNullOrEmpty(minorVersionNumber))
                {
                    return 0;
                }
                // FIX: parse with the invariant culture for symmetry with how the value
                // is written (was CurrentCulture — CA1305).
                return int.Parse(minorVersionNumber, CultureInfo.InvariantCulture);
            }
        }

        private Automation.OAComReference comReference;

        // Lazily-created automation wrapper for this reference node.
        public override object Object
        {
            get
            {
                if (null == comReference)
                {
                    comReference = new Automation.OAComReference(this);
                }
                return comReference;
            }
        }

        /// <summary>
        /// Constructor for a node backed by an existing COMReference project element.
        /// </summary>
        internal ComReferenceNode(ProjectNode root, ProjectElement element)
            : base(root, element)
        {
            this.typeName = this.ItemNode.GetMetadata(ProjectFileConstants.Include);
            string typeGuidAsString = this.ItemNode.GetMetadata(ProjectFileConstants.Guid);
            if (typeGuidAsString != null)
            {
                this.typeGuid = new Guid(typeGuidAsString);
            }

            this.majorVersionNumber = this.ItemNode.GetMetadata(ProjectFileConstants.VersionMajor);
            this.minorVersionNumber = this.ItemNode.GetMetadata(ProjectFileConstants.VersionMinor);
            // FIX: the Lcid metadata is machine-written — parse with the invariant culture.
            // NOTE(review): missing/empty Lcid metadata still throws FormatException here,
            // as it did before — confirm the schema guarantees the value is present.
            this.lcid = int.Parse(this.ItemNode.GetMetadata(ProjectFileConstants.Lcid), CultureInfo.InvariantCulture);
            this.SetProjectItemsThatRelyOnReferencesToBeResolved(false);
            this.SetInstalledFilePath();
        }

        /// <summary>
        /// Overloaded constructor for creating a ComReferenceNode from selector data
        /// </summary>
        /// <param name="root">The Project node</param>
        /// <param name="selectorData">The component selctor data.</param>
        internal ComReferenceNode(ProjectNode root, VSCOMPONENTSELECTORDATA selectorData)
            : base(root)
        {
            if (root == null)
            {
                throw new ArgumentNullException("root");
            }
            // Project references and COM+ components are handled by other node types.
            if (selectorData.type == VSCOMPONENTTYPE.VSCOMPONENTTYPE_Project
                || selectorData.type == VSCOMPONENTTYPE.VSCOMPONENTTYPE_ComPlus)
            {
                throw new ArgumentException();
            }

            // Initialize private state
            this.typeName = selectorData.bstrTitle;
            this.typeGuid = selectorData.guidTypeLibrary;
            this.majorVersionNumber = selectorData.wTypeLibraryMajorVersion.ToString(CultureInfo.InvariantCulture);
            this.minorVersionNumber = selectorData.wTypeLibraryMinorVersion.ToString(CultureInfo.InvariantCulture);
            this.lcid = (int)selectorData.lcidTypeLibrary;

            // Check to see if the COM object actually exists.
            this.SetInstalledFilePath();
            // If the value cannot be set throw.
            if (String.IsNullOrEmpty(this.installedFilePath))
            {
                var message = string.Format(SR.GetString(SR.ReferenceCouldNotBeAdded, CultureInfo.CurrentUICulture), selectorData.bstrTitle);
                throw new InvalidOperationException(message);
            }
        }

        // Create a ComReferenceNode via a string to a TLB
        internal ComReferenceNode(ProjectNode root, string filePath)
            : base(root)
        {
            object otypeLib = null;
            ComTypes.ITypeLib typeLib = null;
            IntPtr ptrToLibAttr = IntPtr.Zero;
            try
            {
                LoadTypeLibEx(filePath, RegKind.RegKind_None, out otypeLib);
                typeLib = (ComTypes.ITypeLib)otypeLib;
                if (typeLib == null)
                {
                    throw new ArgumentException();
                }
                ComTypes.TYPELIBATTR typeAttr = new ComTypes.TYPELIBATTR();
                typeLib.GetLibAttr(out ptrToLibAttr);
                typeAttr = (ComTypes.TYPELIBATTR)Marshal.PtrToStructure(ptrToLibAttr, typeAttr.GetType());

                // Initialize state
                this.typeGuid = typeAttr.guid;
                this.majorVersionNumber = typeAttr.wMajorVerNum.ToString(CultureInfo.InvariantCulture);
                this.minorVersionNumber = typeAttr.wMinorVerNum.ToString(CultureInfo.InvariantCulture);
                this.lcid = typeAttr.lcid;

                // Check to see if the COM object actually exists.
                this.SetInstalledFilePath();
                // If the value cannot be set throw.
                if (String.IsNullOrEmpty(this.installedFilePath))
                {
                    var message = string.Format(SR.GetString(SR.ReferenceCouldNotBeAdded, CultureInfo.CurrentUICulture), filePath);
                    throw new InvalidOperationException(message);
                }
            }
            finally
            {
                // FIX: only release the TYPELIBATTR when GetLibAttr actually succeeded.
                // Previously ReleaseTLibAttr was called with IntPtr.Zero whenever
                // LoadTypeLibEx/GetLibAttr threw after typeLib was assigned, which passes
                // an invalid pointer to the COM object.
                if (typeLib != null && ptrToLibAttr != IntPtr.Zero)
                {
                    typeLib.ReleaseTLibAttr(ptrToLibAttr);
                }
            }
        }

        /// <summary>
        /// Links a reference node to the project and hierarchy.
        /// </summary>
        public override void BindReferenceData()
        {
            Debug.Assert(this.ItemNode != null, "The AssemblyName field has not been initialized");

            // We need to create the project element at this point if it has not been created.
            // We cannot do that from the ctor if input comes from a component selector data, since had we been doing that we would have added a project element to the project file.
            // The problem with that approach is that we would need to remove the project element if the item cannot be added to the hierachy (E.g. It already exists).
            // It is just safer to update the project file now. This is the intent of this method.
            // Call MSBuild to build the target ResolveComReferences
            if (this.ItemNode == null || this.ItemNode.Item == null)
            {
                this.ItemNode = this.GetProjectElementBasedOnInputFromComponentSelectorData();
            }

            this.SetProjectItemsThatRelyOnReferencesToBeResolved(true);
        }

        /// <summary>
        /// Checks if a reference is already added. The method parses all references and compares the the FinalItemSpec and the Guid.
        /// </summary>
        /// <returns>true if the assembly has already been added.</returns>
        public override bool IsAlreadyAdded(out ReferenceNode existingNode)
        {
            ReferenceContainerNode referencesFolder = this.ProjectMgr.FindChild(ReferenceContainerNode.ReferencesNodeVirtualName) as ReferenceContainerNode;
            Debug.Assert(referencesFolder != null, "Could not find the References node");

            for (HierarchyNode n = referencesFolder.FirstChild; n != null; n = n.NextSibling)
            {
                if (n is ComReferenceNode)
                {
                    ComReferenceNode referenceNode = n as ComReferenceNode;

                    // We check if the name and guids are the same
                    if (referenceNode.TypeGuid == this.TypeGuid && String.Compare(referenceNode.Caption, this.Caption, StringComparison.OrdinalIgnoreCase) == 0)
                    {
                        existingNode = referenceNode;
                        return true;
                    }
                }
            }

            existingNode = null;
            return false;
        }

        /// <summary>
        /// Determines if this is node a valid node for painting the default reference icon.
        /// </summary>
        /// <returns></returns>
        public override bool CanShowDefaultIcon()
        {
            return !String.IsNullOrEmpty(this.installedFilePath);
        }

        /// <summary>
        /// This is an helper method to convert the VSCOMPONENTSELECTORDATA recieved by the
        /// implementer of IVsComponentUser into a ProjectElement that can be used to create
        /// an instance of this class.
        /// This should not be called for project reference or reference to managed assemblies.
        /// </summary>
        /// <returns>ProjectElement corresponding to the COM component passed in</returns>
        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Globalization", "CA1308:NormalizeStringsToUppercase")]
        private ProjectElement GetProjectElementBasedOnInputFromComponentSelectorData()
        {
            ProjectElement element = new ProjectElement(this.ProjectMgr, this.typeName, ProjectFileConstants.COMReference);

            // Set the basic information regarding this COM component
            element.SetMetadata(ProjectFileConstants.Guid, this.typeGuid.ToString("B"));
            element.SetMetadata(ProjectFileConstants.VersionMajor, this.majorVersionNumber);
            element.SetMetadata(ProjectFileConstants.VersionMinor, this.minorVersionNumber);
            // FIX: write the lcid with the invariant culture so the metadata round-trips
            // with the invariant parse in the project-element constructor (CA1305).
            element.SetMetadata(ProjectFileConstants.Lcid, this.lcid.ToString(CultureInfo.InvariantCulture));
            element.SetMetadata(ProjectFileConstants.Isolated, false.ToString());

            // See if a PIA exist for this component
            TypeLibConverter typelib = new TypeLibConverter();
            string assemblyName;
            string assemblyCodeBase;
            if (typelib.GetPrimaryInteropAssembly(this.typeGuid, Int32.Parse(this.majorVersionNumber, CultureInfo.InvariantCulture), Int32.Parse(this.minorVersionNumber, CultureInfo.InvariantCulture), this.lcid, out assemblyName, out assemblyCodeBase))
            {
                element.SetMetadata(ProjectFileConstants.WrapperTool, WrapperToolAttributeValue.Primary.ToString().ToLowerInvariant());
            }
            else
            {
                // MSBuild will have to generate an interop assembly
                element.SetMetadata(ProjectFileConstants.WrapperTool, WrapperToolAttributeValue.TlbImp.ToString().ToLowerInvariant());
                element.SetMetadata(ProjectFileConstants.Private, true.ToString());
            }
            return element;
        }

        /// <summary>
        /// Runs the ResolveComReferences target and binds this node to the generated
        /// wrapper item that matches this component's guid/version/lcid, setting
        /// projectRelativeFilePath (and optionally renaming the item node).
        /// </summary>
        private void SetProjectItemsThatRelyOnReferencesToBeResolved(bool renameItemNode)
        {
            // Call MSBuild to build the target ResolveComReferences
            bool success;
            ErrorHandler.ThrowOnFailure(this.ProjectMgr.BuildTarget(MsBuildTarget.ResolveComReferences, out success));
            if (!success)
                throw new InvalidOperationException();

            // Now loop through the generated COM References to find the corresponding one
            var instance = this.ProjectMgr.BuildProject.CreateProjectInstance();
            AssemblyReferenceNode.BuildInstance(this.ProjectMgr, ref instance, MsBuildTarget.ResolveAssemblyReferences);

            var comReferences = instance.GetItems(MsBuildGeneratedItemType.ComReferenceWrappers);
            foreach (var reference in comReferences)
            {
                if (String.Compare(MSBuildItem.GetMetadataValue(reference, ProjectFileConstants.Guid), this.typeGuid.ToString("B"), StringComparison.OrdinalIgnoreCase) == 0
                    && String.Compare(MSBuildItem.GetMetadataValue(reference, ProjectFileConstants.VersionMajor), this.majorVersionNumber, StringComparison.OrdinalIgnoreCase) == 0
                    && String.Compare(MSBuildItem.GetMetadataValue(reference, ProjectFileConstants.VersionMinor), this.minorVersionNumber, StringComparison.OrdinalIgnoreCase) == 0
                    // FIX: format the lcid with the invariant culture to match the
                    // invariant-formatted metadata being compared against (CA1305).
                    && String.Compare(MSBuildItem.GetMetadataValue(reference, ProjectFileConstants.Lcid), this.lcid.ToString(CultureInfo.InvariantCulture), StringComparison.OrdinalIgnoreCase) == 0)
                {
                    string name = MSBuildItem.GetEvaluatedInclude(reference);
                    if (Path.IsPathRooted(name))
                    {
                        this.projectRelativeFilePath = name;
                    }
                    else
                    {
                        this.projectRelativeFilePath = Path.Combine(this.ProjectMgr.ProjectFolder, name);
                    }

                    if (renameItemNode)
                    {
                        this.ItemNode.Rename(Path.GetFileNameWithoutExtension(name));
                    }
                    break;
                }
            }
        }

        /// <summary>
        /// Verify that the TypeLib is registered and set the the installed file path of the com reference.
        /// </summary>
        /// <returns></returns>
        private void SetInstalledFilePath()
        {
            int major = this.MajorVersionNumber;
            int minor = this.MinorVersionNumber;
            // Registered type libraries live under HKCR\TYPELIB\{guid}\major.minor (hex).
            string registryPath = string.Format(CultureInfo.InvariantCulture, @"TYPELIB\{0}\{1}.{2}", this.typeGuid.ToString("B"), major.ToString("x"), minor.ToString("x"));
            using (RegistryKey typeLib = Registry.ClassesRoot.OpenSubKey(registryPath))
            {
                if (typeLib != null)
                {
                    // Check if we need to set the name for this type.
                    if (string.IsNullOrEmpty(this.typeName))
                    {
                        this.typeName = typeLib.GetValue(string.Empty) as string;
                    }
                    // Now get the path to the file that contains this type library.
                    // lcid
                    //  The hexadecimal string representation of the locale identifier (LCID).
                    //  It is one to four hexadecimal digits with no 0x prefix and no leading zeros.
                    using (RegistryKey installKey = typeLib.OpenSubKey(string.Format(CultureInfo.InvariantCulture, @"{0:X}\win32", this.lcid)))
                    {
                        if (installKey != null)
                        {
                            this.installedFilePath = installKey.GetValue(String.Empty) as String;
                        }
                    }
                }
            }
        }
    }
}
#region --- License ---
/* Copyright (c) 2006 - 2008 The Open Toolkit library.
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
associated documentation files (the "Software"), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge, publish, distribute,
sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
#endregion --- License ---

using System;
using System.Runtime.InteropServices;

namespace MatterHackers.VectorMath
{
    /// <summary>Represents a 4D vector using four double-precision floating-point numbers.</summary>
    [Serializable]
    [StructLayout(LayoutKind.Sequential)]
    public struct Vector4 : IEquatable<Vector4>
    {
        #region Fields

        /// <summary>
        /// The X component of the Vector4d.
        /// </summary>
        public double x;

        /// <summary>
        /// The Y component of the Vector4d.
        /// </summary>
        public double y;

        /// <summary>
        /// The Z component of the Vector4d.
        /// </summary>
        public double z;

        /// <summary>
        /// The W component of the Vector4d.
        /// </summary>
        public double w;

        // NOTE(review): the unit/zero vectors below are mutable public statics (unlike
        // 'One', which is readonly) — a caller could overwrite them globally. Preserved
        // as-is; consider making them readonly.

        /// <summary>
        /// Defines a unit-length Vector4d that points towards the X-axis.
        /// </summary>
        public static Vector4 UnitX = new Vector4(1, 0, 0, 0);

        /// <summary>
        /// Defines a unit-length Vector4d that points towards the Y-axis.
        /// </summary>
        public static Vector4 UnitY = new Vector4(0, 1, 0, 0);

        /// <summary>
        /// Defines a unit-length Vector4d that points towards the Z-axis.
        /// </summary>
        public static Vector4 UnitZ = new Vector4(0, 0, 1, 0);

        /// <summary>
        /// Defines a unit-length Vector4d that points towards the W-axis.
        /// </summary>
        public static Vector4 UnitW = new Vector4(0, 0, 0, 1);

        /// <summary>
        /// Defines a zero-length Vector4d.
        /// </summary>
        public static Vector4 Zero = new Vector4(0, 0, 0, 0);

        /// <summary>
        /// Defines an instance with all components set to 1.
        /// </summary>
        public static readonly Vector4 One = new Vector4(1, 1, 1, 1);

        /// <summary>
        /// Defines the size of the Vector4d struct in bytes.
        /// </summary>
        public static readonly int SizeInBytes = Marshal.SizeOf(new Vector4());

        #endregion Fields

        #region Constructors

        /// <summary>
        /// Constructs a new Vector4d.
        /// </summary>
        /// <param name="x">The x component of the Vector4d.</param>
        /// <param name="y">The y component of the Vector4d.</param>
        /// <param name="z">The z component of the Vector4d.</param>
        /// <param name="w">The w component of the Vector4d.</param>
        public Vector4(double x, double y, double z, double w)
        {
            this.x = x;
            this.y = y;
            this.z = z;
            this.w = w;
        }

        /// <summary>
        /// Constructs a new Vector4d from the given Vector2d.
        /// Z and W are initialized to zero.
        /// </summary>
        /// <param name="v">The Vector2d to copy components from.</param>
        public Vector4(Vector2 v)
        {
            x = v.x;
            y = v.y;
            z = 0.0f;
            w = 0.0f;
        }

        /// <summary>
        /// Constructs a new Vector4d from the given Vector3d.
        /// W is initialized to zero.
        /// </summary>
        /// <param name="v">The Vector3d to copy components from.</param>
        public Vector4(Vector3 v)
        {
            x = v.x;
            y = v.y;
            z = v.z;
            w = 0.0f;
        }

        /// <summary>
        /// Constructs a new Vector4d from the specified Vector3d and w component.
        /// </summary>
        /// <param name="v">The Vector3d to copy components from.</param>
        /// <param name="w">The w component of the new Vector4.</param>
        public Vector4(Vector3 v, double w)
        {
            x = v.x;
            y = v.y;
            z = v.z;
            this.w = w;
        }

        /// <summary>
        /// Constructs a new Vector4d from the given Vector4d.
        /// </summary>
        /// <param name="v">The Vector4d to copy components from.</param>
        public Vector4(Vector4 v)
        {
            x = v.x;
            y = v.y;
            z = v.z;
            w = v.w;
        }

        #endregion Constructors

        #region Public Members

        #region Properties

        // Component access by index: 0=x, 1=y, 2=z, 3=w.
        public double this[int index]
        {
            get
            {
                switch (index)
                {
                    case 0:
                        return x;

                    case 1:
                        return y;

                    case 2:
                        return z;

                    case 3:
                        return w;

                    default:
                        // NOTE(review): out-of-range reads silently return 0 while
                        // out-of-range writes throw — asymmetric, but preserved as-is.
                        return 0;
                }
            }

            set
            {
                switch (index)
                {
                    case 0:
                        x = value;
                        break;

                    case 1:
                        y = value;
                        break;

                    case 2:
                        z = value;
                        break;

                    case 3:
                        w = value;
                        break;

                    default:
                        throw new Exception();
                }
            }
        }

        #endregion Properties

        #region Instance

        #region public double Length

        /// <summary>
        /// Gets the length (magnitude) of the vector.
        /// </summary>
        /// <see cref="LengthFast"/>
        /// <seealso cref="LengthSquared"/>
        public double Length
        {
            get
            {
                return System.Math.Sqrt(x * x + y * y + z * z + w * w);
            }
        }

        #endregion public double Length

        #region public double LengthSquared

        /// <summary>
        /// Gets the square of the vector length (magnitude).
        /// </summary>
        /// <remarks>
        /// This property avoids the costly square root operation required by the Length property. This makes it more suitable
        /// for comparisons.
        /// </remarks>
        /// <see cref="Length"/>
        public double LengthSquared
        {
            get
            {
                return x * x + y * y + z * z + w * w;
            }
        }

        #endregion public double LengthSquared

        #region public void Normalize()

        /// <summary>
        /// Scales the Vector4d to unit length.
        /// No zero-length guard: a zero vector normalizes to NaN components.
        /// </summary>
        public void Normalize()
        {
            double scale = 1.0 / this.Length;
            x *= scale;
            y *= scale;
            z *= scale;
            w *= scale;
        }

        #endregion public void Normalize()

        #endregion Instance

        #region Static

        #region Add

        /// <summary>
        /// Adds two vectors.
        /// </summary>
        /// <param name="a">Left operand.</param>
        /// <param name="b">Right operand.</param>
        /// <returns>Result of operation.</returns>
        public static Vector4 Add(Vector4 a, Vector4 b)
        {
            Add(ref a, ref b, out a);
            return a;
        }

        /// <summary>
        /// Adds two vectors.
        /// </summary>
        /// <param name="a">Left operand.</param>
        /// <param name="b">Right operand.</param>
        /// <param name="result">Result of operation.</param>
        public static void Add(ref Vector4 a, ref Vector4 b, out Vector4 result)
        {
            result = new Vector4(a.x + b.x, a.y + b.y, a.z + b.z, a.w + b.w);
        }

        #endregion Add

        #region Subtract

        /// <summary>
        /// Subtract one Vector from another
        /// </summary>
        /// <param name="a">First operand</param>
        /// <param name="b">Second operand</param>
        /// <returns>Result of subtraction</returns>
        public static Vector4 Subtract(Vector4 a, Vector4 b)
        {
            Subtract(ref a, ref b, out a);
            return a;
        }

        /// <summary>
        /// Subtract one Vector from another
        /// </summary>
        /// <param name="a">First operand</param>
        /// <param name="b">Second operand</param>
        /// <param name="result">Result of subtraction</param>
        public static void Subtract(ref Vector4 a, ref Vector4 b, out Vector4 result)
        {
            result = new Vector4(a.x - b.x, a.y - b.y, a.z - b.z, a.w - b.w);
        }

        #endregion Subtract

        #region Multiply

        /// <summary>
        /// Multiplies a vector by a scalar.
        /// </summary>
        /// <param name="vector">Left operand.</param>
        /// <param name="scale">Right operand.</param>
        /// <returns>Result of the operation.</returns>
        public static Vector4 Multiply(Vector4 vector, double scale)
        {
            Multiply(ref vector, scale, out vector);
            return vector;
        }

        /// <summary>
        /// Multiplies a vector by a scalar.
        /// </summary>
        /// <param name="vector">Left operand.</param>
        /// <param name="scale">Right operand.</param>
        /// <param name="result">Result of the operation.</param>
        public static void Multiply(ref Vector4 vector, double scale, out Vector4 result)
        {
            result = new Vector4(vector.x * scale, vector.y * scale, vector.z * scale, vector.w * scale);
        }

        /// <summary>
        /// Multiplies a vector by the components of a vector (scale).
        /// </summary>
        /// <param name="vector">Left operand.</param>
        /// <param name="scale">Right operand.</param>
        /// <returns>Result of the operation.</returns>
        public static Vector4 Multiply(Vector4 vector, Vector4 scale)
        {
            Multiply(ref vector, ref scale, out vector);
            return vector;
        }

        /// <summary>
        /// Multiplies a vector by the components of a vector (scale).
        /// </summary>
        /// <param name="vector">Left operand.</param>
        /// <param name="scale">Right operand.</param>
        /// <param name="result">Result of the operation.</param>
        public static void Multiply(ref Vector4 vector, ref Vector4 scale, out Vector4 result)
        {
            result = new Vector4(vector.x * scale.x, vector.y * scale.y, vector.z * scale.z, vector.w * scale.w);
        }

        #endregion Multiply

        #region Divide

        /// <summary>
        /// Divides a vector by a scalar.
        /// </summary>
        /// <param name="vector">Left operand.</param>
        /// <param name="scale">Right operand.</param>
        /// <returns>Result of the operation.</returns>
        public static Vector4 Divide(Vector4 vector, double scale)
        {
            Divide(ref vector, scale, out vector);
            return vector;
        }

        /// <summary>
        /// Divides a vector by a scalar.
        /// Implemented as a multiply by the reciprocal of <paramref name="scale"/>.
        /// </summary>
        /// <param name="vector">Left operand.</param>
        /// <param name="scale">Right operand.</param>
        /// <param name="result">Result of the operation.</param>
        public static void Divide(ref Vector4 vector, double scale, out Vector4 result)
        {
            Multiply(ref vector, 1 / scale, out result);
        }

        /// <summary>
        /// Divides a vector by the components of a vector (scale).
        /// </summary>
        /// <param name="vector">Left operand.</param>
        /// <param name="scale">Right operand.</param>
        /// <returns>Result of the operation.</returns>
        public static Vector4 Divide(Vector4 vector, Vector4 scale)
        {
            Divide(ref vector, ref scale, out vector);
            return vector;
        }

        /// <summary>
        /// Divide a vector by the components of a vector (scale).
        /// </summary>
        /// <param name="vector">Left operand.</param>
        /// <param name="scale">Right operand.</param>
        /// <param name="result">Result of the operation.</param>
        public static void Divide(ref Vector4 vector, ref Vector4 scale, out Vector4 result)
        {
            result = new Vector4(vector.x / scale.x, vector.y / scale.y, vector.z / scale.z, vector.w / scale.w);
        }

        #endregion Divide

        #region Min

        /// <summary>
        /// Calculate the component-wise minimum of two vectors
        /// </summary>
        /// <param name="a">First operand</param>
        /// <param name="b">Second operand</param>
        /// <returns>The component-wise minimum</returns>
        public static Vector4 Min(Vector4 a, Vector4 b)
        {
            a.x = a.x < b.x ? a.x : b.x;
            a.y = a.y < b.y ? a.y : b.y;
            a.z = a.z < b.z ? a.z : b.z;
            a.w = a.w < b.w ? a.w : b.w;
            return a;
        }

        /// <summary>
        /// Calculate the component-wise minimum of two vectors
        /// </summary>
        /// <param name="a">First operand</param>
        /// <param name="b">Second operand</param>
        /// <param name="result">The component-wise minimum</param>
        public static void Min(ref Vector4 a, ref Vector4 b, out Vector4 result)
        {
            result.x = a.x < b.x ? a.x : b.x;
            result.y = a.y < b.y ? a.y : b.y;
            result.z = a.z < b.z ? a.z : b.z;
            result.w = a.w < b.w ? a.w : b.w;
        }

        #endregion Min

        #region Max

        /// <summary>
        /// Calculate the component-wise maximum of two vectors
        /// </summary>
        /// <param name="a">First operand</param>
        /// <param name="b">Second operand</param>
        /// <returns>The component-wise maximum</returns>
        public static Vector4 Max(Vector4 a, Vector4 b)
        {
            a.x = a.x > b.x ? a.x : b.x;
            a.y = a.y > b.y ? a.y : b.y;
            a.z = a.z > b.z ? a.z : b.z;
            a.w = a.w > b.w ?
a.w : b.w;
            return a;
        }

        /// <summary>
        /// Calculate the component-wise maximum of two vectors
        /// </summary>
        /// <param name="a">First operand</param>
        /// <param name="b">Second operand</param>
        /// <param name="result">The component-wise maximum</param>
        public static void Max(ref Vector4 a, ref Vector4 b, out Vector4 result)
        {
            result.x = a.x > b.x ? a.x : b.x;
            result.y = a.y > b.y ? a.y : b.y;
            result.z = a.z > b.z ? a.z : b.z;
            result.w = a.w > b.w ? a.w : b.w;
        }

        #endregion Max

        #region Clamp

        /// <summary>
        /// Clamp a vector to the given minimum and maximum vectors
        /// </summary>
        /// <param name="vec">Input vector</param>
        /// <param name="min">Minimum vector</param>
        /// <param name="max">Maximum vector</param>
        /// <returns>The clamped vector</returns>
        public static Vector4 Clamp(Vector4 vec, Vector4 min, Vector4 max)
        {
            vec.x = vec.x < min.x ? min.x : vec.x > max.x ? max.x : vec.x;
            vec.y = vec.y < min.y ? min.y : vec.y > max.y ? max.y : vec.y;
            // FIX: the lower-bound tests for z and w previously compared vec.x and vec.y
            // (copy-paste bug, also fixed upstream in OpenTK) — they must compare the
            // component being clamped.
            vec.z = vec.z < min.z ? min.z : vec.z > max.z ? max.z : vec.z;
            vec.w = vec.w < min.w ? min.w : vec.w > max.w ? max.w : vec.w;
            return vec;
        }

        /// <summary>
        /// Clamp a vector to the given minimum and maximum vectors
        /// </summary>
        /// <param name="vec">Input vector</param>
        /// <param name="min">Minimum vector</param>
        /// <param name="max">Maximum vector</param>
        /// <param name="result">The clamped vector</param>
        public static void Clamp(ref Vector4 vec, ref Vector4 min, ref Vector4 max, out Vector4 result)
        {
            result.x = vec.x < min.x ? min.x : vec.x > max.x ? max.x : vec.x;
            result.y = vec.y < min.y ? min.y : vec.y > max.y ? max.y : vec.y;
            // FIX: same copy-paste bug as above — the z/w lower-bound tests must use
            // vec.z/vec.w, not vec.x/vec.y.
            result.z = vec.z < min.z ? min.z : vec.z > max.z ? max.z : vec.z;
            result.w = vec.w < min.w ? min.w : vec.w > max.w ?
max.w : vec.w;
        }

        #endregion Clamp

        #region Normalize

        /// <summary>
        /// Scale a vector to unit length
        /// </summary>
        /// <param name="vec">The input vector</param>
        /// <returns>The normalized vector</returns>
        public static Vector4 Normalize(Vector4 vec)
        {
            double scale = 1.0 / vec.Length;
            vec.x *= scale;
            vec.y *= scale;
            vec.z *= scale;
            vec.w *= scale;
            return vec;
        }

        /// <summary>
        /// Scale a vector to unit length
        /// </summary>
        /// <param name="vec">The input vector</param>
        /// <param name="result">The normalized vector</param>
        public static void Normalize(ref Vector4 vec, out Vector4 result)
        {
            double scale = 1.0 / vec.Length;
            result.x = vec.x * scale;
            result.y = vec.y * scale;
            result.z = vec.z * scale;
            result.w = vec.w * scale;
        }

        #endregion Normalize

        #region Dot

        /// <summary>
        /// Calculate the dot product of two vectors
        /// </summary>
        /// <param name="left">First operand</param>
        /// <param name="right">Second operand</param>
        /// <returns>The dot product of the two inputs</returns>
        public static double Dot(Vector4 left, Vector4 right)
        {
            return left.x * right.x + left.y * right.y + left.z * right.z + left.w * right.w;
        }

        /// <summary>
        /// Calculate the dot product of two vectors
        /// </summary>
        /// <param name="left">First operand</param>
        /// <param name="right">Second operand</param>
        /// <param name="result">The dot product of the two inputs</param>
        public static void Dot(ref Vector4 left, ref Vector4 right, out double result)
        {
            result = left.x * right.x + left.y * right.y + left.z * right.z + left.w * right.w;
        }

        #endregion Dot

        #region Lerp

        /// <summary>
        /// Returns a new Vector that is the linear blend of the 2 given Vectors
        /// </summary>
        /// <param name="a">First input vector</param>
        /// <param name="b">Second input vector</param>
        /// <param name="blend">The blend factor. a when blend=0, b when blend=1.</param>
        /// <returns>a when blend=0, b when blend=1, and a linear combination otherwise</returns>
        public static Vector4 Lerp(Vector4 a, Vector4 b, double blend)
        {
            a.x = blend * (b.x - a.x) + a.x;
            a.y = blend * (b.y - a.y) + a.y;
            a.z = blend * (b.z - a.z) + a.z;
            a.w = blend * (b.w - a.w) + a.w;
            return a;
        }

        /// <summary>
        /// Returns a new Vector that is the linear blend of the 2 given Vectors
        /// </summary>
        /// <param name="a">First input vector</param>
        /// <param name="b">Second input vector</param>
        /// <param name="blend">The blend factor. a when blend=0, b when blend=1.</param>
        /// <param name="result">a when blend=0, b when blend=1, and a linear combination otherwise</param>
        public static void Lerp(ref Vector4 a, ref Vector4 b, double blend, out Vector4 result)
        {
            result.x = blend * (b.x - a.x) + a.x;
            result.y = blend * (b.y - a.y) + a.y;
            result.z = blend * (b.z - a.z) + a.z;
            result.w = blend * (b.w - a.w) + a.w;
        }

        #endregion Lerp

        #region Barycentric

        /// <summary>
        /// Interpolate 3 Vectors using Barycentric coordinates
        /// </summary>
        /// <param name="a">First input Vector</param>
        /// <param name="b">Second input Vector</param>
        /// <param name="c">Third input Vector</param>
        /// <param name="u">First Barycentric Coordinate</param>
        /// <param name="v">Second Barycentric Coordinate</param>
        /// <returns>a when u=v=0, b when u=1,v=0, c when u=0,v=1, and a linear combination of a,b,c otherwise</returns>
        public static Vector4 BaryCentric(Vector4 a, Vector4 b, Vector4 c, double u, double v)
        {
            return a + u * (b - a) + v * (c - a);
        }

        /// <summary>Interpolate 3 Vectors using Barycentric coordinates</summary>
        /// <param name="a">First input Vector.</param>
        /// <param name="b">Second input Vector.</param>
        /// <param name="c">Third input Vector.</param>
        /// <param name="u">First Barycentric Coordinate.</param>
        /// <param name="v">Second Barycentric Coordinate.</param>
        /// <param name="result">Output Vector. a when u=v=0, b when u=1,v=0, c when u=0,v=1, and a linear combination of a,b,c otherwise</param>
        public static void BaryCentric(ref Vector4 a, ref Vector4 b, ref Vector4 c, double u, double v, out Vector4 result)
        {
            // Allocation-free equivalent of a + u*(b-a) + v*(c-a), built from the
            // by-ref helpers.
            result = a; // copy

            Vector4 temp = b; // copy
            Subtract(ref temp, ref a, out temp);
            Multiply(ref temp, u, out temp);
            Add(ref result, ref temp, out result);

            temp = c; // copy
            Subtract(ref temp, ref a, out temp);
            Multiply(ref temp, v, out temp);
            Add(ref result, ref temp, out result);
        }

        #endregion Barycentric

        #region Transform

        /// <summary>Transform a Vector by the given Matrix</summary>
        /// <param name="vec">The vector to transform</param>
        /// <param name="mat">The desired transformation</param>
        /// <returns>The transformed vector</returns>
        public static Vector4 Transform(Vector4 vec, Matrix4X4 mat)
        {
            Vector4 result;
            Transform(ref vec, ref mat, out result);
            return result;
        }

        /// <summary>Transform a Vector by the given Matrix</summary>
        /// <param name="vec">The vector to transform</param>
        /// <param name="mat">The desired transformation</param>
        /// <param name="result">The transformed vector</param>
        public static void Transform(ref Vector4 vec, ref Matrix4X4 mat, out Vector4 result)
        {
            // Row-vector convention: result = vec * mat.
            result = new Vector4(
                vec.x * mat.Row0.x + vec.y * mat.Row1.x + vec.z * mat.Row2.x + vec.w * mat.Row3.x,
                vec.x * mat.Row0.y + vec.y * mat.Row1.y + vec.z * mat.Row2.y + vec.w * mat.Row3.y,
                vec.x * mat.Row0.z + vec.y * mat.Row1.z + vec.z * mat.Row2.z + vec.w * mat.Row3.z,
                vec.x * mat.Row0.w + vec.y * mat.Row1.w + vec.z * mat.Row2.w + vec.w * mat.Row3.w);
        }

        /// <summary>
        /// Transforms a vector by a quaternion rotation.
        /// </summary>
        /// <param name="vec">The vector to transform.</param>
        /// <param name="quat">The quaternion to rotate the vector by.</param>
        /// <returns>The result of the operation.</returns>
        public static Vector4 Transform(Vector4 vec, Quaternion quat)
        {
            Vector4 result;
            Transform(ref vec, ref quat, out result);
            return result;
        }

        /// <summary>
        /// Transforms a vector by a quaternion rotation.
        /// </summary>
        /// <param name="vec">The vector to transform.</param>
        /// <param name="quat">The quaternion to rotate the vector by.</param>
        /// <param name="result">The result of the operation.</param>
        public static void Transform(ref Vector4 vec, ref Quaternion quat, out Vector4 result)
        {
            // Rotate via conjugation: result = quat * v * quat^-1, with the vector
            // embedded as a quaternion.
            Quaternion v = new Quaternion(vec.x, vec.y, vec.z, vec.w), i, t;
            Quaternion.Invert(ref quat, out i);
            Quaternion.Multiply(ref quat, ref v, out t);
            Quaternion.Multiply(ref t, ref i, out v);

            result = new Vector4(v.X, v.Y, v.Z, v.W);
        }

        #endregion Transform

        #endregion Static

        #region Swizzle

        /// <summary>
        /// Gets or sets an OpenTK.Vector2d with the X and Y components of this instance.
        /// </summary>
        public Vector2 Xy { get { return new Vector2(x, y); } set { x = value.x; y = value.y; } }

        /// <summary>
        /// Gets or sets an OpenTK.Vector3d with the X, Y and Z components of this instance.
        /// </summary>
        public Vector3 Xyz { get { return new Vector3(x, y, z); } set { x = value.x; y = value.y; z = value.z; } }

        #endregion Swizzle

        #region Operators

        /// <summary>
        /// Adds two instances.
        /// </summary>
        /// <param name="left">The first instance.</param>
        /// <param name="right">The second instance.</param>
        /// <returns>The result of the calculation.</returns>
        public static Vector4 operator +(Vector4 left, Vector4 right)
        {
            left.x += right.x;
            left.y += right.y;
            left.z += right.z;
            left.w += right.w;
            return left;
        }

        /// <summary>
        /// Subtracts two instances.
        /// </summary>
        /// <param name="left">The first instance.</param>
        /// <param name="right">The second instance.</param>
        /// <returns>The result of the calculation.</returns>
        public static Vector4 operator -(Vector4 left, Vector4 right)
        {
            left.x -= right.x;
            left.y -= right.y;
            left.z -= right.z;
            left.w -= right.w;
            return left;
        }

        /// <summary>
        /// Negates an instance.
        /// </summary>
        /// <param name="vec">The instance.</param>
        /// <returns>The result of the calculation.</returns>
        public static Vector4 operator -(Vector4 vec)
        {
            vec.x = -vec.x;
            vec.y = -vec.y;
            vec.z = -vec.z;
            vec.w = -vec.w;
            return vec;
        }

        /// <summary>
        /// Multiplies an instance by a scalar.
        /// </summary>
        /// <param name="vec">The instance.</param>
        /// <param name="scale">The scalar.</param>
        /// <returns>The result of the calculation.</returns>
        public static Vector4 operator *(Vector4 vec, double scale)
        {
            vec.x *= scale;
            vec.y *= scale;
            vec.z *= scale;
            vec.w *= scale;
            return vec;
        }

        /// <summary>
        /// Multiplies an instance by a scalar.
        /// </summary>
        /// <param name="scale">The scalar.</param>
        /// <param name="vec">The instance.</param>
        /// <returns>The result of the calculation.</returns>
        public static Vector4 operator *(double scale, Vector4 vec)
        {
            vec.x *= scale;
            vec.y *= scale;
            vec.z *= scale;
            vec.w *= scale;
            return vec;
        }

        /// <summary>
        /// Divides an instance by a scalar.
        /// Implemented as a single reciprocal followed by four multiplies.
        /// </summary>
        /// <param name="vec">The instance.</param>
        /// <param name="scale">The scalar.</param>
        /// <returns>The result of the calculation.</returns>
        public static Vector4 operator /(Vector4 vec, double scale)
        {
            double mult = 1 / scale;
            vec.x *= mult;
            vec.y *= mult;
            vec.z *= mult;
            vec.w *= mult;
            return vec;
        }

        /// <summary>
        /// Compares two instances for equality.
/// </summary> /// <param name="left">The first instance.</param> /// <param name="right">The second instance.</param> /// <returns>True, if left equals right; false otherwise.</returns> public static bool operator ==(Vector4 left, Vector4 right) { return left.Equals(right); } /// <summary> /// Compares two instances for inequality. /// </summary> /// <param name="left">The first instance.</param> /// <param name="right">The second instance.</param> /// <returns>True, if left does not equa lright; false otherwise.</returns> public static bool operator !=(Vector4 left, Vector4 right) { return !left.Equals(right); } #endregion Operators #region Overrides #region public override string ToString() /// <summary> /// Returns a System.String that represents the current Vector4d. /// </summary> /// <returns></returns> public override string ToString() { return String.Format("{0}, {1}, {2}, {3}", x, y, z, w); } /// <summary> /// Returns a System.String that represents the current Vector4d, formatting each element with format. /// </summary> /// <param name="format"></param> /// <returns></returns> public string ToString(string format = "") { return x.ToString(format) + ", " + y.ToString(format) + ", " + z.ToString(format) + ", " + w.ToString(format); } #endregion public override string ToString() #region public override int GetHashCode() /// <summary> /// Returns the hashcode for this instance. /// </summary> /// <returns>A System.Int32 containing the unique hashcode for this instance.</returns> public override int GetHashCode() { return new { x, y, z, w }.GetHashCode(); } #endregion public override int GetHashCode() #region public override bool Equals(object obj) /// <summary> /// Indicates whether this instance and a specified object are equal. 
/// </summary> /// <param name="obj">The object to compare to.</param> /// <returns>True if the instances are equal; false otherwise.</returns> public override bool Equals(object obj) { if (!(obj is Vector4)) return false; return this.Equals((Vector4)obj); } #endregion public override bool Equals(object obj) #endregion Overrides #endregion Public Members #region IEquatable<Vector4d> Members /// <summary>Indicates whether the current vector is equal to another vector.</summary> /// <param name="other">A vector to compare with this vector.</param> /// <returns>true if the current vector is equal to the vector parameter; otherwise, false.</returns> public bool Equals(Vector4 other) { return x == other.x && y == other.y && z == other.z && w == other.w; } #endregion IEquatable<Vector4d> Members } }
/*
Copyright Microsoft Corporation

Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
except in compliance with the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED WARRANTIES OR CONDITIONS
OF TITLE, FITNESS FOR A PARTICULAR PURPOSE, MERCHANTABLITY OR NON-INFRINGEMENT.

See the Apache 2 License for the specific language governing permissions and limitations
under the License.
*/

using System;
using System.Collections.Generic;
using System.Linq;
using System.Web.Mvc;
using Microsoft.Practices.ServiceLocation;
using MileageStats.Domain.Contracts;
using MileageStats.Domain.Handlers;
using MileageStats.Domain.Models;
using MileageStats.Web.Controllers;
using MileageStats.Web.Models;
using MileageStats.Web.Tests.Mocks;
using Moq;
using Xunit;
using FillupEntry = MileageStats.Domain.Models.FillupEntry;
using Vehicle = MileageStats.Domain.Models.Vehicle;

namespace MileageStats.Web.Tests.Controllers
{
    // Unit tests for FillupController. Handlers are resolved through a mocked
    // IServiceLocator (see MockHandlerFor), so each test registers only the
    // handler mocks the action under test will request.
    public class FillupControllerFixture
    {
        private const int NoVehicleSelectedId = 0;

        // Mocked dependencies injected into every controller built by GetTestableFillupController.
        private readonly Mock<GetUserByClaimId> userServicesMock;
        private readonly UserInfo defaultUserInfo;
        private readonly Mock<IServiceLocator> serviceLocator;

        // Well-known ids used by the helper mock builders below.
        const int defaultFillupId = 55;
        const int defaultVehicleId = 99;

        public FillupControllerFixture()
        {
            serviceLocator = new Mock<IServiceLocator>();
            userServicesMock = new Mock<GetUserByClaimId>(null);
            defaultUserInfo = new UserInfo { ClaimsIdentifier = "TestClaimsIdentifier", UserId = 5 };
        }

        [Fact]
        public void WhenRequestingFillup_ThenReturnsFillupView()
        {
            var controller = GetTestableFillupController();
            MockFillupForDefaultVehicle();
            MockHandlerFor(() => new Mock<GetVehicleById>(null, null))
                .Setup(h => h.Execute(It.IsAny<int>(), It.IsAny<int>()))
                .Returns(new VehicleModel(null, null));
            MockHandlerFor(() => new Mock<GetVehicleListForUser>(null,null));
            MockEmptyFillupsForDefaultVehicle();

            var result = controller.Details(defaultVehicleId, defaultFillupId);

            Assert.NotNull(result);
        }

        [Fact]
        public void WhenRequestingFillup_ThenReturnsProvidesViewModelToView()
        {
            MockFillupForDefaultVehicle();
            MockHandlerFor(() => new Mock<GetVehicleListForUser>(null,null));
            MockHandlerFor(() => new Mock<GetVehicleById>(null, null))
                .Setup(h => h.Execute(It.IsAny<int>(), It.IsAny<int>()))
                .Returns(new VehicleModel(null, null));
            MockEmptyFillupsForDefaultVehicle();

            var controller = GetTestableFillupController();
            var result = controller.Details(defaultVehicleId, defaultFillupId);

            // Extract unwraps the content-type-aware result to the underlying view model.
            var model = result.Extract<FillupViewModel>();
            Assert.NotNull(model);
        }

        [Fact]
        public void WhenRequestingFillup_ThenReturnsProvidesFillupsInViewModel()
        {
            var fillupEntries = new[]
            {
                new FillupEntry {VehicleId = defaultVehicleId},
                new FillupEntry {VehicleId = defaultVehicleId}
            };

            MockFillupForDefaultVehicle();
            MockHandlerFor(() => new Mock<GetVehicleListForUser>(null,null));
            MockHandlerFor(
                () => new Mock<GetFillupsForVehicle>(null),
                x => x
                    .Setup(h => h.Execute(defaultVehicleId))
                    .Returns(new List<FillupEntry>(fillupEntries)));
            MockHandlerFor(() => new Mock<GetVehicleById>(null, null))
                .Setup(h => h.Execute(It.IsAny<int>(), It.IsAny<int>()))
                .Returns(new VehicleModel(null, null));

            var controller = GetTestableFillupController();
            var result = controller.Details(defaultVehicleId, defaultFillupId);

            var model = result.Extract<FillupViewModel>();
            Assert.NotNull(model);
        }

        [Fact]
        public void WhenRequestingVehicleFillups_ReturnsViewWithViewResult()
        {
            MockVehicleListWithVehicles(defaultVehicleId);
            MockFillupsForDefaultVehicle();
            MockHandlerFor(() => new Mock<GetVehicleById>(null, null))
                .Setup(h => h.Execute(It.IsAny<int>(), It.IsAny<int>()))
                .Returns(new VehicleModel(null, null));

            var controller = GetTestableFillupController();
            var actual = controller.List(defaultVehicleId);

            var model = actual.Extract<List<FillupListViewModel>>();
            Assert.NotNull(actual);
            Assert.NotNull(model);
        }

        [Fact]
        public void WhenRequestingVehicleFillups_ReturnsViewWithPopulatedViewModel()
        {
            MockVehicleListWithVehicles(defaultVehicleId);
            MockFillupsForDefaultVehicle(); // registers 3 fillups for the default vehicle
            MockHandlerFor(() => new Mock<GetVehicleById>(null, null))
                .Setup(h => h.Execute(It.IsAny<int>(), It.IsAny<int>()))
                .Returns(new VehicleModel(null, null));

            var controller = GetTestableFillupController();
            var result = controller.List(defaultVehicleId);

            var model = result.Extract<List<FillupListViewModel>>();
            Assert.Equal(1, model.Count()); // expect 1 group of fillups
            Assert.Equal(3, model.First().Fillups.Count()); // expect 3 members in that group
        }

        [Fact]
        public void WhenAddingFillupGet_ShowsFillupEntryView()
        {
            MockHandlerFor(
                () => new Mock<GetVehicleById>(null, null),
                x => x
                    .Setup(h => h.Execute(defaultUserInfo.UserId, defaultVehicleId))
                    .Returns(new VehicleModel(new Vehicle{VehicleId = defaultVehicleId}, new VehicleStatisticsModel())));
            MockFillupsForDefaultVehicle();

            var controller = GetTestableFillupController();
            var result = controller.Add(defaultVehicleId);

            Assert.IsType(typeof(ContentTypeAwareResult), result);
        }

        [Fact]
        public void WhenAddingFillupGet_ProvidesPrePopulatedModel()
        {
            // this test has some unnecessary setup, just to reflect what is happening with data access
            // ultimately, the data access should be improved to remove the extra calls to the db
            var fillups = new List<FillupEntry>
            {
                new FillupEntry
                {
                    VehicleId = defaultVehicleId,
                    FillupEntryId = defaultFillupId,
                    Odometer = 500
                },
            };

            // this is where the actual odometer reading originates
            var statistics = CalculateStatistics.Calculate(fillups);

            // fillups is not required on the vehicle for this test to pass
            var vehicles = new List<VehicleModel>
            {
                new VehicleModel(new Vehicle {VehicleId = defaultVehicleId}, statistics )
            };

            MockHandlerFor(
                () => new Mock<GetVehicleById>(null,null),
                x => x
                    .Setup(h => h.Execute(defaultUserInfo.UserId, defaultVehicleId))
                    .Returns(vehicles[0]));

            // this test will pass even if this handler returns the wrong set of fillups
            MockHandlerFor(
                () => new Mock<GetFillupsForVehicle>(null),
                x => x
                    .Setup(h => h.Execute(defaultVehicleId))
                    .Returns(fillups));

            var controller = GetTestableFillupController();
            var result = controller.Add(defaultVehicleId);

            var model = result.Extract<FillupEntryFormModel>();
            Assert.NotNull(model);
            // The form should be pre-populated with the latest odometer reading.
            Assert.Equal(500, model.Odometer);
        }

        [Fact]
        public void WhenAddingFillupPostExecutes_SendsToServicesTier()
        {
            var fillupEntry = new FillupEntryFormModel
            {
                VehicleId = defaultVehicleId,
                Date = DateTime.Now,
                Odometer = 50,
                PricePerUnit = 1.25d,
                TotalUnits = 10.0d
            };

            // Validation handler reports no errors so the add handler should run.
            MockHandlerFor(
                () => new Mock<CanAddFillup>(null, null),
                x => x
                    .Setup(h => h.Execute(defaultUserInfo.UserId, defaultVehicleId, fillupEntry))
                    .Returns(new ValidationResult[] { }));

            var handler = MockHandlerFor(
                () => new Mock<AddFillupToVehicle>(null, null),
                x => x
                    .Setup(h => h.Execute(defaultUserInfo.UserId, defaultVehicleId, fillupEntry))
                    .Verifiable("handler was not invoked"));

            MockVehicleListWithVehicles(defaultVehicleId);

            var controller = GetTestableFillupController();
            controller.Add(defaultVehicleId, fillupEntry);

            handler.Verify();
        }

        [Fact]
        public void WhenAddingFillupPostExecutes_RedirectsToFillupList()
        {
            var fillupEntry = new FillupEntryFormModel
            {
                VehicleId = defaultVehicleId,
                Date = DateTime.Now,
                Odometer = 50,
                PricePerUnit = 1.25d,
                TotalUnits = 10.0d
            };

            MockHandlerFor(
                () => new Mock<CanAddFillup>(null, null),
                x => x
                    .Setup(h => h.Execute(defaultUserInfo.UserId, defaultVehicleId, fillupEntry))
                    .Returns(new ValidationResult[] { }));

            MockHandlerFor(() => new Mock<AddFillupToVehicle>(null, null));
            MockVehicleListWithVehicles(defaultVehicleId);

            var controller = GetTestableFillupController();

            // Force an HTML request so the content-type-aware result resolves to a redirect.
            var context = controller.MockRequestForMediaType("text/html");
            var proxy = (ITestableContentTypeAwareResult)controller.Add(defaultVehicleId, fillupEntry);
            var result = proxy.GetActionResultFor(context.Object) as RedirectToRouteResult;

            Assert.NotNull(result);
            Assert.Equal("List", result.RouteValues["action"]);
            Assert.Equal("Fillup", result.RouteValues["controller"]);
        }

        // returns controller with mocks
        private FillupController GetTestableFillupController()
        {
            var controller = new FillupController(userServicesMock.Object, serviceLocator.Object);
            controller.SetFakeControllerContext();
            controller.SetUserIdentity(new MileageStatsIdentity("TestUser",
                defaultUserInfo.DisplayName,
                defaultUserInfo.UserId));
            return controller;
        }

        // Registers a handler mock with the mocked service locator; optional setup lambda configures it.
        Mock<T> MockHandlerFor<T>(Func<Mock<T>> create, Action<Mock<T>> setup = null) where T : class
        {
            return serviceLocator.MockHandlerFor(create, setup);
        }

        // NOTE(review): the selectedVehicle parameter is unused — the mock always returns
        // defaultVehicleId.StandardVehicleList(); confirm whether callers expect it to matter.
        private void MockVehicleListWithVehicles(int selectedVehicle)
        {
            MockHandlerFor(
                () => new Mock<GetVehicleListForUser>(null,null),
                x => x
                    .Setup(h => h.Execute(defaultUserInfo.UserId))
                    .Returns(defaultVehicleId.StandardVehicleList()));
        }

        // Registers GetFillupsForVehicle to return 3 fillups for the default vehicle.
        private void MockFillupsForDefaultVehicle()
        {
            var list = new List<FillupEntry>
            {
                new FillupEntry {VehicleId = defaultVehicleId, FillupEntryId = defaultFillupId},
                new FillupEntry {VehicleId = defaultVehicleId, FillupEntryId = defaultFillupId + 1},
                new FillupEntry {VehicleId = defaultVehicleId, FillupEntryId = defaultFillupId + 2},
            };

            MockHandlerFor(
                () => new Mock<GetFillupsForVehicle>(null),
                x => x
                    .Setup(h => h.Execute(defaultVehicleId))
                    .Returns(list));
        }

        // Registers GetFillupsForVehicle to return no fillups for the default vehicle.
        private void MockEmptyFillupsForDefaultVehicle()
        {
            MockHandlerFor(
                () => new Mock<GetFillupsForVehicle>(null),
                x => x
                    .Setup(h => h.Execute(defaultVehicleId))
                    .Returns(new List<FillupEntry>()));
        }

        // Registers GetFillupById to return a single fillup for defaultFillupId.
        private void MockFillupForDefaultVehicle()
        {
            MockHandlerFor(
                () => new Mock<GetFillupById>(null),
                x => x
                    .Setup(h => h.Execute(defaultFillupId))
                    .Returns(new FillupEntry { VehicleId = defaultVehicleId }));
        }
    }
}
using UnityEditor.Experimental.GraphView;
using UnityEngine;
using UnityEngine.UIElements;
using System.Collections.Generic;
using Type = System.Type;
using System.Linq;
using UnityEngine.Profiling;
using PositionType = UnityEngine.UIElements.Position;

namespace UnityEditor.VFX.UI
{
    // GraphView port for a VFX data slot. Bridges a VFXDataAnchorController (model side)
    // with the UIElements Port visuals, and handles edge drag/drop interactions.
    class VFXDataAnchor : Port, IControlledElement<VFXDataAnchorController>, IEdgeConnectorListener
    {
        // Overlay element stretched over the connector box (absolute, all edges 0).
        VisualElement m_ConnectorHighlight;

        VFXDataAnchorController m_Controller;
        Controller IControlledElement.controller
        {
            get { return m_Controller; }
        }

        // Setting the controller re-registers this element as its change handler.
        public VFXDataAnchorController controller
        {
            get { return m_Controller; }
            set
            {
                if (m_Controller != null)
                {
                    m_Controller.UnregisterHandler(this);
                }
                m_Controller = value;
                if (m_Controller != null)
                {
                    m_Controller.RegisterHandler(this);
                }
            }
        }

        VFXNodeUI m_Node;

        // Shadows Port.node with the owning VFX node UI.
        public new VFXNodeUI node
        {
            get {return m_Node; }
        }

        // Protected: instances are built through Create() so the edge connector and
        // controller are wired in the right order.
        protected VFXDataAnchor(Orientation anchorOrientation, Direction anchorDirection, Type type, VFXNodeUI node) : base(anchorOrientation, anchorDirection, Capacity.Multi, type)
        {
            Profiler.BeginSample("VFXDataAnchor.VFXDataAnchor");
            this.AddStyleSheetPath("VFXDataAnchor");
            AddToClassList("VFXDataAnchor");
            this.AddStyleSheetPath("VFXTypeColor");

            // Highlight overlay covers the whole connector box but never intercepts picking.
            m_ConnectorHighlight = new VisualElement();
            m_ConnectorHighlight.style.position = PositionType.Absolute;
            m_ConnectorHighlight.style.top = 0f;
            m_ConnectorHighlight.style.left = 0f;
            m_ConnectorHighlight.style.bottom = 0f;
            m_ConnectorHighlight.style.right = 0f;
            m_ConnectorHighlight.pickingMode = PickingMode.Ignore;

            VisualElement connector = m_ConnectorBox as VisualElement;
            connector.Add(m_ConnectorHighlight);

            m_Node = node;

            RegisterCallback<MouseEnterEvent>(OnMouseEnter);
            RegisterCallback<MouseLeaveEvent>(OnMouseLeave);
            this.AddManipulator(new ContextualMenuManipulator(BuildContextualMenu));
            Profiler.EndSample();
        }

        // Offers "Remove Slot" only on cascaded numeric operators, and only while
        // more than two operands remain.
        public void BuildContextualMenu(ContextualMenuPopulateEvent evt)
        {
            var op = controller.sourceNode.model as VFXOperatorNumericCascadedUnified;
            if (op != null)
                evt.menu.AppendAction("Remove Slot", OnRemove, e => op.operandCount > 2 ? DropdownMenuAction.Status.Normal : DropdownMenuAction.Status.Disabled);
        }

        void OnRemove(DropdownMenuAction e)
        {
            // NOTE(review): unguarded cast — BuildContextualMenu only adds this action for
            // cascaded operators, so sourceNode is presumably always a VFXCascadedOperatorController here.
            var op = controller.sourceNode as VFXCascadedOperatorController;
            op.RemoveOperand(controller);
        }

        // Factory: builds the anchor, attaches its edge connector, then assigns the controller.
        public static VFXDataAnchor Create(VFXDataAnchorController controller, VFXNodeUI node)
        {
            var anchor = new VFXDataAnchor(controller.orientation, controller.direction, controller.portType, node);
            anchor.m_EdgeConnector = new EdgeConnector<VFXDataEdge>(anchor);
            anchor.controller = controller;

            anchor.AddManipulator(anchor.m_EdgeConnector);
            return anchor;
        }

        // True while any edge drag is in progress anywhere in the view.
        bool m_EdgeDragging;
        public override void OnStartEdgeDragging()
        {
            m_EdgeDragging = true;
            highlight = false;
        }

        public override void OnStopEdgeDragging()
        {
            m_EdgeDragging = false;
            highlight = true;
        }

        void OnMouseEnter(MouseEnterEvent e)
        {
            // Suppress hover feedback on non-highlightable ports during an edge drag.
            if (m_EdgeDragging && !highlight)
                e.PreventDefault();
        }

        void OnMouseLeave(MouseLeaveEvent e)
        {
            if (m_EdgeDragging && !highlight)
                e.PreventDefault();
        }

        // Collapsed when the controller says the port is not expanded in the hierarchy.
        public override bool collapsed
        {
            get { return !controller.expandedInHierachy; }
        }

        // Yields the UI edges currently connected to this anchor (skips controllers with no UI edge).
        IEnumerable<VFXDataEdge> GetAllEdges()
        {
            VFXView view = GetFirstAncestorOfType<VFXView>();

            foreach (var edgeController in controller.connections)
            {
                VFXDataEdge edge = view.GetDataEdgeByController(edgeController as VFXDataEdgeController);
                if (edge != null)
                    yield return edge;
            }
        }

        void IControlledElement.OnControllerChanged(ref ControllerChangedEvent e)
        {
            if (e.controller == controller)
            {
                SelfChange(e.change);
            }
        }

        // Syncs visuals with the controller: connection state, type CSS class,
        // direction class, hidden state and label text.
        public virtual void SelfChange(int change)
        {
            if (change != VFXDataAnchorController.Change.hidden)
            {
                if (controller.connected)
                    AddToClassList("connected");
                else
                    RemoveFromClassList("connected");

                portType = controller.portType;

                string className = VFXTypeDefinition.GetTypeCSSClass(controller.portType);
                // update the css type of the class
                foreach (var cls in VFXTypeDefinition.GetTypeCSSClasses())
                {
                    if (cls != className)
                    {
                        m_ConnectorBox.RemoveFromClassList(cls);
                        RemoveFromClassList(cls);
                    }
                }

                AddToClassList(className);
                m_ConnectorBox.AddToClassList(className);

                AddToClassList("EdgeConnector");

                switch (controller.direction)
                {
                    case Direction.Input:
                        AddToClassList("Input");
                        break;
                    case Direction.Output:
                        AddToClassList("Output");
                        break;
                }

                portName = "";
            }

            if (controller.expandedInHierachy)
            {
                RemoveFromClassList("hidden");
            }
            else
            {
                AddToClassList("hidden");
            }

            // Only output anchors display their name; inputs are labeled elsewhere.
            if (controller.direction == Direction.Output)
                m_ConnectorText.text = controller.name;
            else
                m_ConnectorText.text = "";
        }

        // Drop on a port: materialize the dragged edge as a controller-level link.
        void IEdgeConnectorListener.OnDrop(GraphView graphView, Edge edge)
        {
            VFXView view = graphView as VFXView;
            VFXDataEdge dataEdge = edge as VFXDataEdge;
            VFXDataEdgeController edgeController = new VFXDataEdgeController(dataEdge.input.controller, dataEdge.output.controller);

            view.controller.AddElement(edgeController);
        }

        public override void Disconnect(Edge edge)
        {
            base.Disconnect(edge);
            UpdateCapColor();
        }

        // Drop in empty space or on a node body:
        //  - over a node: try to link to the first compatible master slot of that node;
        //  - empty space + Alt on an input: create a matching VFXParameter and link to it;
        //  - empty space (new edge only): open the node-creation filter window.
        void IEdgeConnectorListener.OnDropOutsidePort(Edge edge, Vector2 position)
        {
            VFXSlot startSlot = controller.model;

            VFXView view = this.GetFirstAncestorOfType<VFXView>();
            VFXViewController viewController = view.controller;

            VFXNodeUI endNode = null;
            foreach (var node in view.GetAllNodes())
            {
                if (node.worldBound.Contains(position))
                {
                    endNode = node;
                }
            }

            VFXDataEdge dataEdge = edge as VFXDataEdge;
            bool exists = false;
            if (dataEdge.controller != null)
            {
                // The dragged edge already existed; remove it before re-linking.
                exists = true;
                view.controller.RemoveElement(dataEdge.controller);
            }

            if (endNode != null)
            {
                VFXNodeController nodeController = endNode.controller;

                if (nodeController != null)
                {
                    IVFXSlotContainer slotContainer = nodeController.slotContainer;
                    if (controller.direction == Direction.Input)
                    {
                        foreach (var output in nodeController.outputPorts.Where(t => t.model == null || t.model.IsMasterSlot()))
                        {
                            if (viewController.CreateLink(controller, output))
                                break;
                        }
                    }
                    else
                    {
                        foreach (var input in nodeController.inputPorts.Where(t => t.model == null || t.model.IsMasterSlot()))
                        {
                            if (viewController.CreateLink(input, controller))
                                break;
                        }
                    }
                }
            }
            else if (controller.direction == Direction.Input && Event.current.modifiers == EventModifiers.Alt)
            {
                VFXModelDescriptorParameters parameterDesc = VFXLibrary.GetParameters().FirstOrDefault(t => t.name == controller.portType.UserFriendlyName());
                if (parameterDesc != null)
                {
                    // Spawn the parameter slightly to the left of the drop point, then link and copy the value.
                    VFXParameter parameter = viewController.AddVFXParameter(view.contentViewContainer.GlobalToBound(position) - new Vector2(360, 0), parameterDesc);
                    startSlot.Link(parameter.outputSlots[0]);
                    CopyValueToParameter(parameter);
                }
            }
            else if (!exists)
            {
                VFXFilterWindow.Show(VFXViewWindow.currentWindow, Event.current.mousePosition, view.ViewToScreenPosition(Event.current.mousePosition), new VFXNodeProvider(viewController, AddLinkedNode, ProviderFilter, new Type[] { typeof(VFXOperator), typeof(VFXParameter), typeof(VFXContext) }));
            }
        }

        // Filter for the node-creation window: keep descriptors exposing at least one
        // slot that can link to this anchor (or whose type a dynamic operand accepts).
        bool ProviderFilter(VFXNodeProvider.Descriptor d)
        {
            var mySlot = controller.model;
            var parameterDescriptor = d.modelDescriptor as VFXParameterController;
            IVFXSlotContainer container = null;
            if (parameterDescriptor != null)
            {
                container = parameterDescriptor.model;
            }
            else
            {
                VFXModelDescriptor desc = d.modelDescriptor as VFXModelDescriptor;
                if (desc == null)
                    return false;
                container = desc.model as IVFXSlotContainer;
                if (container == null)
                    return false;

                if ( direction == Direction.Output
                    && mySlot != null
                    && container is VFXOperatorDynamicOperand
                    && (container as VFXOperatorDynamicOperand).validTypes.Contains(mySlot.property.type))
                    return true;
            }

            // When this anchor has no slot yet (dynamic operand), fall back to the
            // source operator's accepted types.
            IEnumerable<Type> validTypes = null;
            if (mySlot == null)
            {
                var op = controller.sourceNode.model as VFXOperatorDynamicOperand;
                if (op != null)
                    validTypes = op.validTypes;
            }

            // Scan the candidate's opposite-direction slots for one we can link to.
            var getSlots = direction == Direction.Input ? (System.Func<int, VFXSlot>)container.GetOutputSlot : (System.Func<int, VFXSlot>)container.GetInputSlot;
            int count = direction == Direction.Input ? container.GetNbOutputSlots() : container.GetNbInputSlots();
            for (int i = 0; i < count; ++i)
            {
                var slot = getSlots(i);
                if (mySlot != null && slot.CanLink(mySlot))
                    return true;
                else if (validTypes != null && validTypes.Contains(slot.property.type))
                    return true;
            }
            return false;
        }

        // Creates the node chosen in the filter window and links it to this anchor.
        void AddLinkedNode(VFXNodeProvider.Descriptor d, Vector2 mPos)
        {
            var mySlot = controller.model;
            VFXView view = GetFirstAncestorOfType<VFXView>();
            VFXViewController viewController = controller.viewController;
            if (view == null)
                return;

            var newNodeController = view.AddNode(d, mPos);

            if (newNodeController == null)
                return;

            IEnumerable<Type> validTypes = null;

            var op = controller.sourceNode.model as VFXOperatorNumericCascadedUnified;
            if (mySlot == null && op != null)
            {
                validTypes = op.validTypes;
            }

            // If linking to a new parameter, copy the slot value and space
            if (direction == Direction.Input && controller.model != null) //model will be null for upcoming which won't have a value
            {
                if (newNodeController is VFXOperatorController)
                {
                    var inlineOperator = (newNodeController as VFXOperatorController).model as VFXInlineOperator;
                    if (inlineOperator != null)
                    {
                        var value = controller.model.value;
                        object convertedValue = null;
                        if (VFXConverter.TryConvertTo(value, inlineOperator.type, out convertedValue))
                        {
                            inlineOperator.inputSlots[0].value = convertedValue;
                        }
                        if (inlineOperator.inputSlots[0].spaceable && controller.model.spaceable)
                        {
                            inlineOperator.inputSlots[0].space = controller.model.space;
                        }
                    }
                }
            }

            // Link to the first port of the new node that accepts the connection.
            var ports = direction == Direction.Input ? newNodeController.outputPorts : newNodeController.inputPorts;
            int count = ports.Count();
            for (int i = 0; i < count; ++i)
            {
                var port = ports[i];
                if (mySlot != null)
                {
                    if (viewController.CreateLink(direction == Direction.Input ? controller : port, direction == Direction.Input ? port : controller))
                    {
                        break;
                    }
                }
                else if (validTypes != null)
                {
                    if (validTypes.Contains(port.model.property.type))
                    {
                        if (viewController.CreateLink(controller, port))
                        {
                            break;
                        }
                    }
                }
            }
        }

        // Copies this slot's current value onto a newly created parameter, converting if needed.
        void CopyValueToParameter(VFXParameter parameter)
        {
            var value = controller.model.value;
            object convertedValue = null;
            if (VFXConverter.TryConvertTo(value, parameter.type, out convertedValue))
            {
                parameter.value = convertedValue;
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Collections;
using System.Collections.Generic;
using System.Data;
using System.Data.Common;
using System.Data.SqlClient;
using System.Data.SqlTypes;
using System.Diagnostics;
using System.Globalization;

namespace Microsoft.SqlServer.Server
{
    // Utilities for manipulating smi-related metadata.
    //
    // Since this class is built on top of SMI, SMI should not have a dependency on this class
    //
    // These are all based off of knowing the clr type of the value
    // as an ExtendedClrTypeCode enum for rapid access.
    internal class MetaDataUtilsSmi
    {
        // Sentinel meaning "no SqlDbType could be inferred for this CLR type".
        internal const SqlDbType InvalidSqlDbType = (SqlDbType)(-1);
        // Sentinel returned by AdjustMaxLength when a max length is out of range for its type.
        internal const long InvalidMaxLength = -2;

        // Standard type inference map to get SqlDbType when all you know is the value's type (typecode)
        // This map's index is off by one (add one to typecode locate correct entry) in order
        // to support ExtendedSqlDbType.Invalid
        // This array is meant to be accessed from InferSqlDbTypeFromTypeCode.
        // NOTE: entry order must match the ExtendedClrTypeCode enum exactly.
        private static readonly SqlDbType[] s_extendedTypeCodeToSqlDbTypeMap =
        {
            InvalidSqlDbType,           // Invalid extended type code
            SqlDbType.Bit,              // System.Boolean
            SqlDbType.TinyInt,          // System.Byte
            SqlDbType.NVarChar,         // System.Char
            SqlDbType.DateTime,         // System.DateTime
            InvalidSqlDbType,           // System.DBNull doesn't have an inferable SqlDbType
            SqlDbType.Decimal,          // System.Decimal
            SqlDbType.Float,            // System.Double
            InvalidSqlDbType,           // null reference doesn't have an inferable SqlDbType
            SqlDbType.SmallInt,         // System.Int16
            SqlDbType.Int,              // System.Int32
            SqlDbType.BigInt,           // System.Int64
            InvalidSqlDbType,           // System.SByte doesn't have an inferable SqlDbType
            SqlDbType.Real,             // System.Single
            SqlDbType.NVarChar,         // System.String
            InvalidSqlDbType,           // System.UInt16 doesn't have an inferable SqlDbType
            InvalidSqlDbType,           // System.UInt32 doesn't have an inferable SqlDbType
            InvalidSqlDbType,           // System.UInt64 doesn't have an inferable SqlDbType
            InvalidSqlDbType,           // System.Object doesn't have an inferable SqlDbType
            SqlDbType.VarBinary,        // System.ByteArray
            SqlDbType.NVarChar,         // System.CharArray
            SqlDbType.UniqueIdentifier, // System.Guid
            SqlDbType.VarBinary,        // System.Data.SqlTypes.SqlBinary
            SqlDbType.Bit,              // System.Data.SqlTypes.SqlBoolean
            SqlDbType.TinyInt,          // System.Data.SqlTypes.SqlByte
            SqlDbType.DateTime,         // System.Data.SqlTypes.SqlDateTime
            SqlDbType.Float,            // System.Data.SqlTypes.SqlDouble
            SqlDbType.UniqueIdentifier, // System.Data.SqlTypes.SqlGuid
            SqlDbType.SmallInt,         // System.Data.SqlTypes.SqlInt16
            SqlDbType.Int,              // System.Data.SqlTypes.SqlInt32
            SqlDbType.BigInt,           // System.Data.SqlTypes.SqlInt64
            SqlDbType.Money,            // System.Data.SqlTypes.SqlMoney
            SqlDbType.Decimal,          // System.Data.SqlTypes.SqlDecimal
            SqlDbType.Real,             // System.Data.SqlTypes.SqlSingle
            SqlDbType.NVarChar,         // System.Data.SqlTypes.SqlString
            SqlDbType.NVarChar,         // System.Data.SqlTypes.SqlChars
            SqlDbType.VarBinary,        // System.Data.SqlTypes.SqlBytes
            SqlDbType.Xml,              // System.Data.SqlTypes.SqlXml
            SqlDbType.Structured,       // System.Data.DataTable
            SqlDbType.Structured,       // System.Collections.IEnumerable, used for TVPs it must return IDataRecord
            SqlDbType.Structured,       // System.Collections.Generic.IEnumerable<Microsoft.SqlServer.Server.SqlDataRecord>
            SqlDbType.Time,             // System.TimeSpan
            SqlDbType.DateTimeOffset,   // System.DateTimeOffset
        };

        // Dictionary to map from clr type object to ExtendedClrTypeCodeMap enum.
        // This dictionary should only be accessed from DetermineExtendedTypeCode and class ctor for setup.
        private static readonly Dictionary<Type, ExtendedClrTypeCode> s_typeToExtendedTypeCodeMap = CreateTypeToExtendedTypeCodeMap();

        // Builds the Type -> ExtendedClrTypeCode lookup used by DetermineExtendedTypeCodeFromType.
        private static Dictionary<Type, ExtendedClrTypeCode> CreateTypeToExtendedTypeCodeMap()
        {
            int Count = 42; // initial capacity: number of entries below
            // Keep this initialization list in the same order as ExtendedClrTypeCode for ease in validating!
            var dictionary = new Dictionary<Type, ExtendedClrTypeCode>(Count)
            {
                { typeof(bool), ExtendedClrTypeCode.Boolean },
                { typeof(byte), ExtendedClrTypeCode.Byte },
                { typeof(char), ExtendedClrTypeCode.Char },
                { typeof(DateTime), ExtendedClrTypeCode.DateTime },
                { typeof(DBNull), ExtendedClrTypeCode.DBNull },
                { typeof(decimal), ExtendedClrTypeCode.Decimal },
                { typeof(double), ExtendedClrTypeCode.Double },
                // lookup code will handle special-case null-ref, omitting the addition of ExtendedTypeCode.Empty
                { typeof(short), ExtendedClrTypeCode.Int16 },
                { typeof(int), ExtendedClrTypeCode.Int32 },
                { typeof(long), ExtendedClrTypeCode.Int64 },
                { typeof(sbyte), ExtendedClrTypeCode.SByte },
                { typeof(float), ExtendedClrTypeCode.Single },
                { typeof(string), ExtendedClrTypeCode.String },
                { typeof(ushort), ExtendedClrTypeCode.UInt16 },
                { typeof(uint), ExtendedClrTypeCode.UInt32 },
                { typeof(ulong), ExtendedClrTypeCode.UInt64 },
                { typeof(object), ExtendedClrTypeCode.Object },
                { typeof(byte[]), ExtendedClrTypeCode.ByteArray },
                { typeof(char[]), ExtendedClrTypeCode.CharArray },
                { typeof(Guid), ExtendedClrTypeCode.Guid },
                { typeof(SqlBinary), ExtendedClrTypeCode.SqlBinary },
                { typeof(SqlBoolean), ExtendedClrTypeCode.SqlBoolean },
                { typeof(SqlByte), ExtendedClrTypeCode.SqlByte },
                { typeof(SqlDateTime), ExtendedClrTypeCode.SqlDateTime },
                { typeof(SqlDouble), ExtendedClrTypeCode.SqlDouble },
                { typeof(SqlGuid), ExtendedClrTypeCode.SqlGuid },
                { typeof(SqlInt16), ExtendedClrTypeCode.SqlInt16 },
                { typeof(SqlInt32), ExtendedClrTypeCode.SqlInt32 },
                { typeof(SqlInt64), ExtendedClrTypeCode.SqlInt64 },
                { typeof(SqlMoney), ExtendedClrTypeCode.SqlMoney },
                { typeof(SqlDecimal), ExtendedClrTypeCode.SqlDecimal },
                { typeof(SqlSingle), ExtendedClrTypeCode.SqlSingle },
                { typeof(SqlString), ExtendedClrTypeCode.SqlString },
                { typeof(SqlChars), ExtendedClrTypeCode.SqlChars },
                { typeof(SqlBytes), ExtendedClrTypeCode.SqlBytes },
                { typeof(SqlXml), ExtendedClrTypeCode.SqlXml },
                { typeof(DataTable), ExtendedClrTypeCode.DataTable },
                { typeof(DbDataReader), ExtendedClrTypeCode.DbDataReader },
                { typeof(IEnumerable<SqlDataRecord>), ExtendedClrTypeCode.IEnumerableOfSqlDataRecord },
                { typeof(TimeSpan), ExtendedClrTypeCode.TimeSpan },
                { typeof(DateTimeOffset), ExtendedClrTypeCode.DateTimeOffset },
            };
            return dictionary;
        }

        // True for any character-based or XML SQL type.
        internal static bool IsCharOrXmlType(SqlDbType type)
        {
            return IsUnicodeType(type) ||
                IsAnsiType(type) ||
                type == SqlDbType.Xml;
        }

        // True for UTF-16 character types.
        internal static bool IsUnicodeType(SqlDbType type)
        {
            return type == SqlDbType.NChar ||
                type == SqlDbType.NVarChar ||
                type == SqlDbType.NText;
        }

        // True for single-byte (ANSI) character types.
        internal static bool IsAnsiType(SqlDbType type)
        {
            return type == SqlDbType.Char ||
                type == SqlDbType.VarChar ||
                type == SqlDbType.Text;
        }

        // True for raw binary types.
        internal static bool IsBinaryType(SqlDbType type)
        {
            return type == SqlDbType.Binary ||
                type == SqlDbType.VarBinary ||
                type == SqlDbType.Image;
        }

        // Does this type use PLP format values?
        // (PLP = Partially Length-Prefixed: MAX types and the legacy LOB types.)
        internal static bool IsPlpFormat(SmiMetaData metaData)
        {
            return metaData.MaxLength == SmiMetaData.UnlimitedMaxLengthIndicator ||
                metaData.SqlDbType == SqlDbType.Image ||
                metaData.SqlDbType == SqlDbType.NText ||
                metaData.SqlDbType == SqlDbType.Text ||
                metaData.SqlDbType == SqlDbType.Udt;
        }

        // If we know we're only going to use this object to assign to a specific SqlDbType back end object,
        // we can save some processing time by only checking for the few valid types that can be assigned to the dbType.
        // This assumes a switch statement over SqlDbType is faster than getting the ClrTypeCode and iterating over a
        // series of if statements, or using a hash table.
        // NOTE: the form of these checks is taking advantage of a feature of the JIT compiler that is supposed to
        // optimize checks of the form '(xxx.GetType() == typeof( YYY ))'.  The JIT team claimed at one point that
        // this doesn't even instantiate a Type instance, thus was the fastest method for individual comparisons.
        // Given that there's a known SqlDbType, thus a minimal number of comparisons, it's likely this is faster
        // than the other approaches considered (both GetType().GetTypeCode() switch and hash table using Type keys
        // must instantiate a Type object.  The typecode switch also degenerates into a large if-then-else for
        // all but the primitive clr types.
        /// <summary>
        /// Maps a value's CLR type to an ExtendedClrTypeCode, restricted to the types that
        /// are assignable to the given <paramref name="dbType"/>; returns Invalid otherwise.
        /// </summary>
        /// <param name="dbType">Target SQL type the value will be assigned to.</param>
        /// <param name="isMultiValued">True when the target is a table-valued (Structured) parameter.</param>
        /// <param name="value">The value to classify; null and DBNull are fast-tracked.</param>
        internal static ExtendedClrTypeCode DetermineExtendedTypeCodeForUseWithSqlDbType(
            SqlDbType dbType,
            bool isMultiValued,
            object value
        )
        {
            ExtendedClrTypeCode extendedCode = ExtendedClrTypeCode.Invalid;

            // fast-track null, which is valid for all types
            if (null == value)
            {
                extendedCode = ExtendedClrTypeCode.Empty;
            }
            else if (DBNull.Value == value)
            {
                extendedCode = ExtendedClrTypeCode.DBNull;
            }
            else
            {
                switch (dbType)
                {
                    case SqlDbType.BigInt:
                        if (value.GetType() == typeof(Int64))
                            extendedCode = ExtendedClrTypeCode.Int64;
                        else if (value.GetType() == typeof(SqlInt64))
                            extendedCode = ExtendedClrTypeCode.SqlInt64;
                        break;
                    case SqlDbType.Binary:
                    case SqlDbType.VarBinary:
                    case SqlDbType.Image:
                    case SqlDbType.Timestamp:
                        if (value.GetType() == typeof(byte[]))
                            extendedCode = ExtendedClrTypeCode.ByteArray;
                        else if (value.GetType() == typeof(SqlBinary))
                            extendedCode = ExtendedClrTypeCode.SqlBinary;
                        else if (value.GetType() == typeof(SqlBytes))
                            extendedCode = ExtendedClrTypeCode.SqlBytes;
                        else if (value.GetType() == typeof(StreamDataFeed))
                            extendedCode = ExtendedClrTypeCode.Stream;
                        break;
                    case SqlDbType.Bit:
                        if (value.GetType() == typeof(bool))
                            extendedCode = ExtendedClrTypeCode.Boolean;
                        else if (value.GetType() == typeof(SqlBoolean))
                            extendedCode = ExtendedClrTypeCode.SqlBoolean;
                        break;
                    case SqlDbType.Char:
                    case SqlDbType.NChar:
                    case SqlDbType.NText:
                    case SqlDbType.NVarChar:
                    case SqlDbType.Text:
                    case SqlDbType.VarChar:
                        if (value.GetType() == typeof(string))
                            extendedCode = ExtendedClrTypeCode.String;
                        // NOTE(review): missing 'else' before the TextDataFeed check below;
                        // harmless — a string never matches TextDataFeed, so the else-if chain
                        // cannot override the String result — but inconsistent with sibling cases.
                        if (value.GetType() == typeof(TextDataFeed))
                            extendedCode = ExtendedClrTypeCode.TextReader;
                        else if (value.GetType() == typeof(SqlString))
                            extendedCode = ExtendedClrTypeCode.SqlString;
                        else if (value.GetType() == typeof(char[]))
                            extendedCode = ExtendedClrTypeCode.CharArray;
                        else if (value.GetType() == typeof(SqlChars))
                            extendedCode = ExtendedClrTypeCode.SqlChars;
                        else if (value.GetType() == typeof(char))
                            extendedCode = ExtendedClrTypeCode.Char;
                        break;
                    case SqlDbType.Date:
                    case SqlDbType.DateTime2:
                    case SqlDbType.DateTime:
                    case SqlDbType.SmallDateTime:
                        if (value.GetType() == typeof(DateTime))
                            extendedCode = ExtendedClrTypeCode.DateTime;
                        else if (value.GetType() == typeof(SqlDateTime))
                            extendedCode = ExtendedClrTypeCode.SqlDateTime;
                        break;
                    case SqlDbType.Decimal:
                        if (value.GetType() == typeof(Decimal))
                            extendedCode = ExtendedClrTypeCode.Decimal;
                        else if (value.GetType() == typeof(SqlDecimal))
                            extendedCode = ExtendedClrTypeCode.SqlDecimal;
                        break;
                    case SqlDbType.Real:
                        if (value.GetType() == typeof(Single))
                            extendedCode = ExtendedClrTypeCode.Single;
                        else if (value.GetType() == typeof(SqlSingle))
                            extendedCode = ExtendedClrTypeCode.SqlSingle;
                        break;
                    case SqlDbType.Int:
                        if (value.GetType() == typeof(Int32))
                            extendedCode = ExtendedClrTypeCode.Int32;
                        else if (value.GetType() == typeof(SqlInt32))
                            extendedCode = ExtendedClrTypeCode.SqlInt32;
                        break;
                    case SqlDbType.Money:
                    case SqlDbType.SmallMoney:
                        if (value.GetType() == typeof(SqlMoney))
                            extendedCode = ExtendedClrTypeCode.SqlMoney;
                        else if (value.GetType() == typeof(Decimal))
                            extendedCode = ExtendedClrTypeCode.Decimal;
                        break;
                    case SqlDbType.Float:
                        if (value.GetType() == typeof(SqlDouble))
                            extendedCode = ExtendedClrTypeCode.SqlDouble;
                        else if (value.GetType() == typeof(Double))
                            extendedCode = ExtendedClrTypeCode.Double;
                        break;
                    case SqlDbType.UniqueIdentifier:
                        if (value.GetType() == typeof(SqlGuid))
                            extendedCode = ExtendedClrTypeCode.SqlGuid;
                        else if (value.GetType() == typeof(Guid))
                            extendedCode = ExtendedClrTypeCode.Guid;
                        break;
                    case SqlDbType.SmallInt:
                        if (value.GetType() == typeof(Int16))
                            extendedCode = ExtendedClrTypeCode.Int16;
                        else if (value.GetType() == typeof(SqlInt16))
                            extendedCode = ExtendedClrTypeCode.SqlInt16;
                        break;
                    case SqlDbType.TinyInt:
                        if (value.GetType() == typeof(Byte))
                            extendedCode = ExtendedClrTypeCode.Byte;
                        else if (value.GetType() == typeof(SqlByte))
                            extendedCode = ExtendedClrTypeCode.SqlByte;
                        break;
                    case SqlDbType.Variant:
                        // SqlDbType doesn't help us here, call general-purpose function
                        extendedCode = DetermineExtendedTypeCode(value);

                        // Some types aren't allowed for Variants but are for the general-purpose function.
                        //  Match behavior of other types and return invalid in these cases.
                        if (ExtendedClrTypeCode.SqlXml == extendedCode)
                        {
                            extendedCode = ExtendedClrTypeCode.Invalid;
                        }
                        break;
                    case SqlDbType.Udt:
                        throw ADP.DbTypeNotSupported(SqlDbType.Udt.ToString());
                    case SqlDbType.Time:
                        if (value.GetType() == typeof(TimeSpan))
                            extendedCode = ExtendedClrTypeCode.TimeSpan;
                        break;
                    case SqlDbType.DateTimeOffset:
                        if (value.GetType() == typeof(DateTimeOffset))
                            extendedCode = ExtendedClrTypeCode.DateTimeOffset;
                        break;
                    case SqlDbType.Xml:
                        if (value.GetType() == typeof(SqlXml))
                            extendedCode = ExtendedClrTypeCode.SqlXml;
                        // NOTE(review): same missing 'else' quirk as the character case above;
                        // harmless because an SqlXml never matches XmlDataFeed.
                        if (value.GetType() == typeof(XmlDataFeed))
                            extendedCode = ExtendedClrTypeCode.XmlReader;
                        else if (value.GetType() == typeof(System.String))
                            extendedCode = ExtendedClrTypeCode.String;
                        break;
                    case SqlDbType.Structured:
                        // Only multi-valued (TVP) targets accept these; uses 'is' (not exact
                        // type equality) so derived implementations qualify.
                        if (isMultiValued)
                        {
                            if (value is DataTable)
                            {
                                extendedCode = ExtendedClrTypeCode.DataTable;
                            }
                            else if (value is IEnumerable<SqlDataRecord>)
                            {
                                extendedCode = ExtendedClrTypeCode.IEnumerableOfSqlDataRecord;
                            }
                            else if (value is DbDataReader)
                            {
                                extendedCode = ExtendedClrTypeCode.DbDataReader;
                            }
                        }
                        break;
                    default:
                        // Leave as invalid
                        break;
                }
            }

            return extendedCode;
        }

        // Method to map from Type to ExtendedTypeCode
        internal static ExtendedClrTypeCode DetermineExtendedTypeCodeFromType(Type clrType)
        {
            ExtendedClrTypeCode resultCode;
            return s_typeToExtendedTypeCodeMap.TryGetValue(clrType, out resultCode) ?
                resultCode :
                ExtendedClrTypeCode.Invalid;
        }

        // Returns the ExtendedClrTypeCode that describes the given value
        internal static ExtendedClrTypeCode DetermineExtendedTypeCode(object value)
        {
            return value != null ?
                DetermineExtendedTypeCodeFromType(value.GetType()) :
                ExtendedClrTypeCode.Empty;
        }

        // returns a sqldbtype for the given type code
        internal static SqlDbType InferSqlDbTypeFromTypeCode(ExtendedClrTypeCode typeCode)
        {
            Debug.Assert(typeCode >= ExtendedClrTypeCode.Invalid && typeCode <= ExtendedClrTypeCode.Last, "Someone added a typecode without adding support here!");

            // +1 because the map's first slot is reserved for ExtendedClrTypeCode.Invalid (-1).
            return s_extendedTypeCodeToSqlDbTypeMap[(int)typeCode + 1];
        }

        // Infer SqlDbType from Type in the general case.  Katmai-only (or later) features that need to
        //  infer types should use InferSqlDbTypeFromType_Katmai.
        internal static SqlDbType InferSqlDbTypeFromType(Type type)
        {
            ExtendedClrTypeCode typeCode = DetermineExtendedTypeCodeFromType(type);
            SqlDbType returnType;
            if (ExtendedClrTypeCode.Invalid == typeCode)
            {
                returnType = InvalidSqlDbType;  // Return invalid type so caller can generate specific error
            }
            else
            {
                returnType = InferSqlDbTypeFromTypeCode(typeCode);
            }

            return returnType;
        }

        // Inference rules changed for Katmai-or-later-only cases.  Only features that are guaranteed to be
        //  running against Katmai and don't have backward compat issues should call this code path.
        //    example: TVP's are a new Katmai feature (no back compat issues) so can infer DATETIME2
        //        when mapping System.DateTime from DateTable or DbDataReader.  DATETIME2 is better because
        //        of greater range that can handle all DateTime values.
        /// <summary>
        /// Katmai (SQL 2008+) type inference: same as InferSqlDbTypeFromType, except
        /// System.DateTime maps to DATETIME2 (greater range covers all DateTime values).
        /// </summary>
        internal static SqlDbType InferSqlDbTypeFromType_Katmai(Type type)
        {
            SqlDbType returnType = InferSqlDbTypeFromType(type);
            if (SqlDbType.DateTime == returnType)
            {
                returnType = SqlDbType.DateTime2;
            }
            return returnType;
        }

        /// <summary>
        /// Converts an SmiExtendedMetaData instance to a SqlMetaData instance.
        /// XML columns carry their schema-collection name parts; all other types use the short ctor.
        /// </summary>
        internal static SqlMetaData SmiExtendedMetaDataToSqlMetaData(SmiExtendedMetaData source)
        {
            if (SqlDbType.Xml == source.SqlDbType)
            {
                return new SqlMetaData(source.Name,
                    source.SqlDbType,
                    source.MaxLength,
                    source.Precision,
                    source.Scale,
                    source.LocaleId,
                    source.CompareOptions,
                    source.TypeSpecificNamePart1,
                    source.TypeSpecificNamePart2,
                    source.TypeSpecificNamePart3,
                    true
                    );
            }

            return new SqlMetaData(source.Name,
                source.SqlDbType,
                source.MaxLength,
                source.Precision,
                source.Scale,
                source.LocaleId,
                source.CompareOptions,
                null);
        }

        // Convert SqlMetaData instance to an SmiExtendedMetaData instance.
        // Throws for UDT columns, which are not supported here.
        internal static SmiExtendedMetaData SqlMetaDataToSmiExtendedMetaData(SqlMetaData source)
        {
            // now map everything across to the extended metadata object
            string typeSpecificNamePart1 = null;
            string typeSpecificNamePart2 = null;
            string typeSpecificNamePart3 = null;

            if (SqlDbType.Xml == source.SqlDbType)
            {
                typeSpecificNamePart1 = source.XmlSchemaCollectionDatabase;
                typeSpecificNamePart2 = source.XmlSchemaCollectionOwningSchema;
                typeSpecificNamePart3 = source.XmlSchemaCollectionName;
            }
            else if (SqlDbType.Udt == source.SqlDbType)
            {
                throw ADP.DbTypeNotSupported(SqlDbType.Udt.ToString());
            }

            return new SmiExtendedMetaData(source.SqlDbType,
                source.MaxLength,
                source.Precision,
                source.Scale,
                source.LocaleId,
                source.CompareOptions,
                source.Name,
                typeSpecificNamePart1,
                typeSpecificNamePart2,
                typeSpecificNamePart3);
        }

        // compare SmiMetaData to SqlMetaData and determine if they are compatible.
        internal static bool IsCompatible(SmiMetaData firstMd, SqlMetaData secondMd)
        {
            return firstMd.SqlDbType == secondMd.SqlDbType &&
                firstMd.MaxLength == secondMd.MaxLength &&
                firstMd.Precision == secondMd.Precision &&
                firstMd.Scale == secondMd.Scale &&
                firstMd.CompareOptions == secondMd.CompareOptions &&
                firstMd.LocaleId == secondMd.LocaleId &&
                firstMd.SqlDbType != SqlDbType.Structured &&  // SqlMetaData doesn't support Structured types
                !firstMd.IsMultiValued;  // SqlMetaData doesn't have a "multivalued" option
        }

        // This is a modified version of SmiMetaDataFromSchemaTableRow above
        // Since CoreCLR doesn't have GetSchema, we need to infer the MetaData from the CLR Type alone
        internal static SmiExtendedMetaData SmiMetaDataFromType(string colName, Type colType)
        {
            // Determine correct SqlDbType.
            SqlDbType colDbType = InferSqlDbTypeFromType_Katmai(colType);
            if (InvalidSqlDbType == colDbType)
            {
                // Unknown through standard mapping, use VarBinary for columns that are Object typed, otherwise we error out.
                if (typeof(object) == colType)
                {
                    colDbType = SqlDbType.VarBinary;
                }
                else
                {
                    throw SQL.UnsupportedColumnTypeForSqlProvider(colName, colType.ToString());
                }
            }

            // Determine metadata modifier values per type (maxlength, precision, scale, etc)
            long maxLength = 0;
            byte precision = 0;
            byte scale = 0;
            switch (colDbType)
            {
                case SqlDbType.BigInt:
                case SqlDbType.Bit:
                case SqlDbType.DateTime:
                case SqlDbType.Float:
                case SqlDbType.Image:
                case SqlDbType.Int:
                case SqlDbType.Money:
                case SqlDbType.NText:
                case SqlDbType.Real:
                case SqlDbType.UniqueIdentifier:
                case SqlDbType.SmallDateTime:
                case SqlDbType.SmallInt:
                case SqlDbType.SmallMoney:
                case SqlDbType.Text:
                case SqlDbType.Timestamp:
                case SqlDbType.TinyInt:
                case SqlDbType.Variant:
                case SqlDbType.Xml:
                case SqlDbType.Date:
                    // These types require no metadata modifiers
                    break;
                case SqlDbType.Binary:
                case SqlDbType.VarBinary:
                    // source isn't specifying a size, so assume the Maximum
                    if (SqlDbType.Binary == colDbType)
                    {
                        maxLength = SmiMetaData.MaxBinaryLength;
                    }
                    else
                    {
                        maxLength = SmiMetaData.UnlimitedMaxLengthIndicator;
                    }
                    break;
                case SqlDbType.Char:
                case SqlDbType.VarChar:
                    // source isn't specifying a size, so assume the Maximum
                    if (SqlDbType.Char == colDbType)
                    {
                        maxLength = SmiMetaData.MaxANSICharacters;
                    }
                    else
                    {
                        maxLength = SmiMetaData.UnlimitedMaxLengthIndicator;
                    }
                    break;
                case SqlDbType.NChar:
                case SqlDbType.NVarChar:
                    // source isn't specifying a size, so assume the Maximum
                    if (SqlDbType.NChar == colDbType)
                    {
                        maxLength = SmiMetaData.MaxUnicodeCharacters;
                    }
                    else
                    {
                        maxLength = SmiMetaData.UnlimitedMaxLengthIndicator;
                    }
                    break;
                case SqlDbType.Decimal:
                    // Decimal requires precision and scale
                    precision = SmiMetaData.DefaultDecimal.Precision;
                    scale = SmiMetaData.DefaultDecimal.Scale;
                    break;
                case SqlDbType.Time:
                case SqlDbType.DateTime2:
                case SqlDbType.DateTimeOffset:
                    // requires scale
                    scale = SmiMetaData.DefaultTime.Scale;
                    break;
                case SqlDbType.Udt:
                case SqlDbType.Structured:
                default:
                    // These types are not supported from SchemaTable
                    throw SQL.UnsupportedColumnTypeForSqlProvider(colName, colType.ToString());
            }

            return new SmiExtendedMetaData(
                colDbType,
                maxLength,
                precision,
                scale,
                CultureInfo.CurrentCulture.LCID,
                SmiMetaData.GetDefaultForType(colDbType).CompareOptions,
                false,  // no support for multi-valued columns in a TVP yet
                null,   // no support for structured columns yet
                null,
                colName,
                null,
                null,
                null);
        }

        // Extract metadata for a single DataColumn
        // For decimal columns the best-fit precision/scale is derived by scanning every
        // row of the parent table.
        internal static SmiExtendedMetaData SmiMetaDataFromDataColumn(DataColumn column, DataTable parent)
        {
            SqlDbType dbType = InferSqlDbTypeFromType_Katmai(column.DataType);
            if (InvalidSqlDbType == dbType)
            {
                throw SQL.UnsupportedColumnTypeForSqlProvider(column.ColumnName, column.DataType.Name);
            }

            long maxLength = AdjustMaxLength(dbType, column.MaxLength);
            if (InvalidMaxLength == maxLength)
            {
                throw SQL.InvalidColumnMaxLength(column.ColumnName, maxLength);
            }

            byte precision;
            byte scale;
            if (column.DataType == typeof(SqlDecimal))
            {
                // Must scan all values in column to determine best-fit precision & scale
                Debug.Assert(null != parent);
                scale = 0;
                byte nonFractionalPrecision = 0; // finds largest non-Fractional portion of precision
                foreach (DataRow row in parent.Rows)
                {
                    object obj = row[column];
                    if (!(obj is DBNull))
                    {
                        SqlDecimal value = (SqlDecimal)obj;
                        if (!value.IsNull)
                        {
                            byte tempNonFractPrec = checked((byte)(value.Precision - value.Scale));
                            if (tempNonFractPrec > nonFractionalPrecision)
                            {
                                nonFractionalPrecision = tempNonFractPrec;
                            }

                            if (value.Scale > scale)
                            {
                                scale = value.Scale;
                            }
                        }
                    }
                }

                precision = checked((byte)(nonFractionalPrecision + scale));

                if (SqlDecimal.MaxPrecision < precision)
                {
                    throw SQL.InvalidTableDerivedPrecisionForTvp(column.ColumnName, precision);
                }
                else if (0 == precision)
                {
                    precision = 1;
                }
            }
            else if (dbType == SqlDbType.DateTime2 || dbType == SqlDbType.DateTimeOffset || dbType == SqlDbType.Time)
            {
                // Time types care about scale, too.  But have to infer maximums for these.
                precision = 0;
                scale = SmiMetaData.DefaultTime.Scale;
            }
            else if (dbType == SqlDbType.Decimal)
            {
                // Must scan all values in column to determine best-fit precision & scale
                Debug.Assert(null != parent);
                scale = 0;
                byte nonFractionalPrecision = 0; // finds largest non-Fractional portion of precision
                foreach (DataRow row in parent.Rows)
                {
                    object obj = row[column];
                    if (!(obj is DBNull))
                    {
                        // System.Decimal values are converted through SqlDecimal to read precision/scale.
                        SqlDecimal value = (SqlDecimal)(Decimal)obj;
                        byte tempNonFractPrec = checked((byte)(value.Precision - value.Scale));
                        if (tempNonFractPrec > nonFractionalPrecision)
                        {
                            nonFractionalPrecision = tempNonFractPrec;
                        }

                        if (value.Scale > scale)
                        {
                            scale = value.Scale;
                        }
                    }
                }

                precision = checked((byte)(nonFractionalPrecision + scale));

                if (SqlDecimal.MaxPrecision < precision)
                {
                    throw SQL.InvalidTableDerivedPrecisionForTvp(column.ColumnName, precision);
                }
                else if (0 == precision)
                {
                    precision = 1;
                }
            }
            else
            {
                precision = 0;
                scale = 0;
            }

            // In Net Core, since DataColumn.Locale is not accessible because it is internal and in a separate assembly,
            // we try to get the Locale from the parent
            CultureInfo columnLocale = ((null != parent) ? parent.Locale : CultureInfo.CurrentCulture);

            return new SmiExtendedMetaData(
                dbType,
                maxLength,
                precision,
                scale,
                columnLocale.LCID,
                SmiMetaData.DefaultNVarChar.CompareOptions,
                false,  // no support for multi-valued columns in a TVP yet
                null,   // no support for structured columns yet
                null,   // no support for structured columns yet
                column.ColumnName,
                null,
                null,
                null);
        }

        /// <summary>
        /// Validates/adjusts a declared max length for the given type: negative or
        /// over-limit fixed-length values become InvalidMaxLength; over-limit var-length
        /// values are promoted to the MAX (unlimited) indicator.
        /// </summary>
        internal static long AdjustMaxLength(SqlDbType dbType, long maxLength)
        {
            if (SmiMetaData.UnlimitedMaxLengthIndicator != maxLength)
            {
                if (maxLength < 0)
                {
                    maxLength = InvalidMaxLength;
                }

                switch (dbType)
                {
                    case SqlDbType.Binary:
                        if (maxLength > SmiMetaData.MaxBinaryLength)
                        {
                            maxLength = InvalidMaxLength;
                        }
                        break;
                    case SqlDbType.Char:
                        if (maxLength > SmiMetaData.MaxANSICharacters)
                        {
                            maxLength = InvalidMaxLength;
                        }
                        break;
                    case SqlDbType.NChar:
                        if (maxLength > SmiMetaData.MaxUnicodeCharacters)
                        {
                            maxLength = InvalidMaxLength;
                        }
                        break;
                    case SqlDbType.NVarChar:
                        // Promote to MAX type if it won't fit in a normal type
                        if (SmiMetaData.MaxUnicodeCharacters < maxLength)
                        {
                            maxLength = SmiMetaData.UnlimitedMaxLengthIndicator;
                        }
                        break;
                    case SqlDbType.VarBinary:
                        // Promote to MAX type if it won't fit in a normal type
                        if (SmiMetaData.MaxBinaryLength < maxLength)
                        {
                            maxLength = SmiMetaData.UnlimitedMaxLengthIndicator;
                        }
                        break;
                    case SqlDbType.VarChar:
                        // Promote to MAX type if it won't fit in a normal type
                        if (SmiMetaData.MaxANSICharacters < maxLength)
                        {
                            maxLength = SmiMetaData.UnlimitedMaxLengthIndicator;
                        }
                        break;
                    default:
                        break;
                }
            }

            return maxLength;
        }
    }
}
using System;
using System.Text;
using System.Collections.Generic;
using System.Linq;
using Antlr4.Runtime.Misc;
using IFC4.Generators;

namespace Express
{
    /// <summary>
    /// ANTLR parse-tree listener that walks an EXPRESS schema (e.g. IFC) and builds
    /// TypeData/FunctionData models, delegating language-specific rendering decisions
    /// to an ILanguageGenerator.
    /// </summary>
    public class ExpressListener : ExpressBaseListener
    {
        private ILanguageGenerator generator;
        // Keyed by EXPRESS type/entity name; entities may be pre-registered as stubs
        // when first seen as a sub/supertype reference.
        private Dictionary<string,TypeData> typeData = new Dictionary<string,TypeData>();
        private Dictionary<string,FunctionData> funcData = new Dictionary<string, FunctionData>();

        public Dictionary<string,TypeData> TypeData
        {
            get{return typeData;}
        }

        public Dictionary<string,FunctionData> FunctionData
        {
            get{return funcData;}
        }

        public ExpressListener(ILanguageGenerator generator)
        {
            this.generator = generator;
        }

        public override void EnterSchemaDecl(ExpressParser.SchemaDeclContext context)
        {
        }

        // TYPE
        // Builds a WrapperType / EnumType / SelectType for a TYPE declaration,
        // depending on which alternative of typeSel is present.
        public override void EnterTypeBody(ExpressParser.TypeBodyContext context)
        {
            var name = context.typeDef().SimpleId().GetText();

            TypeData td = null;
            int rank = 0;
            bool returnsCollection = false;
            var isGeneric = false;

            if(context.typeSel().collectionType() != null)
            {
                var wrappedType = ParseCollectionType(context.typeSel().collectionType(), ref rank, ref returnsCollection, ref isGeneric);
                td = new WrapperType(name, wrappedType, generator, returnsCollection, rank);
            }
            else if(context.typeSel().simpleType() != null)
            {
                var wrappedType = ParseSimpleType(context.typeSel().simpleType());
                td = new WrapperType(name, wrappedType, generator, returnsCollection, rank);
            }
            else if(context.typeSel().namedType() != null)
            {
                var wrappedType = ParseNamedType(context.typeSel().namedType());
                td = new WrapperType(name, wrappedType, generator, returnsCollection, rank);
            }
            else if(context.typeSel().enumType() != null)
            {
                // Enum/select member lists are read as raw comma-separated text.
                var values = context.typeSel().enumType().enumValues().GetText().Split(',');
                td = new EnumType(name, generator, values);
            }
            else if(context.typeSel().selectType() != null)
            {
                var values = context.typeSel().selectType().selectValues().GetText().Split(',');
                td = new SelectType(name, generator, values);
            }

            typeData.Add(name, td);
        }

        // ENTITY
        // Builds an Entity with its super/sub types and explicit/derived/inverse attributes.
        public override void EnterEntityDecl(ExpressParser.EntityDeclContext context)
        {
            Entity entity;
            var entityName = context.entityHead().entityDef().SimpleId().GetText();
            if(typeData.ContainsKey(entityName))
            {
                // TypeData was created previously possible as a reference
                // to a sub or super type.
                entity = (Entity)typeData[entityName];
            }
            else{
                entity = new Entity(entityName, generator);
                typeData.Add(entityName, entity);
            }

            var subSuper = context.entityHead().subSuper();

            // SUPERTYPE
            if(subSuper.supertypeDecl() != null)
            {
                var super = subSuper.supertypeDecl();
                entity.IsAbstract = super.ABSTRACT() != null;
                var factor = super.supertypeExpr().supertypeFactor();
                // IFC: Use choice only.
                if(factor[0].choice() != null)
                {
                    foreach(var superRef in factor[0].choice().supertypeExpr())
                    {
                        var superName = superRef.supertypeFactor()[0].entityRef().SimpleId().GetText();
                        Entity sup;
                        // Forward references create stub entities that are filled in later.
                        if(typeData.ContainsKey(superName))
                        {
                            sup = (Entity)typeData[superName];
                        }
                        else
                        {
                            sup = new Entity(superName, generator);
                            typeData.Add(superName, sup);
                        }
                        entity.Supers.Add(sup);
                    }
                }
            }

            // SUBTYPE
            if(subSuper.subtypeDecl() != null)
            {
                foreach(var subRef in subSuper.subtypeDecl().entityRef())
                {
                    var subName = subRef.SimpleId().GetText();
                    Entity sub;
                    if(typeData.ContainsKey(subName))
                    {
                        sub = (Entity)typeData[subName];
                    }
                    else
                    {
                        sub = new Entity(subName, generator);
                        typeData.Add(subName, sub);
                    }
                    entity.Subs.Add(sub);
                }
            }

            if(context.entityBody().attributes() != null)
            {
                var attrs = context.entityBody().attributes();

                // EXPLICIT attributes (plain and redeclared forms).
                foreach(var expl in attrs.explicitClause())
                {
                    if(expl.explDef() != null)
                    {
                        var optional = expl.explDef().OPTIONAL() != null;
                        foreach(var attrDef in expl.explDef().attrDef())
                        {
                            var rank = 0;
                            var isCollection = false;
                            var name = "";
                            var isGeneric = false;
                            if(attrDef.SimpleId() != null)
                            {
                                name = attrDef.SimpleId().GetText();
                            }
                            else if(attrDef.Path() != null)
                            {
                                name = attrDef.Path().GetText();
                            }
                            var type = ParseCollectionTypeSel(expl.explDef().collectionTypeSel(), ref rank, ref isCollection, ref isGeneric);
                            var ad = new AttributeData(generator, name, type, rank, isCollection, isGeneric, false, optional, false);
                            entity.Attributes.Add(ad);
                            if(ad.Type == null)
                            {
                                throw new Exception($"The Type of attribute data, {ad.Name}, is null.");
                            }
                        }
                    }
                    else if(expl.explRedef() != null)
                    {
                        var rank = 0;
                        var isCollection = false;
                        var name = "";
                        var attrRef = expl.explRedef().attrRef();
                        var isGeneric = false;
                        if(attrRef.SimpleId() != null)
                        {
                            name = attrRef.SimpleId().GetText();
                        }
                        else if (attrRef.Path() != null)
                        {
                            name = attrRef.Path().GetText();
                        }
                        var optional = expl.explRedef().OPTIONAL() != null;
                        var type = ParseCollectionTypeSel(expl.explRedef().collectionTypeSel(), ref rank, ref isCollection, ref isGeneric);
                        var ad = new AttributeData(generator, name, type, rank, isCollection, isGeneric, false, optional, false);
                        entity.Attributes.Add(ad);
                        if(ad.Type == null)
                        {
                            throw new Exception($"The Type of attribute data, {ad.Name}, is null.");
                        }
                    }
                }

                // DERIVE
                foreach(var der in attrs.deriveClause())
                {
                    foreach(var derAttr in der.derivedAttr())
                    {
                        var name = "";
                        var rank = 0;
                        bool isCollection = false;
                        bool isGeneric = false;

                        if(derAttr.deriveDef() != null)
                        {
                            if(derAttr.deriveDef().attrDef().SimpleId() != null)
                            {
                                name = derAttr.deriveDef().attrDef().SimpleId().GetText();
                            }
                            else if(derAttr.deriveDef().attrDef().Path() != null)
                            {
                                name = derAttr.deriveDef().attrDef().Path().GetText();
                            }
                            var type = ParseCollectionTypeSel(derAttr.deriveDef().collectionTypeSel(), ref rank, ref isCollection, ref isGeneric);
                            var ad = new AttributeData(generator, name, type, rank, isCollection, isGeneric, true, false, false);
                            entity.Attributes.Add(ad);
                        }
                        else if(derAttr.derivedRedef() != null)
                        {
                            if(derAttr.derivedRedef().attrRef().SimpleId() != null)
                            {
                                name = derAttr.derivedRedef().attrRef().SimpleId().GetText();
                            }
                            else if(derAttr.derivedRedef().attrRef().Path() != null)
                            {
                                name = derAttr.derivedRedef().attrRef().Path().GetText();
                            }
                            var type = ParseCollectionTypeSel(derAttr.derivedRedef().collectionTypeSel(), ref rank, ref isCollection, ref isGeneric);
                            var ad = new AttributeData(generator, name, type, rank, isCollection, isGeneric, true, false, false);
                            entity.Attributes.Add(ad);
                        }
                    }
                }

                // INVERSE
                foreach(var inv in attrs.inverseClause())
                {
                    foreach(var invAttr in inv.inverseAttr())
                    {
                        var name = "";
                        var rank = 0;
                        bool isCollection = false;
                        bool optional = false; // inverse attributes are never optional here
                        bool inverse = true;

                        if(invAttr.inverseDef() != null)
                        {
                            if(invAttr.inverseDef().attrDef().SimpleId() != null)
                            {
                                name = invAttr.inverseDef().attrDef().SimpleId().GetText();
                            }
                            else if(invAttr.inverseDef().attrDef().Path() != null)
                            {
                                name = invAttr.inverseDef().attrDef().Path().GetText();
                            }
                            var type = ParseInverseType(invAttr.inverseDef().inverseType(), ref isCollection, ref rank);
                            var ad = new AttributeData(generator, name, type, rank, isCollection, false, false, optional, inverse);
                            entity.Attributes.Add(ad);
                        }
                        else if(invAttr.inverseRedef() != null)
                        {
                            if(invAttr.inverseRedef().attrRef()[0].SimpleId() != null)
                            {
                                name = invAttr.inverseRedef().attrRef()[0].SimpleId().GetText();
                            }
                            else if(invAttr.inverseRedef().attrRef()[0].Path() != null)
                            {
                                name = invAttr.inverseRedef().attrRef()[0].Path().GetText();
                            }
                            var type = ParseInverseType(invAttr.inverseRedef().inverseType(), ref isCollection, ref rank);
                            var ad = new AttributeData(generator, name, type, rank, isCollection, false, false, optional, inverse);
                            entity.Attributes.Add(ad);
                        }
                    }
                }
            }

            // Sanity checks on the constructed attribute set.
            if(entity.Attributes.Any(a=>a.IsCollection && a.Rank == 0))
            {
                throw new Exception("I found an attribute with IsCollection=true, but a rank of 0.");
            }

            // NOTE(review): the message below contradicts the condition — it fires when an
            // inverse attribute IS optional, but says "marked as not optional"; confirm
            // intent before changing the string.
            if(entity.Attributes.Any(a=>a.IsInverse && a.IsOptional))
            {
                throw new Exception("I found an attribute with IsInverse=true, but marked as not optional.");
            }
        }

        // FUNCTION
        // Records a FunctionData (name, return type, parameters) for a FUNCTION head.
        public override void EnterFuncHead(ExpressParser.FuncHeadContext context)
        {
            var fName = context.funcDef().SimpleId().GetText();

            var fCollection = false;
            var fRank = 0;
            var fGeneric = false;
            var fType = "";

            // Parse the return type
            if(context.returnTypeChoice().allTypeSel() != null)
            {
                fType = ParseAllTypeSel(context.returnTypeChoice().allTypeSel(), ref fCollection, ref fGeneric);
            }
            else if(context.returnTypeChoice().collectionType() != null)
            {
                fType = ParseCollectionType(context.returnTypeChoice().collectionType(), ref fRank, ref fCollection, ref fGeneric);
            }

            // One formalParam group may declare several names sharing one type.
            var parameters = new List<ParameterData>();
            foreach(var formalParam in context.formalParams())
            {
                foreach(var p in formalParam.formalParam())
                {
                    var pCollection = false;
                    var pGeneric = false;
                    var pRank = 0;
                    var pType = "";

                    if(p.returnTypeChoice().allTypeSel() != null)
                    {
                        pType = ParseAllTypeSel(p.returnTypeChoice().allTypeSel(), ref pCollection, ref pGeneric);
                    }
                    else if(p.returnTypeChoice().collectionType() != null)
                    {
                        pType = ParseCollectionType(p.returnTypeChoice().collectionType(), ref pRank, ref pCollection, ref pGeneric);
                    }

                    foreach(var def in p.paramDef())
                    {
                        var pName = def.SimpleId().GetText();
                        parameters.Add(new ParameterData(generator, pName, pCollection, pRank, pGeneric, pType));
                    }
                }
            }

            var returnType = new TypeReference(generator, fType, fCollection, fRank, fGeneric);
            var fd = new Express.FunctionData(fName, returnType, parameters);
            funcData.Add(context.funcDef().SimpleId().GetText(), fd);
        }

        // Returns the referenced entity name; SET/BAG inverse types are collections of rank 1.
        private string ParseInverseType(ExpressParser.InverseTypeContext context, ref bool isCollection, ref int rank)
        {
            if(context.SET() != null || context.BAG() != null)
            {
                isCollection = true;
                rank++;
            }
            return context.entityRef().SimpleId().GetText();
        }

        // Resolves any allTypeSel alternative (simple/named/pseudo/aggregate/conformant) to a type name.
        private string ParseAllTypeSel(ExpressParser.AllTypeSelContext context, ref bool isCollection, ref bool isGeneric)
        {
            if(context.simpleType() != null)
            {
                return ParseSimpleType(context.simpleType());
            }
            else if(context.namedType() != null)
            {
                if(context.namedType().typeRef() != null)
                {
                    return context.namedType().typeRef().SimpleId().GetText();
                }
                else if(context.namedType().entityRef() != null)
                {
                    return context.namedType().entityRef().SimpleId().GetText();
                }
            }
            else if(context.pseudoType() != null)
            {
                if(context.pseudoType().genericType() != null)
                {
                    isGeneric = true;
                    return context.pseudoType().genericType().typeLabel().SimpleId().GetText();
                }
            }
            else if(context.aggregateType() != null)
            {
                // not used in IFC
                throw new NotImplementedException();
            }
            else if(context.conformantType() != null)
            {
                return ParseConformantType(context.conformantType(), ref isCollection, ref isGeneric);
            }
            throw new Exception($"I could not parse the all type selection with context: {context.GetText()}");
        }

        // Unwraps a conformant ARRAY/BAG/LIST/SET to its element type.
        // NOTE(review): isCollection is forwarded but never set true here — confirm whether
        // conformant collections should mark the result as a collection.
        private string ParseConformantType(ExpressParser.ConformantTypeContext context, ref bool isCollection, ref bool isGeneric)
        {
            if(context.conformantArray() != null)
            {
                return ParseAllTypeSel(context.conformantArray().allTypeSel(), ref isCollection, ref isGeneric);
            }
            else if(context.conformantBag() != null)
            {
                return ParseAllTypeSel(context.conformantBag().allTypeSel(), ref isCollection, ref isGeneric);
            }
            else if(context.conformantList() != null)
            {
                return ParseAllTypeSel(context.conformantList().allTypeSel(), ref isCollection, ref isGeneric);
            }
            else if(context.conformantSet() != null)
            {
                return ParseAllTypeSel(context.conformantSet().allTypeSel(), ref isCollection, ref isGeneric);
            }
            throw new Exception($"I could not parse the conformant type with context: {context.GetText()}");
        }

        // Simple types (INTEGER, REAL, STRING, ...) are rendered by the target-language generator.
        private string ParseSimpleType(ExpressParser.SimpleTypeContext context)
        {
            return generator.ParseSimpleType(context);
        }

        private string ParseNamedType(ExpressParser.NamedTypeContext context)
        {
            if(context.typeRef() != null)
            {
                return context.typeRef().SimpleId().GetText();
            }
            else if(context.entityRef() != null)
            {
                return context.entityRef().SimpleId().GetText();
            }
            return null;
        }

        // Each nested ARRAY/LIST/SET/BAG increments rank; recursion bottoms out in
        // ParseCollectionTypeSel on a simple/named/generic element type.
        private string ParseCollectionType(ExpressParser.CollectionTypeContext context, ref int rank, ref bool isCollection, ref bool isGeneric)
        {
            rank++;
            isCollection = true;

            if(context.arrayType() != null)
            {
                return ParseCollectionTypeSel(context.arrayType().collectionTypeSel(), ref rank, ref isCollection, ref isGeneric);
            }
            else if(context.listType() != null)
            {
                return ParseCollectionTypeSel(context.listType().collectionTypeSel(), ref rank, ref isCollection, ref isGeneric);
            }
            else if(context.setType() != null)
            {
                return ParseCollectionTypeSel(context.setType().collectionTypeSel(), ref rank, ref isCollection, ref isGeneric);
            }
            else if(context.bagType() != null)
            {
                return ParseCollectionTypeSel(context.bagType().collectionTypeSel(), ref rank, ref isCollection, ref isGeneric);
            }
            throw new Exception("I could not parse the collection type.");
        }

        private string ParseCollectionTypeSel(ExpressParser.CollectionTypeSelContext context, ref int rank, ref bool isCollection, ref bool isGeneric)
        {
            if(context.collectionType() != null)
            {
                return ParseCollectionType(context.collectionType(), ref rank, ref isCollection, ref isGeneric);
            }
            else if(context.simpleType() != null)
            {
                return ParseSimpleType(context.simpleType());
            }
            else if(context.namedType() != null)
            {
                return ParseNamedType(context.namedType());
            }
            else if(context.genericType() != null)
            {
                isGeneric = true;
                return "T";
            }
            // NOTE(review): typo in the message below ("te" -> "the"); left unchanged here
            // because runtime strings must not be altered in a documentation-only pass.
            throw new Exception($"I could not parse te collection type selection with context: {context.GetText()}");
        }
    }
}
using System;
using System.Collections.Generic;
using CocosSharp;
using Random = CocosSharp.CCRandom;

namespace tests
{
    // Shared base layer for the ease-action demos: creates the three test
    // sprites and lines them up along the left edge of the screen.
    public class EaseSpriteDemo : TestNavigationLayer
    {
        protected CCSprite m_grossini;
        protected CCSprite m_kathia;
        protected String m_strTitle;   // NOTE(review): never read in this file
        protected CCSprite m_tamara;

        public override string Title
        {
            get { return "No title"; }
        }

        public EaseSpriteDemo() : base()
        {
            m_grossini = new CCSprite(TestResource.s_pPathGrossini);
            m_tamara = new CCSprite(TestResource.s_pPathSister1);
            m_kathia = new CCSprite(TestResource.s_pPathSister2);

            AddChild(m_grossini, 3);
            AddChild(m_kathia, 2);
            AddChild(m_tamara, 1);
        }

        public override void OnEnter()
        {
            base.OnEnter();

            CCSize windowSize = Layer.VisibleBoundsWorldspace.Size;

            // Place the sprites at 30% / 60% / 90% of the window height,
            // inset from the left edge by half a sprite width plus 10px.
            float spirteHalfWidth = m_grossini.ContentSize.Width / 2.0f;
            m_grossini.Position = new CCPoint(spirteHalfWidth + 10.0f, windowSize.Height * 0.3f);
            m_kathia.Position = new CCPoint(spirteHalfWidth + 10.0f, windowSize.Height * 0.6f);
            m_tamara.Position = new CCPoint(spirteHalfWidth + 10.0f, windowSize.Height * 0.9f);
        }

        // Navigation callbacks: rebuild the scene around the restarted /
        // next / previous test layer.
        public override void RestartCallback(object sender)
        {
            CCScene s = new EaseActionsTestScene();
            s.AddChild(EaseTest.restartEaseAction());
            Director.ReplaceScene(s);
        }

        public override void NextCallback(object sender)
        {
            CCScene s = new EaseActionsTestScene();
            s.AddChild(EaseTest.nextEaseAction());
            Director.ReplaceScene(s);
        }

        public override void BackCallback(object sender)
        {
            CCScene s = new EaseActionsTestScene();
            s.AddChild(EaseTest.backEaseAction());
            Director.ReplaceScene(s);
        }

        // Layout used by the two-sprite demos: hides kathia entirely.
        public void PositionForTwo()
        {
            m_grossini.Position = new CCPoint(60, 120);
            m_tamara.Position = new CCPoint(60, 220);
            m_kathia.Visible = false;
        }
    }

    // Plain CCEaseIn / CCEaseOut, then stops all three actions by tag.
    public class SpriteEase : EaseSpriteDemo
    {
        public override void OnEnter()
        {
            base.OnEnter();

            var size = Layer.VisibleBoundsWorldspace.Size;

            var move = new CCMoveBy(3, new CCPoint(size.Width - 130, 0));
            var move_back = (CCFiniteTimeAction)move.Reverse();

            var move_ease_in = new CCEaseIn(move, 2.5f);
            var move_ease_in_back = move_ease_in.Reverse();

            var move_ease_out = new CCEaseOut(move, 2.5f);
            var move_ease_out_back = move_ease_out.Reverse();

            var delay = new CCDelayTime(0.25f);

            // All three sequences share Tag = 1 so testStopAction can stop them.
            var seq1 = new CCSequence(move, delay, move_back, delay) { Tag = 1 };
            var seq2 = new CCSequence(move_ease_in, delay, move_ease_in_back, delay) { Tag = 1 };
            var seq3 = new CCSequence(move_ease_out, delay, move_ease_out_back, delay) { Tag = 1 };

            m_grossini.RepeatForever(seq1);
            m_tamara.RepeatForever(seq2);
            m_kathia.RepeatForever(seq3);

            Schedule(testStopAction, 6.25f);
        }

        public override string Title
        {
            get { return "EaseIn - EaseOut - Stop"; }
        }

        // Fires once (unschedules itself) and stops the tagged sequences.
        public void testStopAction(float dt)
        {
            Unschedule(testStopAction);
            m_kathia.StopAction(1);
            m_tamara.StopAction(1);
            m_grossini.StopAction(1);
        }
    }

    // CCEaseInOut at three different rates (0.65 / 1.35 / 1.0).
    public class SpriteEaseInOut : EaseSpriteDemo
    {
        public override void OnEnter()
        {
            base.OnEnter();

            var size = Layer.VisibleBoundsWorldspace.Size;
            var move = new CCMoveBy(3, new CCPoint(size.Width - 130, 0));

            var move_ease_inout1 = new CCEaseInOut(move, 0.65f);
            var move_ease_inout_back1 = move_ease_inout1.Reverse();

            var move_ease_inout2 = new CCEaseInOut(move, 1.35f);
            var move_ease_inout_back2 = move_ease_inout2.Reverse();

            var move_ease_inout3 = new CCEaseInOut(move, 1.0f);
            var move_ease_inout_back3 = move_ease_inout3.Reverse() as CCFiniteTimeAction;

            var delay = new CCDelayTime(0.25f);

            var seq1 = new CCSequence(move_ease_inout1, delay, move_ease_inout_back1, delay);
            var seq2 = new CCSequence(move_ease_inout2, delay, move_ease_inout_back2, delay);
            var seq3 = new CCSequence(move_ease_inout3, delay, move_ease_inout_back3, delay);

            m_tamara.RunAction(new CCRepeatForever((CCFiniteTimeAction)seq1));
            m_kathia.RunAction(new CCRepeatForever((CCFiniteTimeAction)seq2));
            m_grossini.RunAction(new CCRepeatForever((CCFiniteTimeAction)seq3));
        }

        public override string Title
        {
            get { return "EaseInOut and rates"; }
        }
    }

    // Exponential ease-in vs ease-out against the plain move.
    public class SpriteEaseExponential : EaseSpriteDemo
    {
        public override void OnEnter()
        {
            base.OnEnter();

            var s = Layer.VisibleBoundsWorldspace.Size;

            var move = new CCMoveBy(3, new CCPoint(s.Width - 130, 0));
            var move_back = move.Reverse();

            var move_ease_in = new CCEaseExponentialIn(move);
            var move_ease_in_back = move_ease_in.Reverse();

            var move_ease_out = new CCEaseExponentialOut(move);
            var move_ease_out_back = move_ease_out.Reverse();

            var delay = new CCDelayTime(0.25f);

            var seq1 = new CCSequence(move, delay, move_back, delay);
            var seq2 = new CCSequence(move_ease_in, delay, move_ease_in_back, delay);
            var seq3 = new CCSequence(move_ease_out, delay, move_ease_out_back, delay);

            m_grossini.RunAction(new CCRepeatForever(seq1));
            m_tamara.RunAction(new CCRepeatForever(seq2));
            m_kathia.RunAction(new CCRepeatForever(seq3));
        }

        public override string Title
        {
            get { return "ExpIn - ExpOut actions"; }
        }
    }

    // Exponential ease-in-out on two sprites (PositionForTwo layout).
    public class SpriteEaseExponentialInOut : EaseSpriteDemo
    {
        public override void OnEnter()
        {
            base.OnEnter();

            var s = Layer.VisibleBoundsWorldspace.Size;

            var move = new CCMoveBy(3, new CCPoint(s.Width - 130, 0));
            var move_back = move.Reverse();

            var move_ease = new CCEaseExponentialInOut(move);
            var move_ease_back = move_ease.Reverse(); // reversed copy of the eased action

            var delay = new CCDelayTime(0.25f);

            var seq1 = new CCSequence(move, delay, move_back, delay);
            var seq2 = new CCSequence(move_ease, delay, move_ease_back, delay);

            PositionForTwo();

            m_grossini.RunAction(new CCRepeatForever(seq1));
            m_tamara.RunAction(new CCRepeatForever(seq2));
        }

        public override string Title
        {
            get { return "EaseExponentialInOut action"; }
        }
    }

    // Sine ease-in vs ease-out. NOTE(review): unlike the other demos this one
    // measures the travel distance via VisibleBoundsWorldspace.Right().X.
    public class SpriteEaseSine : EaseSpriteDemo
    {
        public override void OnEnter()
        {
            base.OnEnter();

            var move = new CCMoveBy(3, new CCPoint(VisibleBoundsWorldspace.Right().X - 130, 0));
            var move_back = move.Reverse();

            var move_ease_in = new CCEaseSineIn(move);
            var move_ease_in_back = move_ease_in.Reverse();

            var move_ease_out = new CCEaseSineOut(move);
            var move_ease_out_back = move_ease_out.Reverse();

            var delay = new CCDelayTime(0.25f);

            var seq1 = new CCSequence(move, delay, move_back, delay);
            var seq2 = new CCSequence(move_ease_in, delay, move_ease_in_back, delay);
            var seq3 = new CCSequence(move_ease_out, delay, move_ease_out_back, delay);

            m_grossini.RepeatForever(seq1);
            m_tamara.RepeatForever(seq2);
            m_kathia.RepeatForever(seq3);
        }

        public override string Title
        {
            get { return "EaseSineIn - EaseSineOut"; }
        }
    }

    // Sine ease-in-out on two sprites.
    public class SpriteEaseSineInOut : EaseSpriteDemo
    {
        public override void OnEnter()
        {
            base.OnEnter();

            var s = Layer.VisibleBoundsWorldspace.Size;

            var move = new CCMoveBy(3, new CCPoint(s.Width - 130, 0));
            var move_back = move.Reverse();

            var move_ease = new CCEaseSineInOut(move);
            var move_ease_back = move_ease.Reverse();

            var delay = new CCDelayTime(0.25f);

            var seq1 = new CCSequence(move, delay, move_back, delay);
            var seq2 = new CCSequence(move_ease, delay, move_ease_back, delay);

            PositionForTwo();

            m_grossini.RunAction(new CCRepeatForever(seq1));
            m_tamara.RunAction(new CCRepeatForever(seq2));
        }

        public override string Title
        {
            get { return "EaseSineInOut action"; }
        }
    }

    // Elastic ease-in vs ease-out.
    public class SpriteEaseElastic : EaseSpriteDemo
    {
        public override void OnEnter()
        {
            base.OnEnter();
            var s =
Layer.VisibleBoundsWorldspace.Size;

            var move = new CCMoveBy(3, new CCPoint(s.Width - 130, 0));
            var move_back = move.Reverse();

            var move_ease_in = new CCEaseElasticIn(move);
            var move_ease_in_back = move_ease_in.Reverse();

            var move_ease_out = new CCEaseElasticOut(move);
            var move_ease_out_back = move_ease_out.Reverse();

            var delay = new CCDelayTime(0.25f);

            var seq1 = new CCSequence(move, delay, move_back, delay);
            var seq2 = new CCSequence(move_ease_in, delay, move_ease_in_back, delay);
            var seq3 = new CCSequence(move_ease_out, delay, move_ease_out_back, delay);

            m_grossini.RunAction(new CCRepeatForever(seq1));
            m_tamara.RunAction(new CCRepeatForever(seq2));
            m_kathia.RunAction(new CCRepeatForever(seq3));
        }

        public override string Title
        {
            get { return "Elastic In - Out actions"; }
        }
    }

    // Elastic ease-in-out at three different periods (0.3 / 0.45 / 0.6).
    public class SpriteEaseElasticInOut : EaseSpriteDemo
    {
        public override void OnEnter()
        {
            base.OnEnter();

            var s = Layer.VisibleBoundsWorldspace.Size;
            var move = new CCMoveBy(3, new CCPoint(s.Width - 130, 0));

            var move_ease_inout1 = new CCEaseElasticInOut(move, 0.3f);
            var move_ease_inout_back1 = move_ease_inout1.Reverse();

            var move_ease_inout2 = new CCEaseElasticInOut(move, 0.45f);
            var move_ease_inout_back2 = move_ease_inout2.Reverse();

            var move_ease_inout3 = new CCEaseElasticInOut(move, 0.6f);
            var move_ease_inout_back3 = move_ease_inout3.Reverse();

            var delay = new CCDelayTime(0.25f);

            var seq1 = new CCSequence(move_ease_inout1, delay, move_ease_inout_back1, delay);
            var seq2 = new CCSequence(move_ease_inout2, delay, move_ease_inout_back2, delay);
            var seq3 = new CCSequence(move_ease_inout3, delay, move_ease_inout_back3, delay);

            m_tamara.RunAction(new CCRepeatForever(seq1));
            m_kathia.RunAction(new CCRepeatForever(seq2));
            m_grossini.RunAction(new CCRepeatForever(seq3));
        }

        public override string Title
        {
            get { return "EaseElasticInOut action"; }
        }
    }

    // Bounce ease-in vs ease-out.
    public class SpriteEaseBounce : EaseSpriteDemo
    {
        public override void OnEnter()
        {
            base.OnEnter();

            var s = Layer.VisibleBoundsWorldspace.Size;

            var move = new CCMoveBy(3, new CCPoint(s.Width - 130, 0));
            var move_back = move.Reverse();

            var move_ease_in = new CCEaseBounceIn(move);
            var move_ease_in_back = move_ease_in.Reverse();

            var move_ease_out = new CCEaseBounceOut(move);
            var move_ease_out_back = move_ease_out.Reverse();

            var delay = new CCDelayTime(0.25f);

            var seq1 = new CCSequence(move, delay, move_back, delay);
            var seq2 = new CCSequence(move_ease_in, delay, move_ease_in_back, delay);
            var seq3 = new CCSequence(move_ease_out, delay, move_ease_out_back, delay);

            m_grossini.RunAction(new CCRepeatForever(seq1));
            m_tamara.RunAction(new CCRepeatForever(seq2));
            m_kathia.RunAction(new CCRepeatForever(seq3));
        }

        public override string Title
        {
            get { return "Bounce In - Out actions"; }
        }
    }

    // Bounce ease-in-out on two sprites.
    public class SpriteEaseBounceInOut : EaseSpriteDemo
    {
        public override void OnEnter()
        {
            base.OnEnter();

            var s = Layer.VisibleBoundsWorldspace.Size;

            var move = new CCMoveBy(3, new CCPoint(s.Width - 130, 0));
            var move_back = move.Reverse();

            var move_ease = new CCEaseBounceInOut(move);
            var move_ease_back = move_ease.Reverse();

            var delay = new CCDelayTime(0.25f);

            var seq1 = new CCSequence(move, delay, move_back, delay);
            var seq2 = new CCSequence(move_ease, delay, move_ease_back, delay);

            PositionForTwo();

            m_grossini.RunAction(new CCRepeatForever(seq1));
            m_tamara.RunAction(new CCRepeatForever(seq2));
        }

        public override string Title
        {
            get { return "EaseBounceInOut action"; }
        }
    }

    // Back ease-in vs ease-out.
    public class SpriteEaseBack : EaseSpriteDemo
    {
        public override void OnEnter()
        {
            base.OnEnter();

            var s = Layer.VisibleBoundsWorldspace.Size;

            var move = new CCMoveBy(3, new CCPoint(s.Width - 130, 0));
            var move_back = move.Reverse();

            var move_ease_in = new CCEaseBackIn(move);
            var move_ease_in_back = move_ease_in.Reverse();

            var move_ease_out = new CCEaseBackOut(move);
            var move_ease_out_back = move_ease_out.Reverse();

            var delay = new CCDelayTime(0.25f);

            var seq1 = new CCSequence(move, delay, move_back, delay);
            var seq2 = new CCSequence(move_ease_in, delay, move_ease_in_back, delay);
            var seq3 = new CCSequence(move_ease_out, delay, move_ease_out_back, delay);

            m_grossini.RunAction(new CCRepeatForever(seq1));
            m_tamara.RunAction(new CCRepeatForever(seq2));
            m_kathia.RunAction(new CCRepeatForever(seq3));
        }

        public override string Title
        {
            get { return "Back In - Out actions"; }
        }
    }

    // Back ease-in-out on two sprites.
    public class SpriteEaseBackInOut : EaseSpriteDemo
    {
        public override void OnEnter()
        {
            base.OnEnter();

            var s = Layer.VisibleBoundsWorldspace.Size;

            var move = new CCMoveBy(3, new CCPoint(s.Width - 130, 0));
            var move_back = move.Reverse();

            var move_ease = new CCEaseBackInOut(move);
            var move_ease_back = move_ease.Reverse() as CCFiniteTimeAction;

            var delay = new CCDelayTime(0.25f);

            var seq1 = new CCSequence(move, delay, move_back, delay);
            var seq2 = new CCSequence(move_ease, delay, move_ease_back, delay);

            PositionForTwo();

            m_grossini.RunAction(new CCRepeatForever(seq1));
            m_tamara.RunAction(new CCRepeatForever(seq2));
        }

        public override string Title
        {
            get { return "EaseBackInOut action"; }
        }
    }

    // Same jump+rotate spawn run at 1x / 2x / 0.5x via CCSpeed.
    public class SpeedTest : EaseSpriteDemo
    {
        CCSpeed speedAction1, speedAction2, speedAction3;

        public override void OnEnter()
        {
            base.OnEnter();

            var s = Layer.VisibleBoundsWorldspace.Size;

            // rotate and jump
            var jump1 = new CCJumpBy(4, new CCPoint(-s.Width + 80, 0), 100, 4);
            var jump2 = jump1.Reverse();
            var rot1 = new CCRotateBy(4, 360 * 2);
            var rot2 = rot1.Reverse();

            var seq3_1 = new CCSequence(jump2, jump1);
            var seq3_2 = new CCSequence(rot1, rot2);
            var spawn = new CCSpawn(seq3_1, seq3_2);

            speedAction1 = new CCSpeed(new CCRepeatForever(spawn), 1.0f);
            speedAction2 = new CCSpeed(new CCRepeatForever(spawn), 2.0f);
            speedAction3 = new CCSpeed(new CCRepeatForever(spawn), 0.5f);

            m_grossini.RunAction(speedAction1);
            m_tamara.RunAction(speedAction2);
            m_kathia.RunAction(speedAction3);
        }

        public override string Title
        {
            get { return "Speed action"; }
        }
    }

    // Scene wrapper: hosts whichever ease-test layer is current.
    public class EaseActionsTestScene : TestScene
    {
        protected override void NextTestCase() { }
        protected override void PreviousTestCase() { }
        protected override void RestTestCase() { }

        public override void runThisTest()
        {
            var pLayer = EaseTest.nextEaseAction();
            AddChild(pLayer);
            Director.ReplaceScene(this);
        }
    }

    // Registry and next/back/restart cursor over the ease-test layers.
    public static class EaseTest
    {
        public static int MAX_LAYER = 0;   // set from easeTestFunctions.Count in the static ctor
        public const int kTagAction1 = 1;
        public const int kTagAction2 = 2;
        public const int kTagSlider = 1;
        private static int sceneIdx = -1;

        static EaseTest()
        {
            MAX_LAYER = easeTestFunctions.Count;
        }

        static List<Func<CCLayer>> easeTestFunctions = new List<Func<CCLayer>>()
        {
            () => new SpriteEase(),
            () => new SpriteEaseInOut(),
            () => new SpriteEaseExponential(),
            () => new SpriteEaseExponentialInOut(),
            () => new SpriteEaseSine(),
            () => new SpriteEaseSineInOut(),
            () => new SpriteEaseElastic(),
            () => new SpriteEaseElasticInOut(),
            () => new SpriteEaseBounce(),
            () => new SpriteEaseBounceInOut(),
            () => new SpriteEaseBack(),
            () => new SpriteEaseBackInOut(),
            () => new SpeedTest(),
        };

        public static CCLayer createEaseLayer(int index)
        {
            return easeTestFunctions[index]();
        }

        public static CCLayer nextEaseAction()
        {
            sceneIdx++;
            sceneIdx %= MAX_LAYER;
            var pLayer = createEaseLayer(sceneIdx);
            return pLayer;
        }

        public static CCLayer backEaseAction()
        {
            sceneIdx--;
            var total = MAX_LAYER;
            if (sceneIdx < 0)
                sceneIdx += total;
            var pLayer = createEaseLayer(sceneIdx);
            return pLayer;
        }

        public static CCLayer restartEaseAction()
        {
            var pLayer = createEaseLayer(sceneIdx);
            return pLayer;
        }
    }
}
//----------------------------------------------------------------------------- // Copyright (c) 2012 GarageGames, LLC // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to // deal in the Software without restriction, including without limitation the // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or // sell copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS // IN THE SOFTWARE. //----------------------------------------------------------------------------- // Reverb environment presets. // // For customized presets, best derive from one of these presets. 
// Baseline generic reverb.
singleton SFXEnvironment(Generic)
{
   reverbDensity = "1.000";
   reverbDiffusion = "1.000";
   reverbGain = "0.3162";
   reverbGainHF = "0.8913";
   reverbGainLF = "1.000";
   reverbDecayTime = "1.4900";
   reverbDecayHFRatio = "0.8300";
   reverbDecayLFRatio = "1.0000";
   reflectionsGain = "0.0500";
   reflectionDelay = "0.0070";
   reflectionsPan[ 0 ] = "0.0";
   reflectionsPan[ 1 ] = "0.0";
   reflectionsPan[ 2 ] = "0.0";
   lateReverbGain = "1.2589";
   lateReverbDelay = "0.0110";
   lateReverbPan[ 0 ] = "0.0";
   lateReverbPan[ 1 ] = "0.0";
   lateReverbPan[ 2 ] = "0.0";
   reverbEchoTime = "0.2500";
   reverbEchoDepth = "0.0000";
   reverbModTime = "0.2500";
   reverbModDepth = "0.0000";
   airAbsorbtionGainHF = "0.9943";   // NOTE(review): engine spells it "Absorbtion"
   reverbHFRef = "5000.0000";
   reverbLFRef = "250.0000";
   roomRolloffFactor = "0.0000";
   decayHFLimit = "1";
};

// Very short, heavily damped decay (0.17s, HF gain 0.001).
singleton SFXEnvironment(PaddedCell)
{
   reverbDensity = "0.1715";
   reverbDiffusion = "1.000";
   reverbGain = "0.3162";
   reverbGainHF = "0.0010";
   reverbGainLF = "1.000";
   reverbDecayTime = "0.1700";
   reverbDecayHFRatio = "0.1000";
   reverbDecayLFRatio = "1.0000";
   reflectionsGain = "0.2500";
   reflectionDelay = "0.0010";
   reflectionsPan[ 0 ] = "0.0";
   reflectionsPan[ 1 ] = "0.0";
   reflectionsPan[ 2 ] = "0.0";
   lateReverbGain = "1.2691";
   lateReverbDelay = "0.0020";
   lateReverbPan[ 0 ] = "0.0";
   lateReverbPan[ 1 ] = "0.0";
   lateReverbPan[ 2 ] = "0.0";
   reverbEchoTime = "0.2500";
   reverbEchoDepth = "0.0000";
   reverbModTime = "0.2500";
   reverbModDepth = "0.0000";
   airAbsorbtionGainHF = "0.9943";
   reverbHFRef = "5000.0000";
   reverbLFRef = "250.0000";
   roomRolloffFactor = "0.0000";
   decayHFLimit = "1";
};

// Small room: 0.4s decay.
singleton SFXEnvironment(PresetRoom)
{
   reverbDensity = "0.4287";
   reverbDiffusion = "1.000";
   reverbGain = "0.3162";
   reverbGainHF = "0.5929";
   reverbGainLF = "1.000";
   reverbDecayTime = "0.4000";
   reverbDecayHFRatio = "0.8300";
   reverbDecayLFRatio = "1.0000";
   reflectionsGain = "0.1503";
   reflectionDelay = "0.0020";
   reflectionsPan[ 0 ] = "0.0";
   reflectionsPan[ 1 ] = "0.0";
   reflectionsPan[ 2 ] = "0.0";
   lateReverbGain = "1.0629";
   lateReverbDelay = "0.0030";
   lateReverbPan[ 0 ] = "0.0";
   lateReverbPan[ 1 ] = "0.0";
   lateReverbPan[ 2 ] = "0.0";
   reverbEchoTime = "0.2500";
   reverbEchoDepth = "0.0000";
   reverbModTime = "0.2500";
   reverbModDepth = "0.0000";
   airAbsorbtionGainHF = "0.9943";
   reverbHFRef = "5000.0000";
   reverbLFRef = "250.0000";
   roomRolloffFactor = "0.0000";
   decayHFLimit = "1";
};

// Bathroom: strong late reverb (3.27) on a 1.49s decay.
singleton SFXEnvironment(PresetBathroom)
{
   reverbDensity = "0.1715";
   reverbDiffusion = "1.000";
   reverbGain = "0.3162";
   reverbGainHF = "0.2512";
   reverbGainLF = "1.000";
   reverbDecayTime = "1.4900";
   reverbDecayHFRatio = "0.5400";
   reverbDecayLFRatio = "1.0000";
   reflectionsGain = "0.6531";
   reflectionDelay = "0.0070";
   reflectionsPan[ 0 ] = "0.0";
   reflectionsPan[ 1 ] = "0.0";
   reflectionsPan[ 2 ] = "0.0";
   lateReverbGain = "3.2734";
   lateReverbDelay = "0.0110";
   lateReverbPan[ 0 ] = "0.0";
   lateReverbPan[ 1 ] = "0.0";
   lateReverbPan[ 2 ] = "0.0";
   reverbEchoTime = "0.2500";
   reverbEchoDepth = "0.0000";
   reverbModTime = "0.2500";
   reverbModDepth = "0.0000";
   airAbsorbtionGainHF = "0.9943";
   reverbHFRef = "5000.0000";
   reverbLFRef = "250.0000";
   roomRolloffFactor = "0.0000";
   decayHFLimit = "1";
};

// Living room: very short (0.09s), dull decay.
singleton SFXEnvironment(PresetLivingroom)
{
   reverbDensity = "0.9766";
   reverbDiffusion = "1.000";
   reverbGain = "0.3162";
   reverbGainHF = "0.0010";
   reverbGainLF = "1.0000";
   reverbDecayTime = "0.0900";
   reverbDecayHFRatio = "0.5000";
   reverbDecayLFRatio = "1.0000";
   reflectionsGain = "0.2051";
   reflectionDelay = "0.0030";
   reflectionsPan[ 0 ] = "0.0";
   reflectionsPan[ 1 ] = "0.0";
   reflectionsPan[ 2 ] = "0.0";
   lateReverbGain = "0.2805";
   lateReverbDelay = "0.0040";
   lateReverbPan[ 0 ] = "0.0";
   lateReverbPan[ 1 ] = "0.0";
   lateReverbPan[ 2 ] = "0.0";
   reverbEchoTime = "0.2500";
   reverbEchoDepth = "0.0000";
   reverbModTime = "0.2500";
   reverbModDepth = "0.0000";
   airAbsorbtionGainHF = "0.9943";
   reverbHFRef = "5000.0000";
   reverbLFRef = "250.0000";
   roomRolloffFactor = "0.0000";
   decayHFLimit = "1";
};

// Stone room: 2.31s decay.
singleton SFXEnvironment(PresetStoneroom)
{
   reverbDensity = "1.000";
   reverbDiffusion = "1.000";
   reverbGain = "0.3162";
   reverbGainHF = "0.7079";
   reverbGainLF = "1.0000";
   reverbDecayTime = "2.3100";
   reverbDecayHFRatio = "0.6400";
   reverbDecayLFRatio = "1.0000";
   reflectionsGain = "0.4411";
   reflectionDelay = "0.0120";
   reflectionsPan[ 0 ] = "0.0";
   reflectionsPan[ 1 ] = "0.0";
   reflectionsPan[ 2 ] = "0.0";
   lateReverbGain = "1.1003";
   lateReverbDelay = "0.0170";
   lateReverbPan[ 0 ] = "0.0";
   lateReverbPan[ 1 ] = "0.0";
   lateReverbPan[ 2 ] = "0.0";
   reverbEchoTime = "0.2500";
   reverbEchoDepth = "0.0000";
   reverbModTime = "0.2500";
   reverbModDepth = "0.0000";
   airAbsorbtionGainHF = "0.9943";
   reverbHFRef = "5000.0000";
   reverbLFRef = "250.0000";
   roomRolloffFactor = "0.0000";
   decayHFLimit = "1";
};

// Auditorium: 4.32s decay.
singleton SFXEnvironment(PresetAuditorium)
{
   reverbDensity = "1.000";
   reverbDiffusion = "1.000";
   reverbGain = "0.3162";
   reverbGainHF = "0.5781";
   reverbGainLF = "1.0000";
   reverbDecayTime = "4.3200";
   reverbDecayHFRatio = "0.5900";
   reverbDecayLFRatio = "1.0000";
   reflectionsGain = "0.4032";
   reflectionDelay = "0.0200";
   reflectionsPan[ 0 ] = "0.0";
   reflectionsPan[ 1 ] = "0.0";
   reflectionsPan[ 2 ] = "0.0";
   lateReverbGain = "0.7170";
   lateReverbDelay = "0.0300";
   lateReverbPan[ 0 ] = "0.0";
   lateReverbPan[ 1 ] = "0.0";
   lateReverbPan[ 2 ] = "0.0";
   reverbEchoTime = "0.2500";
   reverbEchoDepth = "0.0000";
   reverbModTime = "0.2500";
   reverbModDepth = "0.0000";
   airAbsorbtionGainHF = "0.9943";
   reverbHFRef = "5000.0000";
   reverbLFRef = "250.0000";
   roomRolloffFactor = "0.0000";
   decayHFLimit = "1";
};

// Concert hall: 3.92s decay.
singleton SFXEnvironment(PresetConcerthall)
{
   reverbDensity = "1.000";
   reverbDiffusion = "1.000";
   reverbGain = "0.3162";
   reverbGainHF = "0.5632";
   reverbGainLF = "1.0000";
   reverbDecayTime = "3.9200";
   reverbDecayHFRatio = "0.7000";
   reverbDecayLFRatio = "1.0000";
   reflectionsGain = "0.2427";
   reflectionDelay = "0.0200";
   reflectionsPan[ 0 ] = "0.0";
   reflectionsPan[ 1 ] = "0.0";
   reflectionsPan[ 2 ] = "0.0";
   lateReverbGain = "0.9977";
   lateReverbDelay = "0.0290";
   lateReverbPan[ 0 ] = "0.0";
   lateReverbPan[ 1 ] = "0.0";
   lateReverbPan[ 2 ] = "0.0";
   reverbEchoTime = "0.2500";
   reverbEchoDepth = "0.0000";
   reverbModTime = "0.2500";
   reverbModDepth = "0.0000";
   airAbsorbtionGainHF = "0.9943";
   reverbHFRef = "5000.0000";
   reverbLFRef = "250.0000";
   roomRolloffFactor = "0.0000";
   decayHFLimit = "1";
};

// Cave: long bright decay (HF ratio > 1); HF limit off.
singleton SFXEnvironment(PresetCave)
{
   reverbDensity = "1.000";
   reverbDiffusion = "1.000";
   reverbGain = "0.3162";
   reverbGainHF = "1.000";
   reverbGainLF = "1.0000";
   reverbDecayTime = "2.9100";
   reverbDecayHFRatio = "1.3000";
   reverbDecayLFRatio = "1.0000";
   reflectionsGain = "0.5000";
   reflectionDelay = "0.0250";
   reflectionsPan[ 0 ] = "0.0";
   reflectionsPan[ 1 ] = "0.0";
   reflectionsPan[ 2 ] = "0.0";
   lateReverbGain = "0.7063";
   lateReverbDelay = "0.0220";
   lateReverbPan[ 0 ] = "0.0";
   lateReverbPan[ 1 ] = "0.0";
   lateReverbPan[ 2 ] = "0.0";
   reverbEchoTime = "0.2500";
   reverbEchoDepth = "0.0000";
   reverbModTime = "0.2500";
   reverbModDepth = "0.0000";
   airAbsorbtionGainHF = "0.9943";
   reverbHFRef = "5000.0000";
   reverbLFRef = "250.0000";
   roomRolloffFactor = "0.0000";
   decayHFLimit = "0";
};

// Arena: very long (7.24s) decay.
singleton SFXEnvironment(PresetArena)
{
   reverbDensity = "1.000";
   reverbDiffusion = "1.000";
   reverbGain = "0.3162";
   reverbGainHF = "0.4477";
   reverbGainLF = "1.0000";
   reverbDecayTime = "7.2400";
   reverbDecayHFRatio = "0.3300";
   reverbDecayLFRatio = "1.0000";
   reflectionsGain = "0.2612";
   reflectionDelay = "0.0200";
   reflectionsPan[ 0 ] = "0.0";
   reflectionsPan[ 1 ] = "0.0";
   reflectionsPan[ 2 ] = "0.0";
   lateReverbGain = "1.0186";
   lateReverbDelay = "0.0300";
   lateReverbPan[ 0 ] = "0.0";
   lateReverbPan[ 1 ] = "0.0";
   lateReverbPan[ 2 ] = "0.0";
   reverbEchoTime = "0.2500";
   reverbEchoDepth = "0.0000";
   reverbModTime = "0.2500";
   reverbModDepth = "0.0000";
   airAbsorbtionGainHF = "0.9943";
   reverbHFRef = "5000.0000";
   reverbLFRef = "250.0000";
   roomRolloffFactor = "0.0000";
   decayHFLimit = "1";
};

// Hangar: longest decay in this file (10.05s).
singleton SFXEnvironment(PresetHangar)
{
   reverbDensity = "1.000";
   reverbDiffusion = "1.000";
   reverbGain = "0.3162";
   reverbGainHF = "0.3162";
   reverbGainLF = "1.0000";
   reverbDecayTime = "10.0500";
   reverbDecayHFRatio = "0.2300";
   reverbDecayLFRatio = "1.0000";
   reflectionsGain = "0.5000";
   reflectionDelay = "0.0200";
   reflectionsPan[ 0 ] = "0.0";
   reflectionsPan[ 1 ] = "0.0";
   reflectionsPan[ 2 ] = "0.0";
   lateReverbGain = "1.2560";
   lateReverbDelay = "0.0300";
   lateReverbPan[ 0 ] = "0.0";
   lateReverbPan[ 1 ] = "0.0";
   lateReverbPan[ 2 ] = "0.0";
   reverbEchoTime = "0.2500";
   reverbEchoDepth = "0.0000";
   reverbModTime = "0.2500";
   reverbModDepth = "0.0000";
   airAbsorbtionGainHF = "0.9943";
   reverbHFRef = "5000.0000";
   reverbLFRef = "250.0000";
   roomRolloffFactor = "0.0000";
   decayHFLimit = "1";
};

// Carpeted hall: short, strongly damped.
singleton SFXEnvironment(PresetCarpetedHall)
{
   reverbDensity = "0.4287";
   reverbDiffusion = "1.000";
   reverbGain = "0.3162";
   reverbGainHF = "0.0100";
   reverbGainLF = "1.0000";
   reverbDecayTime = "0.3000";
   reverbDecayHFRatio = "0.1000";
   reverbDecayLFRatio = "1.0000";
   reflectionsGain = "0.1215";
   reflectionDelay = "0.0020";
   reflectionsPan[ 0 ] = "0.0";
   reflectionsPan[ 1 ] = "0.0";
   reflectionsPan[ 2 ] = "0.0";
   lateReverbGain = "0.1531";
   lateReverbDelay = "0.0300";
   lateReverbPan[ 0 ] = "0.0";
   lateReverbPan[ 1 ] = "0.0";
   lateReverbPan[ 2 ] = "0.0";
   reverbEchoTime = "0.2500";
   reverbEchoDepth = "0.0000";
   reverbModTime = "0.2500";
   reverbModDepth = "0.0000";
   airAbsorbtionGainHF = "0.9943";
   reverbHFRef = "5000.0000";
   reverbLFRef = "250.0000";
   roomRolloffFactor = "0.0000";
   decayHFLimit = "1";
};

// Hallway.
singleton SFXEnvironment(PresetHallway)
{
   reverbDensity = "0.3645";
   reverbDiffusion = "1.000";
   reverbGain = "0.3162";
   reverbGainHF = "0.7079";
   reverbGainLF = "1.0000";
   reverbDecayTime = "1.4900";
   reverbDecayHFRatio = "0.5900";
   reverbDecayLFRatio = "1.0000";
   reflectionsGain = "0.2458";
   reflectionDelay = "0.0070";
   reflectionsPan[ 0 ] = "0.0";
   reflectionsPan[ 1 ] = "0.0";
   reflectionsPan[ 2 ] = "0.0";
   lateReverbGain = "1.6615";
   lateReverbDelay = "0.0110";
   lateReverbPan[ 0 ] = "0.0";
   lateReverbPan[ 1 ] = "0.0";
   lateReverbPan[ 2 ] = "0.0";
   reverbEchoTime = "0.2500";
   reverbEchoDepth = "0.0000";
   reverbModTime = "0.2500";
   reverbModDepth = "0.0000";
   airAbsorbtionGainHF = "0.9943";
   reverbHFRef = "5000.0000";
   reverbLFRef = "250.0000";
   roomRolloffFactor = "0.0000";
   decayHFLimit = "1";
};

// Stone corridor: 2.7s decay.
singleton SFXEnvironment(PresetStoneCorridor)
{
   reverbDensity = "1.000";
   reverbDiffusion = "1.000";
   reverbGain = "0.3162";
   reverbGainHF = "0.7612";
   reverbGainLF = "1.0000";
   reverbDecayTime = "2.7000";
   reverbDecayHFRatio = "0.7900";
   reverbDecayLFRatio = "1.0000";
   reflectionsGain = "0.2472";
   reflectionDelay = "0.0130";
   reflectionsPan[ 0 ] = "0.0";
   reflectionsPan[ 1 ] = "0.0";
   reflectionsPan[ 2 ] = "0.0";
   lateReverbGain = "1.5758";
   lateReverbDelay = "0.0200";
   lateReverbPan[ 0 ] = "0.0";
   lateReverbPan[ 1 ] = "0.0";
   lateReverbPan[ 2 ] = "0.0";
   reverbEchoTime = "0.2500";
   reverbEchoDepth = "0.0000";
   reverbModTime = "0.2500";
   reverbModDepth = "0.0000";
   airAbsorbtionGainHF = "0.9943";
   reverbHFRef = "5000.0000";
   reverbLFRef = "250.0000";
   roomRolloffFactor = "0.0000";
   decayHFLimit = "1";
};

// Stone alley: low diffusion with a pronounced echo (depth 0.95).
singleton SFXEnvironment(PresetStoneAlley)
{
   reverbDensity = "1.000";
   reverbDiffusion = "0.300";
   reverbGain = "0.3162";
   reverbGainHF = "0.7328";
   reverbGainLF = "1.0000";
   reverbDecayTime = "1.4900";
   reverbDecayHFRatio = "0.8600";
   reverbDecayLFRatio = "1.0000";
   reflectionsGain = "0.2500";
   reflectionDelay = "0.0070";
   reflectionsPan[ 0 ] = "0.0";
   reflectionsPan[ 1 ] = "0.0";
   reflectionsPan[ 2 ] = "0.0";
   lateReverbGain = "0.9954";
   lateReverbDelay = "0.0110";
   lateReverbPan[ 0 ] = "0.0";
   lateReverbPan[ 1 ] = "0.0";
   lateReverbPan[ 2 ] = "0.0";
   reverbEchoTime = "0.1250";
   reverbEchoDepth = "0.9500";
   reverbModTime = "0.2500";
   reverbModDepth = "0.0000";
   airAbsorbtionGainHF = "0.9943";
   reverbHFRef = "5000.0000";
   reverbLFRef = "250.0000";
   roomRolloffFactor = "0.0000";
   decayHFLimit = "1";
};

// Forest: sparse reflections, full echo depth.
singleton SFXEnvironment(PresetForest)
{
   reverbDensity = "1.000";
   reverbDiffusion = "0.300";
   reverbGain = "0.3162";
   reverbGainHF = "0.0224";
   reverbGainLF = "1.0000";
   reverbDecayTime =
"1.4900";
   reverbDecayHFRatio = "0.5400";
   reverbDecayLFRatio = "1.0000";
   reflectionsGain = "0.0525";
   reflectionDelay = "0.1620";
   reflectionsPan[ 0 ] = "0.0";
   reflectionsPan[ 1 ] = "0.0";
   reflectionsPan[ 2 ] = "0.0";
   lateReverbGain = "0.7682";
   lateReverbDelay = "0.0880";
   lateReverbPan[ 0 ] = "0.0";
   lateReverbPan[ 1 ] = "0.0";
   lateReverbPan[ 2 ] = "0.0";
   reverbEchoTime = "0.1250";
   reverbEchoDepth = "1.0000";
   reverbModTime = "0.2500";
   reverbModDepth = "0.0000";
   airAbsorbtionGainHF = "0.9943";
   reverbHFRef = "5000.0000";
   reverbLFRef = "250.0000";
   roomRolloffFactor = "0.0000";
   decayHFLimit = "1";
};

// City: quiet, diffuse outdoor reverb.
singleton SFXEnvironment(PresetCity)
{
   reverbDensity = "1.000";
   reverbDiffusion = "0.500";
   reverbGain = "0.3162";
   reverbGainHF = "0.3981";
   reverbGainLF = "1.0000";
   reverbDecayTime = "1.4900";
   reverbDecayHFRatio = "0.6700";
   reverbDecayLFRatio = "1.0000";
   reflectionsGain = "0.0730";
   reflectionDelay = "0.0070";
   reflectionsPan[ 0 ] = "0.0";
   reflectionsPan[ 1 ] = "0.0";
   reflectionsPan[ 2 ] = "0.0";
   lateReverbGain = "0.1427";
   lateReverbDelay = "0.0110";
   lateReverbPan[ 0 ] = "0.0";
   lateReverbPan[ 1 ] = "0.0";
   lateReverbPan[ 2 ] = "0.0";
   reverbEchoTime = "0.1250";
   reverbEchoDepth = "0.0000";
   reverbModTime = "0.2500";
   reverbModDepth = "0.0000";
   airAbsorbtionGainHF = "0.9943";
   reverbHFRef = "5000.0000";
   reverbLFRef = "250.0000";
   roomRolloffFactor = "0.0000";
   decayHFLimit = "1";
};

// Mountains: long reflection delays, full echo depth; HF limit off.
singleton SFXEnvironment(PresetMountains)
{
   reverbDensity = "1.000";
   reverbDiffusion = "0.2700";
   reverbGain = "0.3162";
   reverbGainHF = "0.0562";
   reverbGainLF = "1.0000";
   reverbDecayTime = "1.4900";
   reverbDecayHFRatio = "0.2100";
   reverbDecayLFRatio = "1.0000";
   reflectionsGain = "0.0407";
   reflectionDelay = "0.3000";
   reflectionsPan[ 0 ] = "0.0";
   reflectionsPan[ 1 ] = "0.0";
   reflectionsPan[ 2 ] = "0.0";
   lateReverbGain = "0.1919";
   lateReverbDelay = "0.1000";
   lateReverbPan[ 0 ] = "0.0";
   lateReverbPan[ 1 ] = "0.0";
   lateReverbPan[ 2 ] = "0.0";
   reverbEchoTime = "0.1250";
   reverbEchoDepth = "1.0000";
   reverbModTime = "0.2500";
   reverbModDepth = "0.0000";
   airAbsorbtionGainHF = "0.9943";
   reverbHFRef = "5000.0000";
   reverbLFRef = "250.0000";
   roomRolloffFactor = "0.0000";
   decayHFLimit = "0";
};

// Quarry: no early reflections, strong late reverb with echo.
singleton SFXEnvironment(PresetQuarry)
{
   reverbDensity = "1.000";
   reverbDiffusion = "1.0000";
   reverbGain = "0.3162";
   reverbGainHF = "0.3162";
   reverbGainLF = "1.0000";
   reverbDecayTime = "1.4900";
   reverbDecayHFRatio = "0.8300";
   reverbDecayLFRatio = "1.0000";
   reflectionsGain = "0.0000";
   reflectionDelay = "0.0610";
   reflectionsPan[ 0 ] = "0.0";
   reflectionsPan[ 1 ] = "0.0";
   reflectionsPan[ 2 ] = "0.0";
   lateReverbGain = "1.7783";
   lateReverbDelay = "0.0250";
   lateReverbPan[ 0 ] = "0.0";
   lateReverbPan[ 1 ] = "0.0";
   lateReverbPan[ 2 ] = "0.0";
   reverbEchoTime = "0.1250";
   reverbEchoDepth = "0.7000";
   reverbModTime = "0.2500";
   reverbModDepth = "0.0000";
   airAbsorbtionGainHF = "0.9943";
   reverbHFRef = "5000.0000";
   reverbLFRef = "250.0000";
   roomRolloffFactor = "0.0000";
   decayHFLimit = "1";
};

// Plain: very low diffusion open field.
singleton SFXEnvironment(PresetPlain)
{
   reverbDensity = "1.000";
   reverbDiffusion = "0.2100";
   reverbGain = "0.3162";
   reverbGainHF = "0.1000";
   reverbGainLF = "1.0000";
   reverbDecayTime = "1.4900";
   reverbDecayHFRatio = "0.5000";
   reverbDecayLFRatio = "1.0000";
   reflectionsGain = "0.0585";
   reflectionDelay = "0.1790";
   reflectionsPan[ 0 ] = "0.0";
   reflectionsPan[ 1 ] = "0.0";
   reflectionsPan[ 2 ] = "0.0";
   lateReverbGain = "0.1089";
   lateReverbDelay = "0.1000";
   lateReverbPan[ 0 ] = "0.0";
   lateReverbPan[ 1 ] = "0.0";
   lateReverbPan[ 2 ] = "0.0";
   reverbEchoTime = "0.2500";
   reverbEchoDepth = "1.0000";
   reverbModTime = "0.2500";
   reverbModDepth = "0.0000";
   airAbsorbtionGainHF = "0.9943";
   reverbHFRef = "5000.0000";
   reverbLFRef = "250.0000";
   roomRolloffFactor = "0.0000";
   decayHFLimit = "1";
};

// Parking lot: bright, undamped; HF limit off.
singleton SFXEnvironment(PresetParkinglot)
{
   reverbDensity = "1.000";
   reverbDiffusion = "1.0000";
   reverbGain = "0.3162";
   reverbGainHF = "1.0000";
   reverbGainLF = "1.0000";
   reverbDecayTime = "1.6500";
   reverbDecayHFRatio = "1.5000";
   reverbDecayLFRatio = "1.0000";
   reflectionsGain = "0.2082";
   reflectionDelay = "0.0080";
   reflectionsPan[ 0 ] = "0.0";
   reflectionsPan[ 1 ] = "0.0";
   reflectionsPan[ 2 ] = "0.0";
   lateReverbGain = "0.2652";
   lateReverbDelay = "0.0120";
   lateReverbPan[ 0 ] = "0.0";
   lateReverbPan[ 1 ] = "0.0";
   lateReverbPan[ 2 ] = "0.0";
   reverbEchoTime = "0.2500";
   reverbEchoDepth = "0.0000";
   reverbModTime = "0.2500";
   reverbModDepth = "0.0000";
   airAbsorbtionGainHF = "0.9943";
   reverbHFRef = "5000.0000";
   reverbLFRef = "250.0000";
   roomRolloffFactor = "0.0000";
   decayHFLimit = "0";
};

// Sewer pipe: very loud reflections and late reverb.
singleton SFXEnvironment(PresetSewerpipe)
{
   reverbDensity = "0.3071";
   reverbDiffusion = "0.8000";
   reverbGain = "0.3162";
   reverbGainHF = "0.3162";
   reverbGainLF = "1.0000";
   reverbDecayTime = "2.8100";
   reverbDecayHFRatio = "0.1400";
   reverbDecayLFRatio = "1.0000";
   reflectionsGain = "1.6387";
   reflectionDelay = "0.0140";
   reflectionsPan[ 0 ] = "0.0";
   reflectionsPan[ 1 ] = "0.0";
   reflectionsPan[ 2 ] = "0.0";
   lateReverbGain = "3.2471";
   lateReverbDelay = "0.0210";
   lateReverbPan[ 0 ] = "0.0";
   lateReverbPan[ 1 ] = "0.0";
   lateReverbPan[ 2 ] = "0.0";
   reverbEchoTime = "0.2500";
   reverbEchoDepth = "0.0000";
   reverbModTime = "0.2500";
   reverbModDepth = "0.0000";
   airAbsorbtionGainHF = "0.9943";
   reverbHFRef = "5000.0000";
   reverbLFRef = "250.0000";
   roomRolloffFactor = "0.0000";
   decayHFLimit = "1";
};

// Underwater: massive late reverb with slow, deep modulation.
// NOTE(review): declaration is truncated at this chunk boundary; the
// remaining fields continue outside this view.
singleton SFXEnvironment(PresetUnderwater)
{
   reverbDensity = "0.3645";
   reverbDiffusion = "1.0000";
   reverbGain = "0.3162";
   reverbGainHF = "0.0100";
   reverbGainLF = "1.0000";
   reverbDecayTime = "1.4900";
   reverbDecayHFRatio = "0.1000";
   reverbDecayLFRatio = "1.0000";
   reflectionsGain = "0.5963";
   reflectionDelay = "0.0070";
   reflectionsPan[ 0 ] = "0.0";
   reflectionsPan[ 1 ] = "0.0";
   reflectionsPan[ 2 ] = "0.0";
   lateReverbGain = "7.0795";
   lateReverbDelay = "0.0110";
   lateReverbPan[ 0 ] = "0.0";
   lateReverbPan[ 1 ] = "0.0";
   lateReverbPan[ 2 ] = "0.0";
   reverbEchoTime = "0.2500";
   reverbEchoDepth = "0.0000";
   reverbModTime = "1.1800";
   reverbModDepth = "0.3480";
   airAbsorbtionGainHF
= "0.9943"; reverbHFRef = "5000.0000"; reverbLFRef = "250.0000"; roomRolloffFactor = "0.0000"; decayHFLimit = "1"; }; singleton SFXEnvironment(PresetDrugged) { reverbDensity = "0.4287"; reverbDiffusion = "0.5000"; reverbGain = "0.3162"; reverbGainHF = "1.0000"; reverbGainLF = "1.0000"; reverbDecayTime = "8.3900"; reverbDecayHFRatio = "1.3900"; reverbDecayLFRatio = "1.0000"; reflectionsGain = "0.8760"; reflectionDelay = "0.0020"; reflectionsPan[ 0 ] = "0.0"; reflectionsPan[ 1 ] = "0.0"; reflectionsPan[ 2 ] = "0.0"; lateReverbGain = "3.1081"; lateReverbDelay = "0.0300"; lateReverbPan[ 0 ] = "0.0"; lateReverbPan[ 1 ] = "0.0"; lateReverbPan[ 2 ] = "0.0"; reverbEchoTime = "0.2500"; reverbEchoDepth = "0.0000"; reverbModTime = "0.2500"; reverbModDepth = "1.0000"; airAbsorbtionGainHF = "0.9943"; reverbHFRef = "5000.0000"; reverbLFRef = "250.0000"; roomRolloffFactor = "0.0000"; decayHFLimit = "0"; }; singleton SFXEnvironment(PresetDizzy) { reverbDensity = "0.3645"; reverbDiffusion = "0.6000"; reverbGain = "0.3162"; reverbGainHF = "0.6310"; reverbGainLF = "1.0000"; reverbDecayTime = "17.2300"; reverbDecayHFRatio = "0.5600"; reverbDecayLFRatio = "1.0000"; reflectionsGain = "0.1392"; reflectionDelay = "0.0200"; reflectionsPan[ 0 ] = "0.0"; reflectionsPan[ 1 ] = "0.0"; reflectionsPan[ 2 ] = "0.0"; lateReverbGain = "0.4937"; lateReverbDelay = "0.0300"; lateReverbPan[ 0 ] = "0.0"; lateReverbPan[ 1 ] = "0.0"; lateReverbPan[ 2 ] = "0.0"; reverbEchoTime = "0.2500"; reverbEchoDepth = "1.0000"; reverbModTime = "0.8100"; reverbModDepth = "0.3100"; airAbsorbtionGainHF = "0.9943"; reverbHFRef = "5000.0000"; reverbLFRef = "250.0000"; roomRolloffFactor = "0.0000"; decayHFLimit = "0"; }; singleton SFXEnvironment(PresetPsychotic) { reverbDensity = "0.0625"; reverbDiffusion = "0.5000"; reverbGain = "0.3162"; reverbGainHF = "0.8404"; reverbGainLF = "1.0000"; reverbDecayTime = "7.5600"; reverbDecayHFRatio = "0.9100"; reverbDecayLFRatio = "1.0000"; reflectionsGain = "0.4864"; reflectionDelay = 
"0.0200"; reflectionsPan[ 0 ] = "0.0"; reflectionsPan[ 1 ] = "0.0"; reflectionsPan[ 2 ] = "0.0"; lateReverbGain = "2.4378"; lateReverbDelay = "0.0300"; lateReverbPan[ 0 ] = "0.0"; lateReverbPan[ 1 ] = "0.0"; lateReverbPan[ 2 ] = "0.0"; reverbEchoTime = "0.2500"; reverbEchoDepth = "0.0000"; reverbModTime = "4.0000"; reverbModDepth = "1.0000"; airAbsorbtionGainHF = "0.9943"; reverbHFRef = "5000.0000"; reverbLFRef = "250.0000"; roomRolloffFactor = "0.0000"; decayHFLimit = "0"; }; singleton SFXEnvironment(CastleSmallroom) { reverbDensity = "1.0000"; reverbDiffusion = "0.8900"; reverbGain = "0.3162"; reverbGainHF = "0.3981"; reverbGainLF = "0.1000"; reverbDecayTime = "1.2200"; reverbDecayHFRatio = "0.8300"; reverbDecayLFRatio = "0.3100"; reflectionsGain = "0.8913"; reflectionDelay = "0.0220"; reflectionsPan[ 0 ] = "0.0"; reflectionsPan[ 1 ] = "0.0"; reflectionsPan[ 2 ] = "0.0"; lateReverbGain = "1.9953"; lateReverbDelay = "0.0110"; lateReverbPan[ 0 ] = "0.0"; lateReverbPan[ 1 ] = "0.0"; lateReverbPan[ 2 ] = "0.0"; reverbEchoTime = "0.1380"; reverbEchoDepth = "0.0800"; reverbModTime = "0.2500"; reverbModDepth = "0.0000"; airAbsorbtionGainHF = "0.9943"; reverbHFRef = "5000.0000"; reverbLFRef = "250.0000"; roomRolloffFactor = "0.0000"; decayHFLimit = "1"; }; singleton SFXEnvironment(CastleShortPassage) { reverbDensity = "1.0000"; reverbDiffusion = "0.8900"; reverbGain = "0.3162"; reverbGainHF = "0.3162"; reverbGainLF = "0.1000"; reverbDecayTime = "2.3200"; reverbDecayHFRatio = "0.8300"; reverbDecayLFRatio = "0.3100"; reflectionsGain = "0.8913"; reflectionDelay = "0.0070"; reflectionsPan[ 0 ] = "0.0"; reflectionsPan[ 1 ] = "0.0"; reflectionsPan[ 2 ] = "0.0"; lateReverbGain = "1.2589"; lateReverbDelay = "0.0230"; lateReverbPan[ 0 ] = "0.0"; lateReverbPan[ 1 ] = "0.0"; lateReverbPan[ 2 ] = "0.0"; reverbEchoTime = "0.1380"; reverbEchoDepth = "0.0800"; reverbModTime = "0.2500"; reverbModDepth = "0.0000"; airAbsorbtionGainHF = "0.9943"; reverbHFRef = "5168.0001"; reverbLFRef = 
"139.5000"; roomRolloffFactor = "0.0000"; decayHFLimit = "1"; }; singleton SFXEnvironment(CastleMediumRoom) { reverbDensity = "1.0000"; reverbDiffusion = "0.9300"; reverbGain = "0.3162"; reverbGainHF = "0.2818"; reverbGainLF = "0.1000"; reverbDecayTime = "2.0400"; reverbDecayHFRatio = "0.8300"; reverbDecayLFRatio = "0.4600"; reflectionsGain = "0.6310"; reflectionDelay = "0.0220"; reflectionsPan[ 0 ] = "0.0"; reflectionsPan[ 1 ] = "0.0"; reflectionsPan[ 2 ] = "0.0"; lateReverbGain = "1.5849"; lateReverbDelay = "0.0110"; lateReverbPan[ 0 ] = "0.0"; lateReverbPan[ 1 ] = "0.0"; lateReverbPan[ 2 ] = "0.0"; reverbEchoTime = "0.1550"; reverbEchoDepth = "0.0300"; reverbModTime = "0.2500"; reverbModDepth = "0.0000"; airAbsorbtionGainHF = "0.9943"; reverbHFRef = "5168.0001"; reverbLFRef = "139.5000"; roomRolloffFactor = "0.0000"; decayHFLimit = "1"; }; singleton SFXEnvironment(CastleLargeRoom) { reverbDensity = "1.0000"; reverbDiffusion = "0.8200"; reverbGain = "0.3162"; reverbGainHF = "0.2818"; reverbGainLF = "0.1259"; reverbDecayTime = "2.5300"; reverbDecayHFRatio = "0.8300"; reverbDecayLFRatio = "0.5000"; reflectionsGain = "0.4467"; reflectionDelay = "0.0340"; reflectionsPan[ 0 ] = "0.0"; reflectionsPan[ 1 ] = "0.0"; reflectionsPan[ 2 ] = "0.0"; lateReverbGain = "1.2589"; lateReverbDelay = "0.0160"; lateReverbPan[ 0 ] = "0.0"; lateReverbPan[ 1 ] = "0.0"; lateReverbPan[ 2 ] = "0.0"; reverbEchoTime = "0.1850"; reverbEchoDepth = "0.0700"; reverbModTime = "0.2500"; reverbModDepth = "0.0000"; airAbsorbtionGainHF = "0.9943"; reverbHFRef = "5168.0001"; reverbLFRef = "139.5000"; roomRolloffFactor = "0.0000"; decayHFLimit = "1"; }; singleton SFXEnvironment(CastleLongPassage) { reverbDensity = "1.0000"; reverbDiffusion = "0.8900"; reverbGain = "0.3162"; reverbGainHF = "0.3981"; reverbGainLF = "0.1000"; reverbDecayTime = "3.4200"; reverbDecayHFRatio = "0.8300"; reverbDecayLFRatio = "0.3100"; reflectionsGain = "0.8913"; reflectionDelay = "0.0070"; reflectionsPan[ 0 ] = "0.0"; 
reflectionsPan[ 1 ] = "0.0"; reflectionsPan[ 2 ] = "0.0"; lateReverbGain = "1.4125"; lateReverbDelay = "0.0230"; lateReverbPan[ 0 ] = "0.0"; lateReverbPan[ 1 ] = "0.0"; lateReverbPan[ 2 ] = "0.0"; reverbEchoTime = "0.1380"; reverbEchoDepth = "0.0800"; reverbModTime = "0.2500"; reverbModDepth = "0.0000"; airAbsorbtionGainHF = "0.9943"; reverbHFRef = "5168.0001"; reverbLFRef = "139.5000"; roomRolloffFactor = "0.0000"; decayHFLimit = "1"; }; singleton SFXEnvironment(CastleHall) { reverbDensity = "1.0000"; reverbDiffusion = "0.8100"; reverbGain = "0.3162"; reverbGainHF = "0.2818"; reverbGainLF = "0.1778"; reverbDecayTime = "3.1400"; reverbDecayHFRatio = "0.7900"; reverbDecayLFRatio = "0.6200"; reflectionsGain = "0.1778"; reflectionDelay = "0.0560"; reflectionsPan[ 0 ] = "0.0"; reflectionsPan[ 1 ] = "0.0"; reflectionsPan[ 2 ] = "0.0"; lateReverbGain = "1.1220"; lateReverbDelay = "0.0240"; lateReverbPan[ 0 ] = "0.0"; lateReverbPan[ 1 ] = "0.0"; lateReverbPan[ 2 ] = "0.0"; reverbEchoTime = "0.2500"; reverbEchoDepth = "0.0000"; reverbModTime = "0.2500"; reverbModDepth = "0.0000"; airAbsorbtionGainHF = "0.9943"; reverbHFRef = "5168.0001"; reverbLFRef = "139.5000"; roomRolloffFactor = "0.0000"; decayHFLimit = "1"; }; singleton SFXEnvironment(CastleCupboard) { reverbDensity = "1.0000"; reverbDiffusion = "0.8900"; reverbGain = "0.3162"; reverbGainHF = "0.2818"; reverbGainLF = "0.1000"; reverbDecayTime = "0.6700"; reverbDecayHFRatio = "0.8700"; reverbDecayLFRatio = "0.3100"; reflectionsGain = "1.4125"; reflectionDelay = "0.0100"; reflectionsPan[ 0 ] = "0.0"; reflectionsPan[ 1 ] = "0.0"; reflectionsPan[ 2 ] = "0.0"; lateReverbGain = "3.5481"; lateReverbDelay = "0.0070"; lateReverbPan[ 0 ] = "0.0"; lateReverbPan[ 1 ] = "0.0"; lateReverbPan[ 2 ] = "0.0"; reverbEchoTime = "0.1380"; reverbEchoDepth = "0.0800"; reverbModTime = "0.2500"; reverbModDepth = "0.0000"; airAbsorbtionGainHF = "0.9943"; reverbHFRef = "5168.0001"; reverbLFRef = "139.5000"; roomRolloffFactor = "0.0000"; 
decayHFLimit = "1"; }; singleton SFXEnvironment(CastleCourtyard) { reverbDensity = "1.0000"; reverbDiffusion = "0.4200"; reverbGain = "0.3162"; reverbGainHF = "0.4467"; reverbGainLF = "0.1995"; reverbDecayTime = "2.1300"; reverbDecayHFRatio = "0.6100"; reverbDecayLFRatio = "0.2300"; reflectionsGain = "0.2239"; reflectionDelay = "0.1600"; reflectionsPan[ 0 ] = "0.0"; reflectionsPan[ 1 ] = "0.0"; reflectionsPan[ 2 ] = "0.0"; lateReverbGain = "0.7079"; lateReverbDelay = "0.0360"; lateReverbPan[ 0 ] = "0.0"; lateReverbPan[ 1 ] = "0.0"; lateReverbPan[ 2 ] = "0.0"; reverbEchoTime = "0.2500"; reverbEchoDepth = "0.3700"; reverbModTime = "0.2500"; reverbModDepth = "0.0000"; airAbsorbtionGainHF = "0.9943"; reverbHFRef = "5000.0000"; reverbLFRef = "250.0000"; roomRolloffFactor = "0.0000"; decayHFLimit = "0"; }; singleton SFXEnvironment(CastleAlcove) { reverbDensity = "1.0000"; reverbDiffusion = "0.8900"; reverbGain = "0.3162"; reverbGainHF = "0.5012"; reverbGainLF = "0.1000"; reverbDecayTime = "1.6400"; reverbDecayHFRatio = "0.8700"; reverbDecayLFRatio = "0.3100"; reflectionsGain = "1.0000"; reflectionDelay = "0.0070"; reflectionsPan[ 0 ] = "0.0"; reflectionsPan[ 1 ] = "0.0"; reflectionsPan[ 2 ] = "0.0"; lateReverbGain = "1.4125"; lateReverbDelay = "0.0340"; lateReverbPan[ 0 ] = "0.0"; lateReverbPan[ 1 ] = "0.0"; lateReverbPan[ 2 ] = "0.0"; reverbEchoTime = "0.1380"; reverbEchoDepth = "0.0800"; reverbModTime = "0.2500"; reverbModDepth = "0.0000"; airAbsorbtionGainHF = "0.9943"; reverbHFRef = "5168.0001"; reverbLFRef = "139.5000"; roomRolloffFactor = "0.0000"; decayHFLimit = "1"; }; singleton SFXEnvironment(FactorySmallRoom) { reverbDensity = "0.3645"; reverbDiffusion = "0.8200"; reverbGain = "0.3162"; reverbGainHF = "0.7943"; reverbGainLF = "0.5012"; reverbDecayTime = "1.7200"; reverbDecayHFRatio = "0.6500"; reverbDecayLFRatio = "1.3100"; reflectionsGain = "0.7079"; reflectionDelay = "0.0100"; reflectionsPan[ 0 ] = "0.0"; reflectionsPan[ 1 ] = "0.0"; reflectionsPan[ 2 ] = 
"0.0"; lateReverbGain = "1.7783"; lateReverbDelay = "0.0240"; lateReverbPan[ 0 ] = "0.0"; lateReverbPan[ 1 ] = "0.0"; lateReverbPan[ 2 ] = "0.0"; reverbEchoTime = "0.1190"; reverbEchoDepth = "0.0700"; reverbModTime = "0.2500"; reverbModDepth = "0.0000"; airAbsorbtionGainHF = "0.9943"; reverbHFRef = "3762.6001"; reverbLFRef = "362.5000"; roomRolloffFactor = "0.0000"; decayHFLimit = "1"; }; singleton SFXEnvironment(FactoryShortPassage) { reverbDensity = "0.3645"; reverbDiffusion = "0.6400"; reverbGain = "0.2512"; reverbGainHF = "0.7943"; reverbGainLF = "0.5012"; reverbDecayTime = "2.5300"; reverbDecayHFRatio = "0.6500"; reverbDecayLFRatio = "1.3100"; reflectionsGain = "1.0000"; reflectionDelay = "0.0100"; reflectionsPan[ 0 ] = "0.0"; reflectionsPan[ 1 ] = "0.0"; reflectionsPan[ 2 ] = "0.0"; lateReverbGain = "1.2589"; lateReverbDelay = "0.0380"; lateReverbPan[ 0 ] = "0.0"; lateReverbPan[ 1 ] = "0.0"; lateReverbPan[ 2 ] = "0.0"; reverbEchoTime = "0.1350"; reverbEchoDepth = "0.2300"; reverbModTime = "0.2500"; reverbModDepth = "0.0000"; airAbsorbtionGainHF = "0.9943"; reverbHFRef = "3762.6001"; reverbLFRef = "362.5000"; roomRolloffFactor = "0.0000"; decayHFLimit = "1"; }; singleton SFXEnvironment(FactoryMediumRoom) { reverbDensity = "0.4287"; reverbDiffusion = "0.8200"; reverbGain = "0.2512"; reverbGainHF = "0.7943"; reverbGainLF = "0.5012"; reverbDecayTime = "2.7600"; reverbDecayHFRatio = "0.6500"; reverbDecayLFRatio = "1.3100"; reflectionsGain = "0.2818"; reflectionDelay = "0.0220"; reflectionsPan[ 0 ] = "0.0"; reflectionsPan[ 1 ] = "0.0"; reflectionsPan[ 2 ] = "0.0"; lateReverbGain = "1.4125"; lateReverbDelay = "0.0230"; lateReverbPan[ 0 ] = "0.0"; lateReverbPan[ 1 ] = "0.0"; lateReverbPan[ 2 ] = "0.0"; reverbEchoTime = "0.1740"; reverbEchoDepth = "0.0700"; reverbModTime = "0.2500"; reverbModDepth = "0.0000"; airAbsorbtionGainHF = "0.9943"; reverbHFRef = "3762.6001"; reverbLFRef = "362.5000"; roomRolloffFactor = "0.0000"; decayHFLimit = "1"; }; singleton 
SFXEnvironment(FactoryLargeRoom) { reverbDensity = "0.4287"; reverbDiffusion = "0.7500"; reverbGain = "0.2512"; reverbGainHF = "0.7079"; reverbGainLF = "0.6310"; reverbDecayTime = "4.2400"; reverbDecayHFRatio = "0.5100"; reverbDecayLFRatio = "1.3100"; reflectionsGain = "0.1778"; reflectionDelay = "0.0390"; reflectionsPan[ 0 ] = "0.0"; reflectionsPan[ 1 ] = "0.0"; reflectionsPan[ 2 ] = "0.0"; lateReverbGain = "1.1220"; lateReverbDelay = "0.0230"; lateReverbPan[ 0 ] = "0.0"; lateReverbPan[ 1 ] = "0.0"; lateReverbPan[ 2 ] = "0.0"; reverbEchoTime = "0.2310"; reverbEchoDepth = "0.0700"; reverbModTime = "0.2500"; reverbModDepth = "0.0000"; airAbsorbtionGainHF = "0.9943"; reverbHFRef = "3762.6001"; reverbLFRef = "362.5000"; roomRolloffFactor = "0.0000"; decayHFLimit = "1"; }; singleton SFXEnvironment(FactoryLongPassage) { reverbDensity = "0.3645"; reverbDiffusion = "0.6400"; reverbGain = "0.2512"; reverbGainHF = "0.7943"; reverbGainLF = "0.5012"; reverbDecayTime = "4.0000"; reverbDecayHFRatio = "0.6500"; reverbDecayLFRatio = "1.3100"; reflectionsGain = "1.0000"; reflectionDelay = "0.0200"; reflectionsPan[ 0 ] = "0.0"; reflectionsPan[ 1 ] = "0.0"; reflectionsPan[ 2 ] = "0.0"; lateReverbGain = "1.2589"; lateReverbDelay = "0.0370"; lateReverbPan[ 0 ] = "0.0"; lateReverbPan[ 1 ] = "0.0"; lateReverbPan[ 2 ] = "0.0"; reverbEchoTime = "0.1350"; reverbEchoDepth = "0.2300"; reverbModTime = "0.2500"; reverbModDepth = "0.0000"; airAbsorbtionGainHF = "0.9943"; reverbHFRef = "3762.6001"; reverbLFRef = "362.5000"; roomRolloffFactor = "0.0000"; decayHFLimit = "1"; }; singleton SFXEnvironment(FactoryHall) { reverbDensity = "0.4287"; reverbDiffusion = "0.7500"; reverbGain = "0.3162"; reverbGainHF = "0.7079"; reverbGainLF = "0.6310"; reverbDecayTime = "7.4300"; reverbDecayHFRatio = "0.5100"; reverbDecayLFRatio = "1.3100"; reflectionsGain = "0.0631"; reflectionDelay = "0.0730"; reflectionsPan[ 0 ] = "0.0"; reflectionsPan[ 1 ] = "0.0"; reflectionsPan[ 2 ] = "0.0"; lateReverbGain = "0.8913"; 
lateReverbDelay = "0.0270"; lateReverbPan[ 0 ] = "0.0"; lateReverbPan[ 1 ] = "0.0"; lateReverbPan[ 2 ] = "0.0"; reverbEchoTime = "0.2500"; reverbEchoDepth = "0.0700"; reverbModTime = "0.2500"; reverbModDepth = "0.0000"; airAbsorbtionGainHF = "0.9943"; reverbHFRef = "3762.6001"; reverbLFRef = "362.5000"; roomRolloffFactor = "0.0000"; decayHFLimit = "1"; }; singleton SFXEnvironment(FactoryCupboard) { reverbDensity = "0.3071"; reverbDiffusion = "0.6300"; reverbGain = "0.2512"; reverbGainHF = "0.7943"; reverbGainLF = "0.5012"; reverbDecayTime = "0.4900"; reverbDecayHFRatio = "0.6500"; reverbDecayLFRatio = "1.3100"; reflectionsGain = "1.2589"; reflectionDelay = "0.0100"; reflectionsPan[ 0 ] = "0.0"; reflectionsPan[ 1 ] = "0.0"; reflectionsPan[ 2 ] = "0.0"; lateReverbGain = "1.9953"; lateReverbDelay = "0.0320"; lateReverbPan[ 0 ] = "0.0"; lateReverbPan[ 1 ] = "0.0"; lateReverbPan[ 2 ] = "0.0"; reverbEchoTime = "0.1070"; reverbEchoDepth = "0.0700"; reverbModTime = "0.2500"; reverbModDepth = "0.0000"; airAbsorbtionGainHF = "0.9943"; reverbHFRef = "3762.6001"; reverbLFRef = "362.5000"; roomRolloffFactor = "0.0000"; decayHFLimit = "1"; }; singleton SFXEnvironment(FactoryCourtyard) { reverbDensity = "0.3071"; reverbDiffusion = "0.5700"; reverbGain = "0.3162"; reverbGainHF = "0.3162"; reverbGainLF = "0.6310"; reverbDecayTime = "2.3200"; reverbDecayHFRatio = "0.2900"; reverbDecayLFRatio = "0.5600"; reflectionsGain = "0.2239"; reflectionDelay = "0.1400"; reflectionsPan[ 0 ] = "0.0"; reflectionsPan[ 1 ] = "0.0"; reflectionsPan[ 2 ] = "0.0"; lateReverbGain = "0.3981"; lateReverbDelay = "0.0390"; lateReverbPan[ 0 ] = "0.0"; lateReverbPan[ 1 ] = "0.0"; lateReverbPan[ 2 ] = "0.0"; reverbEchoTime = "0.2500"; reverbEchoDepth = "0.2900"; reverbModTime = "0.2500"; reverbModDepth = "0.0000"; airAbsorbtionGainHF = "0.9943"; reverbHFRef = "3762.6001"; reverbLFRef = "362.5000"; roomRolloffFactor = "0.0000"; decayHFLimit = "1"; }; singleton SFXEnvironment(FactoryAlcove) { reverbDensity = 
"0.3645"; reverbDiffusion = "0.5900"; reverbGain = "0.2512"; reverbGainHF = "0.7943"; reverbGainLF = "0.5012"; reverbDecayTime = "3.1400"; reverbDecayHFRatio = "0.6500"; reverbDecayLFRatio = "1.3100"; reflectionsGain = "1.4125"; reflectionDelay = "0.0100"; reflectionsPan[ 0 ] = "0.0"; reflectionsPan[ 1 ] = "0.0"; reflectionsPan[ 2 ] = "0.0"; lateReverbGain = "1.0000"; lateReverbDelay = "0.0380"; lateReverbPan[ 0 ] = "0.0"; lateReverbPan[ 1 ] = "0.0"; lateReverbPan[ 2 ] = "0.0"; reverbEchoTime = "0.1140"; reverbEchoDepth = "0.1000"; reverbModTime = "0.2500"; reverbModDepth = "0.0000"; airAbsorbtionGainHF = "0.9943"; reverbHFRef = "3762.6001"; reverbLFRef = "362.5000"; roomRolloffFactor = "0.0000"; decayHFLimit = "1"; };
/**
 * Screenary: Real-Time Collaboration Redefined.
 * Screencasting Session
 *
 * Copyright 2011-2012 Terri-Anne Cambridge <tacambridge@gmail.com>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Concurrent;
using System.Runtime.CompilerServices;

namespace Screenary.Server
{
	/// <summary>
	/// State of a single screencasting session: the sender (broadcaster), the
	/// clients that have joined and authenticated, pending remote-control
	/// requests, and which client currently holds remote control.
	/// Mutating methods are serialized with [MethodImpl(MethodImplOptions.Synchronized)].
	/// </summary>
	public class ScreencastingSession
	{
		public char[] sessionKey { get; set; }
		public UInt32 senderId { get; set; }
		public string senderUsername { get; set; }
		private string sessionPassword;
		public Client senderClient;

		// Client that currently has remote control; initially the sender itself.
		private Client remoteController;

		/* Lists of TCP Clients */
		public ConcurrentDictionary<Client, UInt32> joinedClients { get; set; }
		public ConcurrentDictionary<Client, User> authenticatedClients { get; set; }
		public ConcurrentDictionary<string, Client> remoteControlRequestClients { get; set; }

		/// <summary>Lightweight (sessionId, username) pair for an authenticated client.</summary>
		public struct User
		{
			public UInt32 sessionId;
			public string username;
		}

		public ScreencastingSession(char[] sessionKey, UInt32 senderId, string senderUsername,
			string sessionPassword, Client senderClient)
		{
			this.sessionKey = sessionKey;
			this.senderId = senderId;
			this.senderUsername = senderUsername;
			this.sessionPassword = sessionPassword;
			this.senderClient = senderClient;
			this.remoteController = null;
			this.joinedClients = new ConcurrentDictionary<Client, UInt32>();
			this.authenticatedClients = new ConcurrentDictionary<Client, User>();
			this.remoteControlRequestClients = new ConcurrentDictionary<string, Client>();
		}

		/// <summary>Records a client that has joined (but not yet authenticated).</summary>
		[MethodImpl(MethodImplOptions.Synchronized)]
		public void AddJoinedUser(Client client, UInt32 id)
		{
			joinedClients.TryAdd(client, id);
		}

		/// <summary>
		/// Registers the first participant (the sender): it becomes both the
		/// session's sender client and the initial remote controller.
		/// </summary>
		[MethodImpl(MethodImplOptions.Synchronized)]
		public void AddFirstUser(Client client, UInt32 id, string username)
		{
			User user;
			user.sessionId = id;
			user.username = username;
			remoteController = client;
			authenticatedClients.TryAdd(client, user);
			senderClient = client;
		}

		/// <summary>
		/// Registers an authenticated participant and broadcasts a "joined"
		/// notification.  (The original wrapped the add in a one-iteration
		/// while-loop that served no purpose; removed.)
		/// </summary>
		[MethodImpl(MethodImplOptions.Synchronized)]
		public void AddAuthenticatedUser(Client client, UInt32 id, string username)
		{
			User user;
			user.sessionId = id;
			user.username = username;
			authenticatedClients.TryAdd(client, user);
			UpdateFirstNotifications("joined", username, senderUsername);
		}

		/// <summary>
		/// Pushes the current participant list to every authenticated client;
		/// clients whose transport has died are dropped from the session.
		/// ConcurrentDictionary enumeration is safe while entries are removed.
		/// </summary>
		[MethodImpl(MethodImplOptions.Synchronized)]
		public void UpdateAllParticipants()
		{
			foreach (Client client in authenticatedClients.Keys)
			{
				try
				{
					client.OnSessionParticipantListUpdated(GetParticipantUsernames());
				}
				catch (TransportException e)
				{
					Console.WriteLine("Caught Transport Exception: " + e.Message);
					RemoveAuthenticatedUser(client);
				}
			}
		}

		/// <summary>Broadcasts a notification (e.g. "left", "control of") to all participants.</summary>
		[MethodImpl(MethodImplOptions.Synchronized)]
		public void UpdateNotifications(string type, string username)
		{
			foreach (Client client in authenticatedClients.Keys)
			{
				try
				{
					client.OnSessionNotificationUpdate(type, username);
				}
				catch (TransportException e)
				{
					Console.WriteLine("Caught Transport Exception: " + e.Message);
					RemoveAuthenticatedUser(client);
				}
			}
		}

		/// <summary>Broadcasts a first-join notification carrying the sender's username.</summary>
		[MethodImpl(MethodImplOptions.Synchronized)]
		public void UpdateFirstNotifications(string type, string username, string senderClient)
		{
			foreach (Client client in authenticatedClients.Keys)
			{
				try
				{
					client.OnSessionFirstNotificationUpdate(type, username, senderClient);
				}
				catch (TransportException e)
				{
					Console.WriteLine("Caught Transport Exception: " + e.Message);
					RemoveAuthenticatedUser(client);
				}
			}
		}

		/// <summary>Removes a participant by explicit username/sessionId and notifies the rest.</summary>
		[MethodImpl(MethodImplOptions.Synchronized)]
		public void RemoveAuthenticatedUser(Client client, string username, UInt32 sessionId)
		{
			User user;
			authenticatedClients.TryRemove(client, out user);
			joinedClients.TryRemove(client, out sessionId);
			UpdateNotifications("left", username);
		}

		/// <summary>
		/// Removes a participant, looking its username up from the stored entry.
		/// Only notifies when the client was actually registered (the original
		/// would broadcast "left" with a null username on a failed removal).
		/// </summary>
		[MethodImpl(MethodImplOptions.Synchronized)]
		public void RemoveAuthenticatedUser(Client client)
		{
			User user;
			UInt32 sessionId;
			bool removed = authenticatedClients.TryRemove(client, out user);
			joinedClients.TryRemove(client, out sessionId);
			if (removed)
				UpdateNotifications("left", user.username);
		}

		/// <summary>
		/// Authenticates a client against the session password; the username must
		/// also be unique within the session.  On success the client is registered.
		/// </summary>
		[MethodImpl(MethodImplOptions.Synchronized)]
		public bool Authenticate(Client client, UInt32 sessionId, string username, string password)
		{
			// string.Equals(a, b) is null-safe, unlike sessionPassword.Equals(password).
			bool isAuthenticated = (string.Equals(this.sessionPassword, password) && isUsernameUnique(username));

			if (isAuthenticated)
			{
				this.AddAuthenticatedUser(client, sessionId, username);
			}

			return isAuthenticated;
		}

		/// <summary>
		/// Records a remote-control request from an authenticated client.  If a
		/// receiver already holds control the request is denied immediately;
		/// otherwise the sender is asked to grant or deny it.
		/// </summary>
		[MethodImpl(MethodImplOptions.Synchronized)]
		public void AddRemoteAccessRequest(Client requestingClient, string username)
		{
			if (!authenticatedClients.ContainsKey(requestingClient))
				return;

			if (remoteController != this.senderClient)
			{
				/* another receiver has control: deny */
				remoteControlRequestClients.TryAdd(username, requestingClient);
				DenyRemoteAccess(this.senderClient, username);
			}
			else
			{
				/* sender has control: queue the request and inform the sender */
				remoteControlRequestClients.TryAdd(username, requestingClient);
				senderClient.OnSessionRemoteAccessRequested(username);
			}
		}

		/// <summary>
		/// Grants remote control to a requesting receiver.  Only honored when
		/// invoked by the session's sender.  Uses TryGetValue/TryRemove instead of
		/// the dictionary indexer, which threw KeyNotFoundException for unknown
		/// senders or receivers in the original code.
		/// </summary>
		[MethodImpl(MethodImplOptions.Synchronized)]
		public void GrantRemoteAccess(Client senderClient, string receiverUsername)
		{
			User senderUser;
			if (!authenticatedClients.TryGetValue(senderClient, out senderUser))
				return;

			if (!senderUsername.Equals(senderUser.username))
				return;

			Client receiverClient;
			if (remoteControlRequestClients.TryRemove(receiverUsername, out receiverClient) && receiverClient != null)
			{
				remoteController = receiverClient;
				UpdateNotifications("control of", receiverUsername);
			}
		}

		/// <summary>
		/// Denies a pending remote-control request.  Only honored when invoked by
		/// the session's sender.  Checks the TryRemove result before using the
		/// removed client (the original dereferenced it unconditionally, NRE-ing
		/// when the request was not pending).
		/// </summary>
		[MethodImpl(MethodImplOptions.Synchronized)]
		public void DenyRemoteAccess(Client senderClient, string receiverUsername)
		{
			if (this.senderClient != senderClient)
				return;

			Client receiverClient;
			if (remoteControlRequestClients.TryRemove(receiverUsername, out receiverClient) && receiverClient != null)
			{
				receiverClient.OnSessionNotificationUpdate("been denied control of", receiverUsername);
			}
		}

		/**
		 * Sender of Receiver with remote access may terminate and restore access to Sender
		 */
		[MethodImpl(MethodImplOptions.Synchronized)]
		public void TermRemoteAccessRequested(string username)
		{
			remoteController = this.senderClient;
			remoteControlRequestClients.Clear();
			UpdateNotifications("control of", this.senderUsername);
		}

		/// <summary>Returns the usernames of all authenticated participants.</summary>
		public ArrayList GetParticipantUsernames()
		{
			ArrayList participantUsernames = new ArrayList();

			foreach (User user in authenticatedClients.Values)
			{
				participantUsernames.Add(user.username);
			}

			return participantUsernames;
		}

		/// <summary>Forwards a remote mouse event to the sender's desktop.</summary>
		public void SendMouseEventToSender(UInt16 pointerFlags, int x, int y)
		{
			senderClient.SendMouseEventToSender(pointerFlags, x, y, this.senderId);
		}

		/// <summary>Forwards a remote keyboard event to the sender's desktop.</summary>
		public void SendKeyboardEventToSender(UInt16 pointerFlag, UInt16 keyCode)
		{
			senderClient.SendKeyboardEventToSender(pointerFlag, keyCode, this.senderId);
		}

		/// <summary>True when the session requires a password to join (null-safe).</summary>
		[MethodImpl(MethodImplOptions.Synchronized)]
		public bool isPasswordProtected()
		{
			return !string.IsNullOrEmpty(sessionPassword);
		}

		/// <summary>True when no authenticated participant already uses this username.</summary>
		[MethodImpl(MethodImplOptions.Synchronized)]
		private bool isUsernameUnique(string username)
		{
			foreach (User user in authenticatedClients.Values)
			{
				if (username.Equals(user.username))
					return false;
			}
			return true;
		}
	}
}
using System;
using System.CodeDom;
using System.Xml;
using System.Collections;
using Stetic.Undo;

namespace Stetic.Wrapper {

	/// <summary>
	/// Wrapper for a named group of Action wrappers. Supports XML read/write,
	/// undo/redo diffing and CodeDom generation of the corresponding Gtk.ActionGroup.
	/// </summary>
	public sealed class ActionGroup: ObjectWrapper
	{
		string name;                  // group name, serialized as the "name" attribute
		ActionCollection actions;     // the actions contained in this group
		ObjectWrapper owner;          // owning wrapper; supplies the UndoManager when set
		bool generatePublic = true;   // whether generated code exposes the group publicly

		public event ActionEventHandler ActionAdded;
		public event ActionEventHandler ActionRemoved;
		public event ActionEventHandler ActionChanged;

		public ActionGroup ()
		{
			actions = new ActionCollection (this);
		}

		public ActionGroup (string name): this ()
		{
			this.name = name;
		}

		// Disposes every contained action before disposing the wrapper itself.
		public override void Dispose ()
		{
			foreach (Action a in actions)
				a.Dispose ();
			base.Dispose ();
		}

		public ActionCollection Actions {
			get { return actions; }
		}

		public string Name {
			get { return name; }
			set { name = value; NotifyChanged (); }
		}

		public bool GeneratePublic {
			get { return generatePublic; }
			set { generatePublic = value; }
		}

		// Returns the contained action with the given name, or null when absent.
		public Action GetAction (string name)
		{
			foreach (Action ac in actions)
				if (ac.Name == name)
					return ac;
			return null;
		}

		// Returns 'name' unchanged when unique within the group; otherwise appends a
		// numeric suffix one past the highest index already in use for that base name.
		internal string GetValidName (Action reqAction, string name)
		{
			int max = 0;
			bool found = false;
			foreach (Action ac in Actions) {
				if (ac == reqAction)
					continue;
				string bname;
				int index;
				WidgetUtils.ParseWidgetName (ac.Name, out bname, out index);
				if (name == ac.Name)
					found = true;
				if (name == bname && index > max)
					max = index;
			}
			if (found)
				return name + (max+1);
			else
				return name;
		}

		// Serializes the group and its named actions to an <action-group> element.
		public override XmlElement Write (ObjectWriter writer)
		{
			XmlElement group = writer.XmlDocument.CreateElement ("action-group");
			group.SetAttribute ("name", name);
			if (writer.CreateUndoInfo)
				group.SetAttribute ("undoId", UndoId);
			foreach (Action ac in actions) {
				// Unnamed actions are not persisted.
				if (ac.Name.Length > 0)
					group.AppendChild (writer.WriteObject (ac));
			}
			return group;
		}

		// Restores the group from an <action-group> element produced by Write.
		public override void Read (ObjectReader reader, XmlElement elem)
		{
			name = elem.GetAttribute ("name");
			string uid = elem.GetAttribute ("undoId");
			if (uid.Length > 0)
				UndoId = uid;
			foreach (XmlElement child in elem.SelectNodes ("action")) {
				Action ac = new Action ();
				ac.Read (reader, child);
				actions.Add (ac);
			}
		}

		// Emits: new Gtk.ActionGroup ("<Name>")
		internal protected override CodeExpression GenerateObjectCreation (GeneratorContext ctx)
		{
			return new CodeObjectCreateExpression (
				typeof(Gtk.ActionGroup),
				new CodePrimitiveExpression (Name)
			);
		}

		// Generates creation/build code for each action followed by a call to
		// group.Add (action, accelerator).
		internal protected override void GenerateBuildCode (GeneratorContext ctx, CodeExpression var)
		{
			foreach (Action action in Actions) {
				// Create the action
				CodeExpression acVarExp = ctx.GenerateInstanceExpression (action, action.GenerateObjectCreation (ctx));
				ctx.GenerateBuildCode (action, acVarExp);
				ctx.Statements.Add (
					new CodeMethodInvokeExpression (
						var,
						"Add",
						acVarExp,
						new CodePrimitiveExpression (action.Accelerator)
					)
				);
			}
		}

		internal void SetOwner (ObjectWrapper owner)
		{
			this.owner = owner;
		}

		// The undo manager comes from the owning wrapper when there is one.
		internal override UndoManager GetUndoManagerInternal ()
		{
			if (owner != null)
				return owner.UndoManager;
			else
				return base.GetUndoManagerInternal ();
		}

		// Searches this wrapper first, then every contained action, for the undo id.
		public override ObjectWrapper FindObjectByUndoId (string id)
		{
			ObjectWrapper ow = base.FindObjectByUndoId (id);
			if (ow != null)
				return ow;
			foreach (Action ac in Actions) {
				ow = ac.FindObjectByUndoId (id);
				if (ow != null)
					return ow;
			}
			return null;
		}

		// Builds a diff generator comparing the live action list against the
		// XML snapshot held by the undo manager (child elements named "action").
		DiffGenerator GetDiffGenerator ()
		{
			DiffGenerator gen = new DiffGenerator ();
			gen.CurrentStatusAdaptor = new ActionDiffAdaptor (Project);
			XmlDiffAdaptor xad = new XmlDiffAdaptor ();
			xad.ChildElementName = "action";
			gen.NewStatusAdaptor = xad;
			return gen;
		}

		// Computes the diff between the stored status and the current state,
		// then refreshes the stored status snapshot.
		public override object GetUndoDiff ()
		{
			XmlElement oldElem = UndoManager.GetObjectStatus (this);
			UndoWriter writer = new UndoWriter (oldElem.OwnerDocument, UndoManager);
			XmlElement newElem = Write (writer);
			ObjectDiff actionsDiff = GetDiffGenerator().GetDiff (this, oldElem);
			UndoManager.UpdateObjectStatus (this, newElem);
			return actionsDiff;
		}

		// Applies an undo/redo diff and returns the inverse diff (for the
		// opposite operation), updating the stored status afterwards.
		public override object ApplyUndoRedoDiff (object diff)
		{
			ObjectDiff actionsDiff = (ObjectDiff) diff;
			XmlElement status = UndoManager.GetObjectStatus (this);
			DiffGenerator differ = GetDiffGenerator();
			differ.ApplyDiff (this, actionsDiff);
			actionsDiff = differ.GetDiff (this, status);
			UndoWriter writer = new UndoWriter (status.OwnerDocument, UndoManager);
			XmlElement newElem = Write (writer);
			UndoManager.UpdateObjectStatus (this, newElem);
			return actionsDiff;
		}

		// Hooks the action's change/signal events and raises ActionAdded.
		internal void NotifyActionAdded (Action ac)
		{
			ac.SetActionGroup (this);
			ac.ObjectChanged += OnActionChanged;
			ac.SignalAdded += OnSignalAdded;
			ac.SignalRemoved += OnSignalRemoved;
			ac.SignalChanged += OnSignalChanged;
			ac.UpdateNameIndex ();
			NotifyChanged ();
			if (ActionAdded != null)
				ActionAdded (this, new ActionEventArgs (ac));
		}

		// Unhooks the action's events and raises ActionRemoved.
		internal void NotifyActionRemoved (Action ac)
		{
			ac.SetActionGroup (null);
			ac.ObjectChanged -= OnActionChanged;
			ac.SignalAdded -= OnSignalAdded;
			ac.SignalRemoved -= OnSignalRemoved;
			ac.SignalChanged -= OnSignalChanged;
			NotifyChanged ();
			if (ActionRemoved != null)
				ActionRemoved (this, new ActionEventArgs (ac));
		}

		void OnActionChanged (object s, ObjectWrapperEventArgs args)
		{
			NotifyChanged ();
			if (ActionChanged != null)
				ActionChanged (this, new ActionEventArgs ((Action) args.Wrapper));
		}

		// The handlers below forward to single-argument overloads — presumably
		// inherited from ObjectWrapper; TODO confirm (not visible in this file).
		void OnSignalAdded (object s, SignalEventArgs args)
		{
			OnSignalAdded (args);
		}

		void OnSignalRemoved (object s, SignalEventArgs args)
		{
			OnSignalRemoved (args);
		}

		void OnSignalChanged (object s, SignalChangedEventArgs args)
		{
			OnSignalChanged (args);
		}
	}

	/// <summary>
	/// Typed collection of ActionGroup instances with add/remove/change
	/// notification events; propagates the owner wrapper to each group.
	/// </summary>
	public class ActionGroupCollection: CollectionBase
	{
		ActionGroup[] toClear;   // snapshot taken in OnClear so OnClearComplete can notify
		ObjectWrapper owner;     // propagated to each contained group

		internal void SetOwner (ObjectWrapper owner)
		{
			this.owner = owner;
		}

		public void Add (ActionGroup group)
		{
			List.Add (group);
		}

		public void Insert (int n, ActionGroup group)
		{
			List.Insert (n, group);
		}

		public ActionGroup this [int n] {
			get { return (ActionGroup) List [n]; }
		}

		// Linear lookup by group name; null when not found.
		public ActionGroup this [string name] {
			get {
				foreach (ActionGroup grp in List)
					if (grp.Name == name)
						return grp;
				return null;
			}
		}

		internal ObjectWrapper FindObjectByUndoId (string id)
		{
			foreach (ActionGroup ag in List) {
				ObjectWrapper ow = ag.FindObjectByUndoId (id);
				if (ow != null)
					return ow;
			}
			return null;
		}

		// Diff generator over the whole collection: the nested adaptor handles
		// <action-group> elements which in turn contain <action> children.
		DiffGenerator GetDiffGenerator (IProject prj)
		{
			DiffGenerator gen = new DiffGenerator ();
			gen.CurrentStatusAdaptor = new ActionDiffAdaptor (prj);
			XmlDiffAdaptor xad = new XmlDiffAdaptor ();
			xad.ChildElementName = "action-group";
			xad.ProcessProperties = false;
			xad.ChildAdaptor = new XmlDiffAdaptor ();
			xad.ChildAdaptor.ChildElementName = "action";
			gen.NewStatusAdaptor = xad;
			return gen;
		}

		internal ObjectDiff GetDiff (IProject prj, XmlElement elem)
		{
			return GetDiffGenerator (prj).GetDiff (this, elem);
		}

		internal void ApplyDiff (IProject prj, ObjectDiff diff)
		{
			GetDiffGenerator (prj).ApplyDiff (this, diff);
		}

		public int IndexOf (ActionGroup group)
		{
			return List.IndexOf (group);
		}

		public void Remove (ActionGroup group)
		{
			List.Remove (group);
		}

		protected override void OnInsertComplete (int index, object val)
		{
			NotifyGroupAdded ((ActionGroup) val);
		}

		protected override void OnRemoveComplete (int index, object val)
		{
			NotifyGroupRemoved ((ActionGroup)val);
		}

		protected override void OnSetComplete (int index, object oldv, object newv)
		{
			NotifyGroupRemoved ((ActionGroup) oldv);
			NotifyGroupAdded ((ActionGroup) newv);
		}

		protected override void OnClear ()
		{
			// Snapshot before the base class empties the underlying list.
			toClear = new ActionGroup [Count];
			List.CopyTo (toClear, 0);
		}

		protected override void OnClearComplete ()
		{
			foreach (ActionGroup a in toClear)
				NotifyGroupRemoved (a);
			toClear = null;
		}

		void NotifyGroupAdded (ActionGroup grp)
		{
			grp.SetOwner (owner);
			grp.ObjectChanged += OnGroupChanged;
			if (ActionGroupAdded != null)
				ActionGroupAdded (this, new ActionGroupEventArgs (grp));
		}

		void NotifyGroupRemoved (ActionGroup grp)
		{
			grp.SetOwner (null);
			grp.ObjectChanged -= OnGroupChanged;
			if (ActionGroupRemoved != null)
				ActionGroupRemoved (this, new ActionGroupEventArgs (grp));
		}

		void OnGroupChanged (object s, ObjectWrapperEventArgs a)
		{
			if (ActionGroupChanged != null)
				ActionGroupChanged (this, new ActionGroupEventArgs ((ActionGroup)s));
		}

		public ActionGroup[] ToArray ()
		{
			ActionGroup[] groups = new ActionGroup [Count];
			List.CopyTo (groups, 0);
			return groups;
		}

		public event ActionGroupEventHandler ActionGroupAdded;
		public event ActionGroupEventHandler ActionGroupRemoved;
		public event ActionGroupEventHandler ActionGroupChanged;
	}

	public delegate void ActionEventHandler (object sender, ActionEventArgs args);

	/// <summary>Event arguments carrying the affected Action.</summary>
	public class ActionEventArgs: EventArgs
	{
		readonly Action action;

		public ActionEventArgs (Action ac)
		{
			action = ac;
		}

		public Action Action {
			get { return action; }
		}
	}

	public delegate void ActionGroupEventHandler (object sender, ActionGroupEventArgs args);

	/// <summary>Event arguments carrying the affected ActionGroup.</summary>
	public class ActionGroupEventArgs: EventArgs
	{
		readonly ActionGroup action;

		public ActionGroupEventArgs (ActionGroup ac)
		{
			action = ac;
		}

		public ActionGroup ActionGroup {
			get { return action; }
		}
	}
}
// Copyright 2007-2012 Chris Patterson, Dru Sellers, Travis Smith, et. al.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
namespace MassTransit.Transports
{
    using System;
    using System.Collections.Generic;
    using System.Diagnostics;
    using System.Runtime.Serialization;
    using System.Transactions;
    using Context;
    using Exceptions;
    using Logging;
    using Magnum.Reflection;
    using Serialization;
    using Util;

    /// <summary>
    /// See <see cref="IEndpoint"/> for docs. Pairs a duplex transport with an
    /// error transport and an inbound message tracker that enforces retry limits.
    /// </summary>
    [DebuggerDisplay("{Address}")]
    public class Endpoint :
        IEndpoint
    {
        static readonly ILog _log = Logger.Get(typeof(Endpoint));
        readonly IEndpointAddress _address;
        readonly IMessageSerializer _serializer;
        readonly IInboundMessageTracker _tracker;
        readonly ISupportedMessageSerializers _supportedSerializers;
        bool _disposed;
        string _disposedMessage;          // precomputed for ObjectDisposedException
        IOutboundTransport _errorTransport;
        IDuplexTransport _transport;

        /// <summary>
        /// All collaborators are required; each is null-checked individually.
        /// </summary>
        public Endpoint([NotNull] IEndpointAddress address,
            [NotNull] IMessageSerializer serializer,
            [NotNull] IDuplexTransport transport,
            [NotNull] IOutboundTransport errorTransport,
            [NotNull] IInboundMessageTracker messageTracker,
            [NotNull] ISupportedMessageSerializers supportedSerializers)
        {
            if (address == null)
                throw new ArgumentNullException("address");
            if (serializer == null)
                throw new ArgumentNullException("serializer");
            if (transport == null)
                throw new ArgumentNullException("transport");
            if (errorTransport == null)
                throw new ArgumentNullException("errorTransport");
            if (messageTracker == null)
                throw new ArgumentNullException("messageTracker");
            if (supportedSerializers == null)
                throw new ArgumentNullException("supportedSerializers");

            _address = address;
            _errorTransport = errorTransport;
            _serializer = serializer;
            _tracker = messageTracker;
            _supportedSerializers = supportedSerializers;
            _transport = transport;

            _disposedMessage = string.Format("The endpoint has already been disposed: {0}", _address);
        }

        public IOutboundTransport ErrorTransport
        {
            get { return _errorTransport; }
        }

        public IMessageSerializer Serializer
        {
            get { return _serializer; }
        }

        public IEndpointAddress Address
        {
            get { return _address; }
        }

        public IInboundTransport InboundTransport
        {
            get { return _transport.InboundTransport; }
        }

        public IOutboundTransport OutboundTransport
        {
            get { return _transport.OutboundTransport; }
        }

        /// <summary>
        /// Serializes and sends the context's message; any failure is wrapped in a
        /// <see cref="SendException"/> carrying the message type and destination.
        /// </summary>
        public void Send<T>(ISendContext<T> context)
            where T : class
        {
            if (_disposed)
                throw new ObjectDisposedException(_disposedMessage);

            try
            {
                context.SetDestinationAddress(Address.Uri);
                // Serialization is deferred: the transport invokes this writer when
                // it actually emits the message body.
                context.SetBodyWriter(stream => _serializer.Serialize(stream, context));

                _transport.Send(context);

                context.NotifySend(_address);
            }
            catch (Exception ex)
            {
                throw new SendException(typeof(T), _address.Uri, "An exception was thrown during Send", ex);
            }
        }

        /// <summary>Sends a message using a default send context.</summary>
        public void Send<T>(T message)
            where T : class
        {
            ISendContext<T> context = ContextStorage.CreateSendContext(message);

            Send(context);
        }

        /// <summary>Sends a message, letting the caller customize the context first.</summary>
        public void Send<T>(T message, Action<ISendContext<T>> contextCallback)
            where T : class
        {
            ISendContext<T> context = ContextStorage.CreateSendContext(message);

            contextCallback(context);

            Send(context);
        }

        /// <summary>Sends an untyped message using its runtime type for dispatch.</summary>
        public void Send(object message)
        {
            if (message == null)
                throw new ArgumentNullException("message");

            EndpointObjectSenderCache.Instance[message.GetType()].Send(this, message);
        }

        /// <summary>Sends an untyped message as the explicitly supplied message type.</summary>
        public void Send(object message, Type messageType)
        {
            if (message == null)
                throw new ArgumentNullException("message");
            if (messageType == null)
                throw new ArgumentNullException("messageType");

            EndpointObjectSenderCache.Instance[messageType].Send(this, message);
        }

        /// <summary>Sends an untyped message with a context customization callback.</summary>
        public void Send(object message, Action<ISendContext> contextCallback)
        {
            if (message == null)
                throw new ArgumentNullException("message");
            if (contextCallback == null)
                throw new ArgumentNullException("contextCallback");

            Type messageType = message.GetType();

            EndpointObjectSenderCache.Instance[messageType].Send(this, message, contextCallback);
        }

        /// <summary>
        /// Sends an untyped message as the supplied type with a context callback.
        /// </summary>
        public void Send(object message, Type messageType, Action<ISendContext> contextCallback)
        {
            if (message == null)
                throw new ArgumentNullException("message");
            if (messageType == null)
                throw new ArgumentNullException("messageType");
            if (contextCallback == null)
                throw new ArgumentNullException("contextCallback");

            EndpointObjectSenderCache.Instance[messageType].Send(this, message, contextCallback);
        }

        /// <summary>
        /// Sends an interface message, initializing the properties of the interface using the anonymous
        /// object specified
        /// </summary>
        /// <typeparam name="T">The interface type to send</typeparam>
        /// <param name="values">The property values to initialize on the interface</param>
        public void Send<T>(object values)
            where T : class
        {
            var message = InterfaceImplementationExtensions.InitializeProxy<T>(values);

            Send(message, x => { });
        }

        /// <summary>
        /// Sends an interface message, initializing the properties of the interface using the anonymous
        /// object specified
        /// </summary>
        /// <typeparam name="T">The interface type to send</typeparam>
        /// <param name="values">The property values to initialize on the interface</param>
        /// <param name="contextCallback">A callback method to modify the send context for the message</param>
        public void Send<T>(object values, Action<ISendContext<T>> contextCallback)
            where T : class
        {
            var message = InterfaceImplementationExtensions.InitializeProxy<T>(values);

            Send(message, contextCallback);
        }

        public void Dispose()
        {
            Dispose(true);
        }

        /// <summary>
        /// Pulls a message from the transport and dispatches it to the consumer
        /// selected by <paramref name="receiver"/>. Tracks per-message retry counts;
        /// messages exceeding the retry limit are moved to the error transport.
        /// The accept callback returns either null (skip) or an action to run
        /// against the received message.
        /// </summary>
        public void Receive(Func<IReceiveContext, Action<IReceiveContext>> receiver, TimeSpan timeout)
        {
            if (_disposed)
                throw new ObjectDisposedException(_disposedMessage);

            // Id of the previously dispatched message whose success has not yet been
            // recorded with the tracker; flushed at the next accept or in finally.
            string successfulMessageId = null;

            try
            {
                Exception failedMessageException = null;

                _transport.Receive(acceptContext =>
                    {
                        failedMessageException = null;

                        // A new message arrived, so the previous one completed: record it.
                        if (successfulMessageId != null)
                        {
                            _log.DebugFormat("Received Successfully: {0}", successfulMessageId);

                            _tracker.MessageWasReceivedSuccessfully(successfulMessageId);
                            successfulMessageId = null;
                        }

                        Exception retryException;
                        // Prefer the original id so retries of a re-queued message share a count.
                        string acceptMessageId = acceptContext.OriginalMessageId ?? acceptContext.MessageId;

                        IEnumerable<Action> faultActions;
                        if (_tracker.IsRetryLimitExceeded(acceptMessageId, out retryException, out faultActions))
                        {
                            if (_log.IsErrorEnabled)
                                _log.ErrorFormat("Message retry limit exceeded {0}:{1}", Address, acceptMessageId);

                            failedMessageException = retryException;

                            acceptContext.ExecuteFaultActions(faultActions);

                            return MoveMessageToErrorTransport;
                        }

                        if (acceptContext.MessageId != acceptMessageId)
                        {
                            if (_log.IsErrorEnabled)
                                _log.DebugFormat("Message {0} original message id {1}", acceptContext.MessageId,
                                    acceptContext.OriginalMessageId);
                        }

                        Action<IReceiveContext> receive;
                        try
                        {
                            acceptContext.SetEndpoint(this);

                            // Pick a deserializer by content type, then let the receiver
                            // decide whether any consumer wants this message.
                            IMessageSerializer serializer;
                            if (!_supportedSerializers.TryGetSerializer(acceptContext.ContentType, out serializer))
                                throw new SerializationException(
                                    string.Format("The content type could not be deserialized: {0}",
                                        acceptContext.ContentType));

                            serializer.Deserialize(acceptContext);

                            receive = receiver(acceptContext);
                            if (receive == null)
                            {
                                // No consumer wanted it; count as a retry so a message
                                // nobody ever wants eventually lands in the error queue.
                                Address.LogSkipped(acceptMessageId);

                                if (_tracker.IncrementRetryCount(acceptMessageId))
                                    return MoveMessageToErrorTransport;

                                return null;
                            }
                        }
                        catch (SerializationException sex)
                        {
                            if (_log.IsErrorEnabled)
                                _log.Error("Unrecognized message " + Address + ":" + acceptMessageId, sex);

                            _tracker.IncrementRetryCount(acceptMessageId, sex);
                            return MoveMessageToErrorTransport;
                        }
                        catch (Exception ex)
                        {
                            if (_log.IsErrorEnabled)
                                _log.Error("An exception was thrown preparing the message consumers", ex);

                            if(_tracker.IncrementRetryCount(acceptMessageId, ex))
                            {
                                // Retries disabled: fault immediately instead of re-trying.
                                if (!_tracker.IsRetryEnabled)
                                {
                                    acceptContext.ExecuteFaultActions(acceptContext.GetFaultActions());
                                    return MoveMessageToErrorTransport;
                                }
                            }
                            return null;
                        }

                        // Dispatch action actually run against the received message.
                        return receiveContext =>
                            {
                                string receiveMessageId = receiveContext.OriginalMessageId ?? receiveContext.MessageId;
                                try
                                {
                                    receive(receiveContext);

                                    // Recorded lazily (above / in finally) rather than here.
                                    successfulMessageId = receiveMessageId;
                                }
                                catch (Exception ex)
                                {
                                    if (_log.IsErrorEnabled)
                                        _log.Error("An exception was thrown by a message consumer", ex);

                                    faultActions = receiveContext.GetFaultActions();
                                    if(_tracker.IncrementRetryCount(receiveMessageId, ex, faultActions))
                                    {
                                        if (!_tracker.IsRetryEnabled)
                                        {
                                            receiveContext.ExecuteFaultActions(faultActions);
                                            MoveMessageToErrorTransport(receiveContext);
                                            return;
                                        }
                                    }

                                    // Non-transactional transport: re-queue manually so the
                                    // message is not lost; transactional ones roll back via throw.
                                    if(!receiveContext.IsTransactional)
                                    {
                                        SaveMessageToInboundTransport(receiveContext);
                                        return;
                                    }

                                    throw;
                                }
                            };
                    }, timeout);

                if (failedMessageException != null)
                {
                    if(_log.IsErrorEnabled)
                        _log.ErrorFormat("Throwing Original Exception: {0}", failedMessageException.GetType());

                    throw failedMessageException;
                }
            }
            catch (Exception ex)
            {
                // The "success" was followed by a failure in the same receive pass;
                // count it as a retry instead.
                if (successfulMessageId != null)
                {
                    _log.DebugFormat("Increment Retry Count: {0}", successfulMessageId);

                    _tracker.IncrementRetryCount(successfulMessageId, ex);
                    successfulMessageId = null;
                }
                throw;
            }
            finally
            {
                if (successfulMessageId != null)
                {
                    _log.DebugFormat("Received Successfully: {0}", successfulMessageId);

                    _tracker.MessageWasReceivedSuccessfully(successfulMessageId);
                    successfulMessageId = null;
                }
            }
        }

        void Dispose(bool disposing)
        {
            if (_disposed)
                return;
            if (disposing)
            {
                _transport.Dispose();
                _transport = null;

                _errorTransport.Dispose();
                _errorTransport = null;
            }

            _disposed = true;
        }

        // Copies the message to the error transport and records the move.
        void MoveMessageToErrorTransport(IReceiveContext context)
        {
            var moveContext = new MoveMessageSendContext(context);

            _errorTransport.Send(moveContext);

            string messageId = context.OriginalMessageId ?? context.MessageId;
            _tracker.MessageWasMovedToErrorQueue(messageId);

            Address.LogMoved(_errorTransport.Address, context.MessageId, "");
        }

        // Re-queues the message on this endpoint's own transport for a later retry.
        void SaveMessageToInboundTransport(IReceiveContext context)
        {
            var moveContext = new MoveMessageSendContext(context);

            _transport.Send(moveContext);

            Address.LogReQueued(_transport.Address, context.MessageId, "");
        }
    }
}
//
//      Copyright (C) DataStax Inc.
//
//   Licensed under the Apache License, Version 2.0 (the "License");
//   you may not use this file except in compliance with the License.
//   You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
//   Unless required by applicable law or agreed to in writing, software
//   distributed under the License is distributed on an "AS IS" BASIS,
//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//   See the License for the specific language governing permissions and
//   limitations under the License.
//

using System;
using System.Diagnostics;
using System.Linq;
using Cassandra.IntegrationTests.TestBase;

namespace Cassandra.IntegrationTests.TestClusterManagement
{
    /// <summary>
    /// An <see cref="ITestCluster"/> backed by CCM (Cassandra Cluster Manager);
    /// every cluster operation is delegated to a <see cref="CcmBridge"/>.
    /// </summary>
    public class CcmCluster : ITestCluster
    {
        private readonly ICcmProcessExecuter _executor;
        private CcmBridge _ccm;
        private int _nodeLength;

        public string Name { get; set; }
        public string Version { get; set; }
        public Builder Builder { get; set; }
        public Cluster Cluster { get; set; }
        public ISession Session { get; set; }
        public string InitialContactPoint { get; set; }
        public string ClusterIpPrefix { get; set; }
        public string DsePath { get; set; }
        public string DefaultKeyspace { get; set; }

        public CcmCluster(string name, string clusterIpPrefix, string dsePath, ICcmProcessExecuter executor, string defaultKeyspace, string version)
        {
            Name = name;
            Version = version;
            DefaultKeyspace = defaultKeyspace;
            ClusterIpPrefix = clusterIpPrefix;
            DsePath = dsePath;
            // Node 1 is always the first contact point.
            InitialContactPoint = ClusterIpPrefix + "1";
            _executor = executor;
        }

        /// <summary>
        /// Creates and populates the CCM cluster (without starting it), applying
        /// yaml overrides and, where applicable, DSE workloads and WSL timeouts.
        /// </summary>
        public void Create(int nodeLength, TestClusterOptions options = null)
        {
            _nodeLength = nodeLength;
            if (options == null)
            {
                options = TestClusterOptions.Default;
            }
            _ccm = new CcmBridge(Name, ClusterIpPrefix, DsePath, Version, _executor);
            _ccm.Create(options.UseSsl);
            _ccm.Populate(nodeLength, options.Dc2NodeLength, options.UseVNodes);
            _ccm.UpdateConfig(options.CassandraYaml);
            if (TestClusterManager.IsDse)
            {
                _ccm.UpdateDseConfig(options.DseYaml);
                _ccm.SetWorkloads(nodeLength, options.Workloads);
            }
            if (TestClusterManager.Executor is WslCcmProcessExecuter)
            {
                RaiseWslTimeouts();
            }
        }

        // CCM under WSL is slow; raise the server-side timeouts so tests don't flake.
        private void RaiseWslTimeouts()
        {
            _ccm.UpdateConfig(new []
            {
                "read_request_timeout_in_ms: 20000",
                "counter_write_request_timeout_in_ms: 20000",
                "write_request_timeout_in_ms: 20000",
                "request_timeout_in_ms: 20000",
                "range_request_timeout_in_ms: 30000"
            });
            if (!TestClusterManager.IsDse)
            {
                return;
            }
            // The UDF timeout setting was renamed in DSE 6.7.
            if (TestClusterManager.CheckDseVersion(new Version(6, 7), Comparison.LessThan))
            {
                _ccm.UpdateConfig(new[] { "user_defined_function_fail_timeout: 20000" });
            }
            else
            {
                _ccm.UpdateConfig(new[] { "user_defined_function_fail_micros: 20000" });
            }
        }

        /// <summary>
        /// (Re)builds the driver Cluster/Session against this CCM cluster and
        /// optionally creates/selects the default keyspace.
        /// </summary>
        public void InitClient()
        {
            Cluster?.Shutdown();
            Builder = Builder ?? TestUtils.NewBuilder();
            Cluster = Builder.AddContactPoint(InitialContactPoint).Build();
            Session = Cluster.Connect();
            if (DefaultKeyspace == null)
            {
                return;
            }
            Session.CreateKeyspaceIfNotExists(DefaultKeyspace);
            Session.ChangeKeyspace(DefaultKeyspace);
        }

        public void ShutDown()
        {
            Cluster?.Shutdown();
            _ccm.Stop();
        }

        public void Remove()
        {
            Trace.TraceInformation($"Removing Cluster with Name: '{Name}', InitialContactPoint: {InitialContactPoint}, and CcmDir: {_ccm.CcmDir}");
            _ccm.Remove();
        }

        public void Remove(int nodeId)
        {
            Trace.TraceInformation($"Removing node '{nodeId}' from cluster '{Name}'");
            _ccm.Remove(nodeId);
        }

        public void DecommissionNode(int nodeId) => _ccm.DecommissionNode(nodeId);

        public void DecommissionNodeForcefully(int nodeId) =>
            _ccm.ExecuteCcm(string.Format("node{0} nodetool \"decommission -f\"", nodeId), false);

        public void PauseNode(int nodeId) => _ccm.ExecuteCcm($"node{nodeId} pause");

        public void ResumeNode(int nodeId) => _ccm.ExecuteCcm($"node{nodeId} resume");

        public void SwitchToThisCluster() => _ccm.SwitchToThis();

        public void StopForce(int nodeIdToStop) => _ccm.StopForce(nodeIdToStop);

        public void Stop(int nodeIdToStop) => _ccm.Stop(nodeIdToStop);

        /// <summary>
        /// Starts every node; under WSL additionally waits for each node's
        /// native port to open before returning.
        /// </summary>
        public void Start(string[] jvmArgs = null)
        {
            var startOutput = _ccm.Start(jvmArgs);
            if (!(_executor is WslCcmProcessExecuter))
            {
                return;
            }
            for (var node = 1; node <= _nodeLength; node++)
            {
                _ccm.CheckNativePortOpen(startOutput, TestClusterManager.IpPrefix + node);
            }
        }

        /// <summary>
        /// Starts a single node; under WSL also waits for its native port.
        /// </summary>
        public void Start(int nodeIdToStart, string additionalArgs = null, string newIp = null, string[] jvmArgs = null)
        {
            var startOutput = _ccm.Start(nodeIdToStart, additionalArgs, jvmArgs);
            if (_executor is WslCcmProcessExecuter)
            {
                _ccm.CheckNativePortOpen(startOutput, newIp ?? (TestClusterManager.IpPrefix + nodeIdToStart));
            }
        }

        public void BootstrapNode(int nodeIdToStart, bool start = true) => _ccm.BootstrapNode(nodeIdToStart, start);

        public void SetNodeWorkloads(int nodeId, string[] workloads)
        {
            if (!TestClusterManager.IsDse)
            {
                throw new InvalidOperationException("Cant set workloads on an oss cluster.");
            }
            _ccm.SetNodeWorkloads(nodeId, workloads);
        }

        /// <summary>
        /// Bootstraps a node into a specific datacenter. Under WSL the node is
        /// bootstrapped stopped, then its native port is checked after start.
        /// </summary>
        public void BootstrapNode(int nodeIdToStart, string dataCenterName, bool start = true)
        {
            var isWsl = _executor is WslCcmProcessExecuter;
            var bootstrapOutput = _ccm.BootstrapNode(nodeIdToStart, dataCenterName, isWsl ? false : start);
            if (start && isWsl)
            {
                _ccm.CheckNativePortOpen(bootstrapOutput, TestClusterManager.IpPrefix + nodeIdToStart);
            }
        }

        public void UpdateDseConfig(params string[] yamlChanges)
        {
            if (yamlChanges == null)
            {
                return;
            }
            var quotedChanges = string.Join(" ", yamlChanges.Select(s => $"\"{s}\""));
            _ccm.ExecuteCcm($"updatedseconf {quotedChanges}");
        }

        public void UpdateConfig(params string[] yamlChanges)
        {
            if (yamlChanges == null)
            {
                return;
            }
            var quotedChanges = string.Join(" ", yamlChanges.Select(s => $"\"{s}\""));
            _ccm.ExecuteCcm($"updateconf {quotedChanges}");
        }

        public void UpdateConfig(int nodeId, params string[] yamlChanges)
        {
            if (yamlChanges == null)
            {
                return;
            }
            var quotedChanges = string.Join(" ", yamlChanges.Select(s => $"\"{s}\""));
            _ccm.ExecuteCcm($"node{nodeId} updateconf {quotedChanges}");
        }
    }
}
#region License and Terms
// MoreLINQ - Extensions to LINQ to Objects
// Copyright (c) 2010 Johannes Rudolph. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#endregion

namespace MoreLinq.Test
{
    using System;
    using System.Collections;
    using System.Collections.Generic;
    using System.Data;
    using System.Linq; // FIX: required for Enumerable.Range/Select/Cast/ToArray used below
    using System.Linq.Expressions;
    using NUnit.Framework;

    /// <summary>
    /// Tests for the <c>ToDataTable</c> extension: argument validation, schema
    /// inference (member kinds, ordering, nullability) and row materialization.
    /// </summary>
    [TestFixture]
    public class ToDataTableTest
    {
        // Fixture type exercising the member kinds ToDataTable must handle:
        // public fields, read-only properties, a write-only property and an
        // indexer (the latter two must be ignored by schema inference).
        class TestObject
        {
            public int KeyField;
            public Guid? ANullableGuidField;

            public string AString { get; }
            public decimal? ANullableDecimal { get; }
            public object Unreadable { set => throw new NotImplementedException(); }

            public object this[int index]
            {
                get => new object();
                set { }
            }

            public TestObject(int key)
            {
                KeyField = key;
                ANullableGuidField = Guid.NewGuid();
                // Integer division is intentional; only the column type matters here.
                ANullableDecimal = key / 3;
                AString = "ABCDEFGHIKKLMNOPQRSTUVWXYSZ";
            }
        }

        readonly IReadOnlyCollection<TestObject> _testObjects;

        public ToDataTableTest()
        {
            _testObjects = Enumerable.Range(0, 3)
                                     .Select(i => new TestObject(i))
                                     .ToArray();
        }

        [Test]
        public void ToDataTableNullMemberExpressionMethod()
        {
            Expression<Func<TestObject, object>> expression = null;

            AssertThrowsArgument.Exception("expressions",() =>
                _testObjects.ToDataTable<TestObject>(expression));
        }

        [Test]
        public void ToDataTableTableWithWrongColumnNames()
        {
            var dt = new DataTable();
            dt.Columns.Add("Test");

            AssertThrowsArgument.Exception("table",() =>
                _testObjects.ToDataTable(dt));
        }

        [Test]
        public void ToDataTableTableWithWrongColumnDataType()
        {
            var dt = new DataTable();
            dt.Columns.Add("AString", typeof(int));

            AssertThrowsArgument.Exception("table",() =>
                _testObjects.ToDataTable(dt, t=>t.AString));
        }

        [Test]
        public void ToDataTableMemberExpressionMethod()
        {
            // Method-call expressions are not valid column selectors.
            AssertThrowsArgument.Exception("lambda", () =>
                _testObjects.ToDataTable(t => t.ToString()));
        }

        [Test]
        public void ToDataTableMemberExpressionNonMember()
        {
            // An expression on a member of a member is not a valid column selector.
            AssertThrowsArgument.Exception("lambda", () =>
                _testObjects.ToDataTable(t => t.ToString().Length));
        }

        [Test]
        public void ToDataTableMemberExpressionIndexer()
        {
            AssertThrowsArgument.Exception("lambda",() =>
                _testObjects.ToDataTable(t => t[0]));
        }

        [Test]
        public void ToDataTableSchemaInDeclarationOrder()
        {
            var dt = _testObjects.ToDataTable();

            // Assert properties first, then fields, then in declaration order

            Assert.AreEqual("AString", dt.Columns[0].Caption);
            Assert.AreEqual(typeof(string), dt.Columns[0].DataType);

            Assert.AreEqual("ANullableDecimal", dt.Columns[1].Caption);
            Assert.AreEqual(typeof(decimal), dt.Columns[1].DataType);

            Assert.AreEqual("KeyField", dt.Columns[2].Caption);
            Assert.AreEqual(typeof(int), dt.Columns[2].DataType);

            // Nullable<T> columns surface as the underlying type with AllowDBNull.
            Assert.AreEqual("ANullableGuidField", dt.Columns[3].Caption);
            Assert.AreEqual(typeof(Guid), dt.Columns[3].DataType);
            Assert.IsTrue(dt.Columns[3].AllowDBNull);

            Assert.AreEqual(4, dt.Columns.Count);
        }

        [Test]
        public void ToDataTableContainsAllElements()
        {
            var dt = _testObjects.ToDataTable();

            Assert.AreEqual(_testObjects.Count, dt.Rows.Count);
        }

        [Test]
        public void ToDataTableWithExpression()
        {
            var dt = _testObjects.ToDataTable(t => t.AString);

            Assert.AreEqual("AString", dt.Columns[0].Caption);
            Assert.AreEqual(typeof(string), dt.Columns[0].DataType);

            Assert.AreEqual(1, dt.Columns.Count);
        }

        [Test]
        public void ToDataTableWithSchema()
        {
            // A pre-built table with extra columns: only the selected members
            // ("Name", "Value") are populated; the others stay untouched.
            var dt = new DataTable();
            var columns = dt.Columns;
            columns.Add("Column1", typeof(int));
            columns.Add("Value", typeof(string));
            columns.Add("Column3", typeof(int));
            columns.Add("Name", typeof(string));

            var vars = Environment.GetEnvironmentVariables()
                                  .Cast<DictionaryEntry>()
                                  .ToArray();

            vars.Select(e => new { Name = e.Key.ToString(), Value = e.Value.ToString() })
                .ToDataTable(dt, e => e.Name, e => e.Value);

            var rows = dt.Rows.Cast<DataRow>().ToArray();
            Assert.That(rows.Length, Is.EqualTo(vars.Length));
            Assert.That(rows.Select(r => r["Name"]).ToArray(), Is.EqualTo(vars.Select(e => e.Key).ToArray()));
            Assert.That(rows.Select(r => r["Value"]).ToArray(), Is.EqualTo(vars.Select(e => e.Value).ToArray()));
        }

        // Fixture with a static member that schema inference must skip.
        struct Point
        {
            public static Point Empty = new Point();
            public bool IsEmpty => X == 0 && Y == 0;
            public int X { get; }
            public int Y { get; }
            public Point(int x, int y) : this() { X = x; Y = y; }
        }

        [Test]
        public void ToDataTableIgnoresStaticMembers()
        {
            var points = new[] { new Point(12, 34) }.ToDataTable();

            Assert.AreEqual(3, points.Columns.Count);
            DataColumn x, y, empty;
            Assert.NotNull(x = points.Columns["X"]);
            Assert.NotNull(y = points.Columns["Y"]);
            Assert.NotNull(empty = points.Columns["IsEmpty"]);
            var row = points.Rows.Cast<DataRow>().Single();
            Assert.AreEqual(12, row[x]);
            Assert.AreEqual(34, row[y]);
            Assert.AreEqual(false, row[empty]);
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using Xunit;

namespace System.Linq.Expressions.Tests
{
    /// <summary>
    /// Verifies that <see cref="Expression.Add(Expression, Expression)"/> evaluates
    /// correctly for every primitive numeric type when the addition is wrapped in
    /// lambdas of various shapes: direct invocation, closures over outer parameters,
    /// function generators, and curried forms.
    /// </summary>
    public static class LambdaAddTests
    {
        #region Test methods

        [Theory, ClassData(typeof(CompilationTypes))]
        public static void LambdaAddByteTest(bool useInterpreter)
        {
            byte[] values = new byte[] { 0, 1, byte.MaxValue };
            foreach (byte a in values)
            {
                foreach (byte b in values)
                {
                    VerifyAddByte(a, b, useInterpreter);
                }
            }
        }

        [Theory, ClassData(typeof(CompilationTypes))]
        public static void LambdaAddDecimalTest(bool useInterpreter)
        {
            decimal[] values = new decimal[] { decimal.Zero, decimal.One, decimal.MinusOne, decimal.MinValue, decimal.MaxValue };
            foreach (decimal a in values)
            {
                foreach (decimal b in values)
                {
                    VerifyAddDecimal(a, b, useInterpreter);
                }
            }
        }

        [Theory, ClassData(typeof(CompilationTypes))]
        public static void LambdaAddDoubleTest(bool useInterpreter)
        {
            double[] values = new double[] { 0, 1, -1, double.MinValue, double.MaxValue, double.Epsilon, double.NegativeInfinity, double.PositiveInfinity, double.NaN };
            foreach (double a in values)
            {
                foreach (double b in values)
                {
                    VerifyAddDouble(a, b, useInterpreter);
                }
            }
        }

        [Theory, ClassData(typeof(CompilationTypes))]
        public static void LambdaAddFloatTest(bool useInterpreter)
        {
            float[] values = new float[] { 0, 1, -1, float.MinValue, float.MaxValue, float.Epsilon, float.NegativeInfinity, float.PositiveInfinity, float.NaN };
            foreach (float a in values)
            {
                foreach (float b in values)
                {
                    VerifyAddFloat(a, b, useInterpreter);
                }
            }
        }

        [Theory, ClassData(typeof(CompilationTypes))]
        public static void LambdaAddIntTest(bool useInterpreter)
        {
            int[] values = new int[] { 0, 1, -1, int.MinValue, int.MaxValue };
            foreach (int a in values)
            {
                foreach (int b in values)
                {
                    VerifyAddInt(a, b, useInterpreter);
                }
            }
        }

        [Theory, ClassData(typeof(CompilationTypes))]
        public static void LambdaAddLongTest(bool useInterpreter)
        {
            long[] values = new long[] { 0, 1, -1, long.MinValue, long.MaxValue };
            foreach (long a in values)
            {
                foreach (long b in values)
                {
                    VerifyAddLong(a, b, useInterpreter);
                }
            }
        }

        [Theory, ClassData(typeof(CompilationTypes))]
        public static void LambdaAddShortTest(bool useInterpreter)
        {
            short[] values = new short[] { 0, 1, -1, short.MinValue, short.MaxValue };
            foreach (short a in values)
            {
                foreach (short b in values)
                {
                    VerifyAddShort(a, b, useInterpreter);
                }
            }
        }

        [Theory, ClassData(typeof(CompilationTypes))]
        public static void LambdaAddUIntTest(bool useInterpreter)
        {
            uint[] values = new uint[] { 0, 1, uint.MaxValue };
            foreach (uint a in values)
            {
                foreach (uint b in values)
                {
                    VerifyAddUInt(a, b, useInterpreter);
                }
            }
        }

        [Theory, ClassData(typeof(CompilationTypes))]
        public static void LambdaAddULongTest(bool useInterpreter)
        {
            ulong[] values = new ulong[] { 0, 1, ulong.MaxValue };
            foreach (ulong a in values)
            {
                foreach (ulong b in values)
                {
                    VerifyAddULong(a, b, useInterpreter);
                }
            }
        }

        [Theory, ClassData(typeof(CompilationTypes))]
        public static void LambdaAddUShortTest(bool useInterpreter)
        {
            ushort[] values = new ushort[] { 0, 1, ushort.MaxValue };
            foreach (ushort a in values)
            {
                foreach (ushort b in values)
                {
                    VerifyAddUShort(a, b, useInterpreter);
                }
            }
        }

        #endregion

        #region Test verifiers

        // Expression trees define no addition operator over byte operands, so this
        // suite (like C# itself) widens byte addition to int; the widened sum is
        // what gets verified.
        private static void VerifyAddByte(byte a, byte b, bool useInterpreter) =>
            VerifyAdd((int)a, (int)b, a + b, overflows: false, useInterpreter);

        // Decimal addition is checked: MinValue/MaxValue combinations can overflow,
        // in which case every lambda shape must throw OverflowException.
        private static void VerifyAddDecimal(decimal a, decimal b, bool useInterpreter)
        {
            bool overflows;
            decimal expected;
            try
            {
                expected = a + b;
                overflows = false;
            }
            catch (OverflowException)
            {
                expected = 0;
                overflows = true;
            }
            VerifyAdd(a, b, expected, overflows, useInterpreter);
        }

        private static void VerifyAddDouble(double a, double b, bool useInterpreter) =>
            VerifyAdd(a, b, a + b, overflows: false, useInterpreter);

        private static void VerifyAddFloat(float a, float b, bool useInterpreter) =>
            VerifyAdd(a, b, a + b, overflows: false, useInterpreter);

        private static void VerifyAddInt(int a, int b, bool useInterpreter) =>
            VerifyAdd(a, b, a + b, overflows: false, useInterpreter);

        private static void VerifyAddLong(long a, long b, bool useInterpreter) =>
            VerifyAdd(a, b, a + b, overflows: false, useInterpreter);

        private static void VerifyAddShort(short a, short b, bool useInterpreter) =>
            VerifyAdd(a, b, (short)(a + b), overflows: false, useInterpreter);

        private static void VerifyAddUInt(uint a, uint b, bool useInterpreter) =>
            VerifyAdd(a, b, a + b, overflows: false, useInterpreter);

        private static void VerifyAddULong(ulong a, ulong b, bool useInterpreter) =>
            VerifyAdd(a, b, a + b, overflows: false, useInterpreter);

        private static void VerifyAddUShort(ushort a, ushort b, bool useInterpreter) =>
            VerifyAdd(a, b, (ushort)(a + b), overflows: false, useInterpreter);

        /// <summary>
        /// Builds the addition <paramref name="a"/> + <paramref name="b"/> through six
        /// differently shaped lambda constructions and checks that each evaluates to
        /// <paramref name="expected"/> — or throws <see cref="OverflowException"/> when
        /// <paramref name="overflows"/> is true (checked decimal addition).
        /// </summary>
        /// <typeparam name="T">Operand/result type; must have an Add operator recognized by expression trees.</typeparam>
        private static void VerifyAdd<T>(T a, T b, T expected, bool overflows, bool useInterpreter)
        {
            ParameterExpression p0 = Expression.Parameter(typeof(T), "p0");
            ParameterExpression p1 = Expression.Parameter(typeof(T), "p1");

            // 1. Parameters supplied by invoking the inner lambda with constants.
            Expression<Func<T>> e1 = Expression.Lambda<Func<T>>(
                Expression.Invoke(
                    Expression.Lambda<Func<T, T, T>>(Expression.Add(p0, p1), p0, p1),
                    Expression.Constant(a, typeof(T)),
                    Expression.Constant(b, typeof(T))));
            Func<T> f1 = e1.Compile(useInterpreter);
            AssertAdd(expected, overflows, () => f1());

            // 2. Values bound via the outer lambda's parameters (inner lambda closes over them).
            Expression<Func<T, T, Func<T>>> e2 = Expression.Lambda<Func<T, T, Func<T>>>(
                Expression.Lambda<Func<T>>(Expression.Add(p0, p1)),
                p0, p1);
            Func<T, T, Func<T>> f2 = e2.Compile(useInterpreter);
            AssertAdd(expected, overflows, () => f2(a, b)());

            // 3. Two-parameter lambda obtained by invoking a generator lambda.
            Expression<Func<Func<T, T, T>>> e3 = Expression.Lambda<Func<Func<T, T, T>>>(
                Expression.Invoke(
                    Expression.Lambda<Func<Func<T, T, T>>>(
                        Expression.Lambda<Func<T, T, T>>(Expression.Add(p0, p1), p0, p1))));
            Func<T, T, T> f3 = e3.Compile(useInterpreter)();
            AssertAdd(expected, overflows, () => f3(a, b));

            // 4. As a function generator. (Bug fix: the original uint verifier computed
            // this result and swallowed any exception without asserting either; the
            // f4 shape is now asserted uniformly for every type.)
            Expression<Func<Func<T, T, T>>> e4 = Expression.Lambda<Func<Func<T, T, T>>>(
                Expression.Lambda<Func<T, T, T>>(Expression.Add(p0, p1), p0, p1));
            Func<Func<T, T, T>> f4 = e4.Compile(useInterpreter);
            AssertAdd(expected, overflows, () => f4()(a, b));

            // 5. Curried: the outer lambda binds p0, the inner lambda binds p1.
            Expression<Func<T, Func<T, T>>> e5 = Expression.Lambda<Func<T, Func<T, T>>>(
                Expression.Lambda<Func<T, T>>(Expression.Add(p0, p1), p1),
                p0);
            Func<T, Func<T, T>> f5 = e5.Compile(useInterpreter);
            AssertAdd(expected, overflows, () => f5(a)(b));

            // 6. Curried, with the first operand supplied by invocation as a constant.
            Expression<Func<Func<T, T>>> e6 = Expression.Lambda<Func<Func<T, T>>>(
                Expression.Invoke(
                    Expression.Lambda<Func<T, Func<T, T>>>(
                        Expression.Lambda<Func<T, T>>(Expression.Add(p0, p1), p1),
                        p0),
                    Expression.Constant(a, typeof(T))));
            Func<T, T> f6 = e6.Compile(useInterpreter)();
            AssertAdd(expected, overflows, () => f6(b));
        }

        // Asserts a single evaluation: the expected sum for non-overflowing cases,
        // or OverflowException for overflowing checked (decimal) additions.
        private static void AssertAdd<T>(T expected, bool overflows, Func<T> actual)
        {
            if (overflows)
            {
                Assert.Throws<OverflowException>(() => actual());
            }
            else
            {
                Assert.Equal(expected, actual());
            }
        }

        #endregion
    }
}
using System;
using System.Linq;
using System.Data;
using System.Management.Automation;
using System.Data.Common;
using System.Collections.Generic;
using PowerShellDBDrive.DataModel.Oracle;
using PowerShellDBDrive.DataModel;

namespace PowerShellDBDrive.Drives
{
    /// <summary>
    /// Oracle Database implementation for DatabaseDriveInfo.
    /// Reads metadata from the ALL_USERS / ALL_TABLES / ALL_TAB_COLUMNS / ALL_VIEWS
    /// data-dictionary views. All user-supplied values are passed through bind
    /// parameters, never concatenated into SQL text.
    /// NOTE(review): several methods are iterators — the connection is opened
    /// lazily on first enumeration and stays open while the caller iterates.
    /// </summary>
    public class OracleDatabaseDriveInfo : DatabaseDriveInfo
    {
        #region SQL Queries

        #region Schema Queries

        private const string SELECT_SCHEMAS = "SELECT USER_ID, USERNAME, CREATED FROM ALL_USERS";

        private const string SELECT_SCHEMA = "SELECT USER_ID, USERNAME, CREATED FROM ALL_USERS WHERE USERNAME = :schemaname";

        private const string SELECT_SCHEMA_EXISTS = "SELECT 1 FROM ALL_USERS WHERE USERNAME = :schemaname";

        private const string SELECT_SCHEMAS_NAMES = "SELECT USERNAME FROM ALL_USERS";

        private const string SELECT_SCHEMAS_NAMES_REGEXP = "SELECT USERNAME FROM ALL_USERS WHERE REGEXP_LIKE(USERNAME, :regexp)";

        #endregion Schema Queries

        #region Table Queries

        private const string SELECT_TABLES =
@"SELECT OWNER, TABLE_NAME, TABLESPACE_NAME, CLUSTER_NAME, IOT_NAME, STATUS,
PCT_FREE, PCT_USED, INI_TRANS, MAX_TRANS, INITIAL_EXTENT, NEXT_EXTENT,
MIN_EXTENTS, MAX_EXTENTS, PCT_INCREASE, FREELISTS, FREELIST_GROUPS, LOGGING,
BACKED_UP, NUM_ROWS, BLOCKS, EMPTY_BLOCKS, AVG_SPACE, CHAIN_CNT, AVG_ROW_LEN,
AVG_SPACE_FREELIST_BLOCKS, NUM_FREELIST_BLOCKS, DEGREE, INSTANCES, CACHE,
TABLE_LOCK, SAMPLE_SIZE, LAST_ANALYZED, PARTITIONED, IOT_TYPE, TEMPORARY,
SECONDARY, NESTED, BUFFER_POOL, FLASH_CACHE, CELL_FLASH_CACHE, ROW_MOVEMENT,
GLOBAL_STATS, USER_STATS, DURATION, SKIP_CORRUPT, MONITORING, CLUSTER_OWNER,
DEPENDENCIES, COMPRESSION, COMPRESS_FOR, DROPPED, READ_ONLY, SEGMENT_CREATED,
RESULT_CACHE
FROM ALL_TABLES
WHERE OWNER = :schemaname";

        // Reuses the (long) column list of SELECT_TABLES instead of duplicating it;
        // const string concatenation is resolved at compile time.
        private const string SELECT_SINGLE_TABLE = SELECT_TABLES + " AND TABLE_NAME = :tablename";

        private const string SELECT_COLUMNS =
@"SELECT Owner, Table_Name, Column_Name, Data_Type, Data_Type_Mod, Data_Type_Owner,
Data_Length, Data_Precision, Data_Scale, Nullable, Column_Id, Default_Length,
Data_Default, Num_Distinct, Low_Value, High_Value, Density, Num_Nulls,
Num_Buckets, Last_Analyzed, Sample_Size, Character_Set_Name,
Char_Col_Decl_Length, Global_Stats, User_Stats, Avg_Col_Len, Char_Length,
Char_Used, V80_Fmt_Image, Data_Upgraded, Histogram
FROM ALL_TAB_COLUMNS
WHERE OWNER = :schemaname AND TABLE_NAME = :tablename";

        private const string SELECT_TABLES_NAMES = "SELECT TABLE_NAME FROM ALL_TABLES WHERE OWNER = :schemaname ";

        private const string SELECT_TABLES_NAMES_REGEXP = "SELECT TABLE_NAME FROM ALL_TABLES WHERE OWNER = :schemaname AND REGEXP_LIKE(TABLE_NAME, :regexp)";

        private const string SELECT_TABLE_EXISTS = "SELECT 1 FROM ALL_TABLES WHERE OWNER = :schemaname AND TABLE_NAME = :tablename";

        #endregion Table Queries

        #region View Queries

        private const string SELECT_VIEWS = "SELECT OWNER, VIEW_NAME, TEXT_LENGTH, TEXT, TYPE_TEXT_LENGTH, TYPE_TEXT, OID_TEXT_LENGTH, OID_TEXT, VIEW_TYPE_OWNER, VIEW_TYPE, SUPERVIEW_NAME FROM ALL_VIEWS WHERE OWNER = :schemaname";

        private const string SELECT_VIEWS_NAME = "SELECT VIEW_NAME FROM ALL_VIEWS WHERE OWNER = :schemaname";

        // BUGFIX: ALL_VIEWS has no TABLE_NAME column; the regexp must be applied
        // to VIEW_NAME (the original query would fail with ORA-00904).
        private const string SELECT_VIEWS_NAMES_REGEXP = "SELECT VIEW_NAME FROM ALL_VIEWS WHERE OWNER = :schemaname AND REGEXP_LIKE(VIEW_NAME, :regexp)";

        private const string SELECT_VIEW_EXISTS = "SELECT 1 FROM ALL_VIEWS WHERE OWNER = :schemaname AND VIEW_NAME = :viewname";

        #endregion View Queries

        #endregion SQL Queries

        public OracleDatabaseDriveInfo(PSDriveInfo driveInfo, DatabaseParameters parameters) : base(driveInfo, parameters)
        {
        }

        /// <summary>
        /// Creates a string bind parameter and attaches it to the given command.
        /// Centralizes the parameter boilerplate previously duplicated in every method.
        /// </summary>
        /// <param name="command">Command to add the parameter to.</param>
        /// <param name="name">Bind parameter name (without the leading colon).</param>
        /// <param name="value">Parameter value.</param>
        private static void AddStringParameter(DbCommand command, string name, string value)
        {
            DbParameter parameter = command.CreateParameter();
            parameter.DbType = DbType.String;
            parameter.ParameterName = name;
            parameter.Value = value;
            command.Parameters.Add(parameter);
        }

        /// <summary>
        /// Prepares a text command with the drive's configured timeout.
        /// </summary>
        private DbCommand CreateTextCommand(DbConnection connection, string sql)
        {
            DbCommand command = connection.CreateCommand();
            command.CommandText = sql;
            command.CommandTimeout = Timeout;
            command.CommandType = CommandType.Text;
            return command;
        }

        /// <summary>
        /// Enumerates all schemas (Oracle users) visible in ALL_USERS.
        /// </summary>
        public override IEnumerable<IDatabaseSchemaInfo> GetSchemas()
        {
            using (DbConnection connection = GetConnection())
            {
                connection.Open();
                using (DbCommand command = CreateTextCommand(connection, SELECT_SCHEMAS))
                using (DbDataReader reader = command.ExecuteReader())
                {
                    while (reader.Read())
                    {
                        yield return new OracleDatabaseSchemaInfo(
                            reader.GetInt64(reader.GetOrdinal("USER_ID")),
                            reader.GetString(reader.GetOrdinal("USERNAME")),
                            reader.GetDateTime(reader.GetOrdinal("CREATED")));
                    }
                }
            }
        }

        /// <summary>
        /// Enumerates the names of all schemas.
        /// </summary>
        public override IEnumerable<String> GetSchemasNames()
        {
            using (DbConnection connection = GetConnection())
            {
                connection.Open();
                using (DbCommand command = CreateTextCommand(connection, SELECT_SCHEMAS_NAMES))
                using (DbDataReader reader = command.ExecuteReader())
                {
                    while (reader.Read())
                    {
                        yield return reader["USERNAME"] as string;
                    }
                }
            }
        }

        /// <summary>
        /// Enumerates schema names matching an Oracle regular expression.
        /// </summary>
        /// <param name="regexp">Oracle REGEXP_LIKE pattern.</param>
        public override IEnumerable<String> GetSchemasNames(string regexp)
        {
            using (DbConnection connection = GetConnection())
            {
                connection.Open();
                using (DbCommand command = CreateTextCommand(connection, SELECT_SCHEMAS_NAMES_REGEXP))
                {
                    AddStringParameter(command, "regexp", regexp);
                    using (DbDataReader reader = command.ExecuteReader())
                    {
                        while (reader.Read())
                        {
                            yield return reader["USERNAME"] as string;
                        }
                    }
                }
            }
        }

        /// <summary>
        /// Returns the schema with the given name, or null if it does not exist.
        /// </summary>
        public override IDatabaseSchemaInfo GetSchema(string schemaName)
        {
            using (DbConnection connection = GetConnection())
            {
                connection.Open();
                using (DbCommand command = CreateTextCommand(connection, SELECT_SCHEMA))
                {
                    AddStringParameter(command, "schemaname", schemaName);
                    using (DbDataReader reader = command.ExecuteReader())
                    {
                        if (reader.Read())
                        {
                            return new OracleDatabaseSchemaInfo(
                                reader.GetInt64(reader.GetOrdinal("USER_ID")),
                                reader.GetString(reader.GetOrdinal("USERNAME")),
                                reader.GetDateTime(reader.GetOrdinal("CREATED")));
                        }
                    }
                    return null;
                }
            }
        }

        /// <summary>
        /// Returns every declared ObjectType; all types are supported for any schema.
        /// </summary>
        public override IEnumerable<ObjectType> GetSupportedObjectTypes(string schemaName)
        {
            foreach (ObjectType ot in Enum.GetValues(typeof(ObjectType)))
            {
                yield return ot;
            }
        }

        /// <summary>
        /// Enumerates all views owned by the given schema.
        /// </summary>
        public override IEnumerable<IDatabaseViewInfo> GetViews(string schemaName)
        {
            using (DbConnection connection = GetConnection())
            {
                connection.Open();
                using (DbCommand command = CreateTextCommand(connection, SELECT_VIEWS))
                {
                    AddStringParameter(command, "schemaname", schemaName);
                    using (DbDataReader reader = command.ExecuteReader())
                    {
                        while (reader.Read())
                        {
                            yield return OracleDatabaseFactory.BuildDatabaseViewInfo(reader);
                        }
                    }
                }
            }
        }

        public override IEnumerable<string> GetViewsNames(string schemaName)
        {
            throw new NotImplementedException();
        }

        public override IEnumerable<string> GetViewsNames(string schemaName, string viewName)
        {
            throw new NotImplementedException();
        }

        public override IDatabaseViewInfo GetView(string schemaName, string viewName)
        {
            throw new NotImplementedException();
        }

        /// <summary>
        /// Enumerates all tables of the given schema, including their column metadata.
        /// </summary>
        public override IEnumerable<IDatabaseTableInfo> GetTables(string schemaName)
        {
            using (DbConnection connection = GetConnection())
            {
                connection.Open();
                using (DbCommand command = CreateTextCommand(connection, SELECT_TABLES))
                {
                    AddStringParameter(command, "schemaname", schemaName);
                    using (DbDataReader reader = command.ExecuteReader())
                    {
                        while (reader.Read())
                        {
                            OracleDatabaseTableInfo dti = OracleDatabaseFactory.BuildDatabaseTableInfo(reader);
                            // Column lookup uses its own connection, so the open
                            // reader on this connection is not disturbed.
                            dti.Columns = GetDatabaseColumnsInfo(schemaName, dti.TableName).ToArray();
                            yield return dti;
                        }
                    }
                }
            }
        }

        /// <summary>
        /// Enumerates the names of all tables owned by the given schema.
        /// </summary>
        public override IEnumerable<String> GetTablesNames(string schemaName)
        {
            using (DbConnection connection = GetConnection())
            {
                connection.Open();
                using (DbCommand command = CreateTextCommand(connection, SELECT_TABLES_NAMES))
                {
                    AddStringParameter(command, "schemaname", schemaName);
                    using (DbDataReader reader = command.ExecuteReader())
                    {
                        while (reader.Read())
                        {
                            yield return reader["TABLE_NAME"] as string;
                        }
                    }
                }
            }
        }

        /// <summary>
        /// Enumerates table names of the given schema matching a regular expression.
        /// </summary>
        /// <param name="schemaName">Owner of the tables.</param>
        /// <param name="tableName">Oracle REGEXP_LIKE pattern applied to TABLE_NAME.</param>
        public override IEnumerable<String> GetTablesNames(string schemaName, string tableName)
        {
            using (DbConnection connection = GetConnection())
            {
                connection.Open();
                using (DbCommand command = CreateTextCommand(connection, SELECT_TABLES_NAMES_REGEXP))
                {
                    AddStringParameter(command, "schemaname", schemaName);
                    // BUGFIX: the :regexp parameter was previously bound to
                    // schemaName, so the pattern filter never used tableName.
                    AddStringParameter(command, "regexp", tableName);
                    using (DbDataReader reader = command.ExecuteReader())
                    {
                        while (reader.Read())
                        {
                            yield return reader["TABLE_NAME"] as string;
                        }
                    }
                }
            }
        }

        /// <summary>
        /// Enumerates column metadata for one table.
        /// </summary>
        private IEnumerable<IDatabaseColumnInfo> GetDatabaseColumnsInfo(string schemaName, string tableName)
        {
            using (DbConnection connection = GetConnection())
            {
                connection.Open();
                using (DbCommand command = CreateTextCommand(connection, SELECT_COLUMNS))
                {
                    AddStringParameter(command, "schemaname", schemaName);
                    AddStringParameter(command, "tablename", tableName);
                    using (DbDataReader reader = command.ExecuteReader())
                    {
                        while (reader.Read())
                        {
                            yield return OracleDatabaseFactory.BuildDatabaseColumnInfo(reader);
                        }
                    }
                }
            }
        }

        /// <summary>
        /// Returns table metadata (including columns) for one table, or null if absent.
        /// </summary>
        public override IDatabaseTableInfo GetTable(string schemaName, string tableName)
        {
            using (DbConnection connection = GetConnection())
            {
                connection.Open();
                using (DbCommand command = CreateTextCommand(connection, SELECT_SINGLE_TABLE))
                {
                    AddStringParameter(command, "schemaname", schemaName);
                    AddStringParameter(command, "tablename", tableName);
                    using (DbDataReader reader = command.ExecuteReader())
                    {
                        if (reader.Read())
                        {
                            OracleDatabaseTableInfo dti = OracleDatabaseFactory.BuildDatabaseTableInfo(reader);
                            dti.Columns = GetDatabaseColumnsInfo(schemaName, dti.TableName).ToArray();
                            return dti;
                        }
                        return null;
                    }
                }
            }
        }

        /// <summary>
        /// Streams up to <paramref name="maxResult"/> rows of a table as PSObjects.
        /// </summary>
        /// <param name="schemaName">Owner of the table.</param>
        /// <param name="tableName">Table to read.</param>
        /// <param name="maxResult">Maximum number of rows to return; 0 or less yields nothing.</param>
        public override IEnumerable<PSObject> GetRows(string schemaName, string tableName, int maxResult)
        {
            using (DbConnection connection = GetConnection())
            {
                connection.Open();
                using (DbCommand command = connection.CreateCommand())
                {
                    // Identifier-built SQL: GetSelectStringForTable is expected to
                    // quote/validate the names (bind parameters cannot be used
                    // for identifiers).
                    command.CommandText = DatabaseUtils.GetSelectStringForTable(schemaName, tableName);
                    command.CommandTimeout = Timeout;
                    using (DbDataReader reader = command.ExecuteReader())
                    {
                        PSObjectBuilder builder = new PSObjectBuilder();
                        // Check the budget before reading so we never fetch a row
                        // past the limit (the original read one extra row).
                        while (maxResult > 0 && reader.Read())
                        {
                            builder.NewInstance();
                            for (int i = 0; i < reader.FieldCount; i++)
                            {
                                builder.AddField(reader.GetName(i), reader.GetValue(i));
                            }
                            yield return builder.Build();
                            maxResult--;
                        }
                    }
                }
            }
        }

        /// <summary>
        /// Returns true if a schema with the given name exists.
        /// </summary>
        public override bool IsSchemaExist(string schemaName)
        {
            using (DbConnection connection = GetConnection())
            {
                connection.Open();
                using (DbCommand command = CreateTextCommand(connection, SELECT_SCHEMA_EXISTS))
                {
                    AddStringParameter(command, "schemaname", schemaName);
                    using (DbDataReader reader = command.ExecuteReader())
                    {
                        return reader.Read();
                    }
                }
            }
        }

        /// <summary>
        /// Returns true if the named object exists. Only TABLE objects are
        /// currently checked; every other type reports false.
        /// </summary>
        public override bool IsObjectExist(string schemaName, ObjectType objectType, string[] objectPath)
        {
            if (objectType == ObjectType.TABLE)
            {
                return IsTableExist(schemaName, objectPath[0]);
            }
            return false;
        }

        /// <summary>
        /// Returns true if the given table exists in the given schema.
        /// </summary>
        private bool IsTableExist(string schemaName, string tableName)
        {
            using (DbConnection connection = GetConnection())
            {
                connection.Open();
                using (DbCommand command = CreateTextCommand(connection, SELECT_TABLE_EXISTS))
                {
                    AddStringParameter(command, "schemaname", schemaName);
                    AddStringParameter(command, "tablename", tableName);
                    using (DbDataReader reader = command.ExecuteReader())
                    {
                        return reader.Read();
                    }
                }
            }
        }
    }
}
using UnityEngine;
//using Windows.Kinect;
using System.Collections;
using System.Collections.Generic;

/// <summary>
/// KinectGestures is utility class that processes programmatic Kinect gestures
/// </summary>
public class KinectGestures
{

    /// <summary>
    /// This interface needs to be implemented by all Kinect gesture listeners
    /// </summary>
    public interface GestureListenerInterface
    {
        /// <summary>
        /// Invoked when a new user is detected. Here you can start gesture tracking by invoking KinectManager.DetectGesture()-function.
        /// </summary>
        /// <param name="userId">User ID</param>
        /// <param name="userIndex">User index</param>
        void UserDetected(long userId, int userIndex);

        /// <summary>
        /// Invoked when a user gets lost. All tracked gestures for this user are cleared automatically.
        /// </summary>
        /// <param name="userId">User ID</param>
        /// <param name="userIndex">User index</param>
        void UserLost(long userId, int userIndex);

        /// <summary>
        /// Invoked when a gesture is in progress.
        /// </summary>
        /// <param name="userId">User ID</param>
        /// <param name="userIndex">User index</param>
        /// <param name="gesture">Gesture type</param>
        /// <param name="progress">Gesture progress [0..1]</param>
        /// <param name="joint">Joint type</param>
        /// <param name="screenPos">Normalized viewport position</param>
        void GestureInProgress(long userId, int userIndex, Gestures gesture, float progress,
                               KinectInterop.JointType joint, Vector3 screenPos);

        /// <summary>
        /// Invoked if a gesture is completed.
        /// </summary>
        /// <returns><c>true</c>, if the gesture detection must be restarted, <c>false</c> otherwise.</returns>
        /// <param name="userId">User ID</param>
        /// <param name="userIndex">User index</param>
        /// <param name="gesture">Gesture type</param>
        /// <param name="joint">Joint type</param>
        /// <param name="screenPos">Normalized viewport position</param>
        bool GestureCompleted(long userId, int userIndex, Gestures gesture,
                              KinectInterop.JointType joint, Vector3 screenPos);

        /// <summary>
        /// Invoked if a gesture is cancelled.
        /// </summary>
        /// <returns><c>true</c>, if the gesture detection must be retarted, <c>false</c> otherwise.</returns>
        /// <param name="userId">User ID</param>
        /// <param name="userIndex">User index</param>
        /// <param name="gesture">Gesture type</param>
        /// <param name="joint">Joint type</param>
        bool GestureCancelled(long userId, int userIndex, Gestures gesture,
                              KinectInterop.JointType joint);
    }


    /// <summary>
    /// The gesture types.
    /// </summary>
    public enum Gestures
    {
        None = 0,
        RaiseRightHand,
        RaiseLeftHand,
        Psi,
        Tpose,
        Stop,
        Wave,
//		Click,
        SwipeLeft,
        SwipeRight,
        SwipeUp,
        SwipeDown,
//		RightHandCursor,
//		LeftHandCursor,
        ZoomIn,
        ZoomOut,
        Wheel,
        Jump,
        Squat,
        Push,
        Pull,
        ShoulderLeftFront,
        ShoulderRightFront,
        LeanLeft,
        LeanRight,
        KickLeft,
        KickRight
    }


    /// <summary>
    /// Gesture data structure.
/// </summary>
    public struct GestureData
    {
        public long userId;                     // user this gesture instance is tracked for
        public Gestures gesture;                // which gesture is being detected
        public int state;                       // detection state machine phase (0 = not started)
        public float timestamp;                 // time of the last state transition
        public int joint;                       // index of the joint driving the gesture
        public Vector3 jointPos;                // position of that joint at the last transition
        public Vector3 screenPos;               // normalized screen position (and zoom/angle in .z)
        public float tagFloat;                  // gesture-specific scratch value
        public Vector3 tagVector;               // gesture-specific scratch vector (e.g. screen-rect origin)
        public Vector3 tagVector2;              // gesture-specific scratch vector (e.g. screen-rect size)
        public float progress;                  // progress estimate [0..1]
        public bool complete;                   // set when the gesture completed
        public bool cancelled;                  // set when the gesture was cancelled
        public List<Gestures> checkForGestures; // optional gestures to restart after this one
        public float startTrackingAtTime;       // earliest time this gesture may be tracked again
    }



    // Gesture related constants, variables and functions
    private static int leftHandIndex;
    private static int rightHandIndex;

    private static int leftElbowIndex;
    private static int rightElbowIndex;

    private static int leftShoulderIndex;
    private static int rightShoulderIndex;

    private static int hipCenterIndex;
    private static int shoulderCenterIndex;

    private static int leftHipIndex;
    private static int rightHipIndex;

    private static int leftAnkleIndex;
    private static int rightAnkleIndex;


    /// <summary>
    /// Gets the list of gesture joint indexes.
    /// </summary>
    /// <returns>The needed joint indexes.</returns>
    /// <param name="manager">The KinectManager instance</param>
    public static int[] GetNeededJointIndexes(KinectManager manager)
    {
        // cache the joint indexes in the statics used by all detectors below
        leftHandIndex = manager.GetJointIndex(KinectInterop.JointType.HandLeft);
        rightHandIndex = manager.GetJointIndex(KinectInterop.JointType.HandRight);

        leftElbowIndex = manager.GetJointIndex(KinectInterop.JointType.ElbowLeft);
        rightElbowIndex = manager.GetJointIndex(KinectInterop.JointType.ElbowRight);

        leftShoulderIndex = manager.GetJointIndex(KinectInterop.JointType.ShoulderLeft);
        rightShoulderIndex = manager.GetJointIndex(KinectInterop.JointType.ShoulderRight);

        hipCenterIndex = manager.GetJointIndex(KinectInterop.JointType.SpineBase);
        shoulderCenterIndex = manager.GetJointIndex(KinectInterop.JointType.SpineShoulder);

        leftHipIndex = manager.GetJointIndex(KinectInterop.JointType.HipLeft);
        rightHipIndex = manager.GetJointIndex(KinectInterop.JointType.HipRight);

        leftAnkleIndex = manager.GetJointIndex(KinectInterop.JointType.AnkleLeft);
        rightAnkleIndex = manager.GetJointIndex(KinectInterop.JointType.AnkleRight);

        int[] neededJointIndexes = {
            leftHandIndex, rightHandIndex, leftElbowIndex, rightElbowIndex,
            leftShoulderIndex, rightShoulderIndex, hipCenterIndex, shoulderCenterIndex,
            leftHipIndex, rightHipIndex, leftAnkleIndex, rightAnkleIndex
        };

        return neededJointIndexes;
    }


    // Records the driving joint/position/time and advances the gesture state machine.
    private static void SetGestureJoint(ref GestureData gestureData, float timestamp, int joint, Vector3 jointPos)
    {
        gestureData.joint = joint;
        gestureData.jointPos = jointPos;
        gestureData.timestamp = timestamp;
        gestureData.state++;
    }

    // Resets the state machine and flags the gesture as cancelled.
    private static void SetGestureCancelled(ref GestureData gestureData)
    {
        gestureData.state = 0;
        gestureData.progress = 0f;
        gestureData.cancelled = true;
    }

    // If the pose is still held, updates progress and completes the gesture once
    // durationToComplete has elapsed; otherwise cancels it.
    private static void CheckPoseComplete(ref GestureData gestureData, float timestamp, Vector3 jointPos, bool isInPose, float durationToComplete)
    {
        if(isInPose)
        {
            float timeLeft = timestamp - gestureData.timestamp;
            gestureData.progress = durationToComplete > 0f ? Mathf.Clamp01(timeLeft / durationToComplete) : 1.0f;

            if(timeLeft >= durationToComplete)
            {
                gestureData.timestamp = timestamp;
                gestureData.jointPos = jointPos;
                gestureData.state++;
                gestureData.complete = true;
            }
        }
        else
        {
            SetGestureCancelled(ref gestureData);
        }
    }

    // Maps the tracked hand position into a normalized [0..1] screen rectangle
    // derived from the user's shoulder width and shoulder-to-hip height.
    private static void SetScreenPos(long userId, ref GestureData gestureData, ref Vector3[] jointsPos, ref bool[] jointsTracked)
    {
        Vector3 handPos = jointsPos[rightHandIndex];
//		Vector3 elbowPos = jointsPos[rightElbowIndex];
//		Vector3 shoulderPos = jointsPos[rightShoulderIndex];
        bool calculateCoords = false;

        if(gestureData.joint == rightHandIndex)
        {
            if(jointsTracked[rightHandIndex] /**&& jointsTracked[rightElbowIndex] && jointsTracked[rightShoulderIndex]*/)
            {
                calculateCoords = true;
            }
        }
        else if(gestureData.joint == leftHandIndex)
        {
            if(jointsTracked[leftHandIndex] /**&& jointsTracked[leftElbowIndex] && jointsTracked[leftShoulderIndex]*/)
            {
                handPos = jointsPos[leftHandIndex];
//				elbowPos = jointsPos[leftElbowIndex];
//				shoulderPos = jointsPos[leftShoulderIndex];
                calculateCoords = true;
            }
        }

        if(calculateCoords)
        {
//			if(gestureData.tagFloat == 0f || gestureData.userId != userId)
//			{
//				// get length from shoulder to hand (screen range)
//				Vector3 shoulderToElbow = elbowPos - shoulderPos;
//				Vector3 elbowToHand = handPos - elbowPos;
//				gestureData.tagFloat = (shoulderToElbow.magnitude + elbowToHand.magnitude);
//			}

            if(jointsTracked[hipCenterIndex] && jointsTracked[shoulderCenterIndex] &&
                jointsTracked[leftShoulderIndex] && jointsTracked[rightShoulderIndex])
            {
                // screen rectangle: width = shoulder span, height = shoulder-to-hip distance
                Vector3 shoulderToHips = jointsPos[shoulderCenterIndex] - jointsPos[hipCenterIndex];
                Vector3 rightToLeft = jointsPos[rightShoulderIndex] - jointsPos[leftShoulderIndex];

                gestureData.tagVector2.x = rightToLeft.x; // * 1.2f;
                gestureData.tagVector2.y = shoulderToHips.y; // * 1.2f;

                // rectangle origin is anchored on the active hand's shoulder side
                if(gestureData.joint == rightHandIndex)
                {
                    gestureData.tagVector.x = jointsPos[rightShoulderIndex].x - gestureData.tagVector2.x / 2;
                    gestureData.tagVector.y = jointsPos[hipCenterIndex].y;
                }
                else
                {
                    gestureData.tagVector.x = jointsPos[leftShoulderIndex].x - gestureData.tagVector2.x / 2;
                    gestureData.tagVector.y = jointsPos[hipCenterIndex].y;
                }
            }

//			Vector3 shoulderToHand = handPos - shoulderPos;
//			gestureData.screenPos.x = Mathf.Clamp01((gestureData.tagFloat / 2 + shoulderToHand.x) / gestureData.tagFloat);
//			gestureData.screenPos.y = Mathf.Clamp01((gestureData.tagFloat / 2 + shoulderToHand.y) / gestureData.tagFloat);

            if(gestureData.tagVector2.x != 0 && gestureData.tagVector2.y != 0)
            {
                Vector3 relHandPos = handPos - gestureData.tagVector;
                gestureData.screenPos.x = Mathf.Clamp01(relHandPos.x / gestureData.tagVector2.x);
                gestureData.screenPos.y = Mathf.Clamp01(relHandPos.y / gestureData.tagVector2.y);
            }

            //Debug.Log(string.Format("{0} - S: {1}, H: {2}, SH: {3}, L : {4}", gestureData.gesture, shoulderPos, handPos, shoulderToHand, gestureData.tagFloat));
        }
    }

    // Stores the zoom factor (relative hand distance) in screenPos.z.
    // (continues on the following lines)
    private static void SetZoomFactor(long userId, ref GestureData gestureData, float initialZoom, ref Vector3[] jointsPos,
ref bool[] jointsTracked) { Vector3 vectorZooming = jointsPos[rightHandIndex] - jointsPos[leftHandIndex]; if(gestureData.tagFloat == 0f || gestureData.userId != userId) { gestureData.tagFloat = 0.5f; // this is 100% } float distZooming = vectorZooming.magnitude; gestureData.screenPos.z = initialZoom + (distZooming / gestureData.tagFloat); } private static void SetWheelRotation(long userId, ref GestureData gestureData, Vector3 initialPos, Vector3 currentPos) { float angle = Vector3.Angle(initialPos, currentPos) * Mathf.Sign(currentPos.y - initialPos.y); gestureData.screenPos.z = angle; } // estimate the next state and completeness of the gesture /// <summary> /// estimate the state and progress of the given gesture. /// </summary> /// <param name="userId">User ID</param> /// <param name="gestureData">Gesture-data structure</param> /// <param name="timestamp">Current time</param> /// <param name="jointsPos">Joints-position array</param> /// <param name="jointsTracked">Joints-tracked array</param> public static void CheckForGesture(long userId, ref GestureData gestureData, float timestamp, ref Vector3[] jointsPos, ref bool[] jointsTracked) { if(gestureData.complete) return; float bandSize = (jointsPos[shoulderCenterIndex].y - jointsPos[hipCenterIndex].y); float gestureTop = jointsPos[shoulderCenterIndex].y + bandSize * 1.2f / 3f; float gestureBottom = jointsPos[shoulderCenterIndex].y - bandSize * 1.8f / 3f; float gestureRight = jointsPos[rightHipIndex].x; float gestureLeft = jointsPos[leftHipIndex].x; switch(gestureData.gesture) { // check for RaiseRightHand case Gestures.RaiseRightHand: switch(gestureData.state) { case 0: // gesture detection if(jointsTracked[rightHandIndex] && jointsTracked[leftHandIndex] && jointsTracked[leftShoulderIndex] && (jointsPos[rightHandIndex].y - jointsPos[leftShoulderIndex].y) > 0.1f && (jointsPos[leftHandIndex].y - jointsPos[leftShoulderIndex].y) < 0f) { SetGestureJoint(ref gestureData, timestamp, rightHandIndex, 
jointsPos[rightHandIndex]); } break; case 1: // gesture complete bool isInPose = jointsTracked[rightHandIndex] && jointsTracked[leftHandIndex] && jointsTracked[leftShoulderIndex] && (jointsPos[rightHandIndex].y - jointsPos[leftShoulderIndex].y) > 0.1f && (jointsPos[leftHandIndex].y - jointsPos[leftShoulderIndex].y) < 0f; Vector3 jointPos = jointsPos[gestureData.joint]; CheckPoseComplete(ref gestureData, timestamp, jointPos, isInPose, KinectInterop.Constants.PoseCompleteDuration); break; } break; // check for RaiseLeftHand case Gestures.RaiseLeftHand: switch(gestureData.state) { case 0: // gesture detection if(jointsTracked[leftHandIndex] && jointsTracked[rightHandIndex] && jointsTracked[rightShoulderIndex] && (jointsPos[leftHandIndex].y - jointsPos[rightShoulderIndex].y) > 0.1f && (jointsPos[rightHandIndex].y - jointsPos[rightShoulderIndex].y) < 0f) { SetGestureJoint(ref gestureData, timestamp, leftHandIndex, jointsPos[leftHandIndex]); } break; case 1: // gesture complete bool isInPose = jointsTracked[leftHandIndex] && jointsTracked[rightHandIndex] && jointsTracked[rightShoulderIndex] && (jointsPos[leftHandIndex].y - jointsPos[rightShoulderIndex].y) > 0.1f && (jointsPos[rightHandIndex].y - jointsPos[rightShoulderIndex].y) < 0f; Vector3 jointPos = jointsPos[gestureData.joint]; CheckPoseComplete(ref gestureData, timestamp, jointPos, isInPose, KinectInterop.Constants.PoseCompleteDuration); break; } break; // check for Psi case Gestures.Psi: switch(gestureData.state) { case 0: // gesture detection if(jointsTracked[rightHandIndex] && jointsTracked[leftHandIndex] && jointsTracked[shoulderCenterIndex] && (jointsPos[rightHandIndex].y - jointsPos[shoulderCenterIndex].y) > 0.1f && (jointsPos[leftHandIndex].y - jointsPos[shoulderCenterIndex].y) > 0.1f) { SetGestureJoint(ref gestureData, timestamp, rightHandIndex, jointsPos[rightHandIndex]); } break; case 1: // gesture complete bool isInPose = jointsTracked[rightHandIndex] && jointsTracked[leftHandIndex] && 
jointsTracked[shoulderCenterIndex] && (jointsPos[rightHandIndex].y - jointsPos[shoulderCenterIndex].y) > 0.1f && (jointsPos[leftHandIndex].y - jointsPos[shoulderCenterIndex].y) > 0.1f; Vector3 jointPos = jointsPos[gestureData.joint]; CheckPoseComplete(ref gestureData, timestamp, jointPos, isInPose, KinectInterop.Constants.PoseCompleteDuration); break; } break; // check for Tpose case Gestures.Tpose: switch(gestureData.state) { case 0: // gesture detection if(jointsTracked[rightHandIndex] && jointsTracked[rightElbowIndex] && jointsTracked[rightShoulderIndex] && Mathf.Abs(jointsPos[rightElbowIndex].y - jointsPos[rightShoulderIndex].y) < 0.1f && // 0.07f Mathf.Abs(jointsPos[rightHandIndex].y - jointsPos[rightShoulderIndex].y) < 0.1f && // 0.7f jointsTracked[leftHandIndex] && jointsTracked[leftElbowIndex] && jointsTracked[leftShoulderIndex] && Mathf.Abs(jointsPos[leftElbowIndex].y - jointsPos[leftShoulderIndex].y) < 0.1f && Mathf.Abs(jointsPos[leftHandIndex].y - jointsPos[leftShoulderIndex].y) < 0.1f) { SetGestureJoint(ref gestureData, timestamp, rightHandIndex, jointsPos[rightHandIndex]); } break; case 1: // gesture complete bool isInPose = jointsTracked[rightHandIndex] && jointsTracked[rightElbowIndex] && jointsTracked[rightShoulderIndex] && Mathf.Abs(jointsPos[rightElbowIndex].y - jointsPos[rightShoulderIndex].y) < 0.1f && // 0.7f Mathf.Abs(jointsPos[rightHandIndex].y - jointsPos[rightShoulderIndex].y) < 0.1f && // 0.7f jointsTracked[leftHandIndex] && jointsTracked[leftElbowIndex] && jointsTracked[leftShoulderIndex] && Mathf.Abs(jointsPos[leftElbowIndex].y - jointsPos[leftShoulderIndex].y) < 0.1f && Mathf.Abs(jointsPos[leftHandIndex].y - jointsPos[leftShoulderIndex].y) < 0.1f; Vector3 jointPos = jointsPos[gestureData.joint]; CheckPoseComplete(ref gestureData, timestamp, jointPos, isInPose, KinectInterop.Constants.PoseCompleteDuration); break; } break; // check for Stop case Gestures.Stop: switch(gestureData.state) { case 0: // gesture detection 
// NOTE(review): this is the interior of the big per-gesture switch; the enclosing
// method and the case label of the gesture below begin above this chunk.
// State 0 of the preceding gesture: a hand lowered near hip height (y-diff < 0.2)
// and stretched sideways from the hip (|x-diff| >= 0.4) starts tracking that hand.
if(jointsTracked[rightHandIndex] && jointsTracked[rightHipIndex] &&
   (jointsPos[rightHandIndex].y - jointsPos[rightHipIndex].y) < 0.2f &&
   (jointsPos[rightHandIndex].x - jointsPos[rightHipIndex].x) >= 0.4f)
{
	SetGestureJoint(ref gestureData, timestamp, rightHandIndex, jointsPos[rightHandIndex]);
}
else if(jointsTracked[leftHandIndex] && jointsTracked[leftHipIndex] &&
        (jointsPos[leftHandIndex].y - jointsPos[leftHipIndex].y) < 0.2f &&
        (jointsPos[leftHandIndex].x - jointsPos[leftHipIndex].x) <= -0.4f)
{
	SetGestureJoint(ref gestureData, timestamp, leftHandIndex, jointsPos[leftHandIndex]);
}
break;

case 1:  // gesture complete - the same hand must still hold the pose
	bool isInPose = (gestureData.joint == rightHandIndex) ?
		(jointsTracked[rightHandIndex] && jointsTracked[rightHipIndex] &&
		 (jointsPos[rightHandIndex].y - jointsPos[rightHipIndex].y) < 0.2f &&
		 (jointsPos[rightHandIndex].x - jointsPos[rightHipIndex].x) >= 0.4f) :
		(jointsTracked[leftHandIndex] && jointsTracked[leftHipIndex] &&
		 (jointsPos[leftHandIndex].y - jointsPos[leftHipIndex].y) < 0.2f &&
		 (jointsPos[leftHandIndex].x - jointsPos[leftHipIndex].x) <= -0.4f);

	Vector3 jointPos = jointsPos[gestureData.joint];
	// the pose must be held for PoseCompleteDuration before the gesture fires
	CheckPoseComplete(ref gestureData, timestamp, jointPos, isInPose, KinectInterop.Constants.PoseCompleteDuration);
	break;
}
break;

// check for Wave
case Gestures.Wave:
	switch(gestureData.state)
	{
		case 0:  // gesture detection - phase 1: hand above elbow and swung outward
			if(jointsTracked[rightHandIndex] && jointsTracked[rightElbowIndex] &&
			   (jointsPos[rightHandIndex].y - jointsPos[rightElbowIndex].y) > 0.1f &&
			   (jointsPos[rightHandIndex].x - jointsPos[rightElbowIndex].x) > 0.05f)
			{
				SetGestureJoint(ref gestureData, timestamp, rightHandIndex, jointsPos[rightHandIndex]);
				gestureData.progress = 0.3f;
			}
			else if(jointsTracked[leftHandIndex] && jointsTracked[leftElbowIndex] &&
			        (jointsPos[leftHandIndex].y - jointsPos[leftElbowIndex].y) > 0.1f &&
			        (jointsPos[leftHandIndex].x - jointsPos[leftElbowIndex].x) < -0.05f)
			{
				SetGestureJoint(ref gestureData, timestamp, leftHandIndex, jointsPos[leftHandIndex]);
				gestureData.progress = 0.3f;
			}
			break;

		case 1:  // gesture - phase 2: hand swung to the opposite side of the elbow within 1.5s
			if((timestamp - gestureData.timestamp) < 1.5f)
			{
				bool isInPose = gestureData.joint == rightHandIndex ?
					jointsTracked[rightHandIndex] && jointsTracked[rightElbowIndex] &&
					(jointsPos[rightHandIndex].y - jointsPos[rightElbowIndex].y) > 0.1f &&
					(jointsPos[rightHandIndex].x - jointsPos[rightElbowIndex].x) < -0.05f :
					jointsTracked[leftHandIndex] && jointsTracked[leftElbowIndex] &&
					(jointsPos[leftHandIndex].y - jointsPos[leftElbowIndex].y) > 0.1f &&
					(jointsPos[leftHandIndex].x - jointsPos[leftElbowIndex].x) > 0.05f;

				if(isInPose)
				{
					gestureData.timestamp = timestamp;
					gestureData.state++;
					gestureData.progress = 0.7f;
				}
			}
			else
			{
				// cancel the gesture
				SetGestureCancelled(ref gestureData);
			}
			break;

		case 2:  // gesture phase 3 = complete: hand swung back to the original side within 1.5s
			if((timestamp - gestureData.timestamp) < 1.5f)
			{
				bool isInPose = gestureData.joint == rightHandIndex ?
					jointsTracked[rightHandIndex] && jointsTracked[rightElbowIndex] &&
					(jointsPos[rightHandIndex].y - jointsPos[rightElbowIndex].y) > 0.1f &&
					(jointsPos[rightHandIndex].x - jointsPos[rightElbowIndex].x) > 0.05f :
					jointsTracked[leftHandIndex] && jointsTracked[leftElbowIndex] &&
					(jointsPos[leftHandIndex].y - jointsPos[leftElbowIndex].y) > 0.1f &&
					(jointsPos[leftHandIndex].x - jointsPos[leftElbowIndex].x) < -0.05f;

				if(isInPose)
				{
					Vector3 jointPos = jointsPos[gestureData.joint];
					CheckPoseComplete(ref gestureData, timestamp, jointPos, isInPose, 0f);
				}
			}
			else
			{
				// cancel the gesture
				SetGestureCancelled(ref gestureData);
			}
			break;
	}
	break;

// NOTE(review): a large commented-out Click-gesture implementation (phases 1-3 with
// stay-in-place detection and SetScreenPos calls) was removed here for readability;
// recover it from version control if it is ever needed again.

// check for SwipeLeft
case Gestures.SwipeLeft:
	switch(gestureData.state)
	{
		case 0:  // phase 1: right hand inside the gesture rectangle, near its right edge
			if(jointsTracked[rightHandIndex] && jointsTracked[hipCenterIndex] && jointsTracked[shoulderCenterIndex] &&
			   jointsTracked[leftHipIndex] && jointsTracked[rightHipIndex] &&
			   jointsPos[rightHandIndex].y >= gestureBottom && jointsPos[rightHandIndex].y <= gestureTop &&
			   jointsPos[rightHandIndex].x <= gestureRight && jointsPos[rightHandIndex].x > gestureLeft)
			{
				SetGestureJoint(ref gestureData, timestamp, rightHandIndex, jointsPos[rightHandIndex]);
				gestureData.progress = 0.1f;
			}
			break;

		case 1:  // phase 2 = complete: hand crossed the left edge within 1.5s
			if((timestamp - gestureData.timestamp) < 1.5f)
			{
				bool isInPose = jointsTracked[rightHandIndex] && jointsTracked[hipCenterIndex] && jointsTracked[shoulderCenterIndex] &&
					jointsTracked[leftHipIndex] && jointsTracked[rightHipIndex] &&
					jointsPos[rightHandIndex].y >= gestureBottom && jointsPos[rightHandIndex].y <= gestureTop &&
					jointsPos[rightHandIndex].x <= gestureLeft;

				if(isInPose)
				{
					Vector3 jointPos = jointsPos[gestureData.joint];
					CheckPoseComplete(ref gestureData, timestamp, jointPos, isInPose, 0f);
				}
				else if(jointsPos[rightHandIndex].x <= gestureRight)
				{
					// hand still travelling left - report progress proportional to distance covered
					float gestureSize = gestureRight - gestureLeft;
					gestureData.progress = gestureSize > 0.01f ? (gestureRight - jointsPos[rightHandIndex].x) / gestureSize : 0f;
				}
			}
			else
			{
				// cancel the gesture - took longer than 1.5s
				SetGestureCancelled(ref gestureData);
			}
			break;
	}
	break;

// check for SwipeRight
case Gestures.SwipeRight:
	switch(gestureData.state)
	{
		case 0:  // phase 1: left hand inside the gesture rectangle, near its left edge
			if(jointsTracked[leftHandIndex] && jointsTracked[hipCenterIndex] && jointsTracked[shoulderCenterIndex] &&
			   jointsTracked[leftHipIndex] && jointsTracked[rightHipIndex] &&
			   jointsPos[leftHandIndex].y >= gestureBottom && jointsPos[leftHandIndex].y <= gestureTop &&
			   jointsPos[leftHandIndex].x >= gestureLeft && jointsPos[leftHandIndex].x < gestureRight)
			{
				SetGestureJoint(ref gestureData, timestamp, leftHandIndex, jointsPos[leftHandIndex]);
				gestureData.progress = 0.1f;
			}
			break;

		case 1:  // phase 2 = complete: hand crossed the right edge within 1.5s
			if((timestamp - gestureData.timestamp) < 1.5f)
			{
				bool isInPose = jointsTracked[leftHandIndex] && jointsTracked[hipCenterIndex] && jointsTracked[shoulderCenterIndex] &&
					jointsTracked[leftHipIndex] && jointsTracked[rightHipIndex] &&
					jointsPos[leftHandIndex].y >= gestureBottom && jointsPos[leftHandIndex].y <= gestureTop &&
					jointsPos[leftHandIndex].x >= gestureRight;

				if(isInPose)
				{
					Vector3 jointPos =
jointsPos[gestureData.joint];  // completes the 'Vector3 jointPos = ...' assignment begun on the previous line
					CheckPoseComplete(ref gestureData, timestamp, jointPos, isInPose, 0f);
				}
				else if(jointsPos[leftHandIndex].x >= gestureLeft)
				{
					// hand still travelling right - report progress proportional to distance covered
					float gestureSize = gestureRight - gestureLeft;
					gestureData.progress = gestureSize > 0.01f ? (jointsPos[leftHandIndex].x - gestureLeft) / gestureSize : 0f;
				}
			}
			else
			{
				// cancel the gesture - took longer than 1.5s
				SetGestureCancelled(ref gestureData);
			}
			break;
	}
	break;

// check for SwipeUp
case Gestures.SwipeUp:
	switch(gestureData.state)
	{
		case 0:  // phase 1: a hand slightly below the opposite elbow (cross-body start position)
			if(jointsTracked[rightHandIndex] && jointsTracked[leftElbowIndex] &&
			   (jointsPos[rightHandIndex].y - jointsPos[leftElbowIndex].y) < -0.0f &&
			   (jointsPos[rightHandIndex].y - jointsPos[leftElbowIndex].y) > -0.15f)
			{
				SetGestureJoint(ref gestureData, timestamp, rightHandIndex, jointsPos[rightHandIndex]);
				gestureData.progress = 0.5f;
			}
			else if(jointsTracked[leftHandIndex] && jointsTracked[rightElbowIndex] &&
			        (jointsPos[leftHandIndex].y - jointsPos[rightElbowIndex].y) < -0.0f &&
			        (jointsPos[leftHandIndex].y - jointsPos[rightElbowIndex].y) > -0.15f)
			{
				SetGestureJoint(ref gestureData, timestamp, leftHandIndex, jointsPos[leftHandIndex]);
				gestureData.progress = 0.5f;
			}
			break;

		case 1:  // phase 2 = complete: hand raised above the opposite shoulder with little horizontal drift
			if((timestamp - gestureData.timestamp) < 1.5f)
			{
				bool isInPose = gestureData.joint == rightHandIndex ?
					jointsTracked[rightHandIndex] && jointsTracked[leftShoulderIndex] &&
					(jointsPos[rightHandIndex].y - jointsPos[leftShoulderIndex].y) > 0.05f &&
					Mathf.Abs(jointsPos[rightHandIndex].x - gestureData.jointPos.x) <= 0.1f :
					jointsTracked[leftHandIndex] && jointsTracked[rightShoulderIndex] &&
					(jointsPos[leftHandIndex].y - jointsPos[rightShoulderIndex].y) > 0.05f &&
					Mathf.Abs(jointsPos[leftHandIndex].x - gestureData.jointPos.x) <= 0.1f;

				if(isInPose)
				{
					Vector3 jointPos = jointsPos[gestureData.joint];
					CheckPoseComplete(ref gestureData, timestamp, jointPos, isInPose, 0f);
				}
			}
			else
			{
				// cancel the gesture
				SetGestureCancelled(ref gestureData);
			}
			break;
	}
	break;

// check for SwipeDown
case Gestures.SwipeDown:
	switch(gestureData.state)
	{
		case 0:  // phase 1: a hand above the opposite shoulder
			if(jointsTracked[rightHandIndex] && jointsTracked[leftShoulderIndex] &&
			   (jointsPos[rightHandIndex].y - jointsPos[leftShoulderIndex].y) >= 0.05f)
			{
				SetGestureJoint(ref gestureData, timestamp, rightHandIndex, jointsPos[rightHandIndex]);
				gestureData.progress = 0.5f;
			}
			else if(jointsTracked[leftHandIndex] && jointsTracked[rightShoulderIndex] &&
			        (jointsPos[leftHandIndex].y - jointsPos[rightShoulderIndex].y) >= 0.05f)
			{
				SetGestureJoint(ref gestureData, timestamp, leftHandIndex, jointsPos[leftHandIndex]);
				gestureData.progress = 0.5f;
			}
			break;

		case 1:  // phase 2 = complete: hand dropped below the opposite elbow with little horizontal drift
			if((timestamp - gestureData.timestamp) < 1.5f)
			{
				bool isInPose = gestureData.joint == rightHandIndex ?
					jointsTracked[rightHandIndex] && jointsTracked[leftElbowIndex] &&
					(jointsPos[rightHandIndex].y - jointsPos[leftElbowIndex].y) < -0.15f &&
					Mathf.Abs(jointsPos[rightHandIndex].x - gestureData.jointPos.x) <= 0.1f :
					jointsTracked[leftHandIndex] && jointsTracked[rightElbowIndex] &&
					(jointsPos[leftHandIndex].y - jointsPos[rightElbowIndex].y) < -0.15f &&
					Mathf.Abs(jointsPos[leftHandIndex].x - gestureData.jointPos.x) <= 0.1f;

				if(isInPose)
				{
					Vector3 jointPos = jointsPos[gestureData.joint];
					CheckPoseComplete(ref gestureData, timestamp, jointPos, isInPose, 0f);
				}
			}
			else
			{
				// cancel the gesture
				SetGestureCancelled(ref gestureData);
			}
			break;
	}
	break;

// NOTE(review): commented-out RightHandCursor/LeftHandCursor perpetual-cursor cases were
// removed here for readability; recover them from version control if needed.

// check for ZoomIn
case Gestures.ZoomIn:
	// distance between the two hands; the gesture starts with the hands close together
	Vector3 vectorZoomOut = (Vector3)jointsPos[rightHandIndex] - jointsPos[leftHandIndex];
	float distZoomOut = vectorZoomOut.magnitude;

	switch(gestureData.state)
	{
		case 0:  // phase 1: both hands inside the gesture rectangle, less than 0.3 apart
			if(jointsTracked[leftHandIndex] && jointsTracked[rightHandIndex] && jointsTracked[hipCenterIndex] &&
			   jointsTracked[shoulderCenterIndex] && jointsTracked[leftHipIndex] && jointsTracked[rightHipIndex] &&
			   jointsPos[leftHandIndex].y >= gestureBottom && jointsPos[leftHandIndex].y <= gestureTop &&
			   jointsPos[rightHandIndex].y >= gestureBottom && jointsPos[rightHandIndex].y <= gestureTop &&
			   distZoomOut < 0.3f)
			{
				SetGestureJoint(ref gestureData, timestamp, rightHandIndex, jointsPos[rightHandIndex]);
				gestureData.tagVector = Vector3.right;
				gestureData.tagFloat = 0f;
				gestureData.progress = 0.3f;
			}
			break;

		case 1:  // phase 2 = zooming: hands moving apart while staying roughly horizontal
			if((timestamp - gestureData.timestamp) < 1.0f)
			{
				// signed tilt of the hand-to-hand vector relative to the initial (horizontal) reference
				float angleZoomOut = Vector3.Angle(gestureData.tagVector, vectorZoomOut) *
					Mathf.Sign(vectorZoomOut.y - gestureData.tagVector.y);
				bool isInPose = jointsTracked[leftHandIndex] && jointsTracked[rightHandIndex] && jointsTracked[hipCenterIndex] &&
					jointsTracked[shoulderCenterIndex] && jointsTracked[leftHipIndex] && jointsTracked[rightHipIndex] &&
					jointsPos[leftHandIndex].y >= gestureBottom && jointsPos[leftHandIndex].y <= gestureTop &&
					jointsPos[rightHandIndex].y >= gestureBottom && jointsPos[rightHandIndex].y <= gestureTop &&
					distZoomOut < 1.5f && Mathf.Abs(angleZoomOut) < 20f;

				if(isInPose)
				{
					SetZoomFactor(userId, ref gestureData, 1.0f, ref jointsPos, ref jointsTracked);
					gestureData.timestamp = timestamp;
					gestureData.progress = 0.7f;
				}
			}
			else
			{
				// cancel the gesture
				SetGestureCancelled(ref gestureData);
			}
			break;
	}
	break;

// check for ZoomOut
case Gestures.ZoomOut:
	// distance between the two hands; the gesture starts with the hands wide apart
	Vector3 vectorZoomIn = (Vector3)jointsPos[rightHandIndex] - jointsPos[leftHandIndex];
	float distZoomIn = vectorZoomIn.magnitude;

	switch(gestureData.state)
	{
		case 0:  // phase 1: both hands inside the gesture rectangle, at least 0.7 apart
			if(jointsTracked[leftHandIndex] && jointsTracked[rightHandIndex] && jointsTracked[hipCenterIndex] &&
			   jointsTracked[shoulderCenterIndex] && jointsTracked[leftHipIndex] && jointsTracked[rightHipIndex] &&
			   jointsPos[leftHandIndex].y >= gestureBottom && jointsPos[leftHandIndex].y <= gestureTop &&
			   jointsPos[rightHandIndex].y >= gestureBottom && jointsPos[rightHandIndex].y <= gestureTop &&
			   distZoomIn >= 0.7f)
			{
				SetGestureJoint(ref gestureData, timestamp, rightHandIndex, jointsPos[rightHandIndex]);
				gestureData.tagVector = Vector3.right;
				gestureData.tagFloat = distZoomIn;
				gestureData.progress = 0.3f;
			}
			break;

		case 1:  // phase 2 = zooming: hands moving together while staying roughly horizontal
			if((timestamp - gestureData.timestamp) < 1.0f)
			{
				float angleZoomIn = Vector3.Angle(gestureData.tagVector, vectorZoomIn) *
					Mathf.Sign(vectorZoomIn.y - gestureData.tagVector.y);
				bool isInPose = jointsTracked[leftHandIndex] && jointsTracked[rightHandIndex] && jointsTracked[hipCenterIndex] &&
					jointsTracked[shoulderCenterIndex] && jointsTracked[leftHipIndex] && jointsTracked[rightHipIndex] &&
					jointsPos[leftHandIndex].y >= gestureBottom && jointsPos[leftHandIndex].y <= gestureTop &&
					jointsPos[rightHandIndex].y >= gestureBottom && jointsPos[rightHandIndex].y <= gestureTop &&
					distZoomIn >= 0.2f && Mathf.Abs(angleZoomIn) < 20f;

				if(isInPose)
				{
					SetZoomFactor(userId, ref gestureData, 0.0f, ref jointsPos, ref jointsTracked);
					gestureData.timestamp = timestamp;
					gestureData.progress = 0.7f;
				}
			}
			else
			{
				// cancel the gesture
				SetGestureCancelled(ref gestureData);
			}
			break;
	}
	break;

// check for Wheel
case Gestures.Wheel:
	// imaginary steering wheel held between the two hands
	Vector3 vectorWheel = (Vector3)jointsPos[rightHandIndex] - jointsPos[leftHandIndex];
	float distWheel = vectorWheel.magnitude;

	switch(gestureData.state)
	{
		case 0:  // phase 1: both hands in the gesture rectangle, 0.3 - 0.7 apart
			if(jointsTracked[leftHandIndex] && jointsTracked[rightHandIndex] && jointsTracked[hipCenterIndex] &&
			   jointsTracked[shoulderCenterIndex] && jointsTracked[leftHipIndex] && jointsTracked[rightHipIndex] &&
			   jointsPos[leftHandIndex].y >= gestureBottom && jointsPos[leftHandIndex].y <= gestureTop &&
			   jointsPos[rightHandIndex].y >= gestureBottom && jointsPos[rightHandIndex].y <= gestureTop &&
			   distWheel >= 0.3f && distWheel < 0.7f)
			{
				SetGestureJoint(ref gestureData, timestamp, rightHandIndex, jointsPos[rightHandIndex]);
				gestureData.tagVector = Vector3.right;
				gestureData.tagFloat = distWheel;
				gestureData.progress = 0.3f;
			}
			break;

		case 1:  // phase 2 = turning: hand distance stays roughly constant, wheel angle reported continuously
			if((timestamp - gestureData.timestamp) < 0.5f)
			{
				float angle = Vector3.Angle(gestureData.tagVector, vectorWheel) *
					Mathf.Sign(vectorWheel.y - gestureData.tagVector.y);
				bool isInPose = jointsTracked[leftHandIndex] && jointsTracked[rightHandIndex] && jointsTracked[hipCenterIndex] &&
					jointsTracked[shoulderCenterIndex] && jointsTracked[leftHipIndex] && jointsTracked[rightHipIndex] &&
					jointsPos[leftHandIndex].y >= gestureBottom && jointsPos[leftHandIndex].y <= gestureTop &&
					jointsPos[rightHandIndex].y >= gestureBottom && jointsPos[rightHandIndex].y <= gestureTop &&
					distWheel >= 0.3f && distWheel < 0.7f &&
					Mathf.Abs(distWheel - gestureData.tagFloat) < 0.1f;

				if(isInPose)
				{
					//SetWheelRotation(userId, ref gestureData, gestureData.tagVector, vectorWheel);
					gestureData.screenPos.z = angle;  // wheel angle
					gestureData.timestamp = timestamp;
					gestureData.tagFloat = distWheel;
					gestureData.progress = 0.7f;
				}
			}
			else
			{
				// cancel the gesture
SetGestureCancelled(ref gestureData);  // wheel phase 2 timed out (> 0.5s)
			}
			break;
	}
	break;

// check for Jump
case Gestures.Jump:
	switch(gestureData.state)
	{
		case 0:  // phase 1: hip center tracked at a plausible standing height (0.6 - 1.2)
			if(jointsTracked[hipCenterIndex] &&
			   (jointsPos[hipCenterIndex].y > 0.6f) && (jointsPos[hipCenterIndex].y < 1.2f))
			{
				SetGestureJoint(ref gestureData, timestamp, hipCenterIndex, jointsPos[hipCenterIndex]);
				gestureData.progress = 0.5f;
			}
			break;

		case 1:  // phase 2 = complete: hip center rose by >0.15 without drifting sideways
			if((timestamp - gestureData.timestamp) < 1.5f)
			{
				bool isInPose = jointsTracked[hipCenterIndex] &&
					(jointsPos[hipCenterIndex].y - gestureData.jointPos.y) > 0.15f &&
					Mathf.Abs(jointsPos[hipCenterIndex].x - gestureData.jointPos.x) < 0.2f;

				if(isInPose)
				{
					Vector3 jointPos = jointsPos[gestureData.joint];
					CheckPoseComplete(ref gestureData, timestamp, jointPos, isInPose, 0f);
				}
			}
			else
			{
				// cancel the gesture
				SetGestureCancelled(ref gestureData);
			}
			break;
	}
	break;

// check for Squat
case Gestures.Squat:
	switch(gestureData.state)
	{
		case 0:  // phase 1: hip center already low (<= 0.7)
			if(jointsTracked[hipCenterIndex] &&
			   (jointsPos[hipCenterIndex].y <= 0.7f))
			{
				SetGestureJoint(ref gestureData, timestamp, hipCenterIndex, jointsPos[hipCenterIndex]);
				gestureData.progress = 0.5f;
			}
			break;

		case 1:  // phase 2 = complete: hip center dropped by >0.15 without drifting sideways
			if((timestamp - gestureData.timestamp) < 1.5f)
			{
				bool isInPose = jointsTracked[hipCenterIndex] &&
					(jointsPos[hipCenterIndex].y - gestureData.jointPos.y) < -0.15f &&
					Mathf.Abs(jointsPos[hipCenterIndex].x - gestureData.jointPos.x) < 0.2f;

				if(isInPose)
				{
					Vector3 jointPos = jointsPos[gestureData.joint];
					CheckPoseComplete(ref gestureData, timestamp, jointPos, isInPose, 0f);
				}
			}
			else
			{
				// cancel the gesture
				SetGestureCancelled(ref gestureData);
			}
			break;
	}
	break;

// check for Push
case Gestures.Push:
	switch(gestureData.state)
	{
		case 0:  // phase 1: hand raised to shoulder line and slightly in front of the body
			if(jointsTracked[rightHandIndex] && jointsTracked[leftElbowIndex] && jointsTracked[rightShoulderIndex] &&
			   (jointsPos[rightHandIndex].y - jointsPos[leftElbowIndex].y) > -0.1f &&
			   Mathf.Abs(jointsPos[rightHandIndex].x - jointsPos[rightShoulderIndex].x) < 0.2f &&
			   (jointsPos[rightHandIndex].z - jointsPos[leftElbowIndex].z) < -0.2f)
			{
				SetGestureJoint(ref gestureData, timestamp, rightHandIndex, jointsPos[rightHandIndex]);
				gestureData.progress = 0.5f;
			}
			else if(jointsTracked[leftHandIndex] && jointsTracked[rightElbowIndex] && jointsTracked[leftShoulderIndex] &&
			        (jointsPos[leftHandIndex].y - jointsPos[rightElbowIndex].y) > -0.1f &&
			        Mathf.Abs(jointsPos[leftHandIndex].x - jointsPos[leftShoulderIndex].x) < 0.2f &&
			        (jointsPos[leftHandIndex].z - jointsPos[rightElbowIndex].z) < -0.2f)
			{
				SetGestureJoint(ref gestureData, timestamp, leftHandIndex, jointsPos[leftHandIndex]);
				gestureData.progress = 0.5f;
			}
			break;

		case 1:  // phase 2 = complete: hand pushed further forward (z decreased by >0.2)
			if((timestamp - gestureData.timestamp) < 1.5f)
			{
				bool isInPose = gestureData.joint == rightHandIndex ?
					jointsTracked[rightHandIndex] && jointsTracked[leftElbowIndex] && jointsTracked[rightShoulderIndex] &&
					(jointsPos[rightHandIndex].y - jointsPos[leftElbowIndex].y) > -0.1f &&
					Mathf.Abs(jointsPos[rightHandIndex].x - gestureData.jointPos.x) < 0.2f &&
					(jointsPos[rightHandIndex].z - gestureData.jointPos.z) < -0.2f :
					jointsTracked[leftHandIndex] && jointsTracked[rightElbowIndex] && jointsTracked[leftShoulderIndex] &&
					(jointsPos[leftHandIndex].y - jointsPos[rightElbowIndex].y) > -0.1f &&
					Mathf.Abs(jointsPos[leftHandIndex].x - gestureData.jointPos.x) < 0.2f &&
					(jointsPos[leftHandIndex].z - gestureData.jointPos.z) < -0.2f;

				if(isInPose)
				{
					Vector3 jointPos = jointsPos[gestureData.joint];
					CheckPoseComplete(ref gestureData, timestamp, jointPos, isInPose, 0f);
				}
			}
			else
			{
				// cancel the gesture
				SetGestureCancelled(ref gestureData);
			}
			break;
	}
	break;

// check for Pull
case Gestures.Pull:
	switch(gestureData.state)
	{
		case 0:  // phase 1: arm extended forward (z-diff < -0.3) at shoulder line
			if(jointsTracked[rightHandIndex] && jointsTracked[leftElbowIndex] && jointsTracked[rightShoulderIndex] &&
			   (jointsPos[rightHandIndex].y - jointsPos[leftElbowIndex].y) > -0.1f &&
			   Mathf.Abs(jointsPos[rightHandIndex].x - jointsPos[rightShoulderIndex].x) < 0.2f &&
			   (jointsPos[rightHandIndex].z - jointsPos[leftElbowIndex].z) < -0.3f)
			{
				SetGestureJoint(ref gestureData, timestamp, rightHandIndex, jointsPos[rightHandIndex]);
				gestureData.progress = 0.5f;
			}
			else if(jointsTracked[leftHandIndex] && jointsTracked[rightElbowIndex] && jointsTracked[leftShoulderIndex] &&
			        (jointsPos[leftHandIndex].y - jointsPos[rightElbowIndex].y) > -0.1f &&
			        Mathf.Abs(jointsPos[leftHandIndex].x - jointsPos[leftShoulderIndex].x) < 0.2f &&
			        (jointsPos[leftHandIndex].z - jointsPos[rightElbowIndex].z) < -0.3f)
			{
				SetGestureJoint(ref gestureData, timestamp, leftHandIndex, jointsPos[leftHandIndex]);
				gestureData.progress = 0.5f;
			}
			break;

		case 1:  // phase 2 = complete: hand pulled back towards the body (z increased by >0.25)
			if((timestamp - gestureData.timestamp) < 1.5f)
			{
				bool isInPose = gestureData.joint == rightHandIndex ?
					jointsTracked[rightHandIndex] && jointsTracked[leftElbowIndex] && jointsTracked[rightShoulderIndex] &&
					(jointsPos[rightHandIndex].y - jointsPos[leftElbowIndex].y) > -0.1f &&
					Mathf.Abs(jointsPos[rightHandIndex].x - gestureData.jointPos.x) < 0.2f &&
					(jointsPos[rightHandIndex].z - gestureData.jointPos.z) > 0.25f :
					jointsTracked[leftHandIndex] && jointsTracked[rightElbowIndex] && jointsTracked[leftShoulderIndex] &&
					(jointsPos[leftHandIndex].y - jointsPos[rightElbowIndex].y) > -0.1f &&
					Mathf.Abs(jointsPos[leftHandIndex].x - gestureData.jointPos.x) < 0.2f &&
					(jointsPos[leftHandIndex].z - gestureData.jointPos.z) > 0.25f;

				if(isInPose)
				{
					Vector3 jointPos = jointsPos[gestureData.joint];
					CheckPoseComplete(ref gestureData, timestamp, jointPos, isInPose, 0f);
				}
			}
			else
			{
				// cancel the gesture
				SetGestureCancelled(ref gestureData);
			}
			break;
	}
	break;

// check for ShoulderLeftFront (right shoulder turned forward, torso twisting left)
case Gestures.ShoulderLeftFront:
	switch(gestureData.state)
	{
		case 0:  // phase 1: shoulders roughly level with the torso
			if(jointsTracked[leftShoulderIndex] && jointsTracked[rightShoulderIndex] && jointsTracked[leftHipIndex] &&
			   (jointsPos[rightShoulderIndex].z - jointsPos[leftHipIndex].z) < 0f &&
			   (jointsPos[rightShoulderIndex].z - jointsPos[leftShoulderIndex].z) > -0.15f)
			{
				SetGestureJoint(ref gestureData, timestamp, rightShoulderIndex, jointsPos[rightShoulderIndex]);
				gestureData.progress = 0.5f;
			}
			break;

		case 1:  // phase 2 = complete: right shoulder clearly in front of the left one
			if((timestamp - gestureData.timestamp) < 1.5f)
			{
				bool isInPose = jointsTracked[leftShoulderIndex] && jointsTracked[rightShoulderIndex] && jointsTracked[leftHipIndex] &&
					(jointsPos[rightShoulderIndex].z - jointsPos[leftShoulderIndex].z) < -0.2f;

				if(isInPose)
				{
					Vector3 jointPos = jointsPos[gestureData.joint];
					CheckPoseComplete(ref gestureData, timestamp, jointPos, isInPose, 0f);
				}
			}
			else
			{
				// cancel the gesture
				SetGestureCancelled(ref gestureData);
			}
			break;
	}
	break;

// check for ShoulderRightFront (left shoulder turned forward, torso twisting right)
case Gestures.ShoulderRightFront:
	switch(gestureData.state)
	{
		case 0:  // phase 1: shoulders roughly level with the torso
			if(jointsTracked[leftShoulderIndex] && jointsTracked[rightShoulderIndex] && jointsTracked[rightHipIndex] &&
			   (jointsPos[leftShoulderIndex].z - jointsPos[rightHipIndex].z) < 0f &&
			   (jointsPos[leftShoulderIndex].z - jointsPos[rightShoulderIndex].z) > -0.15f)
			{
				SetGestureJoint(ref gestureData, timestamp, leftShoulderIndex, jointsPos[leftShoulderIndex]);
				gestureData.progress = 0.5f;
			}
			break;

		case 1:  // phase 2 = complete: left shoulder clearly in front of the right one
			if((timestamp - gestureData.timestamp) < 1.5f)
			{
				bool isInPose = jointsTracked[leftShoulderIndex] && jointsTracked[rightShoulderIndex] && jointsTracked[rightHipIndex] &&
					(jointsPos[leftShoulderIndex].z - jointsPos[rightShoulderIndex].z) < -0.2f;

				if(isInPose)
				{
					Vector3 jointPos = jointsPos[gestureData.joint];
					CheckPoseComplete(ref gestureData, timestamp, jointPos, isInPose, 0f);
				}
			}
			else
			{
				// cancel the gesture
				SetGestureCancelled(ref gestureData);
			}
			break;
	}
	break;

// check for LeanLeft
case Gestures.LeanLeft:
	switch(gestureData.state)
	{
		case 0:  // gesture detection - phase 1 (right shoulder is left of the right hip, means leaning left)
			if(jointsTracked[rightShoulderIndex] && jointsTracked[rightHipIndex] &&
			   (jointsPos[rightShoulderIndex].x - jointsPos[rightHipIndex].x) < 0f)
			{
				SetGestureJoint(ref gestureData, timestamp, rightShoulderIndex, jointsPos[rightShoulderIndex]);
				gestureData.progress = 0.3f;
			}
			break;

		case 1:  // gesture phase 2 = complete (continuous - reports the lean angle while held)
			if((timestamp - gestureData.timestamp) < 0.5f)
			{
				// check if right shoulder is still left of the right hip (leaning left)
				bool isInPose = jointsTracked[rightShoulderIndex] && jointsTracked[rightHipIndex] &&
					(jointsPos[rightShoulderIndex].x - jointsPos[rightHipIndex].x) < 0f;

				if(isInPose)
				{
					// calculate lean angle of the spine against the vertical
					Vector3 vSpineLL = jointsPos[shoulderCenterIndex] - jointsPos[hipCenterIndex];
					gestureData.screenPos.z = Vector3.Angle(Vector3.up, vSpineLL);

					gestureData.timestamp = timestamp;
					gestureData.progress = 0.7f;
				}
			}
			else
			{
				// cancel the gesture
				SetGestureCancelled(ref gestureData);
			}
			break;
	}
	break;

// check for LeanRight
case Gestures.LeanRight:
	switch(gestureData.state)
	{
		case 0:  // gesture detection - phase 1 (left shoulder is right of the left hip, means leaning right)
			if(jointsTracked[leftShoulderIndex] && jointsTracked[leftHipIndex] &&
			   (jointsPos[leftShoulderIndex].x - jointsPos[leftHipIndex].x) > 0f)
			{
				SetGestureJoint(ref gestureData, timestamp, leftShoulderIndex, jointsPos[leftShoulderIndex]);
				gestureData.progress = 0.3f;
			}
			break;

		case 1:  // gesture phase 2 = complete (continuous - reports the lean angle while held)
			if((timestamp - gestureData.timestamp) < 0.5f)
			{
				// check if left shoulder is still right of the left hip (leaning right)
				bool isInPose = jointsTracked[leftShoulderIndex] && jointsTracked[leftHipIndex] &&
					(jointsPos[leftShoulderIndex].x - jointsPos[leftHipIndex].x) > 0f;

				if(isInPose)
				{
					// calculate lean angle of the spine against the vertical
					Vector3 vSpineLL = jointsPos[shoulderCenterIndex] - jointsPos[hipCenterIndex];
					gestureData.screenPos.z = Vector3.Angle(Vector3.up, vSpineLL);

					gestureData.timestamp = timestamp;
					gestureData.progress = 0.7f;
				}
			}
			else
			{
				// cancel the gesture
				SetGestureCancelled(ref gestureData);
			}
			break;
	}
	break;

// check for KickLeft
case Gestures.KickLeft:
	switch(gestureData.state)
	{
		case 0:  // phase 1: left ankle slightly forward, near the right ankle
			if(jointsTracked[leftAnkleIndex] && jointsTracked[rightAnkleIndex] && jointsTracked[leftHipIndex] &&
			   (jointsPos[leftAnkleIndex].z - jointsPos[leftHipIndex].z) < 0f &&
			   (jointsPos[leftAnkleIndex].z - jointsPos[rightAnkleIndex].z) > -0.2f)
			{
				SetGestureJoint(ref gestureData, timestamp, leftAnkleIndex, jointsPos[leftAnkleIndex]);
				gestureData.progress = 0.5f;
			}
			break;

		case 1:  // phase 2 = complete: left ankle kicked clearly forward of the right one
			if((timestamp - gestureData.timestamp) < 1.5f)
			{
				bool isInPose = jointsTracked[leftAnkleIndex] && jointsTracked[rightAnkleIndex] && jointsTracked[leftHipIndex] &&
					(jointsPos[leftAnkleIndex].z - jointsPos[rightAnkleIndex].z) < -0.4f;

				if(isInPose)
				{
					Vector3 jointPos = jointsPos[gestureData.joint];
					CheckPoseComplete(ref gestureData, timestamp, jointPos, isInPose, 0f);
				}
			}
			else
			{
				// cancel the gesture
				SetGestureCancelled(ref gestureData);
			}
			break;
	}
	break;

// check for KickRight
case Gestures.KickRight:
	switch(gestureData.state)
	{
		case 0:  // phase 1: right ankle slightly forward, near the left ankle
			if(jointsTracked[leftAnkleIndex] && jointsTracked[rightAnkleIndex] && jointsTracked[rightHipIndex] &&
			   (jointsPos[rightAnkleIndex].z - jointsPos[rightHipIndex].z) < 0f &&
			   (jointsPos[rightAnkleIndex].z - jointsPos[leftAnkleIndex].z) > -0.2f)
			{
				SetGestureJoint(ref gestureData, timestamp, rightAnkleIndex, jointsPos[rightAnkleIndex]);
				gestureData.progress = 0.5f;
			}
			break;

		case 1:  // phase 2 = complete: right ankle kicked clearly forward of the left one
			if((timestamp - gestureData.timestamp) < 1.5f)
			{
				bool isInPose = jointsTracked[leftAnkleIndex] && jointsTracked[rightAnkleIndex] && jointsTracked[rightHipIndex] &&
					(jointsPos[rightAnkleIndex].z - jointsPos[leftAnkleIndex].z) < -0.4f;

				if(isInPose)
				{
					Vector3 jointPos = jointsPos[gestureData.joint];
					CheckPoseComplete(ref gestureData, timestamp, jointPos, isInPose, 0f);
				}
			}
			else
			{
				// cancel the gesture
				SetGestureCancelled(ref gestureData);
			}
			break;
	}
	break;

	// here come more gesture-cases
}
}
}