context
stringlengths
2.52k
185k
gt
stringclasses
1 value
using UnityEngine;
using UnityEditor;
//using System.Linq;
using System.Collections.Generic;

namespace UMA.Editors
{
    /// <summary>
    /// Custom inspector for <see cref="SlotLibrary"/>. Edits the backing
    /// "slotElementList" serialized array directly via SerializedProperty paths
    /// (so Undo and multi-object editing behave correctly) and supports
    /// drag-and-drop of SlotDataAssets — or whole folders of them — onto the
    /// inspector.
    /// </summary>
    [CustomEditor(typeof(SlotLibrary))]
    [CanEditMultipleObjects]
    public class SlotLibraryEditor : Editor
    {
        private SerializedObject m_Object;
        private SlotLibrary slotLibrary;
        // Serialized size property of the slotElementList array.
        private SerializedProperty m_SlotDataAssetCount;
        // Property paths into the serialized "slotElementList" array.
        private const string kArraySizePath = "slotElementList.Array.size";
        private const string kArrayData = "slotElementList.Array.data[{0}]";
        // Guards against writing back stale ObjectField values while the list
        // is being mutated mid-draw (sort/remove operations clear it).
        private bool canUpdate;
        // Set whenever the serialized list changes; triggers UpdateDictionary().
        private bool isDirty;

        public void OnEnable()
        {
            m_Object = new SerializedObject(target);
            slotLibrary = m_Object.targetObject as SlotLibrary;
            m_SlotDataAssetCount = m_Object.FindProperty(kArraySizePath);
        }

        /// <summary>Snapshot of the serialized slot array as a plain C# array.</summary>
        private SlotDataAsset[] GetSlotDataAssetArray()
        {
            int arrayCount = m_SlotDataAssetCount.intValue;
            SlotDataAsset[] SlotDataAssetArray = new SlotDataAsset[arrayCount];
            for (int i = 0; i < arrayCount; i++)
            {
                SlotDataAssetArray[i] = m_Object.FindProperty(string.Format(kArrayData, i)).objectReferenceValue as SlotDataAsset;
            }
            return SlotDataAssetArray;
        }

        /// <summary>Writes <paramref name="slotElement"/> into the serialized array at <paramref name="index"/>.</summary>
        private void SetSlotDataAsset(int index, SlotDataAsset slotElement)
        {
            m_Object.FindProperty(string.Format(kArrayData, index)).objectReferenceValue = slotElement;
            isDirty = true;
        }

        private SlotDataAsset GetSlotDataAssetAtIndex(int index)
        {
            return m_Object.FindProperty(string.Format(kArrayData, index)).objectReferenceValue as SlotDataAsset;
        }

        /// <summary>Appends <paramref name="slotElement"/> (may be null) to the serialized array.</summary>
        private void AddSlotDataAsset(SlotDataAsset slotElement)
        {
            m_SlotDataAssetCount.intValue++;
            SetSlotDataAsset(m_SlotDataAssetCount.intValue - 1, slotElement);
        }

        /// <summary>Removes the entry at <paramref name="index"/>, shifting later entries down.</summary>
        private void RemoveSlotDataAssetAtIndex(int index)
        {
            for (int i = index; i < m_SlotDataAssetCount.intValue - 1; i++)
            {
                SetSlotDataAsset(i, GetSlotDataAssetAtIndex(i + 1));
            }
            m_SlotDataAssetCount.intValue--;
        }

        /// <summary>
        /// Handles drag-and-drop over <paramref name="dropArea"/>: individual
        /// SlotDataAssets are added directly; dropped folders are scanned
        /// recursively for .asset files containing SlotDataAssets.
        /// </summary>
        private void DropAreaGUI(Rect dropArea)
        {
            var evt = Event.current;
            if (evt.type == EventType.DragUpdated)
            {
                if (dropArea.Contains(evt.mousePosition))
                {
                    DragAndDrop.visualMode = DragAndDropVisualMode.Copy;
                }
            }
            if (evt.type == EventType.DragPerform)
            {
                if (dropArea.Contains(evt.mousePosition))
                {
                    DragAndDrop.AcceptDrag();
                    // objectReferences is already UnityEngine.Object[]; the
                    // previous "as UnityEngine.Object[]" cast was redundant.
                    UnityEngine.Object[] draggedObjects = DragAndDrop.objectReferences;
                    for (int i = 0; i < draggedObjects.Length; i++)
                    {
                        if (draggedObjects[i])
                        {
                            SlotDataAsset tempSlotDataAsset = draggedObjects[i] as SlotDataAsset;
                            if (tempSlotDataAsset)
                            {
                                AddSlotDataAsset(tempSlotDataAsset);
                                continue;
                            }
                            // Not a slot asset — if it is a folder, scan it.
                            var path = AssetDatabase.GetAssetPath(draggedObjects[i]);
                            if (System.IO.Directory.Exists(path))
                            {
                                RecursiveScanFoldersForAssets(path);
                            }
                        }
                    }
                }
            }
        }

        /// <summary>Recursively adds every SlotDataAsset found under <paramref name="path"/>.</summary>
        private void RecursiveScanFoldersForAssets(string path)
        {
            var assetFiles = System.IO.Directory.GetFiles(path, "*.asset");
            foreach (var assetFile in assetFiles)
            {
                var tempSlotDataAsset = AssetDatabase.LoadAssetAtPath(assetFile, typeof(SlotDataAsset)) as SlotDataAsset;
                if (tempSlotDataAsset)
                {
                    AddSlotDataAsset(tempSlotDataAsset);
                }
            }
            foreach (var subFolder in System.IO.Directory.GetDirectories(path))
            {
                RecursiveScanFoldersForAssets(subFolder.Replace('\\', '/'));
            }
        }

        public override void OnInspectorGUI()
        {
            m_Object.Update();
            GUILayout.Label("slotElementList", EditorStyles.boldLabel);
            SlotDataAsset[] slotElementList = GetSlotDataAssetArray();
            GUILayout.BeginHorizontal();
            if (GUILayout.Button("Order by Name"))
            {
                canUpdate = false;
                List<SlotDataAsset> SlotDataAssetTemp = new List<SlotDataAsset>();
                SlotDataAssetTemp.AddRange(slotElementList);
                // Make sure there's no invalid data
                for (int i = 0; i < SlotDataAssetTemp.Count; i++)
                {
                    if (SlotDataAssetTemp[i] == null)
                    {
                        SlotDataAssetTemp.RemoveAt(i);
                        i--;
                    }
                }
                SlotDataAssetTemp.Sort((x, y) => x.name.CompareTo(y.name));
                // BUGFIX: shrink the serialized array to the filtered count.
                // Previously, when null entries were removed above, the sorted
                // survivors were written to the front of the array but the size
                // was left unchanged, leaving stale duplicates at the tail.
                m_SlotDataAssetCount.intValue = SlotDataAssetTemp.Count;
                for (int i = 0; i < SlotDataAssetTemp.Count; i++)
                {
                    SetSlotDataAsset(i, SlotDataAssetTemp[i]);
                }
            }
            if (GUILayout.Button("Update List"))
            {
                isDirty = true;
                canUpdate = false;
            }
            if (GUILayout.Button("Remove Duplicates"))
            {
                // HashSet collapses duplicate references; order is not preserved.
                HashSet<SlotDataAsset> Slots = new HashSet<SlotDataAsset>();
                foreach (SlotDataAsset osa in slotElementList)
                {
                    Slots.Add(osa);
                }
                List<SlotDataAsset> sd = new List<SlotDataAsset>();
                sd.AddRange(Slots);
                m_SlotDataAssetCount.intValue = Slots.Count;
                for (int i = 0; i < sd.Count; i++)
                {
                    SetSlotDataAsset(i, sd[i]);
                }
                isDirty = true;
                canUpdate = false;
            }
            GUILayout.EndHorizontal();
            GUILayout.Space(20);
            Rect dropArea = GUILayoutUtility.GetRect(0.0f, 50.0f, GUILayout.ExpandWidth(true));
            GUI.Box(dropArea, "Drag Slots here");
            GUILayout.Space(20);
            for (int i = 0; i < m_SlotDataAssetCount.intValue; i++)
            {
                GUILayout.BeginHorizontal();
                SlotDataAsset result = EditorGUILayout.ObjectField(slotElementList[i], typeof(SlotDataAsset), true) as SlotDataAsset;
                if (GUI.changed && canUpdate)
                {
                    SetSlotDataAsset(i, result);
                }
                if (GUILayout.Button("-", GUILayout.Width(20.0f)))
                {
                    canUpdate = false;
                    RemoveSlotDataAssetAtIndex(i);
                }
                GUILayout.EndHorizontal();
                // After drawing the last row, re-enable writes and flush any
                // pending change into the library's lookup dictionary.
                if (i == m_SlotDataAssetCount.intValue - 1)
                {
                    canUpdate = true;
                    if (isDirty)
                    {
                        slotLibrary.UpdateDictionary();
                        isDirty = false;
                    }
                }
            }
            DropAreaGUI(dropArea);
            if (GUILayout.Button("Add SlotDataAsset"))
            {
                AddSlotDataAsset(null);
            }
            if (GUILayout.Button("Clear List"))
            {
                m_SlotDataAssetCount.intValue = 0;
            }
            if (GUILayout.Button("Remove Invalid Slot Data"))
            {
                RemoveInvalidSlotDataAsset(slotElementList);
            }
            m_Object.ApplyModifiedProperties();
        }

        /// <summary>Removes every null entry; iterates backwards so removals don't shift unvisited indices.</summary>
        private void RemoveInvalidSlotDataAsset(SlotDataAsset[] slotElementList)
        {
            for (int i = m_SlotDataAssetCount.intValue - 1; i >= 0; i--)
            {
                if (slotElementList[i] == null)
                {
                    RemoveSlotDataAssetAtIndex(i);
                }
            }
        }
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator 1.0.0.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.

namespace Microsoft.Azure.Management.Logic
{
    using Azure;
    using Management;
    using Rest;
    using Rest.Azure;
    using Rest.Azure.OData;
    using Models;
    using System.Threading;
    using System.Threading.Tasks;

    /// <summary>
    /// Extension methods for SessionsOperations.
    /// </summary>
    // NOTE(review): the non-Async overloads below are synchronous wrappers that
    // block on the async implementation via GetAwaiter().GetResult(); prefer
    // the *Async variants in async call paths to avoid thread-pool blocking.
    public static partial class SessionsOperationsExtensions
    {
        /// <summary>
        /// Gets a list of integration account sessions.
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='resourceGroupName'>
        /// The resource group name.
        /// </param>
        /// <param name='integrationAccountName'>
        /// The integration account name.
        /// </param>
        /// <param name='odataQuery'>
        /// OData parameters to apply to the operation.
        /// </param>
        /// <returns>The first page of integration account sessions.</returns>
        public static IPage<IntegrationAccountSession> ListByIntegrationAccounts(this ISessionsOperations operations, string resourceGroupName, string integrationAccountName, ODataQuery<IntegrationAccountSessionFilter> odataQuery = default(ODataQuery<IntegrationAccountSessionFilter>))
        {
            return operations.ListByIntegrationAccountsAsync(resourceGroupName, integrationAccountName, odataQuery).GetAwaiter().GetResult();
        }

        /// <summary>
        /// Gets a list of integration account sessions.
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='resourceGroupName'>
        /// The resource group name.
        /// </param>
        /// <param name='integrationAccountName'>
        /// The integration account name.
        /// </param>
        /// <param name='odataQuery'>
        /// OData parameters to apply to the operation.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <returns>The first page of integration account sessions.</returns>
        public static async Task<IPage<IntegrationAccountSession>> ListByIntegrationAccountsAsync(this ISessionsOperations operations, string resourceGroupName, string integrationAccountName, ODataQuery<IntegrationAccountSessionFilter> odataQuery = default(ODataQuery<IntegrationAccountSessionFilter>), CancellationToken cancellationToken = default(CancellationToken))
        {
            using (var _result = await operations.ListByIntegrationAccountsWithHttpMessagesAsync(resourceGroupName, integrationAccountName, odataQuery, null, cancellationToken).ConfigureAwait(false))
            {
                return _result.Body;
            }
        }

        /// <summary>
        /// Gets an integration account session.
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='resourceGroupName'>
        /// The resource group name.
        /// </param>
        /// <param name='integrationAccountName'>
        /// The integration account name.
        /// </param>
        /// <param name='sessionName'>
        /// The integration account session name.
        /// </param>
        /// <returns>The session, as returned by the service.</returns>
        public static object Get(this ISessionsOperations operations, string resourceGroupName, string integrationAccountName, string sessionName)
        {
            return operations.GetAsync(resourceGroupName, integrationAccountName, sessionName).GetAwaiter().GetResult();
        }

        /// <summary>
        /// Gets an integration account session.
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='resourceGroupName'>
        /// The resource group name.
        /// </param>
        /// <param name='integrationAccountName'>
        /// The integration account name.
        /// </param>
        /// <param name='sessionName'>
        /// The integration account session name.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <returns>The session, as returned by the service.</returns>
        public static async Task<object> GetAsync(this ISessionsOperations operations, string resourceGroupName, string integrationAccountName, string sessionName, CancellationToken cancellationToken = default(CancellationToken))
        {
            using (var _result = await operations.GetWithHttpMessagesAsync(resourceGroupName, integrationAccountName, sessionName, null, cancellationToken).ConfigureAwait(false))
            {
                return _result.Body;
            }
        }

        /// <summary>
        /// Creates or updates an integration account session.
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='resourceGroupName'>
        /// The resource group name.
        /// </param>
        /// <param name='integrationAccountName'>
        /// The integration account name.
        /// </param>
        /// <param name='sessionName'>
        /// The integration account session name.
        /// </param>
        /// <param name='session'>
        /// The integration account session.
        /// </param>
        /// <returns>The created or updated session.</returns>
        public static IntegrationAccountSession CreateOrUpdate(this ISessionsOperations operations, string resourceGroupName, string integrationAccountName, string sessionName, IntegrationAccountSession session)
        {
            return operations.CreateOrUpdateAsync(resourceGroupName, integrationAccountName, sessionName, session).GetAwaiter().GetResult();
        }

        /// <summary>
        /// Creates or updates an integration account session.
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='resourceGroupName'>
        /// The resource group name.
        /// </param>
        /// <param name='integrationAccountName'>
        /// The integration account name.
        /// </param>
        /// <param name='sessionName'>
        /// The integration account session name.
        /// </param>
        /// <param name='session'>
        /// The integration account session.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <returns>The created or updated session.</returns>
        public static async Task<IntegrationAccountSession> CreateOrUpdateAsync(this ISessionsOperations operations, string resourceGroupName, string integrationAccountName, string sessionName, IntegrationAccountSession session, CancellationToken cancellationToken = default(CancellationToken))
        {
            using (var _result = await operations.CreateOrUpdateWithHttpMessagesAsync(resourceGroupName, integrationAccountName, sessionName, session, null, cancellationToken).ConfigureAwait(false))
            {
                return _result.Body;
            }
        }

        /// <summary>
        /// Deletes an integration account session.
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='resourceGroupName'>
        /// The resource group name.
        /// </param>
        /// <param name='integrationAccountName'>
        /// The integration account name.
        /// </param>
        /// <param name='sessionName'>
        /// The integration account session name.
        /// </param>
        public static void Delete(this ISessionsOperations operations, string resourceGroupName, string integrationAccountName, string sessionName)
        {
            operations.DeleteAsync(resourceGroupName, integrationAccountName, sessionName).GetAwaiter().GetResult();
        }

        /// <summary>
        /// Deletes an integration account session.
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='resourceGroupName'>
        /// The resource group name.
        /// </param>
        /// <param name='integrationAccountName'>
        /// The integration account name.
        /// </param>
        /// <param name='sessionName'>
        /// The integration account session name.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        public static async Task DeleteAsync(this ISessionsOperations operations, string resourceGroupName, string integrationAccountName, string sessionName, CancellationToken cancellationToken = default(CancellationToken))
        {
            await operations.DeleteWithHttpMessagesAsync(resourceGroupName, integrationAccountName, sessionName, null, cancellationToken).ConfigureAwait(false);
        }

        /// <summary>
        /// Gets a list of integration account sessions.
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='nextPageLink'>
        /// The NextLink from the previous successful call to List operation.
        /// </param>
        /// <returns>The next page of integration account sessions.</returns>
        public static IPage<IntegrationAccountSession> ListByIntegrationAccountsNext(this ISessionsOperations operations, string nextPageLink)
        {
            return operations.ListByIntegrationAccountsNextAsync(nextPageLink).GetAwaiter().GetResult();
        }

        /// <summary>
        /// Gets a list of integration account sessions.
        /// </summary>
        /// <param name='operations'>
        /// The operations group for this extension method.
        /// </param>
        /// <param name='nextPageLink'>
        /// The NextLink from the previous successful call to List operation.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <returns>The next page of integration account sessions.</returns>
        public static async Task<IPage<IntegrationAccountSession>> ListByIntegrationAccountsNextAsync(this ISessionsOperations operations, string nextPageLink, CancellationToken cancellationToken = default(CancellationToken))
        {
            using (var _result = await operations.ListByIntegrationAccountsNextWithHttpMessagesAsync(nextPageLink, null, cancellationToken).ConfigureAwait(false))
            {
                return _result.Body;
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using Xunit;

namespace System.Numerics.Tests
{
    public class divremTest
    {
        // Number of random trials per scenario; fixed seed for reproducibility.
        private static int s_samples = 10;
        private static Random s_random = new Random(100);

        [Fact]
        public static void RunDivRem_TwoLargeBI()
        {
            // DivRem Method - Two Large BigIntegers
            for (int trial = 0; trial < s_samples; trial++)
            {
                byte[] dividend = GetRandomByteArray(s_random);
                byte[] divisor = GetRandomByteArray(s_random);
                VerifyDivRemString(Print(dividend) + Print(divisor) + "bDivRem");
            }
        }

        [Fact]
        public static void RunDivRem_TwoSmallBI()
        {
            // DivRem Method - Two Small BigIntegers
            for (int trial = 0; trial < s_samples; trial++)
            {
                byte[] dividend = GetRandomByteArray(s_random, 2);
                byte[] divisor = GetRandomByteArray(s_random, 2);
                VerifyDivRemString(Print(dividend) + Print(divisor) + "bDivRem");
            }
        }

        [Fact]
        public static void RunDivRem_OneSmallOneLargeBI()
        {
            // DivRem Method - One Large and one small BigIntegers
            for (int trial = 0; trial < s_samples; trial++)
            {
                byte[] large = GetRandomByteArray(s_random);
                byte[] small = GetRandomByteArray(s_random, 2);
                VerifyDivRemString(Print(large) + Print(small) + "bDivRem");

                small = GetRandomByteArray(s_random, 2);
                large = GetRandomByteArray(s_random);
                VerifyDivRemString(Print(small) + Print(large) + "bDivRem");
            }
        }

        [ConditionalFact(nameof(PlatformDetection) + "." + nameof(PlatformDetection.IsNotWindowsSubsystemForLinux))] // https://github.com/Microsoft/BashOnWindows/issues/513
        public static void RunDivRem_OneLargeOne0BI()
        {
            // DivRem Method - One Large BigIntegers and zero
            for (int trial = 0; trial < s_samples; trial++)
            {
                byte[] operand = GetRandomByteArray(s_random);
                byte[] zero = new byte[] { 0 };
                VerifyDivRemString(Print(operand) + Print(zero) + "bDivRem");

                // Reversed operand order divides by zero and must throw.
                Assert.Throws<DivideByZeroException>(() =>
                {
                    VerifyDivRemString(Print(zero) + Print(operand) + "bDivRem");
                });
            }
        }

        [ConditionalFact(nameof(PlatformDetection) + "." + nameof(PlatformDetection.IsNotWindowsSubsystemForLinux))] // https://github.com/Microsoft/BashOnWindows/issues/513
        public static void RunDivRem_OneSmallOne0BI()
        {
            // DivRem Method - One small BigIntegers and zero
            for (int trial = 0; trial < s_samples; trial++)
            {
                byte[] operand = GetRandomByteArray(s_random, 2);
                byte[] zero = new byte[] { 0 };
                VerifyDivRemString(Print(operand) + Print(zero) + "bDivRem");

                // Reversed operand order divides by zero and must throw.
                Assert.Throws<DivideByZeroException>(() =>
                {
                    VerifyDivRemString(Print(zero) + Print(operand) + "bDivRem");
                });
            }
        }

        [Fact]
        public static void Boundary()
        {
            // Check interesting cases for boundary conditions
            // You'll either be shifting a 0 or 1 across the boundary

            // 32 bit boundary n2=0
            VerifyDivRemString(Math.Pow(2, 32) + " 2 bDivRem");

            // 32 bit boundary n1=0 n2=1
            VerifyDivRemString(Math.Pow(2, 33) + " 2 bDivRem");
        }

        [ConditionalFact(nameof(PlatformDetection) + "." + nameof(PlatformDetection.IsNotWindowsSubsystemForLinux))] // https://github.com/Microsoft/BashOnWindows/issues/513
        public static void RunDivRemTests()
        {
            // Aggregate run covering every scenario above in one pass.

            // DivRem Method - Two Large BigIntegers
            for (int trial = 0; trial < s_samples; trial++)
            {
                byte[] a = GetRandomByteArray(s_random);
                byte[] b = GetRandomByteArray(s_random);
                VerifyDivRemString(Print(a) + Print(b) + "bDivRem");
            }

            // DivRem Method - Two Small BigIntegers
            for (int trial = 0; trial < s_samples; trial++)
            {
                byte[] a = GetRandomByteArray(s_random, 2);
                byte[] b = GetRandomByteArray(s_random, 2);
                VerifyDivRemString(Print(a) + Print(b) + "bDivRem");
            }

            // DivRem Method - One Large and one small BigIntegers
            for (int trial = 0; trial < s_samples; trial++)
            {
                byte[] a = GetRandomByteArray(s_random);
                byte[] b = GetRandomByteArray(s_random, 2);
                VerifyDivRemString(Print(a) + Print(b) + "bDivRem");

                a = GetRandomByteArray(s_random, 2);
                b = GetRandomByteArray(s_random);
                VerifyDivRemString(Print(a) + Print(b) + "bDivRem");
            }

            // DivRem Method - One Large BigIntegers and zero
            for (int trial = 0; trial < s_samples; trial++)
            {
                byte[] a = GetRandomByteArray(s_random);
                byte[] b = new byte[] { 0 };
                VerifyDivRemString(Print(a) + Print(b) + "bDivRem");

                Assert.Throws<DivideByZeroException>(() =>
                {
                    VerifyDivRemString(Print(b) + Print(a) + "bDivRem");
                });
            }

            // DivRem Method - One small BigIntegers and zero
            for (int trial = 0; trial < s_samples; trial++)
            {
                byte[] a = GetRandomByteArray(s_random, 2);
                byte[] b = new byte[] { 0 };
                VerifyDivRemString(Print(a) + Print(b) + "bDivRem");

                Assert.Throws<DivideByZeroException>(() =>
                {
                    VerifyDivRemString(Print(b) + Print(a) + "bDivRem");
                });
            }

            // Check interesting cases for boundary conditions
            // You'll either be shifting a 0 or 1 across the boundary

            // 32 bit boundary n2=0
            VerifyDivRemString(Math.Pow(2, 32) + " 2 bDivRem");

            // 32 bit boundary n1=0 n2=1
            VerifyDivRemString(Math.Pow(2, 33) + " 2 bDivRem");
        }

        // Drives the RPN stack calculator over the operation string and checks
        // the reference (snCalc) and test (myCalc) stacks agree step by step.
        private static void VerifyDivRemString(string opstring)
        {
            try
            {
                StackCalc calc = new StackCalc(opstring);
                while (calc.DoNextOperation())
                {
                    Assert.Equal(calc.snCalc.Peek().ToString(), calc.myCalc.Peek().ToString());
                    calc.VerifyOutParameter();
                }
            }
            catch (Exception e) when (!(e is DivideByZeroException))
            {
                // Log the original parameters, so we can reproduce any failure given the log
                throw new Exception($"VerifyDivRemString failed: {opstring} {e.ToString()}", e);
            }
        }

        // Random non-zero byte array of random length in [1, 100).
        private static byte[] GetRandomByteArray(Random random) =>
            GetRandomByteArray(random, random.Next(1, 100));

        private static byte[] GetRandomByteArray(Random random, int size) =>
            MyBigIntImp.GetNonZeroRandomByteArray(random, size);

        private static String Print(byte[] bytes) => MyBigIntImp.Print(bytes);
    }
}
using Microsoft.IdentityModel; using Microsoft.IdentityModel.S2S.Protocols.OAuth2; using Microsoft.IdentityModel.S2S.Tokens; using Microsoft.SharePoint.Client; using Microsoft.SharePoint.Client.EventReceivers; using System; using System.Collections.Generic; using System.Collections.ObjectModel; using System.Globalization; using System.IdentityModel.Selectors; using System.IdentityModel.Tokens; using System.IO; using System.Linq; using System.Net; using System.Security.Cryptography.X509Certificates; using System.Security.Principal; using System.ServiceModel; using System.Text; using System.Web; using System.Web.Configuration; using System.Web.Script.Serialization; using AudienceRestriction = Microsoft.IdentityModel.Tokens.AudienceRestriction; using AudienceUriValidationFailedException = Microsoft.IdentityModel.Tokens.AudienceUriValidationFailedException; using SecurityTokenHandlerConfiguration = Microsoft.IdentityModel.Tokens.SecurityTokenHandlerConfiguration; using X509SigningCredentials = Microsoft.IdentityModel.SecurityTokenService.X509SigningCredentials; namespace Provisioning.Cloud.Async.WebJob.Common { public static class TokenHelper { #region public fields /// <summary> /// SharePoint principal. /// </summary> public const string SharePointPrincipal = "00000003-0000-0ff1-ce00-000000000000"; /// <summary> /// Lifetime of HighTrust access token, 12 hours. /// </summary> public static readonly TimeSpan HighTrustAccessTokenLifetime = TimeSpan.FromHours(12.0); #endregion public fields #region public methods /// <summary> /// Retrieves the context token string from the specified request by looking for well-known parameter names in the /// POSTed form parameters and the querystring. Returns null if no context token is found. 
/// </summary> /// <param name="request">HttpRequest in which to look for a context token</param> /// <returns>The context token string</returns> public static string GetContextTokenFromRequest(HttpRequest request) { return GetContextTokenFromRequest(new HttpRequestWrapper(request)); } /// <summary> /// Retrieves the context token string from the specified request by looking for well-known parameter names in the /// POSTed form parameters and the querystring. Returns null if no context token is found. /// </summary> /// <param name="request">HttpRequest in which to look for a context token</param> /// <returns>The context token string</returns> public static string GetContextTokenFromRequest(HttpRequestBase request) { string[] paramNames = { "AppContext", "AppContextToken", "AccessToken", "SPAppToken" }; foreach (string paramName in paramNames) { if (!string.IsNullOrEmpty(request.Form[paramName])) { return request.Form[paramName]; } if (!string.IsNullOrEmpty(request.QueryString[paramName])) { return request.QueryString[paramName]; } } return null; } /// <summary> /// Validate that a specified context token string is intended for this application based on the parameters /// specified in web.config. Parameters used from web.config used for validation include ClientId, /// HostedAppHostNameOverride, HostedAppHostName, ClientSecret, and Realm (if it is specified). If HostedAppHostNameOverride is present, /// it will be used for validation. Otherwise, if the <paramref name="appHostName"/> is not /// null, it is used for validation instead of the web.config's HostedAppHostName. If the token is invalid, an /// exception is thrown. If the token is valid, TokenHelper's static STS metadata url is updated based on the token contents /// and a JsonWebSecurityToken based on the context token is returned. 
/// </summary> /// <param name="contextTokenString">The context token to validate</param> /// <param name="appHostName">The URL authority, consisting of Domain Name System (DNS) host name or IP address and the port number, to use for token audience validation. /// If null, HostedAppHostName web.config setting is used instead. HostedAppHostNameOverride web.config setting, if present, will be used /// for validation instead of <paramref name="appHostName"/> .</param> /// <returns>A JsonWebSecurityToken based on the context token.</returns> public static SharePointContextToken ReadAndValidateContextToken(string contextTokenString, string appHostName = null) { JsonWebSecurityTokenHandler tokenHandler = CreateJsonWebSecurityTokenHandler(); SecurityToken securityToken = tokenHandler.ReadToken(contextTokenString); JsonWebSecurityToken jsonToken = securityToken as JsonWebSecurityToken; SharePointContextToken token = SharePointContextToken.Create(jsonToken); string stsAuthority = (new Uri(token.SecurityTokenServiceUri)).Authority; int firstDot = stsAuthority.IndexOf('.'); GlobalEndPointPrefix = stsAuthority.Substring(0, firstDot); AcsHostUrl = stsAuthority.Substring(firstDot + 1); tokenHandler.ValidateToken(jsonToken); string[] acceptableAudiences; if (!String.IsNullOrEmpty(HostedAppHostNameOverride)) { acceptableAudiences = HostedAppHostNameOverride.Split(';'); } else if (appHostName == null) { acceptableAudiences = new[] { HostedAppHostName }; } else { acceptableAudiences = new[] { appHostName }; } bool validationSuccessful = false; string realm = Realm ?? 
token.Realm; foreach (var audience in acceptableAudiences) { string principal = GetFormattedPrincipal(ClientId, audience, realm); if (StringComparer.OrdinalIgnoreCase.Equals(token.Audience, principal)) { validationSuccessful = true; break; } } if (!validationSuccessful) { throw new AudienceUriValidationFailedException( String.Format(CultureInfo.CurrentCulture, "\"{0}\" is not the intended audience \"{1}\"", String.Join(";", acceptableAudiences), token.Audience)); } return token; } /// <summary> /// Retrieves an access token from ACS to call the source of the specified context token at the specified /// targetHost. The targetHost must be registered for the principal that sent the context token. /// </summary> /// <param name="contextToken">Context token issued by the intended access token audience</param> /// <param name="targetHost">Url authority of the target principal</param> /// <returns>An access token with an audience matching the context token's source</returns> public static OAuth2AccessTokenResponse GetAccessToken(SharePointContextToken contextToken, string targetHost) { string targetPrincipalName = contextToken.TargetPrincipalName; // Extract the refreshToken from the context token string refreshToken = contextToken.RefreshToken; if (String.IsNullOrEmpty(refreshToken)) { return null; } string targetRealm = Realm ?? contextToken.Realm; return GetAccessToken(refreshToken, targetPrincipalName, targetHost, targetRealm); } /// <summary> /// Uses the specified authorization code to retrieve an access token from ACS to call the specified principal /// at the specified targetHost. The targetHost must be registered for target principal. If specified realm is /// null, the "Realm" setting in web.config will be used instead. 
/// </summary> /// <param name="authorizationCode">Authorization code to exchange for access token</param> /// <param name="targetPrincipalName">Name of the target principal to retrieve an access token for</param> /// <param name="targetHost">Url authority of the target principal</param> /// <param name="targetRealm">Realm to use for the access token's nameid and audience</param> /// <param name="redirectUri">Redirect URI registerd for this app</param> /// <returns>An access token with an audience of the target principal</returns> public static OAuth2AccessTokenResponse GetAccessToken( string authorizationCode, string targetPrincipalName, string targetHost, string targetRealm, Uri redirectUri) { if (targetRealm == null) { targetRealm = Realm; } string resource = GetFormattedPrincipal(targetPrincipalName, targetHost, targetRealm); string clientId = GetFormattedPrincipal(ClientId, null, targetRealm); // Create request for token. The RedirectUri is null here. This will fail if redirect uri is registered OAuth2AccessTokenRequest oauth2Request = OAuth2MessageFactory.CreateAccessTokenRequestWithAuthorizationCode( clientId, ClientSecret, authorizationCode, redirectUri, resource); // Get token OAuth2S2SClient client = new OAuth2S2SClient(); OAuth2AccessTokenResponse oauth2Response; try { oauth2Response = client.Issue(AcsMetadataParser.GetStsUrl(targetRealm), oauth2Request) as OAuth2AccessTokenResponse; } catch (WebException wex) { using (StreamReader sr = new StreamReader(wex.Response.GetResponseStream())) { string responseText = sr.ReadToEnd(); throw new WebException(wex.Message + " - " + responseText, wex); } } return oauth2Response; } /// <summary> /// Uses the specified refresh token to retrieve an access token from ACS to call the specified principal /// at the specified targetHost. The targetHost must be registered for target principal. If specified realm is /// null, the "Realm" setting in web.config will be used instead. 
/// </summary> /// <param name="refreshToken">Refresh token to exchange for access token</param> /// <param name="targetPrincipalName">Name of the target principal to retrieve an access token for</param> /// <param name="targetHost">Url authority of the target principal</param> /// <param name="targetRealm">Realm to use for the access token's nameid and audience</param> /// <returns>An access token with an audience of the target principal</returns> public static OAuth2AccessTokenResponse GetAccessToken( string refreshToken, string targetPrincipalName, string targetHost, string targetRealm) { if (targetRealm == null) { targetRealm = Realm; } string resource = GetFormattedPrincipal(targetPrincipalName, targetHost, targetRealm); string clientId = GetFormattedPrincipal(ClientId, null, targetRealm); OAuth2AccessTokenRequest oauth2Request = OAuth2MessageFactory.CreateAccessTokenRequestWithRefreshToken(clientId, ClientSecret, refreshToken, resource); // Get token OAuth2S2SClient client = new OAuth2S2SClient(); OAuth2AccessTokenResponse oauth2Response; try { oauth2Response = client.Issue(AcsMetadataParser.GetStsUrl(targetRealm), oauth2Request) as OAuth2AccessTokenResponse; } catch (WebException wex) { using (StreamReader sr = new StreamReader(wex.Response.GetResponseStream())) { string responseText = sr.ReadToEnd(); throw new WebException(wex.Message + " - " + responseText, wex); } } return oauth2Response; } /// <summary> /// Retrieves an app-only access token from ACS to call the specified principal /// at the specified targetHost. The targetHost must be registered for target principal. If specified realm is /// null, the "Realm" setting in web.config will be used instead. 
/// </summary>
/// <param name="targetPrincipalName">Name of the target principal to retrieve an access token for</param>
/// <param name="targetHost">Url authority of the target principal</param>
/// <param name="targetRealm">Realm to use for the access token's nameid and audience</param>
/// <returns>An access token with an audience of the target principal</returns>
public static OAuth2AccessTokenResponse GetAppOnlyAccessToken(
    string targetPrincipalName,
    string targetHost,
    string targetRealm)
{
    // Default to the configured realm when the caller does not provide one.
    if (targetRealm == null)
    {
        targetRealm = Realm;
    }

    string tokenResource = GetFormattedPrincipal(targetPrincipalName, targetHost, targetRealm);
    // App-only identity includes the hosted app's host name in the client id.
    string formattedClientId = GetFormattedPrincipal(ClientId, HostedAppHostName, targetRealm);

    OAuth2AccessTokenRequest tokenRequest =
        OAuth2MessageFactory.CreateAccessTokenRequestWithClientCredentials(formattedClientId, ClientSecret, tokenResource);
    tokenRequest.Resource = tokenResource;

    // Request the app-only token from the STS endpoint for this realm.
    OAuth2S2SClient stsClient = new OAuth2S2SClient();
    OAuth2AccessTokenResponse tokenResponse;
    try
    {
        tokenResponse = stsClient.Issue(AcsMetadataParser.GetStsUrl(targetRealm), tokenRequest) as OAuth2AccessTokenResponse;
    }
    catch (WebException wex)
    {
        // Include the ACS response body in the rethrown exception for diagnosis.
        using (StreamReader reader = new StreamReader(wex.Response.GetResponseStream()))
        {
            string responseBody = reader.ReadToEnd();
            throw new WebException(wex.Message + " - " + responseBody, wex);
        }
    }

    return tokenResponse;
}

/// <summary>
/// Creates a client context based on the properties of a remote event receiver
/// </summary>
/// <param name="properties">Properties of a remote event receiver</param>
/// <returns>A ClientContext ready to call the web where the event originated</returns>
public static ClientContext CreateRemoteEventReceiverClientContext(SPRemoteEventProperties properties)
{
    // Determine the originating web from whichever event property set is populated.
    Uri sharepointUrl;
    if (properties.ListEventProperties != null)
    {
        sharepointUrl = new Uri(properties.ListEventProperties.WebUrl);
    }
    else if (properties.ItemEventProperties != null)
    {
        sharepointUrl = new Uri(properties.ItemEventProperties.WebUrl);
    }
    else if (properties.WebEventProperties != null)
    {
        sharepointUrl = new Uri(properties.WebEventProperties.FullUrl);
    }
    else
    {
        // No recognizable event payload: nothing to build a context for.
        return null;
    }

    if (IsHighTrustApp())
    {
        return GetS2SClientContextWithWindowsIdentity(sharepointUrl, null);
    }

    return CreateAcsClientContextForUrl(properties, sharepointUrl);
}

/// <summary>
/// Creates a client context based on the properties of an app event
/// </summary>
/// <param name="properties">Properties of an app event</param>
/// <param name="useAppWeb">True to target the app web, false to target the host web</param>
/// <returns>A ClientContext ready to call the app web or the parent web</returns>
public static ClientContext CreateAppEventClientContext(SPRemoteEventProperties properties, bool useAppWeb)
{
    if (properties.AppEventProperties == null)
    {
        return null;
    }

    Uri sharepointUrl = useAppWeb
        ? properties.AppEventProperties.AppWebFullUrl
        : properties.AppEventProperties.HostWebFullUrl;

    if (IsHighTrustApp())
    {
        return GetS2SClientContextWithWindowsIdentity(sharepointUrl, null);
    }

    return CreateAcsClientContextForUrl(properties, sharepointUrl);
}

/// <summary>
/// Retrieves an access token from ACS using the specified authorization code, and uses that access token to
/// create a client context
/// </summary>
/// <param name="targetUrl">Url of the target SharePoint site</param>
/// <param name="authorizationCode">Authorization code to use when retrieving the access token from ACS</param>
/// <param name="redirectUri">Redirect URI registered for this app</param>
/// <returns>A ClientContext ready to call targetUrl with a valid access token</returns>
public static ClientContext GetClientContextWithAuthorizationCode(
    string targetUrl,
    string authorizationCode,
    Uri redirectUri)
{
    // Delegate to the full overload, discovering the realm from the target site.
    return GetClientContextWithAuthorizationCode(
        targetUrl,
        SharePointPrincipal,
        authorizationCode,
        GetRealmFromTargetUrl(new Uri(targetUrl)),
        redirectUri);
}

/// <summary>
/// Retrieves an access token from ACS using the specified authorization code, and uses that access token to
/// create a client context
/// </summary>
/// <param name="targetUrl">Url of the target SharePoint site</param>
/// <param name="targetPrincipalName">Name of the target SharePoint principal</param>
/// <param name="authorizationCode">Authorization code to use when retrieving the access token from ACS</param>
/// <param name="targetRealm">Realm to use for the access token's nameid and audience</param>
/// <param name="redirectUri">Redirect URI registered for this app</param>
/// <returns>A ClientContext ready to call targetUrl with a valid access token</returns>
public static ClientContext GetClientContextWithAuthorizationCode(
    string targetUrl,
    string targetPrincipalName,
    string authorizationCode,
    string targetRealm,
    Uri redirectUri)
{
    Uri targetUri = new Uri(targetUrl);
    string accessToken =
        GetAccessToken(authorizationCode, targetPrincipalName, targetUri.Authority, targetRealm, redirectUri).AccessToken;
    return GetClientContextWithAccessToken(targetUrl, accessToken);
}

/// <summary>
/// Uses the specified access token to create a client context
/// </summary>
/// <param name="targetUrl">Url of the target SharePoint site</param>
/// <param name="accessToken">Access token to be used when calling the specified targetUrl</param>
/// <returns>A ClientContext ready to call targetUrl with the specified access token</returns>
public static ClientContext GetClientContextWithAccessToken(string targetUrl, string accessToken)
{
    ClientContext clientContext = new ClientContext(targetUrl);

    // Disable default SharePoint auth: the bearer token carries the identity.
    clientContext.AuthenticationMode = ClientAuthenticationMode.Anonymous;
    clientContext.FormDigestHandlingEnabled = false;

    // Attach the bearer token to every outgoing CSOM request.
    clientContext.ExecutingWebRequest += delegate(object oSender, WebRequestEventArgs webRequestEventArgs)
    {
        webRequestEventArgs.WebRequestExecutor.RequestHeaders["Authorization"] = "Bearer " + accessToken;
    };

    return clientContext;
}

/// <summary>
/// Retrieves an access token from ACS using the specified context token, and uses that access token to create
/// a client context
/// </summary>
/// <param name="targetUrl">Url of the target SharePoint site</param>
/// <param name="contextTokenString">Context token received from the target SharePoint site</param>
/// <param name="appHostUrl">Url authority of the hosted app. If this is null, the value in the HostedAppHostName
/// of web.config will be used instead</param>
/// <returns>A ClientContext ready to call targetUrl with a valid access token</returns>
public static ClientContext GetClientContextWithContextToken(
    string targetUrl,
    string contextTokenString,
    string appHostUrl)
{
    // Validate the context token before redeeming its refresh token for access.
    SharePointContextToken contextToken = ReadAndValidateContextToken(contextTokenString, appHostUrl);
    Uri targetUri = new Uri(targetUrl);
    string accessToken = GetAccessToken(contextToken, targetUri.Authority).AccessToken;
    return GetClientContextWithAccessToken(targetUrl, accessToken);
}

/// <summary>
/// Returns the SharePoint url to which the app should redirect the browser to request consent and get back
/// an authorization code.
/// </summary>
/// <param name="contextUrl">Absolute Url of the SharePoint site</param>
/// <param name="scope">Space-delimited permissions to request from the SharePoint site in "shorthand" format
/// (e.g. "Web.Read Site.Write")</param>
/// <returns>Url of the SharePoint site's OAuth authorization page</returns>
public static string GetAuthorizationUrl(string contextUrl, string scope)
{
    return string.Format(
        "{0}{1}?IsDlg=1&client_id={2}&scope={3}&response_type=code",
        EnsureTrailingSlash(contextUrl),
        AuthorizationPage,
        ClientId,
        scope);
}

/// <summary>
/// Returns the SharePoint url to which the app should redirect the browser to request consent and get back
/// an authorization code.
/// </summary>
/// <param name="contextUrl">Absolute Url of the SharePoint site</param>
/// <param name="scope">Space-delimited permissions to request from the SharePoint site in "shorthand" format
/// (e.g. "Web.Read Site.Write")</param>
/// <param name="redirectUri">Uri to which SharePoint should redirect the browser to after consent is
/// granted</param>
/// <returns>Url of the SharePoint site's OAuth authorization page</returns>
public static string GetAuthorizationUrl(string contextUrl, string scope, string redirectUri)
{
    return string.Format(
        "{0}{1}?IsDlg=1&client_id={2}&scope={3}&response_type=code&redirect_uri={4}",
        EnsureTrailingSlash(contextUrl),
        AuthorizationPage,
        ClientId,
        scope,
        redirectUri);
}

/// <summary>
/// Returns the SharePoint url to which the app should redirect the browser to request a new context token.
/// </summary>
/// <param name="contextUrl">Absolute Url of the SharePoint site</param>
/// <param name="redirectUri">Uri to which SharePoint should redirect the browser to with a context token</param>
/// <returns>Url of the SharePoint site's context token redirect page</returns>
public static string GetAppContextTokenRequestUrl(string contextUrl, string redirectUri)
{
    return string.Format(
        "{0}{1}?client_id={2}&redirect_uri={3}",
        EnsureTrailingSlash(contextUrl),
        RedirectPage,
        ClientId,
        redirectUri);
}

/// <summary>
/// Retrieves an S2S access token signed by the application's private certificate on behalf of the specified
/// WindowsIdentity and intended for the SharePoint at the targetApplicationUri. If no Realm is specified in
/// web.config, an auth challenge will be issued to the targetApplicationUri to discover it.
/// </summary>
/// <param name="targetApplicationUri">Url of the target SharePoint site</param>
/// <param name="identity">Windows identity of the user on whose behalf to create the access token</param>
/// <returns>An access token with an audience of the target principal</returns>
public static string GetS2SAccessTokenWithWindowsIdentity(
    Uri targetApplicationUri,
    WindowsIdentity identity)
{
    // Discover the realm via an auth challenge when none is configured.
    string realm = string.IsNullOrEmpty(Realm) ? GetRealmFromTargetUrl(targetApplicationUri) : Realm;

    // A null identity produces an app-only token (no user claims).
    JsonWebTokenClaim[] claims = identity != null ? GetClaimsWithWindowsIdentity(identity) : null;

    return GetS2SAccessTokenWithClaims(targetApplicationUri.Authority, realm, claims);
}

/// <summary>
/// Retrieves an S2S client context with an access token signed by the application's private certificate on
/// behalf of the specified WindowsIdentity and intended for application at the targetApplicationUri using the
/// targetRealm. If no Realm is specified in web.config, an auth challenge will be issued to the
/// targetApplicationUri to discover it.
/// </summary>
/// <param name="targetApplicationUri">Url of the target SharePoint site</param>
/// <param name="identity">Windows identity of the user on whose behalf to create the access token</param>
/// <returns>A ClientContext using an access token with an audience of the target application</returns>
public static ClientContext GetS2SClientContextWithWindowsIdentity(
    Uri targetApplicationUri,
    WindowsIdentity identity)
{
    // Discover the realm via an auth challenge when none is configured.
    string realm = string.IsNullOrEmpty(Realm) ? GetRealmFromTargetUrl(targetApplicationUri) : Realm;

    // A null identity produces an app-only token (no user claims).
    JsonWebTokenClaim[] claims = identity != null ? GetClaimsWithWindowsIdentity(identity) : null;

    string accessToken = GetS2SAccessTokenWithClaims(targetApplicationUri.Authority, realm, claims);
    return GetClientContextWithAccessToken(targetApplicationUri.ToString(), accessToken);
}

/// <summary>
/// Get authentication realm from SharePoint
/// </summary>
/// <param name="targetApplicationUri">Url of the target SharePoint site</param>
/// <returns>String representation of the realm GUID</returns>
public static string GetRealmFromTargetUrl(Uri targetApplicationUri)
{
    // Issue an intentionally unauthorized request; SharePoint replies with a
    // WWW-Authenticate challenge that embeds the realm GUID.
    WebRequest request = WebRequest.Create(targetApplicationUri + "/_vti_bin/client.svc");
    request.Headers.Add("Authorization: Bearer ");

    try
    {
        using (request.GetResponse())
        {
        }
    }
    catch (WebException e)
    {
        if (e.Response == null)
        {
            return null;
        }

        string bearerResponseHeader = e.Response.Headers["WWW-Authenticate"];
        if (string.IsNullOrEmpty(bearerResponseHeader))
        {
            return null;
        }

        const string bearer = "Bearer realm=\"";
        int bearerIndex = bearerResponseHeader.IndexOf(bearer, StringComparison.Ordinal);
        if (bearerIndex < 0)
        {
            return null;
        }

        // The realm is the 36-character GUID immediately after the marker.
        int realmIndex = bearerIndex + bearer.Length;
        if (bearerResponseHeader.Length >= realmIndex + 36)
        {
            string targetRealm = bearerResponseHeader.Substring(realmIndex, 36);
            Guid realmGuid;
            if (Guid.TryParse(targetRealm, out realmGuid))
            {
                return targetRealm;
            }
        }
    }

    // No challenge (request unexpectedly succeeded) or unparsable header.
    return null;
}

/// <summary>
/// Determines if this is a high trust app.
/// </summary>
/// <returns>True if this is a high trust app.</returns>
public static bool IsHighTrustApp()
{
    // High-trust apps sign their own tokens with a local certificate.
    return SigningCredentials != null;
}

/// <summary>
/// Ensures that the specified URL ends with '/' if it is not null or empty.
/// </summary>
/// <param name="url">The url.</param>
/// <returns>The url ending with '/' if it is not null or empty.</returns>
public static string EnsureTrailingSlash(string url)
{
    // Append a slash only when the url is non-empty and does not already end with one.
    if (!string.IsNullOrEmpty(url) && url[url.Length - 1] != '/')
    {
        return url + "/";
    }

    return url;
}

#endregion

#region private fields

//
// Configuration Constants
//

private const string AuthorizationPage = "_layouts/15/OAuthAuthorize.aspx";
private const string RedirectPage = "_layouts/15/AppRedirect.aspx";
private const string AcsPrincipalName = "00000001-0000-0000-c000-000000000000";
private const string AcsMetadataEndPointRelativeUrl = "metadata/json/1";
private const string S2SProtocol = "OAuth2";
private const string DelegationIssuance = "DelegationIssuance1.0";
private const string NameIdentifierClaimType = JsonWebTokenConstants.ReservedClaims.NameIdentifier;
private const string TrustedForImpersonationClaimType = "trustedfordelegation";
private const string ActorTokenClaimType = JsonWebTokenConstants.ReservedClaims.ActorToken;

//
// Environment Constants
//

private static string GlobalEndPointPrefix = "accounts";
private static string AcsHostUrl = "accesscontrol.windows.net";

//
// Hosted app configuration (read once from web.config app settings).
//

// "ClientId" with legacy fallback to "HostedAppName".
private static readonly string ClientId =
    string.IsNullOrEmpty(WebConfigurationManager.AppSettings.Get("ClientId"))
        ? WebConfigurationManager.AppSettings.Get("HostedAppName")
        : WebConfigurationManager.AppSettings.Get("ClientId");

// "IssuerId" defaults to the client id when not configured separately.
private static readonly string IssuerId =
    string.IsNullOrEmpty(WebConfigurationManager.AppSettings.Get("IssuerId"))
        ? ClientId
        : WebConfigurationManager.AppSettings.Get("IssuerId");

private static readonly string HostedAppHostNameOverride =
    WebConfigurationManager.AppSettings.Get("HostedAppHostNameOverride");

private static readonly string HostedAppHostName =
    WebConfigurationManager.AppSettings.Get("HostedAppHostName");

// "ClientSecret" with legacy fallback to "HostedAppSigningKey".
private static readonly string ClientSecret =
    string.IsNullOrEmpty(WebConfigurationManager.AppSettings.Get("ClientSecret"))
        ? WebConfigurationManager.AppSettings.Get("HostedAppSigningKey")
        : WebConfigurationManager.AppSettings.Get("ClientSecret");

private static readonly string SecondaryClientSecret =
    WebConfigurationManager.AppSettings.Get("SecondaryClientSecret");

private static readonly string Realm = WebConfigurationManager.AppSettings.Get("Realm");

// NOTE: intentionally reads the same "Realm" app setting as Realm above; the service
// namespace and realm coincide for ACS-registered apps.
private static readonly string ServiceNamespace = WebConfigurationManager.AppSettings.Get("Realm");

private static readonly string ClientSigningCertificatePath =
    WebConfigurationManager.AppSettings.Get("ClientSigningCertificatePath");

private static readonly string ClientSigningCertificatePassword =
    WebConfigurationManager.AppSettings.Get("ClientSigningCertificatePassword");

// Certificate is only loaded when both path and password are configured (high-trust setup).
private static readonly X509Certificate2 ClientCertificate =
    (string.IsNullOrEmpty(ClientSigningCertificatePath) || string.IsNullOrEmpty(ClientSigningCertificatePassword))
        ? null
        : new X509Certificate2(ClientSigningCertificatePath, ClientSigningCertificatePassword);

// Non-null only for high-trust apps; IsHighTrustApp() keys off this field.
private static readonly X509SigningCredentials SigningCredentials =
    (ClientCertificate == null)
        ? null
        : new X509SigningCredentials(ClientCertificate, SecurityAlgorithms.RsaSha256Signature, SecurityAlgorithms.Sha256Digest);

#endregion

#region private methods

/// <summary>
/// Builds a ClientContext for the given SharePoint url from the context token carried
/// in the remote event properties. Returns null when no context token is present.
/// </summary>
private static ClientContext CreateAcsClientContextForUrl(SPRemoteEventProperties properties, Uri sharepointUrl)
{
    string contextTokenString = properties.ContextToken;

    if (String.IsNullOrEmpty(contextTokenString))
    {
        return null;
    }

    // Validate against the host the WCF message was addressed to.
    SharePointContextToken contextToken = ReadAndValidateContextToken(
        contextTokenString, OperationContext.Current.IncomingMessageHeaders.To.Host);
    string accessToken = GetAccessToken(contextToken, sharepointUrl.Authority).AccessToken;

    return GetClientContextWithAccessToken(sharepointUrl.ToString(), accessToken);
}

/// <summary>
/// Returns the ACS metadata endpoint url for the global endpoint.
/// </summary>
private static string GetAcsMetadataEndpointUrl()
{
    // FIX: the original used Path.Combine here, but Path.Combine composes filesystem
    // paths, not URLs (on non-Windows platforms it can produce the wrong separator).
    // GetAcsGlobalEndpointUrl() always ends with '/', so simple concatenation yields
    // the same, correct URL on every platform.
    return GetAcsGlobalEndpointUrl() + AcsMetadataEndPointRelativeUrl;
}

/// <summary>
/// Formats a principal as "name/host@realm", or "name@realm" when no host is given.
/// </summary>
private static string GetFormattedPrincipal(string principalName, string hostName, string realm)
{
    if (!String.IsNullOrEmpty(hostName))
    {
        return String.Format(CultureInfo.InvariantCulture, "{0}/{1}@{2}", principalName, hostName, realm);
    }

    return String.Format(CultureInfo.InvariantCulture, "{0}@{1}", principalName, realm);
}

/// <summary>
/// Formats the ACS principal name for the specified realm.
/// </summary>
private static string GetAcsPrincipalName(string realm)
{
    return GetFormattedPrincipal(AcsPrincipalName, new Uri(GetAcsGlobalEndpointUrl()).Host, realm);
}

/// <summary>
/// Returns the global ACS endpoint url, always with a trailing slash.
/// </summary>
private static string GetAcsGlobalEndpointUrl()
{
    return String.Format(CultureInfo.InvariantCulture, "https://{0}.{1}/", GlobalEndPointPrefix, AcsHostUrl);
}

/// <summary>
/// Creates a token handler configured to validate context tokens issued by ACS:
/// audience checking disabled, symmetric keys taken from the configured client secrets.
/// </summary>
private static JsonWebSecurityTokenHandler CreateJsonWebSecurityTokenHandler()
{
    JsonWebSecurityTokenHandler handler = new JsonWebSecurityTokenHandler();
    handler.Configuration = new SecurityTokenHandlerConfiguration();
    handler.Configuration.AudienceRestriction = new AudienceRestriction(AudienceUriMode.Never);
    handler.Configuration.CertificateValidator = X509CertificateValidator.None;

    // Accept tokens signed with the primary secret, and the secondary one if configured
    // (supports secret rollover).
    List<byte[]> securityKeys = new List<byte[]>();
    securityKeys.Add(Convert.FromBase64String(ClientSecret));
    if
(!string.IsNullOrEmpty(SecondaryClientSecret))
    {
        securityKeys.Add(Convert.FromBase64String(SecondaryClientSecret));
    }

    // Wrap all accepted keys in a single multi-key token for the resolver.
    List<SecurityToken> securityTokens = new List<SecurityToken>();
    securityTokens.Add(new MultipleSymmetricKeySecurityToken(securityKeys));

    handler.Configuration.IssuerTokenResolver =
        SecurityTokenResolver.CreateDefaultSecurityTokenResolver(
            new ReadOnlyCollection<SecurityToken>(securityTokens),
            false);

    // Map each accepted key to the ACS principal for the configured service namespace.
    SymmetricKeyIssuerNameRegistry issuerNameRegistry = new SymmetricKeyIssuerNameRegistry();
    foreach (byte[] securitykey in securityKeys)
    {
        issuerNameRegistry.AddTrustedIssuer(securitykey, GetAcsPrincipalName(ServiceNamespace));
    }
    handler.Configuration.IssuerNameRegistry = issuerNameRegistry;

    return handler;
}

/// <summary>
/// Issues a self-signed S2S token for the SharePoint principal at the given host,
/// carrying the supplied claims (app-only when claims is null).
/// </summary>
private static string GetS2SAccessTokenWithClaims(
    string targetApplicationHostName,
    string targetRealm,
    IEnumerable<JsonWebTokenClaim> claims)
{
    return IssueToken(
        ClientId,
        IssuerId,
        targetRealm,
        SharePointPrincipal,
        targetRealm,
        targetApplicationHostName,
        true,
        claims,
        claims == null);
}

/// <summary>
/// Builds the claim set identifying a Windows user: the SID as nameid plus the
/// Active Directory identity-provider marker.
/// </summary>
private static JsonWebTokenClaim[] GetClaimsWithWindowsIdentity(WindowsIdentity identity)
{
    // NOTE(review): ToLower() is culture-sensitive; SID strings are ASCII so this is
    // benign in practice, but ToLowerInvariant would be the stricter choice — confirm
    // before changing, since token consumers compare the emitted value.
    JsonWebTokenClaim[] claims = new JsonWebTokenClaim[]
    {
        new JsonWebTokenClaim(NameIdentifierClaimType, identity.User.Value.ToLower()),
        new JsonWebTokenClaim("nii", "urn:office:idp:activedirectory")
    };
    return claims;
}

/// <summary>
/// Issues a high-trust (S2S) access token: an actor token signed with the app's
/// certificate and, unless appOnly, an outer token wrapping the user claims.
/// </summary>
private static string IssueToken(
    string sourceApplication,
    string issuerApplication,
    string sourceRealm,
    string targetApplication,
    string targetRealm,
    string targetApplicationHostName,
    bool trustedForDelegation,
    IEnumerable<JsonWebTokenClaim> claims,
    bool appOnly = false)
{
    if (null == SigningCredentials)
    {
        throw new InvalidOperationException("SigningCredentials was not initialized");
    }

    #region Actor token

    // Qualify issuer and nameid with the source realm when one is present.
    string issuer = string.IsNullOrEmpty(sourceRealm)
        ? issuerApplication
        : string.Format("{0}@{1}", issuerApplication, sourceRealm);
    string nameid = string.IsNullOrEmpty(sourceRealm)
        ? sourceApplication
        : string.Format("{0}@{1}", sourceApplication, sourceRealm);
    string audience = string.Format("{0}/{1}@{2}", targetApplication, targetApplicationHostName, targetRealm);

    List<JsonWebTokenClaim> actorClaims = new List<JsonWebTokenClaim>();
    actorClaims.Add(new JsonWebTokenClaim(JsonWebTokenConstants.ReservedClaims.NameIdentifier, nameid));
    if (trustedForDelegation && !appOnly)
    {
        actorClaims.Add(new JsonWebTokenClaim(TrustedForImpersonationClaimType, "true"));
    }

    // Create and sign the actor token with the app certificate.
    JsonWebSecurityToken actorToken = new JsonWebSecurityToken(
        issuer: issuer,
        audience: audience,
        validFrom: DateTime.UtcNow,
        validTo: DateTime.UtcNow.Add(HighTrustAccessTokenLifetime),
        signingCredentials: SigningCredentials,
        claims: actorClaims);

    string actorTokenString = new JsonWebSecurityTokenHandler().WriteTokenAsString(actorToken);

    if (appOnly)
    {
        // App-only token is the same as actor token for delegated case
        return actorTokenString;
    }

    #endregion Actor token

    #region Outer token

    // The outer (unsigned) token carries the user claims plus the actor token.
    List<JsonWebTokenClaim> outerClaims = null == claims
        ? new List<JsonWebTokenClaim>()
        : new List<JsonWebTokenClaim>(claims);
    outerClaims.Add(new JsonWebTokenClaim(ActorTokenClaimType, actorTokenString));

    JsonWebSecurityToken jsonToken = new JsonWebSecurityToken(
        nameid, // outer token issuer should match actor token nameid
        audience,
        DateTime.UtcNow,
        DateTime.UtcNow.Add(HighTrustAccessTokenLifetime),
        outerClaims);

    string accessToken = new JsonWebSecurityTokenHandler().WriteTokenAsString(jsonToken);

    #endregion Outer token

    return accessToken;
}

#endregion

#region AcsMetadataParser

// This class is used to get MetaData document from the global STS endpoint. It contains
// methods to parse the MetaData document and get endpoints and STS certificate.
public static class AcsMetadataParser
{
    /// <summary>
    /// Returns the ACS signing certificate published in the metadata document for the realm.
    /// </summary>
    public static X509Certificate2 GetAcsSigningCert(string realm)
    {
        JsonMetadataDocument document = GetMetadataDocument(realm);

        // The signing certificate is the first published key, when present.
        if (null != document.keys && document.keys.Count > 0)
        {
            JsonKey signingKey = document.keys[0];
            if (null != signingKey && null != signingKey.keyValue)
            {
                return new X509Certificate2(Encoding.UTF8.GetBytes(signingKey.keyValue.value));
            }
        }

        throw new Exception("Metadata document does not contain ACS signing certificate.");
    }

    /// <summary>
    /// Returns the delegation issuance endpoint url for the realm.
    /// </summary>
    public static string GetDelegationServiceUrl(string realm)
    {
        JsonMetadataDocument document = GetMetadataDocument(realm);
        JsonEndpoint delegationEndpoint = document.endpoints.SingleOrDefault(e => e.protocol == DelegationIssuance);

        if (null != delegationEndpoint)
        {
            return delegationEndpoint.location;
        }

        throw new Exception("Metadata document does not contain Delegation Service endpoint Url");
    }

    /// <summary>
    /// Downloads and deserializes the ACS metadata document for the realm.
    /// </summary>
    private static JsonMetadataDocument GetMetadataDocument(string realm)
    {
        string acsMetadataEndpointUrlWithRealm = String.Format(
            CultureInfo.InvariantCulture,
            "{0}?realm={1}",
            GetAcsMetadataEndpointUrl(),
            realm);

        byte[] acsMetadata;
        using (WebClient webClient = new WebClient())
        {
            acsMetadata = webClient.DownloadData(acsMetadataEndpointUrlWithRealm);
        }

        string jsonResponseString = Encoding.UTF8.GetString(acsMetadata);
        JavaScriptSerializer serializer = new JavaScriptSerializer();
        JsonMetadataDocument document = serializer.Deserialize<JsonMetadataDocument>(jsonResponseString);

        if (null == document)
        {
            throw new Exception("No metadata document found at the global endpoint " + acsMetadataEndpointUrlWithRealm);
        }

        return document;
    }

    /// <summary>
    /// Returns the OAuth2 STS endpoint url for the realm.
    /// </summary>
    public static string GetStsUrl(string realm)
    {
        JsonMetadataDocument document = GetMetadataDocument(realm);
        JsonEndpoint s2sEndpoint = document.endpoints.SingleOrDefault(e => e.protocol == S2SProtocol);

        if (null != s2sEndpoint)
        {
            return s2sEndpoint.location;
        }

        throw new Exception("Metadata document does not contain STS endpoint url");
    }

    // DTOs mirroring the ACS metadata JSON; property names must match the wire format.
    private class JsonMetadataDocument
    {
        public string serviceName { get; set; }
        public List<JsonEndpoint> endpoints { get; set; }
        public List<JsonKey> keys { get; set; }
    }

    private class JsonEndpoint
    {
        public string location { get; set; }
        public string protocol { get; set; }
        public string usage { get; set; }
    }

    private class JsonKeyValue
    {
        public string type { get; set; }
        public string value { get; set; }
    }

    private class JsonKey
    {
        public string usage { get; set; }
        public JsonKeyValue keyValue { get; set; }
    }
}

#endregion

}

/// <summary>
/// A JsonWebSecurityToken generated by SharePoint to authenticate to a 3rd party application and allow callbacks using a refresh token
/// </summary>
public class SharePointContextToken : JsonWebSecurityToken
{
    /// <summary>
    /// Wraps an already-validated JsonWebSecurityToken as a SharePointContextToken.
    /// </summary>
    public static SharePointContextToken Create(JsonWebSecurityToken contextToken)
    {
        return new SharePointContextToken(
            contextToken.Issuer,
            contextToken.Audience,
            contextToken.ValidFrom,
            contextToken.ValidTo,
            contextToken.Claims);
    }

    public SharePointContextToken(
        string issuer, string audience, DateTime validFrom, DateTime validTo,
        IEnumerable<JsonWebTokenClaim> claims)
        : base(issuer, audience, validFrom, validTo, claims)
    {
    }

    public SharePointContextToken(
        string issuer, string audience, DateTime validFrom, DateTime validTo,
        IEnumerable<JsonWebTokenClaim> claims, SecurityToken issuerToken, JsonWebSecurityToken actorToken)
        : base(issuer, audience, validFrom, validTo, claims, issuerToken, actorToken)
    {
    }

    public SharePointContextToken(
        string issuer, string audience, DateTime validFrom, DateTime validTo,
        IEnumerable<JsonWebTokenClaim> claims, SigningCredentials signingCredentials)
        : base(issuer, audience, validFrom, validTo, claims, signingCredentials)
    {
    }

    /// <summary>
    /// The context token's "nameid" claim.
    /// </summary>
    public string NameId
    {
        get
        {
            return GetClaimValue(this, "nameid");
        }
    }

    /// <summary>
    /// The principal name portion of the context token's "appctxsender" claim
    /// </summary>
    public string TargetPrincipalName
    {
        get
        {
            string appctxsender = GetClaimValue(this, "appctxsender");
            if (appctxsender == null)
            {
                return null;
            }
            // Claim format is "principal@realm"; keep only the principal part.
            return
appctxsender.Split('@')[0];
        }
    }

    /// <summary>
    /// The context token's "refreshtoken" claim
    /// </summary>
    public string RefreshToken
    {
        get
        {
            return GetClaimValue(this, "refreshtoken");
        }
    }

    /// <summary>
    /// The context token's "CacheKey" claim
    /// </summary>
    public string CacheKey
    {
        get
        {
            string appctx = GetClaimValue(this, "appctx");
            if (appctx == null)
            {
                return null;
            }

            // The "appctx" claim is a JSON blob; a throwaway ClientContext is used
            // purely for its JSON parser. The url is a dummy and is never contacted.
            ClientContext ctx = new ClientContext("http://tempuri.org");
            Dictionary<string, object> dict = (Dictionary<string, object>)ctx.ParseObjectFromJsonString(appctx);
            string cacheKey = (string)dict["CacheKey"];

            return cacheKey;
        }
    }

    /// <summary>
    /// The context token's "SecurityTokenServiceUri" claim
    /// </summary>
    public string SecurityTokenServiceUri
    {
        get
        {
            string appctx = GetClaimValue(this, "appctx");
            if (appctx == null)
            {
                return null;
            }

            // Same JSON-parsing trick as CacheKey; the url is a dummy.
            ClientContext ctx = new ClientContext("http://tempuri.org");
            Dictionary<string, object> dict = (Dictionary<string, object>)ctx.ParseObjectFromJsonString(appctx);
            string securityTokenServiceUri = (string)dict["SecurityTokenServiceUri"];

            return securityTokenServiceUri;
        }
    }

    /// <summary>
    /// The realm portion of the context token's "audience" claim
    /// </summary>
    public string Realm
    {
        get
        {
            string aud = Audience;
            if (aud == null)
            {
                return null;
            }

            // Audience format is "principal/host@realm"; keep what follows '@'.
            string tokenRealm = aud.Substring(aud.IndexOf('@') + 1);

            return tokenRealm;
        }
    }

    /// <summary>
    /// Returns the value of the first claim of the given type, or null when absent.
    /// </summary>
    private static string GetClaimValue(JsonWebSecurityToken token, string claimType)
    {
        if (token == null)
        {
            throw new ArgumentNullException("token");
        }

        foreach (JsonWebTokenClaim claim in token.Claims)
        {
            if (StringComparer.Ordinal.Equals(claim.ClaimType, claimType))
            {
                return claim.Value;
            }
        }

        return null;
    }
}

/// <summary>
/// Represents a security token which contains multiple security keys that are generated using symmetric algorithms.
/// </summary>
public class MultipleSymmetricKeySecurityToken : SecurityToken
{
    /// <summary>
    /// Initializes a new instance of the MultipleSymmetricKeySecurityToken class.
    /// </summary>
    /// <param name="keys">An enumeration of Byte arrays that contain the symmetric keys.</param>
    public MultipleSymmetricKeySecurityToken(IEnumerable<byte[]> keys)
        : this(UniqueId.CreateUniqueId(), keys)
    {
    }

    /// <summary>
    /// Initializes a new instance of the MultipleSymmetricKeySecurityToken class.
    /// </summary>
    /// <param name="tokenId">The unique identifier of the security token.</param>
    /// <param name="keys">An enumeration of Byte arrays that contain the symmetric keys.</param>
    /// <exception cref="ArgumentNullException">keys is null.</exception>
    /// <exception cref="ArgumentException">tokenId is null/empty, or any key is empty.</exception>
    public MultipleSymmetricKeySecurityToken(string tokenId, IEnumerable<byte[]> keys)
    {
        if (keys == null)
        {
            throw new ArgumentNullException("keys");
        }

        if (String.IsNullOrEmpty(tokenId))
        {
            throw new ArgumentException("Value cannot be a null or empty string.", "tokenId");
        }

        // FIX: materialize the sequence once. The original enumerated the caller's
        // IEnumerable twice (validation pass + key creation), which is incorrect for
        // one-shot enumerables and re-runs any deferred work.
        List<byte[]> keyList = new List<byte[]>(keys);

        foreach (byte[] key in keyList)
        {
            if (key.Length <= 0)
            {
                // FIX: corrected "then" -> "than" in the exception message.
                throw new ArgumentException("The key length must be greater than zero.", "keys");
            }
        }

        id = tokenId;
        effectiveTime = DateTime.UtcNow;
        securityKeys = CreateSymmetricSecurityKeys(keyList);
    }

    /// <summary>
    /// Gets the unique identifier of the security token.
    /// </summary>
    public override string Id
    {
        get
        {
            return id;
        }
    }

    /// <summary>
    /// Gets the cryptographic keys associated with the security token.
    /// </summary>
    public override ReadOnlyCollection<SecurityKey> SecurityKeys
    {
        get
        {
            return securityKeys.AsReadOnly();
        }
    }

    /// <summary>
    /// Gets the first instant in time at which this security token is valid.
    /// </summary>
    public override DateTime ValidFrom
    {
        get
        {
            return effectiveTime;
        }
    }

    /// <summary>
    /// Gets the last instant in time at which this security token is valid.
    /// </summary>
    public override DateTime ValidTo
    {
        get
        {
            // Never expire
            return DateTime.MaxValue;
        }
    }

    /// <summary>
    /// Returns a value that indicates whether the key identifier for this instance can be resolved to the specified key identifier.
    /// </summary>
    /// <param name="keyIdentifierClause">A SecurityKeyIdentifierClause to compare to this instance</param>
    /// <returns>true if keyIdentifierClause is a SecurityKeyIdentifierClause and it has the same unique identifier as the Id property; otherwise, false.</returns>
    public override bool MatchesKeyIdentifierClause(SecurityKeyIdentifierClause keyIdentifierClause)
    {
        if (keyIdentifierClause == null)
        {
            throw new ArgumentNullException("keyIdentifierClause");
        }

        // Since this is a symmetric token and we do not have IDs to distinguish tokens, we just check for the
        // presence of a SymmetricIssuerKeyIdentifier. The actual mapping to the issuer takes place later
        // when the key is matched to the issuer.
        if (keyIdentifierClause is SymmetricIssuerKeyIdentifierClause)
        {
            return true;
        }

        return base.MatchesKeyIdentifierClause(keyIdentifierClause);
    }

    #region private members

    /// <summary>
    /// Wraps each raw key in an in-memory symmetric SecurityKey.
    /// </summary>
    private List<SecurityKey> CreateSymmetricSecurityKeys(IEnumerable<byte[]> keys)
    {
        List<SecurityKey> symmetricKeys = new List<SecurityKey>();
        foreach (byte[] key in keys)
        {
            symmetricKeys.Add(new InMemorySymmetricSecurityKey(key));
        }
        return symmetricKeys;
    }

    // Token id assigned at construction.
    private string id;
    // Construction time; reported as ValidFrom.
    private DateTime effectiveTime;
    // Backing list for SecurityKeys.
    private List<SecurityKey> securityKeys;

    #endregion
}
}
/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for Additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
==================================================================== */
using NPOI;

namespace TestCases.SS.Format
{
    using System;
    using System.Globalization;
    using System.Text;
    using System.Threading;
    using NPOI.HSSF.UserModel;
    using NPOI.SS.Format;
    using NPOI.SS.UserModel;
    using NPOI.SS.Util;
    using NUnit.Framework;

    // Tests for NPOI.SS.Format.CellFormat: Excel-style custom number formats
    // applied to raw values, to ICells, and to (Label, ICell) pairs.
    // Most tests pin CurrentCulture to en-US because the expected strings
    // contain culture-dependent decimal/date separators.
    [TestFixture]
    public class TestCellFormat
    {
        // A string of 255 '#' characters: the expected output when a value
        // cannot be rendered under the chosen format (e.g. a negative number
        // under a date format) — see the assertions that compare against it.
        private static String _255_POUND_SIGNS;

        static TestCellFormat()
        {
            StringBuilder sb = new StringBuilder();
            for (int i = 1; i <= 255; i++)
            {
                sb.Append('#');
            }
            _255_POUND_SIGNS = sb.ToString();
        }

        // Smoke test: a complex accounting format applies without throwing.
        [Test]
        public void TestSome()
        {
            Label l = new Label();
            CellFormat fmt = CellFormat.GetInstance(
                "\"$\"#,##0.00_);[Red]\\(\"$\"#,##0.00\\)");
            fmt.Apply(l, 1.1);
        }

        // --- single-section formats ---

        [Test]
        public void TestPositiveFormatHasOnePart()
        {
            Thread.CurrentThread.CurrentCulture = CultureInfo.GetCultureInfo("en-US");
            CellFormat fmt = CellFormat.GetInstance("0.00");
            CellFormatResult result = fmt.Apply(12.345);
            Assert.AreEqual("12.35", result.Text);
        }

        [Test]
        public void TestNegativeFormatHasOnePart()
        {
            Thread.CurrentThread.CurrentCulture = CultureInfo.GetCultureInfo("en-US");
            CellFormat fmt = CellFormat.GetInstance("0.00");
            CellFormatResult result = fmt.Apply(-12.345);
            Assert.AreEqual("-12.35", result.Text);
        }

        [Test]
        public void TestZeroFormatHasOnePart()
        {
            Thread.CurrentThread.CurrentCulture = CultureInfo.GetCultureInfo("en-US");
            CellFormat fmt = CellFormat.GetInstance("0.00");
            CellFormatResult result = fmt.Apply(0.0);
            Assert.AreEqual("0.00", result.Text);
        }

        // --- two-section (positive;negative) formats ---

        [Test]
        public void TestPositiveFormatHasPosAndNegParts()
        {
            Thread.CurrentThread.CurrentCulture = CultureInfo.GetCultureInfo("en-US");
            CellFormat fmt = CellFormat.GetInstance("0.00;-0.00");
            CellFormatResult result = fmt.Apply(12.345);
            Assert.AreEqual("12.35", result.Text);
        }

        [Test]
        public void TestNegativeFormatHasPosAndNegParts()
        {
            Thread.CurrentThread.CurrentCulture = CultureInfo.GetCultureInfo("en-US");
            CellFormat fmt = CellFormat.GetInstance("0.00;-0.00");
            CellFormatResult result = fmt.Apply(-12.345);
            Assert.AreEqual("-12.35", result.Text);
        }

        [Test]
        public void TestNegativeFormatHasPosAndNegParts2()
        {
            Thread.CurrentThread.CurrentCulture = CultureInfo.GetCultureInfo("en-US");
            CellFormat fmt = CellFormat.GetInstance("0.00;(0.00)");
            CellFormatResult result = fmt.Apply(-12.345);
            Assert.AreEqual("(12.35)", result.Text);
        }

        [Test]
        public void TestZeroFormatHasPosAndNegParts()
        {
            Thread.CurrentThread.CurrentCulture = CultureInfo.GetCultureInfo("en-US");
            CellFormat fmt = CellFormat.GetInstance("0.00;-0.00");
            CellFormatResult result = fmt.Apply(0.0);
            Assert.AreEqual("0.00", result.Text);
        }

        // --- three- and four-section formats (pos;neg;zero;text) ---

        [Test]
        public void TestFormatWithThreeSections()
        {
            Thread.CurrentThread.CurrentCulture = CultureInfo.GetCultureInfo("en-US");
            CellFormat fmt = CellFormat.GetInstance("0.00;-0.00;-");

            Assert.AreEqual("12.35", fmt.Apply(12.345).Text);
            Assert.AreEqual("-12.35", fmt.Apply(-12.345).Text);
            Assert.AreEqual("-", fmt.Apply(0.0).Text);
            Assert.AreEqual("abc", fmt.Apply("abc").Text);
        }

        [Test]
        public void TestFormatWithFourSections()
        {
            Thread.CurrentThread.CurrentCulture = CultureInfo.GetCultureInfo("en-US");
            CellFormat fmt = CellFormat.GetInstance("0.00;-0.00;-; @ ");

            Assert.AreEqual("12.35", fmt.Apply(12.345).Text);
            Assert.AreEqual("-12.35", fmt.Apply(-12.345).Text);
            Assert.AreEqual("-", fmt.Apply(0.0).Text);
            Assert.AreEqual(" abc ", fmt.Apply("abc").Text);
        }

        // --- Apply(ICell) for every cell type ---

        [Test]
        public void TestApplyCellForGeneralFormat()
        {
            Thread.CurrentThread.CurrentCulture = CultureInfo.GetCultureInfo("en-US");
            // Create a workbook, IRow and ICell to test with
            IWorkbook wb = new HSSFWorkbook();
            ISheet sheet = wb.CreateSheet();
            IRow row = sheet.CreateRow(0);
            ICell cell0 = row.CreateCell(0);
            ICell cell1 = row.CreateCell(1);
            ICell cell2 = row.CreateCell(2);
            ICell cell3 = row.CreateCell(3);
            ICell cell4 = row.CreateCell(4);

            CellFormat cf = CellFormat.GetInstance("General");

            // case Cell.CELL_TYPE_BLANK
            CellFormatResult result0 = cf.Apply(cell0);
            Assert.AreEqual(string.Empty, result0.Text);

            // case Cell.CELL_TYPE_BOOLEAN
            cell1.SetCellValue(true);
            CellFormatResult result1 = cf.Apply(cell1);
            Assert.AreEqual("TRUE", result1.Text);

            // case Cell.CELL_TYPE_NUMERIC
            cell2.SetCellValue(1.23);
            CellFormatResult result2 = cf.Apply(cell2);
            Assert.AreEqual("1.23", result2.Text);

            cell3.SetCellValue(123.0);
            CellFormatResult result3 = cf.Apply(cell3);
            Assert.AreEqual("123", result3.Text);

            // case Cell.CELL_TYPE_STRING
            cell4.SetCellValue("abc");
            CellFormatResult result4 = cf.Apply(cell4);
            Assert.AreEqual("abc", result4.Text);
        }

        [Test]
        public void TestApplyCellForAtFormat()
        {
            Thread.CurrentThread.CurrentCulture = CultureInfo.GetCultureInfo("en-US");
            // Create a workbook, IRow and ICell to test with
            IWorkbook wb = new HSSFWorkbook();
            ISheet sheet = wb.CreateSheet();
            IRow row = sheet.CreateRow(0);
            ICell cell0 = row.CreateCell(0);
            ICell cell1 = row.CreateCell(1);
            ICell cell2 = row.CreateCell(2);
            ICell cell3 = row.CreateCell(3);
            ICell cell4 = row.CreateCell(4);

            CellFormat cf = CellFormat.GetInstance("@");

            // case Cell.CELL_TYPE_BLANK
            CellFormatResult result0 = cf.Apply(cell0);
            Assert.AreEqual(string.Empty, result0.Text);

            // case Cell.CELL_TYPE_BOOLEAN
            cell1.SetCellValue(true);
            CellFormatResult result1 = cf.Apply(cell1);
            Assert.AreEqual("TRUE", result1.Text);

            // case Cell.CELL_TYPE_NUMERIC
            cell2.SetCellValue(1.23);
            CellFormatResult result2 = cf.Apply(cell2);
            Assert.AreEqual("1.23", result2.Text);

            cell3.SetCellValue(123.0);
            CellFormatResult result3 = cf.Apply(cell3);
            Assert.AreEqual("123", result3.Text);

            // case Cell.CELL_TYPE_STRING
            cell4.SetCellValue("abc");
            CellFormatResult result4 = cf.Apply(cell4);
            Assert.AreEqual("abc", result4.Text);
        }

        [Test]
        public void TestApplyCellForDateFormat()
        {
            Thread.CurrentThread.CurrentCulture = CultureInfo.GetCultureInfo("en-US");
            // Create a workbook, IRow and ICell to test with
            IWorkbook wb = new HSSFWorkbook();
            ISheet sheet = wb.CreateSheet();
            IRow row = sheet.CreateRow(0);
            ICell cell0 = row.CreateCell(0);
            ICell cell1 = row.CreateCell(1);

            CellFormat cf = CellFormat.GetInstance("dd/mm/yyyy");

            cell0.SetCellValue(10);
            CellFormatResult result0 = cf.Apply(cell0);
            Assert.AreEqual("10/01/1900", result0.Text);

            // A negative serial value cannot be a date, so the format fails
            // and the 255-pound-sign fill string is produced.
            cell1.SetCellValue(-1);
            CellFormatResult result1 = cf.Apply(cell1);
            Assert.AreEqual(_255_POUND_SIGNS, result1.Text);
        }

        [Test]
        public void TestApplyCellForTimeFormat()
        {
            // Create a workbook, IRow and ICell to test with
            IWorkbook wb = new HSSFWorkbook();
            ISheet sheet = wb.CreateSheet();
            IRow row = sheet.CreateRow(0);
            ICell cell = row.CreateCell(0);

            CellFormat cf = CellFormat.GetInstance("hh:mm");

            cell.SetCellValue(DateUtil.ConvertTime("03:04:05"));
            CellFormatResult result = cf.Apply(cell);
            Assert.AreEqual("03:04", result.Text);
        }

        [Test]
        public void TestApplyCellForDateFormatAndNegativeFormat()
        {
            Thread.CurrentThread.CurrentCulture = CultureInfo.GetCultureInfo("en-US");
            // Create a workbook, IRow and ICell to test with
            IWorkbook wb = new HSSFWorkbook();
            ISheet sheet = wb.CreateSheet();
            IRow row = sheet.CreateRow(0);
            ICell cell0 = row.CreateCell(0);
            ICell cell1 = row.CreateCell(1);

            CellFormat cf = CellFormat.GetInstance("dd/mm/yyyy;(0)");

            cell0.SetCellValue(10);
            CellFormatResult result0 = cf.Apply(cell0);
            Assert.AreEqual("10/01/1900", result0.Text);

            // With an explicit negative section the negative value is formatted
            // instead of producing the pound-sign fill.
            cell1.SetCellValue(-1);
            CellFormatResult result1 = cf.Apply(cell1);
            Assert.AreEqual("(1)", result1.Text);
        }

        // --- Apply(Label, ICell): both the result text and the label text must be set ---

        [Test]
        public void TestApplyLabelCellForGeneralFormat()
        {
            Thread.CurrentThread.CurrentCulture = CultureInfo.GetCultureInfo("en-US");
            // Create a workbook, IRow and ICell to test with
            IWorkbook wb = new HSSFWorkbook();
            ISheet sheet = wb.CreateSheet();
            IRow row = sheet.CreateRow(0);
            ICell cell0 = row.CreateCell(0);
            ICell cell1 = row.CreateCell(1);
            ICell cell2 = row.CreateCell(2);
            ICell cell3 = row.CreateCell(3);
            ICell cell4 = row.CreateCell(4);

            CellFormat cf = CellFormat.GetInstance("General");

            Label label0 = new Label();
            Label label1 = new Label();
            Label label2 = new Label();
            Label label3 = new Label();
            Label label4 = new Label();

            // case Cell.CELL_TYPE_BLANK
            CellFormatResult result0 = cf.Apply(label0, cell0);
            Assert.AreEqual(string.Empty, result0.Text);
            Assert.AreEqual(string.Empty, label0.Text);

            // case Cell.CELL_TYPE_BOOLEAN
            cell1.SetCellValue(true);
            CellFormatResult result1 = cf.Apply(label1, cell1);
            Assert.AreEqual("TRUE", result1.Text);
            Assert.AreEqual("TRUE", label1.Text);

            // case Cell.CELL_TYPE_NUMERIC
            cell2.SetCellValue(1.23);
            CellFormatResult result2 = cf.Apply(label2, cell2);
            Assert.AreEqual("1.23", result2.Text);
            Assert.AreEqual("1.23", label2.Text);

            cell3.SetCellValue(123.0);
            CellFormatResult result3 = cf.Apply(label3, cell3);
            Assert.AreEqual("123", result3.Text);
            Assert.AreEqual("123", label3.Text);

            // case Cell.CELL_TYPE_STRING
            cell4.SetCellValue("abc");
            CellFormatResult result4 = cf.Apply(label4, cell4);
            Assert.AreEqual("abc", result4.Text);
            Assert.AreEqual("abc", label4.Text);
        }

        [Test]
        public void TestApplyLabelCellForAtFormat()
        {
            Thread.CurrentThread.CurrentCulture = CultureInfo.GetCultureInfo("en-US");
            // Create a workbook, IRow and ICell to test with
            IWorkbook wb = new HSSFWorkbook();
            ISheet sheet = wb.CreateSheet();
            IRow row = sheet.CreateRow(0);
            ICell cell0 = row.CreateCell(0);
            ICell cell1 = row.CreateCell(1);
            ICell cell2 = row.CreateCell(2);
            ICell cell3 = row.CreateCell(3);
            ICell cell4 = row.CreateCell(4);

            CellFormat cf = CellFormat.GetInstance("@");

            Label label0 = new Label();
            Label label1 = new Label();
            Label label2 = new Label();
            Label label3 = new Label();
            Label label4 = new Label();

            // case Cell.CELL_TYPE_BLANK
            CellFormatResult result0 = cf.Apply(label0, cell0);
            Assert.AreEqual(string.Empty, result0.Text);
            Assert.AreEqual(string.Empty, label0.Text);

            // case Cell.CELL_TYPE_BOOLEAN
            cell1.SetCellValue(true);
            CellFormatResult result1 = cf.Apply(label1, cell1);
            Assert.AreEqual("TRUE", result1.Text);
            Assert.AreEqual("TRUE", label1.Text);

            // case Cell.CELL_TYPE_NUMERIC
            cell2.SetCellValue(1.23);
            CellFormatResult result2 = cf.Apply(label2, cell2);
            Assert.AreEqual("1.23", result2.Text);
            Assert.AreEqual("1.23", label2.Text);

            cell3.SetCellValue(123.0);
            CellFormatResult result3 = cf.Apply(label3, cell3);
            Assert.AreEqual("123", result3.Text);
            Assert.AreEqual("123", label3.Text);

            // case Cell.CELL_TYPE_STRING
            cell4.SetCellValue("abc");
            CellFormatResult result4 = cf.Apply(label4, cell4);
            Assert.AreEqual("abc", result4.Text);
            Assert.AreEqual("abc", label4.Text);
        }

        [Test]
        public void TestApplyLabelCellForDateFormat()
        {
            Thread.CurrentThread.CurrentCulture = CultureInfo.GetCultureInfo("en-US");
            // Create a workbook, IRow and ICell to test with
            IWorkbook wb = new HSSFWorkbook();
            ISheet sheet = wb.CreateSheet();
            IRow row = sheet.CreateRow(0);
            ICell cell0 = row.CreateCell(0);
            ICell cell1 = row.CreateCell(1);

            CellFormat cf = CellFormat.GetInstance("dd/mm/yyyy");

            Label label0 = new Label();
            Label label1 = new Label();

            cell0.SetCellValue(10);
            CellFormatResult result0 = cf.Apply(label0, cell0);
            Assert.AreEqual("10/01/1900", result0.Text);
            Assert.AreEqual("10/01/1900", label0.Text);

            cell1.SetCellValue(-1);
            CellFormatResult result1 = cf.Apply(label1, cell1);
            Assert.AreEqual(_255_POUND_SIGNS, result1.Text);
            Assert.AreEqual(_255_POUND_SIGNS, label1.Text);
        }

        [Test]
        public void TestApplyLabelCellForTimeFormat()
        {
            // Create a workbook, IRow and ICell to test with
            IWorkbook wb = new HSSFWorkbook();
            ISheet sheet = wb.CreateSheet();
            IRow row = sheet.CreateRow(0);
            ICell cell = row.CreateCell(0);

            CellFormat cf = CellFormat.GetInstance("hh:mm");

            Label label = new Label();

            cell.SetCellValue(DateUtil.ConvertTime("03:04:05"));
            CellFormatResult result = cf.Apply(label, cell);
            Assert.AreEqual("03:04", result.Text);
            Assert.AreEqual("03:04", label.Text);
        }

        [Test]
        public void TestApplyLabelCellForDateFormatAndNegativeFormat()
        {
            Thread.CurrentThread.CurrentCulture = CultureInfo.GetCultureInfo("en-US");
            // Create a workbook, IRow and ICell to test with
            IWorkbook wb = new HSSFWorkbook();
            ISheet sheet = wb.CreateSheet();
            IRow row = sheet.CreateRow(0);
            ICell cell0 = row.CreateCell(0);
            ICell cell1 = row.CreateCell(1);

            CellFormat cf = CellFormat.GetInstance("dd/mm/yyyy;(0)");

            Label label0 = new Label();
            Label label1 = new Label();

            cell0.SetCellValue(10);
            CellFormatResult result0 = cf.Apply(label0, cell0);
            Assert.AreEqual("10/01/1900", result0.Text);
            Assert.AreEqual("10/01/1900", label0.Text);

            cell1.SetCellValue(-1);
            CellFormatResult result1 = cf.Apply(label1, cell1);
            Assert.AreEqual("(1)", result1.Text);
            Assert.AreEqual("(1)", label1.Text);
        }

        // --- conditional sections, e.g. "[>=100]0.00;..." ---

        [Test]
        public void TestApplyFormatHasOnePartAndPartHasCondition()
        {
            Thread.CurrentThread.CurrentCulture = CultureInfo.GetCultureInfo("en-US");
            // Create a workbook, IRow and ICell to test with
            IWorkbook wb = new HSSFWorkbook();
            ISheet sheet = wb.CreateSheet();
            IRow row = sheet.CreateRow(0);
            ICell cell = row.CreateCell(0);

            CellFormat cf = CellFormat.GetInstance("[>=100]0.00");

            cell.SetCellValue(100);
            Assert.AreEqual("100.00", cf.Apply(cell).Text);

            // Values failing the condition fall back to General-style output.
            cell.SetCellValue(10);
            Assert.AreEqual("10", cf.Apply(cell).Text);

            cell.SetCellValue(0.123456789012345);
            Assert.AreEqual("0.123456789", cf.Apply(cell).Text);

            cell.SetCellValue(0);
            Assert.AreEqual("0", cf.Apply(cell).Text);

            cell.SetCellValue("abc");
            Assert.AreEqual("abc", cf.Apply(cell).Text);
        }

        [Test]
        public void TestApplyFormatHasTwoPartsFirstHasCondition()
        {
            Thread.CurrentThread.CurrentCulture = CultureInfo.GetCultureInfo("en-US");
            // Create a workbook, IRow and ICell to test with
            IWorkbook wb = new HSSFWorkbook();
            ISheet sheet = wb.CreateSheet();
            IRow row = sheet.CreateRow(0);
            ICell cell = row.CreateCell(0);

            CellFormat cf = CellFormat.GetInstance("[>=100]0.00;0.000");

            cell.SetCellValue(100);
            Assert.AreEqual("100.00", cf.Apply(cell).Text);

            cell.SetCellValue(10);
            Assert.AreEqual("10.000", cf.Apply(cell).Text);

            cell.SetCellValue(0.123456789012345);
            Assert.AreEqual("0.123", cf.Apply(cell).Text);

            cell.SetCellValue(0);
            Assert.AreEqual("0.000", cf.Apply(cell).Text);

            cell.SetCellValue(-10);
            Assert.AreEqual("-10.000", cf.Apply(cell).Text);

            cell.SetCellValue("abc");
            Assert.AreEqual("abc", cf.Apply(cell).Text);

            cell.SetCellValue("TRUE");
            Assert.AreEqual("TRUE", cf.Apply(cell).Text);
        }

        [Test]
        public void TestApplyFormatHasTwoPartsBothHaveCondition()
        {
            Thread.CurrentThread.CurrentCulture = CultureInfo.GetCultureInfo("en-US");
            // Create a workbook, IRow and ICell to test with
            IWorkbook wb = new HSSFWorkbook();
            ISheet sheet = wb.CreateSheet();
            IRow row = sheet.CreateRow(0);
            ICell cell = row.CreateCell(0);

            CellFormat cf = CellFormat.GetInstance("[>=100]0.00;[>=10]0.000");

            cell.SetCellValue(100);
            Assert.AreEqual("100.00", cf.Apply(cell).Text);

            cell.SetCellValue(10);
            Assert.AreEqual("10.000", cf.Apply(cell).Text);

            // No section matches these values, so the pound-sign fill is produced.
            cell.SetCellValue(0);
            Assert.AreEqual(_255_POUND_SIGNS, cf.Apply(cell).Text);

            cell.SetCellValue(-0.123456789012345);
            Assert.AreEqual(_255_POUND_SIGNS, cf.Apply(cell).Text);

            cell.SetCellValue(-10);
            Assert.AreEqual(_255_POUND_SIGNS, cf.Apply(cell).Text);

            cell.SetCellValue("abc");
            Assert.AreEqual("abc", cf.Apply(cell).Text);
        }

        [Test]
        public void TestApplyFormatHasThreePartsFirstHasCondition()
        {
            Thread.CurrentThread.CurrentCulture = CultureInfo.GetCultureInfo("en-US");
            // Create a workbook, IRow and ICell to test with
            IWorkbook wb = new HSSFWorkbook();
            ISheet sheet = wb.CreateSheet();
            IRow row = sheet.CreateRow(0);
            ICell cell = row.CreateCell(0);

            CellFormat cf = CellFormat.GetInstance("[>=100]0.00;0.000;0.0000");

            cell.SetCellValue(100);
            Assert.AreEqual("100.00", cf.Apply(cell).Text);

            cell.SetCellValue(10);
            Assert.AreEqual("10.0000", cf.Apply(cell).Text);

            cell.SetCellValue(0.123456789012345);
            Assert.AreEqual("0.1235", cf.Apply(cell).Text);

            cell.SetCellValue(0);
            Assert.AreEqual("0.0000", cf.Apply(cell).Text);

            // Second format part ('0.000') is used for negative numbers
            // so result does not have a minus sign
            cell.SetCellValue(-10);
            Assert.AreEqual("10.000", cf.Apply(cell).Text);

            cell.SetCellValue("abc");
            Assert.AreEqual("abc", cf.Apply(cell).Text);
        }

        [Test]
        public void TestApplyFormatHasThreePartsFirstTwoHaveCondition()
        {
            Thread.CurrentThread.CurrentCulture = CultureInfo.GetCultureInfo("en-US");
            // Create a workbook, IRow and ICell to test with
            IWorkbook wb = new HSSFWorkbook();
            ISheet sheet = wb.CreateSheet();
            IRow row = sheet.CreateRow(0);
            ICell cell = row.CreateCell(0);

            CellFormat cf = CellFormat.GetInstance("[>=100]0.00;[>=10]0.000;0.0000");

            cell.SetCellValue(100);
            Assert.AreEqual("100.00", cf.Apply(cell).Text);

            cell.SetCellValue(10);
            Assert.AreEqual("10.000", cf.Apply(cell).Text);

            cell.SetCellValue(0);
            Assert.AreEqual("0.0000", cf.Apply(cell).Text);

            cell.SetCellValue(-10);
            Assert.AreEqual("-10.0000", cf.Apply(cell).Text);

            cell.SetCellValue("abc");
            Assert.AreEqual("abc", cf.Apply(cell).Text);
        }

        [Test]
        public void TestApplyFormatHasThreePartsFirstIsDateFirstTwoHaveCondition()
        {
            Thread.CurrentThread.CurrentCulture = CultureInfo.GetCultureInfo("en-US");
            // Create a workbook, IRow and ICell to test with
            IWorkbook wb = new HSSFWorkbook();
            ISheet sheet = wb.CreateSheet();
            IRow row = sheet.CreateRow(0);
            ICell cell = row.CreateCell(0);

            CellFormat cf = CellFormat.GetInstance("[>=100]0.00;[>=10]dd/mm/yyyy;0.0");

            cell.SetCellValue(100);
            Assert.AreEqual("100.00", cf.Apply(cell).Text);

            cell.SetCellValue(10);
            Assert.AreEqual("10/01/1900", cf.Apply(cell).Text);

            cell.SetCellValue(0);
            Assert.AreEqual("0.0", cf.Apply(cell).Text);

            cell.SetCellValue(-10);
            Assert.AreEqual("-10.0", cf.Apply(cell).Text);

            cell.SetCellValue("abc");
            Assert.AreEqual("abc", cf.Apply(cell).Text);
        }

        [Test]
        public void TestApplyFormatHasTwoPartsFirstHasConditionSecondIsGeneral()
        {
            Thread.CurrentThread.CurrentCulture = CultureInfo.GetCultureInfo("en-US");
            // Create a workbook, IRow and ICell to test with
            IWorkbook wb = new HSSFWorkbook();
            ISheet sheet = wb.CreateSheet();
            IRow row = sheet.CreateRow(0);
            ICell cell = row.CreateCell(0);

            CellFormat cf = CellFormat.GetInstance("[>=100]0.00;General");

            cell.SetCellValue(100);
            Assert.AreEqual("100.00", cf.Apply(cell).Text);

            cell.SetCellValue(10);
            Assert.AreEqual("10", cf.Apply(cell).Text);

            cell.SetCellValue(0);
            Assert.AreEqual("0", cf.Apply(cell).Text);

            cell.SetCellValue(-10);
            Assert.AreEqual("-10", cf.Apply(cell).Text);

            cell.SetCellValue("abc");
            Assert.AreEqual("abc", cf.Apply(cell).Text);
        }

        [Test]
        public void TestApplyFormatHasThreePartsFirstTwoHaveConditionThirdIsGeneral()
        {
            Thread.CurrentThread.CurrentCulture = CultureInfo.GetCultureInfo("en-US");
            // Create a workbook, IRow and ICell to test with
            IWorkbook wb = new HSSFWorkbook();
            ISheet sheet = wb.CreateSheet();
            IRow row = sheet.CreateRow(0);
            ICell cell = row.CreateCell(0);

            CellFormat cf = CellFormat.GetInstance("[>=100]0.00;[>=10]0.000;General");

            cell.SetCellValue(100);
            Assert.AreEqual("100.00", cf.Apply(cell).Text);

            cell.SetCellValue(10);
            Assert.AreEqual("10.000", cf.Apply(cell).Text);

            cell.SetCellValue(0);
            Assert.AreEqual("0", cf.Apply(cell).Text);

            cell.SetCellValue(-10);
            Assert.AreEqual("-10", cf.Apply(cell).Text);

            cell.SetCellValue("abc");
            Assert.AreEqual("abc", cf.Apply(cell).Text);
        }

        [Test]
        public void TestApplyFormatHasFourPartsFirstHasCondition()
        {
            Thread.CurrentThread.CurrentCulture = CultureInfo.GetCultureInfo("en-US");
            // Create a workbook, IRow and ICell to test with
            IWorkbook wb = new HSSFWorkbook();
            ISheet sheet = wb.CreateSheet();
            IRow row = sheet.CreateRow(0);
            ICell cell = row.CreateCell(0);

            CellFormat cf = CellFormat.GetInstance("[>=100]0.00;0.000;0.0000;~~@~~");

            cell.SetCellValue(100);
            Assert.AreEqual("100.00", cf.Apply(cell).Text);

            cell.SetCellValue(10);
            Assert.AreEqual("10.0000", cf.Apply(cell).Text);

            cell.SetCellValue(0.123456789012345);
            Assert.AreEqual("0.1235", cf.Apply(cell).Text);

            cell.SetCellValue(0);
            Assert.AreEqual("0.0000", cf.Apply(cell).Text);

            // Second format part ('0.000') is used for negative numbers
            // so result does not have a minus sign
            cell.SetCellValue(-10);
            Assert.AreEqual("10.000", cf.Apply(cell).Text);

            cell.SetCellValue("abc");
            Assert.AreEqual("~~abc~~", cf.Apply(cell).Text);
        }

        [Test]
        public void TestApplyFormatHasFourPartsSecondHasCondition()
        {
            Thread.CurrentThread.CurrentCulture = CultureInfo.GetCultureInfo("en-US");
            // Create a workbook, IRow and ICell to test with
            IWorkbook wb = new HSSFWorkbook();
            ISheet sheet = wb.CreateSheet();
            IRow row = sheet.CreateRow(0);
            ICell cell = row.CreateCell(0);

            CellFormat cf = CellFormat.GetInstance("0.00;[>=100]0.000;0.0000;~~@~~");

            cell.SetCellValue(100);
            Assert.AreEqual("100.00", cf.Apply(cell).Text);

            cell.SetCellValue(10);
            Assert.AreEqual("10.00", cf.Apply(cell).Text);

            cell.SetCellValue(0.123456789012345);
            Assert.AreEqual("0.12", cf.Apply(cell).Text);

            cell.SetCellValue(0);
            Assert.AreEqual("0.0000", cf.Apply(cell).Text);

            cell.SetCellValue(-10);
            Assert.AreEqual("-10.0000", cf.Apply(cell).Text);

            cell.SetCellValue("abc");
            Assert.AreEqual("~~abc~~", cf.Apply(cell).Text);

            cell.SetCellValue(true);
            Assert.AreEqual("~~TRUE~~", cf.Apply(cell).Text);
        }

        [Test]
        public void TestApplyFormatHasFourPartsFirstTwoHaveCondition()
        {
            Thread.CurrentThread.CurrentCulture = CultureInfo.GetCultureInfo("en-US");
            // Create a workbook, IRow and ICell to test with
            IWorkbook wb = new HSSFWorkbook();
            ISheet sheet = wb.CreateSheet();
            IRow row = sheet.CreateRow(0);
            ICell cell = row.CreateCell(0);

            CellFormat cf = CellFormat.GetInstance("[>=100]0.00;[>=10]0.000;0.0000;~~@~~");

            cell.SetCellValue(100);
            Assert.AreEqual("100.00", cf.Apply(cell).Text);

            cell.SetCellValue(10);
            Assert.AreEqual("10.000", cf.Apply(cell).Text);

            cell.SetCellValue(0);
            Assert.AreEqual("0.0000", cf.Apply(cell).Text);

            cell.SetCellValue(-10);
            Assert.AreEqual("-10.0000", cf.Apply(cell).Text);

            cell.SetCellValue("abc");
            Assert.AreEqual("~~abc~~", cf.Apply(cell).Text);

            cell.SetCellValue(true);
            Assert.AreEqual("~~TRUE~~", cf.Apply(cell).Text);
        }

        /*
         * Test apply(Object value) with a number as parameter
         */
        [Test]
        public void TestApplyObjectNumber()
        {
            Thread.CurrentThread.CurrentCulture = CultureInfo.GetCultureInfo("en-US");
            CellFormat cf1 = CellFormat.GetInstance("0.000");
            Assert.AreEqual("1.235", cf1.Apply(1.2345).Text);
            Assert.AreEqual("-1.235", cf1.Apply(-1.2345).Text);

            CellFormat cf2 = CellFormat.GetInstance("0.000;(0.000)");
            Assert.AreEqual("1.235", cf2.Apply(1.2345).Text);
            Assert.AreEqual("(1.235)", cf2.Apply(-1.2345).Text);

            CellFormat cf3 = CellFormat.GetInstance("[>1]0.000;0.0000");
            Assert.AreEqual("1.235", cf3.Apply(1.2345).Text);
            Assert.AreEqual("-1.2345", cf3.Apply(-1.2345).Text);

            CellFormat cf4 = CellFormat.GetInstance("0.000;[>1]0.0000");
            Assert.AreEqual("1.235", cf4.Apply(1.2345).Text);
            Assert.AreEqual(_255_POUND_SIGNS, cf4.Apply(-1.2345).Text);
        }

        /*
         * Test apply(Object value) with a Date as parameter
         */
        [Test]
        public void TestApplyObjectDate()
        {
            Thread.CurrentThread.CurrentCulture = CultureInfo.GetCultureInfo("en-US");
            CellFormat cf1 = CellFormat.GetInstance("m/d/yyyy");
            DateTime date1 = new SimpleDateFormat("M/d/y").Parse("01/11/2012");
            Assert.AreEqual("1/11/2012", cf1.Apply(date1).Text);
        }

        [Test]
        public void TestApplyCellForDateFormatWithConditions()
        {
            Thread.CurrentThread.CurrentCulture = CultureInfo.GetCultureInfo("en-US");
            // Create a workbook, IRow and ICell to test with
            IWorkbook wb = new HSSFWorkbook();
            ISheet sheet = wb.CreateSheet();
            IRow row = sheet.CreateRow(0);
            ICell cell = row.CreateCell(0);

            CellFormat cf = CellFormat.GetInstance("[<1]hh:mm:ss AM/PM;[>=1]dd/mm/yyyy hh:mm:ss AM/PM;General");

            cell.SetCellValue(0.5);
            Assert.AreEqual("12:00:00 PM", cf.Apply(cell).Text);

            cell.SetCellValue(1.5);
            Assert.AreEqual("01/01/1900 12:00:00 PM", cf.Apply(cell).Text);

            cell.SetCellValue(-1);
            Assert.AreEqual(_255_POUND_SIGNS, cf.Apply(cell).Text);
        }

        /*
         * Test apply(Object value) with a String as parameter
         */
        [Test]
        public void TestApplyObjectString()
        {
            CellFormat cf = CellFormat.GetInstance("0.00");
            Assert.AreEqual("abc", cf.Apply("abc").Text);
        }

        /*
         * Test apply(Object value) with a Boolean as parameter
         */
        [Test]
        public void TestApplyObjectBoolean()
        {
            CellFormat cf1 = CellFormat.GetInstance("0");
            CellFormat cf2 = CellFormat.GetInstance("General");
            CellFormat cf3 = CellFormat.GetInstance("@");

            Assert.AreEqual("TRUE", cf1.Apply(true).Text);
            Assert.AreEqual("FALSE", cf2.Apply(false).Text);
            Assert.AreEqual("TRUE", cf3.Apply(true).Text);
        }

        [Test]
        public void TestSimpleFractionFormat()
        {
            CellFormat cf1 = CellFormat.GetInstance("# ?/?");
            // Create a workbook, row and cell to test with
            IWorkbook wb = new HSSFWorkbook();
            ISheet sheet = wb.CreateSheet();
            IRow row = sheet.CreateRow(0);
            ICell cell = row.CreateCell(0);
            cell.SetCellValue(123456.6);
            //System.out.println(cf1.apply(cell).text);
            Assert.AreEqual("123456 3/5", cf1.Apply(cell).Text);
        }
    }
}
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/bigtable/v1/bigtable_service_messages.proto
#pragma warning disable 1591, 0612, 3021
#region Designer generated code

using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace Google.Bigtable.V1 {

  // Holds the reflection (descriptor) data for all message types generated
  // from google/bigtable/v1/bigtable_service_messages.proto.
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
  public static partial class BigtableServiceMessages {

    #region Descriptor
    // File descriptor for bigtable_service_messages.proto.
    public static pbr::FileDescriptor Descriptor {
      get { return descriptor; }
    }
    private static pbr::FileDescriptor descriptor;

    static BigtableServiceMessages() {
      // Base64-encoded serialized FileDescriptorProto, emitted by protoc;
      // never edit by hand — regenerate from the .proto instead.
      byte[] descriptorData = global::System.Convert.FromBase64String(
          string.Concat(
            "CjJnb29nbGUvYmlndGFibGUvdjEvYmlndGFibGVfc2VydmljZV9tZXNzYWdl",
            "cy5wcm90bxISZ29vZ2xlLmJpZ3RhYmxlLnYxGiZnb29nbGUvYmlndGFibGUv",
            "djEvYmlndGFibGVfZGF0YS5wcm90byLcAQoPUmVhZFJvd3NSZXF1ZXN0EhIK",
            "CnRhYmxlX25hbWUYASABKAkSEQoHcm93X2tleRgCIAEoDEgAEjEKCXJvd19y",
            "YW5nZRgDIAEoCzIcLmdvb2dsZS5iaWd0YWJsZS52MS5Sb3dSYW5nZUgAEi0K",
            "BmZpbHRlchgFIAEoCzIdLmdvb2dsZS5iaWd0YWJsZS52MS5Sb3dGaWx0ZXIS",
            "HgoWYWxsb3dfcm93X2ludGVybGVhdmluZxgGIAEoCBIWCg5udW1fcm93c19s",
            "aW1pdBgHIAEoA0IICgZ0YXJnZXQi0AEKEFJlYWRSb3dzUmVzcG9uc2USDwoH",
            "cm93X2tleRgBIAEoDBI6CgZjaHVua3MYAiADKAsyKi5nb29nbGUuYmlndGFi",
            "bGUudjEuUmVhZFJvd3NSZXNwb25zZS5DaHVuaxpvCgVDaHVuaxIyCgxyb3df",
            "Y29udGVudHMYASABKAsyGi5nb29nbGUuYmlndGFibGUudjEuRmFtaWx5SAAS",
            "EwoJcmVzZXRfcm93GAIgASgISAASFAoKY29tbWl0X3JvdxgDIAEoCEgAQgcK",
            "BWNodW5rIioKFFNhbXBsZVJvd0tleXNSZXF1ZXN0EhIKCnRhYmxlX25hbWUY",
            "ASABKAkiPgoVU2FtcGxlUm93S2V5c1Jlc3BvbnNlEg8KB3Jvd19rZXkYASAB",
            "KAwSFAoMb2Zmc2V0X2J5dGVzGAIgASgDImgKEE11dGF0ZVJvd1JlcXVlc3QS",
            "EgoKdGFibGVfbmFtZRgBIAEoCRIPCgdyb3dfa2V5GAIgASgMEi8KCW11dGF0",
            "aW9ucxgDIAMoCzIcLmdvb2dsZS5iaWd0YWJsZS52MS5NdXRhdGlvbiLlAQoY",
            "Q2hlY2tBbmRNdXRhdGVSb3dSZXF1ZXN0EhIKCnRhYmxlX25hbWUYASABKAkS",
            "DwoHcm93X2tleRgCIAEoDBI3ChBwcmVkaWNhdGVfZmlsdGVyGAYgASgLMh0u",
            "Z29vZ2xlLmJpZ3RhYmxlLnYxLlJvd0ZpbHRlchI0Cg50cnVlX211dGF0aW9u",
            "cxgEIAMoCzIcLmdvb2dsZS5iaWd0YWJsZS52MS5NdXRhdGlvbhI1Cg9mYWxz",
            "ZV9tdXRhdGlvbnMYBSADKAsyHC5nb29nbGUuYmlndGFibGUudjEuTXV0YXRp",
            "b24iNgoZQ2hlY2tBbmRNdXRhdGVSb3dSZXNwb25zZRIZChFwcmVkaWNhdGVf",
            "bWF0Y2hlZBgBIAEoCCJ4ChlSZWFkTW9kaWZ5V3JpdGVSb3dSZXF1ZXN0EhIK",
            "CnRhYmxlX25hbWUYASABKAkSDwoHcm93X2tleRgCIAEoDBI2CgVydWxlcxgD",
            "IAMoCzInLmdvb2dsZS5iaWd0YWJsZS52MS5SZWFkTW9kaWZ5V3JpdGVSdWxl",
            "QjgKFmNvbS5nb29nbGUuYmlndGFibGUudjFCHEJpZ3RhYmxlU2VydmljZU1l",
            "c3NhZ2VzUHJvdG9QAWIGcHJvdG8z"));
      // Build the descriptor, wiring in the dependency on bigtable_data.proto
      // and declaring property/oneof names for each generated message type.
      descriptor = pbr::FileDescriptor.InternalBuildGeneratedFileFrom(descriptorData,
          new pbr::FileDescriptor[] { global::Google.Bigtable.V1.BigtableData.Descriptor, },
          new pbr::GeneratedCodeInfo(null, new pbr::GeneratedCodeInfo[] {
            new pbr::GeneratedCodeInfo(typeof(global::Google.Bigtable.V1.ReadRowsRequest), new[]{ "TableName", "RowKey", "RowRange", "Filter", "AllowRowInterleaving", "NumRowsLimit" }, new[]{ "Target" }, null, null),
            new pbr::GeneratedCodeInfo(typeof(global::Google.Bigtable.V1.ReadRowsResponse), new[]{ "RowKey", "Chunks" }, null, null, new pbr::GeneratedCodeInfo[] { new pbr::GeneratedCodeInfo(typeof(global::Google.Bigtable.V1.ReadRowsResponse.Types.Chunk), new[]{ "RowContents", "ResetRow", "CommitRow" }, new[]{ "Chunk" }, null, null)}),
            new pbr::GeneratedCodeInfo(typeof(global::Google.Bigtable.V1.SampleRowKeysRequest), new[]{ "TableName" }, null, null, null),
            new pbr::GeneratedCodeInfo(typeof(global::Google.Bigtable.V1.SampleRowKeysResponse), new[]{ "RowKey", "OffsetBytes" }, null, null, null),
            new pbr::GeneratedCodeInfo(typeof(global::Google.Bigtable.V1.MutateRowRequest), new[]{ "TableName", "RowKey", "Mutations" }, null, null, null),
            new pbr::GeneratedCodeInfo(typeof(global::Google.Bigtable.V1.CheckAndMutateRowRequest), new[]{ "TableName", "RowKey", "PredicateFilter", "TrueMutations", "FalseMutations" }, null, null, null),
            new pbr::GeneratedCodeInfo(typeof(global::Google.Bigtable.V1.CheckAndMutateRowResponse), new[]{
"PredicateMatched" }, null, null, null),
            new pbr::GeneratedCodeInfo(typeof(global::Google.Bigtable.V1.ReadModifyWriteRowRequest), new[]{ "TableName", "RowKey", "Rules" }, null, null, null)
          }));
    }
    #endregion

  }
  #region Messages
  // Request message for ReadRows.  The read target is a oneof: either a
  // single row_key or a row_range, stored untyped in target_ and
  // discriminated by targetCase_.
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
  public sealed partial class ReadRowsRequest : pb::IMessage<ReadRowsRequest> {
    private static readonly pb::MessageParser<ReadRowsRequest> _parser = new pb::MessageParser<ReadRowsRequest>(() => new ReadRowsRequest());
    public static pb::MessageParser<ReadRowsRequest> Parser { get { return _parser; } }
    public static pbr::MessageDescriptor Descriptor {
      get { return global::Google.Bigtable.V1.BigtableServiceMessages.Descriptor.MessageTypes[0]; }
    }
    pbr::MessageDescriptor pb::IMessage.Descriptor {
      get { return Descriptor; }
    }
    public ReadRowsRequest() {
      OnConstruction();
    }
    partial void OnConstruction();
    // Copy constructor: deep-clones message fields, copies scalars, and
    // copies whichever member of the "target" oneof is set.
    public ReadRowsRequest(ReadRowsRequest other) : this() {
      tableName_ = other.tableName_;
      Filter = other.filter_ != null ? other.Filter.Clone() : null;
      allowRowInterleaving_ = other.allowRowInterleaving_;
      numRowsLimit_ = other.numRowsLimit_;
      switch (other.TargetCase) {
        case TargetOneofCase.RowKey:
          RowKey = other.RowKey;
          break;
        case TargetOneofCase.RowRange:
          RowRange = other.RowRange.Clone();
          break;
      }
    }
    public ReadRowsRequest Clone() {
      return new ReadRowsRequest(this);
    }
    public const int TableNameFieldNumber = 1;
    private string tableName_ = "";
    public string TableName {
      get { return tableName_; }
      set { tableName_ = pb::Preconditions.CheckNotNull(value, "value"); }
    }
    public const int RowKeyFieldNumber = 2;
    // Member of the "target" oneof; setting it switches the oneof case.
    public pb::ByteString RowKey {
      get { return targetCase_ == TargetOneofCase.RowKey ? (pb::ByteString) target_ : pb::ByteString.Empty; }
      set {
        target_ = pb::Preconditions.CheckNotNull(value, "value");
        targetCase_ = TargetOneofCase.RowKey;
      }
    }
    public const int RowRangeFieldNumber = 3;
    // Member of the "target" oneof; assigning null clears the oneof.
    public global::Google.Bigtable.V1.RowRange RowRange {
      get { return targetCase_ == TargetOneofCase.RowRange ? (global::Google.Bigtable.V1.RowRange) target_ : null; }
      set {
        target_ = value;
        targetCase_ = value == null ? TargetOneofCase.None : TargetOneofCase.RowRange;
      }
    }
    public const int FilterFieldNumber = 5;
    private global::Google.Bigtable.V1.RowFilter filter_;
    public global::Google.Bigtable.V1.RowFilter Filter {
      get { return filter_; }
      set { filter_ = value; }
    }
    public const int AllowRowInterleavingFieldNumber = 6;
    private bool allowRowInterleaving_;
    public bool AllowRowInterleaving {
      get { return allowRowInterleaving_; }
      set { allowRowInterleaving_ = value; }
    }
    public const int NumRowsLimitFieldNumber = 7;
    private long numRowsLimit_;
    public long NumRowsLimit {
      get { return numRowsLimit_; }
      set { numRowsLimit_ = value; }
    }
    // Backing store for the "target" oneof (row_key or row_range).
    private object target_;
    public enum TargetOneofCase {
      None = 0,
      RowKey = 2,
      RowRange = 3,
    }
    private TargetOneofCase targetCase_ = TargetOneofCase.None;
    public TargetOneofCase TargetCase {
      get { return targetCase_; }
    }
    public void ClearTarget() {
      targetCase_ = TargetOneofCase.None;
      target_ = null;
    }
    public override bool Equals(object other) {
      return Equals(other as ReadRowsRequest);
    }
    public bool Equals(ReadRowsRequest other) {
      if (ReferenceEquals(other, null)) {
        return false;
      }
      if (ReferenceEquals(other, this)) {
        return true;
      }
      if (TableName != other.TableName) return false;
      if (RowKey != other.RowKey) return false;
      if (!object.Equals(RowRange, other.RowRange)) return false;
      if (!object.Equals(Filter, other.Filter)) return false;
      if (AllowRowInterleaving != other.AllowRowInterleaving) return false;
      if (NumRowsLimit != other.NumRowsLimit) return false;
      return true;
    }
    public override int GetHashCode() {
      // Only fields set to non-default values contribute to the hash.
      int hash = 1;
      if (TableName.Length != 0) hash ^= TableName.GetHashCode();
      if (targetCase_ == TargetOneofCase.RowKey) hash ^= RowKey.GetHashCode();
      if (targetCase_ == TargetOneofCase.RowRange) hash ^= RowRange.GetHashCode();
      if (filter_ != null) hash ^= Filter.GetHashCode();
      if (AllowRowInterleaving != false) hash ^= AllowRowInterleaving.GetHashCode();
      if (NumRowsLimit != 0L) hash ^= NumRowsLimit.GetHashCode();
      return hash;
    }
    public override string ToString() {
      return pb::JsonFormatter.Default.Format(this);
    }
    // Serializes set fields using their precomputed wire tags.
    public void WriteTo(pb::CodedOutputStream output) {
      if (TableName.Length != 0) {
        output.WriteRawTag(10);
        output.WriteString(TableName);
      }
      if (targetCase_ == TargetOneofCase.RowKey) {
        output.WriteRawTag(18);
        output.WriteBytes(RowKey);
      }
      if (targetCase_ == TargetOneofCase.RowRange) {
        output.WriteRawTag(26);
        output.WriteMessage(RowRange);
      }
      if (filter_ != null) {
        output.WriteRawTag(42);
        output.WriteMessage(Filter);
      }
      if (AllowRowInterleaving != false) {
        output.WriteRawTag(48);
        output.WriteBool(AllowRowInterleaving);
      }
      if (NumRowsLimit != 0L) {
        output.WriteRawTag(56);
        output.WriteInt64(NumRowsLimit);
      }
    }
    public int CalculateSize() {
      int size = 0;
      if (TableName.Length != 0) {
        size += 1 + pb::CodedOutputStream.ComputeStringSize(TableName);
      }
      if (targetCase_ == TargetOneofCase.RowKey) {
        size += 1 + pb::CodedOutputStream.ComputeBytesSize(RowKey);
      }
      if (targetCase_ == TargetOneofCase.RowRange) {
        size += 1 + pb::CodedOutputStream.ComputeMessageSize(RowRange);
      }
      if (filter_ != null) {
        size += 1 + pb::CodedOutputStream.ComputeMessageSize(Filter);
      }
      if (AllowRowInterleaving != false) {
        size += 1 + 1;
      }
      if (NumRowsLimit != 0L) {
        size += 1 + pb::CodedOutputStream.ComputeInt64Size(NumRowsLimit);
      }
      return size;
    }
    public void MergeFrom(ReadRowsRequest other) {
      if (other == null) {
        return;
      }
      if (other.TableName.Length != 0) {
        TableName = other.TableName;
      }
      if (other.filter_ != null) {
        if (filter_ == null) {
          filter_ = new global::Google.Bigtable.V1.RowFilter();
        }
        Filter.MergeFrom(other.Filter);
      }
      if (other.AllowRowInterleaving != false) {
        AllowRowInterleaving = other.AllowRowInterleaving;
      }
      if (other.NumRowsLimit != 0L) {
        NumRowsLimit = other.NumRowsLimit;
      }
      switch (other.TargetCase) {
        case TargetOneofCase.RowKey:
          RowKey = other.RowKey;
          break;
        case TargetOneofCase.RowRange:
          RowRange = other.RowRange;
          break;
      }
    }
    public void MergeFrom(pb::CodedInputStream input) {
      uint tag;
      while ((tag = input.ReadTag()) != 0) {
        switch(tag) {
          default:
            input.SkipLastField();
            break;
          case 10: {
            TableName = input.ReadString();
            break;
          }
          case 18: {
            RowKey = input.ReadBytes();
            break;
          }
          case 26: {
            // Merge into the existing oneof value when it is already a RowRange.
            global::Google.Bigtable.V1.RowRange subBuilder = new global::Google.Bigtable.V1.RowRange();
            if (targetCase_ == TargetOneofCase.RowRange) {
              subBuilder.MergeFrom(RowRange);
            }
            input.ReadMessage(subBuilder);
            RowRange = subBuilder;
            break;
          }
          case 42: {
            if (filter_ == null) {
              filter_ = new global::Google.Bigtable.V1.RowFilter();
            }
            input.ReadMessage(filter_);
            break;
          }
          case 48: {
            AllowRowInterleaving = input.ReadBool();
            break;
          }
          case 56: {
            NumRowsLimit = input.ReadInt64();
            break;
          }
        }
      }
    }
  }

  // Response message for ReadRows: a row key plus a stream of chunks
  // carrying that row's contents.
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
  public sealed partial class ReadRowsResponse : pb::IMessage<ReadRowsResponse> {
    private static readonly pb::MessageParser<ReadRowsResponse> _parser = new pb::MessageParser<ReadRowsResponse>(() => new ReadRowsResponse());
    public static pb::MessageParser<ReadRowsResponse> Parser { get { return _parser; } }
    public static pbr::MessageDescriptor Descriptor {
      get { return global::Google.Bigtable.V1.BigtableServiceMessages.Descriptor.MessageTypes[1]; }
    }
    pbr::MessageDescriptor pb::IMessage.Descriptor {
      get { return Descriptor; }
    }
    public ReadRowsResponse() {
      OnConstruction();
    }
    partial void OnConstruction();
    public ReadRowsResponse(ReadRowsResponse other) : this() {
      rowKey_ = other.rowKey_;
      chunks_ = other.chunks_.Clone();
    }
    public ReadRowsResponse Clone() {
      return new ReadRowsResponse(this);
    }
    public const int RowKeyFieldNumber = 1;
    private pb::ByteString rowKey_ = pb::ByteString.Empty;
    public pb::ByteString RowKey {
      get { return rowKey_; }
      set { rowKey_ = pb::Preconditions.CheckNotNull(value, "value"); }
    }
    public const int ChunksFieldNumber = 2;
    private static readonly pb::FieldCodec<global::Google.Bigtable.V1.ReadRowsResponse.Types.Chunk> _repeated_chunks_codec =
pb::FieldCodec.ForMessage(18, global::Google.Bigtable.V1.ReadRowsResponse.Types.Chunk.Parser);
    private readonly pbc::RepeatedField<global::Google.Bigtable.V1.ReadRowsResponse.Types.Chunk> chunks_ = new pbc::RepeatedField<global::Google.Bigtable.V1.ReadRowsResponse.Types.Chunk>();
    public pbc::RepeatedField<global::Google.Bigtable.V1.ReadRowsResponse.Types.Chunk> Chunks {
      get { return chunks_; }
    }
    public override bool Equals(object other) {
      return Equals(other as ReadRowsResponse);
    }
    public bool Equals(ReadRowsResponse other) {
      if (ReferenceEquals(other, null)) {
        return false;
      }
      if (ReferenceEquals(other, this)) {
        return true;
      }
      if (RowKey != other.RowKey) return false;
      if(!chunks_.Equals(other.chunks_)) return false;
      return true;
    }
    public override int GetHashCode() {
      int hash = 1;
      if (RowKey.Length != 0) hash ^= RowKey.GetHashCode();
      hash ^= chunks_.GetHashCode();
      return hash;
    }
    public override string ToString() {
      return pb::JsonFormatter.Default.Format(this);
    }
    public void WriteTo(pb::CodedOutputStream output) {
      if (RowKey.Length != 0) {
        output.WriteRawTag(10);
        output.WriteBytes(RowKey);
      }
      chunks_.WriteTo(output, _repeated_chunks_codec);
    }
    public int CalculateSize() {
      int size = 0;
      if (RowKey.Length != 0) {
        size += 1 + pb::CodedOutputStream.ComputeBytesSize(RowKey);
      }
      size += chunks_.CalculateSize(_repeated_chunks_codec);
      return size;
    }
    public void MergeFrom(ReadRowsResponse other) {
      if (other == null) {
        return;
      }
      if (other.RowKey.Length != 0) {
        RowKey = other.RowKey;
      }
      chunks_.Add(other.chunks_);
    }
    public void MergeFrom(pb::CodedInputStream input) {
      uint tag;
      while ((tag = input.ReadTag()) != 0) {
        switch(tag) {
          default:
            input.SkipLastField();
            break;
          case 10: {
            RowKey = input.ReadBytes();
            break;
          }
          case 18: {
            chunks_.AddEntriesFrom(input, _repeated_chunks_codec);
            break;
          }
        }
      }
    }
    #region Nested types
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
    public static partial class Types {
      // One piece of a streamed row.  The payload is a oneof: row contents,
      // a reset marker, or a commit marker; stored untyped in chunk_.
      [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
      public sealed partial class Chunk : pb::IMessage<Chunk> {
        private static readonly pb::MessageParser<Chunk> _parser = new pb::MessageParser<Chunk>(() => new Chunk());
        public static pb::MessageParser<Chunk> Parser { get { return _parser; } }
        public static pbr::MessageDescriptor Descriptor {
          get { return global::Google.Bigtable.V1.ReadRowsResponse.Descriptor.NestedTypes[0]; }
        }
        pbr::MessageDescriptor pb::IMessage.Descriptor {
          get { return Descriptor; }
        }
        public Chunk() {
          OnConstruction();
        }
        partial void OnConstruction();
        public Chunk(Chunk other) : this() {
          switch (other.ChunkCase) {
            case ChunkOneofCase.RowContents:
              RowContents = other.RowContents.Clone();
              break;
            case ChunkOneofCase.ResetRow:
              ResetRow = other.ResetRow;
              break;
            case ChunkOneofCase.CommitRow:
              CommitRow = other.CommitRow;
              break;
          }
        }
        public Chunk Clone() {
          return new Chunk(this);
        }
        public const int RowContentsFieldNumber = 1;
        // Member of the "chunk" oneof; assigning null clears the oneof.
        public global::Google.Bigtable.V1.Family RowContents {
          get { return chunkCase_ == ChunkOneofCase.RowContents ? (global::Google.Bigtable.V1.Family) chunk_ : null; }
          set {
            chunk_ = value;
            chunkCase_ = value == null ? ChunkOneofCase.None : ChunkOneofCase.RowContents;
          }
        }
        public const int ResetRowFieldNumber = 2;
        public bool ResetRow {
          get { return chunkCase_ == ChunkOneofCase.ResetRow ? (bool) chunk_ : false; }
          set {
            chunk_ = value;
            chunkCase_ = ChunkOneofCase.ResetRow;
          }
        }
        public const int CommitRowFieldNumber = 3;
        public bool CommitRow {
          get { return chunkCase_ == ChunkOneofCase.CommitRow ? (bool) chunk_ : false; }
          set {
            chunk_ = value;
            chunkCase_ = ChunkOneofCase.CommitRow;
          }
        }
        // Backing store for the "chunk" oneof.
        private object chunk_;
        public enum ChunkOneofCase {
          None = 0,
          RowContents = 1,
          ResetRow = 2,
          CommitRow = 3,
        }
        private ChunkOneofCase chunkCase_ = ChunkOneofCase.None;
        public ChunkOneofCase ChunkCase {
          get { return chunkCase_; }
        }
        public void ClearChunk() {
          chunkCase_ = ChunkOneofCase.None;
          chunk_ = null;
        }
        public override bool Equals(object other) {
          return Equals(other as Chunk);
        }
        public bool Equals(Chunk other) {
          if (ReferenceEquals(other, null)) {
            return false;
          }
          if (ReferenceEquals(other, this)) {
            return true;
          }
          if (!object.Equals(RowContents, other.RowContents)) return false;
          if (ResetRow != other.ResetRow) return false;
          if (CommitRow != other.CommitRow) return false;
          return true;
        }
        public override int GetHashCode() {
          int hash = 1;
          if (chunkCase_ == ChunkOneofCase.RowContents) hash ^= RowContents.GetHashCode();
          if (chunkCase_ == ChunkOneofCase.ResetRow) hash ^= ResetRow.GetHashCode();
          if (chunkCase_ == ChunkOneofCase.CommitRow) hash ^= CommitRow.GetHashCode();
          return hash;
        }
        public override string ToString() {
          return pb::JsonFormatter.Default.Format(this);
        }
        public void WriteTo(pb::CodedOutputStream output) {
          if (chunkCase_ == ChunkOneofCase.RowContents) {
            output.WriteRawTag(10);
            output.WriteMessage(RowContents);
          }
          if (chunkCase_ == ChunkOneofCase.ResetRow) {
            output.WriteRawTag(16);
            output.WriteBool(ResetRow);
          }
          if (chunkCase_ == ChunkOneofCase.CommitRow) {
            output.WriteRawTag(24);
            output.WriteBool(CommitRow);
          }
        }
        public int CalculateSize() {
          int size = 0;
          if (chunkCase_ == ChunkOneofCase.RowContents) {
            size += 1 + pb::CodedOutputStream.ComputeMessageSize(RowContents);
          }
          if (chunkCase_ == ChunkOneofCase.ResetRow) {
            size += 1 + 1;
          }
          if (chunkCase_ == ChunkOneofCase.CommitRow) {
            size += 1 + 1;
          }
          return size;
        }
        public void MergeFrom(Chunk other) {
          if (other == null) {
            return;
          }
          switch (other.ChunkCase) {
            case ChunkOneofCase.RowContents:
              RowContents = other.RowContents;
              break;
            case ChunkOneofCase.ResetRow:
              ResetRow = other.ResetRow;
              break;
            case ChunkOneofCase.CommitRow:
              CommitRow = other.CommitRow;
              break;
          }
        }
        public void MergeFrom(pb::CodedInputStream input) {
          uint tag;
          while ((tag = input.ReadTag()) != 0) {
            switch(tag) {
              default:
                input.SkipLastField();
                break;
              case 10: {
                // Merge into the existing Family when the oneof already holds one.
                global::Google.Bigtable.V1.Family subBuilder = new global::Google.Bigtable.V1.Family();
                if (chunkCase_ == ChunkOneofCase.RowContents) {
                  subBuilder.MergeFrom(RowContents);
                }
                input.ReadMessage(subBuilder);
                RowContents = subBuilder;
                break;
              }
              case 16: {
                ResetRow = input.ReadBool();
                break;
              }
              case 24: {
                CommitRow = input.ReadBool();
                break;
              }
            }
          }
        }
      }
    }
    #endregion

  }

  // Request message for SampleRowKeys: just the table to sample.
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
  public sealed partial class SampleRowKeysRequest : pb::IMessage<SampleRowKeysRequest> {
    private static readonly pb::MessageParser<SampleRowKeysRequest> _parser = new pb::MessageParser<SampleRowKeysRequest>(() => new SampleRowKeysRequest());
    public static pb::MessageParser<SampleRowKeysRequest> Parser { get { return _parser; } }
    public static pbr::MessageDescriptor Descriptor {
      get { return global::Google.Bigtable.V1.BigtableServiceMessages.Descriptor.MessageTypes[2]; }
    }
    pbr::MessageDescriptor pb::IMessage.Descriptor {
      get { return Descriptor; }
    }
    public SampleRowKeysRequest() {
      OnConstruction();
    }
    partial void OnConstruction();
    public SampleRowKeysRequest(SampleRowKeysRequest other) : this() {
      tableName_ = other.tableName_;
    }
    public SampleRowKeysRequest Clone() {
      return new SampleRowKeysRequest(this);
    }
    public const int TableNameFieldNumber = 1;
    private string tableName_ = "";
    public string TableName {
      get { return tableName_; }
      set { tableName_ = pb::Preconditions.CheckNotNull(value, "value"); }
    }
    public override bool Equals(object other) {
      return Equals(other as SampleRowKeysRequest);
    }
    public bool Equals(SampleRowKeysRequest other) {
      if (ReferenceEquals(other, null)) {
        return false;
      }
      if (ReferenceEquals(other, this)) {
        return true;
      }
      if (TableName !=
other.TableName) return false;
      return true;
    }
    public override int GetHashCode() {
      int hash = 1;
      if (TableName.Length != 0) hash ^= TableName.GetHashCode();
      return hash;
    }
    public override string ToString() {
      return pb::JsonFormatter.Default.Format(this);
    }
    public void WriteTo(pb::CodedOutputStream output) {
      if (TableName.Length != 0) {
        output.WriteRawTag(10);
        output.WriteString(TableName);
      }
    }
    public int CalculateSize() {
      int size = 0;
      if (TableName.Length != 0) {
        size += 1 + pb::CodedOutputStream.ComputeStringSize(TableName);
      }
      return size;
    }
    public void MergeFrom(SampleRowKeysRequest other) {
      if (other == null) {
        return;
      }
      if (other.TableName.Length != 0) {
        TableName = other.TableName;
      }
    }
    public void MergeFrom(pb::CodedInputStream input) {
      uint tag;
      while ((tag = input.ReadTag()) != 0) {
        switch(tag) {
          default:
            input.SkipLastField();
            break;
          case 10: {
            TableName = input.ReadString();
            break;
          }
        }
      }
    }
  }

  // Response message for SampleRowKeys: one sampled row key and its
  // approximate byte offset within the table.
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
  public sealed partial class SampleRowKeysResponse : pb::IMessage<SampleRowKeysResponse> {
    private static readonly pb::MessageParser<SampleRowKeysResponse> _parser = new pb::MessageParser<SampleRowKeysResponse>(() => new SampleRowKeysResponse());
    public static pb::MessageParser<SampleRowKeysResponse> Parser { get { return _parser; } }
    public static pbr::MessageDescriptor Descriptor {
      get { return global::Google.Bigtable.V1.BigtableServiceMessages.Descriptor.MessageTypes[3]; }
    }
    pbr::MessageDescriptor pb::IMessage.Descriptor {
      get { return Descriptor; }
    }
    public SampleRowKeysResponse() {
      OnConstruction();
    }
    partial void OnConstruction();
    public SampleRowKeysResponse(SampleRowKeysResponse other) : this() {
      rowKey_ = other.rowKey_;
      offsetBytes_ = other.offsetBytes_;
    }
    public SampleRowKeysResponse Clone() {
      return new SampleRowKeysResponse(this);
    }
    public const int RowKeyFieldNumber = 1;
    private pb::ByteString rowKey_ = pb::ByteString.Empty;
    public pb::ByteString RowKey {
      get { return rowKey_; }
      set { rowKey_ = pb::Preconditions.CheckNotNull(value, "value"); }
    }
    public const int OffsetBytesFieldNumber = 2;
    private long offsetBytes_;
    public long OffsetBytes {
      get { return offsetBytes_; }
      set { offsetBytes_ = value; }
    }
    public override bool Equals(object other) {
      return Equals(other as SampleRowKeysResponse);
    }
    public bool Equals(SampleRowKeysResponse other) {
      if (ReferenceEquals(other, null)) {
        return false;
      }
      if (ReferenceEquals(other, this)) {
        return true;
      }
      if (RowKey != other.RowKey) return false;
      if (OffsetBytes != other.OffsetBytes) return false;
      return true;
    }
    public override int GetHashCode() {
      int hash = 1;
      if (RowKey.Length != 0) hash ^= RowKey.GetHashCode();
      if (OffsetBytes != 0L) hash ^= OffsetBytes.GetHashCode();
      return hash;
    }
    public override string ToString() {
      return pb::JsonFormatter.Default.Format(this);
    }
    public void WriteTo(pb::CodedOutputStream output) {
      if (RowKey.Length != 0) {
        output.WriteRawTag(10);
        output.WriteBytes(RowKey);
      }
      if (OffsetBytes != 0L) {
        output.WriteRawTag(16);
        output.WriteInt64(OffsetBytes);
      }
    }
    public int CalculateSize() {
      int size = 0;
      if (RowKey.Length != 0) {
        size += 1 + pb::CodedOutputStream.ComputeBytesSize(RowKey);
      }
      if (OffsetBytes != 0L) {
        size += 1 + pb::CodedOutputStream.ComputeInt64Size(OffsetBytes);
      }
      return size;
    }
    public void MergeFrom(SampleRowKeysResponse other) {
      if (other == null) {
        return;
      }
      if (other.RowKey.Length != 0) {
        RowKey = other.RowKey;
      }
      if (other.OffsetBytes != 0L) {
        OffsetBytes = other.OffsetBytes;
      }
    }
    public void MergeFrom(pb::CodedInputStream input) {
      uint tag;
      while ((tag = input.ReadTag()) != 0) {
        switch(tag) {
          default:
            input.SkipLastField();
            break;
          case 10: {
            RowKey = input.ReadBytes();
            break;
          }
          case 16: {
            OffsetBytes = input.ReadInt64();
            break;
          }
        }
      }
    }
  }

  // Request message for MutateRow: applies a list of mutations to one row.
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
  public sealed partial class MutateRowRequest : pb::IMessage<MutateRowRequest> {
    private static readonly pb::MessageParser<MutateRowRequest> _parser = new pb::MessageParser<MutateRowRequest>(() => new MutateRowRequest());
    public static pb::MessageParser<MutateRowRequest> Parser { get { return _parser; } }
    public static pbr::MessageDescriptor Descriptor {
      get { return global::Google.Bigtable.V1.BigtableServiceMessages.Descriptor.MessageTypes[4]; }
    }
    pbr::MessageDescriptor pb::IMessage.Descriptor {
      get { return Descriptor; }
    }
    public MutateRowRequest() {
      OnConstruction();
    }
    partial void OnConstruction();
    public MutateRowRequest(MutateRowRequest other) : this() {
      tableName_ = other.tableName_;
      rowKey_ = other.rowKey_;
      mutations_ = other.mutations_.Clone();
    }
    public MutateRowRequest Clone() {
      return new MutateRowRequest(this);
    }
    public const int TableNameFieldNumber = 1;
    private string tableName_ = "";
    public string TableName {
      get { return tableName_; }
      set { tableName_ = pb::Preconditions.CheckNotNull(value, "value"); }
    }
    public const int RowKeyFieldNumber = 2;
    private pb::ByteString rowKey_ = pb::ByteString.Empty;
    public pb::ByteString RowKey {
      get { return rowKey_; }
      set { rowKey_ = pb::Preconditions.CheckNotNull(value, "value"); }
    }
    public const int MutationsFieldNumber = 3;
    private static readonly pb::FieldCodec<global::Google.Bigtable.V1.Mutation> _repeated_mutations_codec = pb::FieldCodec.ForMessage(26, global::Google.Bigtable.V1.Mutation.Parser);
    private readonly pbc::RepeatedField<global::Google.Bigtable.V1.Mutation> mutations_ = new pbc::RepeatedField<global::Google.Bigtable.V1.Mutation>();
    public pbc::RepeatedField<global::Google.Bigtable.V1.Mutation> Mutations {
      get { return mutations_; }
    }
    public override bool Equals(object other) {
      return Equals(other as MutateRowRequest);
    }
    public bool Equals(MutateRowRequest other) {
      if (ReferenceEquals(other, null)) {
        return false;
      }
      if (ReferenceEquals(other, this)) {
        return true;
      }
      if (TableName != other.TableName) return false;
      if (RowKey != other.RowKey) return false;
      if(!mutations_.Equals(other.mutations_)) return false;
      return true;
    }
    public override int GetHashCode() {
      int hash = 1;
      if (TableName.Length != 0) hash ^= TableName.GetHashCode();
      if (RowKey.Length != 0) hash ^= RowKey.GetHashCode();
      hash ^= mutations_.GetHashCode();
      return hash;
    }
    public override string ToString() {
      return pb::JsonFormatter.Default.Format(this);
    }
    public void WriteTo(pb::CodedOutputStream output) {
      if (TableName.Length != 0) {
        output.WriteRawTag(10);
        output.WriteString(TableName);
      }
      if (RowKey.Length != 0) {
        output.WriteRawTag(18);
        output.WriteBytes(RowKey);
      }
      mutations_.WriteTo(output, _repeated_mutations_codec);
    }
    public int CalculateSize() {
      int size = 0;
      if (TableName.Length != 0) {
        size += 1 + pb::CodedOutputStream.ComputeStringSize(TableName);
      }
      if (RowKey.Length != 0) {
        size += 1 + pb::CodedOutputStream.ComputeBytesSize(RowKey);
      }
      size += mutations_.CalculateSize(_repeated_mutations_codec);
      return size;
    }
    public void MergeFrom(MutateRowRequest other) {
      if (other == null) {
        return;
      }
      if (other.TableName.Length != 0) {
        TableName = other.TableName;
      }
      if (other.RowKey.Length != 0) {
        RowKey = other.RowKey;
      }
      mutations_.Add(other.mutations_);
    }
    public void MergeFrom(pb::CodedInputStream input) {
      uint tag;
      while ((tag = input.ReadTag()) != 0) {
        switch(tag) {
          default:
            input.SkipLastField();
            break;
          case 10: {
            TableName = input.ReadString();
            break;
          }
          case 18: {
            RowKey = input.ReadBytes();
            break;
          }
          case 26: {
            mutations_.AddEntriesFrom(input, _repeated_mutations_codec);
            break;
          }
        }
      }
    }
  }

  // Request message for CheckAndMutateRow: conditionally applies
  // true_mutations or false_mutations based on predicate_filter.
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
  public sealed partial class CheckAndMutateRowRequest : pb::IMessage<CheckAndMutateRowRequest> {
    private static readonly pb::MessageParser<CheckAndMutateRowRequest> _parser = new pb::MessageParser<CheckAndMutateRowRequest>(() => new CheckAndMutateRowRequest());
    public static pb::MessageParser<CheckAndMutateRowRequest> Parser { get { return _parser; } }
    public static pbr::MessageDescriptor Descriptor {
      get { return global::Google.Bigtable.V1.BigtableServiceMessages.Descriptor.MessageTypes[5]; }
    }
pbr::MessageDescriptor pb::IMessage.Descriptor {
      get { return Descriptor; }
    }
    public CheckAndMutateRowRequest() {
      OnConstruction();
    }
    partial void OnConstruction();
    public CheckAndMutateRowRequest(CheckAndMutateRowRequest other) : this() {
      tableName_ = other.tableName_;
      rowKey_ = other.rowKey_;
      PredicateFilter = other.predicateFilter_ != null ? other.PredicateFilter.Clone() : null;
      trueMutations_ = other.trueMutations_.Clone();
      falseMutations_ = other.falseMutations_.Clone();
    }
    public CheckAndMutateRowRequest Clone() {
      return new CheckAndMutateRowRequest(this);
    }
    public const int TableNameFieldNumber = 1;
    private string tableName_ = "";
    public string TableName {
      get { return tableName_; }
      set { tableName_ = pb::Preconditions.CheckNotNull(value, "value"); }
    }
    public const int RowKeyFieldNumber = 2;
    private pb::ByteString rowKey_ = pb::ByteString.Empty;
    public pb::ByteString RowKey {
      get { return rowKey_; }
      set { rowKey_ = pb::Preconditions.CheckNotNull(value, "value"); }
    }
    // Note: field number 6, out of declaration order in the .proto.
    public const int PredicateFilterFieldNumber = 6;
    private global::Google.Bigtable.V1.RowFilter predicateFilter_;
    public global::Google.Bigtable.V1.RowFilter PredicateFilter {
      get { return predicateFilter_; }
      set { predicateFilter_ = value; }
    }
    public const int TrueMutationsFieldNumber = 4;
    private static readonly pb::FieldCodec<global::Google.Bigtable.V1.Mutation> _repeated_trueMutations_codec = pb::FieldCodec.ForMessage(34, global::Google.Bigtable.V1.Mutation.Parser);
    private readonly pbc::RepeatedField<global::Google.Bigtable.V1.Mutation> trueMutations_ = new pbc::RepeatedField<global::Google.Bigtable.V1.Mutation>();
    public pbc::RepeatedField<global::Google.Bigtable.V1.Mutation> TrueMutations {
      get { return trueMutations_; }
    }
    public const int FalseMutationsFieldNumber = 5;
    private static readonly pb::FieldCodec<global::Google.Bigtable.V1.Mutation> _repeated_falseMutations_codec = pb::FieldCodec.ForMessage(42, global::Google.Bigtable.V1.Mutation.Parser);
    private readonly pbc::RepeatedField<global::Google.Bigtable.V1.Mutation> falseMutations_ = new pbc::RepeatedField<global::Google.Bigtable.V1.Mutation>();
    public pbc::RepeatedField<global::Google.Bigtable.V1.Mutation> FalseMutations {
      get { return falseMutations_; }
    }
    public override bool Equals(object other) {
      return Equals(other as CheckAndMutateRowRequest);
    }
    public bool Equals(CheckAndMutateRowRequest other) {
      if (ReferenceEquals(other, null)) {
        return false;
      }
      if (ReferenceEquals(other, this)) {
        return true;
      }
      if (TableName != other.TableName) return false;
      if (RowKey != other.RowKey) return false;
      if (!object.Equals(PredicateFilter, other.PredicateFilter)) return false;
      if(!trueMutations_.Equals(other.trueMutations_)) return false;
      if(!falseMutations_.Equals(other.falseMutations_)) return false;
      return true;
    }
    public override int GetHashCode() {
      int hash = 1;
      if (TableName.Length != 0) hash ^= TableName.GetHashCode();
      if (RowKey.Length != 0) hash ^= RowKey.GetHashCode();
      if (predicateFilter_ != null) hash ^= PredicateFilter.GetHashCode();
      hash ^= trueMutations_.GetHashCode();
      hash ^= falseMutations_.GetHashCode();
      return hash;
    }
    public override string ToString() {
      return pb::JsonFormatter.Default.Format(this);
    }
    public void WriteTo(pb::CodedOutputStream output) {
      if (TableName.Length != 0) {
        output.WriteRawTag(10);
        output.WriteString(TableName);
      }
      if (RowKey.Length != 0) {
        output.WriteRawTag(18);
        output.WriteBytes(RowKey);
      }
      trueMutations_.WriteTo(output, _repeated_trueMutations_codec);
      falseMutations_.WriteTo(output, _repeated_falseMutations_codec);
      // predicate_filter is field 6, so it is written after fields 4 and 5.
      if (predicateFilter_ != null) {
        output.WriteRawTag(50);
        output.WriteMessage(PredicateFilter);
      }
    }
    public int CalculateSize() {
      int size = 0;
      if (TableName.Length != 0) {
        size += 1 + pb::CodedOutputStream.ComputeStringSize(TableName);
      }
      if (RowKey.Length != 0) {
        size += 1 + pb::CodedOutputStream.ComputeBytesSize(RowKey);
      }
      if (predicateFilter_ != null) {
        size += 1 + pb::CodedOutputStream.ComputeMessageSize(PredicateFilter);
      }
      size += trueMutations_.CalculateSize(_repeated_trueMutations_codec);
      size += falseMutations_.CalculateSize(_repeated_falseMutations_codec);
      return size;
    }
    public void MergeFrom(CheckAndMutateRowRequest other) {
      if (other == null) {
        return;
      }
      if (other.TableName.Length != 0) {
        TableName = other.TableName;
      }
      if (other.RowKey.Length != 0) {
        RowKey = other.RowKey;
      }
      if (other.predicateFilter_ != null) {
        if (predicateFilter_ == null) {
          predicateFilter_ = new global::Google.Bigtable.V1.RowFilter();
        }
        PredicateFilter.MergeFrom(other.PredicateFilter);
      }
      trueMutations_.Add(other.trueMutations_);
      falseMutations_.Add(other.falseMutations_);
    }
    public void MergeFrom(pb::CodedInputStream input) {
      uint tag;
      while ((tag = input.ReadTag()) != 0) {
        switch(tag) {
          default:
            input.SkipLastField();
            break;
          case 10: {
            TableName = input.ReadString();
            break;
          }
          case 18: {
            RowKey = input.ReadBytes();
            break;
          }
          case 34: {
            trueMutations_.AddEntriesFrom(input, _repeated_trueMutations_codec);
            break;
          }
          case 42: {
            falseMutations_.AddEntriesFrom(input, _repeated_falseMutations_codec);
            break;
          }
          case 50: {
            if (predicateFilter_ == null) {
              predicateFilter_ = new global::Google.Bigtable.V1.RowFilter();
            }
            input.ReadMessage(predicateFilter_);
            break;
          }
        }
      }
    }
  }

  // Response message for CheckAndMutateRow: whether the predicate matched.
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
  public sealed partial class CheckAndMutateRowResponse : pb::IMessage<CheckAndMutateRowResponse> {
    private static readonly pb::MessageParser<CheckAndMutateRowResponse> _parser = new pb::MessageParser<CheckAndMutateRowResponse>(() => new CheckAndMutateRowResponse());
    public static pb::MessageParser<CheckAndMutateRowResponse> Parser { get { return _parser; } }
    public static pbr::MessageDescriptor Descriptor {
      get { return global::Google.Bigtable.V1.BigtableServiceMessages.Descriptor.MessageTypes[6]; }
    }
    pbr::MessageDescriptor pb::IMessage.Descriptor {
      get { return Descriptor; }
    }
    public CheckAndMutateRowResponse() {
      OnConstruction();
    }
    partial void OnConstruction();
    public CheckAndMutateRowResponse(CheckAndMutateRowResponse other) : this() {
      predicateMatched_ = other.predicateMatched_;
    }
    public CheckAndMutateRowResponse Clone() {
      return new CheckAndMutateRowResponse(this);
    }
    public const int PredicateMatchedFieldNumber = 1;
    private bool predicateMatched_;
    public bool PredicateMatched {
      get { return predicateMatched_; }
      set { predicateMatched_ = value; }
    }
    public override bool Equals(object other) {
      return Equals(other as CheckAndMutateRowResponse);
    }
    public bool Equals(CheckAndMutateRowResponse other) {
      if (ReferenceEquals(other, null)) {
        return false;
      }
      if (ReferenceEquals(other, this)) {
        return true;
      }
      if (PredicateMatched != other.PredicateMatched) return false;
      return true;
    }
    public override int GetHashCode() {
      int hash = 1;
      if (PredicateMatched != false) hash ^= PredicateMatched.GetHashCode();
      return hash;
    }
    public override string ToString() {
      return pb::JsonFormatter.Default.Format(this);
    }
    public void WriteTo(pb::CodedOutputStream output) {
      if (PredicateMatched != false) {
        output.WriteRawTag(8);
        output.WriteBool(PredicateMatched);
      }
    }
    public int CalculateSize() {
      int size = 0;
      if (PredicateMatched != false) {
        size += 1 + 1;
      }
      return size;
    }
    public void MergeFrom(CheckAndMutateRowResponse other) {
      if (other == null) {
        return;
      }
      if (other.PredicateMatched != false) {
        PredicateMatched = other.PredicateMatched;
      }
    }
    public void MergeFrom(pb::CodedInputStream input) {
      uint tag;
      while ((tag = input.ReadTag()) != 0) {
        switch(tag) {
          default:
            input.SkipLastField();
            break;
          case 8: {
            PredicateMatched = input.ReadBool();
            break;
          }
        }
      }
    }
  }

  // Request message for ReadModifyWriteRow: applies read/modify/write rules
  // to a row.  (Definition continues beyond this chunk.)
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
  public sealed partial class ReadModifyWriteRowRequest : pb::IMessage<ReadModifyWriteRowRequest> {
    private static readonly pb::MessageParser<ReadModifyWriteRowRequest> _parser = new pb::MessageParser<ReadModifyWriteRowRequest>(() => new ReadModifyWriteRowRequest());
    public static pb::MessageParser<ReadModifyWriteRowRequest> Parser { get { return _parser; } }
    public static pbr::MessageDescriptor Descriptor {
      get { return global::Google.Bigtable.V1.BigtableServiceMessages.Descriptor.MessageTypes[7]; }
    }
    pbr::MessageDescriptor pb::IMessage.Descriptor {
      get { return Descriptor; }
    }
    public ReadModifyWriteRowRequest() {
      OnConstruction();
    }
    partial void OnConstruction();
    public ReadModifyWriteRowRequest(ReadModifyWriteRowRequest other) : this() {
      tableName_ = other.tableName_;
      rowKey_ = other.rowKey_;
      rules_ = other.rules_.Clone();
    }
    public ReadModifyWriteRowRequest Clone() {
      return new ReadModifyWriteRowRequest(this);
    }
    public const int TableNameFieldNumber = 1;
    private string tableName_ = "";
    public string TableName {
      get { return tableName_; }
      set { tableName_ = pb::Preconditions.CheckNotNull(value, "value"); }
    }
    public const int RowKeyFieldNumber = 2;
    private pb::ByteString rowKey_ = pb::ByteString.Empty;
    public pb::ByteString RowKey {
      get { return rowKey_; }
      set { rowKey_ = pb::Preconditions.CheckNotNull(value, "value"); }
    }
    public const int RulesFieldNumber = 3;
    private static readonly pb::FieldCodec<global::Google.Bigtable.V1.ReadModifyWriteRule> _repeated_rules_codec = pb::FieldCodec.ForMessage(26, global::Google.Bigtable.V1.ReadModifyWriteRule.Parser);
    private readonly pbc::RepeatedField<global::Google.Bigtable.V1.ReadModifyWriteRule> rules_ = new pbc::RepeatedField<global::Google.Bigtable.V1.ReadModifyWriteRule>();
    public pbc::RepeatedField<global::Google.Bigtable.V1.ReadModifyWriteRule> Rules {
      get { return rules_; }
    }
    public override bool Equals(object other) {
      return Equals(other as ReadModifyWriteRowRequest);
    }
    public bool Equals(ReadModifyWriteRowRequest other) {
      if (ReferenceEquals(other, null)) {
        return false;
      }
      if (ReferenceEquals(other, this)) {
        return true;
      }
      if (TableName != other.TableName) return false;
      if (RowKey != other.RowKey) return false;
      if(!rules_.Equals(other.rules_)) return false;
      return true;
    }
    public override int GetHashCode() {
      int hash = 1;
      if (TableName.Length != 0) hash ^=
TableName.GetHashCode(); if (RowKey.Length != 0) hash ^= RowKey.GetHashCode(); hash ^= rules_.GetHashCode(); return hash; } public override string ToString() { return pb::JsonFormatter.Default.Format(this); } public void WriteTo(pb::CodedOutputStream output) { if (TableName.Length != 0) { output.WriteRawTag(10); output.WriteString(TableName); } if (RowKey.Length != 0) { output.WriteRawTag(18); output.WriteBytes(RowKey); } rules_.WriteTo(output, _repeated_rules_codec); } public int CalculateSize() { int size = 0; if (TableName.Length != 0) { size += 1 + pb::CodedOutputStream.ComputeStringSize(TableName); } if (RowKey.Length != 0) { size += 1 + pb::CodedOutputStream.ComputeBytesSize(RowKey); } size += rules_.CalculateSize(_repeated_rules_codec); return size; } public void MergeFrom(ReadModifyWriteRowRequest other) { if (other == null) { return; } if (other.TableName.Length != 0) { TableName = other.TableName; } if (other.RowKey.Length != 0) { RowKey = other.RowKey; } rules_.Add(other.rules_); } public void MergeFrom(pb::CodedInputStream input) { uint tag; while ((tag = input.ReadTag()) != 0) { switch(tag) { default: input.SkipLastField(); break; case 10: { TableName = input.ReadString(); break; } case 18: { RowKey = input.ReadBytes(); break; } case 26: { rules_.AddEntriesFrom(input, _repeated_rules_codec); break; } } } } } #endregion } #endregion Designer generated code
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Reflection;

namespace MvcApplication7.Areas.HelpPage
{
    /// <summary>
    /// This class will create an object of a given type and populate it with sample data.
    /// </summary>
    public class ObjectGenerator
    {
        // Number of elements generated for arrays, collections, dictionaries, and queryables.
        private const int DefaultCollectionSize = 3;
        private readonly SimpleTypeObjectGenerator SimpleObjectGenerator = new SimpleTypeObjectGenerator();

        /// <summary>
        /// Generates an object for a given type. The type needs to be public, have a public default constructor and settable public properties/fields. Currently it supports the following types:
        /// Simple types: <see cref="int"/>, <see cref="string"/>, <see cref="Enum"/>, <see cref="DateTime"/>, <see cref="Uri"/>, etc.
        /// Complex types: POCO types.
        /// Nullables: <see cref="Nullable{T}"/>.
        /// Arrays: arrays of simple types or complex types.
        /// Key value pairs: <see cref="KeyValuePair{TKey,TValue}"/>
        /// Tuples: <see cref="Tuple{T1}"/>, <see cref="Tuple{T1,T2}"/>, etc
        /// Dictionaries: <see cref="IDictionary{TKey,TValue}"/> or anything deriving from <see cref="IDictionary{TKey,TValue}"/>.
        /// Collections: <see cref="IList{T}"/>, <see cref="IEnumerable{T}"/>, <see cref="ICollection{T}"/>, <see cref="IList"/>, <see cref="IEnumerable"/>, <see cref="ICollection"/> or anything deriving from <see cref="ICollection{T}"/> or <see cref="IList"/>.
        /// Queryables: <see cref="IQueryable"/>, <see cref="IQueryable{T}"/>.
        /// </summary>
        /// <param name="type">The type.</param>
        /// <returns>An object of the given type.</returns>
        public object GenerateObject(Type type)
        {
            return GenerateObject(type, new Dictionary<Type, object>());
        }

        // Core dispatcher. createdObjectReferences caches complex objects already built on this
        // call chain so circular references terminate instead of recursing forever.
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Here we just want to return null if anything goes wrong.")]
        private object GenerateObject(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            try
            {
                if (SimpleTypeObjectGenerator.CanGenerateObject(type))
                {
                    return SimpleObjectGenerator.GenerateObject(type);
                }

                if (type.IsArray)
                {
                    return GenerateArray(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type.IsGenericType)
                {
                    return GenerateGenericType(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type == typeof(IDictionary))
                {
                    // Non-generic IDictionary is materialized as a Hashtable.
                    return GenerateDictionary(typeof(Hashtable), DefaultCollectionSize, createdObjectReferences);
                }

                if (typeof(IDictionary).IsAssignableFrom(type))
                {
                    return GenerateDictionary(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type == typeof(IList) || type == typeof(IEnumerable) || type == typeof(ICollection))
                {
                    // Non-generic collection interfaces are materialized as an ArrayList.
                    return GenerateCollection(typeof(ArrayList), DefaultCollectionSize, createdObjectReferences);
                }

                if (typeof(IList).IsAssignableFrom(type))
                {
                    return GenerateCollection(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type == typeof(IQueryable))
                {
                    return GenerateQueryable(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type.IsEnum)
                {
                    return GenerateEnum(type);
                }

                if (type.IsPublic || type.IsNestedPublic)
                {
                    return GenerateComplexObject(type, createdObjectReferences);
                }
            }
            catch
            {
                // Returns null if anything fails
                return null;
            }

            return null;
        }

        // Handles all generic types: Nullable<T>, KeyValuePair<,>, Tuple<...>, generic
        // collection/dictionary interfaces, and falls back to a complex object.
        private static object GenerateGenericType(Type type, int collectionSize, Dictionary<Type, object> createdObjectReferences)
        {
            Type genericTypeDefinition = type.GetGenericTypeDefinition();
            if (genericTypeDefinition == typeof(Nullable<>))
            {
                return GenerateNullable(type, createdObjectReferences);
            }

            if (genericTypeDefinition == typeof(KeyValuePair<,>))
            {
                return GenerateKeyValuePair(type, createdObjectReferences);
            }

            if (IsTuple(genericTypeDefinition))
            {
                return GenerateTuple(type, createdObjectReferences);
            }

            Type[] genericArguments = type.GetGenericArguments();
            if (genericArguments.Length == 1)
            {
                if (genericTypeDefinition == typeof(IList<>) || genericTypeDefinition == typeof(IEnumerable<>) || genericTypeDefinition == typeof(ICollection<>))
                {
                    // Interface requested: build a concrete List<T> behind it.
                    Type collectionType = typeof(List<>).MakeGenericType(genericArguments);
                    return GenerateCollection(collectionType, collectionSize, createdObjectReferences);
                }

                if (genericTypeDefinition == typeof(IQueryable<>))
                {
                    return GenerateQueryable(type, collectionSize, createdObjectReferences);
                }

                Type closedCollectionType = typeof(ICollection<>).MakeGenericType(genericArguments[0]);
                if (closedCollectionType.IsAssignableFrom(type))
                {
                    return GenerateCollection(type, collectionSize, createdObjectReferences);
                }
            }

            if (genericArguments.Length == 2)
            {
                if (genericTypeDefinition == typeof(IDictionary<,>))
                {
                    Type dictionaryType = typeof(Dictionary<,>).MakeGenericType(genericArguments);
                    return GenerateDictionary(dictionaryType, collectionSize, createdObjectReferences);
                }

                Type closedDictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments[0], genericArguments[1]);
                if (closedDictionaryType.IsAssignableFrom(type))
                {
                    return GenerateDictionary(type, collectionSize, createdObjectReferences);
                }
            }

            if (type.IsPublic || type.IsNestedPublic)
            {
                return GenerateComplexObject(type, createdObjectReferences);
            }

            return null;
        }

        // Builds a Tuple<...> by generating each type argument; null if no element could be created.
        private static object GenerateTuple(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            Type[] genericArgs = type.GetGenericArguments();
            object[] parameterValues = new object[genericArgs.Length];
            bool failedToCreateTuple = true;
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < genericArgs.Length; i++)
            {
                parameterValues[i] = objectGenerator.GenerateObject(genericArgs[i], createdObjectReferences);
                failedToCreateTuple &= parameterValues[i] == null;
            }
            if (failedToCreateTuple)
            {
                return null;
            }
            object result = Activator.CreateInstance(type, parameterValues);
            return result;
        }

        // True for the eight Tuple<> arities.
        private static bool IsTuple(Type genericTypeDefinition)
        {
            return genericTypeDefinition == typeof(Tuple<>) ||
                genericTypeDefinition == typeof(Tuple<,>) ||
                genericTypeDefinition == typeof(Tuple<,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,,,,>);
        }

        // Builds a KeyValuePair<K,V>; null only if BOTH key and value generation failed.
        private static object GenerateKeyValuePair(Type keyValuePairType, Dictionary<Type, object> createdObjectReferences)
        {
            Type[] genericArgs = keyValuePairType.GetGenericArguments();
            Type typeK = genericArgs[0];
            Type typeV = genericArgs[1];
            ObjectGenerator objectGenerator = new ObjectGenerator();
            object keyObject = objectGenerator.GenerateObject(typeK, createdObjectReferences);
            object valueObject = objectGenerator.GenerateObject(typeV, createdObjectReferences);
            if (keyObject == null && valueObject == null)
            {
                // Failed to create key and values
                return null;
            }
            object result = Activator.CreateInstance(keyValuePairType, keyObject, valueObject);
            return result;
        }

        // Builds a T[size]; null if every element generated null.
        private static object GenerateArray(Type arrayType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            Type type = arrayType.GetElementType();
            Array result = Array.CreateInstance(type, size);
            bool areAllElementsNull = true;
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object element = objectGenerator.GenerateObject(type, createdObjectReferences);
                result.SetValue(element, i);
                areAllElementsNull &= element == null;
            }

            if (areAllElementsNull)
            {
                return null;
            }

            return result;
        }

        // Populates a dictionary via reflection; duplicate generated keys are skipped
        // using the type's Contains/ContainsKey method.
        private static object GenerateDictionary(Type dictionaryType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            Type typeK = typeof(object);
            Type typeV = typeof(object);
            if (dictionaryType.IsGenericType)
            {
                Type[] genericArgs = dictionaryType.GetGenericArguments();
                typeK = genericArgs[0];
                typeV = genericArgs[1];
            }

            object result = Activator.CreateInstance(dictionaryType);
            MethodInfo addMethod = dictionaryType.GetMethod("Add") ?? dictionaryType.GetMethod("TryAdd");
            MethodInfo containsMethod = dictionaryType.GetMethod("Contains") ?? dictionaryType.GetMethod("ContainsKey");
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object newKey = objectGenerator.GenerateObject(typeK, createdObjectReferences);
                if (newKey == null)
                {
                    // Cannot generate a valid key
                    return null;
                }

                bool containsKey = (bool)containsMethod.Invoke(result, new object[] { newKey });
                if (!containsKey)
                {
                    object newValue = objectGenerator.GenerateObject(typeV, createdObjectReferences);
                    addMethod.Invoke(result, new object[] { newKey, newValue });
                }
            }

            return result;
        }

        // Returns the enum's first declared value, or null for an empty enum.
        private static object GenerateEnum(Type enumType)
        {
            Array possibleValues = Enum.GetValues(enumType);
            if (possibleValues.Length > 0)
            {
                return possibleValues.GetValue(0);
            }
            return null;
        }

        // Builds an IQueryable (generic or not) by generating a backing list/array
        // and wrapping it with Queryable.AsQueryable.
        private static object GenerateQueryable(Type queryableType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            bool isGeneric = queryableType.IsGenericType;
            object list;
            if (isGeneric)
            {
                Type listType = typeof(List<>).MakeGenericType(queryableType.GetGenericArguments());
                list = GenerateCollection(listType, size, createdObjectReferences);
            }
            else
            {
                list = GenerateArray(typeof(object[]), size, createdObjectReferences);
            }
            if (list == null)
            {
                return null;
            }
            if (isGeneric)
            {
                // Bind the generic AsQueryable<T>(IEnumerable<T>) overload via reflection.
                Type argumentType = typeof(IEnumerable<>).MakeGenericType(queryableType.GetGenericArguments());
                MethodInfo asQueryableMethod = typeof(Queryable).GetMethod("AsQueryable", new[] { argumentType });
                return asQueryableMethod.Invoke(null, new[] { list });
            }

            return Queryable.AsQueryable((IEnumerable)list);
        }

        // Populates a collection via its Add method; null if every element generated null.
        private static object GenerateCollection(Type collectionType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            Type type = collectionType.IsGenericType ? collectionType.GetGenericArguments()[0] : typeof(object);
            object result = Activator.CreateInstance(collectionType);
            MethodInfo addMethod = collectionType.GetMethod("Add");
            bool areAllElementsNull = true;
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object element = objectGenerator.GenerateObject(type, createdObjectReferences);
                addMethod.Invoke(result, new object[] { element });
                areAllElementsNull &= element == null;
            }

            if (areAllElementsNull)
            {
                return null;
            }

            return result;
        }

        // Nullable<T> is represented by a generated T (boxing handles the wrapping).
        private static object GenerateNullable(Type nullableType, Dictionary<Type, object> createdObjectReferences)
        {
            Type type = nullableType.GetGenericArguments()[0];
            ObjectGenerator objectGenerator = new ObjectGenerator();
            return objectGenerator.GenerateObject(type, createdObjectReferences);
        }

        // POCO path: default-construct, register in the cache BEFORE filling members
        // (so self-referencing types resolve to the same instance), then set members.
        private static object GenerateComplexObject(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            object result = null;

            if (createdObjectReferences.TryGetValue(type, out result))
            {
                // The object has been created already, just return it. This will handle the circular reference case.
                return result;
            }

            if (type.IsValueType)
            {
                result = Activator.CreateInstance(type);
            }
            else
            {
                ConstructorInfo defaultCtor = type.GetConstructor(Type.EmptyTypes);
                if (defaultCtor == null)
                {
                    // Cannot instantiate the type because it doesn't have a default constructor
                    return null;
                }

                result = defaultCtor.Invoke(new object[0]);
            }
            createdObjectReferences.Add(type, result);
            SetPublicProperties(type, result, createdObjectReferences);
            SetPublicFields(type, result, createdObjectReferences);
            return result;
        }

        // Fills every writable public instance property with generated sample data.
        private static void SetPublicProperties(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
        {
            PropertyInfo[] properties = type.GetProperties(BindingFlags.Public | BindingFlags.Instance);
            ObjectGenerator objectGenerator = new ObjectGenerator();
            foreach (PropertyInfo property in properties)
            {
                if (property.CanWrite)
                {
                    object propertyValue = objectGenerator.GenerateObject(property.PropertyType, createdObjectReferences);
                    property.SetValue(obj, propertyValue, null);
                }
            }
        }

        // Fills every public instance field with generated sample data.
        private static void SetPublicFields(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
        {
            FieldInfo[] fields = type.GetFields(BindingFlags.Public | BindingFlags.Instance);
            ObjectGenerator objectGenerator = new ObjectGenerator();
            foreach (FieldInfo field in fields)
            {
                object fieldValue = objectGenerator.GenerateObject(field.FieldType, createdObjectReferences);
                field.SetValue(obj, fieldValue);
            }
        }

        // Table-driven generator for primitive/framework types. _index increments per
        // generated value so successive samples differ.
        private class SimpleTypeObjectGenerator
        {
            private long _index = 0;
            private static readonly Dictionary<Type, Func<long, object>> DefaultGenerators = InitializeGenerators();

            [SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity", Justification = "These are simple type factories and cannot be split up.")]
            private static Dictionary<Type, Func<long, object>> InitializeGenerators()
            {
                return new Dictionary<Type, Func<long, object>>
                {
                    { typeof(Boolean), index => true },
                    { typeof(Byte), index => (Byte)64 },
                    { typeof(Char), index => (Char)65 },
                    { typeof(DateTime), index => DateTime.Now },
                    { typeof(DateTimeOffset), index => new DateTimeOffset(DateTime.Now) },
                    { typeof(DBNull), index => DBNull.Value },
                    { typeof(Decimal), index => (Decimal)index },
                    { typeof(Double), index => (Double)(index + 0.1) },
                    { typeof(Guid), index => Guid.NewGuid() },
                    { typeof(Int16), index => (Int16)(index % Int16.MaxValue) },
                    { typeof(Int32), index => (Int32)(index % Int32.MaxValue) },
                    { typeof(Int64), index => (Int64)index },
                    { typeof(Object), index => new object() },
                    { typeof(SByte), index => (SByte)64 },
                    { typeof(Single), index => (Single)(index + 0.1) },
                    { typeof(String), index =>
                    {
                        return String.Format(CultureInfo.CurrentCulture, "sample string {0}", index);
                    } },
                    { typeof(TimeSpan), index =>
                    {
                        return TimeSpan.FromTicks(1234567);
                    } },
                    { typeof(UInt16), index => (UInt16)(index % UInt16.MaxValue) },
                    { typeof(UInt32), index => (UInt32)(index % UInt32.MaxValue) },
                    { typeof(UInt64), index => (UInt64)index },
                    { typeof(Uri), index =>
                    {
                        return new Uri(String.Format(CultureInfo.CurrentCulture, "http://webapihelppage{0}.com", index));
                    } },
                };
            }

            public static bool CanGenerateObject(Type type)
            {
                return DefaultGenerators.ContainsKey(type);
            }

            public object GenerateObject(Type type)
            {
                return DefaultGenerators[type](++_index);
            }
        }
    }
}
/*------------------------------------------------------------------------- DFA.cs -- Generation of the Scanner Automaton Compiler Generator Coco/R, Copyright (c) 1990, 2004 Hanspeter Moessenboeck, University of Linz extended by M. Loeberbauer & A. Woess, Univ. of Linz with improvements by Pat Terry, Rhodes University This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. As an exception, it is allowed to write an extension of Coco/R that is used as a plugin in non-free software. If not otherwise stated, any source code generated by Coco/R (other than Coco/R itself) does not fall under the GNU General Public License. 
-------------------------------------------------------------------------*/
using System;
using System.IO;
using System.Text;
using System.Collections;

namespace at.jku.ssw.Coco {

//-----------------------------------------------------------------------------
//  State
//-----------------------------------------------------------------------------

public class State {               // state of finite automaton
	public int nr;                   // state number
	public Action firstAction;// to first action of this state
	public Symbol endOf;             // recognized token if state is final
	public bool ctx;                 // true if state is reached via contextTrans
	public State next;

	// Inserts act into the action list, which is kept sorted by typ.
	public void AddAction(Action act) {
		Action lasta = null, a = firstAction;
		while (a != null && act.typ >= a.typ) {lasta = a; a = a.next;}
		// collecting classes at the beginning gives better performance
		act.next = a;
		if (a==firstAction) firstAction = act; else lasta.next = act;
	}

	// Unlinks act from the action list (no-op if act is not present).
	public void DetachAction(Action act) {
		Action lasta = null, a = firstAction;
		while (a != null && a != act) {lasta = a; a = a.next;}
		if (a != null)
			if (a == firstAction) firstAction = a.next; else lasta.next = a.next;
	}

	public void MeltWith(State s) { // copy actions of s to state
		for (Action action = s.firstAction; action != null; action = action.next) {
			Action a = new Action(action.typ, action.sym, action.tc);
			a.AddTargets(action);
			AddAction(a);
		}
	}

}

//-----------------------------------------------------------------------------
//  Action
//-----------------------------------------------------------------------------

public class Action {			// action of finite automaton
	public int typ;					// type of action symbol: clas, chr
	public int sym;					// action symbol
	public int tc;					// transition code: normalTrans, contextTrans
	public Target target;		// states reached from this action
	public Action next;

	public Action(int typ, int sym, int tc) {
		this.typ = typ; this.sym = sym; this.tc = tc;
	}

	// Inserts t into the target list, kept sorted by state number; duplicates ignored.
	public void AddTarget(Target t) { // add t to the action.targets
		Target last = null;
		Target p = target;
		while (p != null && t.state.nr >= p.state.nr) {
			if (t.state == p.state) return;
			last = p; p = p.next;
		}
		t.next = p;
		if (p == target) target = t; else last.next = t;
	}

	public void AddTargets(Action a) { // add copy of a.targets to action.targets
		for (Target p = a.target; p != null; p = p.next) {
			Target t = new Target(p.state);
			AddTarget(t);
		}
		if (a.tc == Node.contextTrans) tc = Node.contextTrans;
	}

	// Character set recognized by this action (a whole class or a single char).
	public CharSet Symbols(Tab tab) {
		CharSet s;
		if (typ == Node.clas)
			s = tab.CharClassSet(sym).Clone();
		else {
			s = new CharSet(); s.Set(sym);
		}
		return s;
	}

	// Replaces this action's symbol with s: a plain char if |s| == 1, otherwise a char class.
	public void ShiftWith(CharSet s, Tab tab) {
		if (s.Elements() == 1) {
			typ = Node.chr; sym = s.First();
		} else {
			CharClass c = tab.FindCharClass(s);
			if (c == null) c = tab.NewCharClass("#", s); // class with dummy name
			typ = Node.clas; sym = c.n;
		}
	}

}

//-----------------------------------------------------------------------------
//  Target
//-----------------------------------------------------------------------------

public class Target {				// set of states that are reached by an action
	public State state;				// target state
	public Target next;

	public Target (State s) {
		state = s;
	}
}

//-----------------------------------------------------------------------------
//  Melted
//-----------------------------------------------------------------------------

public class Melted {					// info about melted states
	public BitArray set;				// set of old states
	public State state;					// new state
	public Melted next;

	public Melted(BitArray set, State state) {
		this.set = set; this.state = state;
	}
}

//-----------------------------------------------------------------------------
//  Comment
//-----------------------------------------------------------------------------

public class Comment {					// info about comment syntax
	public string start;
	public string stop;
	public bool nested;
	public Comment next;

	public Comment(string start, string stop, bool nested) {
		this.start = start; this.stop = stop; this.nested = nested;
	}
}
//-----------------------------------------------------------------------------
//  CharSet
//-----------------------------------------------------------------------------

// Sparse character set stored as a sorted singly linked list of inclusive ranges.
public class CharSet {

	public class Range {
		public int from, to;   // inclusive bounds
		public Range next;
		public Range(int from, int to) { this.from = from; this.to = to; }
	}

	public Range head;

	// Membership test: ranges are sorted, so the first range starting past i ends the search.
	public bool this[int i] {
		get {
			for (Range p = head; p != null; p = p.next)
				if (i < p.from) return false;
				else if (i <= p.to) return true; // p.from <= i <= p.to
			return false;
		}
	}

	// Adds i, extending an adjacent range when possible and merging two ranges
	// that become contiguous; otherwise inserts a new single-element range.
	public void Set(int i) {
		Range cur = head, prev = null;
		while (cur != null && i >= cur.from-1) {
			if (i <= cur.to + 1) { // (cur.from-1) <= i <= (cur.to+1)
				if (i == cur.from - 1) cur.from--;
				else if (i == cur.to + 1) {
					cur.to++;
					Range next = cur.next;
					if (next != null && cur.to == next.from - 1) { cur.to = next.to; cur.next = next.next; };
				}
				return;
			}
			prev = cur; cur = cur.next;
		}
		Range n = new Range(i, i);
		n.next = cur;
		if (prev == null) head = n; else prev.next = n;
	}

	// Deep copy of the range list.
	public CharSet Clone() {
		CharSet s = new CharSet();
		Range prev = null;
		for (Range cur = head; cur != null; cur = cur.next) {
			Range r = new Range(cur.from, cur.to);
			if (prev == null) s.head = r; else prev.next = r;
			prev = r;
		}
		return s;
	}

	// Structural equality: same ranges in the same order (lists are normalized by Set).
	public bool Equals(CharSet s) {
		Range p = head, q = s.head;
		while (p != null && q != null) {
			if (p.from != q.from || p.to != q.to) return false;
			p = p.next; q = q.next;
		}
		return p == q;
	}

	// Number of characters in the set.
	public int Elements() {
		int n = 0;
		for (Range p = head; p != null; p = p.next) n += p.to - p.from + 1;
		return n;
	}

	// Smallest member, or -1 for the empty set.
	public int First() {
		if (head != null) return head.from;
		return -1;
	}

	// this |= s
	public void Or(CharSet s) {
		for (Range p = s.head; p != null; p = p.next)
			for (int i = p.from; i <= p.to; i++) Set(i);
	}

	// this &= s (rebuilt element-by-element)
	public void And(CharSet s) {
		CharSet x = new CharSet();
		for (Range p = head; p != null; p = p.next)
			for (int i = p.from; i <= p.to; i++)
				if (s[i]) x.Set(i);
		head = x.head;
	}

	// this -= s (rebuilt element-by-element)
	public void Subtract(CharSet s) {
		CharSet x = new CharSet();
		for (Range p = head; p != null; p = p.next)
			for (int i = p.from; i <= p.to; i++)
				if (!s[i]) x.Set(i);
		head = x.head;
	}

	// True if s is a subset of this.
	public bool Includes(CharSet s) {
		for (Range p = s.head; p != null; p = p.next)
			for (int i = p.from; i <= p.to; i++)
				if (!this[i]) return false;
		return true;
	}

	// True if this and s share at least one character.
	public bool Intersects(CharSet s) {
		for (Range p = s.head; p != null; p = p.next)
			for (int i = p.from; i <= p.to; i++)
				if (this[i]) return true;
		return false;
	}

	// Replaces the set with the full char range.
	public void Fill() {
		head = new Range(Char.MinValue, Char.MaxValue);
	}

}

//-----------------------------------------------------------------------------
//  DFA
//-----------------------------------------------------------------------------

// Scanner automaton builder/generator. NOTE(review): class continues past this chunk.
public class DFA {
	public const int EOF = -1;

	public int maxStates;
	public int lastStateNr;   // highest state number
	public State firstState;
	public State lastState;   // last allocated state
	public int lastSimState;  // last non melted state
	public FileStream fram;   // scanner frame input
	public StreamWriter gen;  // generated scanner file
	public Symbol curSy;      // current token to be recognized (in FindTrans)
	public Node curGraph;     // start of graph for current token (in FindTrans)
	public bool ignoreCase;   // true if input should be treated case-insensitively
	public bool dirtyDFA;     // DFA may become nondeterministic in MatchLiteral
	public bool hasCtxMoves;  // DFA has context transitions

	Parser parser;            // other Coco objects
	Tab tab;
	Errors errors;
	TextWriter trace;

	//---------- Output primitives

	// Renders ch for the generated scanner: numeric for non-printables/quote/backslash,
	// otherwise a quoted char literal.
	private string Ch(int ch) {
		if (ch < ' ' || ch >= 127 || ch == '\'' || ch == '\\') return Convert.ToString(ch);
		else return String.Format("'{0}'", (char)ch);
	}

	// Renders a single-character comparison for the generated scanner.
	private string ChCond(char ch) {
		return String.Format("ch == {0}", Ch(ch));
	}

	// Emits an ||-joined condition that tests membership of ch in s.
	private void PutRange(CharSet s) {
		for (CharSet.Range r = s.head; r != null; r = r.next) {
			if (r.from == r.to) { gen.Write("ch == " + Ch(r.from)); }
			else if (r.from == 0) { gen.Write("ch <= " + Ch(r.to)); }
			else { gen.Write("ch >= " + Ch(r.from) + " && ch <= " + Ch(r.to)); }
			if (r.next != null) gen.Write(" || ");
		}
	}
//---------- State handling

// Allocates a new automaton state and appends it to the state list.
State NewState() {
	State s = new State();
	s.nr = ++lastStateNr;
	if (firstState == null) firstState = s; else lastState.next = s;
	lastState = s;
	return s;
}

// Adds a transition from 'from' to 'to' on a character (chr) or character class (clas).
void NewTransition(State from, State to, int typ, int sym, int tc) {
	if (to == firstState) parser.SemErr("token must not start with an iteration");
	Target t = new Target(to);
	Action a = new Action(typ, sym, tc); a.target = t;
	from.AddAction(a);
	if (typ == Node.clas) curSy.tokenKind = Symbol.classToken;
}

// Merges actions of a state that lead to the same target with the same transition code.
void CombineShifts() {
	State state;
	Action a, b, c;
	CharSet seta, setb;
	for (state = firstState; state != null; state = state.next) {
		for (a = state.firstAction; a != null; a = a.next) {
			b = a.next;
			while (b != null)
				if (a.target.state == b.target.state && a.tc == b.tc) {
					seta = a.Symbols(tab); setb = b.Symbols(tab);
					seta.Or(setb);
					a.ShiftWith(seta, tab);
					c = b; b = b.next; state.DetachAction(c);
				} else b = b.next;
		}
	}
}

// Marks 'state' and everything reachable from it in 'used'.
void FindUsedStates(State state, BitArray used) {
	if (used[state.nr]) return;
	used[state.nr] = true;
	for (Action a = state.firstAction; a != null; a = a.next)
		FindUsedStates(a.target.state, used);
}

// Removes unreachable states and merges equal final states, renumbering the rest.
void DeleteRedundantStates() {
	State[] newState = new State[lastStateNr + 1];
	BitArray used = new BitArray(lastStateNr + 1);
	FindUsedStates(firstState, used);
	// combine equal final states
	for (State s1 = firstState.next; s1 != null; s1 = s1.next) // firstState cannot be final
		if (used[s1.nr] && s1.endOf != null && s1.firstAction == null && !s1.ctx)
			for (State s2 = s1.next; s2 != null; s2 = s2.next)
				// NOTE(review): single '&' below is non-short-circuiting but
				// behaviorally equivalent for bools; '&&' was probably intended.
				if (used[s2.nr] && s1.endOf == s2.endOf && s2.firstAction == null & !s2.ctx) {
					used[s2.nr] = false; newState[s2.nr] = s1;
				}
	for (State state = firstState; state != null; state = state.next)
		if (used[state.nr])
			for (Action a = state.firstAction; a != null; a = a.next)
				if (!used[a.target.state.nr])
					a.target.state = newState[a.target.state.nr];
	// delete unused states
	lastState = firstState; lastStateNr = 0; // firstState has number 0
	for (State state = firstState.next; state != null; state = state.next)
		if (used[state.nr]) {state.nr = ++lastStateNr; lastState = state;}
		else lastState.next = state.next;
}

// Returns the state of node p, or a fresh final state for the current token if p is null.
State TheState(Node p) {
	State state;
	if (p == null) {state = NewState(); state.endOf = curSy; return state;}
	else return p.state;
}

// Creates the transitions leaving 'from' for node p (recursing through alt/iter/opt).
void Step(State from, Node p, BitArray stepped) {
	if (p == null) return;
	stepped[p.n] = true;
	switch (p.typ) {
		case Node.clas: case Node.chr: {
			NewTransition(from, TheState(p.next), p.typ, p.val, p.code);
			break;
		}
		case Node.alt: {
			Step(from, p.sub, stepped); Step(from, p.down, stepped);
			break;
		}
		case Node.iter: case Node.opt: {
			if (p.next != null && !stepped[p.next.n]) Step(from, p.next, stepped);
			Step(from, p.sub, stepped);
			break;
		}
	}
}

/* Assigns a state n.state to every node n. There will be a transition from
   n.state to n.next.state triggered by n.val. All nodes in an alternative
   chain are represented by the same state. */
void NumberNodes(Node p, State state) {
	if (p == null) return;
	if (p.state != null) return; // already visited;
	if (state == null) state = NewState();
	p.state = state;
	if (tab.DelGraph(p)) state.endOf = curSy;
	switch (p.typ) {
		case Node.clas: case Node.chr: {
			NumberNodes(p.next, null);
			break;
		}
		case Node.opt: {
			NumberNodes(p.next, null); NumberNodes(p.sub, state);
			break;
		}
		case Node.iter: {
			NumberNodes(p.next, state); NumberNodes(p.sub, state);
			break;
		}
		case Node.alt: {
			NumberNodes(p.sub, state); NumberNodes(p.down, state);
			break;
		}
	}
}

// Builds the transitions for the graph rooted at p; 'start' marks the first
// node of a group of equally numbered nodes.
void FindTrans (Node p, bool start, BitArray marked) {
	if (p == null || marked[p.n]) return;
	marked[p.n] = true;
	if (start) Step(p.state, p, new BitArray(tab.nodes.Count)); // start of group of equally numbered nodes
	switch (p.typ) {
		case Node.clas: case Node.chr: {
			FindTrans(p.next, true, marked);
			break;
		}
		case Node.opt: {
			FindTrans(p.next, true, marked); FindTrans(p.sub, false, marked);
			break;
		}
		case Node.iter: {
			FindTrans(p.next, false, marked); FindTrans(p.sub, false, marked);
			break;
		}
		case Node.alt: {
			FindTrans(p.sub, false, marked);
FindTrans(p.down, false, marked);
			break;
		}
	}
}

// Converts the syntax graph of token 'sym' into automaton states and transitions.
public void ConvertToStates(Node p, Symbol sym) {
	curGraph = p; curSy = sym;
	if (tab.DelGraph(curGraph)) parser.SemErr("token might be empty");
	NumberNodes(curGraph, firstState);
	FindTrans(curGraph, true, new BitArray(tab.nodes.Count));
}

// match string against current automaton; store it either as a fixedToken or as a litToken
public void MatchLiteral(string s, Symbol sym) {
	s = tab.Unescape(s.Substring(1, s.Length-2)); // strip the surrounding quotes
	int i, len = s.Length;
	State state = firstState;
	Action a = null;
	for (i = 0; i < len; i++) { // try to match s against existing DFA
		a = FindAction(state, s[i]);
		if (a == null) break;
		state = a.target.state;
	}
	// if s was not totally consumed or leads to a non-final state => make new DFA from it
	if (i != len || state.endOf == null) {
		state = firstState; i = 0; a = null;
		dirtyDFA = true;
	}
	for (; i < len; i++) { // make new DFA for s[i..len-1]
		State to = NewState();
		NewTransition(state, to, Node.chr, s[i], Node.normalTrans);
		state = to;
	}
	Symbol matchedSym = state.endOf;
	if (state.endOf == null) {
		state.endOf = sym;
	} else if (matchedSym.tokenKind == Symbol.fixedToken || (a != null && a.tc == Node.contextTrans)) {
		// s matched a token with a fixed definition or a token with an appendix that will be cut off
		parser.SemErr("tokens " + sym.name + " and " + matchedSym.name + " cannot be distinguished");
	} else { // matchedSym == classToken || classLitToken
		matchedSym.tokenKind = Symbol.classLitToken;
		sym.tokenKind = Symbol.litToken;
	}
}

// Splits two overlapping actions of 'state' so their character sets become disjoint.
void SplitActions(State state, Action a, Action b) {
	Action c;
	CharSet seta, setb, setc;
	seta = a.Symbols(tab); setb = b.Symbols(tab);
	if (seta.Equals(setb)) {
		a.AddTargets(b);
		state.DetachAction(b);
	} else if (seta.Includes(setb)) {
		setc = seta.Clone(); setc.Subtract(setb);
		b.AddTargets(a);
		a.ShiftWith(setc, tab);
	} else if (setb.Includes(seta)) {
		setc = setb.Clone(); setc.Subtract(seta);
		a.AddTargets(b);
		b.ShiftWith(setc, tab);
	} else {
		setc = seta.Clone(); setc.And(setb);
		seta.Subtract(setc);
		setb.Subtract(setc);
		a.ShiftWith(seta, tab);
		b.ShiftWith(setb, tab);
		c = new Action(0, 0, Node.normalTrans); // typ and sym are set in ShiftWith
		c.AddTargets(a);
		c.AddTargets(b);
		c.ShiftWith(setc, tab);
		state.AddAction(c);
	}
}

// True if the character sets of actions a and b have at least one character in common.
bool Overlap(Action a, Action b) {
	CharSet seta, setb;
	if (a.typ == Node.chr)
		if (b.typ == Node.chr) return a.sym == b.sym;
		else {setb = tab.CharClassSet(b.sym); return setb[a.sym];}
	else {
		seta = tab.CharClassSet(a.sym);
		if (b.typ == Node.chr) return seta[b.sym];
		else {setb = tab.CharClassSet(b.sym); return seta.Intersects(setb);}
	}
}

// Splits overlapping actions until all actions of 'state' are disjoint;
// return true if actions were split
bool MakeUnique(State state) {
	bool changed = false;
	for (Action a = state.firstAction; a != null; a = a.next)
		for (Action b = a.next; b != null; b = b.next)
			if (Overlap(a, b)) {SplitActions(state, a, b); changed = true;}
	return changed;
}

// Replaces multi-target actions by transitions into (possibly new) melted states.
void MeltStates(State state) {
	bool changed, ctx;
	BitArray targets;
	Symbol endOf;
	for (Action action = state.firstAction; action != null; action = action.next) {
		if (action.target.next != null) {
			GetTargetStates(action, out targets, out endOf, out ctx);
			Melted melt = StateWithSet(targets);
			if (melt == null) {
				State s = NewState(); s.endOf = endOf; s.ctx = ctx;
				for (Target targ = action.target; targ != null; targ = targ.next)
					s.MeltWith(targ.state);
				do {changed = MakeUnique(s);} while (changed);
				melt = NewMelted(targets, s);
			}
			action.target.next = null;
			action.target.state = melt.state;
		}
	}
}

// Marks target states of context transitions.
void FindCtxStates() {
	for (State state = firstState; state != null; state = state.next)
		for (Action a = state.firstAction; a != null; a = a.next)
			if (a.tc == Node.contextTrans)
				a.target.state.ctx = true;
}

// Converts the possibly nondeterministic automaton into a deterministic one.
public void MakeDeterministic() {
	State state;
	bool changed;
	lastSimState = lastState.nr;
	maxStates = 2 * lastSimState; // heuristic for set size in Melted.set
	FindCtxStates();
	for (state = firstState; state != null; state = state.next)
		do {changed = MakeUnique(state);} while (changed);
	for (state = firstState; state != null;
state = state.next)
		MeltStates(state);
	DeleteRedundantStates();
	CombineShifts();
}

// Writes the automaton and the character classes to the trace file.
public void PrintStates() {
	trace.WriteLine();
	trace.WriteLine("---------- states ----------");
	for (State state = firstState; state != null; state = state.next) {
		bool first = true;
		if (state.endOf == null) trace.Write(" ");
		else trace.Write("E({0,12})", tab.Name(state.endOf.name));
		trace.Write("{0,3}:", state.nr);
		if (state.firstAction == null) trace.WriteLine();
		for (Action action = state.firstAction; action != null; action = action.next) {
			if (first) {trace.Write(" "); first = false;} else trace.Write(" ");
			if (action.typ == Node.clas) trace.Write(((CharClass)tab.classes[action.sym]).name);
			else trace.Write("{0, 3}", Ch(action.sym));
			for (Target targ = action.target; targ != null; targ = targ.next)
				trace.Write(" {0, 3}", targ.state.nr);
			if (action.tc == Node.contextTrans) trace.WriteLine(" context");
			else trace.WriteLine();
		}
	}
	trace.WriteLine();
	trace.WriteLine("---------- character classes ----------");
	tab.WriteCharClasses();
}

//---------------------------- actions --------------------------------

// Returns the action of 'state' that matches character ch, or null.
public Action FindAction(State state, char ch) {
	for (Action a = state.firstAction; a != null; a = a.next)
		if (a.typ == Node.chr && ch == a.sym) return a;
		else if (a.typ == Node.clas) {
			CharSet s = tab.CharClassSet(a.sym);
			if (s[ch]) return a;
		}
	return null;
}

// compute the set of target states
public void GetTargetStates(Action a, out BitArray targets, out Symbol endOf, out bool ctx) {
	targets = new BitArray(maxStates); endOf = null;
	ctx = false;
	for (Target t = a.target; t != null; t = t.next) {
		int stateNr = t.state.nr;
		if (stateNr <= lastSimState) targets[stateNr] = true;
		else targets.Or(MeltedSet(stateNr));
		if (t.state.endOf != null)
			if (endOf == null || endOf == t.state.endOf)
				endOf = t.state.endOf;
			else
				errors.SemErr("Tokens " + endOf.name + " and " + t.state.endOf.name + " cannot be distinguished");
		if (t.state.ctx) {
			ctx = true;
			// The following check seems to be unnecessary. It reported an error
			// if a symbol + context was the prefix of another symbol, e.g.
			//   s1 = "a" "b" "c".
			//   s2 = "a" CONTEXT("b").
			// But this is ok.
			// if (t.state.endOf != null) {
			//   Console.WriteLine("Ambiguous context clause");
			//   errors.count++;
			// }
		}
	}
}

//------------------------- melted states ------------------------------

Melted firstMelted; // head of melted state list

// Records that melted state 'state' represents the set of simple states 'set'.
Melted NewMelted(BitArray set, State state) {
	Melted m = new Melted(set, state);
	m.next = firstMelted; firstMelted = m;
	return m;
}

// Returns the simple-state set of the melted state with number nr.
BitArray MeltedSet(int nr) {
	Melted m = firstMelted;
	while (m != null) {
		if (m.state.nr == nr) return m.set; else m = m.next;
	}
	throw new FatalError("compiler error in Melted.Set");
}

// Returns the melted state representing exactly the state set s, or null.
Melted StateWithSet(BitArray s) {
	for (Melted m = firstMelted; m != null; m = m.next)
		if (Sets.Equals(s, m.set)) return m;
	return null;
}

//------------------------ comments --------------------------------

public Comment firstComment; // list of comments

// Collects the delimiter characters of a comment bracket node chain.
string CommentStr(Node p) {
	StringBuilder s = new StringBuilder();
	while (p != null) {
		if (p.typ == Node.chr) {
			s.Append((char)p.val);
		} else if (p.typ == Node.clas) {
			CharSet set = tab.CharClassSet(p.val);
			if (set.Elements() != 1) parser.SemErr("character set contains more than 1 character");
			s.Append((char)set.First());
		} else parser.SemErr("comment delimiters may not be structured");
		p = p.next;
	}
	if (s.Length == 0 || s.Length > 2) {
		parser.SemErr("comment delimiters must be 1 or 2 characters long");
		s = new StringBuilder("?");
	}
	return s.ToString();
}

// Registers a comment bracket (from..to, possibly nested) for scanner generation.
public void NewComment(Node from, Node to, bool nested) {
	Comment c = new Comment(CommentStr(from), CommentStr(to), nested);
	c.next = firstComment; firstComment = c;
}

//------------------------ scanner generation ----------------------

// Emits the body of a CommentN() method that skips one comment kind.
void GenComBody(Comment com) {
	gen.WriteLine( "\t\t\tfor(;;) {");
	gen.Write ( "\t\t\t\tif ({0}) ", ChCond(com.stop[0]));
	gen.WriteLine("{");
	if (com.stop.Length == 1) {
		gen.WriteLine("\t\t\t\t\tlevel--;");
gen.WriteLine("\t\t\t\t\tif (level == 0) { oldEols = line - line0; NextCh(); return true; }");
		gen.WriteLine("\t\t\t\t\tNextCh();");
	} else {
		gen.WriteLine("\t\t\t\t\tNextCh();");
		gen.WriteLine("\t\t\t\t\tif ({0}) {{", ChCond(com.stop[1]));
		gen.WriteLine("\t\t\t\t\t\tlevel--;");
		gen.WriteLine("\t\t\t\t\t\tif (level == 0) { oldEols = line - line0; NextCh(); return true; }");
		gen.WriteLine("\t\t\t\t\t\tNextCh();");
		gen.WriteLine("\t\t\t\t\t}");
	}
	if (com.nested) { // nested comments need level tracking for the start bracket too
		gen.Write ("\t\t\t\t}");
		gen.Write(" else if ({0}) ", ChCond(com.start[0]));
		gen.WriteLine("{");
		if (com.start.Length == 1)
			gen.WriteLine("\t\t\t\t\tlevel++; NextCh();");
		else {
			gen.WriteLine("\t\t\t\t\tNextCh();");
			gen.Write ("\t\t\t\t\tif ({0}) ", ChCond(com.start[1]));
			gen.WriteLine("{");
			gen.WriteLine("\t\t\t\t\t\tlevel++; NextCh();");
			gen.WriteLine("\t\t\t\t\t}");
		}
	}
	gen.WriteLine( "\t\t\t\t} else if (ch == Buffer.EOF) return false;");
	gen.WriteLine( "\t\t\t\telse NextCh();");
	gen.WriteLine( "\t\t\t}");
}

// Emits the CommentN() skipper method for comment kind i.
void GenComment(Comment com, int i) {
	gen.WriteLine();
	gen.Write ("\tbool Comment{0}() ", i);
	gen.WriteLine("{");
	gen.WriteLine("\t\tint level = 1, pos0 = pos, line0 = line, col0 = col;");
	if (com.start.Length == 1) {
		gen.WriteLine("\t\tNextCh();");
		GenComBody(com);
	} else {
		gen.WriteLine("\t\tNextCh();");
		gen.Write ("\t\tif ({0}) ", ChCond(com.start[1]));
		gen.WriteLine("{");
		gen.WriteLine("\t\t\tNextCh();");
		GenComBody(com);
		gen.WriteLine("\t\t} else {");
		gen.WriteLine("\t\t\tbuffer.Pos = pos0; NextCh(); line = line0; col = col0;");
		gen.WriteLine("\t\t}");
		gen.WriteLine("\t\treturn false;");
	}
	gen.WriteLine("\t}");
}

// Copies the scanner frame file to the output up to (but not including) marker 'stop'.
void CopyFramePart(string stop) {
	char startCh = stop[0];
	int endOfStopString = stop.Length-1;
	int ch = fram.ReadByte();
	while (ch != EOF)
		if (ch == startCh) {
			int i = 0;
			do {
				if (i == endOfStopString) return; // stop[0..i] found
				ch = fram.ReadByte(); i++;
			} while (ch == stop[i]);
			// stop[0..i-1] found; continue with last read character
			gen.Write(stop.Substring(0, i));
		} else {
			gen.Write((char)ch);
			ch = fram.ReadByte();
		}
	throw new FatalError("incomplete or corrupt scanner frame file");
}

// Returns the literal spelling of sym (looked up in Tab.literals) or its grammar name.
string SymName(Symbol sym) {
	if (Char.IsLetter(sym.name[0])) { // real name value is stored in Tab.literals
		foreach (DictionaryEntry e in tab.literals)
			if ((Symbol)e.Value == sym) return (string)e.Key;
	}
	return sym.name;
}

// Emits the CheckLiteral() switch mapping recognized literals to their token kinds.
void GenLiterals () {
	if (ignoreCase) {
		gen.WriteLine("\t\tswitch (t.val.ToLower()) {");
	} else {
		gen.WriteLine("\t\tswitch (t.val) {");
	}
	foreach (Symbol sym in tab.terminals) {
		if (sym.tokenKind == Symbol.litToken) {
			string name = SymName(sym);
			if (ignoreCase) name = name.ToLower();
			// sym.name stores literals with quotes, e.g. "\"Literal\""
			gen.WriteLine("\t\t\tcase {0}: t.kind = {1}; break;", name, sym.n);
		}
	}
	gen.WriteLine("\t\t\tdefault: break;");
	gen.Write("\t\t}");
}

// Emits the scanner's switch case for one automaton state.
void WriteState(State state) {
	Symbol endOf = state.endOf;
	gen.WriteLine("\t\t\tcase {0}:", state.nr);
	bool ctxEnd = state.ctx;
	for (Action action = state.firstAction; action != null; action = action.next) {
		if (action == state.firstAction) gen.Write("\t\t\t\tif (");
		else gen.Write("\t\t\t\telse if (");
		if (action.typ == Node.chr) gen.Write(ChCond((char)action.sym));
		else PutRange(tab.CharClassSet(action.sym));
		gen.Write(") {");
		if (action.tc == Node.contextTrans) {
			gen.Write("apx++; "); ctxEnd = false;
		} else if (state.ctx) gen.Write("apx = 0; ");
		gen.Write("AddCh(); goto case {0};", action.target.state.nr);
		gen.WriteLine("}");
	}
	if (state.firstAction == null)
		gen.Write("\t\t\t\t{");
	else
		gen.Write("\t\t\t\telse {");
	if (ctxEnd) { // final context state: cut appendix
		gen.WriteLine();
		gen.WriteLine("\t\t\t\t\ttlen -= apx;");
		gen.WriteLine("\t\t\t\t\tbuffer.Pos = t.pos; NextCh(); line = t.line; col = t.col;");
		gen.WriteLine("\t\t\t\t\tfor (int i = 0; i < tlen; i++) NextCh();");
		gen.Write( "\t\t\t\t\t");
	}
	if (endOf == null) {
		gen.WriteLine("t.kind = noSym; break;}");
	} else {
		gen.Write("t.kind = {0}; ", endOf.n);
		if (endOf.tokenKind == Symbol.classLitToken) {
gen.WriteLine("t.val = new String(tval, 0, tlen); CheckLiteral(); return t;}");
		} else {
			gen.WriteLine("break;}");
		}
	}
}

// Emits the initialization of the generated scanner's start-state table.
void WriteStartTab() {
	for (Action action = firstState.firstAction; action != null; action = action.next) {
		int targetState = action.target.state.nr;
		if (action.typ == Node.chr) {
			gen.WriteLine("\t\tstart[" + action.sym + "] = " + targetState + "; ");
		} else {
			CharSet s = tab.CharClassSet(action.sym);
			for (CharSet.Range r = s.head; r != null; r = r.next) {
				gen.WriteLine("\t\tfor (int i = " + r.from + "; i <= " + r.to + "; ++i) start[i] = " + targetState + ";");
			}
		}
	}
	gen.WriteLine("\t\tstart[Buffer.EOF] = -1;");
}

// Opens Scanner.cs for writing; optionally backs up an existing file first.
void OpenGen(bool backUp) { /* pdt */
	try {
		string fn = tab.srcDir + "Scanner.cs"; /* pdt */
		if (File.Exists(fn) && backUp) File.Copy(fn, fn + ".old", true);
		gen = new StreamWriter(new FileStream(fn, FileMode.Create)); /* pdt */
	} catch (IOException) {
		throw new FatalError("Cannot generate scanner file");
	}
}

// Generates Scanner.cs from Scanner.frame and the (deterministic) automaton.
public void WriteScanner() {
	int i;
	string fr = tab.srcDir + "Scanner.frame";  /* pdt */
	if (!File.Exists(fr)) {
		if (tab.frameDir != null) fr = tab.frameDir.Trim() + Path.DirectorySeparatorChar + "Scanner.frame";
		if (!File.Exists(fr)) throw new FatalError("Cannot find Scanner.frame");
	}
	try {
		fram = new FileStream(fr, FileMode.Open, FileAccess.Read, FileShare.Read);
	} catch (FileNotFoundException) {
		throw new FatalError("Cannot open Scanner.frame.");
	}
	OpenGen(true); /* pdt */
	if (dirtyDFA) MakeDeterministic();
	CopyFramePart("-->begin");
	if (!tab.srcName.ToLower().EndsWith("coco.atg")) {
		gen.Close(); OpenGen(false); /* pdt */
	}
	CopyFramePart("-->namespace");
	if (tab.nsName != null && tab.nsName.Length > 0) {
		gen.Write("namespace ");
		gen.Write(tab.nsName);
		gen.Write(" {");
	}
	CopyFramePart("-->declarations");
	gen.WriteLine("\tconst int maxT = {0};", tab.terminals.Count - 1);
	gen.WriteLine("\tconst int noSym = {0};", tab.noSym.n);
	if (ignoreCase) gen.Write("\tchar valCh; // current input character (for token.val)");
	CopyFramePart("-->initialization");
	WriteStartTab();
	CopyFramePart("-->casing1");
	if (ignoreCase) {
		gen.WriteLine("\t\tvalCh = (char)ch;");
		gen.Write ("\t\tif (ch != Buffer.EOF) ch = char.ToLower((char)ch);");
	}
	CopyFramePart("-->casing2");
	gen.Write("\t\ttval[tlen++] = ");
	if (ignoreCase) gen.Write("valCh;"); else gen.Write("(char)ch;");
	CopyFramePart("-->comments");
	Comment com = firstComment;
	i = 0;
	while (com != null) {
		GenComment(com, i);
		com = com.next; i++;
	}
	CopyFramePart("-->literals"); GenLiterals();
	CopyFramePart("-->scan1");
	gen.Write("\t\twhile (false");
	if (tab.ignored.Elements() > 0) { gen.Write(" || "); PutRange(tab.ignored); }
	gen.WriteLine(") NextCh();");
	if (firstComment != null) {
		gen.Write("\t\tif (");
		com = firstComment; i = 0;
		while (com != null) {
			gen.Write(ChCond(com.start[0]));
			gen.Write(" && Comment{0}()", i);
			if (com.next != null) gen.Write(" ||");
			com = com.next; i++;
		}
		gen.Write(") return NextToken();");
	}
	if (hasCtxMoves) { gen.WriteLine(); gen.Write("\t\tint apx = 0;"); } /* pdt */
	CopyFramePart("-->scan2");
	for (State state = firstState.next; state != null; state = state.next)
		WriteState(state);
	CopyFramePart("$$$");
	if (tab.nsName != null && tab.nsName.Length > 0) gen.Write("}");
	gen.Close();
}

// Creates the DFA builder; allocates the start state (state number 0).
public DFA (Parser parser) {
	this.parser = parser;
	tab = parser.tab;
	errors = parser.errors;
	trace = parser.trace;
	firstState = null; lastState = null; lastStateNr = -1;
	firstState = NewState();
	firstMelted = null;
	firstComment = null;
	ignoreCase = false;
	dirtyDFA = false;
	hasCtxMoves = false;
}

} // end DFA

} // end namespace
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Reflection;

namespace STIOnboardingPortal.Areas.HelpPage
{
    /// <summary>
    /// This class will create an object of a given type and populate it with sample data.
    /// </summary>
    public class ObjectGenerator
    {
        internal const int DefaultCollectionSize = 2;

        // Generator for primitive/simple BCL types (string, int, DateTime, Uri, ...).
        private readonly SimpleTypeObjectGenerator SimpleObjectGenerator = new SimpleTypeObjectGenerator();

        /// <summary>
        /// Generates an object for a given type. The type needs to be public, have a public default constructor and settable public properties/fields. Currently it supports the following types:
        /// Simple types: <see cref="int"/>, <see cref="string"/>, <see cref="Enum"/>, <see cref="DateTime"/>, <see cref="Uri"/>, etc.
        /// Complex types: POCO types.
        /// Nullables: <see cref="Nullable{T}"/>.
        /// Arrays: arrays of simple types or complex types.
        /// Key value pairs: <see cref="KeyValuePair{TKey,TValue}"/>
        /// Tuples: <see cref="Tuple{T1}"/>, <see cref="Tuple{T1,T2}"/>, etc
        /// Dictionaries: <see cref="IDictionary{TKey,TValue}"/> or anything deriving from <see cref="IDictionary{TKey,TValue}"/>.
        /// Collections: <see cref="IList{T}"/>, <see cref="IEnumerable{T}"/>, <see cref="ICollection{T}"/>, <see cref="IList"/>, <see cref="IEnumerable"/>, <see cref="ICollection"/> or anything deriving from <see cref="ICollection{T}"/> or <see cref="IList"/>.
        /// Queryables: <see cref="IQueryable"/>, <see cref="IQueryable{T}"/>.
        /// </summary>
        /// <param name="type">The type.</param>
        /// <returns>An object of the given type.</returns>
        public object GenerateObject(Type type)
        {
            return GenerateObject(type, new Dictionary<Type, object>());
        }

        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Here we just want to return null if anything goes wrong.")]
        private object GenerateObject(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            // Dispatches on the kind of type; createdObjectReferences is threaded
            // through all helpers to break cycles in object graphs.
            try
            {
                if (SimpleTypeObjectGenerator.CanGenerateObject(type))
                {
                    return SimpleObjectGenerator.GenerateObject(type);
                }
                if (type.IsArray)
                {
                    return GenerateArray(type, DefaultCollectionSize, createdObjectReferences);
                }
                if (type.IsGenericType)
                {
                    return GenerateGenericType(type, DefaultCollectionSize, createdObjectReferences);
                }
                if (type == typeof(IDictionary))
                {
                    return GenerateDictionary(typeof(Hashtable), DefaultCollectionSize, createdObjectReferences);
                }
                if (typeof(IDictionary).IsAssignableFrom(type))
                {
                    return GenerateDictionary(type, DefaultCollectionSize, createdObjectReferences);
                }
                if (type == typeof(IList) || type == typeof(IEnumerable) || type == typeof(ICollection))
                {
                    return GenerateCollection(typeof(ArrayList), DefaultCollectionSize, createdObjectReferences);
                }
                if (typeof(IList).IsAssignableFrom(type))
                {
                    return GenerateCollection(type, DefaultCollectionSize, createdObjectReferences);
                }
                if (type == typeof(IQueryable))
                {
                    return GenerateQueryable(type, DefaultCollectionSize, createdObjectReferences);
                }
                if (type.IsEnum)
                {
                    return GenerateEnum(type);
                }
                if (type.IsPublic || type.IsNestedPublic)
                {
                    return GenerateComplexObject(type, createdObjectReferences);
                }
            }
            catch
            {
                // Returns null if anything fails
                return null;
            }
            return null;
        }

        // Handles all generic types: Nullable, KeyValuePair, Tuple,
        // collection/dictionary interfaces, and generic POCOs.
        private static object GenerateGenericType(Type type, int collectionSize, Dictionary<Type, object> createdObjectReferences)
        {
            Type genericTypeDefinition = type.GetGenericTypeDefinition();
            if (genericTypeDefinition == typeof(Nullable<>))
            {
                return GenerateNullable(type,
createdObjectReferences);
            }
            if (genericTypeDefinition == typeof(KeyValuePair<,>))
            {
                return GenerateKeyValuePair(type, createdObjectReferences);
            }
            if (IsTuple(genericTypeDefinition))
            {
                return GenerateTuple(type, createdObjectReferences);
            }
            Type[] genericArguments = type.GetGenericArguments();
            if (genericArguments.Length == 1)
            {
                if (genericTypeDefinition == typeof(IList<>) ||
                    genericTypeDefinition == typeof(IEnumerable<>) ||
                    genericTypeDefinition == typeof(ICollection<>))
                {
                    // Interface requested: substitute a concrete List<T>.
                    Type collectionType = typeof(List<>).MakeGenericType(genericArguments);
                    return GenerateCollection(collectionType, collectionSize, createdObjectReferences);
                }
                if (genericTypeDefinition == typeof(IQueryable<>))
                {
                    return GenerateQueryable(type, collectionSize, createdObjectReferences);
                }
                Type closedCollectionType = typeof(ICollection<>).MakeGenericType(genericArguments[0]);
                if (closedCollectionType.IsAssignableFrom(type))
                {
                    return GenerateCollection(type, collectionSize, createdObjectReferences);
                }
            }
            if (genericArguments.Length == 2)
            {
                if (genericTypeDefinition == typeof(IDictionary<,>))
                {
                    // Interface requested: substitute a concrete Dictionary<K,V>.
                    Type dictionaryType = typeof(Dictionary<,>).MakeGenericType(genericArguments);
                    return GenerateDictionary(dictionaryType, collectionSize, createdObjectReferences);
                }
                Type closedDictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments[0], genericArguments[1]);
                if (closedDictionaryType.IsAssignableFrom(type))
                {
                    return GenerateDictionary(type, collectionSize, createdObjectReferences);
                }
            }
            if (type.IsPublic || type.IsNestedPublic)
            {
                return GenerateComplexObject(type, createdObjectReferences);
            }
            return null;
        }

        // Builds a Tuple<...> by generating each generic argument;
        // returns null only if no argument could be created.
        private static object GenerateTuple(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            Type[] genericArgs = type.GetGenericArguments();
            object[] parameterValues = new object[genericArgs.Length];
            bool failedToCreateTuple = true;
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < genericArgs.Length; i++)
            {
                parameterValues[i] = objectGenerator.GenerateObject(genericArgs[i], createdObjectReferences);
                failedToCreateTuple &= parameterValues[i] == null;
            }
            if (failedToCreateTuple)
            {
                return null;
            }
            object result = Activator.CreateInstance(type, parameterValues);
            return result;
        }

        // True for any of the Tuple<...> generic type definitions (arity 1..8).
        private static bool IsTuple(Type genericTypeDefinition)
        {
            return genericTypeDefinition == typeof(Tuple<>) ||
                genericTypeDefinition == typeof(Tuple<,>) ||
                genericTypeDefinition == typeof(Tuple<,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,,,,>);
        }

        // Builds a KeyValuePair<K,V>; null only if both key and value generation failed.
        private static object GenerateKeyValuePair(Type keyValuePairType, Dictionary<Type, object> createdObjectReferences)
        {
            Type[] genericArgs = keyValuePairType.GetGenericArguments();
            Type typeK = genericArgs[0];
            Type typeV = genericArgs[1];
            ObjectGenerator objectGenerator = new ObjectGenerator();
            object keyObject = objectGenerator.GenerateObject(typeK, createdObjectReferences);
            object valueObject = objectGenerator.GenerateObject(typeV, createdObjectReferences);
            if (keyObject == null && valueObject == null)
            {
                // Failed to create key and values
                return null;
            }
            object result = Activator.CreateInstance(keyValuePairType, keyObject, valueObject);
            return result;
        }

        // Fills an array of 'size' elements; null if every element came back null.
        private static object GenerateArray(Type arrayType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            Type type = arrayType.GetElementType();
            Array result = Array.CreateInstance(type, size);
            bool areAllElementsNull = true;
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object element = objectGenerator.GenerateObject(type, createdObjectReferences);
                result.SetValue(element, i);
                areAllElementsNull &= element == null;
            }
            if (areAllElementsNull)
            {
                return null;
            }
            return result;
        }

        // Fills a dictionary via its Add/Contains(Key) methods, found by reflection.
        private static object GenerateDictionary(Type dictionaryType, int size, Dictionary<Type, object>
createdObjectReferences)
        {
            // Untyped dictionaries (e.g. Hashtable) fall back to object keys/values.
            Type typeK = typeof(object);
            Type typeV = typeof(object);
            if (dictionaryType.IsGenericType)
            {
                Type[] genericArgs = dictionaryType.GetGenericArguments();
                typeK = genericArgs[0];
                typeV = genericArgs[1];
            }
            object result = Activator.CreateInstance(dictionaryType);
            MethodInfo addMethod = dictionaryType.GetMethod("Add") ?? dictionaryType.GetMethod("TryAdd");
            MethodInfo containsMethod = dictionaryType.GetMethod("Contains") ?? dictionaryType.GetMethod("ContainsKey");
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object newKey = objectGenerator.GenerateObject(typeK, createdObjectReferences);
                if (newKey == null)
                {
                    // Cannot generate a valid key
                    return null;
                }
                bool containsKey = (bool)containsMethod.Invoke(result, new object[] { newKey });
                if (!containsKey)
                {
                    object newValue = objectGenerator.GenerateObject(typeV, createdObjectReferences);
                    addMethod.Invoke(result, new object[] { newKey, newValue });
                }
            }
            return result;
        }

        // Returns the first declared enum member, or null for an empty enum.
        private static object GenerateEnum(Type enumType)
        {
            Array possibleValues = Enum.GetValues(enumType);
            if (possibleValues.Length > 0)
            {
                return possibleValues.GetValue(0);
            }
            return null;
        }

        // Builds an IQueryable (generic or not) by wrapping a generated list/array via AsQueryable.
        private static object GenerateQueryable(Type queryableType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            bool isGeneric = queryableType.IsGenericType;
            object list;
            if (isGeneric)
            {
                Type listType = typeof(List<>).MakeGenericType(queryableType.GetGenericArguments());
                list = GenerateCollection(listType, size, createdObjectReferences);
            }
            else
            {
                list = GenerateArray(typeof(object[]), size, createdObjectReferences);
            }
            if (list == null)
            {
                return null;
            }
            if (isGeneric)
            {
                // Invoke the generic Queryable.AsQueryable<T>(IEnumerable<T>) overload by reflection.
                Type argumentType = typeof(IEnumerable<>).MakeGenericType(queryableType.GetGenericArguments());
                MethodInfo asQueryableMethod = typeof(Queryable).GetMethod("AsQueryable", new[] { argumentType });
                return asQueryableMethod.Invoke(null, new[] { list });
            }
            return Queryable.AsQueryable((IEnumerable)list);
        }

        // Fills a collection via its Add method; null if every generated element was null.
        private static object GenerateCollection(Type collectionType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            Type type = collectionType.IsGenericType ?
                collectionType.GetGenericArguments()[0] :
                typeof(object);
            object result = Activator.CreateInstance(collectionType);
            MethodInfo addMethod = collectionType.GetMethod("Add");
            bool areAllElementsNull = true;
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object element = objectGenerator.GenerateObject(type, createdObjectReferences);
                addMethod.Invoke(result, new object[] { element });
                areAllElementsNull &= element == null;
            }
            if (areAllElementsNull)
            {
                return null;
            }
            return result;
        }

        // Unwraps Nullable<T> and generates the underlying T.
        private static object GenerateNullable(Type nullableType, Dictionary<Type, object> createdObjectReferences)
        {
            Type type = nullableType.GetGenericArguments()[0];
            ObjectGenerator objectGenerator = new ObjectGenerator();
            return objectGenerator.GenerateObject(type, createdObjectReferences);
        }

        // Creates a POCO via its default constructor and populates its public members.
        private static object GenerateComplexObject(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            object result = null;
            if (createdObjectReferences.TryGetValue(type, out result))
            {
                // The object has been created already, just return it. This will handle the circular reference case.
                return result;
            }
            if (type.IsValueType)
            {
                result = Activator.CreateInstance(type);
            }
            else
            {
                ConstructorInfo defaultCtor = type.GetConstructor(Type.EmptyTypes);
                if (defaultCtor == null)
                {
                    // Cannot instantiate the type because it doesn't have a default constructor
                    return null;
                }
                result = defaultCtor.Invoke(new object[0]);
            }
            // Register before populating members so circular references resolve to this instance.
            createdObjectReferences.Add(type, result);
            SetPublicProperties(type, result, createdObjectReferences);
            SetPublicFields(type, result, createdObjectReferences);
            return result;
        }

        // Populates every writable public instance property with a generated value.
        private static void SetPublicProperties(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
        {
            PropertyInfo[] properties = type.GetProperties(BindingFlags.Public | BindingFlags.Instance);
            ObjectGenerator objectGenerator = new ObjectGenerator();
            foreach (PropertyInfo property in properties)
            {
                if (property.CanWrite)
                {
                    object propertyValue = objectGenerator.GenerateObject(property.PropertyType, createdObjectReferences);
                    property.SetValue(obj, propertyValue, null);
                }
            }
        }

        // Populates every public instance field with a generated value.
        private static void SetPublicFields(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
        {
            FieldInfo[] fields = type.GetFields(BindingFlags.Public | BindingFlags.Instance);
            ObjectGenerator objectGenerator = new ObjectGenerator();
            foreach (FieldInfo field in fields)
            {
                object fieldValue = objectGenerator.GenerateObject(field.FieldType, createdObjectReferences);
                field.SetValue(obj, fieldValue);
            }
        }

        // Produces sample values for simple BCL types via a table of factories
        // keyed by Type; an incrementing counter varies the generated values.
        private class SimpleTypeObjectGenerator
        {
            private long _index = 0;
            private static readonly Dictionary<Type, Func<long, object>> DefaultGenerators = InitializeGenerators();

            [SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity", Justification = "These are simple type factories and cannot be split up.")]
            private static Dictionary<Type, Func<long, object>> InitializeGenerators()
            {
                return new Dictionary<Type, Func<long, object>>
                {
                    { typeof(Boolean), index => true },
                    { typeof(Byte), index => (Byte)64 },
                    { typeof(Char), index => (Char)65 },
                    { typeof(DateTime), index => DateTime.Now },
                    { typeof(DateTimeOffset), index => new DateTimeOffset(DateTime.Now) },
                    { typeof(DBNull), index => DBNull.Value },
                    { typeof(Decimal), index => (Decimal)index },
                    { typeof(Double), index => (Double)(index + 0.1) },
                    { typeof(Guid), index => Guid.NewGuid() },
                    { typeof(Int16), index => (Int16)(index % Int16.MaxValue) },
                    { typeof(Int32), index => (Int32)(index % Int32.MaxValue) },
                    { typeof(Int64), index => (Int64)index },
                    { typeof(Object), index => new object() },
                    { typeof(SByte), index => (SByte)64 },
                    { typeof(Single), index => (Single)(index + 0.1) },
                    {
                        typeof(String), index =>
                        {
                            return String.Format(CultureInfo.CurrentCulture, "sample string {0}", index);
                        }
                    },
                    {
                        typeof(TimeSpan), index =>
                        {
                            return TimeSpan.FromTicks(1234567);
                        }
                    },
                    { typeof(UInt16), index => (UInt16)(index % UInt16.MaxValue) },
                    { typeof(UInt32), index => (UInt32)(index % UInt32.MaxValue) },
                    { typeof(UInt64), index => (UInt64)index },
                    {
                        typeof(Uri), index =>
                        {
                            return new Uri(String.Format(CultureInfo.CurrentCulture, "http://webapihelppage{0}.com", index));
                        }
                    },
                };
            }

            // True if a factory exists for this exact type (no inheritance lookup).
            public static bool CanGenerateObject(Type type)
            {
                return DefaultGenerators.ContainsKey(type);
            }

            // Returns the next sample value for 'type', advancing the shared counter.
            public object GenerateObject(Type type)
            {
                return DefaultGenerators[type](++_index);
            }
        }
    }
}
#region License

/*
 * Copyright 2002-2010 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#endregion

#region Imports

using System;
using System.Collections;
using System.Collections.Generic;

using Spring.Objects.Factory.Config;

#endregion

namespace Spring.Objects.Factory.Support
{
    /// <summary>
    /// Programmatic means of constructing a <see cref="IObjectDefinition"/> using the builder pattern.
    /// Intended primarily for use when implementing custom namespace parsers.
    /// </summary>
    /// <remarks>Set methods are used instead of properties, so that chaining of methods can be used to create
    /// 'one-liner' definitions that set multiple properties at once.</remarks>
    /// <author>Rod Johnson</author>
    /// <author>Rob Harrop</author>
    /// <author>Juergen Hoeller</author>
    /// <author>Mark Pollack (.NET)</author>
    public class ObjectDefinitionBuilder
    {
        #region Fields

        // The definition under construction; exposed (validated) through ObjectDefinition.
        private AbstractObjectDefinition definition;

        // Factory used by the Root/Child factory methods to create the raw definition.
        private IObjectDefinitionFactory definitionFactory;

        // Next positional index handed out by AddConstructorArg.
        private int ctorArgIndex;

        #endregion

        #region Constructor(s)

        /// <summary>
        /// Initializes a new instance of the <see cref="ObjectDefinitionBuilder"/> class, private
        /// to force use of factory methods.
        /// </summary>
        private ObjectDefinitionBuilder()
        {
        }

        #endregion

        #region Factory Methods

        /// <summary>
        /// Creates a new <see cref="ObjectDefinitionBuilder"/> used to construct a <see cref="Spring.Objects.Factory.Support.GenericObjectDefinition"/>.
        /// </summary>
        public static ObjectDefinitionBuilder GenericObjectDefinition()
        {
            return new ObjectDefinitionBuilder { definition = new GenericObjectDefinition() };
        }

        /// <summary>
        /// Creates a new <see cref="ObjectDefinitionBuilder"/> used to construct a <see cref="Spring.Objects.Factory.Support.GenericObjectDefinition"/>.
        /// </summary>
        /// <param name="objectType">the <see cref="Type"/> of the object that the definition is being created for</param>
        public static ObjectDefinitionBuilder GenericObjectDefinition(Type objectType)
        {
            return new ObjectDefinitionBuilder
            {
                definition = new GenericObjectDefinition { ObjectType = objectType }
            };
        }

        /// <summary>
        /// Creates a new <see cref="ObjectDefinitionBuilder"/> used to construct a <see cref="Spring.Objects.Factory.Support.GenericObjectDefinition"/>.
        /// </summary>
        /// <param name="objectTypeName">the name of the <see cref="Type"/> of the object that the definition is being created for</param>
        public static ObjectDefinitionBuilder GenericObjectDefinition(string objectTypeName)
        {
            return new ObjectDefinitionBuilder
            {
                definition = new GenericObjectDefinition { ObjectTypeName = objectTypeName }
            };
        }

        /// <summary>
        /// Create a new <code>ObjectDefinitionBuilder</code> used to construct a root object definition.
        /// </summary>
        /// <param name="objectDefinitionFactory">The object definition factory.</param>
        /// <param name="objectTypeName">The type name of the object.</param>
        /// <returns>A new <code>ObjectDefinitionBuilder</code> instance.</returns>
        public static ObjectDefinitionBuilder RootObjectDefinition(IObjectDefinitionFactory objectDefinitionFactory,
                                                                   string objectTypeName)
        {
            return RootObjectDefinition(objectDefinitionFactory, objectTypeName, null);
        }

        /// <summary>
        /// Create a new <code>ObjectDefinitionBuilder</code> used to construct a root object definition.
        /// </summary>
        /// <param name="objectDefinitionFactory">The object definition factory.</param>
        /// <param name="objectTypeName">Name of the object type.</param>
        /// <param name="factoryMethodName">Name of the factory method.</param>
        /// <returns>A new <code>ObjectDefinitionBuilder</code> instance.</returns>
        public static ObjectDefinitionBuilder RootObjectDefinition(IObjectDefinitionFactory objectDefinitionFactory,
                                                                   string objectTypeName, string factoryMethodName)
        {
            // Pass null for both the parent name and the AppDomain so the definition is
            // registered by name rather than resolved to a Type.
            AbstractObjectDefinition def =
                objectDefinitionFactory.CreateObjectDefinition(objectTypeName, null, null);
            def.FactoryMethodName = factoryMethodName;

            return new ObjectDefinitionBuilder
            {
                definitionFactory = objectDefinitionFactory,
                definition = def
            };
        }

        /// <summary>
        /// Create a new <code>ObjectDefinitionBuilder</code> used to construct a root object definition.
        /// </summary>
        /// <param name="objectDefinitionFactory">The object definition factory.</param>
        /// <param name="objectType">Type of the object.</param>
        /// <returns>A new <code>ObjectDefinitionBuilder</code> instance.</returns>
        public static ObjectDefinitionBuilder RootObjectDefinition(IObjectDefinitionFactory objectDefinitionFactory,
                                                                   Type objectType)
        {
            return RootObjectDefinition(objectDefinitionFactory, objectType, null);
        }

        /// <summary>
        /// Create a new <code>ObjectDefinitionBuilder</code> used to construct a root object definition.
        /// </summary>
        /// <param name="objectDefinitionFactory">The object definition factory.</param>
        /// <param name="objectType">Type of the object.</param>
        /// <param name="factoryMethodName">Name of the factory method.</param>
        /// <returns>A new <code>ObjectDefinitionBuilder</code> instance.</returns>
        public static ObjectDefinitionBuilder RootObjectDefinition(IObjectDefinitionFactory objectDefinitionFactory,
                                                                   Type objectType, string factoryMethodName)
        {
            AbstractObjectDefinition def =
                objectDefinitionFactory.CreateObjectDefinition(objectType.FullName, null, AppDomain.CurrentDomain);
            def.ObjectType = objectType;
            def.FactoryMethodName = factoryMethodName;

            return new ObjectDefinitionBuilder
            {
                definitionFactory = objectDefinitionFactory,
                definition = def
            };
        }

        /// <summary>
        /// Create a new <code>ObjectDefinitionBuilder</code> used to construct a child object definition.
        /// </summary>
        /// <param name="objectDefinitionFactory">The object definition factory.</param>
        /// <param name="parentObjectName">Name of the parent object.</param>
        /// <returns></returns>
        public static ObjectDefinitionBuilder ChildObjectDefinition(IObjectDefinitionFactory objectDefinitionFactory,
                                                                    string parentObjectName)
        {
            return new ObjectDefinitionBuilder
            {
                definitionFactory = objectDefinitionFactory,
                definition = objectDefinitionFactory.CreateObjectDefinition(null, parentObjectName, AppDomain.CurrentDomain)
            };
        }

        #endregion

        #region Properties

        /// <summary>
        /// Gets the current object definition in its raw (unvalidated) form.
        /// </summary>
        /// <value>The raw object definition.</value>
        public AbstractObjectDefinition RawObjectDefinition
        {
            get { return definition; }
        }

        /// <summary>
        /// Validate and gets the object definition.
        /// </summary>
        /// <value>The object definition.</value>
        public AbstractObjectDefinition ObjectDefinition
        {
            get
            {
                definition.Validate();
                return definition;
            }
        }

        #endregion

        #region Methods

        //TODO add expression support.

        /// <summary>
        /// Adds the property value under the given name.
        /// </summary>
        /// <param name="name">The name.</param>
        /// <param name="value">The value.</param>
        /// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns>
        public ObjectDefinitionBuilder AddPropertyValue(string name, object value)
        {
            definition.PropertyValues.Add(new PropertyValue(name, value));
            return this;
        }

        /// <summary>
        /// Adds a reference to the specified object name under the property specified.
        /// </summary>
        /// <param name="name">The name.</param>
        /// <param name="objectName">Name of the object.</param>
        /// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns>
        public ObjectDefinitionBuilder AddPropertyReference(string name, string objectName)
        {
            definition.PropertyValues.Add(new PropertyValue(name, new RuntimeObjectReference(objectName)));
            return this;
        }

        /// <summary>
        /// Adds an index constructor arg value. The current index is tracked internally and all addtions are
        /// at the present point
        /// </summary>
        /// <param name="value">The constructor arg value.</param>
        /// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns>
        public ObjectDefinitionBuilder AddConstructorArg(object value)
        {
            definition.ConstructorArgumentValues.AddIndexedArgumentValue(ctorArgIndex++, value);
            return this;
        }

        /// <summary>
        /// Adds a reference to the named object as a constructor argument.
        /// </summary>
        /// <param name="objectName">Name of the object.</param>
        /// <returns></returns>
        public ObjectDefinitionBuilder AddConstructorArgReference(string objectName)
        {
            return AddConstructorArg(new RuntimeObjectReference(objectName));
        }

        /// <summary>
        /// Sets the name of the factory method to use for this definition.
        /// </summary>
        /// <param name="factoryMethod">The factory method.</param>
        /// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns>
        public ObjectDefinitionBuilder SetFactoryMethod(string factoryMethod)
        {
            definition.FactoryMethodName = factoryMethod;
            return this;
        }

        /// <summary>
        /// Sets the name of the factory object to use for this definition.
        /// </summary>
        /// <param name="factoryObject">The factory object.</param>
        /// <param name="factoryMethod">The factory method.</param>
        /// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns>
        public ObjectDefinitionBuilder SetFactoryObject(string factoryObject, string factoryMethod)
        {
            definition.FactoryObjectName = factoryObject;
            definition.FactoryMethodName = factoryMethod;
            return this;
        }

        /// <summary>
        /// Sets whether or not this definition describes a singleton object.
        /// </summary>
        /// <param name="singleton">if set to <c>true</c> [singleton].</param>
        /// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns>
        public ObjectDefinitionBuilder SetSingleton(bool singleton)
        {
            definition.IsSingleton = singleton;
            return this;
        }

        /// <summary>
        /// Sets whether objects or not this definition is abstract.
        /// </summary>
        /// <param name="flag">if set to <c>true</c> [flag].</param>
        /// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns>
        public ObjectDefinitionBuilder SetAbstract(bool flag)
        {
            definition.IsAbstract = flag;
            return this;
        }

        /// <summary>
        /// Sets whether objects for this definition should be lazily initialized or not.
        /// </summary>
        /// <param name="lazy">if set to <c>true</c> [lazy].</param>
        /// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns>
        public ObjectDefinitionBuilder SetLazyInit(bool lazy)
        {
            definition.IsLazyInit = lazy;
            return this;
        }

        /// <summary>
        /// Sets the autowire mode for this definition.
        /// </summary>
        /// <param name="autowireMode">The autowire mode.</param>
        /// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns>
        public ObjectDefinitionBuilder SetAutowireMode(AutoWiringMode autowireMode)
        {
            definition.AutowireMode = autowireMode;
            return this;
        }

        /// <summary>
        /// Sets the autowire candidate value for this definition.
        /// </summary>
        /// <param name="autowireCandidate">The autowire candidate value</param>
        /// <returns></returns>
        public ObjectDefinitionBuilder SetAutowireCandidate(bool autowireCandidate)
        {
            definition.IsAutowireCandidate = autowireCandidate;
            return this;
        }

        /// <summary>
        /// Sets the primary value for this definition.
        /// </summary>
        /// <param name="primary">If object is primary</param>
        /// <returns></returns>
        public ObjectDefinitionBuilder SetPrimary(bool primary)
        {
            definition.IsPrimary = primary;
            return this;
        }

        /// <summary>
        /// Sets the dependency check mode for this definition.
        /// </summary>
        /// <param name="dependencyCheck">The dependency check.</param>
        /// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns>
        public ObjectDefinitionBuilder SetDependencyCheck(DependencyCheckingMode dependencyCheck)
        {
            definition.DependencyCheck = dependencyCheck;
            return this;
        }

        /// <summary>
        /// Sets the name of the destroy method for this definition.
        /// </summary>
        /// <param name="methodName">Name of the method.</param>
        /// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns>
        public ObjectDefinitionBuilder SetDestroyMethodName(string methodName)
        {
            definition.DestroyMethodName = methodName;
            return this;
        }

        /// <summary>
        /// Sets the name of the init method for this definition.
        /// </summary>
        /// <param name="methodName">Name of the method.</param>
        /// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns>
        public ObjectDefinitionBuilder SetInitMethodName(string methodName)
        {
            definition.InitMethodName = methodName;
            return this;
        }

        /// <summary>
        /// Sets the resource description for this definition.
        /// </summary>
        /// <param name="resourceDescription">The resource description.</param>
        /// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns>
        public ObjectDefinitionBuilder SetResourceDescription(string resourceDescription)
        {
            definition.ResourceDescription = resourceDescription;
            return this;
        }

        /// <summary>
        /// Adds the specified object name to the list of objects that this definition depends on.
        /// </summary>
        /// <param name="objectName">Name of the object.</param>
        /// <returns>The current <code>ObjectDefinitionBuilder</code>.</returns>
        public ObjectDefinitionBuilder AddDependsOn(string objectName)
        {
            if (definition.DependsOn == null)
            {
                definition.DependsOn = new[] { objectName };
            }
            else
            {
                // Copy the existing names and append the new one.
                List<string> merged = new List<string>(definition.DependsOn);
                merged.Add(objectName);
                definition.DependsOn = merged;
            }
            return this;
        }

        #endregion
    }
}
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.ComponentModel.DataAnnotations;
using System.Globalization;
using System.Reflection;
using System.Runtime.Serialization;
using System.Web.Http;
using System.Web.Http.Description;
using System.Xml.Serialization;
using Newtonsoft.Json;

namespace HTTPOptions.Areas.HelpPage.ModelDescriptions
{
    /// <summary>
    /// Generates model descriptions for given types.
    /// Results are cached in <see cref="GeneratedModels"/> keyed by model name, so each
    /// distinct type is described exactly once per generator instance.
    /// </summary>
    public class ModelDescriptionGenerator
    {
        // Modify this to support more data annotation attributes.
        // Maps an annotation attribute type to a function that renders its human-readable text.
        private readonly IDictionary<Type, Func<object, string>> AnnotationTextGenerator = new Dictionary<Type, Func<object, string>>
        {
            { typeof(RequiredAttribute), a => "Required" },
            { typeof(RangeAttribute), a =>
                {
                    RangeAttribute range = (RangeAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "Range: inclusive between {0} and {1}", range.Minimum, range.Maximum);
                }
            },
            { typeof(MaxLengthAttribute), a =>
                {
                    MaxLengthAttribute maxLength = (MaxLengthAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "Max length: {0}", maxLength.Length);
                }
            },
            { typeof(MinLengthAttribute), a =>
                {
                    MinLengthAttribute minLength = (MinLengthAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "Min length: {0}", minLength.Length);
                }
            },
            { typeof(StringLengthAttribute), a =>
                {
                    StringLengthAttribute strLength = (StringLengthAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "String length: inclusive between {0} and {1}", strLength.MinimumLength, strLength.MaximumLength);
                }
            },
            { typeof(DataTypeAttribute), a =>
                {
                    DataTypeAttribute dataType = (DataTypeAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "Data type: {0}", dataType.CustomDataType ?? dataType.DataType.ToString());
                }
            },
            { typeof(RegularExpressionAttribute), a =>
                {
                    RegularExpressionAttribute regularExpression = (RegularExpressionAttribute)a;
                    return String.Format(CultureInfo.CurrentCulture, "Matching regular expression pattern: {0}", regularExpression.Pattern);
                }
            },
        };

        // Modify this to add more default documentations.
        // Types listed here are treated as "simple" and get the canned documentation string.
        private readonly IDictionary<Type, string> DefaultTypeDocumentation = new Dictionary<Type, string>
        {
            { typeof(Int16), "integer" },
            { typeof(Int32), "integer" },
            { typeof(Int64), "integer" },
            { typeof(UInt16), "unsigned integer" },
            { typeof(UInt32), "unsigned integer" },
            { typeof(UInt64), "unsigned integer" },
            { typeof(Byte), "byte" },
            { typeof(Char), "character" },
            { typeof(SByte), "signed byte" },
            { typeof(Uri), "URI" },
            { typeof(Single), "decimal number" },
            { typeof(Double), "decimal number" },
            { typeof(Decimal), "decimal number" },
            { typeof(String), "string" },
            { typeof(Guid), "globally unique identifier" },
            { typeof(TimeSpan), "time interval" },
            { typeof(DateTime), "date" },
            { typeof(DateTimeOffset), "date" },
            { typeof(Boolean), "boolean" },
        };

        // Lazily resolved so the documentation provider is fetched from config only on first use.
        private Lazy<IModelDocumentationProvider> _documentationProvider;

        /// <summary>
        /// Initializes the generator against the given HTTP configuration, from which the
        /// (optional) XML documentation provider is resolved lazily.
        /// </summary>
        /// <exception cref="ArgumentNullException">When <paramref name="config"/> is null.</exception>
        public ModelDescriptionGenerator(HttpConfiguration config)
        {
            if (config == null)
            {
                throw new ArgumentNullException("config");
            }

            _documentationProvider = new Lazy<IModelDocumentationProvider>(() => config.Services.GetDocumentationProvider() as IModelDocumentationProvider);
            // Model names are compared case-insensitively.
            GeneratedModels = new Dictionary<string, ModelDescription>(StringComparer.OrdinalIgnoreCase);
        }

        /// <summary>Cache of all descriptions produced so far, keyed by model name.</summary>
        public Dictionary<string, ModelDescription> GeneratedModels { get; private set; }

        // May be null when no IModelDocumentationProvider is registered in the configuration.
        private IModelDocumentationProvider DocumentationProvider
        {
            get
            {
                return _documentationProvider.Value;
            }
        }

        /// <summary>
        /// Returns the cached description for <paramref name="modelType"/>, or builds one.
        /// NOTE: the order of checks below is significant — cache, simple type, enum,
        /// generic collection/dictionary/pair, array, non-generic collections, and finally
        /// complex type as the fallback.
        /// </summary>
        /// <exception cref="ArgumentNullException">When <paramref name="modelType"/> is null.</exception>
        /// <exception cref="InvalidOperationException">When two different types map to the same model name.</exception>
        public ModelDescription GetOrCreateModelDescription(Type modelType)
        {
            if (modelType == null)
            {
                throw new ArgumentNullException("modelType");
            }

            // Describe Nullable<T> as its underlying T.
            Type underlyingType = Nullable.GetUnderlyingType(modelType);
            if (underlyingType != null)
            {
                modelType = underlyingType;
            }

            ModelDescription modelDescription;
            string modelName = ModelNameHelper.GetModelName(modelType);
            if (GeneratedModels.TryGetValue(modelName, out modelDescription))
            {
                if (modelType != modelDescription.ModelType)
                {
                    throw new InvalidOperationException(
                        String.Format(
                            CultureInfo.CurrentCulture,
                            "A model description could not be created. Duplicate model name '{0}' was found for types '{1}' and '{2}'. " +
                            "Use the [ModelName] attribute to change the model name for at least one of the types so that it has a unique name.",
                            modelName,
                            modelDescription.ModelType.FullName,
                            modelType.FullName));
                }

                return modelDescription;
            }

            if (DefaultTypeDocumentation.ContainsKey(modelType))
            {
                return GenerateSimpleTypeModelDescription(modelType);
            }

            if (modelType.IsEnum)
            {
                return GenerateEnumTypeModelDescription(modelType);
            }

            if (modelType.IsGenericType)
            {
                Type[] genericArguments = modelType.GetGenericArguments();

                if (genericArguments.Length == 1)
                {
                    // Anything assignable to IEnumerable<T> is presented as a collection of T.
                    Type enumerableType = typeof(IEnumerable<>).MakeGenericType(genericArguments);
                    if (enumerableType.IsAssignableFrom(modelType))
                    {
                        return GenerateCollectionModelDescription(modelType, genericArguments[0]);
                    }
                }
                if (genericArguments.Length == 2)
                {
                    Type dictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments);
                    if (dictionaryType.IsAssignableFrom(modelType))
                    {
                        return GenerateDictionaryModelDescription(modelType, genericArguments[0], genericArguments[1]);
                    }

                    Type keyValuePairType = typeof(KeyValuePair<,>).MakeGenericType(genericArguments);
                    if (keyValuePairType.IsAssignableFrom(modelType))
                    {
                        return GenerateKeyValuePairModelDescription(modelType, genericArguments[0], genericArguments[1]);
                    }
                }
            }

            if (modelType.IsArray)
            {
                Type elementType = modelType.GetElementType();
                return GenerateCollectionModelDescription(modelType, elementType);
            }

            if (modelType == typeof(NameValueCollection))
            {
                return GenerateDictionaryModelDescription(modelType, typeof(string), typeof(string));
            }

            // Non-generic collections: element/key/value types are unknown, so use object.
            if (typeof(IDictionary).IsAssignableFrom(modelType))
            {
                return GenerateDictionaryModelDescription(modelType, typeof(object), typeof(object));
            }

            if (typeof(IEnumerable).IsAssignableFrom(modelType))
            {
                return GenerateCollectionModelDescription(modelType, typeof(object));
            }

            return GenerateComplexTypeModelDescription(modelType);
        }

        // Change this to provide different name for the member.
        // Precedence: [JsonProperty] name, then [DataMember] name (only on data contracts), then CLR name.
        private static string GetMemberName(MemberInfo member, bool hasDataContractAttribute)
        {
            JsonPropertyAttribute jsonProperty = member.GetCustomAttribute<JsonPropertyAttribute>();
            if (jsonProperty != null && !String.IsNullOrEmpty(jsonProperty.PropertyName))
            {
                return jsonProperty.PropertyName;
            }

            if (hasDataContractAttribute)
            {
                DataMemberAttribute dataMember = member.GetCustomAttribute<DataMemberAttribute>();
                if (dataMember != null && !String.IsNullOrEmpty(dataMember.Name))
                {
                    return dataMember.Name;
                }
            }

            return member.Name;
        }

        private static bool ShouldDisplayMember(MemberInfo member, bool hasDataContractAttribute)
        {
            JsonIgnoreAttribute jsonIgnore = member.GetCustomAttribute<JsonIgnoreAttribute>();
            XmlIgnoreAttribute xmlIgnore = member.GetCustomAttribute<XmlIgnoreAttribute>();
            IgnoreDataMemberAttribute ignoreDataMember = member.GetCustomAttribute<IgnoreDataMemberAttribute>();
            NonSerializedAttribute nonSerialized = member.GetCustomAttribute<NonSerializedAttribute>();
            ApiExplorerSettingsAttribute apiExplorerSetting = member.GetCustomAttribute<ApiExplorerSettingsAttribute>();

            // Enum members opt in via [EnumMember]; other members via [DataMember].
            bool hasMemberAttribute = member.DeclaringType.IsEnum ?
                member.GetCustomAttribute<EnumMemberAttribute>() != null :
                member.GetCustomAttribute<DataMemberAttribute>() != null;

            // Display member only if all the followings are true:
            // no JsonIgnoreAttribute
            // no XmlIgnoreAttribute
            // no IgnoreDataMemberAttribute
            // no NonSerializedAttribute
            // no ApiExplorerSettingsAttribute with IgnoreApi set to true
            // no DataContractAttribute without DataMemberAttribute or EnumMemberAttribute
            return jsonIgnore == null &&
                xmlIgnore == null &&
                ignoreDataMember == null &&
                nonSerialized == null &&
                (apiExplorerSetting == null || !apiExplorerSetting.IgnoreApi) &&
                (!hasDataContractAttribute || hasMemberAttribute);
        }

        // Returns the canned text for simple types, otherwise the XML-doc text from the
        // provider; null when neither source has anything for this type.
        private string CreateDefaultDocumentation(Type type)
        {
            string documentation;
            if (DefaultTypeDocumentation.TryGetValue(type, out documentation))
            {
                return documentation;
            }
            if (DocumentationProvider != null)
            {
                documentation = DocumentationProvider.GetDocumentation(type);
            }

            return documentation;
        }

        // Collects the rendered text for every recognized data annotation on the member
        // and appends the annotations to the property model, Required first.
        private void GenerateAnnotations(MemberInfo property, ParameterDescription propertyModel)
        {
            List<ParameterAnnotation> annotations = new List<ParameterAnnotation>();

            IEnumerable<Attribute> attributes = property.GetCustomAttributes();
            foreach (Attribute attribute in attributes)
            {
                Func<object, string> textGenerator;
                if (AnnotationTextGenerator.TryGetValue(attribute.GetType(), out textGenerator))
                {
                    annotations.Add(
                        new ParameterAnnotation
                        {
                            AnnotationAttribute = attribute,
                            Documentation = textGenerator(attribute)
                        });
                }
            }

            // Rearrange the annotations
            annotations.Sort((x, y) =>
            {
                // Special-case RequiredAttribute so that it shows up on top
                if (x.AnnotationAttribute is RequiredAttribute)
                {
                    return -1;
                }
                if (y.AnnotationAttribute is RequiredAttribute)
                {
                    return 1;
                }

                // Sort the rest based on alphabetic order of the documentation
                return String.Compare(x.Documentation, y.Documentation, StringComparison.OrdinalIgnoreCase);
            });

            foreach (ParameterAnnotation annotation in annotations)
            {
                propertyModel.Annotations.Add(annotation);
            }
        }

        private CollectionModelDescription GenerateCollectionModelDescription(Type modelType, Type elementType)
        {
            ModelDescription collectionModelDescription = GetOrCreateModelDescription(elementType);
            if (collectionModelDescription != null)
            {
                return new CollectionModelDescription
                {
                    Name = ModelNameHelper.GetModelName(modelType),
                    ModelType = modelType,
                    ElementDescription = collectionModelDescription
                };
            }

            return null;
        }

        private ModelDescription GenerateComplexTypeModelDescription(Type modelType)
        {
            ComplexTypeModelDescription complexModelDescription = new ComplexTypeModelDescription
            {
                Name = ModelNameHelper.GetModelName(modelType),
                ModelType = modelType,
                Documentation = CreateDefaultDocumentation(modelType)
            };

            // Register the description BEFORE recursing into members, so self-referential
            // types resolve through the cache lookup in GetOrCreateModelDescription
            // instead of recursing forever.
            GeneratedModels.Add(complexModelDescription.Name, complexModelDescription);
            bool hasDataContractAttribute = modelType.GetCustomAttribute<DataContractAttribute>() != null;
            PropertyInfo[] properties = modelType.GetProperties(BindingFlags.Public | BindingFlags.Instance);
            foreach (PropertyInfo property in properties)
            {
                if (ShouldDisplayMember(property, hasDataContractAttribute))
                {
                    ParameterDescription propertyModel = new ParameterDescription
                    {
                        Name = GetMemberName(property, hasDataContractAttribute)
                    };

                    if (DocumentationProvider != null)
                    {
                        propertyModel.Documentation = DocumentationProvider.GetDocumentation(property);
                    }

                    GenerateAnnotations(property, propertyModel);
                    complexModelDescription.Properties.Add(propertyModel);
                    propertyModel.TypeDescription = GetOrCreateModelDescription(property.PropertyType);
                }
            }

            FieldInfo[] fields = modelType.GetFields(BindingFlags.Public | BindingFlags.Instance);
            foreach (FieldInfo field in fields)
            {
                if (ShouldDisplayMember(field, hasDataContractAttribute))
                {
                    ParameterDescription propertyModel = new ParameterDescription
                    {
                        Name = GetMemberName(field, hasDataContractAttribute)
                    };

                    if (DocumentationProvider != null)
                    {
                        propertyModel.Documentation = DocumentationProvider.GetDocumentation(field);
                    }

                    complexModelDescription.Properties.Add(propertyModel);
                    propertyModel.TypeDescription = GetOrCreateModelDescription(field.FieldType);
                }
            }

            return complexModelDescription;
        }

        private DictionaryModelDescription GenerateDictionaryModelDescription(Type modelType, Type keyType, Type valueType)
        {
            ModelDescription keyModelDescription = GetOrCreateModelDescription(keyType);
            ModelDescription valueModelDescription = GetOrCreateModelDescription(valueType);
            return new DictionaryModelDescription
            {
                Name = ModelNameHelper.GetModelName(modelType),
                ModelType = modelType,
                KeyModelDescription = keyModelDescription,
                ValueModelDescription = valueModelDescription
            };
        }

        private EnumTypeModelDescription GenerateEnumTypeModelDescription(Type modelType)
        {
            EnumTypeModelDescription enumDescription = new EnumTypeModelDescription
            {
                Name = ModelNameHelper.GetModelName(modelType),
                ModelType = modelType,
                Documentation = CreateDefaultDocumentation(modelType)
            };
            bool hasDataContractAttribute = modelType.GetCustomAttribute<DataContractAttribute>() != null;
            foreach (FieldInfo field in modelType.GetFields(BindingFlags.Public | BindingFlags.Static))
            {
                if (ShouldDisplayMember(field, hasDataContractAttribute))
                {
                    EnumValueDescription enumValue = new EnumValueDescription
                    {
                        Name = field.Name,
                        Value = field.GetRawConstantValue().ToString()
                    };
                    if (DocumentationProvider != null)
                    {
                        enumValue.Documentation = DocumentationProvider.GetDocumentation(field);
                    }
                    enumDescription.Values.Add(enumValue);
                }
            }
            GeneratedModels.Add(enumDescription.Name, enumDescription);

            return enumDescription;
        }

        private KeyValuePairModelDescription GenerateKeyValuePairModelDescription(Type modelType, Type keyType, Type valueType)
        {
            ModelDescription keyModelDescription = GetOrCreateModelDescription(keyType);
            ModelDescription valueModelDescription = GetOrCreateModelDescription(valueType);
            return new KeyValuePairModelDescription
            {
                Name = ModelNameHelper.GetModelName(modelType),
                ModelType = modelType,
                KeyModelDescription = keyModelDescription,
                ValueModelDescription = valueModelDescription
            };
        }

        private ModelDescription GenerateSimpleTypeModelDescription(Type modelType)
        {
            SimpleTypeModelDescription simpleModelDescription = new SimpleTypeModelDescription
            {
                Name = ModelNameHelper.GetModelName(modelType),
                ModelType = modelType,
                Documentation = CreateDefaultDocumentation(modelType)
            };
            GeneratedModels.Add(simpleModelDescription.Name, simpleModelDescription);

            return simpleModelDescription;
        }
    }
}
/******************************************************************************
 * VERITAS: Copyright (c) 2018 Veritas Technologies LLC.
 * This software is licensed as described in the file LICENSE which is part of this repository
 *****************************************************************************/

using System;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Reflection;
using BEArcus.Agent;

[assembly: Obfuscation(Exclude = true, ApplyToMembers = true, Feature = "renaming")]

namespace Logging
{
    /// <summary>
    /// The Log object is used by a component to send information to the log output stream of the application.
    /// The ultimate output location depends on which listeners the log object currently has.
    /// All LogXxx overloads append to a rolling text file under <see cref="arcusLogFolder"/>;
    /// the LogEvent overloads additionally write to the Windows event log.
    /// NOTE(review): WriteLog is not synchronized — concurrent writers may interleave or collide
    /// on the file; confirm single-threaded use or add locking.
    /// </summary>
    public class Log : ILog
    {
        protected string _logname;
        protected string _truncatedname;
        protected string _eventLogSource = @"BE_ARCUS_LOG";

        //public string arcusLogFolder = @"C:\Arcus.Logs";
        public string arcusLogFolder = CommonSettings.LogFile;

        // When ArcusLog.txt exceeds this size it is copied to ArcusLog1.txt and truncated.
        private const uint LOG_SIZE_LIMIT = 100000; // 100 KB

        /// <remark>
        /// Name of the log. Setting it also caches a 12-character truncated form.
        /// </remark>
        public string Name
        {
            get { return _logname; }
            set
            {
                _logname = value;
                _truncatedname = (value.Length > 12) ? value.Substring(0, 12) : value;
            }
        }

        public string Owner { get; set; }

        public string Description { get; set; }

        public string EventLogSource
        {
            get { return _eventLogSource; }
            set { _eventLogSource = value; }
        }

        //Constructor
        public Log()
        {
            // Ensure the log directory exists before any WriteLog call.
            if (!Directory.Exists(arcusLogFolder))
                Directory.CreateDirectory(arcusLogFolder);
        }

        /// <summary>
        /// Formats <paramref name="text"/> with <paramref name="parms"/> when arguments are supplied.
        /// Falls back to the raw text on a malformed format string so a bad message never
        /// loses the log line (previously the arguments were silently ignored).
        /// </summary>
        private static string FormatMessage(string text, object[] parms)
        {
            if (text == null || parms == null || parms.Length == 0)
            {
                return text;
            }
            try
            {
                return string.Format(CultureInfo.InvariantCulture, text, parms);
            }
            catch (FormatException)
            {
                return text;
            }
        }

        /// <summary>
        /// Log an informational text string to the logging stream. The text will only be emitted if the INFO bit is set.
        /// </summary>
        public void LogInfo(string text, params object[] parms)
        {
            WriteLog(FormatMessage(text, parms));
        }

        /// <summary>
        /// Log informational text to the logging stream. This is only performed if the user defined level matches what the stream is currently configured to emit.
        /// </summary>
        public void LogInfo(uint level, string text, params object[] parms)
        {
            // NOTE(review): 'level' is currently ignored — there is no level filtering here.
            WriteLog(FormatMessage(text, parms));
        }

        /// <summary>
        /// Log warning text to the logging stream. The text will only be emitted if the WARNING bit is set.
        /// </summary>
        public void LogWarning(string text, params object[] parms)
        {
            WriteLog(FormatMessage(text, parms));
        }

        /// <summary>
        /// Log verbose text to the logging stream.
        /// </summary>
        public void LogVerbose(string text, params object[] parms)
        {
            WriteLog(FormatMessage(text, parms));
        }

        /// <summary>
        /// Log error text to the logging stream. The text will only be emitted if the ERROR bit is set.
        /// </summary>
        public void LogError(string text, params object[] parms)
        {
            WriteLog(FormatMessage(text, parms));
        }

        /// <summary>
        /// Log error text to the logging stream. The text will only be emitted if the ERROR bit is set.
        /// Also Assert.
        /// </summary>
        public void LogErrorAndAssert(string text, params object[] parms)
        {
            string message = FormatMessage(text, parms);
            WriteLog(message);
            // Honor the documented "AndAssert" contract (previously missing); no-op in release builds.
            Debug.Assert(false, message);
        }

        /// <summary>
        /// Log error and assert execution if condition=true, else do nothing.
        /// </summary>
        /// <param name="condition">if condition=true, log error and assert execution, else do nothing.</param>
        /// <param name="text">Message text to log in case of error\assert</param>
        /// <param name="parms"></param>
        public void LogErrorAndAssert(bool condition, string text, params object[] parms)
        {
            if (condition == true)
            {
                LogErrorAndAssert(text, parms);
            }
        }

        /// <summary>
        /// Log debug text to the logging stream. The text will only be emitted if the DEBUG bit is set.
        /// </summary>
        private void LogDebug(string text, params object[] parms) //saurabhG: Michael Avalone says use LogVerbose, avoid LogDebug, hence made private
        {
            WriteLog(FormatMessage(text, parms));
        }

        /// <summary>
        /// Log exception text to the logging stream. The text will only be emitted if the ERROR bit is set.
        /// </summary>
        public void LogException(string text, params object[] parms)
        {
            WriteLog(FormatMessage(text, parms));
        }

        /// <summary>
        /// Converts an exception to text and logs it to the logging stream. The text will only be emitted if the ERROR bit is set.
        /// </summary>
        public void LogException(Exception excpt)
        {
            if (excpt == null)
            {
                throw new ArgumentNullException("excpt");
            }
            WriteLog(excpt.ToString());
        }

        /// <summary>
        /// Logs an event to the system. The event will always be logged.
        /// </summary>
        public void LogEvent(string text, EventLogEntryType type, int category, byte[] data, params object[] parms)
        {
            if (text == null)
            {
                throw new ArgumentNullException("text");
            }
            string str = string.Format(text, parms);
            // BUGFIX: previously the UNformatted 'text' was written to the file while the
            // formatted 'str' went to the event log; both now receive the formatted message.
            WriteLog(str);
            System.Diagnostics.EventLog.WriteEntry(_eventLogSource, str, type, 0, (short)category, data);
        }

        /// <summary>
        /// Logs an event to the system. The event will always be logged.
        /// </summary>
        public void LogEvent(string text, EventLogEntryType type)
        {
            if (text == null)
            {
                throw new ArgumentNullException("text");
            }
            System.Diagnostics.EventLog.WriteEntry(_eventLogSource, text, type, 0);
            WriteLog(text);
        }

        /// <summary>
        /// Logs an event to the system. The event will always be logged.
        /// </summary>
        public void LogEvent(long messageID, EventLogEntryType type, int category, object[] parms, byte[] data)
        {
            System.Diagnostics.EventLog.WriteEvent(_eventLogSource, new EventInstance(messageID, category, type), data, parms);
        }

        public override string ToString()
        {
            return _logname;
        }

        /// <summary>
        /// Appends a timestamped line to ArcusLog.txt, first rolling the file over to
        /// ArcusLog1.txt (overwriting any previous rollover) when it exceeds LOG_SIZE_LIMIT.
        /// </summary>
        protected void WriteLog(string message)
        {
            string filePath = Path.Combine(arcusLogFolder, "ArcusLog.txt");
            string file2Path = Path.Combine(arcusLogFolder, "ArcusLog1.txt");

            // Roll over before appending. Checking File.Exists replaces the old
            // create-then-close StreamWriter dance that existed only so FileInfo
            // would not throw on a missing file.
            if (File.Exists(filePath) && new FileInfo(filePath).Length > LOG_SIZE_LIMIT)
            {
                File.Copy(filePath, file2Path, true);
                File.WriteAllText(filePath, String.Empty);
            }

            // File.AppendText creates the file when it does not exist; 'using' disposes
            // the writer (the explicit Close() inside the using block was redundant).
            using (StreamWriter streamWriter = File.AppendText(filePath))
            {
                streamWriter.WriteLine("{0}:{1}:{2}", DateTime.Now.ToLongTimeString(), DateTime.Now.ToLongDateString(), message);
            }
        }
    }
}
// // Mono.CSharp.Debugger/MonoSymbolTable.cs // // Author: // Martin Baulig (martin@ximian.com) // // (C) 2002 Ximian, Inc. http://www.ximian.com // // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. // using System; using System.Security.Cryptography; using System.Collections.Generic; using System.Text; using System.IO; // // Parts which are actually written into the symbol file are marked with // // #region This is actually written to the symbol file // #endregion // // Please do not modify these regions without previously talking to me. // // All changes to the file format must be synchronized in several places: // // a) The fields in these regions (and their order) must match the actual // contents of the symbol file. // // This helps people to understand the symbol file format without reading // too much source code, ie. you look at the appropriate region and then // you know what's actually in the file. // // It is also required to help me enforce b). 
// // b) The regions must be kept in sync with the unmanaged code in // mono/metadata/debug-mono-symfile.h // // When making changes to the file format, you must also increase two version // numbers: // // i) OffsetTable.Version in this file. // ii) MONO_SYMBOL_FILE_VERSION in mono/metadata/debug-mono-symfile.h // // After doing so, recompile everything, including the debugger. Symbol files // with different versions are incompatible to each other and the debugger and // the runtime enforce this, so you need to recompile all your assemblies after // changing the file format. //
namespace Mono.CompilerServices.SymbolWriter {
// OffsetTable: fixed header of the .mdb symbol file. Records counts, offsets and sizes
// of every data table plus the line-number-table encoding parameters. The fields in the
// region below are read/written field-for-field in exactly this order.
public class OffsetTable { public const int MajorVersion = 50; public const int MinorVersion = 0; public const long Magic = 0x45e82623fd7fa614;
#region This is actually written to the symbol file
public int TotalFileSize; public int DataSectionOffset; public int DataSectionSize; public int CompileUnitCount; public int CompileUnitTableOffset; public int CompileUnitTableSize; public int SourceCount; public int SourceTableOffset; public int SourceTableSize; public int MethodCount; public int MethodTableOffset; public int MethodTableSize; public int TypeCount; public int AnonymousScopeCount; public int AnonymousScopeTableOffset; public int AnonymousScopeTableSize; [Flags] public enum Flags { IsAspxSource = 1, WindowsFileNames = 2 } public Flags FileFlags; public int LineNumberTable_LineBase = LineNumberTable.Default_LineBase; public int LineNumberTable_LineRange = LineNumberTable.Default_LineRange; public int LineNumberTable_OpcodeBase = LineNumberTable.Default_OpcodeBase;
#endregion
// Platform ids 4 (Unix) and 128 (legacy Mono Unix) are excluded; everything else gets Windows file-name semantics.
internal OffsetTable () { int platform = (int) Environment.OSVersion.Platform; if ((platform != 4) && (platform != 128)) FileFlags |= Flags.WindowsFileNames; }
internal OffsetTable (BinaryReader reader, int major_version, int minor_version) { TotalFileSize = reader.ReadInt32 (); DataSectionOffset = reader.ReadInt32 (); DataSectionSize = reader.ReadInt32 (); CompileUnitCount = reader.ReadInt32 (); CompileUnitTableOffset = reader.ReadInt32 (); CompileUnitTableSize = reader.ReadInt32 (); SourceCount = reader.ReadInt32 (); SourceTableOffset = reader.ReadInt32 (); SourceTableSize = reader.ReadInt32 (); MethodCount = reader.ReadInt32 (); MethodTableOffset = reader.ReadInt32 (); MethodTableSize = reader.ReadInt32 (); TypeCount = reader.ReadInt32 (); AnonymousScopeCount = reader.ReadInt32 (); AnonymousScopeTableOffset = reader.ReadInt32 (); AnonymousScopeTableSize = reader.ReadInt32 (); LineNumberTable_LineBase = reader.ReadInt32 (); LineNumberTable_LineRange = reader.ReadInt32 (); LineNumberTable_OpcodeBase = reader.ReadInt32 (); FileFlags = (Flags) reader.ReadInt32 (); }
internal void Write (BinaryWriter bw, int major_version, int minor_version) { bw.Write (TotalFileSize); bw.Write (DataSectionOffset); bw.Write (DataSectionSize); bw.Write (CompileUnitCount); bw.Write (CompileUnitTableOffset); bw.Write (CompileUnitTableSize); bw.Write (SourceCount); bw.Write (SourceTableOffset); bw.Write (SourceTableSize); bw.Write (MethodCount); bw.Write (MethodTableOffset); bw.Write (MethodTableSize); bw.Write (TypeCount); bw.Write (AnonymousScopeCount); bw.Write (AnonymousScopeTableOffset); bw.Write (AnonymousScopeTableSize); bw.Write (LineNumberTable_LineBase); bw.Write (LineNumberTable_LineRange); bw.Write (LineNumberTable_OpcodeBase); bw.Write ((int) FileFlags); }
public override string ToString () { return String.Format ( "OffsetTable [{0} - {1}:{2} - {3}:{4}:{5} - {6}:{7}:{8} - {9}]", TotalFileSize, DataSectionOffset, DataSectionSize, SourceCount, SourceTableOffset, SourceTableSize, MethodCount, MethodTableOffset, MethodTableSize, TypeCount); } }
// LineNumberEntry: one (file, row, column, IL offset) tuple of a method's line-number table.
// End row/column default to -1 when unknown; IsHidden marks compiler-hidden sequence points.
public class LineNumberEntry {
#region This is actually written to the symbol file
public readonly int Row; public int Column; public int EndRow, EndColumn; public readonly int File; public readonly int Offset; public readonly bool IsHidden; // Obsolete is never used
#endregion
// Orders entries by Row, then by Column.
public sealed class LocationComparer : IComparer<LineNumberEntry> { public static readonly LocationComparer Default = new LocationComparer (); public int Compare (LineNumberEntry l1, LineNumberEntry l2) { return l1.Row == l2.Row ? l1.Column.CompareTo (l2.Column) : l1.Row.CompareTo (l2.Row); } }
public static readonly LineNumberEntry Null = new LineNumberEntry (0, 0, 0, 0);
public LineNumberEntry (int file, int row, int column, int offset) : this (file, row, column, offset, false) { } public LineNumberEntry (int file, int row, int offset) : this (file, row, -1, offset, false) { } public LineNumberEntry (int file, int row, int column, int offset, bool is_hidden) : this (file, row, column, -1, -1, offset, is_hidden) { } public LineNumberEntry (int file, int row, int column, int end_row, int end_column, int offset, bool is_hidden) { this.File = file; this.Row = row; this.Column = column; this.EndRow = end_row; this.EndColumn = end_column; this.Offset = offset; this.IsHidden = is_hidden; }
// NOTE(review): the format string never uses argument {2} (Column) — "{1,2}" is arg 1 with
// width 2, "{3,4}" is arg 3 with width 4. This matches upstream Mono; confirm before "fixing".
public override string ToString () { return String.Format ("[Line {0}:{1,2}-{3,4}:{5}]", File, Row, Column, EndRow, EndColumn, Offset); } }
// CodeBlockEntry: a scope (lexical or compiler-generated) inside a method body,
// serialized as LEB128 values. Bit 0x40 of the type flag marks trailing extension data to skip.
public class CodeBlockEntry { public int Index;
#region This is actually written to the symbol file
public int Parent; public Type BlockType; public int StartOffset; public int EndOffset;
#endregion
public enum Type { Lexical = 1, CompilerGenerated = 2, IteratorBody = 3, IteratorDispatcher = 4 } public CodeBlockEntry (int index, int parent, Type type, int start_offset) { this.Index = index; this.Parent = parent; this.BlockType = type; this.StartOffset = start_offset; } internal CodeBlockEntry (int index, MyBinaryReader reader) { this.Index = index; int type_flag = reader.ReadLeb128 (); BlockType = (Type) (type_flag & 0x3f); this.Parent = reader.ReadLeb128 (); this.StartOffset = reader.ReadLeb128 (); this.EndOffset = reader.ReadLeb128 (); /* Reserved for future extensions. */ if ((type_flag & 0x40) != 0) { int data_size = reader.ReadInt16 (); reader.BaseStream.Position += data_size; } } public void Close (int end_offset) { this.EndOffset = end_offset; } internal void Write (MyBinaryWriter bw) { bw.WriteLeb128 ((int) BlockType); bw.WriteLeb128 (Parent); bw.WriteLeb128 (StartOffset); bw.WriteLeb128 (EndOffset); } public override string ToString () { return String.Format ("[CodeBlock {0}:{1}:{2}:{3}:{4}]", Index, Parent, BlockType, StartOffset, EndOffset); } }
// LocalVariableEntry: name and owning code-block index of a local variable.
public struct LocalVariableEntry {
#region This is actually written to the symbol file
public readonly int Index; public readonly string Name; public readonly int BlockIndex;
#endregion
public LocalVariableEntry (int index, string name, int block) { this.Index = index; this.Name = name; this.BlockIndex = block; } internal LocalVariableEntry (MonoSymbolFile file, MyBinaryReader reader) { Index = reader.ReadLeb128 (); Name = reader.ReadString (); BlockIndex = reader.ReadLeb128 (); } internal void Write (MonoSymbolFile file, MyBinaryWriter bw) { bw.WriteLeb128 (Index); bw.Write (Name); bw.WriteLeb128 (BlockIndex); }
// NOTE(review): ToString prints BlockIndex - 1 while the stored/serialized value is BlockIndex;
// presumably the on-disk index is 1-based for display — confirm against the reader.
public override string ToString () { return String.Format ("[LocalVariable {0}:{1}:{2}]", Name, Index, BlockIndex - 1); } }
// CapturedVariable: a local/parameter/this captured by an anonymous method, with its captured field name.
public struct CapturedVariable {
#region This is actually written to the symbol file
public readonly string Name; public readonly string CapturedName; public readonly CapturedKind Kind;
#endregion
public enum CapturedKind : byte { Local, Parameter, This } public CapturedVariable (string name, string captured_name, CapturedKind kind) { this.Name = name; this.CapturedName = captured_name; this.Kind = kind; } internal CapturedVariable (MyBinaryReader reader) { Name = reader.ReadString (); CapturedName = reader.ReadString (); Kind = (CapturedKind) reader.ReadByte (); } internal void Write (MyBinaryWriter bw) { bw.Write (Name); bw.Write (CapturedName); bw.Write ((byte) Kind); } public override string ToString () { return String.Format ("[CapturedVariable {0}:{1}:{2}]", Name, CapturedName, Kind); } }
// CapturedScope: a nested anonymous scope captured by an outer one.
public struct CapturedScope {
#region This is actually written to the symbol file
public readonly int Scope; public readonly string CapturedName;
#endregion
public CapturedScope (int scope, string captured_name) { this.Scope = scope; this.CapturedName = captured_name; } internal CapturedScope (MyBinaryReader reader) { Scope = reader.ReadLeb128 (); CapturedName = reader.ReadString (); } internal void Write (MyBinaryWriter bw) { bw.WriteLeb128 (Scope); bw.Write (CapturedName); } public override string ToString () { return String.Format ("[CapturedScope {0}:{1}]", Scope, CapturedName); } }
// ScopeVariable: (scope id, local index) pair.
public struct ScopeVariable {
#region This is actually written to the symbol file
public readonly int Scope; public readonly int Index;
#endregion
public ScopeVariable (int scope, int index) { this.Scope = scope; this.Index = index; } internal ScopeVariable (MyBinaryReader reader) { Scope = reader.ReadLeb128 (); Index = reader.ReadLeb128 (); } internal void Write (MyBinaryWriter bw) { bw.WriteLeb128 (Scope); bw.WriteLeb128 (Index); } public override string ToString () { return String.Format ("[ScopeVariable {0}:{1}]", Scope, Index); } }
// AnonymousScopeEntry: all variables and scopes captured by one anonymous method/closure scope.
public class AnonymousScopeEntry {
#region This is actually written to the symbol file
public readonly int ID;
#endregion
List<CapturedVariable> captured_vars = new List<CapturedVariable> (); List<CapturedScope> captured_scopes = new List<CapturedScope> (); public AnonymousScopeEntry (int id) { this.ID = id; } internal AnonymousScopeEntry (MyBinaryReader reader) { ID = reader.ReadLeb128 (); int num_captured_vars = reader.ReadLeb128 (); for (int i = 0; i < num_captured_vars; i++) captured_vars.Add (new CapturedVariable (reader)); int num_captured_scopes = reader.ReadLeb128 (); for (int i = 0; i < num_captured_scopes; i++) captured_scopes.Add (new CapturedScope (reader)); } internal void AddCapturedVariable (string name, string captured_name, CapturedVariable.CapturedKind kind) { captured_vars.Add (new CapturedVariable (name, captured_name, kind)); } public CapturedVariable[] CapturedVariables { get { CapturedVariable[] retval = new CapturedVariable [captured_vars.Count]; captured_vars.CopyTo (retval, 0); return retval; } } internal void AddCapturedScope (int scope, string captured_name) { captured_scopes.Add (new CapturedScope (scope, captured_name)); } public CapturedScope[] CapturedScopes { get { CapturedScope[] retval = new CapturedScope [captured_scopes.Count]; captured_scopes.CopyTo (retval, 0); return retval; } } internal void Write (MyBinaryWriter bw) { bw.WriteLeb128 (ID); bw.WriteLeb128 (captured_vars.Count); foreach (CapturedVariable cv in captured_vars) cv.Write (bw); bw.WriteLeb128 (captured_scopes.Count); foreach (CapturedScope cs in captured_scopes) cs.Write (bw); } public override string ToString () { return String.Format ("[AnonymousScope {0}]", ID); } }
// CompileUnitEntry: a compile unit (main source file, included files, namespaces).
// While "creating" is true the entry is being built for writing; after reading from disk,
// the variable-length data is loaded lazily via ReadData (guarded by the namespaces != null check).
public class CompileUnitEntry : ICompileUnit {
#region This is actually written to the symbol file
public readonly int Index; int DataOffset;
#endregion
MonoSymbolFile file; SourceFileEntry source; List<SourceFileEntry> include_files; List<NamespaceEntry> namespaces; bool creating; public static int Size { get { return 8; } } CompileUnitEntry ICompileUnit.Entry { get { return this; } } public CompileUnitEntry (MonoSymbolFile file, SourceFileEntry source) { this.file = file; this.source = source; this.Index = file.AddCompileUnit (this); creating = true; namespaces = new List<NamespaceEntry> (); } public void AddFile (SourceFileEntry file) { if (!creating) throw new InvalidOperationException (); if (include_files == null) include_files = new List<SourceFileEntry> (); include_files.Add (file); } public SourceFileEntry SourceFile { get { if (creating) return source; ReadData (); return source; } } public int DefineNamespace (string name, string[] using_clauses, int parent) { if (!creating) throw new InvalidOperationException (); int index = file.GetNextNamespaceIndex (); NamespaceEntry ns = new NamespaceEntry (name, index, using_clauses, parent); namespaces.Add (ns); return index; } internal void WriteData (MyBinaryWriter bw) { DataOffset = (int) bw.BaseStream.Position; bw.WriteLeb128 (source.Index); int count_includes = include_files != null ? include_files.Count : 0; bw.WriteLeb128 (count_includes); if (include_files != null) { foreach (SourceFileEntry entry in include_files) bw.WriteLeb128 (entry.Index); } bw.WriteLeb128 (namespaces.Count); foreach (NamespaceEntry ns in namespaces) ns.Write (file, bw); } internal void Write (BinaryWriter bw) { bw.Write (Index); bw.Write (DataOffset); } internal CompileUnitEntry (MonoSymbolFile file, MyBinaryReader reader) { this.file = file; Index = reader.ReadInt32 (); DataOffset = reader.ReadInt32 (); } public void ReadAll () { ReadData (); } void ReadData () { if (creating) throw new InvalidOperationException (); lock (file) { if (namespaces != null) return; MyBinaryReader reader = file.BinaryReader; int old_pos = (int) reader.BaseStream.Position; reader.BaseStream.Position = DataOffset; int source_idx = reader.ReadLeb128 (); source = file.GetSourceFile (source_idx); int count_includes = reader.ReadLeb128 (); if (count_includes > 0) { include_files = new List<SourceFileEntry> (); for (int i = 0; i < count_includes; i++) include_files.Add (file.GetSourceFile (reader.ReadLeb128 ())); } int count_ns = reader.ReadLeb128 (); namespaces = new List<NamespaceEntry> (); for (int i = 0; i < count_ns; i ++) namespaces.Add (new NamespaceEntry (file, reader)); reader.BaseStream.Position = old_pos; } } public NamespaceEntry[] Namespaces { get { ReadData (); NamespaceEntry[] retval = new NamespaceEntry [namespaces.Count]; namespaces.CopyTo (retval, 0); return retval; } } public SourceFileEntry[] IncludeFiles { get { ReadData (); if (include_files == null) return new SourceFileEntry [0]; SourceFileEntry[] retval = new SourceFileEntry [include_files.Count]; include_files.CopyTo (retval, 0); return retval; } } }
// SourceFileEntry: a source file plus its GUID and MD5 checksum. The checksum is
// computed from the file contents at write time when not supplied up front.
public class SourceFileEntry {
#region This is actually written to the symbol file
public readonly int Index; int DataOffset;
#endregion
MonoSymbolFile file; string file_name; byte[] guid; byte[] hash; bool creating; bool auto_generated; public static int Size { get { return 8; } } public SourceFileEntry (MonoSymbolFile file, string file_name) { this.file = file; this.file_name = file_name; this.Index = file.AddSource (this); creating = true; } public SourceFileEntry (MonoSymbolFile file, string file_name, byte[] guid, byte[] checksum) : this (file, file_name) { this.guid = guid; this.hash = checksum; } public byte[] Checksum { get { return hash; } } internal void WriteData (MyBinaryWriter bw) { DataOffset = (int) bw.BaseStream.Position; bw.Write (file_name); if (guid == null) guid = new byte[16]; if (hash == null) { try { using (FileStream fs = new FileStream (file_name, FileMode.Open, FileAccess.Read)) { MD5 md5 = MD5.Create (); hash = md5.ComputeHash (fs); } } catch { hash = new byte [16]; } } bw.Write (guid); bw.Write (hash); bw.Write ((byte) (auto_generated ? 1 : 0)); } internal void Write (BinaryWriter bw) { bw.Write (Index); bw.Write (DataOffset); } internal SourceFileEntry (MonoSymbolFile file, MyBinaryReader reader) { this.file = file; Index = reader.ReadInt32 (); DataOffset = reader.ReadInt32 (); int old_pos = (int) reader.BaseStream.Position; reader.BaseStream.Position = DataOffset; file_name = reader.ReadString (); guid = reader.ReadBytes (16); hash = reader.ReadBytes (16); auto_generated = reader.ReadByte () == 1; reader.BaseStream.Position = old_pos; } public string FileName { get { return file_name; } set { file_name = value; } } public bool AutoGenerated { get { return auto_generated; } } public void SetAutoGenerated () { if (!creating) throw new InvalidOperationException (); auto_generated = true; file.OffsetTable.FileFlags |= OffsetTable.Flags.IsAspxSource; } public bool CheckChecksum () { try { using (FileStream fs = new FileStream (file_name, FileMode.Open)) { MD5 md5 = MD5.Create (); byte[] data = md5.ComputeHash (fs); for (int i = 0; i < 16; i++) if (data [i] != hash [i]) return false; return true; } } catch { return false; } } public override string ToString () { return String.Format ("SourceFileEntry ({0}:{1})", Index, DataOffset); } }
// LineNumberTable: DWARF-style opcode-compressed line-number program. Line/offset deltas
// within [LineBase, LineBase + LineRange) are packed into single special opcodes above
// OpcodeBase; everything else uses the standard DW_LNS_* opcodes below.
public class LineNumberTable { protected LineNumberEntry[] _line_numbers; public LineNumberEntry[] LineNumbers { get { return _line_numbers; } } public readonly int LineBase; public readonly int LineRange; public readonly byte OpcodeBase; public readonly int MaxAddressIncrement;
#region Configurable constants
public const int Default_LineBase = -1; public const int Default_LineRange = 8; public const byte Default_OpcodeBase = 9;
#endregion
public const byte DW_LNS_copy = 1; public const byte DW_LNS_advance_pc = 2; public const byte DW_LNS_advance_line = 3; public const byte DW_LNS_set_file = 4; public const byte DW_LNS_const_add_pc = 8; public const byte DW_LNE_end_sequence = 1; // MONO extensions.
public const byte DW_LNE_MONO_negate_is_hidden = 0x40; internal const byte DW_LNE_MONO__extensions_start = 0x40; internal const byte DW_LNE_MONO__extensions_end = 0x7f; protected LineNumberTable (MonoSymbolFile file) { this.LineBase = file.OffsetTable.LineNumberTable_LineBase; this.LineRange = file.OffsetTable.LineNumberTable_LineRange; this.OpcodeBase = (byte) file.OffsetTable.LineNumberTable_OpcodeBase; this.MaxAddressIncrement = (255 - OpcodeBase) / LineRange; } internal LineNumberTable (MonoSymbolFile file, LineNumberEntry[] lines) : this (file) { this._line_numbers = lines; }
// Emits the opcode stream, then (optionally) a trailing block of columns and a block of
// end-row/end-column deltas; 0xffffff marks a missing end position.
internal void Write (MonoSymbolFile file, MyBinaryWriter bw, bool hasColumnsInfo, bool hasEndInfo) { int start = (int) bw.BaseStream.Position; bool last_is_hidden = false; int last_line = 1, last_offset = 0, last_file = 1; for (int i = 0; i < LineNumbers.Length; i++) { int line_inc = LineNumbers [i].Row - last_line; int offset_inc = LineNumbers [i].Offset - last_offset; if (LineNumbers [i].File != last_file) { bw.Write (DW_LNS_set_file); bw.WriteLeb128 (LineNumbers [i].File); last_file = LineNumbers [i].File; } if (LineNumbers [i].IsHidden != last_is_hidden) { bw.Write ((byte) 0); bw.Write ((byte) 1); bw.Write (DW_LNE_MONO_negate_is_hidden); last_is_hidden = LineNumbers [i].IsHidden; } if (offset_inc >= MaxAddressIncrement) { if (offset_inc < 2 * MaxAddressIncrement) { bw.Write (DW_LNS_const_add_pc); offset_inc -= MaxAddressIncrement; } else { bw.Write (DW_LNS_advance_pc); bw.WriteLeb128 (offset_inc); offset_inc = 0; } } if ((line_inc < LineBase) || (line_inc >= LineBase + LineRange)) { bw.Write (DW_LNS_advance_line); bw.WriteLeb128 (line_inc); if (offset_inc != 0) { bw.Write (DW_LNS_advance_pc); bw.WriteLeb128 (offset_inc); } bw.Write (DW_LNS_copy); } else { byte opcode; opcode = (byte) (line_inc - LineBase + (LineRange * offset_inc) + OpcodeBase); bw.Write (opcode); } last_line = LineNumbers [i].Row; last_offset = LineNumbers [i].Offset; } bw.Write ((byte) 0); bw.Write ((byte) 1); bw.Write (DW_LNE_end_sequence); if (hasColumnsInfo) { for (int i = 0; i < LineNumbers.Length; i++) { var ln = LineNumbers [i]; if (ln.Row >= 0) bw.WriteLeb128 (ln.Column); } } if (hasEndInfo) { for (int i = 0; i < LineNumbers.Length; i++) { var ln = LineNumbers [i]; if (ln.EndRow == -1 || ln.EndColumn == -1 || ln.Row > ln.EndRow) { bw.WriteLeb128 (0xffffff); } else { bw.WriteLeb128 (ln.EndRow - ln.Row); bw.WriteLeb128 (ln.EndColumn); } } } file.ExtendedLineNumberSize += (int) bw.BaseStream.Position - start; } internal static LineNumberTable Read (MonoSymbolFile file, MyBinaryReader br, bool readColumnsInfo, bool readEndInfo) { LineNumberTable lnt = new LineNumberTable (file); lnt.DoRead (file, br, readColumnsInfo, readEndInfo); return lnt; }
// State-machine decoder for the opcode stream written by Write above; opcode 0 introduces
// a length-prefixed extended opcode.
void DoRead (MonoSymbolFile file, MyBinaryReader br, bool includesColumns, bool includesEnds) { var lines = new List<LineNumberEntry> (); bool is_hidden = false, modified = false; int stm_line = 1, stm_offset = 0, stm_file = 1; while (true) { byte opcode = br.ReadByte (); if (opcode == 0) { byte size = br.ReadByte (); long end_pos = br.BaseStream.Position + size; opcode = br.ReadByte (); if (opcode == DW_LNE_end_sequence) { if (modified) lines.Add (new LineNumberEntry ( stm_file, stm_line, -1, stm_offset, is_hidden)); break; } else if (opcode == DW_LNE_MONO_negate_is_hidden) { is_hidden = !is_hidden; modified = true; } else if ((opcode >= DW_LNE_MONO__extensions_start) && (opcode <= DW_LNE_MONO__extensions_end)) { ; // reserved for future extensions
} else { throw new MonoSymbolFileException ("Unknown extended opcode {0:x}", opcode); } br.BaseStream.Position = end_pos; continue; } else if (opcode < OpcodeBase) { switch (opcode) { case DW_LNS_copy: lines.Add (new LineNumberEntry ( stm_file, stm_line, -1, stm_offset, is_hidden)); modified = false; break; case DW_LNS_advance_pc: stm_offset += br.ReadLeb128 (); modified = true; break; case DW_LNS_advance_line: stm_line += br.ReadLeb128 (); modified = true; break; case DW_LNS_set_file: stm_file = br.ReadLeb128 (); modified = true; break; case DW_LNS_const_add_pc: stm_offset += MaxAddressIncrement; modified = true; break; default: throw new MonoSymbolFileException ( "Unknown standard opcode {0:x} in LNT", opcode); } } else { opcode -= OpcodeBase; stm_offset += opcode / LineRange; stm_line += LineBase + (opcode % LineRange); lines.Add (new LineNumberEntry ( stm_file, stm_line, -1, stm_offset, is_hidden)); modified = false; } } _line_numbers = lines.ToArray (); if (includesColumns) { for (int i = 0; i < _line_numbers.Length; ++i) { var ln = _line_numbers[i]; if (ln.Row >= 0) ln.Column = br.ReadLeb128 (); } } if (includesEnds) { for (int i = 0; i < _line_numbers.Length; ++i) { var ln = _line_numbers[i]; int row = br.ReadLeb128 (); if (row == 0xffffff) { ln.EndRow = -1; ln.EndColumn = -1; } else { ln.EndRow = ln.Row + row; ln.EndColumn = br.ReadLeb128 (); } } } }
// NOTE(review): returns false for tables with 0 or 1 entries — a single-entry method yields
// no bounds; confirm callers expect that.
public bool GetMethodBounds (out LineNumberEntry start, out LineNumberEntry end) { if (_line_numbers.Length > 1) { start = _line_numbers [0]; end = _line_numbers [_line_numbers.Length - 1]; return true; } start = LineNumberEntry.Null; end = LineNumberEntry.Null; return false; } }
// MethodEntry: per-method record — metadata token plus offsets of the local-variable,
// code-block, scope-variable and line-number tables. The fixed part (Size = 12 bytes:
// Token, DataOffset, LineNumberTableOffset) lives in the method table; the rest is LEB128
// data at DataOffset. Tables read from disk are cached lazily under lock (SymbolFile).
public class MethodEntry : IComparable {
#region This is actually written to the symbol file
public readonly int CompileUnitIndex; public readonly int Token; public readonly int NamespaceID; int DataOffset; int LocalVariableTableOffset; int LineNumberTableOffset; int CodeBlockTableOffset; int ScopeVariableTableOffset; int RealNameOffset; Flags flags;
#endregion
int index; public Flags MethodFlags { get { return flags; } } public readonly CompileUnitEntry CompileUnit; LocalVariableEntry[] locals; CodeBlockEntry[] code_blocks; ScopeVariable[] scope_vars; LineNumberTable lnt; string real_name; public readonly MonoSymbolFile SymbolFile; public int Index { get { return index; } set { index = value; } } [Flags] public enum Flags { LocalNamesAmbiguous = 1, ColumnsInfoIncluded = 1 << 1, EndInfoIncluded = 1 << 2 } public const int Size = 12; internal MethodEntry (MonoSymbolFile file, MyBinaryReader reader, int index) { this.SymbolFile = file; this.index = index; Token = reader.ReadInt32 (); DataOffset = reader.ReadInt32 (); LineNumberTableOffset = reader.ReadInt32 (); long old_pos = reader.BaseStream.Position; reader.BaseStream.Position = DataOffset; CompileUnitIndex = reader.ReadLeb128 (); LocalVariableTableOffset = reader.ReadLeb128 (); NamespaceID = reader.ReadLeb128 (); CodeBlockTableOffset = reader.ReadLeb128 (); ScopeVariableTableOffset = reader.ReadLeb128 (); RealNameOffset = reader.ReadLeb128 (); flags = (Flags) reader.ReadLeb128 (); reader.BaseStream.Position = old_pos; CompileUnit = file.GetCompileUnit (CompileUnitIndex); } internal MethodEntry (MonoSymbolFile file, CompileUnitEntry comp_unit, int token, ScopeVariable[] scope_vars, LocalVariableEntry[] locals, LineNumberEntry[] lines, CodeBlockEntry[] code_blocks, string real_name, Flags flags, int namespace_id) { this.SymbolFile = file; this.real_name = real_name; this.locals = locals; this.code_blocks = code_blocks; this.scope_vars = scope_vars; this.flags = flags; index = -1; Token = token; CompileUnitIndex = comp_unit.Index; CompileUnit = comp_unit; NamespaceID = namespace_id; CheckLineNumberTable (lines); lnt = new LineNumberTable (file, lines); file.NumLineNumbers += lines.Length; int num_locals = locals != null ? locals.Length : 0; if (num_locals <= 32) { // Most of the time, the O(n^2) factor is actually less than the cost of allocating the hash table, 32 is a rough number obtained through some testing.
for (int i = 0; i < num_locals; i ++) { string nm = locals [i].Name; for (int j = i + 1; j < num_locals; j ++) { if (locals [j].Name == nm) { flags |= Flags.LocalNamesAmbiguous; goto locals_check_done; } } } locals_check_done : ; } else { var local_names = new Dictionary<string, LocalVariableEntry> (); foreach (LocalVariableEntry local in locals) { if (local_names.ContainsKey (local.Name)) { flags |= Flags.LocalNamesAmbiguous; break; } local_names.Add (local.Name, local); } } }
// Validates that entries are non-null and IL offsets are monotonically non-decreasing.
static void CheckLineNumberTable (LineNumberEntry[] line_numbers) { int last_offset = -1; int last_row = -1; if (line_numbers == null) return; for (int i = 0; i < line_numbers.Length; i++) { LineNumberEntry line = line_numbers [i]; if (line.Equals (LineNumberEntry.Null)) throw new MonoSymbolFileException (); if (line.Offset < last_offset) throw new MonoSymbolFileException (); if (line.Offset > last_offset) { last_row = line.Row; last_offset = line.Offset; } else if (line.Row > last_row) { last_row = line.Row; } } } internal void Write (MyBinaryWriter bw) { if ((index <= 0) || (DataOffset == 0)) throw new InvalidOperationException (); bw.Write (Token); bw.Write (DataOffset); bw.Write (LineNumberTableOffset); } internal void WriteData (MonoSymbolFile file, MyBinaryWriter bw) { if (index <= 0) throw new InvalidOperationException (); LocalVariableTableOffset = (int) bw.BaseStream.Position; int num_locals = locals != null ? locals.Length : 0; bw.WriteLeb128 (num_locals); for (int i = 0; i < num_locals; i++) locals [i].Write (file, bw); file.LocalCount += num_locals; CodeBlockTableOffset = (int) bw.BaseStream.Position; int num_code_blocks = code_blocks != null ? code_blocks.Length : 0; bw.WriteLeb128 (num_code_blocks); for (int i = 0; i < num_code_blocks; i++) code_blocks [i].Write (bw); ScopeVariableTableOffset = (int) bw.BaseStream.Position; int num_scope_vars = scope_vars != null ? scope_vars.Length : 0; bw.WriteLeb128 (num_scope_vars); for (int i = 0; i < num_scope_vars; i++) scope_vars [i].Write (bw); if (real_name != null) { RealNameOffset = (int) bw.BaseStream.Position; bw.Write (real_name); } foreach (var lne in lnt.LineNumbers) { if (lne.EndRow != -1 || lne.EndColumn != -1) flags |= Flags.EndInfoIncluded; } LineNumberTableOffset = (int) bw.BaseStream.Position; lnt.Write (file, bw, (flags & Flags.ColumnsInfoIncluded) != 0, (flags & Flags.EndInfoIncluded) != 0); DataOffset = (int) bw.BaseStream.Position; bw.WriteLeb128 (CompileUnitIndex); bw.WriteLeb128 (LocalVariableTableOffset); bw.WriteLeb128 (NamespaceID); bw.WriteLeb128 (CodeBlockTableOffset); bw.WriteLeb128 (ScopeVariableTableOffset); bw.WriteLeb128 (RealNameOffset); bw.WriteLeb128 ((int) flags); } public void ReadAll () { GetLineNumberTable (); GetLocals (); GetCodeBlocks (); GetScopeVariables (); GetRealName (); } public LineNumberTable GetLineNumberTable () { lock (SymbolFile) { if (lnt != null) return lnt; if (LineNumberTableOffset == 0) return null; MyBinaryReader reader = SymbolFile.BinaryReader; long old_pos = reader.BaseStream.Position; reader.BaseStream.Position = LineNumberTableOffset; lnt = LineNumberTable.Read (SymbolFile, reader, (flags & Flags.ColumnsInfoIncluded) != 0, (flags & Flags.EndInfoIncluded) != 0); reader.BaseStream.Position = old_pos; return lnt; } } public LocalVariableEntry[] GetLocals () { lock (SymbolFile) { if (locals != null) return locals; if (LocalVariableTableOffset == 0) return null; MyBinaryReader reader = SymbolFile.BinaryReader; long old_pos = reader.BaseStream.Position; reader.BaseStream.Position = LocalVariableTableOffset; int num_locals = reader.ReadLeb128 (); locals = new LocalVariableEntry [num_locals]; for (int i = 0; i < num_locals; i++) locals [i] = new LocalVariableEntry (SymbolFile, reader); reader.BaseStream.Position = old_pos; return locals; } } public CodeBlockEntry[] GetCodeBlocks () { lock (SymbolFile) { if (code_blocks != null) return code_blocks; if (CodeBlockTableOffset == 0) return null; MyBinaryReader reader = SymbolFile.BinaryReader; long old_pos = reader.BaseStream.Position; reader.BaseStream.Position = CodeBlockTableOffset; int num_code_blocks = reader.ReadLeb128 (); code_blocks = new CodeBlockEntry [num_code_blocks]; for (int i = 0; i < num_code_blocks; i++) code_blocks [i] = new CodeBlockEntry (i, reader); reader.BaseStream.Position = old_pos; return code_blocks; } } public ScopeVariable[] GetScopeVariables () { lock (SymbolFile) { if (scope_vars != null) return scope_vars; if (ScopeVariableTableOffset == 0) return null; MyBinaryReader reader = SymbolFile.BinaryReader; long old_pos = reader.BaseStream.Position; reader.BaseStream.Position = ScopeVariableTableOffset; int num_scope_vars = reader.ReadLeb128 (); scope_vars = new ScopeVariable [num_scope_vars]; for (int i = 0; i < num_scope_vars; i++) scope_vars [i] = new ScopeVariable (reader); reader.BaseStream.Position = old_pos; return scope_vars; } } public string GetRealName () { lock (SymbolFile) { if (real_name != null) return real_name; if (RealNameOffset == 0) return null; real_name = SymbolFile.BinaryReader.ReadString (RealNameOffset); return real_name; } }
// Orders entries ascending by metadata token.
public int CompareTo (object obj) { MethodEntry method = (MethodEntry) obj; if (method.Token < Token) return 1; else if (method.Token > Token) return -1; else return 0; } public override string ToString () { return String.Format ("[Method {0}:{1:x}:{2}:{3}]", index, Token, CompileUnitIndex, CompileUnit); } }
// NamespaceEntry: a namespace with its using-clauses and the index of its parent namespace.
public struct NamespaceEntry {
#region This is actually written to the symbol file
public readonly string Name; public readonly int Index; public readonly int Parent; public readonly string[] UsingClauses;
#endregion
public NamespaceEntry (string name, int index, string[] using_clauses, int parent) { this.Name = name; this.Index = index; this.Parent = parent; this.UsingClauses = using_clauses != null ? using_clauses : new string [0]; } internal NamespaceEntry (MonoSymbolFile file, MyBinaryReader reader) { Name = reader.ReadString (); Index = reader.ReadLeb128 (); Parent = reader.ReadLeb128 (); int count = reader.ReadLeb128 (); UsingClauses = new string [count]; for (int i = 0; i < count; i++) UsingClauses [i] = reader.ReadString (); } internal void Write (MonoSymbolFile file, MyBinaryWriter bw) { bw.Write (Name); bw.WriteLeb128 (Index); bw.WriteLeb128 (Parent); bw.WriteLeb128 (UsingClauses.Length); foreach (string uc in UsingClauses) bw.Write (uc); } public override string ToString () { return String.Format ("[Namespace {0}:{1}:{2}]", Name, Index, Parent); } } }
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.IO;
using System.Linq;
using System.Globalization;
using System.Collections.Generic;
using System.Security.Cryptography;
using System.Runtime.InteropServices;
using System.Text;
using System.Security.Cryptography.Pkcs;
using System.Security.Cryptography.Xml;
using System.Security.Cryptography.X509Certificates;
using Xunit;
using Test.Cryptography;
using System.Security.Cryptography.Pkcs.Tests;

namespace System.Security.Cryptography.Pkcs.EnvelopedCmsTests.Tests
{
    public static partial class StateTests
    {
        //
        // Exercises various edge cases when EnvelopedCms methods and properties are called out of the "expected" order.
        //
        // The "expected" sequences are one of:
        //
        //    ctor(ContentInfo)|ctor(ContentInfo,AlgorithmIdentifier) => Encrypt() => Encode()
        //
        // or
        //
        //    ctor() => Decode() => RecipientInfos => Decrypt() => ContentInfo
        //
        // Most of these semantics are driven by backward compatibility. A tighter api design wouldn't
        // have exposed all these state transitions in the first place.
        //
        // The four states an EnvelopedCms can be in are as follows:
        //
        //   State 1: Post constructor
        //
        //      There are three constructor overloads, but all but one just supply default arguments
        //      so we can consider there to be just one constructor.
        //
        //      At this stage, there is no CMS underneath, just some properties (ContentInfo, AlgorithmIdentifier, etc.)
        //      that serve as implicit inputs to a future Encrypt() call.
        //
        //   State 2: Post Encrypt()
        //
        //      Encrypt() can be called at any time and it effectively resets the state of the EnvelopedCms
        //      (although the prior contents of ContentInfo, ContentEncryptionAlgorithm, UnprotectedAttributes and Certificates
        //      will naturally influence the contents of CMS is constructed.)
        //
        //      Encrypt() actually both encrypts and encodes, but you have to call Encode() afterward to pick up the encoded bytes.
        //
        //   State 3: Post Decode()
        //
        //      Decode() can also be called at any time - it's effectively a constructor that resets the internal
        //      state and all the member properties.
        //
        //      In this state, you can invoke the RecipientInfos properties to decide which recipient to pass to Decrypt().
        //
        //   State 4: Post Decrypt()
        //
        //      A Decrypt() can only happen after a Decode().
        //
        //      Once in this state, you can fetch ContentInfo to get the decrypted content
        //      but otherwise, the CMS is in a pretty useless state.
        //

        //
        // State 1
        //
        //   Constructed using any of the constructor overloads.
        //

        [Fact]
        public static void PostCtor_Version()
        {
            // Version returns 0 by fiat.
            EnvelopedCms ecms = new EnvelopedCms();
            Assert.Equal(0, ecms.Version);
        }

        [Fact]
        public static void PostCtor_RecipientInfos()
        {
            // RecipientInfo returns empty collection by fiat.
            EnvelopedCms ecms = new EnvelopedCms();
            RecipientInfoCollection recipients = ecms.RecipientInfos;
            Assert.Equal(0, recipients.Count);
        }

        [Fact]
        public static void PostCtor_Encode()
        {
            // Encode() before Encrypt()/Decode() has nothing to encode.
            EnvelopedCms ecms = new EnvelopedCms();
            Assert.Throws<InvalidOperationException>(() => ecms.Encode());
        }

        [Fact]
        public static void PostCtor_Decrypt()
        {
            // Decrypt() is only legal after a Decode().
            EnvelopedCms ecms = new EnvelopedCms();
            Assert.Throws<InvalidOperationException>(() => ecms.Decrypt());
        }

        [Fact]
        public static void PostCtor_ContentInfo()
        {
            // ContentInfo simply echoes back what was passed to the constructor.
            ContentInfo expectedContentInfo = new ContentInfo(new byte[] { 1, 2, 3 });
            EnvelopedCms ecms = new EnvelopedCms(expectedContentInfo);
            ContentInfo actualContentInfo = ecms.ContentInfo;
            Assert.Equal(expectedContentInfo.ContentType, actualContentInfo.ContentType);
            Assert.Equal<byte>(expectedContentInfo.Content, actualContentInfo.Content);
        }

        //
        // State 2
        //
        //   Called constructor + Encrypt()
        //

        [Fact]
        public static void PostEncrypt_Version()
        {
            ContentInfo expectedContentInfo = new ContentInfo(new byte[] { 1, 2, 3 });
            EnvelopedCms ecms = new EnvelopedCms(expectedContentInfo);
            int versionBeforeEncrypt = ecms.Version;
            using (X509Certificate2 cert = Certificates.RSAKeyTransfer1.GetCertificate())
            {
                ecms.Encrypt(new CmsRecipient(cert));
            }

            // Encrypt does not update Version member.
            Assert.Equal(versionBeforeEncrypt, ecms.Version);
        }

        [Fact]
        public static void PostEncrypt_RecipientInfos()
        {
            ContentInfo expectedContentInfo = new ContentInfo(new byte[] { 1, 2, 3 });
            EnvelopedCms ecms = new EnvelopedCms(expectedContentInfo);
            using (X509Certificate2 cert = Certificates.RSAKeyTransfer1.GetCertificate())
            {
                ecms.Encrypt(new CmsRecipient(cert));
            }

            // Reading RecipientInfos right after Encrypt() throws (desktop-compatible behavior).
            object ignore;
            Assert.ThrowsAny<CryptographicException>(() => ignore = ecms.RecipientInfos);
        }

        [Fact]
        public static void PostEncrypt_Decrypt()
        {
            ContentInfo expectedContentInfo = new ContentInfo(new byte[] { 1, 2, 3 });
            EnvelopedCms ecms = new EnvelopedCms(expectedContentInfo);
            using (X509Certificate2 cert = Certificates.RSAKeyTransfer1.GetCertificate())
            {
                ecms.Encrypt(new CmsRecipient(cert));
            }

            // Decrypt() without an intervening Decode() is rejected.
            Assert.ThrowsAny<CryptographicException>(() => ecms.Decrypt());
        }

        [Fact]
        public static void PostEncrypt_ContentInfo()
        {
            ContentInfo expectedContentInfo = new ContentInfo(new byte[] { 1, 2, 3 });
            EnvelopedCms ecms = new EnvelopedCms(expectedContentInfo);
            using (X509Certificate2 cert = Certificates.RSAKeyTransfer1.GetCertificate())
            {
                ecms.Encrypt(new CmsRecipient(cert));
            }

            // Encrypting does not update ContentInfo.
            ContentInfo actualContentInfo = ecms.ContentInfo;
            Assert.Equal(expectedContentInfo.ContentType, actualContentInfo.ContentType);
            Assert.Equal<byte>(expectedContentInfo.Content, actualContentInfo.Content);
        }

        //
        // State 3: Called Decode()
        //

        [Fact]
        public static void PostDecode_Encode()
        {
            // Fixed, pre-encoded EnvelopedCms message (hex test vector; do not alter).
            byte[] encodedMessage =
                 ("3082010c06092a864886f70d010703a081fe3081fb0201003181c83081c5020100302e301a311830160603550403130f5253" +
                  "414b65795472616e7366657231021031d935fb63e8cfab48a0bf7b397b67c0300d06092a864886f70d01010105000481805e" +
                  "bb2d08773594be9ec5d30c0707cf339f2b982a4f0797b74d520a0c973d668a9a6ad9d28066ef36e5b5620fef67f4d79ee50c" +
                  "25eb999f0c656548347d5676ac4b779f8fce2b87e6388fbe483bb0fcf78ab1f1ff29169600401fded7b2803a0bf96cc160c4" +
                  "96726216e986869eed578bda652855c85604a056201538ee56b6c4302b06092a864886f70d010701301406082a864886f70d" +
                  "030704083adadf63cd297a86800835edc437e31d0b70").HexToByteArray();

            EnvelopedCms ecms = new EnvelopedCms();
            ecms.Decode(encodedMessage);

            // This should really have thrown an InvalidOperationException. Instead, you get... something back.
            byte[] expectedGarbage = "35edc437e31d0b70".HexToByteArray();
            byte[] garbage = ecms.Encode();
            AssertEncryptedContentEqual(expectedGarbage, garbage);
        }

        [Fact]
        public static void PostDecode_ContentInfo()
        {
            // Same fixed test vector as PostDecode_Encode.
            byte[] encodedMessage =
                 ("3082010c06092a864886f70d010703a081fe3081fb0201003181c83081c5020100302e301a311830160603550403130f5253" +
                  "414b65795472616e7366657231021031d935fb63e8cfab48a0bf7b397b67c0300d06092a864886f70d01010105000481805e" +
                  "bb2d08773594be9ec5d30c0707cf339f2b982a4f0797b74d520a0c973d668a9a6ad9d28066ef36e5b5620fef67f4d79ee50c" +
                  "25eb999f0c656548347d5676ac4b779f8fce2b87e6388fbe483bb0fcf78ab1f1ff29169600401fded7b2803a0bf96cc160c4" +
                  "96726216e986869eed578bda652855c85604a056201538ee56b6c4302b06092a864886f70d010701301406082a864886f70d" +
                  "030704083adadf63cd297a86800835edc437e31d0b70").HexToByteArray();

            EnvelopedCms ecms = new EnvelopedCms();
            ecms.Decode(encodedMessage);

            // This gets you the encrypted inner content.
            ContentInfo contentInfo = ecms.ContentInfo;
            Assert.Equal(Oids.Pkcs7Data, contentInfo.ContentType.Value);
            byte[] expectedGarbage = "35edc437e31d0b70".HexToByteArray();
            AssertEncryptedContentEqual(expectedGarbage, contentInfo.Content);
        }

        //
        // State 4: Called Decode() + Decrypt()
        //

        [Fact]
        public static void PostDecrypt_Encode()
        {
            byte[] expectedContent = { 6, 3, 128, 33, 44 };
            EnvelopedCms ecms = new EnvelopedCms(new ContentInfo(expectedContent));
            // NOTE(review): this certificate is not wrapped in a using block, unlike
            // the other tests — presumably harmless here, but worth confirming.
            ecms.Encrypt(new CmsRecipient(Certificates.RSAKeyTransfer1.GetCertificate()));

            // Pre-encoded message whose plaintext is expectedContent (hex test vector; do not alter).
            byte[] encodedMessage =
                 ("3082010c06092a864886f70d010703a081fe3081fb0201003181c83081c5020100302e301a311830160603550403130f5253" +
                  "414b65795472616e7366657231021031d935fb63e8cfab48a0bf7b397b67c0300d06092a864886f70d010101050004818067" +
                  "6bada56dcaf2e65226941242db73b5a5420a6212cd6af662db52fdc0ca63875cb69066f7074da0fc009ce724e2d73fb19380" +
                  "2deea8d92b069486a41c7c4fc3cd0174a918a559f79319039b40ae797bcacc909c361275ee2a5b1f0ff09fb5c19508e3f5ac" +
                  "051ac0f03603c27fb8993d49ac428f8bcfc23a90ef9b0fac0f423a302b06092a864886f70d010701301406082a864886f70d" +
                  "0307040828dc4d72ca3132e48008546cc90f2c5d4b79").HexToByteArray();
            ecms.Decode(encodedMessage);

            using (X509Certificate2 cer = Certificates.RSAKeyTransfer1.TryGetCertificateWithPrivateKey())
            {
                if (cer == null)
                    return; // Sorry - CertLoader is not configured to load certs with private keys - we've tested as much as we can.

                X509Certificate2Collection extraStore = new X509Certificate2Collection(cer);
                RecipientInfoCollection r = ecms.RecipientInfos;
                ecms.Decrypt(r[0], extraStore);

                // Desktop compat: Calling Encode() at this point should have thrown an InvalidOperationException. Instead, it returns
                // the decrypted inner content (same as ecms.ContentInfo.Content). This is easy for someone to take a reliance on
                // so for compat sake, we'd better keep it.
                byte[] encoded = ecms.Encode();
                Assert.Equal<byte>(expectedContent, encoded);
            }
        }

        [Fact]
        public static void PostDecrypt_RecipientInfos()
        {
            byte[] expectedContent = { 6, 3, 128, 33, 44 };
            EnvelopedCms ecms = new EnvelopedCms(new ContentInfo(expectedContent));
            ecms.Encrypt(new CmsRecipient(Certificates.RSAKeyTransfer1.GetCertificate()));

            // Same pre-encoded single-recipient message used by PostDecrypt_Encode.
            byte[] encodedMessage =
                 ("3082010c06092a864886f70d010703a081fe3081fb0201003181c83081c5020100302e301a311830160603550403130f5253" +
                  "414b65795472616e7366657231021031d935fb63e8cfab48a0bf7b397b67c0300d06092a864886f70d010101050004818067" +
                  "6bada56dcaf2e65226941242db73b5a5420a6212cd6af662db52fdc0ca63875cb69066f7074da0fc009ce724e2d73fb19380" +
                  "2deea8d92b069486a41c7c4fc3cd0174a918a559f79319039b40ae797bcacc909c361275ee2a5b1f0ff09fb5c19508e3f5ac" +
                  "051ac0f03603c27fb8993d49ac428f8bcfc23a90ef9b0fac0f423a302b06092a864886f70d010701301406082a864886f70d" +
                  "0307040828dc4d72ca3132e48008546cc90f2c5d4b79").HexToByteArray();
            ecms.Decode(encodedMessage);

            using (X509Certificate2 cer = Certificates.RSAKeyTransfer1.TryGetCertificateWithPrivateKey())
            {
                if (cer == null)
                    return; // Sorry - CertLoader is not configured to load certs with private keys - we've tested as much as we can.

                X509Certificate2Collection extraStore = new X509Certificate2Collection(cer);
                RecipientInfoCollection col1 = ecms.RecipientInfos;
                ecms.Decrypt(col1[0], extraStore);

                // Make sure we can still RecipientInfos after a Decrypt()
                RecipientInfoCollection col2 = ecms.RecipientInfos;
                Assert.Equal(col1.Count, col2.Count);
                RecipientInfo r1 = col1[0];
                RecipientInfo r2 = col2[0];
                X509IssuerSerial is1 = (X509IssuerSerial)(r1.RecipientIdentifier.Value);
                X509IssuerSerial is2 = (X509IssuerSerial)(r2.RecipientIdentifier.Value);
                Assert.Equal(is1.IssuerName, is2.IssuerName);
                Assert.Equal(is1.SerialNumber, is2.SerialNumber);
            }
        }

        [Fact]
        public static void PostDecrypt_Decrypt()
        {
            byte[] expectedContent = { 6, 3, 128, 33, 44 };

            // Pre-encoded three-recipient message (hex test vector; do not alter).
            byte[] encodedMessage =
                 ("308202b006092a864886f70d010703a08202a13082029d020100318202583081c5020100302e301a31183016060355040313" +
                  "0f5253414b65795472616e7366657231021031d935fb63e8cfab48a0bf7b397b67c0300d06092a864886f70d010101050004" +
                  "81801026d9fb60d1a55686b73cf859c8bd66b58defda5e23e3da5f535f1427e3c5f7a4a2a94373e8e3ba5488a7c6a1059bfb" +
                  "57301156698e7fca62671426d388fb3fb4373c9cb53132fda067598256bbfe8491b14dadaaf04d5fdfb2463f358ad0d6a594" +
                  "bf6a4fbab6b3d725f08032e601492265e6336d5a638096f9975025ccd6393081c5020100302e301a31183016060355040313" +
                  "0f5253414b65795472616e736665723202102bce9f9ece39f98044f0cd2faa9a14e7300d06092a864886f70d010101050004" +
                  "8180b6497a2b789728f200ca1f974a676c531a4769f03f3929bd7526e7333ea483b4abb530a49c8532db5d4a4df66f173e3e" +
                  "a4ba9e4814b584dc987ac87c46bb131daab535140968aafad8808100a2515e9c6d0c1f382b024992ce36b70b841628e0eb43" +
                  "4db89545d702a8fbd3403188e7de7cb4bc1dcc3bc325467570654aaf2ee83081c5020100302e301a31183016060355040313" +
                  "0f5253414b65795472616e736665723302104497d870785a23aa4432ed0106ef72a6300d06092a864886f70d010101050004" +
                  "81807517e594c353d41abff334c6162988b78e05df7d79457c146fbc886d2d8057f594fa3a96cd8df5842c9758baac1fcdd5" +
                  "d9672a9f8ef9426326cccaaf5954f2ae657f8c7b13aef2f811adb4954323aa8319a1e8f2ad4e5c96c1d3fbe413ae479e471b" +
                  "b701cbdfa145c9b64f5e1f69f472804995d56c31351553f779cf8efec237303c06092a864886f70d010701301d0609608648" +
                  "01650304012a041023a114c149d7d4017ce2f5ec7c5d53f980104e50ab3c15533743dd054ef3ff8b9d83").HexToByteArray();
            EnvelopedCms ecms = new EnvelopedCms();
            ecms.Decode(encodedMessage);

            using (X509Certificate2 cert1 = Certificates.RSAKeyTransfer1.TryGetCertificateWithPrivateKey())
            using (X509Certificate2 cert2 = Certificates.RSAKeyTransfer2.TryGetCertificateWithPrivateKey())
            using (X509Certificate2 cert3 = Certificates.RSAKeyTransfer3.TryGetCertificateWithPrivateKey())
            {
                if (cert1 == null || cert2 == null || cert3 == null)
                    return; // Sorry - CertLoader is not configured to load certs with private keys - we've tested as much as we can.

                X509Certificate2Collection extraStore = new X509Certificate2Collection();
                extraStore.Add(cert1);
                extraStore.Add(cert2);
                extraStore.Add(cert3);
                RecipientInfoCollection r = ecms.RecipientInfos;
                ecms.Decrypt(r[0], extraStore);
                ContentInfo contentInfo = ecms.ContentInfo;
                Assert.Equal<byte>(expectedContent, contentInfo.Content);

                // Though this doesn't seem like a terribly unreasonable thing to attempt, attempting to call Decrypt() again
                // after a successful Decrypt() throws a CryptographicException saying "Already decrypted."
                Assert.ThrowsAny<CryptographicException>(() => ecms.Decrypt(r[1], extraStore));
            }
        }

        /// <summary>
        /// Compares encrypted content, tolerating (by throwing a descriptive error for)
        /// a known desktop-framework bug where the returned content has extra padding bytes.
        /// </summary>
        private static void AssertEncryptedContentEqual(byte[] expected, byte[] actual)
        {
            if (expected.SequenceEqual(actual))
                return;

            if (actual.Length > expected.Length && actual.Take(expected.Length).SequenceEqual(expected))
                throw new Exception("Returned content had extra bytes padded. If you're running this test on the desktop framework, this is a known bug.");

            Assert.Equal<byte>(expected, actual);
        }
    }
}
using System;
using Csla;
using ParentLoadSoftDelete.DataAccess;
using ParentLoadSoftDelete.DataAccess.ERLevel;

namespace ParentLoadSoftDelete.Business.ERLevel
{
    /// <summary>
    /// E10_City (editable child object).<br/>
    /// This is a generated base class of <see cref="E10_City"/> business object.
    /// </summary>
    /// <remarks>
    /// This class contains one child collection:<br/>
    /// - <see cref="E11_CityRoadObjects"/> of type <see cref="E11_CityRoadColl"/> (1:M relation to <see cref="E12_CityRoad"/>)<br/>
    /// This class is an item of <see cref="E09_CityColl"/> collection.
    /// </remarks>
    [Serializable]
    public partial class E10_City : BusinessBase<E10_City>
    {

        #region Static Fields

        // Source of temporary primary keys for not-yet-inserted objects;
        // Child_Create decrements it, so new objects get negative IDs until
        // the real ID comes back from the database on insert.
        private static int _lastID;

        #endregion

        #region State Fields

        // Foreign key to the parent region, captured from the DTO during Fetch.
        // Not undoable and not serialized (re-established by the parent loader).
        [NotUndoable]
        [NonSerialized]
        internal int parent_Region_ID = 0;

        #endregion

        #region Business Properties

        /// <summary>
        /// Maintains metadata about <see cref="City_ID"/> property.
        /// </summary>
        public static readonly PropertyInfo<int> City_IDProperty = RegisterProperty<int>(p => p.City_ID, "Cities ID");
        /// <summary>
        /// Gets the Cities ID.
        /// </summary>
        /// <value>The Cities ID.</value>
        public int City_ID
        {
            get { return GetProperty(City_IDProperty); }
        }

        /// <summary>
        /// Maintains metadata about <see cref="City_Name"/> property.
        /// </summary>
        public static readonly PropertyInfo<string> City_NameProperty = RegisterProperty<string>(p => p.City_Name, "Cities Name");
        /// <summary>
        /// Gets or sets the Cities Name.
        /// </summary>
        /// <value>The Cities Name.</value>
        public string City_Name
        {
            get { return GetProperty(City_NameProperty); }
            set { SetProperty(City_NameProperty, value); }
        }

        /// <summary>
        /// Maintains metadata about child <see cref="E11_City_SingleObject"/> property.
        /// </summary>
        public static readonly PropertyInfo<E11_City_Child> E11_City_SingleObjectProperty = RegisterProperty<E11_City_Child>(p => p.E11_City_SingleObject, "E11 City Single Object", RelationshipTypes.Child);
        /// <summary>
        /// Gets the E11 City Single Object ("parent load" child property).
        /// </summary>
        /// <value>The E11 City Single Object.</value>
        public E11_City_Child E11_City_SingleObject
        {
            get { return GetProperty(E11_City_SingleObjectProperty); }
            private set { LoadProperty(E11_City_SingleObjectProperty, value); }
        }

        /// <summary>
        /// Maintains metadata about child <see cref="E11_City_ASingleObject"/> property.
        /// </summary>
        public static readonly PropertyInfo<E11_City_ReChild> E11_City_ASingleObjectProperty = RegisterProperty<E11_City_ReChild>(p => p.E11_City_ASingleObject, "E11 City ASingle Object", RelationshipTypes.Child);
        /// <summary>
        /// Gets the E11 City ASingle Object ("parent load" child property).
        /// </summary>
        /// <value>The E11 City ASingle Object.</value>
        public E11_City_ReChild E11_City_ASingleObject
        {
            get { return GetProperty(E11_City_ASingleObjectProperty); }
            private set { LoadProperty(E11_City_ASingleObjectProperty, value); }
        }

        /// <summary>
        /// Maintains metadata about child <see cref="E11_CityRoadObjects"/> property.
        /// </summary>
        public static readonly PropertyInfo<E11_CityRoadColl> E11_CityRoadObjectsProperty = RegisterProperty<E11_CityRoadColl>(p => p.E11_CityRoadObjects, "E11 CityRoad Objects", RelationshipTypes.Child);
        /// <summary>
        /// Gets the E11 City Road Objects ("parent load" child property).
        /// </summary>
        /// <value>The E11 City Road Objects.</value>
        public E11_CityRoadColl E11_CityRoadObjects
        {
            get { return GetProperty(E11_CityRoadObjectsProperty); }
            private set { LoadProperty(E11_CityRoadObjectsProperty, value); }
        }

        #endregion

        #region Factory Methods

        /// <summary>
        /// Factory method. Creates a new <see cref="E10_City"/> object.
        /// </summary>
        /// <returns>A reference to the created <see cref="E10_City"/> object.</returns>
        internal static E10_City NewE10_City()
        {
            return DataPortal.CreateChild<E10_City>();
        }

        /// <summary>
        /// Factory method. Loads a <see cref="E10_City"/> object from the given E10_CityDto.
        /// </summary>
        /// <param name="data">The <see cref="E10_CityDto"/>.</param>
        /// <returns>A reference to the fetched <see cref="E10_City"/> object.</returns>
        internal static E10_City GetE10_City(E10_CityDto data)
        {
            E10_City obj = new E10_City();
            // show the framework that this is a child object
            obj.MarkAsChild();
            obj.Fetch(data);
            // "Parent load" pattern: the roads collection starts empty here;
            // NOTE(review): single-object children appear to be attached later
            // via the LoadChild overloads — confirm against the parent loader.
            obj.LoadProperty(E11_CityRoadObjectsProperty, E11_CityRoadColl.NewE11_CityRoadColl());
            obj.MarkOld();
            return obj;
        }

        #endregion

        #region Constructor

        /// <summary>
        /// Initializes a new instance of the <see cref="E10_City"/> class.
        /// </summary>
        /// <remarks> Do not use to create a Csla object. Use factory methods instead.</remarks>
        [System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
        public E10_City()
        {
            // Use factory methods and do not use direct creation.

            // show the framework that this is a child object
            MarkAsChild();
        }

        #endregion

        #region Data Access

        /// <summary>
        /// Loads default values for the <see cref="E10_City"/> object properties.
        /// </summary>
        [Csla.RunLocal]
        protected override void Child_Create()
        {
            // Temporary negative ID until the database assigns the real one on insert.
            LoadProperty(City_IDProperty, System.Threading.Interlocked.Decrement(ref _lastID));
            LoadProperty(E11_City_SingleObjectProperty, DataPortal.CreateChild<E11_City_Child>());
            LoadProperty(E11_City_ASingleObjectProperty, DataPortal.CreateChild<E11_City_ReChild>());
            LoadProperty(E11_CityRoadObjectsProperty, DataPortal.CreateChild<E11_CityRoadColl>());
            var args = new DataPortalHookArgs();
            OnCreate(args);
            base.Child_Create();
        }

        /// <summary>
        /// Loads a <see cref="E10_City"/> object from the given <see cref="E10_CityDto"/>.
        /// </summary>
        /// <param name="data">The E10_CityDto to use.</param>
        private void Fetch(E10_CityDto data)
        {
            // Value properties
            LoadProperty(City_IDProperty, data.City_ID);
            LoadProperty(City_NameProperty, data.City_Name);
            // parent properties
            parent_Region_ID = data.Parent_Region_ID;
            var args = new DataPortalHookArgs(data);
            OnFetchRead(args);
        }

        /// <summary>
        /// Loads child <see cref="E11_City_Child"/> object.
        /// </summary>
        /// <param name="child">The child object to load.</param>
        internal void LoadChild(E11_City_Child child)
        {
            LoadProperty(E11_City_SingleObjectProperty, child);
        }

        /// <summary>
        /// Loads child <see cref="E11_City_ReChild"/> object.
        /// </summary>
        /// <param name="child">The child object to load.</param>
        internal void LoadChild(E11_City_ReChild child)
        {
            LoadProperty(E11_City_ASingleObjectProperty, child);
        }

        /// <summary>
        /// Inserts a new <see cref="E10_City"/> object in the database.
        /// </summary>
        /// <param name="parent">The parent object.</param>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_Insert(E08_Region parent)
        {
            var dto = new E10_CityDto();
            dto.Parent_Region_ID = parent.Region_ID;
            dto.City_Name = City_Name;
            using (var dalManager = DalFactoryParentLoadSoftDelete.GetManager())
            {
                var args = new DataPortalHookArgs(dto);
                OnInsertPre(args);
                var dal = dalManager.GetProvider<IE10_CityDal>();
                using (BypassPropertyChecks)
                {
                    var resultDto = dal.Insert(dto);
                    // Replace the temporary ID with the database-assigned one.
                    LoadProperty(City_IDProperty, resultDto.City_ID);
                    args = new DataPortalHookArgs(resultDto);
                }
                OnInsertPost(args);
                // flushes all pending data operations
                FieldManager.UpdateChildren(this);
            }
        }

        /// <summary>
        /// Updates in the database all changes made to the <see cref="E10_City"/> object.
        /// </summary>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_Update()
        {
            // Skip the round-trip entirely when nothing changed.
            if (!IsDirty)
                return;

            var dto = new E10_CityDto();
            dto.City_ID = City_ID;
            dto.City_Name = City_Name;
            using (var dalManager = DalFactoryParentLoadSoftDelete.GetManager())
            {
                var args = new DataPortalHookArgs(dto);
                OnUpdatePre(args);
                var dal = dalManager.GetProvider<IE10_CityDal>();
                using (BypassPropertyChecks)
                {
                    var resultDto = dal.Update(dto);
                    args = new DataPortalHookArgs(resultDto);
                }
                OnUpdatePost(args);
                // flushes all pending data operations
                FieldManager.UpdateChildren(this);
            }
        }

        /// <summary>
        /// Self deletes the <see cref="E10_City"/> object from database.
        /// </summary>
        [Transactional(TransactionalTypes.TransactionScope)]
        private void Child_DeleteSelf()
        {
            using (var dalManager = DalFactoryParentLoadSoftDelete.GetManager())
            {
                var args = new DataPortalHookArgs();
                // flushes all pending data operations
                FieldManager.UpdateChildren(this);
                OnDeletePre(args);
                var dal = dalManager.GetProvider<IE10_CityDal>();
                using (BypassPropertyChecks)
                {
                    dal.Delete(ReadProperty(City_IDProperty));
                }
                OnDeletePost(args);
            }
        }

        #endregion

        #region DataPortal Hooks

        /// <summary>
        /// Occurs after setting all defaults for object creation.
        /// </summary>
        partial void OnCreate(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Delete, after setting query parameters and before the delete operation.
        /// </summary>
        partial void OnDeletePre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Delete, after the delete operation, before Commit().
        /// </summary>
        partial void OnDeletePost(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after setting query parameters and before the fetch operation.
        /// </summary>
        partial void OnFetchPre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after the fetch operation (object or collection is fully loaded and set up).
        /// </summary>
        partial void OnFetchPost(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after the low level fetch operation, before the data reader is destroyed.
        /// </summary>
        partial void OnFetchRead(DataPortalHookArgs args);

        /// <summary>
        /// Occurs after setting query parameters and before the update operation.
        /// </summary>
        partial void OnUpdatePre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Insert, after the update operation, before setting back row identifiers (RowVersion) and Commit().
        /// </summary>
        partial void OnUpdatePost(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Insert, after setting query parameters and before the insert operation.
        /// </summary>
        partial void OnInsertPre(DataPortalHookArgs args);

        /// <summary>
        /// Occurs in DataPortal_Insert, after the insert operation, before setting back row identifiers (ID and RowVersion) and Commit().
        /// </summary>
        partial void OnInsertPost(DataPortalHookArgs args);

        #endregion

    }
}
using System;
using System.Collections;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Text;
using System.Windows.Forms;
using GenLib;
using PrimerProObjects;
using PrimerProLocalization;

namespace PrimerProForms
{
    /// <summary>
    /// Dialog that collects a space-delimited set of graphemes and search options
    /// for searching text data. After the dialog closes, the caller reads the
    /// results through <see cref="Graphemes"/>, <see cref="ParaFormat"/>,
    /// <see cref="UseGraphemesTaught"/> and <see cref="NoDuplicates"/>.
    /// </summary>
    public partial class FormGraphemeTD : Form
    {
        //private GraphemeTDSearch m_Search;      //Grapheme Search for Text Data
        //private string m_Grapheme;
        private ArrayList m_Graphemes;       // graphemes chosen by the user; null when cancelled or empty
        private bool m_ParaFormat;           // output in paragraph format
        private bool m_UseGraphemesTaught;   // restrict search to graphemes already taught
        private bool m_NoDuplicates;         // suppress duplicates (mutually exclusive with paragraph format)
        private GraphemeInventory m_GI;      // inventory supplying the selectable symbols
        private string m_Lang;               // UI language ("" = default)

        /// <summary>
        /// Initializes the dialog with the default (non-localized) UI.
        /// </summary>
        /// <param name="font">Font for the grapheme text box.</param>
        /// <param name="gi">Grapheme inventory supplying the selectable symbols.</param>
        public FormGraphemeTD(Font font, GraphemeInventory gi)
        {
            InitializeComponent();
            this.chkGraphemesTaught.Checked = false;
            this.chkParaFmt.Checked = false;
            this.chkNoDup.Checked = false;
            this.tbGraphemes.Font = font;
            m_GI = gi;
            m_Lang = "";
        }

        /// <summary>
        /// Initializes the dialog and localizes its captions from the given table.
        /// </summary>
        /// <param name="font">Font for the grapheme text box.</param>
        /// <param name="gi">Grapheme inventory supplying the selectable symbols.</param>
        /// <param name="table">Localization table for UI captions.</param>
        /// <param name="lang">UI language code (see <see cref="OptionList"/>).</param>
        public FormGraphemeTD(Font font, GraphemeInventory gi, LocalizationTable table, string lang)
        {
            InitializeComponent();
            this.chkGraphemesTaught.Checked = false;
            this.chkParaFmt.Checked = false;
            this.chkNoDup.Checked = false;
            this.tbGraphemes.Font = font;
            m_GI = gi;
            m_Lang = lang;
            this.UpdateFormForLocalization(table);
        }

        /// <summary>
        /// Clean up any resources being used.
        /// </summary>
        protected override void Dispose(bool disposing)
        {
            if (disposing)
            {
                if (components != null)
                {
                    components.Dispose();
                }
            }
            base.Dispose(disposing);
        }

        /// <summary>Graphemes chosen by the user; null when cancelled or none entered.</summary>
        public ArrayList Graphemes
        {
            get { return m_Graphemes; }
        }

        /// <summary>True when output should use paragraph format.</summary>
        public bool ParaFormat
        {
            get { return m_ParaFormat; }
        }

        /// <summary>True when the search should use only graphemes already taught.</summary>
        public bool UseGraphemesTaught
        {
            get { return m_UseGraphemesTaught; }
        }

        /// <summary>True when duplicate results should be suppressed.</summary>
        public bool NoDuplicates
        {
            get { return m_NoDuplicates; }
        }

        /// <summary>
        /// Opens the (possibly localized) item-selection dialog pre-populated with
        /// all inventory symbols, and writes the user's choice back to the text box.
        /// </summary>
        private void btnGraphemes_Click(object sender, EventArgs e)
        {
            GraphemeInventory gi = m_GI;

            // Build the full list of selectable symbols from the inventory.
            ArrayList alGI = new ArrayList();
            for (int i = 0; i < gi.ConsonantCount(); i++)
                alGI.Add(gi.GetConsonant(i).Symbol);
            for (int i = 0; i < gi.VowelCount(); i++)
                alGI.Add(gi.GetVowel(i).Symbol);
            for (int i = 0; i < gi.ToneCount(); i++)
                alGI.Add(gi.GetTone(i).Symbol);
            for (int i = 0; i < gi.SyllographCount(); i++)
                alGI.Add(gi.GetSyllograph(i).Symbol);

            // Current selection is the space-delimited content of the text box.
            ArrayList alSelection = Funct.ConvertStringToArrayList(this.tbGraphemes.Text, Constants.Space.ToString());

            // The selection dialog comes in three localized flavors with identical shape.
            if ((m_Lang != "") && (m_Lang == OptionList.kFrench))
            {
                FormItemSelectionFrench form = new FormItemSelectionFrench(alGI, alSelection, labGraphemes.Text, tbGraphemes.Font);
                if (form.ShowDialog() == DialogResult.OK)
                    SetSelectedGraphemes(form.Selection());
            }
            else if ((m_Lang != "") && (m_Lang == OptionList.kSpanish))
            {
                FormItemSelectionSpanish form = new FormItemSelectionSpanish(alGI, alSelection, labGraphemes.Text, tbGraphemes.Font);
                if (form.ShowDialog() == DialogResult.OK)
                    SetSelectedGraphemes(form.Selection());
            }
            else
            {
                FormItemSelection form = new FormItemSelection(alGI, alSelection, labGraphemes.Text, tbGraphemes.Font);
                if (form.ShowDialog() == DialogResult.OK)
                    SetSelectedGraphemes(form.Selection());
            }
        }

        /// <summary>
        /// Writes the chosen graphemes back into the text box as a space-delimited string.
        /// </summary>
        private void SetSelectedGraphemes(ArrayList al)
        {
            this.tbGraphemes.Text = Funct.ConvertArrayListToString(al, Constants.Space.ToString());
        }

        /// <summary>
        /// Captures the dialog results from the UI controls into the result fields.
        /// </summary>
        private void btnOK_Click(object sender, EventArgs e)
        {
            string strGrfs = tbGraphemes.Text.Trim();
            if (strGrfs != "")
            {
                m_Graphemes = Funct.ConvertStringToArrayList(strGrfs, Constants.Space.ToString());
                m_ParaFormat = chkParaFmt.Checked;
                m_UseGraphemesTaught = chkGraphemesTaught.Checked;
                m_NoDuplicates = chkNoDup.Checked;
            }
            else
            {
                // No graphemes entered: surface an empty result.
                m_Graphemes = null;
                m_ParaFormat = false;
                m_UseGraphemesTaught = false;
                m_NoDuplicates = false;
            }
            // NOTE(review): unlike btnCancel_Click, this handler does not call Close();
            // presumably the designer assigns this button's DialogResult — confirm.
        }

        /// <summary>
        /// Clears all results and closes the dialog.
        /// </summary>
        private void btnCancel_Click(object sender, EventArgs e)
        {
            m_Graphemes = null;
            m_ParaFormat = false;
            m_UseGraphemesTaught = false;
            m_NoDuplicates = false;
            this.Close();
        }

        /// <summary>
        /// Keeps "no duplicates" and "paragraph format" mutually exclusive:
        /// checking the former disables and clears the latter.
        /// </summary>
        private void chkNoDup_CheckedChanged(object sender, EventArgs e)
        {
            if (chkNoDup.Checked)
            {
                chkParaFmt.Enabled = false;
                chkParaFmt.Checked = false;
            }
            else
                chkParaFmt.Enabled = true;
        }

        /// <summary>
        /// Replaces control captions with localized text where the table supplies it.
        /// NOTE(review): keys "FormGraphemeTD1" and "FormGraphemeTD3" are intentionally
        /// skipped here — confirm those controls have no localizable caption.
        /// </summary>
        private void UpdateFormForLocalization(LocalizationTable table)
        {
            string strText = "";
            strText = table.GetForm("FormGraphemeTDT");
            if (strText != "")
                this.Text = strText;
            strText = table.GetForm("FormGraphemeTD0");
            if (strText != "")
                this.labGraphemes.Text = strText;
            strText = table.GetForm("FormGraphemeTD2");
            if (strText != "")
                this.chkParaFmt.Text = strText;
            strText = table.GetForm("FormGraphemeTD4");
            if (strText != "")
                this.chkGraphemesTaught.Text = strText;
            strText = table.GetForm("FormGraphemeTD5");
            if (strText != "")
                this.chkNoDup.Text = strText;
            strText = table.GetForm("FormGraphemeTD6");
            if (strText != "")
                this.btnOK.Text = strText;
            strText = table.GetForm("FormGraphemeTD7");
            if (strText != "")
                this.btnCancel.Text = strText;
        }
    }
}
using System;
using System.Runtime.InteropServices;
using System.Threading;

namespace SevenDigital.Messaging.Infrastructure.SignalHandling.PrivateMonoDerived
{
	/// <summary>
	/// P/Invoke bindings into the MonoPosixHelper native library for POSIX
	/// signal translation and UnixSignal install/uninstall/wait support.
	/// </summary>
	static class UnsafeNativeMethods
	{
		// Native callback used by WaitAny to poll whether the runtime is shutting down.
		[UnmanagedFunctionPointer(CallingConvention.Cdecl)]
		public delegate int Mono_Posix_RuntimeIsShuttingDown();

		[DllImport("MonoPosixHelper", EntryPoint = "Mono_Posix_FromSignum")]
		public static extern int FromSignum(Signum value, out int rval);

		[DllImport("MonoPosixHelper", EntryPoint = "Mono_Posix_ToSignum")]
		public static extern int ToSignum(int value, out Signum rval);

		[DllImport("MonoPosixHelper", EntryPoint = "Mono_Posix_FromRealTimeSignum")]
		public static extern int FromRealTimeSignum(int offset, out int rval);

		[DllImport("MonoPosixHelper", CallingConvention = CallingConvention.Cdecl, EntryPoint = "Mono_Unix_UnixSignal_install", SetLastError = true)]
		public static extern IntPtr install(int signum);

		[DllImport("MonoPosixHelper", CallingConvention = CallingConvention.Cdecl, EntryPoint = "Mono_Unix_UnixSignal_uninstall")]
		public static extern int uninstall(IntPtr info);

		[DllImport("MonoPosixHelper", CallingConvention = CallingConvention.Cdecl, EntryPoint = "Mono_Unix_UnixSignal_WaitAny")]
		public static extern int WaitAny(IntPtr[] infos, int count, int timeout, Mono_Posix_RuntimeIsShuttingDown shutting_down);

		[DllImport("MonoPosixHelper", CallingConvention = CallingConvention.Cdecl, EntryPoint = "Mono_Posix_SIGRTMIN")]
		public static extern int GetSIGRTMIN();

		[DllImport("MonoPosixHelper", CallingConvention = CallingConvention.Cdecl, EntryPoint = "Mono_Posix_SIGRTMAX")]
		public static extern int GetSIGRTMAX();
	}

	/// <summary>
	/// Managed wrapper around a POSIX signal handler installed via MonoPosixHelper
	/// (derived from Mono's private Mono.Unix.UnixSignal implementation).
	/// </summary>
	class UnixSignal : WaitHandle
	{
		/// <summary>
		/// Tries to translate a raw signal number to a <see cref="Signum"/>;
		/// returns false when the number is not a valid signal.
		/// </summary>
		public static bool TryToSignum(int value, out Signum rval)
		{
			// The native helper returns 0 on success.
			return UnsafeNativeMethods.ToSignum(value, out rval) == 0;
		}

		/// <summary>
		/// Translates a raw signal number to a <see cref="Signum"/>, throwing
		/// ArgumentException for an unrecognized number.
		/// </summary>
		public static Signum ToSignum(int value)
		{
			Signum result;
			if (UnsafeNativeMethods.ToSignum(value, out result) == -1)
			{
				throw new ArgumentException("value " + value + " is not an acceptable signum");
			}
			return result;
		}

		/// <summary>
		/// Wraps a real-time signal offset (relative to SIGRTMIN).
		/// </summary>
		public static RealTimeSignum ToRealTimeSignum(int offset)
		{
			return new RealTimeSignum(offset);
		}

		/// <summary>
		/// Tries to translate a <see cref="Signum"/> to the platform's raw signal number.
		/// </summary>
		public static bool TryFromSignum(Signum value, out int rval)
		{
			return UnsafeNativeMethods.FromSignum(value, out rval) == 0;
		}

		/// <summary>
		/// Translates a <see cref="Signum"/> to the platform's raw signal number,
		/// throwing ArgumentException for an unmapped value.
		/// </summary>
		public static int FromSignum(Signum value)
		{
			int result;
			if (UnsafeNativeMethods.FromSignum(value, out result) == -1)
			{
				throw new ArgumentException("value " + value + " is not an acceptable signum");
			}
			return result;
		}

		/// <summary>
		/// Translates a real-time signum (SIGRTMIN-relative offset) to the raw
		/// signal number, throwing ArgumentException for an invalid offset.
		/// </summary>
		public static int FromRealTimeSignum(RealTimeSignum sig)
		{
			int result;
			if (UnsafeNativeMethods.FromRealTimeSignum(sig.Offset, out result) == -1)
			{
				throw new ArgumentException("sig.Offset " + sig.Offset + " is not an acceptable offset");
			}
			return result;
		}

#pragma warning disable 169
		// ReSharper disable FieldCanBeMadeReadOnly.Local, MemberCanBePrivate.Local
		// Mirrors the native signal-info record allocated by Mono_Unix_UnixSignal_install;
		// field order/layout must match the native side exactly.
		[StructLayout(LayoutKind.Sequential)]
		private struct SignalInfo
		{
			public int signum;
			public int count;
			public int read_fd;
			public int write_fd;
			public int have_handler;
			public int pipecnt;
			public IntPtr handler;
		}
		// ReSharper restore FieldCanBeMadeReadOnly.Local, MemberCanBePrivate.Local
#pragma warning restore 169

		private readonly int signum;
		// Pointer to the native SignalInfo record; IntPtr.Zero once uninstalled.
		private IntPtr signal_info;

		// Kept in a static field so the delegate is not garbage-collected while
		// native code may still invoke it.
		private static readonly UnsafeNativeMethods.Mono_Posix_RuntimeIsShuttingDown ShuttingDown = RuntimeShuttingDownCallback;

		/// <summary>
		/// The classic signal this instance wraps; throws when it is a real-time signal.
		/// </summary>
		public Signum Signum
		{
			get
			{
				if (IsRealTimeSignal)
				{
					throw new InvalidOperationException("This signal is a RealTimeSignum");
				}
				return ToSignum(signum);
			}
		}

		/// <summary>
		/// The real-time signal this instance wraps; throws for classic signals.
		/// </summary>
		public RealTimeSignum RealTimeSignum
		{
			get
			{
				if (!IsRealTimeSignal)
				{
					throw new InvalidOperationException("This signal is not a RealTimeSignum");
				}
				// Real-time signals are expressed as offsets from SIGRTMIN.
				return ToRealTimeSignum(signum - UnsafeNativeMethods.GetSIGRTMIN());
			}
		}

		/// <summary>
		/// True when the wrapped signal number falls in the real-time range
		/// (SIGRTMIN or above); -1 from the native helper means "unsupported".
		/// </summary>
		public bool IsRealTimeSignal
		{
			get
			{
				AssertValid();
				int sIGRTMIN = UnsafeNativeMethods.GetSIGRTMIN();
				return sIGRTMIN != -1 && signum >= sIGRTMIN;
			}
		}

		// Typed view of the native signal-info record; valid only while installed.
		private unsafe SignalInfo* Info
		{
			get
			{
				AssertValid();
				return (SignalInfo*)((void*)signal_info);
			}
		}

		/// <summary>True when the signal has fired at least once since installation.</summary>
		public bool IsSet
		{
			get { return Count > 0; }
		}

		// NOTE(review): source chunk is truncated here — the Count property body
		// (and the rest of the class, including AssertValid and
		// RuntimeShuttingDownCallback referenced above) continues past this view.
		public unsafe int Count
{ get { return Info->count; } set { Interlocked.Exchange(ref Info->count, value); } } public UnixSignal(Signum signum) { this.signum = FromSignum(signum); signal_info = UnsafeNativeMethods.install(this.signum); if (signal_info == IntPtr.Zero) { throw new ArgumentException("Unable to handle signal", "signum"); } } private static int RuntimeShuttingDownCallback() { return (!Environment.HasShutdownStarted) ? 0 : 1; } private void AssertValid() { if (signal_info == IntPtr.Zero) { throw new ObjectDisposedException(GetType().FullName); } } public unsafe bool Reset() { int num = Interlocked.Exchange(ref Info->count, 0); return num != 0; } protected override void Dispose(bool disposing) { base.Dispose(disposing); if (signal_info == IntPtr.Zero) { return; } UnsafeNativeMethods.uninstall(signal_info); signal_info = IntPtr.Zero; } public override bool WaitOne() { return WaitOne(-1, false); } public override bool WaitOne(TimeSpan timeout, bool exitContext) { var num = (long)timeout.TotalMilliseconds; if (num < -1L || num > 2147483647L) { throw new ArgumentOutOfRangeException("timeout"); } return WaitOne((int)num, exitContext); } public override bool WaitOne(int millisecondsTimeout, bool exitContext) { AssertValid(); if (exitContext) { throw new InvalidOperationException("exitContext is not supported"); } return WaitAny(new[] { this }, millisecondsTimeout) == 0; } public static int WaitAny(UnixSignal[] signals) { return WaitAny(signals, -1); } public static int WaitAny(UnixSignal[] signals, TimeSpan timeout) { var num = (long)timeout.TotalMilliseconds; if (num < -1L || num > 2147483647L) { throw new ArgumentOutOfRangeException("timeout"); } return WaitAny(signals, (int)num); } public static int WaitAny(UnixSignal[] signals, int millisecondsTimeout) { if (signals == null) { throw new ArgumentNullException("signals"); } if (millisecondsTimeout < -1) { throw new ArgumentOutOfRangeException("millisecondsTimeout"); } var array = new IntPtr[signals.Length]; for (int i = 0; i < 
signals.Length; i++) { array[i] = signals[i].signal_info; if (array[i] == IntPtr.Zero) { throw new InvalidOperationException("Disposed UnixSignal"); } } return UnsafeNativeMethods.WaitAny(array, array.Length, millisecondsTimeout, ShuttingDown); } } }
// Copyright (c) The Avalonia Project. All rights reserved.
// Licensed under the MIT license. See licence.md file in the project root for full license information.

namespace Avalonia.Media
{
    /// <summary>
    /// Predefined brushes.
    /// </summary>
    public static class Brushes
    {
        /// <summary>
        /// Initializes static members of the <see cref="Brushes"/> class.
        /// All brushes are created eagerly, one per named color in <c>Colors</c>,
        /// and shared process-wide via the static properties below.
        /// </summary>
        static Brushes()
        {
            AliceBlue = new SolidColorBrush(Colors.AliceBlue);
            AntiqueWhite = new SolidColorBrush(Colors.AntiqueWhite);
            Aqua = new SolidColorBrush(Colors.Aqua);
            Aquamarine = new SolidColorBrush(Colors.Aquamarine);
            Azure = new SolidColorBrush(Colors.Azure);
            Beige = new SolidColorBrush(Colors.Beige);
            Bisque = new SolidColorBrush(Colors.Bisque);
            Black = new SolidColorBrush(Colors.Black);
            BlanchedAlmond = new SolidColorBrush(Colors.BlanchedAlmond);
            Blue = new SolidColorBrush(Colors.Blue);
            BlueViolet = new SolidColorBrush(Colors.BlueViolet);
            Brown = new SolidColorBrush(Colors.Brown);
            BurlyWood = new SolidColorBrush(Colors.BurlyWood);
            CadetBlue = new SolidColorBrush(Colors.CadetBlue);
            Chartreuse = new SolidColorBrush(Colors.Chartreuse);
            Chocolate = new SolidColorBrush(Colors.Chocolate);
            Coral = new SolidColorBrush(Colors.Coral);
            CornflowerBlue = new SolidColorBrush(Colors.CornflowerBlue);
            Cornsilk = new SolidColorBrush(Colors.Cornsilk);
            Crimson = new SolidColorBrush(Colors.Crimson);
            Cyan = new SolidColorBrush(Colors.Cyan);
            DarkBlue = new SolidColorBrush(Colors.DarkBlue);
            DarkCyan = new SolidColorBrush(Colors.DarkCyan);
            DarkGoldenrod = new SolidColorBrush(Colors.DarkGoldenrod);
            DarkGray = new SolidColorBrush(Colors.DarkGray);
            DarkGreen = new SolidColorBrush(Colors.DarkGreen);
            DarkKhaki = new SolidColorBrush(Colors.DarkKhaki);
            DarkMagenta = new SolidColorBrush(Colors.DarkMagenta);
            DarkOliveGreen = new SolidColorBrush(Colors.DarkOliveGreen);
            DarkOrange = new SolidColorBrush(Colors.DarkOrange);
            DarkOrchid = new SolidColorBrush(Colors.DarkOrchid);
            DarkRed = new SolidColorBrush(Colors.DarkRed);
            DarkSalmon = new SolidColorBrush(Colors.DarkSalmon);
            DarkSeaGreen = new SolidColorBrush(Colors.DarkSeaGreen);
            DarkSlateBlue = new SolidColorBrush(Colors.DarkSlateBlue);
            DarkSlateGray = new SolidColorBrush(Colors.DarkSlateGray);
            DarkTurquoise = new SolidColorBrush(Colors.DarkTurquoise);
            DarkViolet = new SolidColorBrush(Colors.DarkViolet);
            DeepPink = new SolidColorBrush(Colors.DeepPink);
            DeepSkyBlue = new SolidColorBrush(Colors.DeepSkyBlue);
            DimGray = new SolidColorBrush(Colors.DimGray);
            DodgerBlue = new SolidColorBrush(Colors.DodgerBlue);
            Firebrick = new SolidColorBrush(Colors.Firebrick);
            FloralWhite = new SolidColorBrush(Colors.FloralWhite);
            ForestGreen = new SolidColorBrush(Colors.ForestGreen);
            Fuchsia = new SolidColorBrush(Colors.Fuchsia);
            Gainsboro = new SolidColorBrush(Colors.Gainsboro);
            GhostWhite = new SolidColorBrush(Colors.GhostWhite);
            Gold = new SolidColorBrush(Colors.Gold);
            Goldenrod = new SolidColorBrush(Colors.Goldenrod);
            Gray = new SolidColorBrush(Colors.Gray);
            Green = new SolidColorBrush(Colors.Green);
            GreenYellow = new SolidColorBrush(Colors.GreenYellow);
            Honeydew = new SolidColorBrush(Colors.Honeydew);
            HotPink = new SolidColorBrush(Colors.HotPink);
            IndianRed = new SolidColorBrush(Colors.IndianRed);
            Indigo = new SolidColorBrush(Colors.Indigo);
            Ivory = new SolidColorBrush(Colors.Ivory);
            Khaki = new SolidColorBrush(Colors.Khaki);
            Lavender = new SolidColorBrush(Colors.Lavender);
            LavenderBlush = new SolidColorBrush(Colors.LavenderBlush);
            LawnGreen = new SolidColorBrush(Colors.LawnGreen);
            LemonChiffon = new SolidColorBrush(Colors.LemonChiffon);
            LightBlue = new SolidColorBrush(Colors.LightBlue);
            LightCoral = new SolidColorBrush(Colors.LightCoral);
            LightCyan = new SolidColorBrush(Colors.LightCyan);
            LightGoldenrodYellow = new SolidColorBrush(Colors.LightGoldenrodYellow);
            LightGray = new SolidColorBrush(Colors.LightGray);
            LightGreen = new SolidColorBrush(Colors.LightGreen);
            LightPink = new SolidColorBrush(Colors.LightPink);
            LightSalmon = new SolidColorBrush(Colors.LightSalmon);
            LightSeaGreen = new SolidColorBrush(Colors.LightSeaGreen);
            LightSkyBlue = new SolidColorBrush(Colors.LightSkyBlue);
            LightSlateGray = new SolidColorBrush(Colors.LightSlateGray);
            LightSteelBlue = new SolidColorBrush(Colors.LightSteelBlue);
            LightYellow = new SolidColorBrush(Colors.LightYellow);
            Lime = new SolidColorBrush(Colors.Lime);
            LimeGreen = new SolidColorBrush(Colors.LimeGreen);
            Linen = new SolidColorBrush(Colors.Linen);
            Magenta = new SolidColorBrush(Colors.Magenta);
            Maroon = new SolidColorBrush(Colors.Maroon);
            MediumAquamarine = new SolidColorBrush(Colors.MediumAquamarine);
            MediumBlue = new SolidColorBrush(Colors.MediumBlue);
            MediumOrchid = new SolidColorBrush(Colors.MediumOrchid);
            MediumPurple = new SolidColorBrush(Colors.MediumPurple);
            MediumSeaGreen = new SolidColorBrush(Colors.MediumSeaGreen);
            MediumSlateBlue = new SolidColorBrush(Colors.MediumSlateBlue);
            MediumSpringGreen = new SolidColorBrush(Colors.MediumSpringGreen);
            MediumTurquoise = new SolidColorBrush(Colors.MediumTurquoise);
            MediumVioletRed = new SolidColorBrush(Colors.MediumVioletRed);
            MidnightBlue = new SolidColorBrush(Colors.MidnightBlue);
            MintCream = new SolidColorBrush(Colors.MintCream);
            MistyRose = new SolidColorBrush(Colors.MistyRose);
            Moccasin = new SolidColorBrush(Colors.Moccasin);
            NavajoWhite = new SolidColorBrush(Colors.NavajoWhite);
            Navy = new SolidColorBrush(Colors.Navy);
            OldLace = new SolidColorBrush(Colors.OldLace);
            Olive = new SolidColorBrush(Colors.Olive);
            OliveDrab = new SolidColorBrush(Colors.OliveDrab);
            Orange = new SolidColorBrush(Colors.Orange);
            OrangeRed = new SolidColorBrush(Colors.OrangeRed);
            Orchid = new SolidColorBrush(Colors.Orchid);
            PaleGoldenrod = new SolidColorBrush(Colors.PaleGoldenrod);
            PaleGreen = new SolidColorBrush(Colors.PaleGreen);
            PaleTurquoise = new SolidColorBrush(Colors.PaleTurquoise);
            PaleVioletRed = new SolidColorBrush(Colors.PaleVioletRed);
            PapayaWhip = new SolidColorBrush(Colors.PapayaWhip);
            PeachPuff = new SolidColorBrush(Colors.PeachPuff);
            Peru = new SolidColorBrush(Colors.Peru);
            Pink = new SolidColorBrush(Colors.Pink);
            Plum = new SolidColorBrush(Colors.Plum);
            PowderBlue = new SolidColorBrush(Colors.PowderBlue);
            Purple = new SolidColorBrush(Colors.Purple);
            Red = new SolidColorBrush(Colors.Red);
            RosyBrown = new SolidColorBrush(Colors.RosyBrown);
            RoyalBlue = new SolidColorBrush(Colors.RoyalBlue);
            SaddleBrown = new SolidColorBrush(Colors.SaddleBrown);
            Salmon = new SolidColorBrush(Colors.Salmon);
            SandyBrown = new SolidColorBrush(Colors.SandyBrown);
            SeaGreen = new SolidColorBrush(Colors.SeaGreen);
            SeaShell = new SolidColorBrush(Colors.SeaShell);
            Sienna = new SolidColorBrush(Colors.Sienna);
            Silver = new SolidColorBrush(Colors.Silver);
            SkyBlue = new SolidColorBrush(Colors.SkyBlue);
            SlateBlue = new SolidColorBrush(Colors.SlateBlue);
            SlateGray = new SolidColorBrush(Colors.SlateGray);
            Snow = new SolidColorBrush(Colors.Snow);
            SpringGreen = new SolidColorBrush(Colors.SpringGreen);
            SteelBlue = new SolidColorBrush(Colors.SteelBlue);
            Tan = new SolidColorBrush(Colors.Tan);
            Teal = new SolidColorBrush(Colors.Teal);
            Thistle = new SolidColorBrush(Colors.Thistle);
            Tomato = new SolidColorBrush(Colors.Tomato);
            Transparent = new SolidColorBrush(Colors.Transparent);
            Turquoise = new SolidColorBrush(Colors.Turquoise);
            Violet = new SolidColorBrush(Colors.Violet);
            Wheat = new SolidColorBrush(Colors.Wheat);
            White = new SolidColorBrush(Colors.White);
            WhiteSmoke = new SolidColorBrush(Colors.WhiteSmoke);
            Yellow = new SolidColorBrush(Colors.Yellow);
            YellowGreen = new SolidColorBrush(Colors.YellowGreen);
        }

        /// <summary>
        /// Gets an <see cref="Colors.AliceBlue"/> colored brush.
        /// </summary>
        public static SolidColorBrush AliceBlue { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.AntiqueWhite"/> colored brush.
        /// </summary>
        public static SolidColorBrush AntiqueWhite { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Aqua"/> colored brush.
        /// </summary>
        public static SolidColorBrush Aqua { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Aquamarine"/> colored brush.
        /// </summary>
        public static SolidColorBrush Aquamarine { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Azure"/> colored brush.
        /// </summary>
        public static SolidColorBrush Azure { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Beige"/> colored brush.
        /// </summary>
        public static SolidColorBrush Beige { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Bisque"/> colored brush.
        /// </summary>
        public static SolidColorBrush Bisque { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Black"/> colored brush.
        /// </summary>
        public static SolidColorBrush Black { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.BlanchedAlmond"/> colored brush.
        /// </summary>
        public static SolidColorBrush BlanchedAlmond { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Blue"/> colored brush.
        /// </summary>
        public static SolidColorBrush Blue { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.BlueViolet"/> colored brush.
        /// </summary>
        public static SolidColorBrush BlueViolet { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Brown"/> colored brush.
        /// </summary>
        public static SolidColorBrush Brown { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.BurlyWood"/> colored brush.
        /// </summary>
        public static SolidColorBrush BurlyWood { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.CadetBlue"/> colored brush.
        /// </summary>
        public static SolidColorBrush CadetBlue { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Chartreuse"/> colored brush.
        /// </summary>
        public static SolidColorBrush Chartreuse { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Chocolate"/> colored brush.
        /// </summary>
        public static SolidColorBrush Chocolate { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Coral"/> colored brush.
        /// </summary>
        public static SolidColorBrush Coral { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.CornflowerBlue"/> colored brush.
        /// </summary>
        public static SolidColorBrush CornflowerBlue { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Cornsilk"/> colored brush.
        /// </summary>
        public static SolidColorBrush Cornsilk { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Crimson"/> colored brush.
        /// </summary>
        public static SolidColorBrush Crimson { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Cyan"/> colored brush.
        /// </summary>
        public static SolidColorBrush Cyan { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.DarkBlue"/> colored brush.
        /// </summary>
        public static SolidColorBrush DarkBlue { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.DarkCyan"/> colored brush.
        /// </summary>
        public static SolidColorBrush DarkCyan { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.DarkGoldenrod"/> colored brush.
        /// </summary>
        public static SolidColorBrush DarkGoldenrod { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.DarkGray"/> colored brush.
        /// </summary>
        public static SolidColorBrush DarkGray { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.DarkGreen"/> colored brush.
        /// </summary>
        public static SolidColorBrush DarkGreen { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.DarkKhaki"/> colored brush.
        /// </summary>
        public static SolidColorBrush DarkKhaki { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.DarkMagenta"/> colored brush.
        /// </summary>
        public static SolidColorBrush DarkMagenta { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.DarkOliveGreen"/> colored brush.
        /// </summary>
        public static SolidColorBrush DarkOliveGreen { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.DarkOrange"/> colored brush.
        /// </summary>
        public static SolidColorBrush DarkOrange { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.DarkOrchid"/> colored brush.
        /// </summary>
        public static SolidColorBrush DarkOrchid { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.DarkRed"/> colored brush.
        /// </summary>
        public static SolidColorBrush DarkRed { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.DarkSalmon"/> colored brush.
        /// </summary>
        public static SolidColorBrush DarkSalmon { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.DarkSeaGreen"/> colored brush.
        /// </summary>
        public static SolidColorBrush DarkSeaGreen { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.DarkSlateBlue"/> colored brush.
        /// </summary>
        public static SolidColorBrush DarkSlateBlue { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.DarkSlateGray"/> colored brush.
        /// </summary>
        public static SolidColorBrush DarkSlateGray { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.DarkTurquoise"/> colored brush.
        /// </summary>
        public static SolidColorBrush DarkTurquoise { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.DarkViolet"/> colored brush.
        /// </summary>
        public static SolidColorBrush DarkViolet { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.DeepPink"/> colored brush.
        /// </summary>
        public static SolidColorBrush DeepPink { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.DeepSkyBlue"/> colored brush.
        /// </summary>
        public static SolidColorBrush DeepSkyBlue { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.DimGray"/> colored brush.
        /// </summary>
        public static SolidColorBrush DimGray { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.DodgerBlue"/> colored brush.
        /// </summary>
        public static SolidColorBrush DodgerBlue { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Firebrick"/> colored brush.
        /// </summary>
        public static SolidColorBrush Firebrick { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.FloralWhite"/> colored brush.
        /// </summary>
        public static SolidColorBrush FloralWhite { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.ForestGreen"/> colored brush.
        /// </summary>
        public static SolidColorBrush ForestGreen { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Fuchsia"/> colored brush.
        /// </summary>
        public static SolidColorBrush Fuchsia { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Gainsboro"/> colored brush.
        /// </summary>
        public static SolidColorBrush Gainsboro { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.GhostWhite"/> colored brush.
        /// </summary>
        public static SolidColorBrush GhostWhite { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Gold"/> colored brush.
        /// </summary>
        public static SolidColorBrush Gold { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Goldenrod"/> colored brush.
        /// </summary>
        public static SolidColorBrush Goldenrod { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Gray"/> colored brush.
        /// </summary>
        public static SolidColorBrush Gray { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Green"/> colored brush.
        /// </summary>
        public static SolidColorBrush Green { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.GreenYellow"/> colored brush.
        /// </summary>
        public static SolidColorBrush GreenYellow { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Honeydew"/> colored brush.
        /// </summary>
        public static SolidColorBrush Honeydew { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.HotPink"/> colored brush.
        /// </summary>
        public static SolidColorBrush HotPink { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.IndianRed"/> colored brush.
        /// </summary>
        public static SolidColorBrush IndianRed { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Indigo"/> colored brush.
        /// </summary>
        public static SolidColorBrush Indigo { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Ivory"/> colored brush.
        /// </summary>
        public static SolidColorBrush Ivory { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Khaki"/> colored brush.
        /// </summary>
        public static SolidColorBrush Khaki { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Lavender"/> colored brush.
        /// </summary>
        public static SolidColorBrush Lavender { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.LavenderBlush"/> colored brush.
        /// </summary>
        public static SolidColorBrush LavenderBlush { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.LawnGreen"/> colored brush.
        /// </summary>
        public static SolidColorBrush LawnGreen { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.LemonChiffon"/> colored brush.
        /// </summary>
        public static SolidColorBrush LemonChiffon { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.LightBlue"/> colored brush.
        /// </summary>
        public static SolidColorBrush LightBlue { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.LightCoral"/> colored brush.
        /// </summary>
        public static SolidColorBrush LightCoral { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.LightCyan"/> colored brush.
        /// </summary>
        public static SolidColorBrush LightCyan { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.LightGoldenrodYellow"/> colored brush.
        /// </summary>
        public static SolidColorBrush LightGoldenrodYellow { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.LightGray"/> colored brush.
        /// </summary>
        public static SolidColorBrush LightGray { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.LightGreen"/> colored brush.
        /// </summary>
        public static SolidColorBrush LightGreen { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.LightPink"/> colored brush.
        /// </summary>
        public static SolidColorBrush LightPink { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.LightSalmon"/> colored brush.
        /// </summary>
        public static SolidColorBrush LightSalmon { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.LightSeaGreen"/> colored brush.
        /// </summary>
        public static SolidColorBrush LightSeaGreen { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.LightSkyBlue"/> colored brush.
        /// </summary>
        public static SolidColorBrush LightSkyBlue { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.LightSlateGray"/> colored brush.
        /// </summary>
        public static SolidColorBrush LightSlateGray { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.LightSteelBlue"/> colored brush.
        /// </summary>
        public static SolidColorBrush LightSteelBlue { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.LightYellow"/> colored brush.
        /// </summary>
        public static SolidColorBrush LightYellow { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Lime"/> colored brush.
        /// </summary>
        public static SolidColorBrush Lime { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.LimeGreen"/> colored brush.
        /// </summary>
        public static SolidColorBrush LimeGreen { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Linen"/> colored brush.
        /// </summary>
        public static SolidColorBrush Linen { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Magenta"/> colored brush.
        /// </summary>
        public static SolidColorBrush Magenta { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Maroon"/> colored brush.
        /// </summary>
        public static SolidColorBrush Maroon { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.MediumAquamarine"/> colored brush.
        /// </summary>
        public static SolidColorBrush MediumAquamarine { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.MediumBlue"/> colored brush.
        /// </summary>
        public static SolidColorBrush MediumBlue { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.MediumOrchid"/> colored brush.
        /// </summary>
        public static SolidColorBrush MediumOrchid { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.MediumPurple"/> colored brush.
        /// </summary>
        public static SolidColorBrush MediumPurple { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.MediumSeaGreen"/> colored brush.
        /// </summary>
        public static SolidColorBrush MediumSeaGreen { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.MediumSlateBlue"/> colored brush.
        /// </summary>
        public static SolidColorBrush MediumSlateBlue { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.MediumSpringGreen"/> colored brush.
        /// </summary>
        public static SolidColorBrush MediumSpringGreen { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.MediumTurquoise"/> colored brush.
        /// </summary>
        public static SolidColorBrush MediumTurquoise { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.MediumVioletRed"/> colored brush.
        /// </summary>
        public static SolidColorBrush MediumVioletRed { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.MidnightBlue"/> colored brush.
        /// </summary>
        public static SolidColorBrush MidnightBlue { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.MintCream"/> colored brush.
        /// </summary>
        public static SolidColorBrush MintCream { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.MistyRose"/> colored brush.
        /// </summary>
        public static SolidColorBrush MistyRose { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Moccasin"/> colored brush.
        /// </summary>
        public static SolidColorBrush Moccasin { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.NavajoWhite"/> colored brush.
        /// </summary>
        public static SolidColorBrush NavajoWhite { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Navy"/> colored brush.
        /// </summary>
        public static SolidColorBrush Navy { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.OldLace"/> colored brush.
        /// </summary>
        public static SolidColorBrush OldLace { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Olive"/> colored brush.
        /// </summary>
        public static SolidColorBrush Olive { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.OliveDrab"/> colored brush.
        /// </summary>
        public static SolidColorBrush OliveDrab { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Orange"/> colored brush.
        /// </summary>
        public static SolidColorBrush Orange { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.OrangeRed"/> colored brush.
        /// </summary>
        public static SolidColorBrush OrangeRed { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Orchid"/> colored brush.
        /// </summary>
        public static SolidColorBrush Orchid { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.PaleGoldenrod"/> colored brush.
        /// </summary>
        public static SolidColorBrush PaleGoldenrod { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.PaleGreen"/> colored brush.
        /// </summary>
        public static SolidColorBrush PaleGreen { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.PaleTurquoise"/> colored brush.
        /// </summary>
        public static SolidColorBrush PaleTurquoise { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.PaleVioletRed"/> colored brush.
        /// </summary>
        public static SolidColorBrush PaleVioletRed { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.PapayaWhip"/> colored brush.
        /// </summary>
        public static SolidColorBrush PapayaWhip { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.PeachPuff"/> colored brush.
        /// </summary>
        public static SolidColorBrush PeachPuff { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Peru"/> colored brush.
        /// </summary>
        public static SolidColorBrush Peru { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Pink"/> colored brush.
        /// </summary>
        public static SolidColorBrush Pink { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Plum"/> colored brush.
        /// </summary>
        public static SolidColorBrush Plum { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.PowderBlue"/> colored brush.
        /// </summary>
        public static SolidColorBrush PowderBlue { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Purple"/> colored brush.
        /// </summary>
        public static SolidColorBrush Purple { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Red"/> colored brush.
        /// </summary>
        public static SolidColorBrush Red { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.RosyBrown"/> colored brush.
        /// </summary>
        public static SolidColorBrush RosyBrown { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.RoyalBlue"/> colored brush.
        /// </summary>
        public static SolidColorBrush RoyalBlue { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.SaddleBrown"/> colored brush.
        /// </summary>
        public static SolidColorBrush SaddleBrown { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Salmon"/> colored brush.
        /// </summary>
        public static SolidColorBrush Salmon { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.SandyBrown"/> colored brush.
        /// </summary>
        public static SolidColorBrush SandyBrown { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.SeaGreen"/> colored brush.
        /// </summary>
        public static SolidColorBrush SeaGreen { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.SeaShell"/> colored brush.
        /// </summary>
        public static SolidColorBrush SeaShell { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Sienna"/> colored brush.
        /// </summary>
        public static SolidColorBrush Sienna { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Silver"/> colored brush.
        /// </summary>
        public static SolidColorBrush Silver { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.SkyBlue"/> colored brush.
        /// </summary>
        public static SolidColorBrush SkyBlue { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.SlateBlue"/> colored brush.
        /// </summary>
        public static SolidColorBrush SlateBlue { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.SlateGray"/> colored brush.
        /// </summary>
        public static SolidColorBrush SlateGray { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Snow"/> colored brush.
        /// </summary>
        public static SolidColorBrush Snow { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.SpringGreen"/> colored brush.
        /// </summary>
        public static SolidColorBrush SpringGreen { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.SteelBlue"/> colored brush.
        /// </summary>
        public static SolidColorBrush SteelBlue { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Tan"/> colored brush.
        /// </summary>
        public static SolidColorBrush Tan { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Teal"/> colored brush.
        /// </summary>
        public static SolidColorBrush Teal { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Thistle"/> colored brush.
        /// </summary>
        public static SolidColorBrush Thistle { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Tomato"/> colored brush.
        /// </summary>
        public static SolidColorBrush Tomato { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Transparent"/> colored brush.
        /// </summary>
        public static SolidColorBrush Transparent { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Turquoise"/> colored brush.
        /// </summary>
        public static SolidColorBrush Turquoise { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Violet"/> colored brush.
        /// </summary>
        public static SolidColorBrush Violet { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Wheat"/> colored brush.
        /// </summary>
        public static SolidColorBrush Wheat { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.White"/> colored brush.
        /// </summary>
        public static SolidColorBrush White { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.WhiteSmoke"/> colored brush.
        /// </summary>
        public static SolidColorBrush WhiteSmoke { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.Yellow"/> colored brush.
        /// </summary>
        public static SolidColorBrush Yellow { get; private set; }

        /// <summary>
        /// Gets an <see cref="Colors.YellowGreen"/> colored brush.
        /// </summary>
        public static SolidColorBrush YellowGreen { get; private set; }
    }
}
#region License
// Copyright (c) 2007-2018, FluentMigrator Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#endregion

using FluentMigrator.Runner.Generators.Redshift;

using NUnit.Framework;

using Shouldly;

namespace FluentMigrator.Tests.Unit.Generators.Redshift
{
    /// <summary>
    /// Verifies the SQL emitted by <see cref="RedshiftGenerator"/> for data
    /// (INSERT/UPDATE/DELETE) expressions, for both an explicit custom schema
    /// and the Redshift default schema ("public").
    /// </summary>
    [TestFixture]
    public sealed class RedshiftDataTests : BaseDataTests
    {
        private RedshiftGenerator _generator;

        [SetUp]
        public void Setup()
        {
            _generator = new RedshiftGenerator();
        }

        [Test]
        public override void CanDeleteDataForAllRowsWithCustomSchema()
        {
            var expression = GeneratorTestHelper.GetDeleteDataAllRowsExpression();
            expression.SchemaName = "TestSchema";

            var result = _generator.Generate(expression);
            result.ShouldBe("DELETE FROM \"TestSchema\".\"TestTable1\";");
        }

        [Test]
        public override void CanDeleteDataForAllRowsWithDefaultSchema()
        {
            var expression = GeneratorTestHelper.GetDeleteDataAllRowsExpression();

            var result = _generator.Generate(expression);
            result.ShouldBe("DELETE FROM \"public\".\"TestTable1\";");
        }

        [Test]
        public override void CanDeleteDataForMultipleRowsWithCustomSchema()
        {
            var expression = GeneratorTestHelper.GetDeleteDataMultipleRowsExpression();
            expression.SchemaName = "TestSchema";

            var result = _generator.Generate(expression);
            result.ShouldBe(
                "DELETE FROM \"TestSchema\".\"TestTable1\" WHERE \"Name\" = 'Just''in' AND \"Website\" IS NULL;DELETE FROM \"TestSchema\".\"TestTable1\" WHERE \"Website\" = 'github.com';");
        }

        [Test]
        public override void CanDeleteDataForMultipleRowsWithDefaultSchema()
        {
            var expression = GeneratorTestHelper.GetDeleteDataMultipleRowsExpression();

            var result = _generator.Generate(expression);
            result.ShouldBe(
                "DELETE FROM \"public\".\"TestTable1\" WHERE \"Name\" = 'Just''in' AND \"Website\" IS NULL;DELETE FROM \"public\".\"TestTable1\" WHERE \"Website\" = 'github.com';");
        }

        [Test]
        public override void CanDeleteDataWithCustomSchema()
        {
            var expression = GeneratorTestHelper.GetDeleteDataExpression();
            expression.SchemaName = "TestSchema";

            var result = _generator.Generate(expression);
            result.ShouldBe(
                "DELETE FROM \"TestSchema\".\"TestTable1\" WHERE \"Name\" = 'Just''in' AND \"Website\" IS NULL;");
        }

        [Test]
        public override void CanDeleteDataWithDefaultSchema()
        {
            var expression = GeneratorTestHelper.GetDeleteDataExpression();

            var result = _generator.Generate(expression);
            result.ShouldBe(
                "DELETE FROM \"public\".\"TestTable1\" WHERE \"Name\" = 'Just''in' AND \"Website\" IS NULL;");
        }

        // Fix: this override was missing [Test]; NUnit does not pick up
        // attributes from the abstract base declaration, so the test was
        // silently never executed.
        [Test]
        public override void CanDeleteDataWithDbNullCriteria()
        {
            var expression = GeneratorTestHelper.GetDeleteDataExpressionWithDbNullValue();

            var result = _generator.Generate(expression);
            result.ShouldBe(
                "DELETE FROM \"public\".\"TestTable1\" WHERE \"Name\" = 'Just''in' AND \"Website\" IS NULL;");
        }

        [Test]
        public override void CanInsertDataWithCustomSchema()
        {
            var expression = GeneratorTestHelper.GetInsertDataExpression();
            expression.SchemaName = "TestSchema";

            var expected = "INSERT INTO \"TestSchema\".\"TestTable1\" (\"Id\",\"Name\",\"Website\") VALUES (1,'Just''in','codethinked.com');";
            expected += "INSERT INTO \"TestSchema\".\"TestTable1\" (\"Id\",\"Name\",\"Website\") VALUES (2,'Na\\te','kohari.org');";

            var result = _generator.Generate(expression);
            result.ShouldBe(expected);
        }

        [Test]
        public override void CanInsertDataWithDefaultSchema()
        {
            var expression = GeneratorTestHelper.GetInsertDataExpression();

            var expected = "INSERT INTO \"public\".\"TestTable1\" (\"Id\",\"Name\",\"Website\") VALUES (1,'Just''in','codethinked.com');";
            expected += "INSERT INTO \"public\".\"TestTable1\" (\"Id\",\"Name\",\"Website\") VALUES (2,'Na\\te','kohari.org');";

            var result = _generator.Generate(expression);
            result.ShouldBe(expected);
        }

        [Test]
        public override void CanInsertGuidDataWithCustomSchema()
        {
            var expression = GeneratorTestHelper.GetInsertGUIDExpression();
            expression.SchemaName = "TestSchema";

            var result = _generator.Generate(expression);
            result.ShouldBe(string.Format("INSERT INTO \"TestSchema\".\"TestTable1\" (\"guid\") VALUES ('{0}');", GeneratorTestHelper.TestGuid));
        }

        [Test]
        public override void CanInsertGuidDataWithDefaultSchema()
        {
            var expression = GeneratorTestHelper.GetInsertGUIDExpression();

            var result = _generator.Generate(expression);
            result.ShouldBe(string.Format("INSERT INTO \"public\".\"TestTable1\" (\"guid\") VALUES ('{0}');", GeneratorTestHelper.TestGuid));
        }

        [Test]
        public override void CanUpdateDataForAllDataWithCustomSchema()
        {
            var expression = GeneratorTestHelper.GetUpdateDataExpressionWithAllRows();
            expression.SchemaName = "TestSchema";

            var result = _generator.Generate(expression);
            result.ShouldBe("UPDATE \"TestSchema\".\"TestTable1\" SET \"Name\" = 'Just''in', \"Age\" = 25 WHERE 1 = 1;");
        }

        [Test]
        public override void CanUpdateDataForAllDataWithDefaultSchema()
        {
            var expression = GeneratorTestHelper.GetUpdateDataExpressionWithAllRows();

            var result = _generator.Generate(expression);
            result.ShouldBe("UPDATE \"public\".\"TestTable1\" SET \"Name\" = 'Just''in', \"Age\" = 25 WHERE 1 = 1;");
        }

        [Test]
        public override void CanUpdateDataWithCustomSchema()
        {
            var expression = GeneratorTestHelper.GetUpdateDataExpression();
            expression.SchemaName = "TestSchema";

            var result = _generator.Generate(expression);
            result.ShouldBe(
                "UPDATE \"TestSchema\".\"TestTable1\" SET \"Name\" = 'Just''in', \"Age\" = 25 WHERE \"Id\" = 9 AND \"Homepage\" IS NULL;");
        }

        [Test]
        public override void CanUpdateDataWithDefaultSchema()
        {
            var expression = GeneratorTestHelper.GetUpdateDataExpression();

            var result = _generator.Generate(expression);
            result.ShouldBe(
                "UPDATE \"public\".\"TestTable1\" SET \"Name\" = 'Just''in', \"Age\" = 25 WHERE \"Id\" = 9 AND \"Homepage\" IS NULL;");
        }

        // Fix: this override was also missing [Test] (see
        // CanDeleteDataWithDbNullCriteria above) and therefore never ran.
        [Test]
        public override void CanUpdateDataWithDbNullCriteria()
        {
            var expression = GeneratorTestHelper.GetUpdateDataExpressionWithDbNullValue();

            var result = _generator.Generate(expression);
            result.ShouldBe(
                "UPDATE \"public\".\"TestTable1\" SET \"Name\" = 'Just''in', \"Age\" = 25 WHERE \"Id\" = 9 AND \"Homepage\" IS NULL;");
        }
    }
}
/*
 * Copyright (c) Contributors, http://aurora-sim.org/, http://opensimulator.org/
 * See CONTRIBUTORS.TXT for a full list of copyright holders.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in the
 *       documentation and/or other materials provided with the distribution.
 *     * Neither the name of the Aurora-Sim Project nor the
 *       names of its contributors may be used to endorse or promote products
 *       derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using Aurora.Framework;
using Nini.Config;
using OpenMetaverse;
using OpenMetaverse.StructuredData;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Services.Interfaces;
using FriendInfo = OpenSim.Services.Interfaces.FriendInfo;

namespace Aurora.Modules.Friends
{
    /// <summary>
    ///     Shared region module that implements the friends subsystem:
    ///     friendship offers/approvals/denials/terminations, friend rights
    ///     (grant/revoke), online/offline status notifications, and a
    ///     per-agent cache of friend lists. Requests for friends who are not
    ///     in a local scene are forwarded via the sync-message services.
    /// </summary>
    public class FriendsModule : ISharedRegionModule, IFriendsModule
    {
        // Per-agent friend-list cache, keyed by agent UUID. Guarded by lock (m_Friends).
        protected Dictionary<UUID, UserFriendData> m_Friends = new Dictionary<UUID, UserFriendData>();

        // All scenes this shared module is attached to; index 0 is used to reach grid-wide services.
        protected List<IScene> m_Scenes = new List<IScene>();
        public bool m_enabled = true;

        // Ensures the sync-message handler is only wired up once across all regions.
        protected bool m_firstStart = true;

        // Status changes queued for agents who were offline when the change happened;
        // drained in SendFriendsOnlineIfNeeded when the agent becomes a root agent.
        // Guarded by lock (m_friendsToInformOfStatusChanges).
        protected Dictionary<UUID, List<UUID>> m_friendsToInformOfStatusChanges = new Dictionary<UUID, List<UUID>>();

        // NOTE(review): all of these service accessors index m_Scenes[0] without a
        // count check (unlike GridService) — they will throw if called before any
        // region has been added. Presumably callers only run after AddRegion; confirm.
        protected IFriendsService FriendsService
        {
            get { return m_Scenes[0].RequestModuleInterface<IFriendsService>(); }
        }

        protected IGridService GridService
        {
            get
            {
                if (m_Scenes.Count == 0)
                    return null;
                return m_Scenes[0].GridService;
            }
        }

        public IUserAccountService UserAccountService
        {
            get { return m_Scenes[0].UserAccountService; }
        }

        public IAsyncMessagePostService AsyncMessagePostService
        {
            get { return m_Scenes[0].RequestModuleInterface<IAsyncMessagePostService>(); }
        }

        public ISyncMessagePosterService SyncMessagePosterService
        {
            get { return m_Scenes[0].RequestModuleInterface<ISyncMessagePosterService>(); }
        }

        public IAsyncMessageRecievedService AsyncMessageRecievedService
        {
            get { return m_Scenes[0].RequestModuleInterface<IAsyncMessageRecievedService>(); }
        }

        #region IFriendsModule Members

        /// <summary>
        ///     Returns the rights flags that <paramref name="friendID"/> has granted
        ///     to <paramref name="principalID"/> (TheirFlags of the matching entry),
        ///     or -1 when the two are not friends.
        /// </summary>
        public int GetFriendPerms(UUID principalID, UUID friendID)
        {
            FriendInfo[] friends = GetFriends(principalID);
#if (!ISWIN)
            foreach (FriendInfo fi in friends)
            {
                if (fi.Friend == friendID.ToString())
                {
                    return fi.TheirFlags;
                }
            }
#else
            foreach (FriendInfo fi in friends.Where(fi => fi.Friend == friendID.ToString()))
            {
                return fi.TheirFlags;
            }
#endif
            return -1;
        }

        /// <summary>
        ///     Tells <paramref name="FriendToInformID"/> that <paramref name="userID"/>
        ///     went online/offline. If the friend has a root agent in a local scene the
        ///     notice is sent immediately; otherwise it is queued and delivered when
        ///     the friend next becomes a root agent (see SendFriendsOnlineIfNeeded).
        /// </summary>
        public void SendFriendsStatusMessage(UUID FriendToInformID, UUID userID, bool online)
        {
            // Try local
            IClientAPI friendClient = LocateClientObject(FriendToInformID);
            if (friendClient != null)
            {
                //MainConsole.Instance.DebugFormat("[FRIENDS]: Local Status Notify {0} that user {1} is {2}", friendID, userID, online);
                // the friend in this sim as root agent
                if (online)
                    friendClient.SendAgentOnline(new[] {userID});
                else
                    friendClient.SendAgentOffline(new[] {userID});
                // we're done
                return;
            }
            // Friend is not here: queue the notification for later delivery.
            // NOTE(review): only the "online" identity is queued, not the online/offline
            // flag itself — the drain path always replays these as "online" (true).
            lock (m_friendsToInformOfStatusChanges)
            {
                if (!m_friendsToInformOfStatusChanges.ContainsKey(FriendToInformID))
                    m_friendsToInformOfStatusChanges.Add(FriendToInformID, new List<UUID>());
                m_friendsToInformOfStatusChanges[FriendToInformID].Add(userID);
            }
            // Friend is not online. Ignore.
        }

        /// <summary>
        ///     Returns the cached friend list for <paramref name="agentID"/>, filling
        ///     the cache from the friends service on a miss. Returns an empty array
        ///     when nothing can be resolved. Note the service call in
        ///     UpdateFriendsCache happens while holding the m_Friends lock.
        /// </summary>
        public FriendInfo[] GetFriends(UUID agentID)
        {
            UserFriendData friendsData;
            lock (m_Friends)
            {
                if (m_Friends.TryGetValue(agentID, out friendsData))
                    return friendsData.Friends;
                else
                {
                    UpdateFriendsCache(agentID);
                    if (m_Friends.TryGetValue(agentID, out friendsData))
                        return friendsData.Friends;
                }
            }
            return new FriendInfo[0];
        }

        #endregion

        #region ISharedRegionModule Members

        public void Initialise(IConfigSource config)
        {
        }

        public void PostInitialise()
        {
        }

        public void Close()
        {
        }

        /// <summary>
        ///     Registers this module with the scene and subscribes to the
        ///     client/agent lifecycle events it needs.
        /// </summary>
        public void AddRegion(IScene scene)
        {
            if (!m_enabled) return;
            m_Scenes.Add(scene);
            scene.RegisterModuleInterface<IFriendsModule>(this);

            scene.EventManager.OnNewClient += OnNewClient;
            scene.EventManager.OnClosingClient += OnClosingClient;
            scene.EventManager.OnMakeRootAgent += OnMakeRootAgent;
        }

        public void RegionLoaded(IScene scene)
        {
            // Wire the inter-region message handler only once, on the first region.
            if (m_firstStart)
                AsyncMessageRecievedService.OnMessageReceived += OnMessageReceived;
            m_firstStart = false;
        }

        /// <summary>
        ///     Mirror of AddRegion: unregisters the interface and unsubscribes
        ///     from scene events.
        /// </summary>
        public void RemoveRegion(IScene scene)
        {
            if (!m_enabled) return;
            m_Scenes.Remove(scene);
            scene.UnregisterModuleInterface<IFriendsModule>(this);

            scene.EventManager.OnNewClient -= OnNewClient;
            scene.EventManager.OnClosingClient -= OnClosingClient;
            scene.EventManager.OnMakeRootAgent -= OnMakeRootAgent;
        }

        public string Name
        {
            get { return "FriendsModule"; }
        }

        public Type ReplaceableInterface
        {
            get { return null; }
        }

        #endregion

        /// <summary>
        ///     Handler for friends-related messages forwarded from other regions.
        ///     Dispatches on the "Method" key and applies the change to any locally
        ///     connected client. Always returns null (no response payload).
        /// </summary>
        protected OSDMap OnMessageReceived(OSDMap message)
        {
            if (!message.ContainsKey("Method"))
                return null;
            if (message["Method"] == "FriendGrantRights")
            {
                UUID Requester = message["Requester"].AsUUID();
                UUID Target = message["Target"].AsUUID();
                int MyFlags = message["MyFlags"].AsInteger();
                int Rights = message["Rights"].AsInteger();
                LocalGrantRights(Requester, Target, MyFlags, Rights);
            }
            else if (message["Method"] == "FriendTerminated")
            {
                UUID Requester = message["Requester"].AsUUID();
                UUID ExFriend = message["ExFriend"].AsUUID();
                LocalFriendshipTerminated(ExFriend, Requester);
            }
            else if (message["Method"] == "FriendshipOffered")
            {
                //UUID Requester = message["Requester"].AsUUID();
                UUID Friend = message["Friend"].AsUUID();
                GridInstantMessage im = new GridInstantMessage();
                im.FromOSD((OSDMap) message["Message"]);
                LocalFriendshipOffered(Friend, im);
            }
            else if (message["Method"] == "FriendshipDenied")
            {
                UUID Requester = message["Requester"].AsUUID();
                string ClientName = message["ClientName"].AsString();
                UUID FriendID = message["FriendID"].AsUUID();
                LocalFriendshipDenied(Requester, ClientName, FriendID);
            }
            else if (message["Method"] == "FriendshipApproved")
            {
                UUID Requester = message["Requester"].AsUUID();
                string ClientName = message["ClientName"].AsString();
                UUID FriendID = message["FriendID"].AsUUID();
                LocalFriendshipApproved(Requester, ClientName, null, FriendID);
            }
            return null;
        }

        // Unsubscribe all client event handlers registered in OnNewClient.
        private void OnClosingClient(IClientAPI client)
        {
            client.OnInstantMessage -= OnInstantMessage;
            client.OnApproveFriendRequest -= OnApproveFriendRequest;
            client.OnDenyFriendRequest -= OnDenyFriendRequest;
            client.OnTerminateFriendship -= OnTerminateFriendship;
            client.OnGrantUserRights -= OnGrantUserRights;
        }

        // Subscribe to the friendship-related client events for a newly connected client.
        private void OnNewClient(IClientAPI client)
        {
            client.OnInstantMessage += OnInstantMessage;
            client.OnApproveFriendRequest += OnApproveFriendRequest;
            client.OnDenyFriendRequest += OnDenyFriendRequest;
            client.OnTerminateFriendship += OnTerminateFriendship;
            client.OnGrantUserRights += OnGrantUserRights;

            //Only send if they are root!
            //Util.FireAndForget(delegate(object o)
            //{
            //    SendFriendsOnlineIfNeeded(client);
            //});
        }

        private void OnMakeRootAgent(IScenePresence presence)
        {
            //Only send if they are root!
            // Runs off-thread so login isn't blocked by friends-service lookups.
            Util.FireAndForget(delegate { SendFriendsOnlineIfNeeded(presence.ControllingClient); });
        }

        /// <summary>
        ///     Called when an agent becomes root: re-delivers any friendship offers
        ///     still pending (TheirFlags == -1), registers HG friend identities with
        ///     the user-management module, and drains the queued online-status
        ///     notifications accumulated while the agent was away.
        /// </summary>
        public void SendFriendsOnlineIfNeeded(IClientAPI client)
        {
            UUID agentID = client.AgentId;

            // Send outstanding friendship offers
            List<string> outstanding = new List<string>();
            FriendInfo[] friends = GetFriends(agentID);
            foreach (FriendInfo fi in friends)
            {
                // TheirFlags == -1 marks an offer the other side has not answered yet.
                if (fi.TheirFlags == -1)
                    outstanding.Add(fi.Friend);
                UUID friendID;
                string url = "", first = "", last = "", secret = "";
                // Hypergrid identifiers parse into uuid + home url + name parts.
                if (HGUtil.ParseUniversalUserIdentifier(fi.Friend, out friendID, out url, out first, out last,
                                                        out secret))
                {
                    IUserManagement userManagement = m_Scenes[0].RequestModuleInterface<IUserManagement>();
                    if (userManagement != null)
                        userManagement.AddUser(friendID, fi.Friend);
                }
            }

            // One template IM is reused for every outstanding offer; per-offer fields
            // (fromAgentID, fromAgentName, imSessionID) are overwritten in the loop.
            GridInstantMessage im = new GridInstantMessage(client.Scene, UUID.Zero, String.Empty, agentID,
                                                           (byte) InstantMessageDialog.FriendshipOffered,
                                                           "Will you be my friend?", true, Vector3.Zero);

            foreach (string fid in outstanding)
            {
                UUID fromAgentID;
                string url = "", first = "", last = "", secret = "";
                if (!UUID.TryParse(fid, out fromAgentID))
                    if (
                        !HGUtil.ParseUniversalUserIdentifier(fid, out fromAgentID, out url, out first, out last,
                                                             out secret))
                        continue;

                UserAccount account = m_Scenes[0].UserAccountService.GetUserAccount(client.Scene.RegionInfo.ScopeID,
                                                                                    fromAgentID);

                im.fromAgentID = fromAgentID;
                if (account != null)
                    im.fromAgentName = account.Name;
                else
                    im.fromAgentName = first + " " + last;
                im.offline = 1;
                // imSessionID doubles as the offerer's agent ID (see ForwardFriendshipOffer).
                im.imSessionID = im.fromAgentID;

                // Finally
                LocalFriendshipOffered(agentID, im);
            }

            // Drain queued status changes. Queued entries are always replayed as
            // "online" — see the note in SendFriendsStatusMessage.
            lock (m_friendsToInformOfStatusChanges)
            {
                if (m_friendsToInformOfStatusChanges.ContainsKey(agentID))
                {
                    List<UUID> onlineFriends = new List<UUID>(m_friendsToInformOfStatusChanges[agentID]);
                    foreach (UUID friend in onlineFriends)
                    {
                        SendFriendsStatusMessage(agentID, friend, true);
                    }
                    m_friendsToInformOfStatusChanges.Remove(agentID);
                }
            }
        }

        /// <summary>
        ///   Find the client for a ID
        /// </summary>
        public IClientAPI LocateClientObject(UUID agentID)
        {
            IScene scene = GetClientScene(agentID);
            if (scene != null)
            {
                IScenePresence presence = scene.GetScenePresence(agentID);
                if (presence != null)
                    return presence.ControllingClient;
            }
            return null;
        }

        /// <summary>
        ///   Find the scene for an agent
        /// </summary>
        public IScene GetClientScene(UUID agentId)
        {
            // Only scenes where the agent is a ROOT agent count; null if none.
            lock (m_Scenes)
            {
                foreach (IScene scene in from scene in m_Scenes
                                         let presence = scene.GetScenePresence(agentId)
                                         where presence != null && !presence.IsChildAgent
                                         select scene)
                {
                    return scene;
                }
            }
            return null;
        }

        /// <summary>
        ///     Client IM handler: only FriendshipOffered dialogs are acted on.
        ///     Persists the reverse relation (flags 0 = offer pending) and forwards
        ///     the offer to the prospective friend.
        /// </summary>
        private void OnInstantMessage(IClientAPI client, GridInstantMessage im)
        {
            if ((InstantMessageDialog) im.dialog == InstantMessageDialog.FriendshipOffered)
            {
                // we got a friendship offer
                UUID principalID = im.fromAgentID;
                UUID friendID = im.toAgentID;

                //Can't trust the incoming name for friend offers, so we have to find it ourselves.
                // NOTE(review): sender / reciever are dereferenced without a null
                // check — an unknown account here would throw. Confirm the account
                // service can never return null for a connected sender.
                UserAccount sender = m_Scenes[0].UserAccountService.GetUserAccount(UUID.Zero, principalID);
                im.fromAgentName = sender.Name;
                UserAccount reciever = m_Scenes[0].UserAccountService.GetUserAccount(UUID.Zero, friendID);

                MainConsole.Instance.DebugFormat("[FRIENDS]: {0} offered friendship to {1}", sender.Name,
                                                 reciever.Name);

                // This user wants to be friends with the other user.
                // Let's add the relation backwards, in case the other is not online
                FriendsService.StoreFriend(friendID, principalID.ToString(), 0);

                // Now let's ask the other user to be friends with this user
                ForwardFriendshipOffer(principalID, friendID, im);
            }
        }

        /// <summary>
        ///     Delivers a friendship offer to the friend's client: locally if they
        ///     have a root agent here, otherwise via the sync-message poster.
        /// </summary>
        private void ForwardFriendshipOffer(UUID agentID, UUID friendID, GridInstantMessage im)
        {
            // !!!!!!!! This is a hack so that we don't have to keep state (transactionID/imSessionID)
            // We stick this agent's ID as imSession, so that it's directly available on the receiving end
            im.imSessionID = im.fromAgentID;

            // Try the local sim
            UserAccount account = UserAccountService.GetUserAccount(m_Scenes[0].RegionInfo.ScopeID, agentID);
            im.fromAgentName = (account == null) ? "Unknown" : account.Name;

            if (LocalFriendshipOffered(friendID, im))
                return;

            // The prospective friend is not here [as root]. Let's forward.
            SyncMessagePosterService.Post(SyncMessageHelper.FriendshipOffered(
                agentID, friendID, im, m_Scenes[0].RegionInfo.RegionHandle), m_Scenes[0].RegionInfo.RegionHandle);

            // If the prospective friend is not online, he'll get the message upon login.
        }

        /// <summary>
        ///     Client accepted a friendship offer: stores the relation both ways
        ///     (flags 1), refreshes this agent's cache, drops a calling card in the
        ///     accepter's inventory, and notifies the offerer (locally or forwarded).
        /// </summary>
        private void OnApproveFriendRequest(IClientAPI client, UUID agentID, UUID friendID,
                                            List<UUID> callingCardFolders)
        {
            MainConsole.Instance.DebugFormat("[FRIENDS]: {0} accepted friendship from {1}", agentID, friendID);

            FriendsService.StoreFriend(agentID, friendID.ToString(), 1);
            FriendsService.StoreFriend(friendID, agentID.ToString(), 1);

            // Update the local cache
            UpdateFriendsCache(agentID);

            //
            // Notify the friend
            //

            //
            // Send calling card to the local user
            //

            ICallingCardModule ccmodule = client.Scene.RequestModuleInterface<ICallingCardModule>();
            if (ccmodule != null)
            {
                UserAccount account = client.Scene.UserAccountService.GetUserAccount(UUID.Zero, friendID);
                UUID folderID =
                    client.Scene.InventoryService.GetFolderForType(agentID, InventoryType.Unknown,
                                                                   AssetType.CallingCard).ID;
                if (account != null)
                    ccmodule.CreateCallingCard(client, friendID, folderID, account.Name);
            }

            // Try Local
            if (LocalFriendshipApproved(agentID, client.Name, client, friendID))
                return;
            SyncMessagePosterService.Post(SyncMessageHelper.FriendshipApproved(
                agentID, client.Name, friendID, m_Scenes[0].RegionInfo.RegionHandle),
                                          m_Scenes[0].RegionInfo.RegionHandle);
        }

        /// <summary>
        ///     Client declined a friendship offer: removes both pending relation rows
        ///     and notifies the offerer (locally or forwarded).
        /// </summary>
        private void OnDenyFriendRequest(IClientAPI client, UUID agentID, UUID friendID,
                                         List<UUID> callingCardFolders)
        {
            MainConsole.Instance.DebugFormat("[FRIENDS]: {0} denied friendship to {1}", agentID, friendID);

            FriendsService.Delete(agentID, friendID.ToString());
            FriendsService.Delete(friendID, agentID.ToString());

            //
            // Notify the friend
            //

            // Try local
            if (LocalFriendshipDenied(agentID, client.Name, friendID))
                return;
            SyncMessagePosterService.Post(SyncMessageHelper.FriendshipDenied(
                agentID, client.Name, friendID, m_Scenes[0].RegionInfo.RegionHandle),
                                          m_Scenes[0].RegionInfo.RegionHandle);
        }

        /// <summary>
        ///     Client ended a friendship: deletes the relation both ways, refreshes
        ///     this agent's cache, echoes the termination back to the terminating
        ///     client, and notifies the ex-friend (locally or forwarded).
        /// </summary>
        private void OnTerminateFriendship(IClientAPI client, UUID agentID, UUID exfriendID)
        {
            FriendsService.Delete(agentID, exfriendID.ToString());
            FriendsService.Delete(exfriendID, agentID.ToString());

            // Update local cache
            UpdateFriendsCache(agentID);

            client.SendTerminateFriend(exfriendID);

            //
            // Notify the friend
            //

            // Try local
            if (LocalFriendshipTerminated(exfriendID, agentID))
                return;

            SyncMessagePosterService.Post(SyncMessageHelper.FriendTerminated(
                agentID, exfriendID, m_Scenes[0].RegionInfo.RegionHandle), m_Scenes[0].RegionInfo.RegionHandle);
        }

        /// <summary>
        ///     Client changed the rights it grants a friend: persists the new flags,
        ///     updates the cached entry, echoes the change to the requesting client,
        ///     and notifies the friend (locally or forwarded) with the OLD flags so
        ///     the receiver can compute which bits changed.
        /// </summary>
        private void OnGrantUserRights(IClientAPI remoteClient, UUID requester, UUID target, int rights)
        {
            FriendInfo[] friends = GetFriends(remoteClient.AgentId);
            if (friends.Length == 0)
                return;

            MainConsole.Instance.DebugFormat("[FRIENDS MODULE]: User {0} changing rights to {1} for friend {2}",
                                             requester, rights, target);
            // Let's find the friend in this user's friend list
            FriendInfo friend = null;
#if (!ISWIN)
            foreach (FriendInfo fi in friends)
            {
                if (fi.Friend == target.ToString())
                {
                    friend = fi;
                }
            }
#else
            foreach (FriendInfo fi in friends.Where(fi => fi.Friend == target.ToString()))
            {
                friend = fi;
            }
#endif

            if (friend != null) // Found it
            {
                // Store it on the DB
                FriendsService.StoreFriend(requester, target.ToString(), rights);

                // Store it in the local cache
                int myFlags = friend.MyFlags;
                friend.MyFlags = rights;

                // Always send this back to the original client
                remoteClient.SendChangeUserRights(requester, target, rights);

                //
                // Notify the friend
                //

                // Try local
                if (!LocalGrantRights(requester, target, myFlags, rights))
                {
                    SyncMessagePosterService.Post(SyncMessageHelper.FriendGrantRights(
                        requester, target, myFlags, rights, m_Scenes[0].RegionInfo.RegionHandle),
                                                  m_Scenes[0].RegionInfo.RegionHandle);
                }
            }
        }

        // Replace the cached friend list for agentID with a fresh copy from the
        // friends service (synchronous service call; cache swap under lock).
        private void UpdateFriendsCache(UUID agentID)
        {
            UserFriendData friendsData = new UserFriendData
                                             {
                                                 PrincipalID = agentID,
                                                 Refcount = 0,
                                                 Friends = FriendsService.GetFriends(agentID).ToArray()
                                             };
            lock (m_Friends)
            {
                m_Friends[agentID] = friendsData;
            }
        }

        #region Local

        /// <summary>
        ///     Delivers a friendship-offer IM to toID if they have a root agent in a
        ///     local scene. Returns true when delivered locally, false otherwise.
        /// </summary>
        public bool LocalFriendshipOffered(UUID toID, GridInstantMessage im)
        {
            IClientAPI friendClient = LocateClientObject(toID);
            if (friendClient != null)
            {
                // the prospective friend in this sim as root agent
                friendClient.SendInstantMessage(im);
                // we're done
                return true;
            }
            return false;
        }

        /// <summary>
        ///     Completes a friendship approval on the friend's side when they are
        ///     local: sends the acceptance IM, shows the accepter online, refreshes
        ///     the friend's cache, and files a calling card. Returns true when the
        ///     friend was local, false otherwise.
        /// </summary>
        public bool LocalFriendshipApproved(UUID userID, string name, IClientAPI us, UUID friendID)
        {
            IClientAPI friendClient = LocateClientObject(friendID);
            if (friendClient != null)
            {
                //They are online, send the online message
                if (us != null)
                    us.SendAgentOnline(new[] {friendID});

                // the prospective friend in this sim as root agent
                GridInstantMessage im = new GridInstantMessage(m_Scenes[0], userID, name, friendID,
                                                               (byte) InstantMessageDialog.FriendshipAccepted,
                                                               userID.ToString(), false, Vector3.Zero);
                friendClient.SendInstantMessage(im);

                // Update the local cache
                UpdateFriendsCache(friendID);

                //
                // put a calling card into the inventory of the friend
                //
                ICallingCardModule ccmodule = friendClient.Scene.RequestModuleInterface<ICallingCardModule>();
                if (ccmodule != null)
                {
                    UserAccount account = friendClient.Scene.UserAccountService.GetUserAccount(UUID.Zero, userID);
                    UUID folderID =
                        friendClient.Scene.InventoryService.GetFolderForType(friendID, InventoryType.Unknown,
                                                                             AssetType.CallingCard).ID;
                    // NOTE(review): unlike OnApproveFriendRequest, account is not
                    // null-checked before account.Name here — verify it cannot be null.
                    ccmodule.CreateCallingCard(friendClient, userID, folderID, account.Name);
                }
                // we're done
                return true;
            }
            return false;
        }

        /// <summary>
        ///     Delivers a friendship-declined IM to friendID when they are local.
        ///     Returns true when delivered locally, false otherwise.
        /// </summary>
        public bool LocalFriendshipDenied(UUID userID, string userName, UUID friendID)
        {
            IClientAPI friendClient = LocateClientObject(friendID);
            if (friendClient != null)
            {
                // the prospective friend in this sim as root agent
                GridInstantMessage im = new GridInstantMessage(m_Scenes[0], userID, userName, friendID,
                                                               (byte) InstantMessageDialog.FriendshipDeclined,
                                                               userID.ToString(), false, Vector3.Zero);
                friendClient.SendInstantMessage(im);
                // we're done
                return true;
            }
            return false;
        }

        /// <summary>
        ///     Applies a friendship termination on the ex-friend's side when they are
        ///     local: refreshes their cache and sends the terminate packet. Returns
        ///     true when they were local, false otherwise.
        /// </summary>
        public bool LocalFriendshipTerminated(UUID exfriendID, UUID terminatingUser)
        {
            IClientAPI friendClient = LocateClientObject(exfriendID);
            if (friendClient != null)
            {
                // update local cache
                UpdateFriendsCache(exfriendID);
                // the friend in this sim as root agent
                // you do NOT send the friend his uuid... /me sighs... - Revolution
                friendClient.SendTerminateFriend(terminatingUser);
                return true;
            }
            return false;
        }

        /// <summary>
        ///     Applies a rights change on the friend's side when they are local:
        ///     sends the appropriate online/offline or rights-change packet based on
        ///     which bits flipped (userFlags = old rights, rights = new rights),
        ///     updates both cached friend lists, and pushes prim-flag updates so
        ///     object-edit permissions take effect immediately. Returns true when
        ///     the friend was local, false otherwise.
        /// </summary>
        public bool LocalGrantRights(UUID userID, UUID friendID, int userFlags, int rights)
        {
            IClientAPI friendClient = LocateClientObject(friendID);
            if (friendClient != null)
            {
                // XOR of old and new flags isolates the bits that changed.
                bool onlineBitChanged = ((rights ^ userFlags) & (int) FriendRights.CanSeeOnline) != 0;
                if (onlineBitChanged)
                {
                    // NOTE(review): "== 1" assumes FriendRights.CanSeeOnline has the
                    // numeric value 1; "!= 0" would be the bit-test idiom. Confirm
                    // against the FriendRights enum definition.
                    if ((rights & (int) FriendRights.CanSeeOnline) == 1)
                        friendClient.SendAgentOnline(new[] {new UUID(userID)});
                    else
                        friendClient.SendAgentOffline(new[] {new UUID(userID)});
                }
                else
                {
                    bool canEditObjectsChanged = ((rights ^ userFlags) & (int) FriendRights.CanModifyObjects) != 0;
                    if (canEditObjectsChanged)
                        friendClient.SendChangeUserRights(userID, friendID, rights);
                }

                // Update local cache
                FriendInfo[] friends = GetFriends(friendID);
                lock (m_Friends)
                {
#if (!ISWIN)
                    foreach (FriendInfo finfo in friends)
                    {
                        if (finfo.Friend == userID.ToString())
                        {
                            finfo.TheirFlags = rights;
                        }
                    }
#else
                    foreach (FriendInfo finfo in friends.Where(finfo => finfo.Friend == userID.ToString()))
                    {
                        finfo.TheirFlags = rights;
                    }
#endif
                }
                friends = GetFriends(userID);
                lock (m_Friends)
                {
#if (!ISWIN)
                    foreach (FriendInfo finfo in friends)
                    {
                        if (finfo.Friend == friendID.ToString())
                        {
                            finfo.MyFlags = rights;
                        }
                    }
#else
                    foreach (FriendInfo finfo in friends.Where(finfo => finfo.Friend == friendID.ToString()))
                    {
                        finfo.MyFlags = rights;
                    }
#endif
                }

                //Add primFlag updates for all the prims in the sim with the owner, so that the new permissions are set up correctly
                IScenePresence friendSP = friendClient.Scene.GetScenePresence(friendClient.AgentId);
#if (!ISWIN)
                foreach (ISceneEntity entity in friendClient.Scene.Entities.GetEntities())
                {
                    if (entity.OwnerID == userID)
                    {
                        entity.ScheduleGroupUpdateToAvatar(friendSP, PrimUpdateFlags.PrimFlags);
                    }
                }
#else
                foreach (ISceneEntity entity in friendClient.Scene.Entities.GetEntities().Where(entity => entity.OwnerID == userID))
                {
                    entity.ScheduleGroupUpdateToAvatar(friendSP, PrimUpdateFlags.PrimFlags);
                }
#endif
                return true;
            }
            return false;
        }

        #endregion

        #region Nested type: UserFriendData

        /// <summary>
        ///     Cache entry for one agent's friend list (see m_Friends).
        /// </summary>
        protected class UserFriendData
        {
            // Snapshot of the friend list as last fetched from the friends service.
            public FriendInfo[] Friends;
            // Agent this entry belongs to.
            public UUID PrincipalID;
            // NOTE(review): always written as 0 in UpdateFriendsCache and never
            // incremented — appears vestigial.
            public int Refcount;

            // True when the given friend identifier appears in this snapshot.
            public bool IsFriend(string friend)
            {
#if (!ISWIN)
                foreach (FriendInfo fi in Friends)
                {
                    if (fi.Friend == friend)
                        return true;
                }
                return false;
#else
                return Friends.Any(fi => fi.Friend == friend);
#endif
            }
        }

        #endregion
    }
}
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Collections.Generic; using System.Data.Common; using System.Diagnostics; using System.Threading; using System.Threading.Tasks; using System.Collections.Concurrent; namespace System.Data.ProviderBase { internal sealed class DbConnectionPool { private enum State { Initializing, Running, ShuttingDown, } private sealed class PendingGetConnection { public PendingGetConnection(long dueTime, DbConnection owner, TaskCompletionSource<DbConnectionInternal> completion, DbConnectionOptions userOptions) { DueTime = dueTime; Owner = owner; Completion = completion; } public long DueTime { get; private set; } public DbConnection Owner { get; private set; } public TaskCompletionSource<DbConnectionInternal> Completion { get; private set; } public DbConnectionOptions UserOptions { get; private set; } } private sealed class PoolWaitHandles { private readonly Semaphore _poolSemaphore; private readonly ManualResetEvent _errorEvent; // Using a Mutex requires ThreadAffinity because SQL CLR can swap // the underlying Win32 thread associated with a managed thread in preemptive mode. // Using an AutoResetEvent does not have that complication. 
private readonly Semaphore _creationSemaphore; private readonly WaitHandle[] _handlesWithCreate; private readonly WaitHandle[] _handlesWithoutCreate; internal PoolWaitHandles() { _poolSemaphore = new Semaphore(0, MAX_Q_SIZE); _errorEvent = new ManualResetEvent(false); _creationSemaphore = new Semaphore(1, 1); _handlesWithCreate = new WaitHandle[] { _poolSemaphore, _errorEvent, _creationSemaphore }; _handlesWithoutCreate = new WaitHandle[] { _poolSemaphore, _errorEvent }; } internal Semaphore CreationSemaphore { get { return _creationSemaphore; } } internal ManualResetEvent ErrorEvent { get { return _errorEvent; } } internal Semaphore PoolSemaphore { get { return _poolSemaphore; } } internal WaitHandle[] GetHandles(bool withCreate) { return withCreate ? _handlesWithCreate : _handlesWithoutCreate; } } private const int MAX_Q_SIZE = (int)0x00100000; // The order of these is important; we want the WaitAny call to be signaled // for a free object before a creation signal. Only the index first signaled // object is returned from the WaitAny call. 
private const int SEMAPHORE_HANDLE = (int)0x0; private const int ERROR_HANDLE = (int)0x1; private const int CREATION_HANDLE = (int)0x2; private const int BOGUS_HANDLE = (int)0x3; private const int ERROR_WAIT_DEFAULT = 5 * 1000; // 5 seconds // we do want a testable, repeatable set of generated random numbers private static readonly Random s_random = new Random(5101977); // Value obtained from Dave Driver private readonly int _cleanupWait; private readonly DbConnectionPoolIdentity _identity; private readonly DbConnectionFactory _connectionFactory; private readonly DbConnectionPoolGroup _connectionPoolGroup; private readonly DbConnectionPoolGroupOptions _connectionPoolGroupOptions; private DbConnectionPoolProviderInfo _connectionPoolProviderInfo; private State _state; private readonly ConcurrentStack<DbConnectionInternal> _stackOld = new ConcurrentStack<DbConnectionInternal>(); private readonly ConcurrentStack<DbConnectionInternal> _stackNew = new ConcurrentStack<DbConnectionInternal>(); private readonly ConcurrentQueue<PendingGetConnection> _pendingOpens = new ConcurrentQueue<PendingGetConnection>(); private int _pendingOpensWaiting = 0; private readonly WaitCallback _poolCreateRequest; private int _waitCount; private readonly PoolWaitHandles _waitHandles; private Exception _resError; private volatile bool _errorOccurred; private int _errorWait; private Timer _errorTimer; private Timer _cleanupTimer; private readonly List<DbConnectionInternal> _objectList; private int _totalObjects; // only created by DbConnectionPoolGroup.GetConnectionPool internal DbConnectionPool( DbConnectionFactory connectionFactory, DbConnectionPoolGroup connectionPoolGroup, DbConnectionPoolIdentity identity, DbConnectionPoolProviderInfo connectionPoolProviderInfo) { Debug.Assert(null != connectionPoolGroup, "null connectionPoolGroup"); if ((null != identity) && identity.IsRestricted) { throw ADP.InternalError(ADP.InternalErrorCode.AttemptingToPoolOnRestrictedToken); } _state = 
State.Initializing; lock (s_random) { // Random.Next is not thread-safe _cleanupWait = s_random.Next(12, 24) * 10 * 1000; // 2-4 minutes in 10 sec intervals } _connectionFactory = connectionFactory; _connectionPoolGroup = connectionPoolGroup; _connectionPoolGroupOptions = connectionPoolGroup.PoolGroupOptions; _connectionPoolProviderInfo = connectionPoolProviderInfo; _identity = identity; _waitHandles = new PoolWaitHandles(); _errorWait = ERROR_WAIT_DEFAULT; _errorTimer = null; // No error yet. _objectList = new List<DbConnectionInternal>(MaxPoolSize); _poolCreateRequest = new WaitCallback(PoolCreateRequest); // used by CleanupCallback _state = State.Running; //_cleanupTimer & QueuePoolCreateRequest is delayed until DbConnectionPoolGroup calls // StartBackgroundCallbacks after pool is actually in the collection } private int CreationTimeout { get { return PoolGroupOptions.CreationTimeout; } } internal int Count { get { return _totalObjects; } } internal DbConnectionFactory ConnectionFactory { get { return _connectionFactory; } } internal bool ErrorOccurred { get { return _errorOccurred; } } internal TimeSpan LoadBalanceTimeout { get { return PoolGroupOptions.LoadBalanceTimeout; } } private bool NeedToReplenish { get { if (State.Running != _state) // Don't allow connection create when not running. 
return false; int totalObjects = Count; if (totalObjects >= MaxPoolSize) return false; if (totalObjects < MinPoolSize) return true; int freeObjects = (_stackNew.Count + _stackOld.Count); int waitingRequests = _waitCount; bool needToReplenish = (freeObjects < waitingRequests) || ((freeObjects == waitingRequests) && (totalObjects > 1)); return needToReplenish; } } internal DbConnectionPoolIdentity Identity { get { return _identity; } } internal bool IsRunning { get { return State.Running == _state; } } private int MaxPoolSize { get { return PoolGroupOptions.MaxPoolSize; } } private int MinPoolSize { get { return PoolGroupOptions.MinPoolSize; } } internal DbConnectionPoolGroup PoolGroup { get { return _connectionPoolGroup; } } internal DbConnectionPoolGroupOptions PoolGroupOptions { get { return _connectionPoolGroupOptions; } } internal DbConnectionPoolProviderInfo ProviderInfo { get { return _connectionPoolProviderInfo; } } internal bool UseLoadBalancing { get { return PoolGroupOptions.UseLoadBalancing; } } private bool UsingIntegrateSecurity { get { return (null != _identity && DbConnectionPoolIdentity.NoIdentity != _identity); } } private void CleanupCallback(Object state) { // Called when the cleanup-timer ticks over. // This is the automatic pruning method. Every period, we will // perform a two-step process: // // First, for each free object above MinPoolSize, we will obtain a // semaphore representing one object and destroy one from old stack. // We will continue this until we either reach MinPoolSize, we are // unable to obtain a free object, or we have exhausted all the // objects on the old stack. // // Second we move all free objects on the new stack to the old stack. // So, every period the objects on the old stack are destroyed and // the objects on the new stack are pushed to the old stack. All // objects that are currently out and in use are not on either stack. 
    //
    // With this logic, objects are pruned from the pool if unused for
    // at least one period but not more than two periods.

    // Destroy free objects that put us above MinPoolSize from old stack.
    while (Count > MinPoolSize)
    { // While above MinPoolSize...
        // WaitOne(0) is a non-blocking probe: only proceed if a free slot exists.
        if (_waitHandles.PoolSemaphore.WaitOne(0))
        {
            // We obtained a objects from the semaphore.
            DbConnectionInternal obj;
            if (_stackOld.TryPop(out obj))
            {
                Debug.Assert(obj != null, "null connection is not expected");
                // If we obtained one from the old stack, destroy it.
                DestroyObject(obj);
            }
            else
            {
                // Else we exhausted the old stack (the object the
                // semaphore represents is on the new stack), so break.
                _waitHandles.PoolSemaphore.Release(1);
                break;
            }
        }
        else
        {
            break;
        }
    }

    // Push to the old-stack. For each free object, move object from
    // new stack to old stack.
    if (_waitHandles.PoolSemaphore.WaitOne(0))
    {
        for (;;)
        {
            DbConnectionInternal obj;
            if (!_stackNew.TryPop(out obj))
                break;
            Debug.Assert(obj != null, "null connection is not expected");
            Debug.Assert(!obj.IsEmancipated, "pooled object not in pool");
            Debug.Assert(obj.CanBePooled, "pooled object is not poolable");
            _stackOld.Push(obj);
        }
        _waitHandles.PoolSemaphore.Release(1);
    }

    // Queue up a request to bring us up to MinPoolSize
    QueuePoolCreateRequest();
}

// Dooms every tracked connection, disposes the free ones, and reclaims
// leaked (emancipated) connections so they get disposed too.
internal void Clear()
{
    DbConnectionInternal obj;

    // First, quickly doom everything.
    lock (_objectList)
    {
        int count = _objectList.Count;
        for (int i = 0; i < count; ++i)
        {
            obj = _objectList[i];
            if (null != obj)
            {
                obj.DoNotPoolThisConnection();
            }
        }
    }

    // Second, dispose of all the free connections.
= null; // set to null, so we do not return bad new object
// Failed to create instance
_resError = e;

// Make sure the timer starts even if ThreadAbort occurs after setting the ErrorEvent.

// timer allocation has to be done out of CER block
Timer t = new Timer(new TimerCallback(this.ErrorCallback), null, Timeout.Infinite, Timeout.Infinite);
bool timerIsNotDisposed;
// Empty try with all the work in finally: keeps the signal/enable sequence
// from being interrupted by a thread abort (CER-style pattern).
try { }
finally
{
    _waitHandles.ErrorEvent.Set();
    _errorOccurred = true;

    // Enable the timer.
    // Note that the timer is created to allow periodic invocation. If ThreadAbort occurs in the middle of ErrorCallback,
    // the timer will restart. Otherwise, the timer callback (ErrorCallback) destroys the timer after resetting the error to avoid second callback.
    _errorTimer = t;
    timerIsNotDisposed = t.Change(_errorWait, _errorWait);
}

Debug.Assert(timerIsNotDisposed, "ErrorCallback timer has been disposed");

// Exponential backoff for the error wait, capped at 60s once past 30s.
if (30000 < _errorWait)
{
    _errorWait = 60000;
}
else
{
    _errorWait *= 2;
}
throw;
}
return newObj;
}

// Returns a connection to the pool (or destroys it), after deactivation.
// Exactly one of returnToGeneralPool / destroyObject is chosen.
private void DeactivateObject(DbConnectionInternal obj)
{
    obj.DeactivateConnection();

    bool returnToGeneralPool = false;
    bool destroyObject = false;

    if (obj.IsConnectionDoomed)
    {
        // the object is not fit for reuse -- just dispose of it.
        destroyObject = true;
    }
    else
    {
        // NOTE: constructor should ensure that current state cannot be State.Initializing, so it can only
        // be State.Running or State.ShuttingDown
        Debug.Assert(_state == State.Running || _state == State.ShuttingDown);

        lock (obj)
        {
            // A connection with a delegated transaction cannot currently
            // be returned to a different customer until the transaction
            // actually completes, so we send it into Stasis -- the SysTx
            // transaction object will ensure that it is owned (not lost),
            // and it will be certain to put it back into the pool.

            if (_state == State.ShuttingDown)
            {
                // connection is being closed and the pool has been marked as shutting
                // down, so destroy this object.
                destroyObject = true;
            }
            else
            {
                if (obj.CanBePooled)
                {
                    // We must put this connection into the transacted pool
                    // while inside a lock to prevent a race condition with
                    // the transaction asynchronously completing on a second
                    // thread.

                    // return to general pool
                    returnToGeneralPool = true;
                }
                else
                {
                    // object is not fit for reuse -- just dispose of it
                    destroyObject = true;
                }
            }
        }
    }

    if (returnToGeneralPool)
    {
        // Only push the connection into the general pool if we didn't
        // already push it onto the transacted pool, put it into stasis,
        // or want to destroy it.
        Debug.Assert(destroyObject == false);
        PutNewObject(obj);
    }
    else if (destroyObject)
    {
        DestroyObject(obj);
        // A destroyed connection may leave us under quota; ask for a refill.
        QueuePoolCreateRequest();
    }

    //-------------------------------------------------------------------------------------
    // postcondition

    // ensure that the connection was processed
    Debug.Assert(returnToGeneralPool == true || destroyObject == true);
}

// Removes the connection from the tracked list and disposes it.
internal void DestroyObject(DbConnectionInternal obj)
{
    // A connection with a delegated transaction cannot be disposed of
    // until the delegated transaction has actually completed. Instead,
    // we simply leave it alone; when the transaction completes, it will
    // come back through PutObjectFromTransactedPool, which will call us
    // again.
    bool removed = false;
    lock (_objectList)
    {
        removed = _objectList.Remove(obj);
        Debug.Assert(removed, "attempt to DestroyObject not in list");
        _totalObjects = _objectList.Count;
    }

    // NOTE(review): intentionally empty — presumably left over from removed
    // diagnostics/performance counters; TODO confirm before deleting.
    if (removed) { }
    obj.Dispose();
}

// Timer callback armed by CreateObject's error path: clears the error
// state after the backoff interval and disposes the one-shot error timer.
private void ErrorCallback(Object state)
{
    _errorOccurred = false;
    _waitHandles.ErrorEvent.Reset();

    // the error state is cleaned, destroy the timer to avoid periodic invocation
    Timer t = _errorTimer;
    _errorTimer = null;
    if (t != null)
    {
        t.Dispose(); // Cancel timer request.
    }
}

// TODO: move this to src/Common and integrate with SqlClient
// Note: Odbc connections are not passing through this code
// NOTE(review): despite the name, this returns the cached exception
// instance itself — no cloning happens here.
private Exception TryCloneCachedException()
{
    return _resError;
}

// Drains the queue of pending async open requests, completing each
// TaskCompletionSource with a connection, a timeout, or an exception.
// _pendingOpensWaiting acts as a single-worker gate.
private void WaitForPendingOpen()
{
    PendingGetConnection next;

    do
    {
        bool started = false;

        try
        {
            // Empty try with the work in finally: guard the gate acquisition
            // against thread aborts so the outer finally can release it reliably.
            try { }
            finally
            {
                started = Interlocked.CompareExchange(ref _pendingOpensWaiting, 1, 0) == 0;
            }

            if (!started)
            {
                // Another thread is already servicing the queue.
                return;
            }

            while (_pendingOpens.TryDequeue(out next))
            {
                if (next.Completion.Task.IsCompleted)
                {
                    continue;
                }

                // Remaining wait budget for this request, in milliseconds.
                uint delay;
                if (next.DueTime == Timeout.Infinite)
                {
                    delay = unchecked((uint)Timeout.Infinite);
                }
                else
                {
                    delay = (uint)Math.Max(ADP.TimerRemainingMilliseconds(next.DueTime), 0);
                }

                DbConnectionInternal connection = null;
                bool timeout = false;
                Exception caughtException = null;

                try
                {
                    bool allowCreate = true;
                    bool onlyOneCheckConnection = false;
                    timeout = !TryGetConnection(next.Owner, delay, allowCreate, onlyOneCheckConnection, next.UserOptions, out connection);
                }
                catch (Exception e)
                {
                    caughtException = e;
                }

                if (caughtException != null)
                {
                    next.Completion.TrySetException(caughtException);
                }
                else if (timeout)
                {
                    next.Completion.TrySetException(ADP.ExceptionWithStackTrace(ADP.PooledOpenTimeout()));
                }
                else
                {
                    Debug.Assert(connection != null, "connection should never be null in success case");
                    if (!next.Completion.TrySetResult(connection))
                    {
                        // if the completion was cancelled, lets try and get this connection back for the next try
                        PutObject(connection, next.Owner);
                    }
                }
            }
        }
        finally
        {
            if (started)
            {
                Interlocked.Exchange(ref _pendingOpensWaiting, 0);
            }
        }
        // Loop again if new requests arrived while we were releasing the gate.
    } while (_pendingOpens.TryPeek(out next));
}

// Public entry point. Returns true when the request is resolved (connection,
// error, or sync timeout — 'connection' may be null); returns false when the
// request was queued for async completion via 'retry'.
internal bool TryGetConnection(DbConnection owningObject, TaskCompletionSource<DbConnectionInternal> retry, DbConnectionOptions userOptions, out DbConnectionInternal connection)
{
    uint waitForMultipleObjectsTimeout = 0;
    bool allowCreate = false;

    if (retry == null)
    {
        waitForMultipleObjectsTimeout = (uint)CreationTimeout;

        // Set the wait timeout to INFINITE (-1) if the SQL connection
        // timeout is 0 (== infinite)
        if (waitForMultipleObjectsTimeout == 0)
            waitForMultipleObjectsTimeout = unchecked((uint)Timeout.Infinite);

        allowCreate = true;
    }

    if (_state != State.Running)
    {
        // Pool not running: resolved with a null connection.
        connection = null;
        return true;
    }

    bool onlyOneCheckConnection = true;
    if (TryGetConnection(owningObject, waitForMultipleObjectsTimeout, allowCreate, onlyOneCheckConnection, userOptions, out connection))
    {
        return true;
    }
    else if (retry == null)
    {
        // timed out on a sync call
        return true;
    }

    // Async path: queue the request for WaitForPendingOpen to service.
    var pendingGetConnection = new PendingGetConnection(
        CreationTimeout == 0 ? Timeout.Infinite : ADP.TimerCurrent() + ADP.TimerFromSeconds(CreationTimeout / 1000),
        owningObject,
        retry,
        userOptions);
    _pendingOpens.Enqueue(pendingGetConnection);

    // it is better to StartNew too many times than not enough
    if (_pendingOpensWaiting == 0)
    {
        Thread waitOpenThread = new Thread(WaitForPendingOpen);
        waitOpenThread.IsBackground = true;
        waitOpenThread.Start();
    }

    connection = null;
    return false;
}

// Core acquisition loop: waits on the pool's handle set (free-object
// semaphore / error event / creation semaphore) until a connection is
// obtained or the timeout elapses.
private bool TryGetConnection(DbConnection owningObject, uint waitForMultipleObjectsTimeout, bool allowCreate, bool onlyOneCheckConnection, DbConnectionOptions userOptions, out DbConnectionInternal connection)
{
    DbConnectionInternal obj = null;

    if (null == obj)
    {
        // _waitCount feeds the NeedToReplenish heuristic.
        Interlocked.Increment(ref _waitCount);

        do
        {
            int waitResult = BOGUS_HANDLE;
            try
            {
                // Empty try/finally so the wait result is recorded even under
                // a thread abort; the outer finally relies on it to release
                // the creation semaphore.
                try { }
                finally
                {
                    waitResult = WaitHandle.WaitAny(_waitHandles.GetHandles(allowCreate), unchecked((int)waitForMultipleObjectsTimeout));
                }

                // From the WaitAny docs: "If more than one object became signaled during
                // the call, this is the array index of the signaled object with the
                // smallest index value of all the signaled objects." This is important
                // so that the free object signal will be returned before a creation
                // signal.
                switch (waitResult)
                {
                    case WaitHandle.WaitTimeout:
                        Interlocked.Decrement(ref _waitCount);
                        connection = null;
                        return false;

                    case ERROR_HANDLE:
                        // Throw the error that PoolCreateRequest stashed.
                        Interlocked.Decrement(ref _waitCount);
                        throw TryCloneCachedException();

                    case CREATION_HANDLE:
                        try
                        {
                            obj = UserCreateRequest(owningObject, userOptions);
                        }
                        catch
                        {
                            if (null == obj)
                            {
                                Interlocked.Decrement(ref _waitCount);
                            }
                            throw;
                        }
                        finally
                        {
                            // Ensure that we release this waiter, regardless
                            // of any exceptions that may be thrown.
                            if (null != obj)
                            {
                                Interlocked.Decrement(ref _waitCount);
                            }
                        }

                        if (null == obj)
                        {
                            // If we were not able to create an object, check to see if
                            // we reached MaxPoolSize. If so, we will no longer wait on
                            // the CreationHandle, but instead wait for a free object or
                            // the timeout.
                            if (Count >= MaxPoolSize && 0 != MaxPoolSize)
                            {
                                if (!ReclaimEmancipatedObjects())
                                {
                                    // modify handle array not to wait on creation mutex anymore
                                    Debug.Assert(2 == CREATION_HANDLE, "creation handle changed value");
                                    allowCreate = false;
                                }
                            }
                        }
                        break;

                    case SEMAPHORE_HANDLE:
                        //
                        //    guaranteed available inventory
                        //
                        Interlocked.Decrement(ref _waitCount);
                        obj = GetFromGeneralPool();

                        // A pooled connection may have died while idle; check it once
                        // and, if dead, replace it under the creation semaphore.
                        if ((obj != null) && (!obj.IsConnectionAlive()))
                        {
                            DestroyObject(obj);
                            obj = null; // Setting to null in case creating a new object fails
                            if (onlyOneCheckConnection)
                            {
                                if (_waitHandles.CreationSemaphore.WaitOne(unchecked((int)waitForMultipleObjectsTimeout)))
                                {
                                    try
                                    {
                                        obj = UserCreateRequest(owningObject, userOptions);
                                    }
                                    finally
                                    {
                                        _waitHandles.CreationSemaphore.Release(1);
                                    }
                                }
                                else
                                {
                                    // Timeout waiting for creation semaphore - return null
                                    connection = null;
                                    return false;
                                }
                            }
                        }
                        break;

                    default:
                        Interlocked.Decrement(ref _waitCount);
                        throw ADP.InternalError(ADP.InternalErrorCode.UnexpectedWaitAnyResult);
                }
            }
            finally
            {
                if (CREATION_HANDLE == waitResult)
                {
                    _waitHandles.CreationSemaphore.Release(1);
                }
            }
        } while (null == obj);
    }

    if (null != obj)
    {
        PrepareConnection(owningObject, obj);
    }

    connection = obj;
    return true;
}

// Marks the connection as checked out and activates it; on activation
// failure the connection is handed back to the pool before rethrowing.
private void PrepareConnection(DbConnection owningObject, DbConnectionInternal obj)
{
    lock (obj)
    {
        // Protect against Clear and ReclaimEmancipatedObjects, which call IsEmancipated, which is
        // affected by PrePush and PostPop
        obj.PostPop(owningObject);
    }
    try
    {
        obj.ActivateConnection();
    }
    catch
    {
        // if Activate throws an exception
        // put it back in the pool or have it properly disposed of
        this.PutObject(obj, owningObject);
        throw;
    }
}

/// <summary>
/// Creates a new connection to replace an existing connection
/// </summary>
/// <param name="owningObject">Outer connection that currently owns <paramref name="oldConnection"/></param>
/// <param name="userOptions">Options used to create the new connection</param>
/// <param name="oldConnection">Inner connection that will be replaced</param>
/// <returns>A new inner connection that is attached to the <paramref name="owningObject"/></returns>
internal DbConnectionInternal ReplaceConnection(DbConnection owningObject, DbConnectionOptions userOptions, DbConnectionInternal oldConnection)
{
    DbConnectionInternal newConnection = UserCreateRequest(owningObject, userOptions, oldConnection);

    if (newConnection != null)
    {
        PrepareConnection(owningObject, newConnection);
        oldConnection.PrepareForReplaceConnection();
        oldConnection.DeactivateConnection();
        oldConnection.Dispose();
    }

    return newConnection;
}

// Pops a free connection, preferring the new stack over the old one.
// Returns null when both stacks are empty.
private DbConnectionInternal GetFromGeneralPool()
{
    DbConnectionInternal obj = null;

    if (!_stackNew.TryPop(out obj))
    {
        if (!_stackOld.TryPop(out obj))
        {
            obj = null;
        }
        else
        {
            Debug.Assert(obj != null, "null connection is not expected");
        }
    }
    else
    {
        Debug.Assert(obj != null, "null connection is not expected");
    }

    // When another thread is clearing this pool,
    // it will remove all connections in this pool which causes the
    // following assert to fire, which really mucks up stress against
    // checked bits.
    // NOTE(review): intentionally empty — presumably the assert described
    // above was removed and this guard remained; TODO confirm.
    if (null != obj) { }
    return (obj);
}

// ThreadPool callback that keeps the pool at quota (MinPoolSize /
// demand-driven replenishment). Runs without holding the creation mutex
// on entry; acquires it via WaitAny below.
private void PoolCreateRequest(object state)
{
    // called by pooler to ensure pool requests are currently being satisfied -
    // creation mutex has not been obtained

    if (State.Running == _state)
    {
        // in case WaitForPendingOpen ever failed with no subsequent OpenAsync calls,
        // start it back up again
        if (!_pendingOpens.IsEmpty && _pendingOpensWaiting == 0)
        {
            Thread waitOpenThread = new Thread(WaitForPendingOpen);
            waitOpenThread.IsBackground = true;
            waitOpenThread.Start();
        }

        // Before creating any new objects, reclaim any released objects that were
        // not closed.
        ReclaimEmancipatedObjects();

        if (!ErrorOccurred)
        {
            if (NeedToReplenish)
            {
                // Check to see if pool was created using integrated security and if so, make
                // sure the identity of current user matches that of user that created pool.
                // If it doesn't match, do not create any objects on the ThreadPool thread,
                // since either Open will fail or we will open a object for this pool that does
                // not belong in this pool. The side effect of this is that if using integrated
                // security min pool size cannot be guaranteed.
                if (UsingIntegrateSecurity && !_identity.Equals(DbConnectionPoolIdentity.GetCurrent()))
                {
                    return;
                }
                int waitResult = BOGUS_HANDLE;
                try
                {
                    // Empty try/finally: record the wait result even under a
                    // thread abort so the outer finally can release correctly.
                    try { }
                    finally
                    {
                        waitResult = WaitHandle.WaitAny(_waitHandles.GetHandles(withCreate: true), CreationTimeout);
                    }
                    if (CREATION_HANDLE == waitResult)
                    {
                        DbConnectionInternal newObj;

                        // Check ErrorOccurred again after obtaining mutex
                        if (!ErrorOccurred)
                        {
                            while (NeedToReplenish)
                            {
                                // Don't specify any user options because there is no outer connection associated with the new connection
                                newObj = CreateObject(owningObject: null, userOptions: null, oldConnection: null);

                                // We do not need to check error flag here, since we know if
                                // CreateObject returned null, we are in error case.
                                if (null != newObj)
                                {
                                    PutNewObject(newObj);
                                }
                                else
                                {
                                    break;
                                }
                            }
                        }
                    }
                    else if (WaitHandle.WaitTimeout == waitResult)
                    {
                        // do not wait forever and potential block this worker thread
                        // instead wait for a period of time and just requeue to try again
                        QueuePoolCreateRequest();
                    }
                }
                finally
                {
                    if (CREATION_HANDLE == waitResult)
                    {
                        // reuse waitResult and ignore its value
                        _waitHandles.CreationSemaphore.Release(1);
                    }
                }
            }
        }
    }
}

// Pushes a free connection onto the new stack and signals one waiter.
internal void PutNewObject(DbConnectionInternal obj)
{
    Debug.Assert(null != obj, "why are we adding a null object to the pool?");

    // Debug.Assert(obj.CanBePooled,    "non-poolable object in pool");

    _stackNew.Push(obj);
    _waitHandles.PoolSemaphore.Release(1);
}

// Returns a checked-out connection to the pool (or destroys it).
internal void PutObject(DbConnectionInternal obj, object owningObject)
{
    Debug.Assert(null != obj, "null obj?");

    // Once a connection is closing (which is the state that we're in at
    // this point in time) you cannot delegate a transaction to or enlist
    // a transaction in it, so we can correctly presume that if there was
    // not a delegated or enlisted transaction to start with, that there
    // will not be a delegated or enlisted transaction once we leave the
    // lock.

    lock (obj)
    {
        // Calling PrePush prevents the object from being reclaimed
        // once we leave the lock, because it sets _pooledCount such
        // that it won't appear to be out of the pool.  What that
        // means, is that we're now responsible for this connection:
        // it won't get reclaimed if we drop the ball somewhere.
        obj.PrePush(owningObject);
    }

    DeactivateObject(obj);
}

// Schedules PoolCreateRequest on the thread pool (no-op unless running).
private void QueuePoolCreateRequest()
{
    if (State.Running == _state)
    {
        // Make sure we're at quota by posting a callback to the threadpool.
        ThreadPool.QueueUserWorkItem(_poolCreateRequest);
    }
}

// Finds connections whose owners leaked them (emancipated: checked out but
// no longer referenced by an outer connection) and returns them to the pool.
// Returns true when at least one connection was reclaimed.
private bool ReclaimEmancipatedObjects()
{
    bool emancipatedObjectFound = false;

    List<DbConnectionInternal> reclaimedObjects = new List<DbConnectionInternal>();
    int count;

    lock (_objectList)
    {
        count = _objectList.Count;

        for (int i = 0; i < count; ++i)
        {
            DbConnectionInternal obj = _objectList[i];

            if (null != obj)
            {
                bool locked = false;

                try
                {
                    // Non-blocking: skip connections currently locked by
                    // another thread rather than stalling the whole scan.
                    Monitor.TryEnter(obj, ref locked);

                    if (locked)
                    { // avoid race condition with PrePush/PostPop and IsEmancipated
                        if (obj.IsEmancipated)
                        {
                            // Inside the lock, we want to do as little
                            // as possible, so we simply mark the object
                            // as being in the pool, but hand it off to
                            // an out of pool list to be deactivated,
                            // etc.
                            obj.PrePush(null);
                            reclaimedObjects.Add(obj);
                        }
                    }
                }
                finally
                {
                    if (locked)
                        Monitor.Exit(obj);
                }
            }
        }
    }

    // NOTE: we don't want to call DeactivateObject while we're locked,
    // because it can make roundtrips to the server and this will block
    // object creation in the pooler.  Instead, we queue things we need
    // to do up, and process them outside the lock.
    count = reclaimedObjects.Count;

    for (int i = 0; i < count; ++i)
    {
        DbConnectionInternal obj = reclaimedObjects[i];
        emancipatedObjectFound = true;
        DeactivateObject(obj);
    }
    return emancipatedObjectFound;
}

// Called after the pool is registered in its group: arms the cleanup
// timer and kicks off initial population if needed.
internal void Startup()
{
    _cleanupTimer = CreateCleanupTimer();
    if (NeedToReplenish)
    {
        QueuePoolCreateRequest();
    }
}

// Transitions the pool to ShuttingDown and disarms the cleanup timer.
internal void Shutdown()
{
    _state = State.ShuttingDown;

    // deactivate timer callbacks
    Timer t = _cleanupTimer;
    _cleanupTimer = null;
    if (null != t)
    {
        t.Dispose();
    }
}

// Creates a connection on behalf of a user request while the caller holds
// the creation mutex. Returns null when at quota and nothing was reclaimed.
private DbConnectionInternal UserCreateRequest(DbConnection owningObject, DbConnectionOptions userOptions, DbConnectionInternal oldConnection = null)
{
    // called by user when they were not able to obtain a free object but
    // instead obtained creation mutex

    DbConnectionInternal obj = null;
    if (ErrorOccurred)
    {
        throw TryCloneCachedException();
    }
    else
    {
        if ((oldConnection != null) || (Count < MaxPoolSize) || (0 == MaxPoolSize))
        {
            // If we have an odd number of total objects, reclaim any dead objects.
            // If we did not find any objects to reclaim, create a new one.
            if ((oldConnection != null) || (Count & 0x1) == 0x1 || !ReclaimEmancipatedObjects())
                obj = CreateObject(owningObject, userOptions, oldConnection);
        }
        return obj;
    }
}
}
}
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Formatting;
using System.Net.Http.Headers;
using System.Web.Http.Description;
using System.Xml.Linq;
using Newtonsoft.Json;

namespace SiaqodbCloudService.Areas.HelpPage
{
    /// <summary>
    /// This class will generate the samples for the help page.
    /// </summary>
    public class HelpPageSampleGenerator
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="HelpPageSampleGenerator"/> class.
        /// </summary>
        public HelpPageSampleGenerator()
        {
            ActualHttpMessageTypes = new Dictionary<HelpPageSampleKey, Type>();
            ActionSamples = new Dictionary<HelpPageSampleKey, object>();
            SampleObjects = new Dictionary<Type, object>();
            // DefaultSampleObjectFactory is the built-in fallback; callers may
            // insert higher-priority factories in front of it.
            SampleObjectFactories = new List<Func<HelpPageSampleGenerator, Type, object>>
            {
                DefaultSampleObjectFactory,
            };
        }

        /// <summary>
        /// Gets CLR types that are used as the content of <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/>.
        /// </summary>
        public IDictionary<HelpPageSampleKey, Type> ActualHttpMessageTypes { get; internal set; }

        /// <summary>
        /// Gets the objects that are used directly as samples for certain actions.
        /// </summary>
        public IDictionary<HelpPageSampleKey, object> ActionSamples { get; internal set; }

        /// <summary>
        /// Gets the objects that are serialized as samples by the supported formatters.
        /// </summary>
        public IDictionary<Type, object> SampleObjects { get; internal set; }

        /// <summary>
        /// Gets factories for the objects that the supported formatters will serialize as samples. Processed in order,
        /// stopping when the factory successfully returns a non-<see langref="null"/> object.
        /// </summary>
        /// <remarks>
        /// Collection includes just <see cref="ObjectGenerator.GenerateObject(Type)"/> initially. Use
        /// <code>SampleObjectFactories.Insert(0, func)</code> to provide an override and
        /// <code>SampleObjectFactories.Add(func)</code> to provide a fallback.</remarks>
        [SuppressMessage("Microsoft.Design", "CA1006:DoNotNestGenericTypesInMemberSignatures", Justification = "This is an appropriate nesting of generic types")]
        public IList<Func<HelpPageSampleGenerator, Type, object>> SampleObjectFactories { get; private set; }

        /// <summary>
        /// Gets the request body samples for a given <see cref="ApiDescription"/>.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <returns>The samples keyed by media type.</returns>
        public IDictionary<MediaTypeHeaderValue, object> GetSampleRequests(ApiDescription api)
        {
            return GetSample(api, SampleDirection.Request);
        }

        /// <summary>
        /// Gets the response body samples for a given <see cref="ApiDescription"/>.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <returns>The samples keyed by media type.</returns>
        public IDictionary<MediaTypeHeaderValue, object> GetSampleResponses(ApiDescription api)
        {
            return GetSample(api, SampleDirection.Response);
        }

        /// <summary>
        /// Gets the request or response body samples.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
        /// <returns>The samples keyed by media type.</returns>
        public virtual IDictionary<MediaTypeHeaderValue, object> GetSample(ApiDescription api, SampleDirection sampleDirection)
        {
            if (api == null)
            {
                throw new ArgumentNullException("api");
            }
            string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
            string actionName = api.ActionDescriptor.ActionName;
            IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
            Collection<MediaTypeFormatter> formatters;
            Type type = ResolveType(api, controllerName, actionName, parameterNames, sampleDirection, out formatters);
            var samples = new Dictionary<MediaTypeHeaderValue, object>();

            // Use the samples provided directly for actions
            var actionSamples = GetAllActionSamples(controllerName, actionName, parameterNames, sampleDirection);
            foreach (var actionSample in actionSamples)
            {
                samples.Add(actionSample.Key.MediaType, WrapSampleIfString(actionSample.Value));
            }

            // Do the sample generation based on formatters only if an action doesn't return an HttpResponseMessage.
            // Here we cannot rely on formatters because we don't know what's in the HttpResponseMessage, it might not even use formatters.
            if (type != null && !typeof(HttpResponseMessage).IsAssignableFrom(type))
            {
                object sampleObject = GetSampleObject(type);
                foreach (var formatter in formatters)
                {
                    foreach (MediaTypeHeaderValue mediaType in formatter.SupportedMediaTypes)
                    {
                        // Directly-provided action samples (added above) win over
                        // generated ones for the same media type.
                        if (!samples.ContainsKey(mediaType))
                        {
                            object sample = GetActionSample(controllerName, actionName, parameterNames, type, formatter, mediaType, sampleDirection);

                            // If no sample found, try generate sample using formatter and sample object
                            if (sample == null && sampleObject != null)
                            {
                                sample = WriteSampleObjectUsingFormatter(formatter, sampleObject, type, mediaType);
                            }

                            samples.Add(mediaType, WrapSampleIfString(sample));
                        }
                    }
                }
            }

            return samples;
        }

        /// <summary>
        /// Search for samples that are provided directly through <see cref="ActionSamples"/>.
        /// </summary>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        /// <param name="type">The CLR type.</param>
        /// <param name="formatter">The formatter.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
        /// <returns>The sample that matches the parameters.</returns>
        public virtual object GetActionSample(string controllerName, string actionName, IEnumerable<string> parameterNames, Type type, MediaTypeFormatter formatter, MediaTypeHeaderValue mediaType, SampleDirection sampleDirection)
        {
            object sample;

            // First, try to get the sample provided for the specified mediaType, sampleDirection, controllerName, actionName and parameterNames.
            // If not found, try to get the sample provided for the specified mediaType, sampleDirection, controllerName and actionName regardless of the parameterNames.
            // If still not found, try to get the sample provided for the specified mediaType and type.
            // Finally, try to get the sample provided for the specified mediaType.
            if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, parameterNames), out sample) ||
                ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, new[] { "*" }), out sample) ||
                ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, type), out sample) ||
                ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType), out sample))
            {
                return sample;
            }

            return null;
        }

        /// <summary>
        /// Gets the sample object that will be serialized by the formatters.
        /// First, it will look at the <see cref="SampleObjects"/>. If no sample object is found, it will try to create
        /// one using <see cref="DefaultSampleObjectFactory"/> (which wraps an <see cref="ObjectGenerator"/>) and other
        /// factories in <see cref="SampleObjectFactories"/>.
        /// </summary>
        /// <param name="type">The type.</param>
        /// <returns>The sample object.</returns>
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Even if all items in SampleObjectFactories throw, problem will be visible as missing sample.")]
        public virtual object GetSampleObject(Type type)
        {
            object sampleObject;

            if (!SampleObjects.TryGetValue(type, out sampleObject))
            {
                // No specific object available, try our factories.
                foreach (Func<HelpPageSampleGenerator, Type, object> factory in SampleObjectFactories)
                {
                    if (factory == null)
                    {
                        continue;
                    }

                    try
                    {
                        sampleObject = factory(this, type);
                        if (sampleObject != null)
                        {
                            break;
                        }
                    }
                    catch
                    {
                        // Ignore any problems encountered in the factory; go on to the next one (if any).
                    }
                }
            }

            return sampleObject;
        }

        /// <summary>
        /// Resolves the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <returns>The type.</returns>
        public virtual Type ResolveHttpRequestMessageType(ApiDescription api)
        {
            string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
            string actionName = api.ActionDescriptor.ActionName;
            IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
            Collection<MediaTypeFormatter> formatters;
            return ResolveType(api, controllerName, actionName, parameterNames, SampleDirection.Request, out formatters);
        }

        /// <summary>
        /// Resolves the type of the action parameter or return value when <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/> is used.
        /// </summary>
        /// <param name="api">The <see cref="ApiDescription"/>.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        /// <param name="sampleDirection">The value indicating whether the sample is for a request or a response.</param>
        /// <param name="formatters">The formatters.</param>
        [SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters", Justification = "This is only used in advanced scenarios.")]
        public virtual Type ResolveType(ApiDescription api, string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection, out Collection<MediaTypeFormatter> formatters)
        {
            if (!Enum.IsDefined(typeof(SampleDirection), sampleDirection))
            {
                throw new InvalidEnumArgumentException("sampleDirection", (int)sampleDirection, typeof(SampleDirection));
            }
            if (api == null)
            {
                throw new ArgumentNullException("api");
            }
            Type type;
            // An explicitly registered message type (exact parameter match, then
            // the "*" wildcard) overrides what the action signature declares.
            if (ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, parameterNames), out type) ||
                ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, new[] { "*" }), out type))
            {
                // Re-compute the supported formatters based on type
                Collection<MediaTypeFormatter> newFormatters = new Collection<MediaTypeFormatter>();
                foreach (var formatter in api.ActionDescriptor.Configuration.Formatters)
                {
                    if (IsFormatSupported(sampleDirection, formatter, type))
                    {
                        newFormatters.Add(formatter);
                    }
                }
                formatters = newFormatters;
            }
            else
            {
                switch (sampleDirection)
                {
                    case SampleDirection.Request:
                        ApiParameterDescription requestBodyParameter = api.ParameterDescriptions.FirstOrDefault(p => p.Source == ApiParameterSource.FromBody);
                        type = requestBodyParameter == null ? null : requestBodyParameter.ParameterDescriptor.ParameterType;
                        formatters = api.SupportedRequestBodyFormatters;
                        break;
                    case SampleDirection.Response:
                    default:
                        type = api.ResponseDescription.ResponseType ?? api.ResponseDescription.DeclaredType;
                        formatters = api.SupportedResponseFormatters;
                        break;
                }
            }

            return type;
        }

        /// <summary>
        /// Writes the sample object using formatter.
        /// </summary>
        /// <param name="formatter">The formatter.</param>
        /// <param name="value">The value.</param>
        /// <param name="type">The type.</param>
        /// <param name="mediaType">Type of the media.</param>
        /// <returns>A <c>TextSample</c> on success, or an <c>InvalidSample</c> describing the failure.</returns>
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as InvalidSample.")]
        public virtual object WriteSampleObjectUsingFormatter(MediaTypeFormatter formatter, object value, Type type, MediaTypeHeaderValue mediaType)
        {
            if (formatter == null)
            {
                throw new ArgumentNullException("formatter");
            }
            if (mediaType == null)
            {
                throw new ArgumentNullException("mediaType");
            }

            object sample = String.Empty;
            MemoryStream ms = null;
            HttpContent content = null;
            try
            {
                if (formatter.CanWriteType(type))
                {
                    ms = new MemoryStream();
                    content = new ObjectContent(type, value, formatter, mediaType);
                    // Synchronous wait is acceptable here: this runs at help-page
                    // generation time, not on a request hot path.
                    formatter.WriteToStreamAsync(type, value, ms, content, null).Wait();
                    ms.Position = 0;
                    StreamReader reader = new StreamReader(ms);
                    string serializedSampleString = reader.ReadToEnd();
                    // Pretty-print known text formats before display.
                    if (mediaType.MediaType.ToUpperInvariant().Contains("XML"))
                    {
                        serializedSampleString = TryFormatXml(serializedSampleString);
                    }
                    else if (mediaType.MediaType.ToUpperInvariant().Contains("JSON"))
                    {
                        serializedSampleString = TryFormatJson(serializedSampleString);
                    }

                    sample = new TextSample(serializedSampleString);
                }
                else
                {
                    sample = new InvalidSample(String.Format(
                        CultureInfo.CurrentCulture,
                        "Failed to generate the sample for media type '{0}'. Cannot use formatter '{1}' to write type '{2}'.",
                        mediaType,
                        formatter.GetType().Name,
                        type.Name));
                }
            }
            catch (Exception e)
            {
                sample = new InvalidSample(String.Format(
                    CultureInfo.CurrentCulture,
                    "An exception has occurred while using the formatter '{0}' to generate sample for media type '{1}'. Exception message: {2}",
                    formatter.GetType().Name,
                    mediaType.MediaType,
                    UnwrapException(e).Message));
            }
            finally
            {
                if (ms != null)
                {
                    ms.Dispose();
                }
                if (content != null)
                {
                    content.Dispose();
                }
            }

            return sample;
        }

        // Unwraps an AggregateException (e.g. from Task.Wait) to its first inner exception.
        internal static Exception UnwrapException(Exception exception)
        {
            AggregateException aggregateException = exception as AggregateException;
            if (aggregateException != null)
            {
                return aggregateException.Flatten().InnerException;
            }
            return exception;
        }

        // Default factory for sample objects
        private static object DefaultSampleObjectFactory(HelpPageSampleGenerator sampleGenerator, Type type)
        {
            // Try to create a default sample object
            ObjectGenerator objectGenerator = new ObjectGenerator();
            return objectGenerator.GenerateObject(type);
        }

        // Best-effort JSON pretty-printer; returns the input unchanged if it cannot be parsed.
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
        private static string TryFormatJson(string str)
        {
            try
            {
                object parsedJson = JsonConvert.DeserializeObject(str);
                return JsonConvert.SerializeObject(parsedJson, Formatting.Indented);
            }
            catch
            {
                // can't parse JSON, return the original string
                return str;
            }
        }

        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
Justification = "Handling the failure by returning the original string.")] private static string TryFormatXml(string str) { try { XDocument xml = XDocument.Parse(str); return xml.ToString(); } catch { // can't parse XML, return the original string return str; } } private static bool IsFormatSupported(SampleDirection sampleDirection, MediaTypeFormatter formatter, Type type) { switch (sampleDirection) { case SampleDirection.Request: return formatter.CanReadType(type); case SampleDirection.Response: return formatter.CanWriteType(type); } return false; } private IEnumerable<KeyValuePair<HelpPageSampleKey, object>> GetAllActionSamples(string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection) { HashSet<string> parameterNamesSet = new HashSet<string>(parameterNames, StringComparer.OrdinalIgnoreCase); foreach (var sample in ActionSamples) { HelpPageSampleKey sampleKey = sample.Key; if (String.Equals(controllerName, sampleKey.ControllerName, StringComparison.OrdinalIgnoreCase) && String.Equals(actionName, sampleKey.ActionName, StringComparison.OrdinalIgnoreCase) && (sampleKey.ParameterNames.SetEquals(new[] { "*" }) || parameterNamesSet.SetEquals(sampleKey.ParameterNames)) && sampleDirection == sampleKey.SampleDirection) { yield return sample; } } } private static object WrapSampleIfString(object sample) { string stringSample = sample as string; if (stringSample != null) { return new TextSample(stringSample); } return sample; } } }
#region Copyright notice and license
// Copyright 2015 gRPC authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#endregion

using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;

using Grpc.Core.Internal;
using Grpc.Core.Logging;
using Grpc.Core.Utils;

namespace Grpc.Core
{
    /// <summary>
    /// Represents a gRPC channel. Channels are an abstraction of long-lived connections to remote servers.
    /// More client objects can reuse the same channel. Creating a channel is an expensive operation compared to invoking
    /// a remote call so in general you should reuse a single channel for as many calls as possible.
    /// </summary>
    public class Channel
    {
        static readonly ILogger Logger = GrpcEnvironment.Logger.ForType<Channel>();

        // Guards access to the native channel handle (creation, watch, dispose).
        readonly object myLock = new object();
        readonly AtomicCounter activeCallCounter = new AtomicCounter();
        readonly CancellationTokenSource shutdownTokenSource = new CancellationTokenSource();

        readonly string target;
        readonly GrpcEnvironment environment;
        readonly CompletionQueueSafeHandle completionQueue;
        readonly ChannelSafeHandle handle;
        readonly Dictionary<string, ChannelOption> options;
        readonly Task connectivityWatcherTask;

        bool shutdownRequested;

        /// <summary>
        /// Creates a channel that connects to a specific host.
        /// Port will default to 80 for an unsecure channel and to 443 for a secure channel.
        /// </summary>
        /// <param name="target">Target of the channel.</param>
        /// <param name="credentials">Credentials to secure the channel.</param>
        public Channel(string target, ChannelCredentials credentials) : this(target, credentials, null)
        {
        }

        /// <summary>
        /// Creates a channel that connects to a specific host.
        /// Port will default to 80 for an unsecure channel and to 443 for a secure channel.
        /// </summary>
        /// <param name="target">Target of the channel.</param>
        /// <param name="credentials">Credentials to secure the channel.</param>
        /// <param name="options">Channel options.</param>
        public Channel(string target, ChannelCredentials credentials, IEnumerable<ChannelOption> options)
        {
            this.target = GrpcPreconditions.CheckNotNull(target, "target");
            this.options = CreateOptionsDictionary(options);
            // Appends the grpc-csharp user agent (preserving any user-supplied prefix).
            EnsureUserAgentChannelOption(this.options);
            this.environment = GrpcEnvironment.AddRef();
            this.completionQueue = this.environment.PickCompletionQueue();
            using (var nativeCredentials = credentials.ToNativeCredentials())
            using (var nativeChannelArgs = ChannelOptions.CreateChannelArgs(this.options.Values))
            {
                // Null native credentials mean an insecure channel.
                if (nativeCredentials != null)
                {
                    this.handle = ChannelSafeHandle.CreateSecure(nativeCredentials, target, nativeChannelArgs);
                }
                else
                {
                    this.handle = ChannelSafeHandle.CreateInsecure(target, nativeChannelArgs);
                }
            }
            // TODO(jtattermusch): Workaround for https://github.com/GoogleCloudPlatform/google-cloud-dotnet/issues/822.
            // Remove once retries are supported in C core
            this.connectivityWatcherTask = RunConnectivityWatcherAsync();
            GrpcEnvironment.RegisterChannel(this);
        }

        /// <summary>
        /// Creates a channel that connects to a specific host and port.
        /// </summary>
        /// <param name="host">The name or IP address of the host.</param>
        /// <param name="port">The port.</param>
        /// <param name="credentials">Credentials to secure the channel.</param>
        public Channel(string host, int port, ChannelCredentials credentials) : this(host, port, credentials, null)
        {
        }

        /// <summary>
        /// Creates a channel that connects to a specific host and port.
        /// </summary>
        /// <param name="host">The name or IP address of the host.</param>
        /// <param name="port">The port.</param>
        /// <param name="credentials">Credentials to secure the channel.</param>
        /// <param name="options">Channel options.</param>
        public Channel(string host, int port, ChannelCredentials credentials, IEnumerable<ChannelOption> options) :
            this(string.Format("{0}:{1}", host, port), credentials, options)
        {
        }

        /// <summary>
        /// Gets current connectivity state of this channel.
        /// After channel has been shutdown, <c>ChannelState.Shutdown</c> will be returned.
        /// </summary>
        public ChannelState State
        {
            get
            {
                return GetConnectivityState(false);
            }
        }

        // cached handler for watch connectivity state
        static readonly BatchCompletionDelegate WatchConnectivityStateHandler = (success, ctx, state) =>
        {
            var tcs = (TaskCompletionSource<bool>) state;
            tcs.SetResult(success);
        };

        /// <summary>
        /// Returned tasks completes once channel state has become different from
        /// given lastObservedState.
        /// If deadline is reached or an error occurs, returned task is cancelled.
        /// </summary>
        public async Task WaitForStateChangedAsync(ChannelState lastObservedState, DateTime? deadline = null)
        {
            var result = await WaitForStateChangedInternalAsync(lastObservedState, deadline).ConfigureAwait(false);
            if (!result)
            {
                throw new TaskCanceledException("Reached deadline.");
            }
        }

        /// <summary>
        /// Returned tasks completes once channel state has become different from
        /// given lastObservedState (<c>true</c> is returned) or if the wait has timed out (<c>false</c> is returned).
        /// </summary>
        internal Task<bool> WaitForStateChangedInternalAsync(ChannelState lastObservedState, DateTime? deadline = null)
        {
            GrpcPreconditions.CheckArgument(lastObservedState != ChannelState.Shutdown,
                "Shutdown is a terminal state. No further state changes can occur.");
            var tcs = new TaskCompletionSource<bool>();
            var deadlineTimespec = deadline.HasValue ? Timespec.FromDateTime(deadline.Value) : Timespec.InfFuture;
            lock (myLock)
            {
                if (handle.IsClosed)
                {
                    // If channel has been already shutdown and handle was disposed, we would end up with
                    // an abandoned completion added to the completion registry. Instead, we make sure we fail early.
                    throw new ObjectDisposedException(nameof(handle), "Channel handle has already been disposed.");
                }
                else
                {
                    // pass "tcs" as "state" for WatchConnectivityStateHandler.
                    handle.WatchConnectivityState(lastObservedState, deadlineTimespec, completionQueue, WatchConnectivityStateHandler, tcs);
                }
            }
            return tcs.Task;
        }

        /// <summary>Resolved address of the remote endpoint in URI format.</summary>
        public string ResolvedTarget
        {
            get
            {
                return handle.GetTarget();
            }
        }

        /// <summary>The original target used to create the channel.</summary>
        public string Target
        {
            get
            {
                return this.target;
            }
        }

        /// <summary>
        /// Returns a token that gets cancelled once <c>ShutdownAsync</c> is invoked.
        /// </summary>
        public CancellationToken ShutdownToken
        {
            get
            {
                return this.shutdownTokenSource.Token;
            }
        }

        /// <summary>
        /// Allows explicitly requesting channel to connect without starting an RPC.
        /// Returned task completes once state Ready was seen. If the deadline is reached,
        /// or channel enters the Shutdown state, the task is cancelled.
        /// There is no need to call this explicitly unless your use case requires that.
        /// Starting an RPC on a new channel will request connection implicitly.
        /// </summary>
        /// <param name="deadline">The deadline. <c>null</c> indicates no deadline.</param>
        public async Task ConnectAsync(DateTime? deadline = null)
        {
            // tryToConnect=true kicks off connection establishment; then poll state changes
            // until Ready (or fail once Shutdown is reached).
            var currentState = GetConnectivityState(true);
            while (currentState != ChannelState.Ready)
            {
                if (currentState == ChannelState.Shutdown)
                {
                    throw new OperationCanceledException("Channel has reached Shutdown state.");
                }
                await WaitForStateChangedAsync(currentState, deadline).ConfigureAwait(false);
                currentState = GetConnectivityState(false);
            }
        }

        /// <summary>
        /// Shuts down the channel cleanly. It is strongly recommended to shutdown
        /// all previously created channels before exiting from the process.
        /// </summary>
        /// <remarks>
        /// This method doesn't wait for all calls on this channel to finish (nor does
        /// it explicitly cancel all outstanding calls). It is user's responsibility to make sure
        /// all the calls on this channel have finished (successfully or with an error)
        /// before shutting down the channel to ensure channel shutdown won't impact
        /// the outcome of those remote calls.
        /// </remarks>
        public async Task ShutdownAsync()
        {
            lock (myLock)
            {
                // ShutdownAsync may only be invoked once per channel.
                GrpcPreconditions.CheckState(!shutdownRequested);
                shutdownRequested = true;
            }
            GrpcEnvironment.UnregisterChannel(this);

            shutdownTokenSource.Cancel();

            var activeCallCount = activeCallCounter.Count;
            if (activeCallCount > 0)
            {
                Logger.Warning("Channel shutdown was called but there are still {0} active calls for that channel.", activeCallCount);
            }

            lock (myLock)
            {
                handle.Dispose();
            }

            // Wait for both the environment teardown and the connectivity watcher to finish.
            await Task.WhenAll(GrpcEnvironment.ReleaseAsync(), connectivityWatcherTask).ConfigureAwait(false);
        }

        internal ChannelSafeHandle Handle
        {
            get
            {
                return this.handle;
            }
        }

        internal GrpcEnvironment Environment
        {
            get
            {
                return this.environment;
            }
        }

        internal CompletionQueueSafeHandle CompletionQueue
        {
            get
            {
                return this.completionQueue;
            }
        }

        // Called when a call starts on this channel: bumps the active-call count and pins the
        // native handle so it cannot be released while the call is in flight.
        internal void AddCallReference(object call)
        {
            activeCallCounter.Increment();

            bool success = false;
            handle.DangerousAddRef(ref success);
            GrpcPreconditions.CheckState(success);
        }

        // Counterpart of AddCallReference; releases the native handle pin and the call count.
        internal void RemoveCallReference(object call)
        {
            handle.DangerousRelease();

            activeCallCounter.Decrement();
        }

        // Queries the native channel state; a disposed handle is reported as Shutdown.
        private ChannelState GetConnectivityState(bool tryToConnect)
        {
            try
            {
                lock (myLock)
                {
                    return handle.CheckConnectivityState(tryToConnect);
                }
            }
            catch (ObjectDisposedException)
            {
                return ChannelState.Shutdown;
            }
        }

        /// <summary>
        /// Constantly Watches channel connectivity status to work around https://github.com/GoogleCloudPlatform/google-cloud-dotnet/issues/822
        /// </summary>
        private async Task RunConnectivityWatcherAsync()
        {
            try
            {
                var lastState = State;
                while (lastState != ChannelState.Shutdown)
                {
                    lock (myLock)
                    {
                        if (shutdownRequested)
                        {
                            break;
                        }
                    }

                    // ignore the result
                    await WaitForStateChangedInternalAsync(lastState, DateTime.UtcNow.AddSeconds(1)).ConfigureAwait(false);
                    lastState = State;
                }
            }
            catch (ObjectDisposedException)
            {
                // during shutdown, channel is going to be disposed.
            }
        }

        // Ensures the primary user agent channel option ends with the grpc-csharp version string,
        // keeping any user-provided user agent as a prefix.
        private static void EnsureUserAgentChannelOption(Dictionary<string, ChannelOption> options)
        {
            var key = ChannelOptions.PrimaryUserAgentString;
            var userAgentString = "";

            ChannelOption option;
            if (options.TryGetValue(key, out option))
            {
                // user-provided userAgentString needs to be at the beginning
                userAgentString = option.StringValue + " ";
            };

            // TODO(jtattermusch): it would be useful to also provide .NET/mono version.
            userAgentString += string.Format("grpc-csharp/{0}", VersionInfo.CurrentVersion);

            options[ChannelOptions.PrimaryUserAgentString] = new ChannelOption(key, userAgentString);
        }

        // Builds a name-keyed dictionary of channel options; a null input yields an empty dictionary.
        // Note: duplicate option names will throw (Dictionary.Add semantics).
        private static Dictionary<string, ChannelOption> CreateOptionsDictionary(IEnumerable<ChannelOption> options)
        {
            var dict = new Dictionary<string, ChannelOption>();
            if (options == null)
            {
                return dict;
            }
            foreach (var option in options)
            {
                dict.Add(option.Name, option);
            }
            return dict;
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

// =+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
//
// GroupByQueryOperator.cs
//
// =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-

using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Threading;
using IEnumerator = System.Collections.IEnumerator;

namespace System.Linq.Parallel
{
    /// <summary>
    /// The operator type for GroupBy statements. This operator groups the input based on
    /// a key-selection routine, yielding one-to-many values of key-to-elements. The
    /// implementation is very much like the hash join operator, in which we first build
    /// a big hashtable of the input; then we just iterate over each unique key in the
    /// hashtable, yielding it plus all of the elements with the same key.
    /// </summary>
    /// <typeparam name="TSource"></typeparam>
    /// <typeparam name="TGroupKey"></typeparam>
    /// <typeparam name="TElement"></typeparam>
    internal sealed class GroupByQueryOperator<TSource, TGroupKey, TElement> :
        UnaryQueryOperator<TSource, IGrouping<TGroupKey, TElement>>
    {
        private readonly Func<TSource, TGroupKey> _keySelector; // Key selection function.
        private readonly Func<TSource, TElement> _elementSelector; // Optional element selection function.
        private readonly IEqualityComparer<TGroupKey> _keyComparer; // An optional key comparison object.

        //---------------------------------------------------------------------------------------
        // Initializes a new group by operator.
        //
        // Arguments:
        //    child             - the child operator or data source from which to pull data
        //    keySelector       - a delegate representing the key selector function
        //    elementSelector   - a delegate representing the element selector function
        //    keyComparer       - an optional key comparison routine
        //
        // Assumptions:
        //    keySelector must be non null.
        //    elementSelector must be non null.
        //

        internal GroupByQueryOperator(IEnumerable<TSource> child,
                                      Func<TSource, TGroupKey> keySelector,
                                      Func<TSource, TElement> elementSelector,
                                      IEqualityComparer<TGroupKey> keyComparer)
            : base(child)
        {
            Debug.Assert(child != null, "child data source cannot be null");
            Debug.Assert(keySelector != null, "need a selector function");
            Debug.Assert(elementSelector != null || typeof(TSource) == typeof(TElement), "need an element function if TSource!=TElement");

            _keySelector = keySelector;
            _elementSelector = elementSelector;
            _keyComparer = keyComparer;

            // Hash-repartitioning scrambles the input order, so the output index state is Shuffled.
            SetOrdinalIndexState(OrdinalIndexState.Shuffled);
        }

        internal override void WrapPartitionedStream<TKey>(
            PartitionedStream<TSource, TKey> inputStream, IPartitionedStreamRecipient<IGrouping<TGroupKey, TElement>> recipient,
            bool preferStriping, QuerySettings settings)
        {
            // Hash-repartition the source stream
            if (Child.OutputOrdered)
            {
                // Ordered child: repartition while carrying the original order keys along.
                WrapPartitionedStreamHelperOrdered<TKey>(
                    ExchangeUtilities.HashRepartitionOrdered<TSource, TGroupKey, TKey>(
                        inputStream, _keySelector, _keyComparer, null, settings.CancellationState.MergedCancellationToken),
                    recipient,
                    settings.CancellationState.MergedCancellationToken
                );
            }
            else
            {
                // Unordered child: order keys are irrelevant (int placeholder).
                WrapPartitionedStreamHelper<TKey, int>(
                    ExchangeUtilities.HashRepartition<TSource, TGroupKey, TKey>(
                        inputStream, _keySelector, _keyComparer, null, settings.CancellationState.MergedCancellationToken),
                    recipient,
                    settings.CancellationState.MergedCancellationToken
                );
            }
        }

        //---------------------------------------------------------------------------------------
        // This is a helper method. WrapPartitionedStream decides what type TKey is going
        // to be, and then call this method with that key as a generic parameter.
        //

        private void WrapPartitionedStreamHelper<TIgnoreKey, TKey>(
            PartitionedStream<Pair<TSource, TGroupKey>, TKey> hashStream,
            IPartitionedStreamRecipient<IGrouping<TGroupKey, TElement>> recipient,
            CancellationToken cancellationToken)
        {
            int partitionCount = hashStream.PartitionCount;
            PartitionedStream<IGrouping<TGroupKey, TElement>, TKey> outputStream = new PartitionedStream<IGrouping<TGroupKey, TElement>, TKey>(
                partitionCount, hashStream.KeyComparer, OrdinalIndexState.Shuffled);

            // If there is no element selector, we return a special identity enumerator. Otherwise,
            // we return one that will apply the element selection function during enumeration.

            for (int i = 0; i < partitionCount; i++)
            {
                if (_elementSelector == null)
                {
                    Debug.Assert(typeof(TSource) == typeof(TElement));

                    var enumerator = new GroupByIdentityQueryOperatorEnumerator<TSource, TGroupKey, TKey>(
                        hashStream[i], _keyComparer, cancellationToken);

                    // Safe double cast: the Debug.Assert above guarantees TSource == TElement here.
                    outputStream[i] = (QueryOperatorEnumerator<IGrouping<TGroupKey, TElement>, TKey>)(object)enumerator;
                }
                else
                {
                    outputStream[i] = new GroupByElementSelectorQueryOperatorEnumerator<TSource, TGroupKey, TElement, TKey>(
                        hashStream[i], _keyComparer, _elementSelector, cancellationToken);
                }
            }

            recipient.Receive(outputStream);
        }

        //---------------------------------------------------------------------------------------
        // This is a helper method. WrapPartitionedStream decides what type TKey is going
        // to be, and then call this method with that key as a generic parameter.
        //

        private void WrapPartitionedStreamHelperOrdered<TKey>(
            PartitionedStream<Pair<TSource, TGroupKey>, TKey> hashStream,
            IPartitionedStreamRecipient<IGrouping<TGroupKey, TElement>> recipient,
            CancellationToken cancellationToken)
        {
            int partitionCount = hashStream.PartitionCount;
            PartitionedStream<IGrouping<TGroupKey, TElement>, TKey> outputStream = new PartitionedStream<IGrouping<TGroupKey, TElement>, TKey>(
                partitionCount, hashStream.KeyComparer, OrdinalIndexState.Shuffled);

            // If there is no element selector, we return a special identity enumerator. Otherwise,
            // we return one that will apply the element selection function during enumeration.

            IComparer<TKey> orderComparer = hashStream.KeyComparer;

            for (int i = 0; i < partitionCount; i++)
            {
                if (_elementSelector == null)
                {
                    Debug.Assert(typeof(TSource) == typeof(TElement));

                    var enumerator = new OrderedGroupByIdentityQueryOperatorEnumerator<TSource, TGroupKey, TKey>(
                        hashStream[i], _keySelector, _keyComparer, orderComparer, cancellationToken);

                    // Safe double cast: the Debug.Assert above guarantees TSource == TElement here.
                    outputStream[i] = (QueryOperatorEnumerator<IGrouping<TGroupKey, TElement>, TKey>)(object)enumerator;
                }
                else
                {
                    outputStream[i] = new OrderedGroupByElementSelectorQueryOperatorEnumerator<TSource, TGroupKey, TElement, TKey>(
                        hashStream[i], _keySelector, _elementSelector, _keyComparer, orderComparer, cancellationToken);
                }
            }

            recipient.Receive(outputStream);
        }

        //-----------------------------------------------------------------------------------
        // Override of the query operator base class's Open method.
        //

        internal override QueryResults<IGrouping<TGroupKey, TElement>> Open(QuerySettings settings, bool preferStriping)
        {
            // We just open our child operator. Do not propagate the preferStriping value, but instead explicitly
            // set it to false. Regardless of whether the parent prefers striping or range partitioning, the output
            // will be hash-partitioned.
            QueryResults<TSource> childResults = Child.Open(settings, false);
            return new UnaryQueryOperatorResults(childResults, this, settings, false);
        }

        //---------------------------------------------------------------------------------------
        // Returns an enumerable that represents the query executing sequentially.
        //

        internal override IEnumerable<IGrouping<TGroupKey, TElement>> AsSequentialQuery(CancellationToken token)
        {
            IEnumerable<TSource> wrappedChild = CancellableEnumerable.Wrap(Child.AsSequentialQuery(token), token);

            // Fall back to the corresponding LINQ-to-Objects GroupBy overload.
            if (_elementSelector == null)
            {
                Debug.Assert(typeof(TElement) == typeof(TSource));
                return (IEnumerable<IGrouping<TGroupKey, TElement>>)wrappedChild.GroupBy(_keySelector, _keyComparer);
            }
            else
            {
                return wrappedChild.GroupBy(_keySelector, _elementSelector, _keyComparer);
            }
        }

        //---------------------------------------------------------------------------------------
        // Whether this operator performs a premature merge that would not be performed in
        // a similar sequential operation (i.e., in LINQ to Objects).
        //

        internal override bool LimitsParallelism
        {
            get { return false; }
        }
    }

    //---------------------------------------------------------------------------------------
    // The enumerator type responsible for grouping elements and yielding the key-value sets.
    //
    // Assumptions:
    //     Just like the Join operator, this won't work properly at all if the analysis engine
    //     didn't choose to hash partition. We will simply not yield correct groupings.
    //

    internal abstract class GroupByQueryOperatorEnumerator<TSource, TGroupKey, TElement, TOrderKey> :
        QueryOperatorEnumerator<IGrouping<TGroupKey, TElement>, TOrderKey>
    {
        protected readonly QueryOperatorEnumerator<Pair<TSource, TGroupKey>, TOrderKey> _source; // The data source to enumerate.
        protected readonly IEqualityComparer<TGroupKey> _keyComparer; // A key comparer.
        protected readonly CancellationToken _cancellationToken;
        private Mutables _mutables; // All of the mutable state.

        // Mutable state is grouped into a lazily-allocated class so the enumerator itself
        // stays cheap until the first MoveNext call.
        private class Mutables
        {
            internal HashLookup<Wrapper<TGroupKey>, ListChunk<TElement>> _hashLookup; // The lookup with key-value mappings.
            internal int _hashLookupIndex; // The current index within the lookup.
        }

        //---------------------------------------------------------------------------------------
        // Instantiates a new group by enumerator.
        //

        protected GroupByQueryOperatorEnumerator(
            QueryOperatorEnumerator<Pair<TSource, TGroupKey>, TOrderKey> source,
            IEqualityComparer<TGroupKey> keyComparer, CancellationToken cancellationToken)
        {
            Debug.Assert(source != null);

            _source = source;
            _keyComparer = keyComparer;
            _cancellationToken = cancellationToken;
        }

        //---------------------------------------------------------------------------------------
        // MoveNext will invoke the entire query sub-tree, accumulating results into a hash-
        // table, upon the first call. Then for the first call and all subsequent calls, we will
        // just enumerate the key-set from the hash-table, retrieving groupings of key-elements.
        //

        internal override bool MoveNext(ref IGrouping<TGroupKey, TElement> currentElement, ref TOrderKey currentKey)
        {
            Debug.Assert(_source != null);

            // Lazy-init the mutable state. This also means we haven't yet built our lookup of
            // groupings, so we can go ahead and do that too.
            Mutables mutables = _mutables;
            if (mutables == null)
            {
                mutables = _mutables = new Mutables();

                // Build the hash lookup and start enumerating the lookup at the beginning.
                mutables._hashLookup = BuildHashLookup();
                Debug.Assert(mutables._hashLookup != null);
                mutables._hashLookupIndex = -1;
            }

            // Now, with a hash lookup in hand, we just enumerate the keys. So long
            // as the key-value lookup has elements, we have elements.
            // Note: currentKey is left untouched in this unordered enumerator.
            if (++mutables._hashLookupIndex < mutables._hashLookup.Count)
            {
                currentElement = new GroupByGrouping<TGroupKey, TElement>(
                    mutables._hashLookup[mutables._hashLookupIndex]);
                return true;
            }

            return false;
        }

        //-----------------------------------------------------------------------------------
        // Builds the hash lookup, transforming from TSource to TElement through whatever means is appropriate.
        //

        protected abstract HashLookup<Wrapper<TGroupKey>, ListChunk<TElement>> BuildHashLookup();

        protected override void Dispose(bool disposing)
        {
            _source.Dispose();
        }
    }

    //---------------------------------------------------------------------------------------
    // A specialization of the group by enumerator for yielding elements with the identity
    // function.
    //

    internal sealed class GroupByIdentityQueryOperatorEnumerator<TSource, TGroupKey, TOrderKey> :
        GroupByQueryOperatorEnumerator<TSource, TGroupKey, TSource, TOrderKey>
    {
        //---------------------------------------------------------------------------------------
        // Instantiates a new group by enumerator.
        //

        internal GroupByIdentityQueryOperatorEnumerator(
            QueryOperatorEnumerator<Pair<TSource, TGroupKey>, TOrderKey> source,
            IEqualityComparer<TGroupKey> keyComparer, CancellationToken cancellationToken)
            : base(source, keyComparer, cancellationToken)
        {
        }

        //-----------------------------------------------------------------------------------
        // Builds the hash lookup, transforming from TSource to TElement through whatever means is appropriate.
        //

        protected override HashLookup<Wrapper<TGroupKey>, ListChunk<TSource>> BuildHashLookup()
        {
            HashLookup<Wrapper<TGroupKey>, ListChunk<TSource>> hashlookup =
                new HashLookup<Wrapper<TGroupKey>, ListChunk<TSource>>(new WrapperEqualityComparer<TGroupKey>(_keyComparer));

            Pair<TSource, TGroupKey> sourceElement = default(Pair<TSource, TGroupKey>);
            TOrderKey sourceKeyUnused = default(TOrderKey);
            int i = 0;
            while (_source.MoveNext(ref sourceElement, ref sourceKeyUnused))
            {
                // Poll for cancellation periodically rather than on every element.
                if ((i++ & CancellationState.POLL_INTERVAL) == 0)
                    CancellationState.ThrowIfCanceled(_cancellationToken);

                // Generate a key and place it into the hashtable.
                Wrapper<TGroupKey> key = new Wrapper<TGroupKey>(sourceElement.Second);

                // If the key already exists, we just append it to the existing list --
                // otherwise we will create a new one and add it to that instead.
                ListChunk<TSource> currentValue = null;
                if (!hashlookup.TryGetValue(key, ref currentValue))
                {
                    const int INITIAL_CHUNK_SIZE = 2;
                    currentValue = new ListChunk<TSource>(INITIAL_CHUNK_SIZE);
                    hashlookup.Add(key, currentValue);
                }
                Debug.Assert(currentValue != null);

                // Append the source element (identity: no element selector) to this key's chunk.
                currentValue.Add(sourceElement.First);
            }

            return hashlookup;
        }
    }

    //---------------------------------------------------------------------------------------
    // A specialization of the group by enumerator for yielding elements with any arbitrary
    // element selection function.
    //

    internal sealed class GroupByElementSelectorQueryOperatorEnumerator<TSource, TGroupKey, TElement, TOrderKey> :
        GroupByQueryOperatorEnumerator<TSource, TGroupKey, TElement, TOrderKey>
    {
        private readonly Func<TSource, TElement> _elementSelector; // Function to select elements.

        //---------------------------------------------------------------------------------------
        // Instantiates a new group by enumerator.
        //

        internal GroupByElementSelectorQueryOperatorEnumerator(
            QueryOperatorEnumerator<Pair<TSource, TGroupKey>, TOrderKey> source,
            IEqualityComparer<TGroupKey> keyComparer, Func<TSource, TElement> elementSelector, CancellationToken cancellationToken) :
            base(source, keyComparer, cancellationToken)
        {
            Debug.Assert(elementSelector != null);
            _elementSelector = elementSelector;
        }

        //-----------------------------------------------------------------------------------
        // Builds the hash lookup, transforming from TSource to TElement through whatever means is appropriate.
        //

        protected override HashLookup<Wrapper<TGroupKey>, ListChunk<TElement>> BuildHashLookup()
        {
            HashLookup<Wrapper<TGroupKey>, ListChunk<TElement>> hashlookup =
                new HashLookup<Wrapper<TGroupKey>, ListChunk<TElement>>(new WrapperEqualityComparer<TGroupKey>(_keyComparer));

            Pair<TSource, TGroupKey> sourceElement = default(Pair<TSource, TGroupKey>);
            TOrderKey sourceKeyUnused = default(TOrderKey);
            int i = 0;
            while (_source.MoveNext(ref sourceElement, ref sourceKeyUnused))
            {
                // Poll for cancellation periodically rather than on every element.
                if ((i++ & CancellationState.POLL_INTERVAL) == 0)
                    CancellationState.ThrowIfCanceled(_cancellationToken);

                // Generate a key and place it into the hashtable.
                Wrapper<TGroupKey> key = new Wrapper<TGroupKey>(sourceElement.Second);

                // If the key already exists, we just append it to the existing list --
                // otherwise we will create a new one and add it to that instead.
                ListChunk<TElement> currentValue = null;
                if (!hashlookup.TryGetValue(key, ref currentValue))
                {
                    const int INITIAL_CHUNK_SIZE = 2;
                    currentValue = new ListChunk<TElement>(INITIAL_CHUNK_SIZE);
                    hashlookup.Add(key, currentValue);
                }
                Debug.Assert(currentValue != null);

                // Append the projected element to this key's chunk.
                currentValue.Add(_elementSelector(sourceElement.First));
            }

            return hashlookup;
        }
    }

    //---------------------------------------------------------------------------------------
    // Ordered version of the GroupBy operator.
//
internal abstract class OrderedGroupByQueryOperatorEnumerator<TSource, TGroupKey, TElement, TOrderKey> :
    QueryOperatorEnumerator<IGrouping<TGroupKey, TElement>, TOrderKey>
{
    protected readonly QueryOperatorEnumerator<Pair<TSource, TGroupKey>, TOrderKey> _source; // The data source to enumerate.
    private readonly Func<TSource, TGroupKey> _keySelector; // The key selection routine.
    protected readonly IEqualityComparer<TGroupKey> _keyComparer; // The key comparison routine.
    protected readonly IComparer<TOrderKey> _orderComparer; // The comparison routine for order keys.
    protected readonly CancellationToken _cancellationToken; // Observed while building the lookup.
    private Mutables _mutables; // All the mutable state.

    // Mutable per-enumerator state, created lazily on the first MoveNext call.
    private class Mutables
    {
        internal HashLookup<Wrapper<TGroupKey>, GroupKeyData> _hashLookup; // The lookup with key-value mappings.
        internal int _hashLookupIndex; // The current index within the lookup.
    }

    //---------------------------------------------------------------------------------------
    // Instantiates a new group by enumerator.
    //
    protected OrderedGroupByQueryOperatorEnumerator(QueryOperatorEnumerator<Pair<TSource, TGroupKey>, TOrderKey> source,
        Func<TSource, TGroupKey> keySelector, IEqualityComparer<TGroupKey> keyComparer, IComparer<TOrderKey> orderComparer,
        CancellationToken cancellationToken)
    {
        Debug.Assert(source != null);
        Debug.Assert(keySelector != null);

        _source = source;
        _keySelector = keySelector;
        _keyComparer = keyComparer;
        _orderComparer = orderComparer;
        _cancellationToken = cancellationToken;
    }

    //---------------------------------------------------------------------------------------
    // MoveNext will invoke the entire query sub-tree, accumulating results into a hash-
    // table, upon the first call. Then for the first call and all subsequent calls, we will
    // just enumerate the key-set from the hash-table, retrieving groupings of key-elements.
    //
    internal override bool MoveNext(ref IGrouping<TGroupKey, TElement> currentElement, ref TOrderKey currentKey)
    {
        Debug.Assert(_source != null);
        Debug.Assert(_keySelector != null);

        // Lazy-init the mutable state. This also means we haven't yet built our lookup of
        // groupings, so we can go ahead and do that too.
        Mutables mutables = _mutables;
        if (mutables == null)
        {
            mutables = _mutables = new Mutables();

            // Build the hash lookup and start enumerating the lookup at the beginning.
            mutables._hashLookup = BuildHashLookup();
            Debug.Assert(mutables._hashLookup != null);
            mutables._hashLookupIndex = -1;
        }

        // Now, with a hash lookup in hand, we just enumerate the keys. So long
        // as the key-value lookup has elements, we have elements.
        if (++mutables._hashLookupIndex < mutables._hashLookup.Count)
        {
            GroupKeyData value = mutables._hashLookup[mutables._hashLookupIndex].Value;
            currentElement = value._grouping;
            currentKey = value._orderKey;

            return true;
        }

        return false;
    }

    //-----------------------------------------------------------------------------------
    // Builds the hash lookup, transforming from TSource to TElement through whatever means is appropriate.
    //
    protected abstract HashLookup<Wrapper<TGroupKey>, GroupKeyData> BuildHashLookup();

    protected override void Dispose(bool disposing)
    {
        // Only the wrapped source needs cleanup; the lookup is plain managed state.
        _source.Dispose();
    }

    //-----------------------------------------------------------------------------------
    // A data structure that holds information about elements with a particular key.
    //
    // This information includes two parts:
    //     - An order key for the grouping.
    //     - The grouping itself. The grouping consists of elements and the grouping key.
//
protected class GroupKeyData
{
    // The smallest order key observed for the group so far (updated by the
    // concrete BuildHashLookup implementations).
    internal TOrderKey _orderKey;
    internal OrderedGroupByGrouping<TGroupKey, TOrderKey, TElement> _grouping; // The grouping itself.

    internal GroupKeyData(TOrderKey orderKey, TGroupKey hashKey, IComparer<TOrderKey> orderComparer)
    {
        _orderKey = orderKey;
        _grouping = new OrderedGroupByGrouping<TGroupKey, TOrderKey, TElement>(hashKey, orderComparer);
    }
}
}

//---------------------------------------------------------------------------------------
// A specialization of the ordered GroupBy enumerator for yielding elements with the identity
// function.
//

internal sealed class OrderedGroupByIdentityQueryOperatorEnumerator<TSource, TGroupKey, TOrderKey> :
    OrderedGroupByQueryOperatorEnumerator<TSource, TGroupKey, TSource, TOrderKey>
{
    //---------------------------------------------------------------------------------------
    // Instantiates a new group by enumerator.
    //
    internal OrderedGroupByIdentityQueryOperatorEnumerator(QueryOperatorEnumerator<Pair<TSource, TGroupKey>, TOrderKey> source,
        Func<TSource, TGroupKey> keySelector, IEqualityComparer<TGroupKey> keyComparer, IComparer<TOrderKey> orderComparer,
        CancellationToken cancellationToken)
        : base(source, keySelector, keyComparer, orderComparer, cancellationToken)
    {
    }

    //-----------------------------------------------------------------------------------
    // Builds the hash lookup, transforming from TSource to TElement through whatever means is appropriate.
    //
    protected override HashLookup<Wrapper<TGroupKey>, GroupKeyData> BuildHashLookup()
    {
        HashLookup<Wrapper<TGroupKey>, GroupKeyData> hashLookup = new HashLookup<Wrapper<TGroupKey>, GroupKeyData>(
            new WrapperEqualityComparer<TGroupKey>(_keyComparer));

        Pair<TSource, TGroupKey> sourceElement = default(Pair<TSource, TGroupKey>);
        TOrderKey sourceOrderKey = default(TOrderKey);
        int i = 0;
        while (_source.MoveNext(ref sourceElement, ref sourceOrderKey))
        {
            // Periodic cancellation poll (POLL_INTERVAL acts as a bit mask).
            if ((i++ & CancellationState.POLL_INTERVAL) == 0)
                CancellationState.ThrowIfCanceled(_cancellationToken);

            // Generate a key and place it into the hashtable.
            Wrapper<TGroupKey> key = new Wrapper<TGroupKey>(sourceElement.Second);

            // If the key already exists, we just append it to the existing list --
            // otherwise we will create a new one and add it to that instead.
            GroupKeyData currentValue = null;
            if (hashLookup.TryGetValue(key, ref currentValue))
            {
                // Track the minimum order key for the group (per _orderComparer).
                if (_orderComparer.Compare(sourceOrderKey, currentValue._orderKey) < 0)
                {
                    currentValue._orderKey = sourceOrderKey;
                }
            }
            else
            {
                currentValue = new GroupKeyData(sourceOrderKey, key.Value, _orderComparer);

                hashLookup.Add(key, currentValue);
            }

            Debug.Assert(currentValue != null);

            currentValue._grouping.Add(sourceElement.First, sourceOrderKey);
        }

        // Sort the elements within each group
        for (int j = 0; j < hashLookup.Count; j++)
        {
            hashLookup[j].Value._grouping.DoneAdding();
        }

        return hashLookup;
    }
}

//---------------------------------------------------------------------------------------
// A specialization of the ordered GroupBy enumerator for yielding elements with any arbitrary
// element selection function.
//

internal sealed class OrderedGroupByElementSelectorQueryOperatorEnumerator<TSource, TGroupKey, TElement, TOrderKey> :
    OrderedGroupByQueryOperatorEnumerator<TSource, TGroupKey, TElement, TOrderKey>
{
    private readonly Func<TSource, TElement> _elementSelector; // Function to select elements.
//---------------------------------------------------------------------------------------
// Instantiates a new group by enumerator.
//
internal OrderedGroupByElementSelectorQueryOperatorEnumerator(QueryOperatorEnumerator<Pair<TSource, TGroupKey>, TOrderKey> source,
    Func<TSource, TGroupKey> keySelector, Func<TSource, TElement> elementSelector, IEqualityComparer<TGroupKey> keyComparer,
    IComparer<TOrderKey> orderComparer, CancellationToken cancellationToken)
    : base(source, keySelector, keyComparer, orderComparer, cancellationToken)
{
    Debug.Assert(elementSelector != null);
    _elementSelector = elementSelector;
}

//-----------------------------------------------------------------------------------
// Builds the hash lookup, transforming from TSource to TElement through whatever means is appropriate.
//
protected override HashLookup<Wrapper<TGroupKey>, GroupKeyData> BuildHashLookup()
{
    HashLookup<Wrapper<TGroupKey>, GroupKeyData> hashLookup = new HashLookup<Wrapper<TGroupKey>, GroupKeyData>(
        new WrapperEqualityComparer<TGroupKey>(_keyComparer));

    Pair<TSource, TGroupKey> sourceElement = default(Pair<TSource, TGroupKey>);
    TOrderKey sourceOrderKey = default(TOrderKey);
    int i = 0;
    while (_source.MoveNext(ref sourceElement, ref sourceOrderKey))
    {
        // Periodic cancellation poll (POLL_INTERVAL acts as a bit mask).
        if ((i++ & CancellationState.POLL_INTERVAL) == 0)
            CancellationState.ThrowIfCanceled(_cancellationToken);

        // Generate a key and place it into the hashtable.
        Wrapper<TGroupKey> key = new Wrapper<TGroupKey>(sourceElement.Second);

        // If the key already exists, we just append it to the existing list --
        // otherwise we will create a new one and add it to that instead.
        GroupKeyData currentValue = null;
        if (hashLookup.TryGetValue(key, ref currentValue))
        {
            // Track the minimum order key for the group (per _orderComparer).
            if (_orderComparer.Compare(sourceOrderKey, currentValue._orderKey) < 0)
            {
                currentValue._orderKey = sourceOrderKey;
            }
        }
        else
        {
            currentValue = new GroupKeyData(sourceOrderKey, key.Value, _orderComparer);
            hashLookup.Add(key, currentValue);
        }

        Debug.Assert(currentValue != null);

        // Call to the base class to yield the current value.
        // The element selector is applied here, at accumulation time.
        currentValue._grouping.Add(_elementSelector(sourceElement.First), sourceOrderKey);
    }

    // Sort the elements within each group
    for (int j = 0; j < hashLookup.Count; j++)
    {
        hashLookup[j].Value._grouping.DoneAdding();
    }

    return hashLookup;
}
}

//---------------------------------------------------------------------------------------
// This little type implements the IGrouping<K,T> interface, and exposes a single
// key-to-many-values mapping.
//

internal class GroupByGrouping<TGroupKey, TElement> : IGrouping<TGroupKey, TElement>
{
    private readonly KeyValuePair<Wrapper<TGroupKey>, ListChunk<TElement>> _keyValues; // A key value pair.

    //---------------------------------------------------------------------------------------
    // Constructs a new grouping out of the key value pair.
    //
    internal GroupByGrouping(KeyValuePair<Wrapper<TGroupKey>, ListChunk<TElement>> keyValues)
    {
        Debug.Assert(keyValues.Value != null);
        _keyValues = keyValues;
    }

    //---------------------------------------------------------------------------------------
    // The key this mapping represents.
    //
    TGroupKey IGrouping<TGroupKey, TElement>.Key
    {
        get
        {
            return _keyValues.Key.Value;
        }
    }

    //---------------------------------------------------------------------------------------
    // Access to value enumerators.
//

// Explicit generic enumerator: walks the chunked value list for this group.
IEnumerator<TElement> IEnumerable<TElement>.GetEnumerator()
{
    ListChunk<TElement> chunk = _keyValues.Value;
    Debug.Assert(chunk != null);
    return chunk.GetEnumerator();
}

// Non-generic enumeration defers to the generic implementation above.
IEnumerator IEnumerable.GetEnumerator()
{
    IEnumerable<TElement> self = this;
    return self.GetEnumerator();
}
}

/// <summary>
/// An ordered version of the grouping data structure. Represents an ordered group of elements that
/// have the same grouping key.
/// </summary>
internal class OrderedGroupByGrouping<TGroupKey, TOrderKey, TElement> : IGrouping<TGroupKey, TElement>
{
    private const int INITIAL_CHUNK_SIZE = 2;

    private readonly TGroupKey _key;                      // The group key for this grouping.
    private ListChunk<Pair<TOrderKey, TElement>> _pairs;  // (order key, element) pairs added so far.
    private TElement[] _sorted;                           // Elements in order-key order; set by DoneAdding.
    private readonly IComparer<TOrderKey> _comparer;      // Comparer used to order the elements.

    /// <summary>
    /// Constructs a new grouping
    /// </summary>
    internal OrderedGroupByGrouping(
        TGroupKey groupKey,
        IComparer<TOrderKey> orderComparer)
    {
        _key = groupKey;
        _pairs = new ListChunk<Pair<TOrderKey, TElement>>(INITIAL_CHUNK_SIZE);
        _comparer = orderComparer;
    }

    /// <summary>
    /// The key this grouping represents.
    /// </summary>
    TGroupKey IGrouping<TGroupKey, TElement>.Key
    {
        get { return _key; }
    }

    // Enumeration is only valid once DoneAdding has produced the sorted array.
    IEnumerator<TElement> IEnumerable<TElement>.GetEnumerator()
    {
        TElement[] snapshot = _sorted;
        Debug.Assert(snapshot != null);
        return ((IEnumerable<TElement>)snapshot).GetEnumerator();
    }

    IEnumerator IEnumerable.GetEnumerator()
    {
        IEnumerable<TElement> self = this;
        return self.GetEnumerator();
    }

    /// <summary>
    /// Add an element
    /// </summary>
    internal void Add(TElement value, TOrderKey orderKey)
    {
        Debug.Assert(_pairs != null);
        _pairs.Add(new Pair<TOrderKey, TElement>(orderKey, value));
    }

    /// <summary>
    /// No more elements will be added, so we can sort the group now.
    /// </summary>
    internal void DoneAdding()
    {
        Debug.Assert(_pairs != null);

        // Total element count across every chunk in the list.
        int total = 0;
        for (ListChunk<Pair<TOrderKey, TElement>> chunk = _pairs; chunk != null; chunk = chunk.Next)
        {
            total += chunk.Count;
        }

        // Split the pairs into parallel key/element arrays and co-sort them.
        TOrderKey[] orderKeys = new TOrderKey[total];
        TElement[] elements = new TElement[total];

        int next = 0;
        foreach (Pair<TOrderKey, TElement> pair in _pairs)
        {
            orderKeys[next] = pair.First;
            elements[next] = pair.Second;
            next++;
        }

        Array.Sort(orderKeys, elements, _comparer);
        _sorted = elements;

#if DEBUG
        _pairs = null; // Any future calls to Add() or DoneAdding() will fail
#endif
    }
}
}
using System;
using System.Threading;
using System.IO;

namespace JinxNeuralNetwork
{
    /// <summary>
    /// Trains a NeuralNetwork on input and target data using AdaGrad.
    /// </summary>
    public class NeuralNetworkTrainer
    {
        // Callback that supplies the next batch of input/target data.
        // Its return value is used by Learn() as the next resetState flag.
        public delegate bool StreamNextData(ref float[][] inp, ref float[][] targ);

        public const int LOSS_TYPE_AVERAGE = 0, LOSS_TYPE_MAX = 1, LOSS_TYPE_CROSSENTROPY = 2;

        public StreamNextData onStreamNextData = null;

        /// <summary>
        /// Delay(ms) in the execution thread.
        /// </summary>
        public int delay = 0;

        /// <summary>
        /// Learning rate(0-1).
        /// </summary>
        public float learningRate = 1e-1f;

        /// <summary>
        /// Enables stochastic skipping, skipping a random amount of training states to ensure uniformity.
        /// </summary>
        public bool stochasticSkipping = false;

        /// <summary>
        /// Desired loss to stop training at.
        /// </summary>
        public float desiredLoss = 1e-2f;

        /// <summary>
        /// Callback for when desiredLoss is reached.
        /// </summary>
        public NeuralNetworkEvolver.ReachedGoalEventFunction onReachedGoal = null;

        /// <summary>
        /// Amount of smoothing to apply to smooth loss regularization.
        /// </summary>
        public float lossSmoothing = 0.001f;

        /// <summary>
        /// Randomly shuffles input/training data order.
        /// </summary>
        public float shuffleChance = 0.0f;

        private int lossType;                   // One of the LOSS_TYPE_* constants.
        private int[] crossEntropyLossTargets;  // Per-sample index of the largest target value, or -1 when that value is not positive.
        private float[][] inputData, targetData;
        private bool running = true, hasRecurring;
        private NeuralNetwork neuralNetwork;
        private NeuralNetworkContext[] stackedRuntimeContext;            // One context per unroll step (single entry when not recurring).
        private NeuralNetworkFullContext[] stackedFullContext;
        private NeuralNetworkPropagationState[] stackedDerivativeMemory;
        private NeuralNetworkDerivativeMemory derivatives = new NeuralNetworkDerivativeMemory();

        /// <summary>
        /// AdaGrad memory, use this for saving/loading training state.
        /// </summary>
        public NeuralNetworkAdaGradMemory adagradMemory = new NeuralNetworkAdaGradMemory();

        private float lossDelta, bestLoss, newLoss, smoothLoss;
        private int skipN, dataIndex, unrollCount, lossSampleCount;
        private long iterations;
        private Thread thread;
        private int maxUnrollLength;
        private bool resetState;

        /// <summary>
        /// Create new NeuralNetworkTrainer.
        /// </summary>
        /// <param name="nn">NeuralNetwork to train.</param>
        /// <param name="inputDat">Input data.</param>
        /// <param name="targetDat">Target data.</param>
        /// <param name="maxUnrollLen">Memory state unroll times, for recurring layers.</param>
        /// <param name="losType">Loss calculation type, NeuralNetworkTrainer.LOSS_TYPE_AVERAGE/MAX/CROSSENTROPY.</param>
        public NeuralNetworkTrainer(NeuralNetwork nn, float[][] inputDat, float[][] targetDat, int maxUnrollLen, int losType)
        {
            neuralNetwork = nn;
            inputData = inputDat;
            targetData = targetDat;
            maxUnrollLength = maxUnrollLen;
            if (maxUnrollLength < 1) maxUnrollLength = 1;
            lossType = losType;

            //check for recurring layer, if need to stack and unroll
            if (nn.outputLayer.recurring)
            {
                hasRecurring = true;
            }
            else
            {
                for (int i = 0; i < nn.hiddenLayers.Length; i++)
                {
                    if (nn.hiddenLayers[i].recurring)
                    {
                        hasRecurring = true;
                        break;
                    }
                }
            }

            derivatives.Setup(nn);
            adagradMemory.Setup(nn);
            adagradMemory.Reset();

            // Non-recurring networks need no unrolling: a single context suffices.
            int tunrollLen = maxUnrollLength;
            if (!hasRecurring)
            {
                tunrollLen = 1;
            }

            stackedRuntimeContext = new NeuralNetworkContext[tunrollLen];
            stackedFullContext = new NeuralNetworkFullContext[tunrollLen];
            stackedDerivativeMemory = new NeuralNetworkPropagationState[tunrollLen];
            for (int i = 0; i < stackedRuntimeContext.Length; i++)
            {
                stackedRuntimeContext[i] = new NeuralNetworkContext();
                stackedRuntimeContext[i].Setup(nn);

                stackedFullContext[i] = new NeuralNetworkFullContext();
                stackedFullContext[i].Setup(nn);

                // Each propagation state accumulates into the single shared
                // 'derivatives' instance.
                stackedDerivativeMemory[i] = new NeuralNetworkPropagationState();
                stackedDerivativeMemory[i].Setup(nn, stackedRuntimeContext[i], stackedFullContext[i],
                    derivatives);
            }
        }

        /// <summary>
        /// Initialize data states for training but don't start thread.
        /// </summary>
        public void StartInit()
        {
            //reset adagrad memory
            adagradMemory.learningRate = learningRate;

            if (lossType == LOSS_TYPE_CROSSENTROPY)
            {
                bestLoss = smoothLoss = (float)-Math.Log(1.0f / neuralNetwork.outputLayer.numberOfNeurons) * maxUnrollLength;//initial smooth loss
            }
            else
            {
                smoothLoss = 1.0f;
                bestLoss = 1.0f;
            }
            lossDelta = 1.0f;
            unrollCount = 0;
            skipN = 0;
            dataIndex = 0;
            iterations = 0;
            resetState = true;

            if (onStreamNextData != null)
            {
                onStreamNextData(ref inputData, ref targetData);
            }

            if (lossType == LOSS_TYPE_CROSSENTROPY)
            {
                // Precompute the index of the largest target value per sample;
                // -1 marks samples whose largest target value is not positive.
                crossEntropyLossTargets = new int[targetData.Length];
                for (int i = 0; i < targetData.Length; i++)
                {
                    int r = Utils.Largest(targetData[i], 0, targetData[i].Length);
                    if (targetData[i][r] > 0.0f) crossEntropyLossTargets[i] = r;
                    else crossEntropyLossTargets[i] = -1;
                }
            }

            running = true;
        }

        /// <summary>
        /// Initializes data states and starts training.
        /// </summary>
        public void Start()
        {
            StartInit();

            //start
            thread = new Thread(processingThread);
            thread.Start();
        }

        /// <summary>
        /// Stop training.
        /// </summary>
        public void Stop()
        {
            running = false;
        }

        /// <summary>
        /// Joins training thread.
        /// </summary>
        /// <param name="timeout"></param>
        /// <returns></returns>
        public bool Join(int timeout)
        {
            return thread.Join(timeout);
        }

        /// <summary>
        /// Get best loss.
        /// </summary>
        /// <returns>Loss.</returns>
        public float GetLoss()
        {
            return bestLoss;
        }

        /// <summary>
        /// Get smoothed loss.
        /// </summary>
        /// <returns></returns>
        public float GetSmoothLoss()
        {
            return smoothLoss;
        }

        /// <summary>
        /// Get number of iterations.
        /// </summary>
        /// <returns></returns>
        public long GetIterations()
        {
            return iterations;
        }

        //copies c1 recurring data to c2
        private void CopyRecurringState(NeuralNetworkContext c1, NeuralNetworkContext c2)
        {
            int i = c1.hiddenRecurringData.Length;
            while (i-- > 0)
            {
                if (c1.hiddenRecurringData[i] == null) continue;
                Array.Copy(c1.hiddenRecurringData[i], c2.hiddenRecurringData[i], c1.hiddenRecurringData[i].Length);
            }
        }

        /// <summary>
        /// Run single iteration of learning, either 1 forward or backward propagation.
        /// </summary>
        public void Learn()
        {
            if (!running) return;

            // Fresh data (or first call): clear loss accumulators plus derivative and
            // context state before starting a new pass over the data.
            if (resetState)
            {
                resetState = false;
                newLoss = 0.0f;
                lossSampleCount = 0;

                derivatives.Reset();
                for (int i = 0; i < stackedRuntimeContext.Length; i++)
                {
                    stackedRuntimeContext[i].Reset(true);
                    stackedDerivativeMemory[i].Reset();
                }

                if (hasRecurring && stochasticSkipping)
                {
                    if (targetData.Length < maxUnrollLength)
                    {
                        skipN = 0;
                    }
                    else
                    {
                        // NOTE(review): this uses targetData[dataIndex].Length (the width of one
                        // sample) rather than targetData.Length — confirm that is intended.
                        skipN = (int)(Utils.NextInt(0, (targetData[dataIndex].Length % maxUnrollLength) + 1));
                    }
                }
            }

            //run forwards for maxUnrollLength and then run backwards for maxUnrollLength backpropagating through recurring
            if (skipN > 0)
            {
                //skip random # at beginning to apply a 'shuffle'
                // Forward-only execution keeps the recurring state advancing while skipping.
                if (hasRecurring)
                {
                    Array.Copy(inputData[dataIndex], stackedRuntimeContext[0].inputData, stackedRuntimeContext[0].inputData.Length);
                    neuralNetwork.Execute(stackedRuntimeContext[0]);
                }
                skipN--;
            }
            else
            {
                int unrollIndex = unrollCount;
                if (!hasRecurring) unrollIndex = 0;

                Array.Copy(inputData[dataIndex], stackedRuntimeContext[unrollIndex].inputData, stackedRuntimeContext[unrollIndex].inputData.Length);
                neuralNetwork.Execute_FullContext(stackedRuntimeContext[unrollIndex], stackedFullContext[unrollIndex]);

                unrollCount++;

                if (hasRecurring)
                {
                    // Once the unroll window is full (or the data is exhausted),
                    // backpropagate through the whole stack, newest step first.
                    if (unrollCount >= maxUnrollLength || dataIndex + 1 >= targetData.Length)
                    {
                        //back propagate through stacked
                        float nextLoss = 0.0f;
                        int tdatIndex = dataIndex, nunroll = unrollCount;
                        while (unrollCount-- > 0)
                        {
                            neuralNetwork.ExecuteBackwards(targetData[tdatIndex],
                                stackedRuntimeContext[unrollCount], stackedFullContext[unrollCount], stackedDerivativeMemory[unrollCount],
                                lossType, (lossType == LOSS_TYPE_CROSSENTROPY ? crossEntropyLossTargets[tdatIndex] : -1));
                            if (lossType == LOSS_TYPE_AVERAGE)
                            {
                                nextLoss += stackedDerivativeMemory[unrollCount].loss;
                            }
                            else
                            {
                                if (stackedDerivativeMemory[unrollCount].loss > nextLoss)
                                {
                                    nextLoss = stackedDerivativeMemory[unrollCount].loss;
                                }
                            }
                            tdatIndex--;
                        }

                        if (lossType == LOSS_TYPE_AVERAGE)
                        {
                            newLoss += nextLoss/(float)nunroll;
                            lossSampleCount++;
                        }
                        else
                        {
                            if (nextLoss > newLoss) newLoss = nextLoss;
                        }

                        //learn
                        adagradMemory.Apply(stackedDerivativeMemory[0]);
                        derivatives.Reset();
                        unrollCount = 0;

                        //copy recurring state over
                        CopyRecurringState(stackedRuntimeContext[maxUnrollLength - 1], stackedRuntimeContext[0]);
                    }
                    else
                    {
                        //copy recurring state into next
                        CopyRecurringState(stackedRuntimeContext[unrollCount - 1], stackedRuntimeContext[unrollCount]);
                    }
                }
                else
                {
                    // Non-recurring: backpropagate immediately for this sample.
                    neuralNetwork.ExecuteBackwards(targetData[dataIndex], stackedRuntimeContext[unrollIndex], stackedFullContext[unrollIndex],
                        stackedDerivativeMemory[unrollIndex], lossType, (lossType == LOSS_TYPE_CROSSENTROPY ?
                        crossEntropyLossTargets[dataIndex] : -1));
                    if (lossType == LOSS_TYPE_AVERAGE)
                    {
                        newLoss += stackedDerivativeMemory[unrollIndex].loss;
                        lossSampleCount++;
                    }
                    else
                    {
                        if (stackedDerivativeMemory[unrollIndex].loss > newLoss)
                        {
                            newLoss = stackedDerivativeMemory[unrollIndex].loss;
                        }
                    }

                    if (unrollCount >= maxUnrollLength || dataIndex + 1 >= targetData.Length)
                    {
                        //learn
                        adagradMemory.Apply(stackedDerivativeMemory[0]);
                        derivatives.Reset();
                        unrollCount = 0;
                    }
                }
            }

            //advance index
            dataIndex++;
            if (dataIndex >= targetData.Length)
            {
                // A full pass over the data has completed.
                iterations++;
                dataIndex = 0;

                if (lossType == LOSS_TYPE_AVERAGE) newLoss /= (float)lossSampleCount;
                if (newLoss < bestLoss)
                {
                    bestLoss = newLoss;
                }

                if (newLoss <= desiredLoss)
                {
                    //hit goal, stop
                    if (onReachedGoal != null) onReachedGoal();
                    running = false;
                    return;
                }

                // Exponentially smoothed loss and loss delta.
                float lsl = smoothLoss;
                smoothLoss = smoothLoss * lossSmoothing + newLoss * (1.0f-lossSmoothing);
                lossDelta = lossDelta * lossSmoothing + (lsl - smoothLoss) * (1.0f-lossSmoothing);

                lossSampleCount = 0;
                newLoss = 0.0f;

                //stream new data
                if (onStreamNextData != null)
                {
                    resetState = onStreamNextData(ref inputData, ref targetData);
                    if (lossType == LOSS_TYPE_CROSSENTROPY)
                    {
                        // Recompute the cross-entropy targets for the new batch.
                        crossEntropyLossTargets = new int[targetData.Length];
                        for (int i = 0; i < targetData.Length; i++)
                        {
                            int r = Utils.Largest(targetData[i], 0, targetData[i].Length);
                            if (targetData[i][r] > 0.0f) crossEntropyLossTargets[i] = r;
                            else crossEntropyLossTargets[i] = -1;
                        }
                    }
                }
                else
                {
                    resetState = true;
                }

                if (shuffleChance > 0.0f && Utils.NextFloat01() < shuffleChance)
                {
                    Utils.Shuffle(inputData, targetData);
                }
            }
        }

        // Worker loop: repeatedly Learn() until Stop() clears 'running'.
        private void processingThread()
        {
            while (running)
            {
                Learn();
                if (delay > 0) Thread.Sleep(delay);
            }
        }

        /// <summary>
        /// Returns true if StartInit/Start init has been called.
        /// </summary>
        /// <returns></returns>
        public bool Running()
        {
            return running;
        }

        /// <summary>
        /// Get loss delta.
        /// </summary>
        /// <returns></returns>
        public float GetLossDelta()
        {
            return lossDelta;
        }
    }

    /// <summary>
    /// Derivative memory.
    /// </summary>
    public class NeuralNetworkDerivativeMemory
    {
        // Accumulated derivatives per layer. recurringBPBuffer/altRecurringBPBuffer
        // form a double buffer swapped via SwapBPBuffers().
        public float[][] weightMems, biasMems, recurrWeightMems, outputFullConnectedWeightMems, recurringBPBuffer, altRecurringBPBuffer;

        // Allocates one derivative array per hidden layer plus one for the output layer;
        // recurring arrays only exist for layers flagged as recurring.
        public void Setup(NeuralNetwork nn)
        {
            biasMems = new float[nn.hiddenLayers.Length + 1][];
            weightMems = new float[nn.hiddenLayers.Length + 1][];
            recurrWeightMems = new float[nn.hiddenLayers.Length][];
            recurringBPBuffer = new float[nn.hiddenLayers.Length][];
            altRecurringBPBuffer = new float[nn.hiddenLayers.Length][];
            for (int i = 0; i < nn.hiddenLayers.Length; i++)
            {
                weightMems[i] = new float[nn.hiddenConnections[i].numberOfSynapses];
                biasMems[i] = new float[nn.hiddenLayers[i].numberOfNeurons];
                if (nn.hiddenLayers[i].recurring)
                {
                    recurrWeightMems[i] = new float[nn.hiddenRecurringConnections[i].numberOfSynapses];
                    recurringBPBuffer[i] = new float[nn.hiddenLayers[i].numberOfNeurons];
                    altRecurringBPBuffer[i] = new float[nn.hiddenLayers[i].numberOfNeurons];
                }
            }

            // The output layer occupies the final slot of weightMems/biasMems.
            int lid = nn.hiddenLayers.Length;
            biasMems[lid] = new float[nn.outputLayer.numberOfNeurons];
            weightMems[lid] = new float[nn.outputConnection.numberOfSynapses];
        }

        // Swap the two recurring backpropagation buffers.
        public void SwapBPBuffers()
        {
            float[][] temp = recurringBPBuffer;
            recurringBPBuffer = altRecurringBPBuffer;
            altRecurringBPBuffer = temp;
        }

        // Zero all derivative arrays and both recurring buffers.
        public void Reset()
        {
            for (int i = 0; i < biasMems.Length; i++)
            {
                Utils.Fill(biasMems[i], 0.0f);
                Utils.Fill(weightMems[i], 0.0f);
                if (i < recurrWeightMems.Length && recurrWeightMems[i] != null)
                {
                    Utils.Fill(recurrWeightMems[i], 0.0f);
                    Utils.Fill(recurringBPBuffer[i], 0.0f);
                    Utils.Fill(altRecurringBPBuffer[i], 0.0f);
                }
                if (outputFullConnectedWeightMems != null && i < outputFullConnectedWeightMems.Length) Utils.Fill(outputFullConnectedWeightMems[i], 0.0f);
            }
        }

        // Multiply every stored derivative by s.
        public void Scale(float s)
        {
            for (int i = 0; i < biasMems.Length; i++)
            {
                Utils.Multiply(biasMems[i], s);
                Utils.Multiply(weightMems[i], s);
                if (i < recurrWeightMems.Length && recurrWeightMems[i] != null)
                {
                    Utils.Multiply(recurrWeightMems[i], s);
                }
                if (outputFullConnectedWeightMems != null && i < outputFullConnectedWeightMems.Length) Utils.Multiply(outputFullConnectedWeightMems[i], s);
            }
        }

        // Zero only the recurring backpropagation buffers, leaving derivatives intact.
        public void ResetOnlyBuffer()
        {
            for (int i = 0; i < biasMems.Length; i++)
            {
                if (i < recurrWeightMems.Length && recurrWeightMems[i] != null)
                {
                    Utils.Fill(recurringBPBuffer[i], 0.0f);
                    Utils.Fill(altRecurringBPBuffer[i], 0.0f);
                }
            }
        }
    }

    /// <summary>
    /// Back propagation state.
    /// </summary>
    public class NeuralNetworkPropagationState
    {
        public float loss;
        public float[][] weights, biases, recurrWeights, weightMems, biasMems, recurrWeightMems, buf, recurrBuf, state;
        public float[] inputMem;
        public NeuralNetworkDerivativeMemory derivativeMemory;

        public void Setup(NeuralNetwork nn, NeuralNetworkContext context, NeuralNetworkFullContext fullCtx, NeuralNetworkDerivativeMemory derivMem)
        {
            //initialize memory buffers
            state = new float[nn.hiddenLayers.Length][];
            weights = new float[nn.hiddenLayers.Length + 1][];
            biases = new float[nn.hiddenLayers.Length + 1][];
            buf = new float[nn.hiddenLayers.Length + 1][];
            recurrBuf = new float[nn.hiddenLayers.Length][];

            // The derivative arrays are shared with (alias) the supplied derivative memory.
            biasMems = derivMem.biasMems;
            weightMems = derivMem.weightMems;
            recurrWeightMems = derivMem.recurrWeightMems;
            recurrWeights = new float[nn.hiddenLayers.Length][];
            derivativeMemory = derivMem;

            for (int i = 0; i < nn.hiddenLayers.Length; i++)
            {
                state[i] = new float[nn.hiddenLayers[i].numberOfNeurons];
                weights[i] = nn.hiddenConnections[i].weights;
                biases[i] = nn.hiddenLayers[i].biases;

                // buf[i] aliases the input of layer i: the network's input buffer for
                // the first layer, otherwise the previous hidden layer's output buffer.
                if (i == 0)
                {
                    buf[i] = context.inputData;
                }
                else
                {
                    buf[i] = fullCtx.hiddenBuffer[i - 1];
                }

                if (nn.hiddenLayers[i].recurring)
                {
                    recurrWeights[i] = nn.hiddenRecurringConnections[i].weights;
                    recurrBuf[i] = fullCtx.hiddenRecurringBuffer[i];
                }
            }

            // Output layer: last slot; its input is the last hidden buffer, or the
            // network input when there are no hidden layers.
            int lid = nn.hiddenLayers.Length;
            weights[lid] = nn.outputConnection.weights;
            biases[lid] = nn.outputLayer.biases;
            if (lid > 0)
            {
                buf[lid] = fullCtx.hiddenBuffer[lid - 1];
            }
            else
            {
                buf[lid] = context.inputData;
            }
        }

        // Clears the loss and zeroes the aliased input/recurring buffers.
        public void Reset()
        {
            loss = 0.0f;
            for (int i = 0; i < buf.Length; i++)
            {
Utils.Fill(buf[i], 0.0f); if (i < recurrBuf.Length && recurrBuf[i] != null) Utils.Fill(recurrBuf[i], 0.0f); } } } /// <summary> /// Struct for storing per-parameter learning rates when doing AdaGrad. /// </summary> public class NeuralNetworkAdaGradMemory { private const float EXPLODING_GRADIENT_CLAMP = 1.0f; private const double SQRT_EPSILON = 1e-8; public float learningRate; public float[][] weights, biases, recurringWeights; public void Setup(NeuralNetwork nn) { weights = new float[nn.hiddenLayers.Length + 1][]; biases = new float[nn.hiddenLayers.Length + 1][]; recurringWeights = new float[nn.hiddenLayers.Length][]; for (int i = 0; i < nn.hiddenLayers.Length; i++) { weights[i] = new float[nn.hiddenConnections[i].numberOfSynapses]; biases[i] = new float[nn.hiddenLayers[i].numberOfNeurons]; if (nn.hiddenLayers[i].recurring) recurringWeights[i] = new float[nn.hiddenRecurringConnections[i].numberOfSynapses]; } int lid = nn.hiddenLayers.Length; weights[lid] = new float[nn.outputConnection.numberOfSynapses]; biases[lid] = new float[nn.outputLayer.numberOfNeurons]; } /// <summary> /// Reset AdaGrad memory to 0. /// </summary> public void Reset() { for (int i = 0; i < weights.Length; i++) { Utils.Fill(weights[i], 0.0f); Utils.Fill(biases[i], 0.0f); if (i < recurringWeights.Length && recurringWeights[i] != null) Utils.Fill(recurringWeights[i], 0.0f); } } /// <summary> /// Partially reset adagrad memory, new memory = old memory * am. /// </summary> /// <param name="am"></param> public void ResetPartial(float am) { for (int i = 0; i < weights.Length; i++) { Utils.Multiply(weights[i], am); Utils.Multiply(biases[i], am); if (i < recurringWeights.Length && recurringWeights[i] != null) Utils.Multiply(recurringWeights[i], am); } } /// <summary> /// Save AdaGrad memory to stream. 
/// </summary> /// <param name="s"></param> public void Save(Stream s) { for (int i = 0; i < weights.Length; i++) { float[] f = weights[i]; int k = f.Length; while (k-- > 0) { s.Write(Utils.FloatToBytes(f[k]), 0, 4); } f = biases[i]; k = f.Length; while (k-- > 0) { s.Write(Utils.FloatToBytes(f[k]), 0, 4); } if (i < recurringWeights.Length && recurringWeights[i] != null) { f = recurringWeights[i]; k = f.Length; while (k-- > 0) { s.Write(Utils.FloatToBytes(f[k]), 0, 4); } } } } /// <summary> /// Load AdaGrad memory from stream. /// </summary> /// <param name="s"></param> public void Load(Stream s) { byte[] b = new byte[4]; for (int i = 0; i < weights.Length; i++) { float[] f = weights[i]; int k = f.Length; while (k-- > 0) { s.Read(b, 0, 4); f[k] = Utils.FloatFromBytes(b); } f = biases[i]; k = f.Length; while (k-- > 0) { s.Read(b, 0, 4); f[k] = Utils.FloatFromBytes(b); } if (i < recurringWeights.Length && recurringWeights[i] != null) { f = recurringWeights[i]; k = f.Length; while (k-- > 0) { s.Read(b, 0, 4); f[k] = Utils.FloatFromBytes(b); } } } } /// <summary> /// Add derivatives to learning rate and apply to network weights/biases /// </summary> /// <param name="derivMem"></param> public void Apply(NeuralNetworkPropagationState derivMem) { Apply(derivMem, derivMem.weights, derivMem.biases, derivMem.recurrWeights); } /// <summary> /// Add derivatives to learning rate and apply to network weights/biases. 
/// </summary> /// <param name="derivMem"></param> /// <param name="weight"></param> /// <param name="bias"></param> /// <param name="recurrWeight"></param> public void Apply(NeuralNetworkPropagationState derivMem, float[][] weight, float[][] bias, float[][] recurrWeight) { for (int i = 0; i < weights.Length; i++) { float[] t = weights[i], f = derivMem.weightMems[i], w = weight[i]; int k = f.Length; while (k-- > 0) { float m = t[k], d = f[k]; if (d < -EXPLODING_GRADIENT_CLAMP) d = -EXPLODING_GRADIENT_CLAMP; else if (d > EXPLODING_GRADIENT_CLAMP) d = EXPLODING_GRADIENT_CLAMP; m += d * d; w[k] -= (learningRate * d) / (float)Math.Sqrt(m + SQRT_EPSILON); t[k] = m; } t = biases[i]; f = derivMem.biasMems[i]; w = bias[i]; k = f.Length; while (k-- > 0) { float m = t[k], d = f[k]; if (d < -EXPLODING_GRADIENT_CLAMP) { d = -EXPLODING_GRADIENT_CLAMP; } else { if (d > EXPLODING_GRADIENT_CLAMP) { d = EXPLODING_GRADIENT_CLAMP; } } m += d * d; w[k] -= (learningRate * d) / (float)Math.Sqrt(m + SQRT_EPSILON); t[k] = m; } t = i < recurringWeights.Length ? recurringWeights[i] : null; if (t != null) { f = derivMem.recurrWeightMems[i]; w = recurrWeight[i]; k = f.Length; while (k-- > 0) { float m = t[k], d = f[k]; if (d < -EXPLODING_GRADIENT_CLAMP) { d = -EXPLODING_GRADIENT_CLAMP; } else { if (d > EXPLODING_GRADIENT_CLAMP) { d = EXPLODING_GRADIENT_CLAMP; } } m += d * d; w[k] -= (learningRate * d) / (float)Math.Sqrt(m + SQRT_EPSILON); t[k] = m; } } } } /// <summary> /// Add derivatives without per-parameter learning rate. 
/// </summary> /// <param name="derivMem"></param> /// <param name="weight"></param> /// <param name="bias"></param> /// <param name="recurrWeight"></param> public static void ApplyNoMemory(NeuralNetworkPropagationState derivMem, float[][] weight, float[][] bias, float[][] recurrWeight, float learningRate) { for (int i = 0; i < weight.Length; i++) { float[] f = derivMem.weightMems[i], w = weight[i]; int k = f.Length; while (k-- > 0) { float d = f[k]; if (d < -EXPLODING_GRADIENT_CLAMP) d = -EXPLODING_GRADIENT_CLAMP; else if (d > EXPLODING_GRADIENT_CLAMP) d = EXPLODING_GRADIENT_CLAMP; w[k] -= (learningRate * d); } f = derivMem.biasMems[i]; w = bias[i]; k = f.Length; while (k-- > 0) { float d = f[k]; if (d < -EXPLODING_GRADIENT_CLAMP) { d = -EXPLODING_GRADIENT_CLAMP; } else { if (d > EXPLODING_GRADIENT_CLAMP) { d = EXPLODING_GRADIENT_CLAMP; } } w[k] -= (learningRate * d); } if (recurrWeight[i] != null) { f = derivMem.recurrWeightMems[i]; w = recurrWeight[i]; k = f.Length; while (k-- > 0) { float d = f[k]; if (d < -EXPLODING_GRADIENT_CLAMP) { d = -EXPLODING_GRADIENT_CLAMP; } else { if (d > EXPLODING_GRADIENT_CLAMP) { d = EXPLODING_GRADIENT_CLAMP; } } w[k] -= (learningRate * d); } } } } } }
namespace SharpArch.Features.NHibernate
{
    using System.Collections.Generic;
    using System.Reflection;

    using global::NHibernate;
    using global::NHibernate.Criterion;

    using SharpArch.Domain;
    using SharpArch.Domain.DomainModel;
    using SharpArch.Domain.PersistenceSupport;
    using SharpArch.Features.NHibernate.Contracts.Repositories;

    /// <summary>
    /// Provides a fully loaded DAO which may be created in a few ways including:
    /// * Direct instantiation; e.g., new GenericDao&lt;Customer, string&gt;
    /// * Spring configuration; e.g., &lt;object id="CustomerDao" type="SharpArch.Data.NHibernateSupport.GenericDao&lt;CustomerAlias, string&gt;, SharpArch.Data" autowire="byName" /&gt;
    /// </summary>
    public class NHibernateRepositoryWithTypedId<T, TId> : INHibernateRepositoryWithTypedId<T, TId>
    {
        // Lazily created on first access of DbContext; keyed by the session factory
        // associated with this repository type.
        private IDbContext dbContext;

        /// <summary>
        /// Gets the transaction/flush context for this repository, creating it on first use
        /// from the session factory key declared via <see cref="SessionFactoryAttribute"/>.
        /// </summary>
        public virtual IDbContext DbContext
        {
            get
            {
                if (this.dbContext == null)
                {
                    string factoryKey = SessionFactoryAttribute.GetKeyFrom(this);
                    this.dbContext = new DbContext(factoryKey);
                }

                return this.dbContext;
            }
        }

        /// <summary>
        /// Gets the current NHibernate session for this repository's session factory.
        /// </summary>
        protected virtual ISession Session
        {
            get
            {
                string factoryKey = SessionFactoryAttribute.GetKeyFrom(this);
                return NHibernateSession.CurrentFor(factoryKey);
            }
        }

        /// <summary>
        /// Removes the given entity from the session cache without touching the database.
        /// </summary>
        public virtual void Evict(T entity)
        {
            this.Session.Evict(entity);
        }

        /// <summary>
        /// Query-by-example: returns all entities matching the populated properties of
        /// <paramref name="exampleInstance"/>, ignoring <paramref name="propertiesToExclude"/>.
        /// </summary>
        public virtual IList<T> FindAll(T exampleInstance, params string[] propertiesToExclude)
        {
            ICriteria criteria = this.Session.CreateCriteria(typeof(T));
            Example example = Example.Create(exampleInstance);

            foreach (string propertyToExclude in propertiesToExclude)
            {
                example.ExcludeProperty(propertyToExclude);
            }

            criteria.Add(example);

            return criteria.List<T>();
        }

        /// <summary>
        /// Returns all entities whose properties equal the given values; a null value
        /// is translated into an IS NULL restriction.
        /// </summary>
        public virtual IList<T> FindAll(IDictionary<string, object> propertyValuePairs)
        {
            Check.Require(
                propertyValuePairs != null && propertyValuePairs.Count > 0,
                "propertyValuePairs was null or empty; " + "it has to have at least one property/value pair in it");

            ICriteria criteria = this.Session.CreateCriteria(typeof(T));

            foreach (string key in propertyValuePairs.Keys)
            {
                criteria.Add(
                    propertyValuePairs[key] != null
                        ? Restrictions.Eq(key, propertyValuePairs[key])
                        : Restrictions.IsNull(key));
            }

            return criteria.List<T>();
        }

        /// <summary>
        /// Query-by-example variant of <see cref="FindAll(T, string[])"/> that expects at most
        /// one match; throws <see cref="NonUniqueResultException"/> on more than one.
        /// </summary>
        public virtual T FindOne(T exampleInstance, params string[] propertiesToExclude)
        {
            return UniqueOrDefault(this.FindAll(exampleInstance, propertiesToExclude));
        }

        /// <summary>
        /// Property/value variant of <see cref="FindAll(IDictionary{string, object})"/> that expects
        /// at most one match; throws <see cref="NonUniqueResultException"/> on more than one.
        /// </summary>
        public virtual T FindOne(IDictionary<string, object> propertyValuePairs)
        {
            return UniqueOrDefault(this.FindAll(propertyValuePairs));
        }

        /// <summary>
        /// Gets the entity by id with the requested lock mode, or null if not found.
        /// </summary>
        public virtual T Get(TId id, Enums.LockMode lockMode)
        {
            return this.Session.Get<T>(id, ConvertFrom(lockMode));
        }

        /// <summary>
        /// Loads a proxy for the entity by id; may throw later if the row does not exist.
        /// </summary>
        public virtual T Load(TId id)
        {
            return this.Session.Load<T>(id);
        }

        /// <summary>
        /// Loads a proxy for the entity by id with the requested lock mode.
        /// </summary>
        public virtual T Load(TId id, Enums.LockMode lockMode)
        {
            return this.Session.Load<T>(id, ConvertFrom(lockMode));
        }

        /// <summary>
        /// Persists a transient entity and returns it.
        /// </summary>
        public virtual T Save(T entity)
        {
            this.Session.Save(entity);
            return entity;
        }

        /// <summary>
        /// Updates a detached entity and returns it.
        /// </summary>
        public virtual T Update(T entity)
        {
            this.Session.Update(entity);
            return entity;
        }

        /// <summary>
        /// Schedules the entity for deletion.
        /// </summary>
        public virtual void Delete(T entity)
        {
            this.Session.Delete(entity);
        }

        /// <summary>
        /// Gets the entity by id, or null if not found.
        /// </summary>
        public virtual T Get(TId id)
        {
            return this.Session.Get<T>(id);
        }

        /// <summary>
        /// Returns every entity of type <typeparamref name="T"/>.
        /// </summary>
        public virtual IList<T> GetAll()
        {
            ICriteria criteria = this.Session.CreateCriteria(typeof(T));
            return criteria.List<T>();
        }

        /// <summary>
        /// Although SaveOrUpdate _can_ be invoked to update an object with an assigned Id, you are
        /// hereby forced instead to use Save/Update for better clarity.
        /// </summary>
        public virtual T SaveOrUpdate(T entity)
        {
            Check.Require(
                !(entity is IHasAssignedId<TId>),
                "For better clarity and reliability, Entities with an assigned Id must call Save or Update");

            this.Session.SaveOrUpdate(entity);
            return entity;
        }

        /// <summary>
        /// Shared unique-result extraction for the FindOne overloads: throws
        /// <see cref="NonUniqueResultException"/> when more than one row matched,
        /// returns the single match, or default(T) when nothing matched.
        /// </summary>
        private static T UniqueOrDefault(IList<T> foundList)
        {
            if (foundList.Count > 1)
            {
                throw new NonUniqueResultException(foundList.Count);
            }

            return foundList.Count == 1 ? foundList[0] : default(T);
        }

        /// <summary>
        /// Translates a domain layer lock mode into an NHibernate lock mode via reflection. This is
        /// provided to facilitate developing the domain layer without a direct dependency on the
        /// NHibernate assembly.
        /// </summary>
        private static LockMode ConvertFrom(Enums.LockMode lockMode)
        {
            FieldInfo translatedLockMode = typeof(LockMode).GetField(
                lockMode.ToString(), BindingFlags.Public | BindingFlags.Static);

            Check.Ensure(
                translatedLockMode != null,
                "The provided lock mode , '" + lockMode + ",' " +
                "could not be translated into an NHibernate.LockMode. This is probably because " +
                "NHibernate was updated and now has different lock modes which are out of synch " +
                "with the lock modes maintained in the domain layer.");

            return (LockMode)translatedLockMode.GetValue(null);
        }
    }
}
/*
Copyright 2006 - 2010 Intel Corporation

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

using System;
using System.Net;
using System.Text;
using System.Collections;
using OpenSource.UPnP.AV;
using System.Text.RegularExpressions;

namespace OpenSource.UPnP.AV.CdsMetadata
{
    /// <summary>
    /// Interface for determining whether a media object
    /// matches a set of comparison parameters.
    /// </summary>
    public interface IMediaComparer
    {
        /// <summary>
        /// Returns true if the object matches the comparer's criteria.
        /// </summary>
        /// <param name="mediaObject">the media object to test</param>
        /// <returns>true, if the object matches</returns>
        bool IsMatch(IUPnPMedia mediaObject);
    }

    /// <summary>
    /// <para>
    /// Evaluates ContentDirectory SearchCriteria expressions against media objects.
    /// The expression language has a formal Extended Backus-Naur Form (EBNF) description:
    /// </para>
    /// <para>
    ///   searchexp   := relexp | searchexp logop searchexp | "(" searchexp ")"
    ///   logop       := "and" | "or"
    ///   relexp      := property relop delim-value | property stringop delim-value |
    ///                  property existence-op true-false | property derived-op delim-value
    ///   relop       := "=" | "!=" | "&lt;" | "&lt;=" | "&gt;" | "&gt;="
    ///   stringop    := "contains" | "doesNotContain"
    ///   existence-op:= "exists"
    ///   true-false  := "true" | "false"
    ///   delim-value := dquote escape-value dquote
    ///   dquote      := '"' (double quote character, UTF-8 code 0x22)
    /// </para>
    /// <para>
    /// A property is a property name as defined in the ContentDirectory specification;
    /// standardized names can be obtained from a <see cref="Tags"/> object using the
    /// indexer [] operator with a value from the <see cref="CommonPropertyNames"/>
    /// enumeration, and attribute names of "item", "container", and "res" elements
    /// are available as static strings on <see cref="Tags.PropertyAttributes"/>.
    /// An escape-value is a string properly escaped such that a backslash is
    /// represented as \\ and a double-quote as \".
    /// </para>
    /// </summary>
    public class MediaComparer : IMediaComparer
    {
        /// <summary>
        /// This indicates if the
        /// <see cref="IUPnPMedia"/>
        /// object matches the Boolean expression.
        /// </summary>
        /// <param name="entry">the media object that should be compared against the expression</param>
        /// <returns>true, indicates a match against the expression</returns>
        public virtual bool IsMatch(IUPnPMedia entry)
        {
            return this.EvaluateMedia(entry);
        }

        /// <summary>
        /// State machine enumeration for the next
        /// expected token in an expression.
        /// </summary>
        private enum RelExpStates
        {
            unknown,
            expectOp,
            expectValue
        }

        /// <summary>
        /// If true, then all comparisons are case insensitive.
        /// </summary>
        public bool IgnoreCase = true;

        /// <summary>
        /// This constructor takes a Boolean expression in infix format and translates
        /// it into a postfix token queue (shunting-yard algorithm). Examples, where
        /// T is an instance of <see cref="Tags"/>:
        /// <para>T[CommonPropertyNames.title] contains "foo" — matches objects with "foo" in the title.</para>
        /// <para>T[CommonPropertyNames.creator] doesNotContain "\"HappyBand\"" — matches objects without "HappyBand" in the creator.</para>
        /// <para>An expression of "" or "*" matches everything.</para>
        /// </summary>
        /// <param name="infix">The boolean infix expression conforming to the syntax and semantics of ContentDirectory's boolean query language.</param>
        /// <exception cref="OpenSource.UPnP.AV.CdsMetadata.Error_MalformedSearchCriteria">
        /// Thrown if the infix expression has a syntax error.
        /// </exception>
        public MediaComparer (string infix)
        {
            string allstring = infix.Trim();

            // Empty or wildcard criteria match every media object; no parsing needed.
            if ((allstring == "") || (allstring == "*"))
            {
                this.m_AlwaysMatch = true;
                return;
            }

            //Initialize an empty stack and empty result string variable.
            //
            Stack stack = new Stack();
            m_Postfix = new Queue();
            RelExpStates state = RelExpStates.unknown;
            TokenResult token;

            while (infix.Length > 0)
            {
                infix = infix.Trim();
                token = GetToken(ref infix, state);

                // Advance the relexp state machine: property -> operator -> value.
                switch (state)
                {
                    case RelExpStates.unknown:
                        if (token.TokenType == Tokens.PropertyName)
                        {
                            state = RelExpStates.expectOp;
                        }
                        break;

                    case RelExpStates.expectOp:
                        if (token.TokenType != Tokens.Operator)
                        {
                            throw new UPnPCustomException(402, "Invalid Args: Invalid operator " + token.Data);
                        }
                        state = RelExpStates.expectValue;
                        break;

                    case RelExpStates.expectValue:
                        if (token.TokenType != Tokens.PropertyValue)
                        {
                            throw new UPnPCustomException(402, "Invalid Args: Unexpected value " + token.Data);
                        }
                        state = RelExpStates.unknown;
                        break;
                }

                // Shunting-yard: route the token to the output queue or operator stack.
                switch (token.TokenType)
                {
                    case Tokens.Operator:
                        if (token.OpToken == OperatorTokens.LParen)
                        {
                            //left paren
                            //
                            stack.Push(token);
                        }
                        else if (token.OpToken == OperatorTokens.RParen)
                        {
                            //right paren
                            //
                            TokenResult tr = new TokenResult(false);
                            do
                            {
                                if (stack.Count > 0)
                                {
                                    tr = (TokenResult) stack.Pop();
                                    if (tr.OpToken != OperatorTokens.LParen)
                                    {
                                        m_Postfix.Enqueue(tr);
                                    }
                                }
                                else
                                {
                                    throw new UPnPCustomException(402, "Invalid Args: Missing Left Parenthesis.");
                                }
                            }
                            while (tr.OpToken != OperatorTokens.LParen);
                        }
                        else
                        {
                            //standard operator
                            //
                            if (token.OpToken == OperatorTokens.Invalid)
                            {
                                throw new Exception("bad code");
                            }

                            while (
                                (stack.Count > 0) &&
                                ( ((TokenResult) stack.Peek()).Precedence >= token.Precedence) &&
                                ( ((TokenResult) stack.Peek()).OpToken != OperatorTokens.LParen)
                                )
                            {
                                // While stack is not empty &&
                                // top operator has higher or equal precedence...
                                // pop operator and stuff into queue
                                m_Postfix.Enqueue( stack.Pop() );
                            }
                            stack.Push(token);
                        }
                        break;

                    case Tokens.PropertyName:
                        m_Postfix.Enqueue(token);
                        // Pre-build the extractor for this property so evaluation is cheap.
                        TagExtractor te = new TagExtractor(token.Data);
                        this.m_PE[token.Data] = te;
                        break;

                    case Tokens.PropertyValue:
                        m_Postfix.Enqueue(token);
                        break;
                }
            }

            // pop remaining items in stack and stuff into queue
            //
            while (stack.Count > 0)
            {
                TokenResult tr = (TokenResult) stack.Pop();
                if (tr.OpToken != OperatorTokens.LParen)
                {
                    m_Postfix.Enqueue( tr );
                }
            }
        }

        /// <summary>
        /// Implements the core logic for evaluating a media object
        /// against an expression.
        /// String comparison methods are always case insensitive.
        /// </summary>
        /// <param name="media">the media to evaluate</param>
        /// <returns>true, if the media object is a match</returns>
        private bool EvaluateMedia (IUPnPMedia media)
        {
            if (this.m_AlwaysMatch == true) return true;

            // postfix expression is complete...now evaluate
            // (work on a copy so the stored queue survives repeated evaluations)
            Queue postfix = new Queue(this.m_Postfix);
            Stack stack = new Stack();

            while (postfix.Count > 0)
            {
                if (((TokenResult)postfix.Peek()).TokenType == Tokens.PropertyName)
                {
                    TokenResult lo = (TokenResult) postfix.Dequeue(); //left operand
                    TokenResult ro = (TokenResult) postfix.Dequeue(); //right operand
                    TokenResult op = (TokenResult) postfix.Dequeue(); //operator

                    //evaluate sub-expression against media
                    //
                    bool exp = Evaluate(media, lo.Data, op.OpToken, ro.Data);
                    stack.Push(exp);
                }
                else if (((TokenResult)postfix.Peek()).TokenType == Tokens.Operator)
                {
                    TokenResult op = (TokenResult) postfix.Dequeue(); //logical operator
                    bool ro = (bool) stack.Pop();
                    bool lo = (bool) stack.Pop();
                    bool result = false;
                    if (op.OpToken == OperatorTokens.And)
                    {
                        result = (lo && ro);
                    }
                    else if (op.OpToken == OperatorTokens.Or)
                    {
                        result = (lo || ro);
                    }
                    else
                    {
                        throw new UPnPCustomException(402, "Invalid Args: Badly formed SearchCriteria.");
                    }
                    stack.Push(result);
                }
                else
                {
                    throw new UPnPCustomException(402, "Invalid Args: Bad SearchCriteria.");
                }
            }

            return (bool) stack.Pop();
        }

        /// <summary>
        /// Evaluates a subexpression of a whole expression.
        /// </summary>
        /// <param name="media">the media object with the metadata</param>
        /// <param name="prop">the property/attribute metadata to examine</param>
        /// <param name="op">the operator to use for examination</param>
        /// <param name="val">the value to compare against in string form</param>
        /// <returns>true, if the subexpression matches</returns>
        /// <exception cref="OpenSource.UPnP.AV.CdsMetadata.Error_MalformedSearchCriteria">
        /// Thrown when the expression provided at constructor time could not
        /// be used to evaluate the media because of syntax errors in
        /// the expression.
        /// </exception>
        private bool Evaluate(IUPnPMedia media, string prop, OperatorTokens op, string val)
        {
            bool result = false;

            TagExtractor te = (TagExtractor) this.m_PE[prop];
            IList values = te.Extract(media);

            if (op == OperatorTokens.Exists)
            {
                // "exists true" matches when at least one value is present,
                // "exists false" matches when no value is present.
                bool testVal = (string.Compare(val, "true", true) == 0);
                result = ( ((values.Count > 0) && (testVal)) || ((values.Count == 0) && (testVal == false)) );
            }
            else
            {
                foreach (object testVal in values)
                {
                    int opCode = (int) op;

                    if ((opCode >= (int) OperatorTokens.Equal) && (opCode <= (int) OperatorTokens.GreaterThanEqualTo))
                    {
                        // Compare using a relational operator
                        //
                        // NOTE(review): all six relational operators are treated the same here —
                        // the match only fires when CompareTagValues returns 0 (equality).
                        // Looks like <, <=, >, >= are not distinguished; confirm intended.
                        try
                        {
                            int relResult = MetadataValueComparer.CompareTagValues(testVal, val, this.IgnoreCase);
                            if (relResult == 0)
                            {
                                result = true;
                                break;
                            }
                        }
                        catch //(Exception e)
                        {
                            string opString = Enum.GetName(typeof(OperatorTokens), opCode);
                            throw new Error_MalformedSearchCriteria("("+val+") "+opString+" "+testVal.ToString()+") could not occur.");
                        }
                    }
                    else if (op == OperatorTokens.Contains)
                    {
                        string tv = testVal.ToString();
                        int pos = tv.IndexOf(val);
                        result = (pos >= 0);
                    }
                    else if (op == OperatorTokens.DoesNotContain)
                    {
                        string tv = testVal.ToString();
                        int pos = tv.IndexOf(val);
                        result = (pos < 0);
                    }
                    else if (op == OperatorTokens.DerivedFrom)
                    {
                        string tv = testVal.ToString();
                        result = tv.StartsWith(val);
                    }
                }
            }

            return result;
        }

        /// <summary>
        /// Takes a portion of the expression and determines if the
        /// expression begins with an operator. If so, return
        /// the operator code. The operator token is removed
        /// from the expression.
        /// </summary>
        /// <param name="exp">infix expression</param>
        /// <param name="operators">the set of operators to search for</param>
        /// <returns>the matched operator token, or an Invalid token if none matched</returns>
        private TokenResult SeachOperators (ref string exp, ICollection operators)
        {
            TokenResult token = new TokenResult(false);

            foreach (string op in operators)
            {
                if (exp.StartsWith(op))
                {
                    token = new TokenResult(op);
                    exp = exp.Substring(op.Length);
                    if (token.OpToken == OperatorTokens.Invalid)
                    {
                        throw new Exception("Bad code in GetToken()");
                    }
                    break;
                }
            }

            return token;
        }

        /// <summary>
        /// Obtains the next token, given the current state and the expression.
        /// </summary>
        /// <param name="exp">the remaining portion of the infix expression to translate into a postfix expression</param>
        /// <param name="state">the state determines what types of tokens can be accepted.</param>
        /// <returns>the next token</returns>
        private TokenResult GetToken(ref string exp, RelExpStates state)
        {
            TokenResult token = new TokenResult(false);

            bool searchOps1 = false;
            bool searchOps2 = false;
            bool searchOps3 = false;
            bool parsePropName = false;
            bool parsePropValue = false;

            switch (state)
            {
                case RelExpStates.unknown:
                    //look for everything
                    searchOps1 = true;
                    searchOps3 = true;
                    parsePropName = true;
                    break;
                case RelExpStates.expectOp:
                    //look for relop, stringop, derived-op
                    searchOps2 = true;
                    break;
                case RelExpStates.expectValue:
                    parsePropValue = true;
                    break;
            }

            //remove any preceding whitespace chars
            exp = exp.Trim();

            if (exp != "")
            {
                // Try operator sets in priority order; first match wins.
                if ((searchOps1) && (token.TokenType == Tokens.Invalid))
                {
                    token = this.SeachOperators(ref exp, Operators1);
                }
                if ((searchOps2) && (token.TokenType == Tokens.Invalid))
                {
                    token = this.SeachOperators(ref exp, Operators2);
                }
                if ((searchOps3) && (token.TokenType == Tokens.Invalid))
                {
                    token = this.SeachOperators(ref exp, Operators3);
                }

                if (token.TokenType == Tokens.Invalid)
                {
                    if (parsePropValue)
                    {
                        //it must be an operand... so parse the next whitespace delimited string or
                        //extract the next string enclosed by quotes
                        if ((exp.StartsWith("true")) || (exp.StartsWith("false")))
                        {
                            // This is a value for an existence-op operation
                            token = new TokenResult(exp, false);
                            exp = exp.Substring(exp.Length);
                        }
                        else if (exp.StartsWith("\""))
                        {
                            // This is a value operand that is delimited
                            // by another double-quote
                            int endQuote = 1;
                            int escape = 0;
                            bool endQuoteFound = false;

                            // find the next end-quote that is not
                            // an escaped end-quote.
                            while (!endQuoteFound)
                            {
                                endQuote = exp.IndexOf("\"", endQuote);
                                escape = exp.IndexOf("\\\"", escape);

                                if (
                                    (escape < 0) ||
                                    (escape == endQuote - 1) ||
                                    (escape > endQuote)
                                    )
                                {
                                    endQuoteFound = true;
                                }
                            }

                            if (endQuote <= 0)
                            {
                                StringBuilder msg = new StringBuilder(exp.Length);
                                msg.AppendFormat("Invalid Args: Unterminated end-quote in SearchCriteria, near \"{0}\"", exp);
                                throw new UPnPCustomException(402, msg.ToString());
                            }

                            // Strip the delimiting quotes, then unescape \\ and \".
                            string unescaped = exp.Substring(1, endQuote-1);
                            string escaped = unescaped.Replace("\\\\", "\\").Replace("\\\"", "\"");
                            token = new TokenResult(escaped, false);
                            exp = exp.Substring(endQuote+1);
                        }
                        else
                        {
                            // Assume the CP provided a white-space delimited value without quotes
                            //int endPos = exp.IndexOf(" ");
                            int endPos = this.FindNextIndexOf(exp, WHITESPACESANDENDPAREN);
                            string str = exp;
                            if (endPos > 0)
                            {
                                str = exp.Substring(0, endPos);
                            }
                            token = new TokenResult(str, false);
                            exp = exp.Substring(str.Length);
                        }
                    }
                    else
                    {
                        // This is a property name, that is delimited by
                        // a whitespace.
                        //int endPos = exp.IndexOf(" ");
                        int endPos = this.FindNextIndexOf(exp, WHITESPACES);
                        if (endPos > 0)
                        {
                            string prop = exp.Substring(0, endPos);
                            token = new TokenResult(prop, true);
                            exp = exp.Substring(prop.Length);
                        }
                        else
                        {
                            throw new Error_MalformedSearchCriteria("Property name has not been properly delimited.");
                        }
                    }
                }
            }

            if ( (token.TokenType == Tokens.Invalid) )
            {
                throw new UPnPCustomException(402, "Invalid Args: Invalid SearchCriteria string.");
            }

            return token;
        }

        /// <summary>
        /// Returns the smallest index in <paramref name="exp"/> of any character
        /// in <paramref name="chars"/>, or -1 when none occurs.
        /// </summary>
        private int FindNextIndexOf(string exp, char[] chars)
        {
            int pos = -1;
            foreach (char c in chars)
            {
                int p;
                if (pos > 0)
                {
                    // An earlier match exists; only search before it.
                    p = exp.IndexOf(c, 0, pos);
                }
                else if (pos != 0)
                {
                    p = exp.IndexOf(c, 0);
                }
                else
                {
                    // A match at index 0 cannot be beaten.
                    break;
                }

                if (p >= 0)
                {
                    if (pos == -1)
                    {
                        pos = p;
                    }
                    else
                    {
                        if (p < pos)
                        {
                            pos = p;
                        }
                    }
                }
            }
            return pos;
        }

        // Delimiter sets for token scanning. NOTE(review): these include the
        // double-quote (0x22) and asterisk (0x2A) in addition to true whitespace;
        // presumably intentional for tokenizing quoted values — confirm.
        private static char[] WHITESPACES = { '\x09', '\x0A', '\x0B', '\x0C', '\x0D', '\x20', '\x22', '\x2A' };
        private static char[] WHITESPACESANDENDPAREN = { '\x09', '\x0A', '\x0B', '\x0C', '\x0D', '\x20', '\x22', '\x2A', ')' };

        /// <summary>
        /// An empty expression matches all items.
        /// </summary>
        private bool m_AlwaysMatch = false;

        /// <summary>
        /// Contains the postfix expression as a queue of tokens.
        /// </summary>
        private Queue m_Postfix;

        /// <summary>
        /// Hashtable with a mapping of property/attribute names as keys,
        /// to TagExtractor objects.
        /// </summary>
        private Hashtable m_PE = new Hashtable();

        /// <summary>
        /// Enumerates the different types of tokens in the infix expression.
        /// </summary>
        private enum Tokens
        {
            Invalid,
            PropertyName,
            PropertyValue,
            Operator,
        }

        /// <summary>
        /// Set of operators with highest order are parenthetical ops.
        /// </summary>
        private static string[] Operators1 = { "(", ")" };

        /// <summary>
        /// Second highest order of operators are relational, string, and existence operators
        /// </summary>
        private static string[] Operators2 = { "=", "!=", "<", "<=", ">", ">=", "contains", "doesNotContain", "exists", "derivedfrom"};

        /// <summary>
        /// logical operators are the lowest order of operators.
        /// </summary>
        private static string[] Operators3 = { "and", "or" };

        /// <summary>
        /// Complete set of operators. The order of these operators must match the order
        /// of each of the operator sets in OperatorsX.
        /// </summary>
        private static string[] Operators =
        {
            "(", ")",
            "=", "!=", "<", "<=", ">", ">=", "contains", "doesNotContain", "exists", "derivedfrom",
            "and", "or",
        };

        /// <summary>
        /// Enumeration of the operators. The order of these operators matter,
        /// as the indices map directly into the Operator string array.
        /// </summary>
        private enum OperatorTokens
        {
            LParen = 0,
            RParen,
            Equal,
            NotEqual,
            LessThan,
            LessThanEqualTo,
            GreaterThan,
            GreaterThanEqualTo,
            Contains,
            DoesNotContain,
            Exists,
            DerivedFrom,
            And,
            Or,
            Invalid,
        }

        /// <summary>
        /// Used for token-delimited textparsing.
        /// </summary>
        private static DText DT = new DText();

        /// <summary>
        /// Used for obtaining the attribute and tag names of standard metadata properties.
        /// </summary>
        private static Tags T = Tags.GetInstance();

        /// <summary>
        /// Structure is used when extracting a token from the infix expression
        /// while translating into a postfix expression.
        /// </summary>
        private struct TokenResult
        {
            /// <summary>
            /// The type of token.
            /// </summary>
            public readonly Tokens TokenType;

            /// <summary>
            /// Data associated with the token. Used only when
            /// the token is a property name or value.
            /// </summary>
            public readonly string Data;

            /// <summary>
            /// The operator value, if the token is an operator.
            /// </summary>
            public readonly OperatorTokens OpToken;

            /// <summary>
            /// The precedence of the operator. 10,20,30.
            /// </summary>
            public readonly int Precedence;

            /// <summary>
            /// Default constructor - considered a parameterless constructor.
            /// </summary>
            /// <param name="ignored">This parameter is ignored.</param>
            public TokenResult(bool ignored)
            {
                TokenType = Tokens.Invalid;
                Data = "";
                OpToken = OperatorTokens.Invalid;
                Precedence = -1;
            }

            /// <summary>
            /// Used when the token is a property name or property value.
            /// </summary>
            /// <param name="data">the string value of the propery name or value</param>
            /// <param name="dataIsProperty">true, if a property name</param>
            public TokenResult(string data, bool dataIsProperty)
            {
                Data = data.Trim();
                if (dataIsProperty)
                {
                    TokenType = Tokens.PropertyName;
                }
                else
                {
                    TokenType = Tokens.PropertyValue;
                }
                OpToken = OperatorTokens.Invalid;
                Precedence = -1;
            }

            /// <summary>
            /// Used when the token is an operator. Maps the operator string into
            /// the OperatorTokens enumeration and assigns a precedence of
            /// 30 (parens), 20 (relational/string/existence), or 10 (logical).
            /// </summary>
            /// <param name="operatorString">the string representation of the operator</param>
            public TokenResult(string operatorString)
            {
                TokenType = Tokens.Operator;
                Data = "";
                OpToken = OperatorTokens.Invalid;
                Precedence = -1;

                int i;

                i=0;
                foreach (string op in Operators1)
                {
                    if (String.Compare(op, operatorString) == 0)
                    {
                        OpToken = (OperatorTokens) ((int) OperatorTokens.LParen + i) ;
                        Precedence = 30;
                        break;
                    }
                    i++;
                }

                if (OpToken == OperatorTokens.Invalid)
                {
                    i=0;
                    foreach (string op in Operators2)
                    {
                        if (String.Compare(op, operatorString) == 0)
                        {
                            OpToken = (OperatorTokens) ((int) OperatorTokens.Equal + i) ;
                            Precedence = 20;
                            break;
                        }
                        i++;
                    }
                }

                if (OpToken == OperatorTokens.Invalid)
                {
                    i=0;
                    foreach (string op in Operators3)
                    {
                        if (String.Compare(op, operatorString) == 0)
                        {
                            OpToken = (OperatorTokens) ((int) OperatorTokens.And + i) ;
                            Precedence = 10;
                            break;
                        }
                        i++;
                    }
                }
            }
        }
    }
}
// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.

using System;
using System.IO;
using System.Threading;
using ManagedBass;
using ManagedBass.Fx;
using osu.Framework.IO;
using System.Threading.Tasks;
using osu.Framework.Audio.Callbacks;

namespace osu.Framework.Audio.Track
{
    /// <summary>
    /// A <see cref="Track"/> implementation backed by the BASS audio library,
    /// supporting tempo/frequency adjustment, reverse playback and seeking.
    /// </summary>
    public sealed class TrackBass : Track, IBassAudio
    {
        public const int BYTES_PER_SAMPLE = 4;

        // Buffered wrapper around the source stream; owned and disposed by this track.
        private AsyncBufferStream dataStream;

        /// <summary>
        /// Should this track only be used for preview purposes? This suggests it has not yet been fully loaded.
        /// </summary>
        public bool Preview { get; private set; }

        /// <summary>
        /// The handle for this track, if there is one.
        /// </summary>
        private int activeStream;

        /// <summary>
        /// The handle for adjusting tempo.
        /// </summary>
        private int tempoAdjustStream;

        /// <summary>
        /// This marks if the track is paused, or stopped to the end.
        /// </summary>
        private bool isPlayed;

        // Total stream length in bytes, as reported by BASS at load time.
        private long byteLength;

        /// <summary>
        /// The last position that a seek will succeed for.
        /// </summary>
        private double lastSeekablePosition;

        private FileCallbacks fileCallbacks;

        private SyncCallback stopCallback;
        private SyncCallback endCallback;

        private volatile bool isLoaded;

        public override bool IsLoaded => isLoaded;

        /// <summary>
        /// Constructs a new <see cref="TrackBass"/> from provided audio data.
        /// </summary>
        /// <param name="data">The sample data stream.</param>
        /// <param name="quick">If true, the track will not be fully loaded, and should only be used for preview purposes.  Defaults to false.</param>
        public TrackBass(Stream data, bool quick = false)
        {
            if (data == null)
                throw new ArgumentNullException(nameof(data));

            // todo: support this internally to match the underlying Track implementation (which can support this).
            const float tempo_minimum_supported = 0.05f;

            AggregateTempo.ValueChanged += t =>
            {
                if (t.NewValue < tempo_minimum_supported)
                    throw new ArgumentException($"{nameof(TrackBass)} does not support {nameof(Tempo)} specifications below {tempo_minimum_supported}. Use {nameof(Frequency)} instead.");
            };

            EnqueueAction(() =>
            {
                Preview = quick;

                activeStream = prepareStream(data, quick);

                // will be -1 in case of an error
                double seconds = Bass.ChannelBytes2Seconds(activeStream, byteLength = Bass.ChannelGetLength(activeStream));

                bool success = seconds >= 0;

                if (success)
                {
                    Length = seconds * 1000;

                    // Bass does not allow seeking to the end of the track, so the last available position is 1 sample before.
                    lastSeekablePosition = Bass.ChannelBytes2Seconds(activeStream, byteLength - BYTES_PER_SAMPLE) * 1000;

                    Bass.ChannelGetAttribute(activeStream, ChannelAttribute.Frequency, out float frequency);
                    initialFrequency = frequency;
                    bitrate = (int)Bass.ChannelGetAttribute(activeStream, ChannelAttribute.Bitrate);

                    stopCallback = new SyncCallback((a, b, c, d) => RaiseFailed());
                    endCallback = new SyncCallback((a, b, c, d) =>
                    {
                        if (!Looping)
                            RaiseCompleted();
                    });

                    Bass.ChannelSetSync(activeStream, SyncFlags.Stop, 0, stopCallback.Callback, stopCallback.Handle);
                    Bass.ChannelSetSync(activeStream, SyncFlags.End, 0, endCallback.Callback, endCallback.Handle);

                    isLoaded = true;

                    // An amplitude processor may have been created before loading completed; point it at the real stream.
                    bassAmplitudeProcessor?.SetChannel(activeStream);
                }
            });

            InvalidateState();
        }

        /// <summary>
        /// Creates the BASS stream chain for this track. For non-preview tracks this is a
        /// decode stream wrapped by a tempo stream wrapped by a reverse stream, so that
        /// tempo and direction can be adjusted independently of frequency.
        /// </summary>
        private int prepareStream(Stream data, bool quick)
        {
            //encapsulate incoming stream with async buffer if it isn't already.
            dataStream = data as AsyncBufferStream ?? new AsyncBufferStream(data, quick ? 8 : -1);

            fileCallbacks = new FileCallbacks(new DataStreamFileProcedures(dataStream));

            BassFlags flags = Preview ? 0 : BassFlags.Decode | BassFlags.Prescan;
            int stream = Bass.CreateStream(StreamSystem.NoBuffer, flags, fileCallbacks.Callbacks, fileCallbacks.Handle);

            if (!Preview)
            {
                // We assign the BassFlags.Decode streams to the device "bass_nodevice" to prevent them from getting
                // cleaned up during a Bass.Free call. This is necessary for seamless switching between audio devices.
                // Further, we provide the flag BassFlags.FxFreeSource such that freeing the stream also frees
                // all parent decoding streams.
                const int bass_nodevice = 0x20000;

                Bass.ChannelSetDevice(stream, bass_nodevice);
                tempoAdjustStream = BassFx.TempoCreate(stream, BassFlags.Decode | BassFlags.FxFreeSource);
                Bass.ChannelSetDevice(tempoAdjustStream, bass_nodevice);
                stream = BassFx.ReverseCreate(tempoAdjustStream, 5f, BassFlags.Default | BassFlags.FxFreeSource);

                Bass.ChannelSetAttribute(stream, ChannelAttribute.TempoUseQuickAlgorithm, 1);
                Bass.ChannelSetAttribute(stream, ChannelAttribute.TempoOverlapMilliseconds, 4);
                Bass.ChannelSetAttribute(stream, ChannelAttribute.TempoSequenceMilliseconds, 30);
            }

            return stream;
        }

        /// <summary>
        /// Returns whether the playback state is considered to be running or not.
        /// This will only return true for <see cref="PlaybackState.Playing"/> and <see cref="PlaybackState.Stalled"/>.
        /// </summary>
        private static bool isRunningState(PlaybackState state) => state == PlaybackState.Playing || state == PlaybackState.Stalled;

        void IBassAudio.UpdateDevice(int deviceIndex)
        {
            Bass.ChannelSetDevice(activeStream, deviceIndex);
            BassUtils.CheckFaulted(true);

            // Bass may leave us in an invalid state after the output device changes (this is true for "No sound" device)
            // if the observed state was playing before change, we should force things into a good state.
            if (isPlayed)
            {
                // While on windows, changing to "No sound" changes the playback state correctly,
                // on macOS it is left in a playing-but-stalled state. Forcefully stopping first fixes this.
                stopInternal();
                startInternal();
            }
        }

        // Lazily created when CurrentAmplitudes is first queried.
        private BassAmplitudeProcessor bassAmplitudeProcessor;

        protected override void UpdateState()
        {
            var running = isRunningState(Bass.ChannelIsActive(activeStream));

            var bytePosition = Bass.ChannelGetPosition(activeStream);

            // because device validity check isn't done frequently, when switching to "No sound" device,
            // there will be a brief time where this track will be stopped, before we resume it manually (see comments in UpdateDevice(int).)
            // this makes us appear to be playing, even if we may not be.
            isRunning = running || (isPlayed && bytePosition != byteLength);

            Interlocked.Exchange(ref currentTime, Bass.ChannelBytes2Seconds(activeStream, bytePosition) * 1000);

            bassAmplitudeProcessor?.Update();

            base.UpdateState();
        }

        protected override void Dispose(bool disposing)
        {
            if (activeStream != 0)
            {
                isRunning = false;
                Bass.ChannelStop(activeStream);
                Bass.StreamFree(activeStream);
            }

            activeStream = 0;

            dataStream?.Dispose();
            dataStream = null;

            fileCallbacks?.Dispose();
            fileCallbacks = null;

            stopCallback?.Dispose();
            stopCallback = null;

            endCallback?.Dispose();
            endCallback = null;

            base.Dispose(disposing);
        }

        public override bool IsDummyDevice => false;

        public override void Stop()
        {
            base.Stop();

            StopAsync().Wait();
        }

        public Task StopAsync() => EnqueueAction(() =>
        {
            stopInternal();
            isPlayed = false;
        });

        private bool stopInternal() => isRunningState(Bass.ChannelIsActive(activeStream)) && Bass.ChannelPause(activeStream);

        // Playback direction: 1 forwards, -1 backwards (driven by negative frequency).
        private int direction;

        private void setDirection(bool reverse)
        {
            direction = reverse ? -1 : 1;
            Bass.ChannelSetAttribute(activeStream, ChannelAttribute.ReverseDirection, direction);
        }

        public override void Start()
        {
            base.Start();

            StartAsync().Wait();
        }

        public Task StartAsync() => EnqueueAction(() =>
        {
            if (startInternal())
                isPlayed = true;
        });

        private bool startInternal()
        {
            // Bass will restart the track if it has reached its end. This behavior isn't desirable so block locally.
            if (Bass.ChannelGetPosition(activeStream) == byteLength)
                return false;

            return Bass.ChannelPlay(activeStream);
        }

        public override bool Seek(double seek) => SeekAsync(seek).Result;

        public async Task<bool> SeekAsync(double seek)
        {
            // At this point the track may not yet be loaded which is indicated by a 0 length.
            // In that case we still want to return true, hence the conservative length.
            double conservativeLength = Length == 0 ? double.MaxValue : lastSeekablePosition;
            double conservativeClamped = Math.Clamp(seek, 0, conservativeLength);

            await EnqueueAction(() =>
            {
                double clamped = Math.Clamp(seek, 0, Length);

                long pos = Bass.ChannelSeconds2Bytes(activeStream, clamped / 1000d);

                if (pos != Bass.ChannelGetPosition(activeStream))
                    Bass.ChannelSetPosition(activeStream, pos);
            });

            return conservativeClamped == seek;
        }

        // Current playback position in milliseconds, updated by UpdateState.
        private double currentTime;

        public override double CurrentTime => currentTime;

        private volatile bool isRunning;

        public override bool IsRunning => isRunning;

        internal override void OnStateChanged()
        {
            base.OnStateChanged();

            setDirection(AggregateFrequency.Value < 0);

            Bass.ChannelSetAttribute(activeStream, ChannelAttribute.Volume, AggregateVolume.Value);
            Bass.ChannelSetAttribute(activeStream, ChannelAttribute.Pan, AggregateBalance.Value);
            Bass.ChannelSetAttribute(activeStream, ChannelAttribute.Frequency, bassFreq);
            Bass.ChannelSetAttribute(tempoAdjustStream, ChannelAttribute.Tempo, (Math.Abs(AggregateTempo.Value) - 1) * 100);
        }

        // Sample rate of the source as reported by BASS at load time.
        private volatile float initialFrequency;

        // Effective playback frequency, clamped to BASS's supported range.
        private int bassFreq => (int)Math.Clamp(Math.Abs(initialFrequency * AggregateFrequency.Value), 100, 100000);

        private volatile int bitrate;

        public override int? Bitrate => bitrate;

        public override ChannelAmplitudes CurrentAmplitudes => (bassAmplitudeProcessor ??= new BassAmplitudeProcessor(activeStream)).CurrentAmplitudes;
    }
}
using System;
using System.Collections.Generic;
using System.Data.SqlClient;
using System.Data;
using VotingInfo.Database.Contracts;
using VotingInfo.Database.Contracts.Data;

///////////////////////////////////////////////////////////
//Do not modify this file. Use a partial class to extend.//
///////////////////////////////////////////////////////////
// This file contains static implementations of the ElectionCandidateLogic
// Add your own static methods by making a new partial class.
// You cannot override static methods, instead override the methods
// located in ElectionCandidateLogicBase by making a partial class of ElectionCandidateLogic
// and overriding the base methods.

namespace VotingInfo.Database.Logic.Data
{
    // Static convenience facade: every *Now method creates a throwaway instance and delegates to
    // the corresponding instance method (defined in ElectionCandidateLogicBase / the other partial).
    public partial class ElectionCandidateLogic
    {
        //Put your code in a separate file. This is auto generated.

        /// <summary>
        /// Run ElectionCandidate_Insert.
        /// </summary>
        /// <param name="fldContentInspectionId">Value for ContentInspectionId</param>
        /// <param name="fldCandidateId">Value for CandidateId</param>
        /// <param name="fldElectionId">Value for ElectionId</param>
        /// <param name="fldIsWinner">Value for IsWinner</param>
        /// <param name="fldReportedVoteCount">Value for ReportedVoteCount</param>
        /// <returns>The value returned by Insert — presumably the new identity; confirm in ElectionCandidateLogicBase.</returns>
        // NOTE(review): fldReportedVoteCount is typed bool despite its "Count" name — looks like a
        // schema/generator oddity; verify against the ElectionCandidate table definition.
        public static int? InsertNow(int fldContentInspectionId, int fldCandidateId, int fldElectionId, bool fldIsWinner, bool fldReportedVoteCount)
        {
            return (new ElectionCandidateLogic()).Insert(fldContentInspectionId, fldCandidateId, fldElectionId, fldIsWinner, fldReportedVoteCount);
        }

        /// <summary>
        /// Run ElectionCandidate_Insert.
        /// </summary>
        /// <param name="fldContentInspectionId">Value for ContentInspectionId</param>
        /// <param name="fldCandidateId">Value for CandidateId</param>
        /// <param name="fldElectionId">Value for ElectionId</param>
        /// <param name="fldIsWinner">Value for IsWinner</param>
        /// <param name="fldReportedVoteCount">Value for ReportedVoteCount</param>
        /// <param name="connection">The SqlConnection to use</param>
        /// <param name="transaction">The SqlTransaction to use</param>
        /// <returns>The value returned by Insert — presumably the new identity; confirm in ElectionCandidateLogicBase.</returns>
        public static int? InsertNow(int fldContentInspectionId, int fldCandidateId, int fldElectionId, bool fldIsWinner, bool fldReportedVoteCount, SqlConnection connection, SqlTransaction transaction)
        {
            return (new ElectionCandidateLogic()).Insert(fldContentInspectionId, fldCandidateId, fldElectionId, fldIsWinner, fldReportedVoteCount, connection, transaction);
        }

        /// <summary>
        /// Insert by providing a populated data row container
        /// </summary>
        /// <param name="row">The table row data to use</param>
        /// <returns>The number of rows affected.</returns>
        public static int InsertNow(ElectionCandidateContract row)
        {
            return (new ElectionCandidateLogic()).Insert(row);
        }

        /// <summary>
        /// Insert by providing a populated data contract
        /// </summary>
        /// <param name="row">The table row data to use</param>
        /// <param name="connection">The SqlConnection to use</param>
        /// <param name="transaction">The SqlTransaction to use</param>
        /// <returns>The number of rows affected.</returns>
        public static int InsertNow(ElectionCandidateContract row, SqlConnection connection, SqlTransaction transaction)
        {
            return (new ElectionCandidateLogic()).Insert(row, connection, transaction);
        }

        /// <summary>
        /// Insert the rows in bulk, uses the same connection (faster).
        /// </summary>
        /// <param name="rows">The table rows to Insert</param>
        /// <returns>The number of rows affected.</returns>
        public static int InsertAllNow(List<ElectionCandidateContract> rows)
        {
            return (new ElectionCandidateLogic()).InsertAll(rows);
        }

        /// <summary>
        /// Insert the rows in bulk, uses the same connection (faster), in a provided transaction scope.
        /// </summary>
        /// <param name="rows">The table rows to Insert</param>
        /// <param name="connection">The SqlConnection to use</param>
        /// <param name="transaction">The SqlTransaction to use</param>
        /// <returns>The number of rows affected.</returns>
        public static int InsertAllNow(List<ElectionCandidateContract> rows, SqlConnection connection, SqlTransaction transaction)
        {
            return (new ElectionCandidateLogic()).InsertAll(rows, connection, transaction);
        }

        /// <summary>
        /// Run ElectionCandidate_Update.
        /// </summary>
        /// <param name="fldElectionCandidateId">Value for ElectionCandidateId</param>
        /// <param name="fldContentInspectionId">Value for ContentInspectionId</param>
        /// <param name="fldCandidateId">Value for CandidateId</param>
        /// <param name="fldElectionId">Value for ElectionId</param>
        /// <param name="fldIsWinner">Value for IsWinner</param>
        /// <param name="fldReportedVoteCount">Value for ReportedVoteCount</param>
        /// <returns>The number of rows affected.</returns>
        public static int UpdateNow(int fldElectionCandidateId, int fldContentInspectionId, int fldCandidateId, int fldElectionId, bool fldIsWinner, bool fldReportedVoteCount)
        {
            return (new ElectionCandidateLogic()).Update(fldElectionCandidateId, fldContentInspectionId, fldCandidateId, fldElectionId, fldIsWinner, fldReportedVoteCount);
        }

        /// <summary>
        /// Run ElectionCandidate_Update.
        /// </summary>
        /// <param name="fldElectionCandidateId">Value for ElectionCandidateId</param>
        /// <param name="fldContentInspectionId">Value for ContentInspectionId</param>
        /// <param name="fldCandidateId">Value for CandidateId</param>
        /// <param name="fldElectionId">Value for ElectionId</param>
        /// <param name="fldIsWinner">Value for IsWinner</param>
        /// <param name="fldReportedVoteCount">Value for ReportedVoteCount</param>
        /// <param name="connection">The SqlConnection to use</param>
        /// <param name="transaction">The SqlTransaction to use</param>
        /// <returns>The number of rows affected.</returns>
        public static int UpdateNow(int fldElectionCandidateId, int fldContentInspectionId, int fldCandidateId, int fldElectionId, bool fldIsWinner, bool fldReportedVoteCount, SqlConnection connection, SqlTransaction transaction)
        {
            return (new ElectionCandidateLogic()).Update(fldElectionCandidateId, fldContentInspectionId, fldCandidateId, fldElectionId, fldIsWinner, fldReportedVoteCount, connection, transaction);
        }

        /// <summary>
        /// Update by providing a populated data row container
        /// </summary>
        /// <param name="row">The table row data to use</param>
        /// <returns>The number of rows affected.</returns>
        public static int UpdateNow(ElectionCandidateContract row)
        {
            return (new ElectionCandidateLogic()).Update(row);
        }

        /// <summary>
        /// Update by providing a populated data contract
        /// </summary>
        /// <param name="row">The table row data to use</param>
        /// <param name="connection">The SqlConnection to use</param>
        /// <param name="transaction">The SqlTransaction to use</param>
        /// <returns>The number of rows affected.</returns>
        public static int UpdateNow(ElectionCandidateContract row, SqlConnection connection, SqlTransaction transaction)
        {
            return (new ElectionCandidateLogic()).Update(row, connection, transaction);
        }

        /// <summary>
        /// Update the rows in bulk, uses the same connection (faster).
        /// </summary>
        /// <param name="rows">The table rows to Update</param>
        /// <returns>The number of rows affected.</returns>
        public static int UpdateAllNow(List<ElectionCandidateContract> rows)
        {
            return (new ElectionCandidateLogic()).UpdateAll(rows);
        }

        /// <summary>
        /// Update the rows in bulk, uses the same connection (faster), in a provided transaction scope.
        /// </summary>
        /// <param name="rows">The table rows to Update</param>
        /// <param name="connection">The SqlConnection to use</param>
        /// <param name="transaction">The SqlTransaction to use</param>
        /// <returns>The number of rows affected.</returns>
        public static int UpdateAllNow(List<ElectionCandidateContract> rows, SqlConnection connection, SqlTransaction transaction)
        {
            return (new ElectionCandidateLogic()).UpdateAll(rows, connection, transaction);
        }

        /// <summary>
        /// Run ElectionCandidate_Delete.
        /// </summary>
        /// <param name="fldElectionCandidateId">Value for ElectionCandidateId</param>
        /// <returns>The number of rows affected.</returns>
        public static int DeleteNow(int fldElectionCandidateId)
        {
            return (new ElectionCandidateLogic()).Delete(fldElectionCandidateId);
        }

        /// <summary>
        /// Run ElectionCandidate_Delete.
        /// </summary>
        /// <param name="fldElectionCandidateId">Value for ElectionCandidateId</param>
        /// <param name="connection">The SqlConnection to use</param>
        /// <param name="transaction">The SqlTransaction to use</param>
        /// <returns>The number of rows affected.</returns>
        public static int DeleteNow(int fldElectionCandidateId, SqlConnection connection, SqlTransaction transaction)
        {
            return (new ElectionCandidateLogic()).Delete(fldElectionCandidateId, connection, transaction);
        }

        /// <summary>
        /// Delete by providing a populated data row container
        /// </summary>
        /// <param name="row">The table row data to use</param>
        /// <returns>The number of rows affected.</returns>
        public static int DeleteNow(ElectionCandidateContract row)
        {
            return (new ElectionCandidateLogic()).Delete(row);
        }

        /// <summary>
        /// Delete by providing a populated data contract
        /// </summary>
        /// <param name="row">The table row data to use</param>
        /// <param name="connection">The SqlConnection to use</param>
        /// <param name="transaction">The SqlTransaction to use</param>
        /// <returns>The number of rows affected.</returns>
        public static int DeleteNow(ElectionCandidateContract row, SqlConnection connection, SqlTransaction transaction)
        {
            return (new ElectionCandidateLogic()).Delete(row, connection, transaction);
        }

        /// <summary>
        /// Delete the rows in bulk, uses the same connection (faster).
        /// </summary>
        /// <param name="rows">The table rows to Delete</param>
        /// <returns>The number of rows affected.</returns>
        public static int DeleteAllNow(List<ElectionCandidateContract> rows)
        {
            return (new ElectionCandidateLogic()).DeleteAll(rows);
        }

        /// <summary>
        /// Delete the rows in bulk, uses the same connection (faster), in a provided transaction scope.
        /// </summary>
        /// <param name="rows">The table rows to Delete</param>
        /// <param name="connection">The SqlConnection to use</param>
        /// <param name="transaction">The SqlTransaction to use</param>
        /// <returns>The number of rows affected.</returns>
        public static int DeleteAllNow(List<ElectionCandidateContract> rows, SqlConnection connection, SqlTransaction transaction)
        {
            return (new ElectionCandidateLogic()).DeleteAll(rows, connection, transaction);
        }

        /// <summary>
        /// Determine if the table contains a row with the existing values
        /// </summary>
        /// <param name="fldElectionCandidateId">Value for ElectionCandidateId</param>
        /// <returns>True, if the values exist, or false.</returns>
        public static bool ExistsNow(int fldElectionCandidateId)
        {
            return (new ElectionCandidateLogic()).Exists(fldElectionCandidateId);
        }

        /// <summary>
        /// Determine if the table contains a row with the existing values
        /// </summary>
        /// <param name="fldElectionCandidateId">Value for ElectionCandidateId</param>
        /// <param name="connection">The SqlConnection to use</param>
        /// <param name="transaction">The SqlTransaction to use</param>
        /// <returns>True, if the values exist, or false.</returns>
        public static bool ExistsNow(int fldElectionCandidateId, SqlConnection connection, SqlTransaction transaction)
        {
            return (new ElectionCandidateLogic()).Exists(fldElectionCandidateId, connection, transaction);
        }

        /// <summary>
        /// Run ElectionCandidate_SelectAll, and return results as a list of ElectionCandidateRow.
        /// </summary>
        /// <returns>A collection of ElectionCandidateRow.</returns>
        public static List<ElectionCandidateContract> SelectAllNow()
        {
            var driver = new ElectionCandidateLogic();
            driver.SelectAll();
            return driver.Results;
        }

        /// <summary>
        /// Run ElectionCandidate_SelectAll, and return results as a list of ElectionCandidateRow.
        /// </summary>
        /// <param name="connection">The SqlConnection to use</param>
        /// <param name="transaction">The SqlTransaction to use</param>
        /// <returns>A collection of ElectionCandidateRow.</returns>
        public static List<ElectionCandidateContract> SelectAllNow(SqlConnection connection, SqlTransaction transaction)
        {
            var driver = new ElectionCandidateLogic();
            driver.SelectAll(connection, transaction);
            return driver.Results;
        }

        /// <summary>
        /// Run ElectionCandidate_SelectBy_ElectionCandidateId, and return results as a list of ElectionCandidateRow.
        /// </summary>
        /// <param name="fldElectionCandidateId">Value for ElectionCandidateId</param>
        /// <returns>A collection of ElectionCandidateRow.</returns>
        public static List<ElectionCandidateContract> SelectBy_ElectionCandidateIdNow(int fldElectionCandidateId)
        {
            var driver = new ElectionCandidateLogic();
            driver.SelectBy_ElectionCandidateId(fldElectionCandidateId);
            return driver.Results;
        }

        /// <summary>
        /// Run ElectionCandidate_SelectBy_ElectionCandidateId, and return results as a list of ElectionCandidateRow.
        /// </summary>
        /// <param name="fldElectionCandidateId">Value for ElectionCandidateId</param>
        /// <param name="connection">The SqlConnection to use</param>
        /// <param name="transaction">The SqlTransaction to use</param>
        /// <returns>A collection of ElectionCandidateRow.</returns>
        public static List<ElectionCandidateContract> SelectBy_ElectionCandidateIdNow(int fldElectionCandidateId, SqlConnection connection, SqlTransaction transaction)
        {
            var driver = new ElectionCandidateLogic();
            driver.SelectBy_ElectionCandidateId(fldElectionCandidateId, connection, transaction);
            return driver.Results;
        }

        /// <summary>
        /// Run ElectionCandidate_SelectBy_ContentInspectionId, and return results as a list of ElectionCandidateRow.
        /// </summary>
        /// <param name="fldContentInspectionId">Value for ContentInspectionId</param>
        /// <returns>A collection of ElectionCandidateRow.</returns>
        public static List<ElectionCandidateContract> SelectBy_ContentInspectionIdNow(int fldContentInspectionId)
        {
            var driver = new ElectionCandidateLogic();
            driver.SelectBy_ContentInspectionId(fldContentInspectionId);
            return driver.Results;
        }

        /// <summary>
        /// Run ElectionCandidate_SelectBy_ContentInspectionId, and return results as a list of ElectionCandidateRow.
        /// </summary>
        /// <param name="fldContentInspectionId">Value for ContentInspectionId</param>
        /// <param name="connection">The SqlConnection to use</param>
        /// <param name="transaction">The SqlTransaction to use</param>
        /// <returns>A collection of ElectionCandidateRow.</returns>
        public static List<ElectionCandidateContract> SelectBy_ContentInspectionIdNow(int fldContentInspectionId, SqlConnection connection, SqlTransaction transaction)
        {
            var driver = new ElectionCandidateLogic();
            driver.SelectBy_ContentInspectionId(fldContentInspectionId, connection, transaction);
            return driver.Results;
        }

        /// <summary>
        /// Run ElectionCandidate_SelectBy_CandidateId, and return results as a list of ElectionCandidateRow.
        /// </summary>
        /// <param name="fldCandidateId">Value for CandidateId</param>
        /// <returns>A collection of ElectionCandidateRow.</returns>
        public static List<ElectionCandidateContract> SelectBy_CandidateIdNow(int fldCandidateId)
        {
            var driver = new ElectionCandidateLogic();
            driver.SelectBy_CandidateId(fldCandidateId);
            return driver.Results;
        }

        /// <summary>
        /// Run ElectionCandidate_SelectBy_CandidateId, and return results as a list of ElectionCandidateRow.
        /// </summary>
        /// <param name="fldCandidateId">Value for CandidateId</param>
        /// <param name="connection">The SqlConnection to use</param>
        /// <param name="transaction">The SqlTransaction to use</param>
        /// <returns>A collection of ElectionCandidateRow.</returns>
        public static List<ElectionCandidateContract> SelectBy_CandidateIdNow(int fldCandidateId, SqlConnection connection, SqlTransaction transaction)
        {
            var driver = new ElectionCandidateLogic();
            driver.SelectBy_CandidateId(fldCandidateId, connection, transaction);
            return driver.Results;
        }

        /// <summary>
        /// Run ElectionCandidate_SelectBy_ElectionId, and return results as a list of ElectionCandidateRow.
        /// </summary>
        /// <param name="fldElectionId">Value for ElectionId</param>
        /// <returns>A collection of ElectionCandidateRow.</returns>
        public static List<ElectionCandidateContract> SelectBy_ElectionIdNow(int fldElectionId)
        {
            var driver = new ElectionCandidateLogic();
            driver.SelectBy_ElectionId(fldElectionId);
            return driver.Results;
        }

        /// <summary>
        /// Run ElectionCandidate_SelectBy_ElectionId, and return results as a list of ElectionCandidateRow.
        /// </summary>
        /// <param name="fldElectionId">Value for ElectionId</param>
        /// <param name="connection">The SqlConnection to use</param>
        /// <param name="transaction">The SqlTransaction to use</param>
        /// <returns>A collection of ElectionCandidateRow.</returns>
        public static List<ElectionCandidateContract> SelectBy_ElectionIdNow(int fldElectionId, SqlConnection connection, SqlTransaction transaction)
        {
            var driver = new ElectionCandidateLogic();
            driver.SelectBy_ElectionId(fldElectionId, connection, transaction);
            return driver.Results;
        }

        /// <summary>
        /// Read all ElectionCandidate rows from the provided reader into the list structure of ElectionCandidateRows
        /// </summary>
        /// <param name="reader">The result of running a sql command.</param>
        /// <returns>A populated ElectionCandidateRows or an empty ElectionCandidateRows if there are no results.</returns>
        public static List<ElectionCandidateContract> ReadAllNow(SqlDataReader reader)
        {
            var driver = new ElectionCandidateLogic();
            driver.ReadAll(reader);
            return driver.Results;
        }

        /// <summary>
        /// Advance one, and read values into a ElectionCandidate
        /// </summary>
        /// <param name="reader">The result of running a sql command.</param>
        /// <returns>A ElectionCandidate or null if there are no results.</returns>
        public static ElectionCandidateContract ReadOneNow(SqlDataReader reader)
        {
            var driver = new ElectionCandidateLogic();
            return driver.ReadOne(reader) ? driver.Results[0] : null;
        }

        /// <summary>
        /// Saves the row, either by inserting (when the identity key is null) or by updating (identity key has value).
        /// </summary>
        /// <param name="row">The data to save</param>
        /// <returns>The number of rows affected.</returns>
        public static int SaveNow(ElectionCandidateContract row)
        {
            if(row.ElectionCandidateId == null)
            {
                return InsertNow(row);
            }
            else
            {
                return UpdateNow(row);
            }
        }

        /// <summary>
        /// Saves the row, either by inserting (when the identity key is null) or by updating (identity key has value).
        /// </summary>
        /// <param name="row">The data to save</param>
        /// <param name="connection">The SqlConnection to use</param>
        /// <param name="transaction">The SqlTransaction to use</param>
        /// <returns>The number of rows affected.</returns>
        public static int SaveNow(ElectionCandidateContract row, SqlConnection connection, SqlTransaction transaction)
        {
            if(row.ElectionCandidateId == null)
            {
                return InsertNow(row, connection, transaction);
            }
            else
            {
                return UpdateNow(row, connection, transaction);
            }
        }

        /// <summary>
        /// Save the rows in bulk, uses the same connection (faster).
        /// </summary>
        /// <param name="rows">The table rows to Save</param>
        /// <returns>The number of rows affected.</returns>
        public static int SaveAllNow(List<ElectionCandidateContract> rows)
        {
            return (new ElectionCandidateLogic()).SaveAll(rows);
        }

        /// <summary>
        /// Save the rows in bulk, uses the same connection (faster), in a provided transaction scope.
        /// </summary>
        /// <param name="rows">The table rows to Save</param>
        /// <param name="connection">The SqlConnection to use</param>
        /// <param name="transaction">The SqlTransaction to use</param>
        /// <returns>The number of rows affected.</returns>
        public static int SaveAllNow(List<ElectionCandidateContract> rows, SqlConnection connection, SqlTransaction transaction)
        {
            return (new ElectionCandidateLogic()).SaveAll(rows, connection, transaction);
        }
    }
}
using System;
using System.IO;
using System.Linq;
using GitVersion.BuildAgents;
using GitVersion.Common;
using GitVersion.Extensions;
using GitVersion.Helpers;
using GitVersion.Logging;
using GitVersion.Model.Configuration;
using Microsoft.Extensions.Options;

namespace GitVersion
{
    /// <summary>
    /// Prepares a git working directory for version calculation: resolves the current branch,
    /// optionally clones a dynamic repository, and normalizes the directory (remote tracking
    /// branches to local branches, detached HEAD resolution) when running on a build server.
    /// </summary>
    public class GitPreparer : IGitPreparer
    {
        private readonly ILog log;
        private readonly IEnvironment environment;
        private readonly IMutatingGitRepository repository;
        private readonly IOptions<GitVersionOptions> options;
        private readonly IGitRepositoryInfo repositoryInfo;
        private readonly IRepositoryStore repositoryStore;
        private readonly ICurrentBuildAgent buildAgent;

        // Retries git operations that fail because another process holds a lock file.
        private RetryAction<LockedFileException> retryAction;

        private const string DefaultRemoteName = "origin";

        public GitPreparer(ILog log, IEnvironment environment, ICurrentBuildAgent buildAgent, IOptions<GitVersionOptions> options,
            IMutatingGitRepository repository, IGitRepositoryInfo repositoryInfo, IRepositoryStore repositoryStore)
        {
            this.log = log ?? throw new ArgumentNullException(nameof(log));
            this.environment = environment ?? throw new ArgumentNullException(nameof(environment));
            this.repository = repository ?? throw new ArgumentNullException(nameof(repository));
            this.options = options ?? throw new ArgumentNullException(nameof(options));
            this.repositoryInfo = repositoryInfo ?? throw new ArgumentNullException(nameof(repositoryInfo));
            this.repositoryStore = repositoryStore ?? throw new ArgumentNullException(nameof(repositoryStore));
            // buildAgent is intentionally nullable: null means "not running on a build server".
            this.buildAgent = buildAgent;
            retryAction = new RetryAction<LockedFileException>();
        }

        /// <summary>Entry point: validates the .git directory and dispatches to PrepareInternal.</summary>
        public void Prepare()
        {
            var gitVersionOptions = options.Value;

            // Normalize if we are running on build server
            var normalizeGitDirectory = !gitVersionOptions.Settings.NoNormalize && buildAgent != null;
            var shouldCleanUpRemotes = buildAgent != null && buildAgent.ShouldCleanUpRemotes();

            var currentBranch = ResolveCurrentBranch();

            var dotGitDirectory = repositoryInfo.DotGitDirectory;
            var projectRoot = repositoryInfo.ProjectRootDirectory;

            log.Info($"Project root is: {projectRoot}");
            log.Info($"DotGit directory is: {dotGitDirectory}");

            if (string.IsNullOrEmpty(dotGitDirectory) || string.IsNullOrEmpty(projectRoot))
            {
                throw new Exception($"Failed to prepare or find the .git directory in path '{gitVersionOptions.WorkingDirectory}'.");
            }

            PrepareInternal(normalizeGitDirectory, currentBranch, shouldCleanUpRemotes);
        }

        // A TargetUrl means "dynamic repository" (clone it); otherwise normalize in place if requested.
        private void PrepareInternal(bool normalizeGitDirectory, string currentBranch, bool shouldCleanUpRemotes = false)
        {
            var gitVersionOptions = options.Value;
            if (!string.IsNullOrWhiteSpace(gitVersionOptions.RepositoryInfo.TargetUrl))
            {
                CreateDynamicRepository(currentBranch);
            }
            else
            {
                if (!normalizeGitDirectory) return;
                if (shouldCleanUpRemotes)
                {
                    CleanupDuplicateOrigin();
                }

                NormalizeGitDirectory(currentBranch, false);
            }
        }

        // Prefers the branch reported by the build agent over the configured target branch.
        private string ResolveCurrentBranch()
        {
            var gitVersionOptions = options.Value;
            var targetBranch = gitVersionOptions.RepositoryInfo.TargetBranch;
            if (buildAgent == null)
            {
                return targetBranch;
            }

            var isDynamicRepository = !string.IsNullOrWhiteSpace(gitVersionOptions.RepositoryInfo.DynamicRepositoryClonePath);
            var currentBranch = buildAgent.GetCurrentBranch(isDynamicRepository) ?? targetBranch;
            log.Info("Branch from build environment: " + currentBranch);

            return currentBranch;
        }

        // Removes every remote except "origin" (or the first remote when no "origin" exists).
        private void CleanupDuplicateOrigin()
        {
            var remoteToKeep = DefaultRemoteName;

            // check that we have a remote that matches defaultRemoteName if not take the first remote
            if (!repository.Remotes.Any(remote => remote.Name.Equals(DefaultRemoteName, StringComparison.InvariantCultureIgnoreCase)))
            {
                remoteToKeep = repository.Remotes.First().Name;
            }

            var duplicateRemotes = repository.Remotes
                .Where(remote => !remote.Name.Equals(remoteToKeep, StringComparison.InvariantCultureIgnoreCase))
                .Select(remote => remote.Name);

            // remove all remotes that are considered duplicates
            foreach (var remoteName in duplicateRemotes)
            {
                repository.Remotes.Remove(remoteName);
            }
        }

        // Clones (if needed) and normalizes a repository at the configured dynamic clone path.
        private void CreateDynamicRepository(string targetBranch)
        {
            var gitVersionOptions = options.Value;
            if (string.IsNullOrWhiteSpace(targetBranch))
            {
                throw new Exception("Dynamic Git repositories must have a target branch (/b)");
            }

            var gitDirectory = repositoryInfo.DynamicGitRepositoryPath;

            using (log.IndentLog($"Creating dynamic repository at '{gitDirectory}'"))
            {
                var authentication = gitVersionOptions.Authentication;
                if (!Directory.Exists(gitDirectory))
                {
                    CloneRepository(gitVersionOptions.RepositoryInfo.TargetUrl, gitDirectory, authentication);
                }
                else
                {
                    log.Info("Git repository already exists");
                }

                NormalizeGitDirectory(targetBranch, true);
            }
        }

        private void NormalizeGitDirectory(string targetBranch, bool isDynamicRepository)
        {
            using (log.IndentLog($"Normalizing git directory for branch '{targetBranch}'"))
            {
                // Normalize (download branches) before using the branch
                NormalizeGitDirectory(options.Value.Settings.NoFetch, targetBranch, isDynamicRepository);
            }
        }

        private void CloneRepository(string repositoryUrl, string gitDirectory, AuthenticationInfo auth)
        {
            using (log.IndentLog($"Cloning repository from url '{repositoryUrl}'"))
            {
                retryAction.Execute(() => repository.Clone(repositoryUrl, gitDirectory, auth));
            }
        }

        /// <summary>
        /// Normalization of a git directory turns all remote branches into local branches,
        /// turns pull request refs into a real branch and a few other things.
        /// This is designed to be run *only on the build server* which checks out repositories in different ways.
        /// It is not recommended to run normalization against a local repository
        /// </summary>
        private void NormalizeGitDirectory(bool noFetch, string currentBranchName, bool isDynamicRepository)
        {
            var authentication = options.Value.Authentication;

            // Need to ensure the HEAD does not move, this is essentially a BugCheck
            var expectedSha = repository.Head.Tip.Sha;
            var expectedBranchName = repository.Head.Name.Canonical;

            try
            {
                var remote = EnsureOnlyOneRemoteIsDefined();

                //If noFetch is enabled, then GitVersion will assume that the git repository is normalized before execution, so that fetching from remotes is not required.
                if (noFetch)
                {
                    log.Info("Skipping fetching, if GitVersion does not calculate your version as expected you might need to allow fetching or use dynamic repositories");
                }
                else
                {
                    var refSpecs = string.Join(", ", remote.FetchRefSpecs.Select(r => r.Specification));
                    log.Info($"Fetching from remote '{remote.Name}' using the following refspecs: {refSpecs}.");
                    retryAction.Execute(() => repository.Fetch(remote.Name, Enumerable.Empty<string>(), authentication, null));
                }

                EnsureLocalBranchExistsForCurrentBranch(remote, currentBranchName);
                CreateOrUpdateLocalBranchesFromRemoteTrackingOnes(remote.Name);

                var currentBranch = repositoryStore.FindBranch(currentBranchName);
                // Bug fix for https://github.com/GitTools/GitVersion/issues/1754, head maybe have been changed
                // if this is a dynamic repository. But only allow this in case the branches are different (branch switch)
                if (expectedSha != repository.Head.Tip.Sha &&
                    (isDynamicRepository || currentBranch is null || !repository.Head.Equals(currentBranch)))
                {
                    var newExpectedSha = repository.Head.Tip.Sha;
                    var newExpectedBranchName = repository.Head.Name.Canonical;

                    log.Info($"Head has moved from '{expectedBranchName} | {expectedSha}' => '{newExpectedBranchName} | {newExpectedSha}', allowed since this is a dynamic repository");

                    expectedSha = newExpectedSha;
                }

                var headSha = repository.Refs.Head.TargetIdentifier;

                if (!repository.IsHeadDetached)
                {
                    log.Info($"HEAD points at branch '{headSha}'.");
                    return;
                }

                log.Info($"HEAD is detached and points at commit '{headSha}'.");
                log.Info($"Local Refs:{System.Environment.NewLine}" + string.Join(System.Environment.NewLine, repository.Refs.FromGlob("*").Select(r => $"{r.Name.Canonical} ({r.TargetIdentifier})")));

                // In order to decide whether a fake branch is required or not, first check to see if any local branches have the same commit SHA of the head SHA.
                // If they do, go ahead and checkout that branch
                // If no, go ahead and check out a new branch, using the known commit SHA as the pointer
                var localBranchesWhereCommitShaIsHead = repository.Branches.Where(b => !b.IsRemote && b.Tip.Sha == headSha).ToList();

                var matchingCurrentBranch = !string.IsNullOrEmpty(currentBranchName)
                    ? localBranchesWhereCommitShaIsHead.SingleOrDefault(b => b.Name.Canonical.Replace("/heads/", "/") == currentBranchName.Replace("/heads/", "/"))
                    : null;
                if (matchingCurrentBranch != null)
                {
                    log.Info($"Checking out local branch '{currentBranchName}'.");
                    Checkout(matchingCurrentBranch.Name.Canonical);
                }
                else if (localBranchesWhereCommitShaIsHead.Count > 1)
                {
                    var branchNames = localBranchesWhereCommitShaIsHead.Select(r => r.Name.Canonical);
                    var csvNames = string.Join(", ", branchNames);
                    const string moveBranchMsg = "Move one of the branches along a commit to remove warning";

                    log.Warning($"Found more than one local branch pointing at the commit '{headSha}' ({csvNames}).");
                    var main = localBranchesWhereCommitShaIsHead.SingleOrDefault(n => n.Name.EquivalentTo(Config.MainBranchKey));
                    if (main != null)
                    {
                        log.Warning("Because one of the branches is 'main', will build main." + moveBranchMsg);
                        Checkout(Config.MainBranchKey);
                    }
                    else
                    {
                        var branchesWithoutSeparators = localBranchesWhereCommitShaIsHead.Where(b => !b.Name.Friendly.Contains('/') && !b.Name.Friendly.Contains('-')).ToList();
                        if (branchesWithoutSeparators.Count == 1)
                        {
                            var branchWithoutSeparator = branchesWithoutSeparators[0];
                            log.Warning($"Choosing {branchWithoutSeparator.Name.Canonical} as it is the only branch without / or - in it. " + moveBranchMsg);
                            Checkout(branchWithoutSeparator.Name.Canonical);
                        }
                        else
                        {
                            throw new WarningException("Failed to try and guess branch to use. " + moveBranchMsg);
                        }
                    }
                }
                else if (localBranchesWhereCommitShaIsHead.Count == 0)
                {
                    log.Info($"No local branch pointing at the commit '{headSha}'. Fake branch needs to be created.");
                    retryAction.Execute(() => repository.CreateBranchForPullRequestBranch(authentication));
                }
                else
                {
                    log.Info($"Checking out local branch 'refs/heads/{localBranchesWhereCommitShaIsHead[0]}'.");
                    Checkout(localBranchesWhereCommitShaIsHead[0].Name.Friendly);
                }
            }
            finally
            {
                if (repository.Head.Tip.Sha != expectedSha)
                {
                    if (environment.GetEnvironmentVariable("IGNORE_NORMALISATION_GIT_HEAD_MOVE") != "1")
                    {
                        // Whoa, HEAD has moved, it shouldn't have. We need to blow up because there is a bug in normalisation
                        throw new BugException($@"GitVersion has a bug, your HEAD has moved after repo normalisation.

To disable this error set an environmental variable called IGNORE_NORMALISATION_GIT_HEAD_MOVE to 1

Please run `git {GitExtensions.CreateGitLogArgs(100)}` and submit it along with your build log (with personal info removed) in a new issue at https://github.com/GitTools/GitVersion");
                    }
                }
            }
        }

        // Returns the single remote, adding an all-branches refspec if it is missing; throws for 0 or >1 remotes.
        private IRemote EnsureOnlyOneRemoteIsDefined()
        {
            var remotes = repository.Remotes;
            var howMany = remotes.Count();

            if (howMany == 1)
            {
                var remote = remotes.Single();
                log.Info($"One remote found ({remote.Name} -> '{remote.Url}').");
                if (remote.FetchRefSpecs.Any(r => r.Source == "refs/heads/*"))
                    return remote;

                var allBranchesFetchRefSpec = $"+refs/heads/*:refs/remotes/{remote.Name}/*";
                log.Info($"Adding refspec: {allBranchesFetchRefSpec}");
                remotes.Update(remote.Name, allBranchesFetchRefSpec);
                return remote;
            }

            var message = $"{howMany} remote(s) have been detected. When being run on a build server, the Git repository is expected to bear one (and no more than one) remote.";
            throw new WarningException(message);
        }

        // Mirrors every refs/remotes/<remote>/* ref (except HEAD) into refs/heads/*, creating or
        // fast-updating local branches and wiring up their tracking configuration.
        private void CreateOrUpdateLocalBranchesFromRemoteTrackingOnes(string remoteName)
        {
            var prefix = $"refs/remotes/{remoteName}/";
            var remoteHeadCanonicalName = $"{prefix}HEAD";
            var headReferenceName = ReferenceName.Parse(remoteHeadCanonicalName);
            var remoteTrackingReferences = repository.Refs
                .FromGlob(prefix + "*")
                .Where(r => !r.Name.Equals(headReferenceName));

            foreach (var remoteTrackingReference in remoteTrackingReferences)
            {
                var remoteTrackingReferenceName = remoteTrackingReference.Name.Canonical;
                var branchName = remoteTrackingReferenceName.Substring(prefix.Length);
                var localCanonicalName = "refs/heads/" + branchName;
                var referenceName = ReferenceName.Parse(localCanonicalName);

                // We do not want to touch our current branch
                if (repository.Head.Name.EquivalentTo(branchName)) continue;

                if (repository.Refs.Any(x => x.Name.Equals(referenceName)))
                {
                    var localRef = repository.Refs[localCanonicalName];
                    if (localRef.TargetIdentifier == remoteTrackingReference.TargetIdentifier)
                    {
                        log.Info($"Skipping update of '{remoteTrackingReference.Name.Canonical}' as it already matches the remote ref.");
                        continue;
                    }
                    var remoteRefTipId = remoteTrackingReference.ReferenceTargetId;
                    log.Info($"Updating local ref '{localRef.Name.Canonical}' to point at {remoteRefTipId}.");
                    retryAction.Execute(() => repository.Refs.UpdateTarget(localRef, remoteRefTipId));
                    continue;
                }

                log.Info($"Creating local branch from remote tracking '{remoteTrackingReference.Name.Canonical}'.");
                repository.Refs.Add(localCanonicalName, remoteTrackingReference.TargetIdentifier, true);

                var branch = repository.Branches[branchName];
                repository.Branches.UpdateTrackedBranch(branch, remoteTrackingReferenceName);
            }
        }

        /// <summary>
        /// Ensures a local branch exists for <paramref name="currentBranch"/> (creating or
        /// repointing it from the matching remote branch tip) and checks it out.
        /// </summary>
        public void EnsureLocalBranchExistsForCurrentBranch(IRemote remote, string currentBranch)
        {
            if (remote is null)
            {
                throw new ArgumentNullException(nameof(remote));
            }

            if (string.IsNullOrEmpty(currentBranch)) return;

            var isRef = currentBranch.Contains("refs");
            var isBranch = currentBranch.Contains("refs/heads");
            var localCanonicalName = !isRef
                ? "refs/heads/" + currentBranch
                : isBranch
                    ? currentBranch
                    : currentBranch.Replace("refs/", "refs/heads/");

            var repoTip = repository.Head.Tip;

            // We currently have the rep.Head of the *default* branch, now we need to look up the right one
            var originCanonicalName = $"{remote.Name}/{currentBranch}";
            var originBranch = repository.Branches[originCanonicalName];
            if (originBranch != null)
            {
                repoTip = originBranch.Tip;
            }

            var repoTipId = repoTip.Id;

            var referenceName = ReferenceName.Parse(localCanonicalName);
            if (repository.Branches.All(b => !b.Name.Equals(referenceName)))
            {
                log.Info(isBranch ? $"Creating local branch {referenceName}"
                    : $"Creating local branch {referenceName} pointing at {repoTipId}");
                repository.Refs.Add(localCanonicalName, repoTipId.Sha);
            }
            else
            {
                log.Info(isBranch ? $"Updating local branch {referenceName} to point at {repoTip}"
                    : $"Updating local branch {referenceName} to match ref {currentBranch}");
                var localRef = repository.Refs[localCanonicalName];
                retryAction.Execute(() => repository.Refs.UpdateTarget(localRef, repoTipId));
            }

            Checkout(localCanonicalName);
        }

        // All checkouts go through the lock-file retry wrapper.
        private void Checkout(string commitOrBranchSpec)
        {
            retryAction.Execute(() => repository.Checkout(commitOrBranchSpec));
        }
    }
}
// Copyright (c) 2021 Alachisoft
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License
using System;
using Alachisoft.NCache.Persistence;
using Alachisoft.NCache.Runtime.Serialization;
using Alachisoft.NCache.Runtime.Serialization.IO;

namespace Alachisoft.NCache.Caching
{
    /// <summary>
    /// Identifies a single cache event: a unique id (normally derived from the
    /// originating operation), an operation counter, an event counter and the
    /// event type. Compact-serializable and cloneable.
    /// </summary>
    public class EventId : ICloneable, ICompactSerializable
    {
        private string _eventUniqueId;
        private long _operationCounter;
        private int _eventCounter;
        private EventType _eventType;
        private int _hashCode = -1; // cached hash; -1 means "not yet computed"

        public EventId() { }

        public EventId(string eventUniqueId, long operationCounter, int eventCounter)
        {
            _eventUniqueId = eventUniqueId;
            _operationCounter = operationCounter;
            _eventCounter = eventCounter;
        }

        /// <summary>
        /// Value equality over the identifying fields (the same fields
        /// <see cref="GetHashCode"/> hashes).
        /// Fix: the previous implementation compared the lazily computed
        /// <c>_hashCode</c> caches, so two logically identical ids could compare
        /// unequal depending on whether <see cref="GetHashCode"/> had already been
        /// called on one of them, and <c>_eventUniqueId</c> was never compared
        /// directly.
        /// </summary>
        public override bool Equals(object obj)
        {
            EventId other = obj as EventId;
            if (other == null) return false;

            return _operationCounter == other._operationCounter
                && _eventCounter == other._eventCounter
                && _eventType == other._eventType
                && string.Equals(_eventUniqueId, other._eventUniqueId, StringComparison.Ordinal);
        }

        /// <summary>
        /// Null-safe equality helper.
        /// Fix: previously threw <see cref="NullReferenceException"/> when
        /// <paramref name="A"/> was null and <paramref name="B"/> was not.
        /// </summary>
        private static bool AreEqual(object A, object B)
        {
            if (A == null || B == null)
                return A == null && B == null;
            return A.Equals(B);
        }

        public string EventUniqueID
        {
            get { return _eventUniqueId; }
            set { _eventUniqueId = value; }
        }

        public long OperationCounter
        {
            get { return _operationCounter; }
            set { _operationCounter = value; }
        }

        public int EventCounter
        {
            get { return _eventCounter; }
            set { _eventCounter = value; }
        }

        public EventType EventType
        {
            get { return _eventType; }
            set { _eventType = value; }
        }

        /// <summary>
        /// Builds an <see cref="EventId"/> from an operation id; the unique id and
        /// operation counter are copied when <paramref name="opId"/> is non-null.
        /// </summary>
        public static EventId CreateEventId(OperationID opId)
        {
            EventId eventId = new EventId();
            if (opId != null)
            {
                eventId._eventUniqueId = opId.OperationId;
                eventId._operationCounter = opId.OpCounter;
            }
            return eventId;
        }

        /// <summary>
        /// Hash derived from the same fields <see cref="Equals(object)"/> compares,
        /// cached after the first call. Fix: a null unique id previously fell back
        /// to the identity hash (<c>base.GetHashCode()</c>), violating the
        /// Equals/GetHashCode contract for value-equal instances. The hash is not
        /// serialized, so this only affects in-process hashing.
        /// </summary>
        public override int GetHashCode()
        {
            if (_hashCode == -1)
                _hashCode = (_eventUniqueId + _eventCounter.ToString() + ":" + OperationCounter.ToString() + ":" + _eventType.ToString()).GetHashCode();
            return _hashCode;
        }

        /// <summary>
        /// Reads an optional EventId written by <see cref="WriteEventIdInfo"/>;
        /// returns null when the presence flag is false.
        /// </summary>
        public static EventId ReadEventIdInfo(CompactReader reader)
        {
            bool flag = reader.ReadBoolean();
            if (flag)
            {
                EventId item = new EventId();
                item.Deserialize(reader);
                return item;
            }
            return null;
        }

        /// <summary>
        /// Writes an optional EventId: a presence flag followed by the payload.
        /// </summary>
        internal static void WriteEventIdInfo(CompactWriter writer, EventId item)
        {
            if (item == null)
            {
                writer.Write(false);
                return;
            }
            writer.Write(true);
            item.Serialize(writer);
        }

        #region ICloneable Members

        /// <summary>
        /// Field-wise copy of this id. NOTE(review): locks on <c>this</c>, as in the
        /// original — kept because external callers may rely on that lock object.
        /// </summary>
        public object Clone()
        {
            EventId ei = new EventId();
            lock (this)
            {
                ei._eventUniqueId = _eventUniqueId;
                ei._operationCounter = _operationCounter;
                ei._eventCounter = _eventCounter;
                ei._eventType = _eventType;
            }
            return ei;
        }

        #endregion

        // Field order must match Serialize exactly.
        public void Deserialize(Runtime.Serialization.IO.CompactReader reader)
        {
            _eventCounter = reader.ReadInt32();
            _eventUniqueId = (string)reader.ReadObject();
            _operationCounter = reader.ReadInt64();
            _eventType = (EventType)reader.ReadInt32();
        }

        // Field order must match Deserialize exactly; _hashCode is intentionally
        // not serialized.
        public void Serialize(Runtime.Serialization.IO.CompactWriter writer)
        {
            writer.Write(_eventCounter);
            writer.WriteObject(_eventUniqueId);
            writer.Write(_operationCounter);
            writer.Write((int)_eventType);
        }
    }
}
#region License
// Copyright (c) 2007 James Newton-King
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.IO;
using System.Xml;
using Newtonsoft.Json4.Utilities;
using Newtonsoft.Json4.Linq;
using System.Globalization;

namespace Newtonsoft.Json4
{
  /// <summary>
  /// Specifies the state of the <see cref="JsonWriter"/>.
  /// </summary>
  public enum WriteState
  {
    /// <summary>
    /// An exception has been thrown, which has left the <see cref="JsonWriter"/> in an invalid state.
    /// You may call the <see cref="JsonWriter.Close"/> method to put the <see cref="JsonWriter"/> in the <c>Closed</c> state.
    /// Any other <see cref="JsonWriter"/> method calls results in an <see cref="InvalidOperationException"/> being thrown.
    /// </summary>
    Error,
    /// <summary>
    /// The <see cref="JsonWriter.Close"/> method has been called.
    /// </summary>
    Closed,
    /// <summary>
    /// An object is being written.
    /// </summary>
    Object,
    /// <summary>
    /// An array is being written.
    /// </summary>
    Array,
    /// <summary>
    /// A constructor is being written.
    /// </summary>
    Constructor,
    /// <summary>
    /// A property is being written.
    /// </summary>
    Property,
    /// <summary>
    /// A write method has not been called.
    /// </summary>
    Start
  }

  /// <summary>
  /// Specifies formatting options for the <see cref="JsonTextWriter"/>.
  /// </summary>
  public enum Formatting
  {
    /// <summary>
    /// No special formatting is applied. This is the default.
    /// </summary>
    None,
    /// <summary>
    /// Causes child objects to be indented according to the <see cref="JsonTextWriter.Indentation"/> and <see cref="JsonTextWriter.IndentChar"/> settings.
    /// </summary>
    Indented
  }

  /// <summary>
  /// Represents a writer that provides a fast, non-cached, forward-only way of generating Json data.
  /// </summary>
  public abstract class JsonWriter : IDisposable
  {
    // Internal writer states. Unlike the public WriteState enum, this
    // distinguishes "just opened" container states (ObjectStart/ArrayStart/
    // ConstructorStart) from "has content" ones, which drives delimiter output.
    internal enum State
    {
      Start,
      Property,
      ObjectStart,
      Object,
      ArrayStart,
      Array,
      ConstructorStart,
      Constructor,
      Bytes,
      Closed,
      Error
    }

    // array that gives a new state based on the current state and the token being written:
    // StateArray[(int)token][(int)currentState] -> next state. Built once in the
    // static constructor from StateArrayTempate via BuildStateArray().
    private static readonly State[][] StateArray;

    // NOTE(review): "Tempate" is a typo for "Template", kept because the field is
    // internal and may be referenced elsewhere in the assembly.
    // Each row is indexed by the CURRENT State; columns are listed below. The
    // rows cover tokens None..StartProperty/Comment/Raw plus one shared "Value"
    // row that BuildStateArray copies for every value-producing token.
    internal static readonly State[][] StateArrayTempate = new[] {
    //                        Start                  PropertyName            ObjectStart         Object            ArrayStart              Array                   ConstructorStart        Constructor             Closed        Error
    //
    /* None             */new[]{ State.Error,            State.Error,            State.Error,        State.Error,      State.Error,            State.Error,            State.Error,            State.Error,            State.Error,  State.Error },
    /* StartObject      */new[]{ State.ObjectStart,      State.ObjectStart,      State.Error,        State.Error,      State.ObjectStart,      State.ObjectStart,      State.ObjectStart,      State.ObjectStart,      State.Error,  State.Error },
    /* StartArray       */new[]{ State.ArrayStart,       State.ArrayStart,       State.Error,        State.Error,      State.ArrayStart,       State.ArrayStart,       State.ArrayStart,       State.ArrayStart,       State.Error,  State.Error },
    /* StartConstructor */new[]{ State.ConstructorStart, State.ConstructorStart, State.Error,        State.Error,      State.ConstructorStart, State.ConstructorStart, State.ConstructorStart, State.ConstructorStart, State.Error,  State.Error },
    /* StartProperty    */new[]{ State.Property,         State.Error,            State.Property,     State.Property,   State.Error,            State.Error,            State.Error,            State.Error,            State.Error,  State.Error },
    /* Comment          */new[]{ State.Start,            State.Property,         State.ObjectStart,  State.Object,     State.ArrayStart,       State.Array,            State.Constructor,      State.Constructor,      State.Error,  State.Error },
    /* Raw              */new[]{ State.Start,            State.Property,         State.ObjectStart,  State.Object,     State.ArrayStart,       State.Array,            State.Constructor,      State.Constructor,      State.Error,  State.Error },
    /* Value (this will be copied) */new[]{ State.Start, State.Object,           State.Error,        State.Error,      State.Array,            State.Array,            State.Constructor,      State.Constructor,      State.Error,  State.Error }
    };

    // Extends the template so every JsonToken value has a row: value-producing
    // tokens (Integer, Float, String, ...) share the "Value" row (index 7),
    // everything else gets the all-error row (index 0).
    // NOTE(review): rows have 10 columns while State has 11 members (there is no
    // Bytes column) — indexing with a current state >= State.Bytes would read the
    // wrong column or overflow; confirm those states never reach AutoComplete.
    internal static State[][] BuildStateArray()
    {
      var allStates = StateArrayTempate.ToList();
      var errorStates = StateArrayTempate[0];
      var valueStates = StateArrayTempate[7];
      foreach (JsonToken valueToken in EnumUtils.GetValues(typeof(JsonToken)))
      {
        if (allStates.Count <= (int)valueToken)
        {
          switch (valueToken)
          {
            case JsonToken.Integer:
            case JsonToken.Float:
            case JsonToken.String:
            case JsonToken.Boolean:
            case JsonToken.Null:
            case JsonToken.Undefined:
            case JsonToken.Date:
            case JsonToken.Bytes:
              allStates.Add(valueStates);
              break;
            default:
              allStates.Add(errorStates);
              break;
          }
        }
      }
      return allStates.ToArray();
    }

    static JsonWriter()
    {
      StateArray = BuildStateArray();
    }

    private int _top;                         // depth of the open-container stack
    private readonly List<JTokenType> _stack; // open containers; index 0 is a JTokenType.None sentinel
    private State _currentState;
    private Formatting _formatting;

    /// <summary>
    /// Gets or sets a value indicating whether the underlying stream or
    /// <see cref="TextReader"/> should be closed when the writer is closed.
    /// </summary>
    /// <value>
    /// true to close the underlying stream or <see cref="TextReader"/> when
    /// the writer is closed; otherwise false. The default is true.
    /// </value>
    public bool CloseOutput { get; set; }

    /// <summary>
    /// Gets the top.
    /// </summary>
    /// <value>The top (current depth of open objects/arrays/constructors).</value>
    protected internal int Top
    {
      get { return _top; }
    }

    /// <summary>
    /// Gets the state of the writer, mapped from the finer-grained internal
    /// <see cref="State"/> enum.
    /// </summary>
    public WriteState WriteState
    {
      get
      {
        switch (_currentState)
        {
          case State.Error:
            return WriteState.Error;
          case State.Closed:
            return WriteState.Closed;
          case State.Object:
          case State.ObjectStart:
            return WriteState.Object;
          case State.Array:
          case State.ArrayStart:
            return WriteState.Array;
          case State.Constructor:
          case State.ConstructorStart:
            return WriteState.Constructor;
          case State.Property:
            return WriteState.Property;
          case State.Start:
            return WriteState.Start;
          default:
            throw new JsonWriterException("Invalid state: " + _currentState);
        }
      }
    }

    /// <summary>
    /// Indicates how the output is formatted.
    /// </summary>
    public Formatting Formatting
    {
      get { return _formatting; }
      set { _formatting = value; }
    }

    /// <summary>
    /// Creates an instance of the <c>JsonWriter</c> class.
    /// </summary>
    protected JsonWriter()
    {
      _stack = new List<JTokenType>(8);
      // Sentinel so Peek() at depth 0 returns JTokenType.None.
      _stack.Add(JTokenType.None);
      _currentState = State.Start;
      _formatting = Formatting.None;
      CloseOutput = true;
    }

    // Pushes an opened container; reuses list slots rather than trimming on Pop.
    private void Push(JTokenType value)
    {
      _top++;
      if (_stack.Count <= _top)
        _stack.Add(value);
      else
        _stack[_top] = value;
    }

    // Pops the innermost container and returns its type.
    private void Pop_doc_placeholder() { }
    private JTokenType Pop()
    {
      JTokenType value = Peek();
      _top--;
      return value;
    }

    // Returns the innermost open container without removing it.
    private JTokenType Peek()
    {
      return _stack[_top];
    }

    /// <summary>
    /// Flushes whatever is in the buffer to the underlying streams and also flushes the underlying stream.
    /// </summary>
    public abstract void Flush();

    /// <summary>
    /// Closes this stream and the underlying stream.
    /// </summary>
    public virtual void Close()
    {
      AutoCompleteAll();
    }

    /// <summary>
    /// Writes the beginning of a Json object.
    /// </summary>
    public virtual void WriteStartObject()
    {
      AutoComplete(JsonToken.StartObject);
      Push(JTokenType.Object);
    }

    /// <summary>
    /// Writes the end of a Json object.
    /// </summary>
    public virtual void WriteEndObject()
    {
      AutoCompleteClose(JsonToken.EndObject);
    }

    /// <summary>
    /// Writes the beginning of a Json array.
    /// </summary>
    public virtual void WriteStartArray()
    {
      AutoComplete(JsonToken.StartArray);
      Push(JTokenType.Array);
    }

    /// <summary>
    /// Writes the end of an array.
    /// </summary>
    public virtual void WriteEndArray()
    {
      AutoCompleteClose(JsonToken.EndArray);
    }

    /// <summary>
    /// Writes the start of a constructor with the given name.
    /// </summary>
    /// <param name="name">The name of the constructor. Only state is tracked here;
    /// derived writers emit the name itself.</param>
    public virtual void WriteStartConstructor(string name)
    {
      AutoComplete(JsonToken.StartConstructor);
      Push(JTokenType.Constructor);
    }

    /// <summary>
    /// Writes the end constructor.
    /// </summary>
    public virtual void WriteEndConstructor()
    {
      AutoCompleteClose(JsonToken.EndConstructor);
    }

    /// <summary>
    /// Writes the property name of a name/value pair on a Json object.
    /// </summary>
    /// <param name="name">The name of the property. Only state is tracked here;
    /// derived writers emit the name itself.</param>
    public virtual void WritePropertyName(string name)
    {
      AutoComplete(JsonToken.PropertyName);
    }

    /// <summary>
    /// Writes the end of the current Json object or array.
    /// </summary>
    public virtual void WriteEnd()
    {
      WriteEnd(Peek());
    }

    /// <summary>
    /// Writes the current <see cref="JsonReader"/> token.
    /// </summary>
    /// <param name="reader">The <see cref="JsonReader"/> to read the token from.</param>
    public void WriteToken(JsonReader reader)
    {
      ValidationUtils.ArgumentNotNull(reader, "reader");

      int initialDepth;
      if (reader.TokenType == JsonToken.None)
        initialDepth = -1;
      else if (!IsStartToken(reader.TokenType))
        initialDepth = reader.Depth + 1;
      else
        initialDepth = reader.Depth;

      WriteToken(reader, initialDepth);
    }

    // Copies tokens from the reader until the token that was current on entry
    // (and everything nested inside it) has been fully written.
    internal void WriteToken(JsonReader reader, int initialDepth)
    {
      do
      {
        switch (reader.TokenType)
        {
          case JsonToken.None:
            // read to next
            break;
          case JsonToken.StartObject:
            WriteStartObject();
            break;
          case JsonToken.StartArray:
            WriteStartArray();
            break;
          case JsonToken.StartConstructor:
            string constructorName = reader.Value.ToString();
            // write a JValue date when the constructor is for a date
            if (string.Equals(constructorName, "Date", StringComparison.Ordinal))
              WriteConstructorDate(reader);
            else
              WriteStartConstructor(reader.Value.ToString());
            break;
          case JsonToken.PropertyName:
            WritePropertyName(reader.Value.ToString());
            break;
          case JsonToken.Comment:
            WriteComment(reader.Value.ToString());
            break;
          case JsonToken.Integer:
            WriteValue(Convert.ToInt64(reader.Value, CultureInfo.InvariantCulture));
            break;
          case JsonToken.Float:
            WriteValue(Convert.ToDouble(reader.Value, CultureInfo.InvariantCulture));
            break;
          case JsonToken.String:
            WriteValue(reader.Value.ToString());
            break;
          case JsonToken.Boolean:
            WriteValue(Convert.ToBoolean(reader.Value, CultureInfo.InvariantCulture));
            break;
          case JsonToken.Null:
            WriteNull();
            break;
          case JsonToken.Undefined:
            WriteUndefined();
            break;
          case JsonToken.EndObject:
            WriteEndObject();
            break;
          case JsonToken.EndArray:
            WriteEndArray();
            break;
          case JsonToken.EndConstructor:
            WriteEndConstructor();
            break;
          case JsonToken.Date:
            WriteValue((DateTime)reader.Value);
            break;
          case JsonToken.Raw:
            WriteRawValue((string)reader.Value);
            break;
          case JsonToken.Bytes:
            WriteValue((byte[])reader.Value);
            break;
          default:
            throw MiscellaneousUtils.CreateArgumentOutOfRangeException("TokenType", reader.TokenType, "Unexpected token type.");
        }
      } while (
        // stop if we have reached the end of the token being read
        initialDepth - 1 < reader.Depth - (IsEndToken(reader.TokenType) ? 1 : 0)
        && reader.Read());
    }

    // Collapses a JSON "new Date(ticks)" constructor read from the reader into a
    // single DateTime value written through WriteValue.
    private void WriteConstructorDate(JsonReader reader)
    {
      if (!reader.Read())
        throw new Exception("Unexpected end when reading date constructor.");
      if (reader.TokenType != JsonToken.Integer)
        throw new Exception("Unexpected token when reading date constructor. Expected Integer, got " + reader.TokenType);

      long ticks = (long)reader.Value;
      DateTime date = JsonConvert.ConvertJavaScriptTicksToDateTime(ticks);

      if (!reader.Read())
        throw new Exception("Unexpected end when reading date constructor.");
      if (reader.TokenType != JsonToken.EndConstructor)
        throw new Exception("Unexpected token when reading date constructor. Expected EndConstructor, got " + reader.TokenType);

      WriteValue(date);
    }

    private bool IsEndToken(JsonToken token)
    {
      switch (token)
      {
        case JsonToken.EndObject:
        case JsonToken.EndArray:
        case JsonToken.EndConstructor:
          return true;
        default:
          return false;
      }
    }

    private bool IsStartToken(JsonToken token)
    {
      switch (token)
      {
        case JsonToken.StartObject:
        case JsonToken.StartArray:
        case JsonToken.StartConstructor:
          return true;
        default:
          return false;
      }
    }

    // Dispatches WriteEnd() to the close method matching the container type.
    private void WriteEnd(JTokenType type)
    {
      switch (type)
      {
        case JTokenType.Object:
          WriteEndObject();
          break;
        case JTokenType.Array:
          WriteEndArray();
          break;
        case JTokenType.Constructor:
          WriteEndConstructor();
          break;
        default:
          throw new JsonWriterException("Unexpected type when writing end: " + type);
      }
    }

    // Closes every container still open on the stack (used by Close()).
    private void AutoCompleteAll()
    {
      while (_top > 0)
      {
        WriteEnd();
      }
    }

    private JTokenType GetTypeForCloseToken(JsonToken token)
    {
      switch (token)
      {
        case JsonToken.EndObject:
          return JTokenType.Object;
        case JsonToken.EndArray:
          return JTokenType.Array;
        case JsonToken.EndConstructor:
          return JTokenType.Constructor;
        default:
          throw new JsonWriterException("No type for token: " + token);
      }
    }

    private JsonToken GetCloseTokenForType(JTokenType type)
    {
      switch (type)
      {
        case JTokenType.Object:
          return JsonToken.EndObject;
        case JTokenType.Array:
          return JsonToken.EndArray;
        case JTokenType.Constructor:
          return JsonToken.EndConstructor;
        default:
          throw new JsonWriterException("No close token for type: " + type);
      }
    }

    // Closes containers until (and including) the nearest one matching
    // tokenBeingClosed, auto-closing any mismatched inner containers on the way,
    // then restores _currentState from the new innermost container.
    private void AutoCompleteClose(JsonToken tokenBeingClosed)
    {
      // write closing symbol and calculate new state
      int levelsToComplete = 0;
      for (int i = 0; i < _top; i++)
      {
        int currentLevel = _top - i;
        if (_stack[currentLevel] == GetTypeForCloseToken(tokenBeingClosed))
        {
          levelsToComplete = i + 1;
          break;
        }
      }
      if (levelsToComplete == 0)
        throw new JsonWriterException("No token to close.");
      for (int i = 0; i < levelsToComplete; i++)
      {
        JsonToken token = GetCloseTokenForType(Pop());
        if (_formatting == Formatting.Indented)
        {
          // No newline-indent before the closing brace of an empty container.
          if (_currentState != State.ObjectStart && _currentState != State.ArrayStart)
            WriteIndent();
        }
        WriteEnd(token);
      }
      JTokenType currentLevelType = Peek();
      switch (currentLevelType)
      {
        case JTokenType.Object:
          _currentState = State.Object;
          break;
        case JTokenType.Array:
          _currentState = State.Array;
          break;
        case JTokenType.Constructor:
          // NOTE(review): resumes in State.Array rather than State.Constructor —
          // looks intentional (constructors delimit like arrays) but verify.
          _currentState = State.Array;
          break;
        case JTokenType.None:
          _currentState = State.Start;
          break;
        default:
          throw new JsonWriterException("Unknown JsonType: " + currentLevelType);
      }
    }

    /// <summary>
    /// Writes the specified end token.
    /// </summary>
    /// <param name="token">The end token to write.</param>
    protected virtual void WriteEnd(JsonToken token)
    {
    }

    /// <summary>
    /// Writes indent characters.
    /// </summary>
    protected virtual void WriteIndent()
    {
    }

    /// <summary>
    /// Writes the JSON value delimiter.
    /// </summary>
    protected virtual void WriteValueDelimiter()
    {
    }

    /// <summary>
    /// Writes an indent space.
    /// </summary>
    protected virtual void WriteIndentSpace()
    {
    }

    // Transitions the state machine for the token about to be written: validates
    // the transition via StateArray, and emits the value delimiter / indentation
    // a derived writer needs before the token itself.
    internal void AutoComplete(JsonToken tokenBeingWritten)
    {
      // gets new state based on the current state and what is being written
      State newState = StateArray[(int)tokenBeingWritten][(int)_currentState];

      if (newState == State.Error)
        throw new JsonWriterException("Token {0} in state {1} would result in an invalid JSON object.".FormatWith(CultureInfo.InvariantCulture, tokenBeingWritten.ToString(), _currentState.ToString()));

      if ((_currentState == State.Object || _currentState == State.Array || _currentState == State.Constructor) && tokenBeingWritten != JsonToken.Comment)
      {
        // The container already has content, so separate from the previous item.
        WriteValueDelimiter();
      }
      else if (_currentState == State.Property)
      {
        if (_formatting == Formatting.Indented)
          WriteIndentSpace();
      }

      if (_formatting == Formatting.Indented)
      {
        WriteState writeState = WriteState;

        // don't indent a property when it is the first token to be written (i.e. at the start)
        if ((tokenBeingWritten == JsonToken.PropertyName && writeState != WriteState.Start)
            || writeState == WriteState.Array || writeState == WriteState.Constructor)
        {
          WriteIndent();
        }
      }

      _currentState = newState;
    }

    #region WriteValue methods
    /// <summary>
    /// Writes a null value.
    /// </summary>
    public virtual void WriteNull()
    {
      AutoComplete(JsonToken.Null);
    }

    /// <summary>
    /// Writes an undefined value.
    /// </summary>
    public virtual void WriteUndefined()
    {
      AutoComplete(JsonToken.Undefined);
    }

    /// <summary>
    /// Writes raw JSON without changing the writer's state.
    /// </summary>
    /// <param name="json">The raw JSON to write.</param>
    public virtual void WriteRaw(string json)
    {
    }

    /// <summary>
    /// Writes raw JSON where a value is expected and updates the writer's state.
    /// </summary>
    /// <param name="json">The raw JSON to write.</param>
    public virtual void WriteRawValue(string json)
    {
      // hack. want writer to change state as if a value had been written
      AutoComplete(JsonToken.Undefined);
      WriteRaw(json);
    }

    // NOTE: the WriteValue overloads below only drive the state machine; derived
    // writers override them to emit the actual text.

    /// <summary>
    /// Writes a <see cref="String"/> value.
    /// </summary>
    /// <param name="value">The <see cref="String"/> value to write.</param>
    public virtual void WriteValue(string value)
    {
      AutoComplete(JsonToken.String);
    }

    /// <summary>
    /// Writes a <see cref="Int32"/> value.
    /// </summary>
    /// <param name="value">The <see cref="Int32"/> value to write.</param>
    public virtual void WriteValue(int value)
    {
      AutoComplete(JsonToken.Integer);
    }

    /// <summary>
    /// Writes a <see cref="UInt32"/> value.
    /// </summary>
    /// <param name="value">The <see cref="UInt32"/> value to write.</param>
    [CLSCompliant(false)]
    public virtual void WriteValue(uint value)
    {
      AutoComplete(JsonToken.Integer);
    }

    /// <summary>
    /// Writes a <see cref="Int64"/> value.
    /// </summary>
    /// <param name="value">The <see cref="Int64"/> value to write.</param>
    public virtual void WriteValue(long value)
    {
      AutoComplete(JsonToken.Integer);
    }

    /// <summary>
    /// Writes a <see cref="UInt64"/> value.
    /// </summary>
    /// <param name="value">The <see cref="UInt64"/> value to write.</param>
    [CLSCompliant(false)]
    public virtual void WriteValue(ulong value)
    {
      AutoComplete(JsonToken.Integer);
    }

    /// <summary>
    /// Writes a <see cref="Single"/> value.
    /// </summary>
    /// <param name="value">The <see cref="Single"/> value to write.</param>
    public virtual void WriteValue(float value)
    {
      AutoComplete(JsonToken.Float);
    }

    /// <summary>
    /// Writes a <see cref="Double"/> value.
    /// </summary>
    /// <param name="value">The <see cref="Double"/> value to write.</param>
    public virtual void WriteValue(double value)
    {
      AutoComplete(JsonToken.Float);
    }

    /// <summary>
    /// Writes a <see cref="Boolean"/> value.
    /// </summary>
    /// <param name="value">The <see cref="Boolean"/> value to write.</param>
    public virtual void WriteValue(bool value)
    {
      AutoComplete(JsonToken.Boolean);
    }

    /// <summary>
    /// Writes a <see cref="Int16"/> value.
    /// </summary>
    /// <param name="value">The <see cref="Int16"/> value to write.</param>
    public virtual void WriteValue(short value)
    {
      AutoComplete(JsonToken.Integer);
    }

    /// <summary>
    /// Writes a <see cref="UInt16"/> value.
    /// </summary>
    /// <param name="value">The <see cref="UInt16"/> value to write.</param>
    [CLSCompliant(false)]
    public virtual void WriteValue(ushort value)
    {
      AutoComplete(JsonToken.Integer);
    }

    /// <summary>
    /// Writes a <see cref="Char"/> value.
    /// </summary>
    /// <param name="value">The <see cref="Char"/> value to write.</param>
    public virtual void WriteValue(char value)
    {
      AutoComplete(JsonToken.String);
    }

    /// <summary>
    /// Writes a <see cref="Byte"/> value.
    /// </summary>
    /// <param name="value">The <see cref="Byte"/> value to write.</param>
    public virtual void WriteValue(byte value)
    {
      AutoComplete(JsonToken.Integer);
    }

    /// <summary>
    /// Writes a <see cref="SByte"/> value.
    /// </summary>
    /// <param name="value">The <see cref="SByte"/> value to write.</param>
    [CLSCompliant(false)]
    public virtual void WriteValue(sbyte value)
    {
      AutoComplete(JsonToken.Integer);
    }

    /// <summary>
    /// Writes a <see cref="Decimal"/> value.
    /// </summary>
    /// <param name="value">The <see cref="Decimal"/> value to write.</param>
    public virtual void WriteValue(decimal value)
    {
      AutoComplete(JsonToken.Float);
    }

    /// <summary>
    /// Writes a <see cref="DateTime"/> value.
    /// </summary>
    /// <param name="value">The <see cref="DateTime"/> value to write.</param>
    public virtual void WriteValue(DateTime value)
    {
      AutoComplete(JsonToken.Date);
    }

#if !PocketPC && !NET20
    /// <summary>
    /// Writes a <see cref="DateTimeOffset"/> value.
    /// </summary>
    /// <param name="value">The <see cref="DateTimeOffset"/> value to write.</param>
    public virtual void WriteValue(DateTimeOffset value)
    {
      AutoComplete(JsonToken.Date);
    }
#endif

    /// <summary>
    /// Writes a <see cref="Guid"/> value.
    /// </summary>
    /// <param name="value">The <see cref="Guid"/> value to write.</param>
    public virtual void WriteValue(Guid value)
    {
      AutoComplete(JsonToken.String);
    }

    /// <summary>
    /// Writes a <see cref="TimeSpan"/> value.
    /// </summary>
    /// <param name="value">The <see cref="TimeSpan"/> value to write.</param>
    public virtual void WriteValue(TimeSpan value)
    {
      AutoComplete(JsonToken.String);
    }

    /// <summary>
    /// Writes a <see cref="Nullable{Int32}"/> value.
    /// </summary>
    /// <param name="value">The <see cref="Nullable{Int32}"/> value to write.</param>
    public virtual void WriteValue(int? value)
    {
      if (value == null)
        WriteNull();
      else
        WriteValue(value.Value);
    }

    /// <summary>
    /// Writes a <see cref="Nullable{UInt32}"/> value.
    /// </summary>
    /// <param name="value">The <see cref="Nullable{UInt32}"/> value to write.</param>
    [CLSCompliant(false)]
    public virtual void WriteValue(uint? value)
    {
      if (value == null)
        WriteNull();
      else
        WriteValue(value.Value);
    }

    /// <summary>
    /// Writes a <see cref="Nullable{Int64}"/> value.
    /// </summary>
    /// <param name="value">The <see cref="Nullable{Int64}"/> value to write.</param>
    public virtual void WriteValue(long? value)
    {
      if (value == null)
        WriteNull();
      else
        WriteValue(value.Value);
    }

    /// <summary>
    /// Writes a <see cref="Nullable{UInt64}"/> value.
    /// </summary>
    /// <param name="value">The <see cref="Nullable{UInt64}"/> value to write.</param>
    [CLSCompliant(false)]
    public virtual void WriteValue(ulong? value)
    {
      if (value == null)
        WriteNull();
      else
        WriteValue(value.Value);
    }

    /// <summary>
    /// Writes a <see cref="Nullable{Single}"/> value.
    /// </summary>
    /// <param name="value">The <see cref="Nullable{Single}"/> value to write.</param>
    public virtual void WriteValue(float? value)
    {
      if (value == null)
        WriteNull();
      else
        WriteValue(value.Value);
    }

    /// <summary>
    /// Writes a <see cref="Nullable{Double}"/> value.
    /// </summary>
    /// <param name="value">The <see cref="Nullable{Double}"/> value to write.</param>
    public virtual void WriteValue(double? value)
    {
      if (value == null)
        WriteNull();
      else
        WriteValue(value.Value);
    }

    /// <summary>
    /// Writes a <see cref="Nullable{Boolean}"/> value.
    /// </summary>
    /// <param name="value">The <see cref="Nullable{Boolean}"/> value to write.</param>
    public virtual void WriteValue(bool? value)
    {
      if (value == null)
        WriteNull();
      else
        WriteValue(value.Value);
    }

    /// <summary>
    /// Writes a <see cref="Nullable{Int16}"/> value.
    /// </summary>
    /// <param name="value">The <see cref="Nullable{Int16}"/> value to write.</param>
    public virtual void WriteValue(short? value)
    {
      if (value == null)
        WriteNull();
      else
        WriteValue(value.Value);
    }

    /// <summary>
    /// Writes a <see cref="Nullable{UInt16}"/> value.
    /// </summary>
    /// <param name="value">The <see cref="Nullable{UInt16}"/> value to write.</param>
    [CLSCompliant(false)]
    public virtual void WriteValue(ushort? value)
    {
      if (value == null)
        WriteNull();
      else
        WriteValue(value.Value);
    }

    /// <summary>
    /// Writes a <see cref="Nullable{Char}"/> value.
    /// </summary>
    /// <param name="value">The <see cref="Nullable{Char}"/> value to write.</param>
    public virtual void WriteValue(char? value)
    {
      if (value == null)
        WriteNull();
      else
        WriteValue(value.Value);
    }

    /// <summary>
    /// Writes a <see cref="Nullable{Byte}"/> value.
    /// </summary>
    /// <param name="value">The <see cref="Nullable{Byte}"/> value to write.</param>
    public virtual void WriteValue(byte? value)
    {
      if (value == null)
        WriteNull();
      else
        WriteValue(value.Value);
    }

    /// <summary>
    /// Writes a <see cref="Nullable{SByte}"/> value.
    /// </summary>
    /// <param name="value">The <see cref="Nullable{SByte}"/> value to write.</param>
    [CLSCompliant(false)]
    public virtual void WriteValue(sbyte? value)
    {
      if (value == null)
        WriteNull();
      else
        WriteValue(value.Value);
    }

    /// <summary>
    /// Writes a <see cref="Nullable{Decimal}"/> value.
    /// </summary>
    /// <param name="value">The <see cref="Nullable{Decimal}"/> value to write.</param>
    public virtual void WriteValue(decimal? value)
    {
      if (value == null)
        WriteNull();
      else
        WriteValue(value.Value);
    }

    /// <summary>
    /// Writes a <see cref="Nullable{DateTime}"/> value.
    /// </summary>
    /// <param name="value">The <see cref="Nullable{DateTime}"/> value to write.</param>
    public virtual void WriteValue(DateTime? value)
    {
      if (value == null)
        WriteNull();
      else
        WriteValue(value.Value);
    }

#if !PocketPC && !NET20
    /// <summary>
    /// Writes a <see cref="Nullable{DateTimeOffset}"/> value.
    /// </summary>
    /// <param name="value">The <see cref="Nullable{DateTimeOffset}"/> value to write.</param>
    public virtual void WriteValue(DateTimeOffset? value)
    {
      if (value == null)
        WriteNull();
      else
        WriteValue(value.Value);
    }
#endif

    /// <summary>
    /// Writes a <see cref="Nullable{Guid}"/> value.
    /// </summary>
    /// <param name="value">The <see cref="Nullable{Guid}"/> value to write.</param>
    public virtual void WriteValue(Guid? value)
    {
      if (value == null)
        WriteNull();
      else
        WriteValue(value.Value);
    }

    /// <summary>
    /// Writes a <see cref="Nullable{TimeSpan}"/> value.
    /// </summary>
    /// <param name="value">The <see cref="Nullable{TimeSpan}"/> value to write.</param>
    public virtual void WriteValue(TimeSpan? value)
    {
      if (value == null)
        WriteNull();
      else
        WriteValue(value.Value);
    }

    /// <summary>
    /// Writes a <see cref="T:Byte[]"/> value.
    /// </summary>
    /// <param name="value">The <see cref="T:Byte[]"/> value to write.</param>
    public virtual void WriteValue(byte[] value)
    {
      if (value == null)
        WriteNull();
      else
        AutoComplete(JsonToken.Bytes);
    }

    /// <summary>
    /// Writes a <see cref="Uri"/> value.
    /// </summary>
    /// <param name="value">The <see cref="Uri"/> value to write.</param>
    public virtual void WriteValue(Uri value)
    {
      if (value == null)
        WriteNull();
      else
        AutoComplete(JsonToken.String);
    }

    /// <summary>
    /// Writes a <see cref="Object"/> value.
    /// An error will raised if the value cannot be written as a single JSON token.
    /// </summary>
    /// <param name="value">The <see cref="Object"/> value to write.</param>
    public virtual void WriteValue(object value)
    {
      if (value == null)
      {
        WriteNull();
        return;
      }
      else if (value is IConvertible)
      {
        IConvertible convertible = value as IConvertible;

        // Dispatch on the runtime TypeCode to the matching typed overload.
        switch (convertible.GetTypeCode())
        {
          case TypeCode.String:
            WriteValue(convertible.ToString(CultureInfo.InvariantCulture));
            return;
          case TypeCode.Char:
            WriteValue(convertible.ToChar(CultureInfo.InvariantCulture));
            return;
          case TypeCode.Boolean:
            WriteValue(convertible.ToBoolean(CultureInfo.InvariantCulture));
            return;
          case TypeCode.SByte:
            WriteValue(convertible.ToSByte(CultureInfo.InvariantCulture));
            return;
          case TypeCode.Int16:
            WriteValue(convertible.ToInt16(CultureInfo.InvariantCulture));
            return;
          case TypeCode.UInt16:
            WriteValue(convertible.ToUInt16(CultureInfo.InvariantCulture));
            return;
          case TypeCode.Int32:
            WriteValue(convertible.ToInt32(CultureInfo.InvariantCulture));
            return;
          case TypeCode.Byte:
            WriteValue(convertible.ToByte(CultureInfo.InvariantCulture));
            return;
          case TypeCode.UInt32:
            WriteValue(convertible.ToUInt32(CultureInfo.InvariantCulture));
            return;
          case TypeCode.Int64:
            WriteValue(convertible.ToInt64(CultureInfo.InvariantCulture));
            return;
          case TypeCode.UInt64:
            WriteValue(convertible.ToUInt64(CultureInfo.InvariantCulture));
            return;
          case TypeCode.Single:
            WriteValue(convertible.ToSingle(CultureInfo.InvariantCulture));
            return;
          case TypeCode.Double:
            WriteValue(convertible.ToDouble(CultureInfo.InvariantCulture));
            return;
          case TypeCode.DateTime:
            WriteValue(convertible.ToDateTime(CultureInfo.InvariantCulture));
            return;
          case TypeCode.Decimal:
            WriteValue(convertible.ToDecimal(CultureInfo.InvariantCulture));
            return;
          case TypeCode.DBNull:
            WriteNull();
            return;
        }
      }
#if !PocketPC && !NET20
      else if (value is DateTimeOffset)
      {
        WriteValue((DateTimeOffset)value);
        return;
      }
#endif
      else if (value is byte[])
      {
        WriteValue((byte[])value);
        return;
      }
      else if (value is Guid)
      {
        WriteValue((Guid)value);
        return;
      }
      else if (value is Uri)
      {
        WriteValue((Uri)value);
        return;
      }
      else if (value is TimeSpan)
      {
        WriteValue((TimeSpan)value);
        return;
      }

      throw new ArgumentException("Unsupported type: {0}. Use the JsonSerializer class to get the object's JSON representation.".FormatWith(CultureInfo.InvariantCulture, value.GetType()));
    }
    #endregion

    /// <summary>
    /// Writes out a comment <code>/*...*/</code> containing the specified text.
    /// </summary>
    /// <param name="text">Text to place inside the comment.</param>
    public virtual void WriteComment(string text)
    {
      AutoComplete(JsonToken.Comment);
    }

    /// <summary>
    /// Writes out the given white space.
    /// </summary>
    /// <param name="ws">The string of white space characters.</param>
    public virtual void WriteWhitespace(string ws)
    {
      if (ws != null)
      {
        if (!StringUtils.IsWhiteSpace(ws))
          throw new JsonWriterException("Only white space characters should be used.");
      }
    }

    void IDisposable.Dispose()
    {
      Dispose(true);
    }

    // NOTE(review): 'disposing' is unused and there is no finalizer, so the
    // classic dispose pattern is only partially implemented here — kept as-is.
    private void Dispose(bool disposing)
    {
      if (_currentState != State.Closed)
        Close();
    }
  }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections;
using System.Collections.Generic;
using System.ComponentModel.Design;
using System.Diagnostics;
using System.Linq;
using System.Reflection;
using System.Drawing;
using System.Threading;

namespace System.ComponentModel
{
    /// <summary>
    /// This type description provider provides type information through
    /// reflection. Unless someone has provided a custom type description
    /// provider for a type or instance, or unless an instance implements
    /// ICustomTypeDescriptor, any query for type information will go through
    /// this class. There should be a single instance of this class associated
    /// with "object", as it can provide all type information for any type.
    /// </summary>
    internal sealed partial class ReflectTypeDescriptionProvider : TypeDescriptionProvider
    {
        // Hashtable of Type -> ReflectedTypeData. ReflectedTypeData contains all
        // of the type information we have gathered for a given type.
        // Created lazily in GetTypeData; null until the first type is queried.
        //
        private Hashtable _typeData;

        // This is the signature we look for when creating types that are generic, but
        // want to know what type they are dealing with. Enums are a good example of this;
        // there is one enum converter that can work with all enums, but it needs to know
        // the type of enum it is dealing with.
        //
        private static Type[] s_typeConstructor = new Type[] { typeof(Type) };

        // This is where we store the various converters, etc for the intrinsic types.
        //
        private static Hashtable s_editorTables;
        private static Hashtable s_intrinsicTypeConverters;

        // For converters, etc that are bound to class attribute data, rather than a class
        // type, we have special key sentinel values that we put into the hash table.
        //
        private static object s_intrinsicReferenceKey = new object();
        private static object s_intrinsicNullableKey = new object();

        // The key we put into IDictionaryService to store our cache dictionary.
        //
        private static object s_dictionaryKey = new object();

        // This is a cache on top of core reflection. The cache
        // builds itself recursively, so if you ask for the properties
        // on Control, Component and object are also automatically filled
        // in. The keys to the property and event caches are types.
        // The keys to the attribute cache are either MemberInfos or types.
        //
        private static Hashtable s_propertyCache;
        private static Hashtable s_eventCache;
        private static Hashtable s_attributeCache;
        private static Hashtable s_extendedPropertyCache;

        // These are keys we stuff into our object cache. We use this
        // cache data to store extender provider info for an object.
        //
        private static readonly Guid s_extenderPropertiesKey = Guid.NewGuid();
        private static readonly Guid s_extenderProviderPropertiesKey = Guid.NewGuid();

        // These are attributes that, when we discover them on interfaces, we do
        // not merge them into the attribute set for a class.
        private static readonly Type[] s_skipInterfaceAttributeList = new Type[]
        {
#if FEATURE_SKIP_INTERFACE
            typeof(System.Runtime.InteropServices.GuidAttribute),
            typeof(System.Runtime.InteropServices.InterfaceTypeAttribute)
#endif
            typeof(System.Runtime.InteropServices.ComVisibleAttribute),
        };

        internal static Guid ExtenderProviderKey { get; } = Guid.NewGuid();

        // Single gate used for all lazy-initialization and cache-fill locking in this class.
        private static readonly object s_internalSyncObject = new object();

        /// <summary>
        /// Creates a new ReflectTypeDescriptionProvider. The type is the
        /// type we will obtain type information for.
        /// </summary>
        internal ReflectTypeDescriptionProvider()
        {
        }

        // Lazily-created table of editor tables, keyed by editor base type.
        private static Hashtable EditorTables =>
            LazyInitializer.EnsureInitialized(ref s_editorTables, () => new Hashtable(4));

        /// <summary>
        /// This is a table we create for intrinsic types.
        /// There should be entries here ONLY for intrinsic
        /// types, as all other types we should be able to
        /// add attributes directly as metadata.
        /// </summary>
        private static Hashtable IntrinsicTypeConverters =>
            LazyInitializer.EnsureInitialized(ref s_intrinsicTypeConverters, () => new Hashtable
            {
                // Add the intrinsics
                //
                [typeof(bool)] = typeof(BooleanConverter),
                [typeof(byte)] = typeof(ByteConverter),
                [typeof(SByte)] = typeof(SByteConverter),
                [typeof(char)] = typeof(CharConverter),
                [typeof(double)] = typeof(DoubleConverter),
                [typeof(string)] = typeof(StringConverter),
                [typeof(int)] = typeof(Int32Converter),
                [typeof(short)] = typeof(Int16Converter),
                [typeof(long)] = typeof(Int64Converter),
                [typeof(float)] = typeof(SingleConverter),
                [typeof(UInt16)] = typeof(UInt16Converter),
                [typeof(UInt32)] = typeof(UInt32Converter),
                [typeof(UInt64)] = typeof(UInt64Converter),
                [typeof(object)] = typeof(TypeConverter),
                [typeof(void)] = typeof(TypeConverter),
                [typeof(DateTime)] = typeof(DateTimeConverter),
                [typeof(DateTimeOffset)] = typeof(DateTimeOffsetConverter),
                [typeof(Decimal)] = typeof(DecimalConverter),
                [typeof(TimeSpan)] = typeof(TimeSpanConverter),
                [typeof(Guid)] = typeof(GuidConverter),
                [typeof(Uri)] = typeof(UriTypeConverter),
                [typeof(Color)] = typeof(ColorConverter),
                [typeof(Point)] = typeof(PointConverter),
                [typeof(Rectangle)] = typeof(RectangleConverter),
                [typeof(Size)] = typeof(SizeConverter),
                [typeof(SizeF)] = typeof(SizeFConverter),

                // Special cases for things that are not bound to a specific type
                //
                [typeof(Array)] = typeof(ArrayConverter),
                [typeof(ICollection)] = typeof(CollectionConverter),
                [typeof(Enum)] = typeof(EnumConverter),
                [s_intrinsicNullableKey] = typeof(NullableConverter),
            });

        // Lazily-created reflection caches; see the field comments above for key semantics.
        private static Hashtable PropertyCache =>
            LazyInitializer.EnsureInitialized(ref s_propertyCache, () => new Hashtable());

        private static Hashtable EventCache =>
            LazyInitializer.EnsureInitialized(ref s_eventCache, () => new Hashtable());

        private static Hashtable AttributeCache =>
            LazyInitializer.EnsureInitialized(ref s_attributeCache, () => new Hashtable());

        private static Hashtable ExtendedPropertyCache =>
            LazyInitializer.EnsureInitialized(ref s_extendedPropertyCache, () => new Hashtable());

        /// <summary>
        /// Adds an editor table for the given editor base type.
        /// Typically, editors are specified as metadata on an object. If no metadata for a
        /// requested editor base type can be found on an object, however, the
        /// TypeDescriptor will search an editor
        /// table for the editor type, if one can be found.
        /// </summary>
        internal static void AddEditorTable(Type editorBaseType, Hashtable table)
        {
            if (editorBaseType == null)
            {
                throw new ArgumentNullException(nameof(editorBaseType));
            }

            if (table == null)
            {
                Debug.Fail("COMPAT: Editor table should not be null");
                // don't throw; RTM didn't so we can't do it either.
            }

            lock (s_internalSyncObject)
            {
                Hashtable editorTables = EditorTables;
                // First registration wins; a table registered later for the same
                // base type is silently ignored.
                if (!editorTables.ContainsKey(editorBaseType))
                {
                    editorTables[editorBaseType] = table;
                }
            }
        }

        /// <summary>
        /// CreateInstance implementation. We delegate to Activator.
        /// </summary>
        public override object CreateInstance(IServiceProvider provider, Type objectType, Type[] argTypes, object[] args)
        {
            Debug.Assert(objectType != null, "Should have arg-checked before coming in here");

            object obj = null;

            if (argTypes != null)
            {
                obj = objectType.GetConstructor(argTypes)?.Invoke(args);
            }
            else
            {
                if (args != null)
                {
                    // No argument types given: infer them from the runtime types of the
                    // arguments, treating nulls as object.
                    argTypes = new Type[args.Length];
                    for (int idx = 0; idx < args.Length; idx++)
                    {
                        if (args[idx] != null)
                        {
                            argTypes[idx] = args[idx].GetType();
                        }
                        else
                        {
                            argTypes[idx] = typeof(object);
                        }
                    }
                }
                else
                {
                    argTypes = Array.Empty<Type>();
                }

                obj = objectType.GetConstructor(argTypes)?.Invoke(args);
            }

            // Fall back to Activator when no matching constructor was found.
            return obj ?? Activator.CreateInstance(objectType, args);
        }

        /// <summary>
        /// Helper method to create editors and type converters. This checks to see if the
        /// type implements a Type constructor, and if it does it invokes that ctor.
        /// Otherwise, it just tries to create the type.
        /// </summary>
        private static object CreateInstance(Type objectType, Type callingType)
        {
            // Prefer a ctor taking a single Type (e.g. EnumConverter(Type)); otherwise
            // use the default constructor.
            return objectType.GetConstructor(s_typeConstructor)?.Invoke(new object[] { callingType })
                ?? Activator.CreateInstance(objectType);
        }

        /// <summary>
        /// Retrieves custom attributes.
        /// </summary>
        internal AttributeCollection GetAttributes(Type type)
        {
            ReflectedTypeData td = GetTypeData(type, true);
            return td.GetAttributes();
        }

        /// <summary>
        /// Our implementation of GetCache sits on top of IDictionaryService.
        /// </summary>
        public override IDictionary GetCache(object instance)
        {
            IComponent comp = instance as IComponent;
            if (comp != null && comp.Site != null)
            {
                IDictionaryService ds = comp.Site.GetService(typeof(IDictionaryService)) as IDictionaryService;
                if (ds != null)
                {
                    // Create the per-component cache dictionary on first use and stash it
                    // in the dictionary service under our private key.
                    IDictionary dict = ds.GetValue(s_dictionaryKey) as IDictionary;
                    if (dict == null)
                    {
                        dict = new Hashtable();
                        ds.SetValue(s_dictionaryKey, dict);
                    }
                    return dict;
                }
            }

            return null;
        }

        /// <summary>
        /// Retrieves the class name for our type.
        /// </summary>
        internal string GetClassName(Type type)
        {
            ReflectedTypeData td = GetTypeData(type, true);
            return td.GetClassName(null);
        }

        /// <summary>
        /// Retrieves the component name from the site.
        /// </summary>
        internal string GetComponentName(Type type, object instance)
        {
            ReflectedTypeData td = GetTypeData(type, true);
            return td.GetComponentName(instance);
        }

        /// <summary>
        /// Retrieves the type converter. If instance is non-null,
        /// it will be used to retrieve attributes. Otherwise, _type
        /// will be used.
        /// </summary>
        internal TypeConverter GetConverter(Type type, object instance)
        {
            ReflectedTypeData td = GetTypeData(type, true);
            return td.GetConverter(instance);
        }

        /// <summary>
        /// Return the default event. The default event is determined by the
        /// presence of a DefaultEventAttribute on the class.
        /// </summary>
        internal EventDescriptor GetDefaultEvent(Type type, object instance)
        {
            ReflectedTypeData td = GetTypeData(type, true);
            return td.GetDefaultEvent(instance);
        }

        /// <summary>
        /// Return the default property.
        /// </summary>
        internal PropertyDescriptor GetDefaultProperty(Type type, object instance)
        {
            ReflectedTypeData td = GetTypeData(type, true);
            return td.GetDefaultProperty(instance);
        }

        /// <summary>
        /// Retrieves the editor for the given base type.
        /// </summary>
        internal object GetEditor(Type type, object instance, Type editorBaseType)
        {
            ReflectedTypeData td = GetTypeData(type, true);
            return td.GetEditor(instance, editorBaseType);
        }

        /// <summary>
        /// Retrieves a default editor table for the given editor base type.
        /// </summary>
        private static Hashtable GetEditorTable(Type editorBaseType)
        {
            Hashtable editorTables = EditorTables;
            object table = editorTables[editorBaseType];

            if (table == null)
            {
                // Before we give up, it is possible that the
                // class initializer for editorBaseType hasn't
                // actually run.
                //
                System.Runtime.CompilerServices.RuntimeHelpers.RunClassConstructor(editorBaseType.TypeHandle);
                table = editorTables[editorBaseType];

                // If the table is still null, then throw a
                // sentinel in there so we don't
                // go through this again.
                //
                if (table == null)
                {
                    lock (s_internalSyncObject)
                    {
                        table = editorTables[editorBaseType];
                        if (table == null)
                        {
                            // The table-of-tables itself is used as the "known missing"
                            // sentinel value; see the comparison below.
                            editorTables[editorBaseType] = editorTables;
                        }
                    }
                }
            }

            // Look for our sentinel value that indicates
            // we have already tried and failed to get
            // a table.
            //
            if (table == editorTables)
            {
                table = null;
            }

            return (Hashtable)table;
        }

        /// <summary>
        /// Retrieves the events for this type.
        /// </summary>
        internal EventDescriptorCollection GetEvents(Type type)
        {
            ReflectedTypeData td = GetTypeData(type, true);
            return td.GetEvents();
        }

        /// <summary>
        /// Retrieves custom extender attributes. We don't support
        /// extender attributes, so we always return an empty collection.
        /// </summary>
        internal AttributeCollection GetExtendedAttributes(object instance)
        {
            return AttributeCollection.Empty;
        }

        /// <summary>
        /// Retrieves the class name for our type.
        /// </summary>
        internal string GetExtendedClassName(object instance)
        {
            return GetClassName(instance.GetType());
        }

        /// <summary>
        /// Retrieves the component name from the site.
        /// </summary>
        internal string GetExtendedComponentName(object instance)
        {
            return GetComponentName(instance.GetType(), instance);
        }

        /// <summary>
        /// Retrieves the type converter. If instance is non-null,
        /// it will be used to retrieve attributes. Otherwise, _type
        /// will be used.
        /// </summary>
        internal TypeConverter GetExtendedConverter(object instance)
        {
            return GetConverter(instance.GetType(), instance);
        }

        /// <summary>
        /// Return the default event. The default event is determined by the
        /// presence of a DefaultEventAttribute on the class.
        /// </summary>
        internal EventDescriptor GetExtendedDefaultEvent(object instance)
        {
            return null; // we don't support extended events.
        }

        /// <summary>
        /// Return the default property.
        /// </summary>
        internal PropertyDescriptor GetExtendedDefaultProperty(object instance)
        {
            return null; // extender properties are never the default.
        }

        /// <summary>
        /// Retrieves the editor for the given base type.
        /// </summary>
        internal object GetExtendedEditor(object instance, Type editorBaseType)
        {
            return GetEditor(instance.GetType(), instance, editorBaseType);
        }

        /// <summary>
        /// Retrieves the events for this type.
        /// </summary>
        internal EventDescriptorCollection GetExtendedEvents(object instance)
        {
            return EventDescriptorCollection.Empty;
        }

        /// <summary>
        /// Retrieves the properties for this type.
        /// </summary>
        internal PropertyDescriptorCollection GetExtendedProperties(object instance)
        {
            // Is this object a sited component? If not, then it
            // doesn't have any extender properties.
            //
            Type componentType = instance.GetType();

            // Check the component for extender providers. We prefer
            // IExtenderListService, but will use the container if that's
            // all we have. In either case, we check the list of extenders
            // against previously stored data in the object cache. If
            // the cache is up to date, we just return the extenders in the
            // cache.
            //
            IExtenderProvider[] extenders = GetExtenderProviders(instance);
            IDictionary cache = TypeDescriptor.GetCache(instance);

            if (extenders.Length == 0)
            {
                return PropertyDescriptorCollection.Empty;
            }

            // Ok, we have a set of extenders. Now, check to see if there
            // are properties already in our object cache. If there aren't,
            // then we will need to create them.
            //
            PropertyDescriptorCollection properties = null;

            if (cache != null)
            {
                properties = cache[s_extenderPropertiesKey] as PropertyDescriptorCollection;
            }

            if (properties != null)
            {
                return properties;
            }

            // Unlike normal properties, it is fine for there to be properties with
            // duplicate names here.
            //
            List<PropertyDescriptor> propertyList = null;

            for (int idx = 0; idx < extenders.Length; idx++)
            {
                PropertyDescriptor[] propertyArray = ReflectGetExtendedProperties(extenders[idx]);

                if (propertyList == null)
                {
                    propertyList = new List<PropertyDescriptor>(propertyArray.Length * extenders.Length);
                }

                for (int propIdx = 0; propIdx < propertyArray.Length; propIdx++)
                {
                    PropertyDescriptor prop = propertyArray[propIdx];
                    ExtenderProvidedPropertyAttribute eppa = prop.Attributes[typeof(ExtenderProvidedPropertyAttribute)] as ExtenderProvidedPropertyAttribute;

                    Debug.Assert(eppa != null, $"Extender property {prop.Name} has no provider attribute. We will skip it.");
                    if (eppa != null)
                    {
                        Type receiverType = eppa.ReceiverType;
                        if (receiverType != null)
                        {
                            // Only keep properties whose receiver type applies to this component.
                            if (receiverType.IsAssignableFrom(componentType))
                            {
                                propertyList.Add(prop);
                            }
                        }
                    }
                }
            }

            // propertyHash now contains ExtendedPropertyDescriptor objects
            // for each extended property.
            //
            if (propertyList != null)
            {
                PropertyDescriptor[] fullArray = new PropertyDescriptor[propertyList.Count];
                propertyList.CopyTo(fullArray, 0);
                properties = new PropertyDescriptorCollection(fullArray, true);
            }
            else
            {
                properties = PropertyDescriptorCollection.Empty;
            }

            if (cache != null)
            {
                cache[s_extenderPropertiesKey] = properties;
            }

            return properties;
        }

        protected internal override IExtenderProvider[] GetExtenderProviders(object instance)
        {
            if (instance == null)
            {
                throw new ArgumentNullException(nameof(instance));
            }

            IComponent component = instance as IComponent;
            if (component != null && component.Site != null)
            {
                IExtenderListService extenderList = component.Site.GetService(typeof(IExtenderListService)) as IExtenderListService;
                IDictionary cache = TypeDescriptor.GetCache(instance);

                if (extenderList != null)
                {
                    return GetExtenders(extenderList.GetExtenderProviders(), instance, cache);
                }
#if FEATURE_COMPONENT_COLLECTION
                else
                {
                    IContainer cont = component.Site.Container;
                    if (cont != null)
                    {
                        return GetExtenders(cont.Components, instance, cache);
                    }
                }
#endif
            }

            return Array.Empty<IExtenderProvider>();
        }

        /// <summary>
        /// GetExtenders builds a list of extender providers from
        /// a collection of components. It validates the extenders
        /// against any cached collection of extenders in the
        /// cache. If there is a discrepancy, this will erase
        /// any cached extender properties from the cache and
        /// save the updated extender list. If there is no
        /// discrepancy this will simply return the cached list.
        /// </summary>
        private static IExtenderProvider[] GetExtenders(ICollection components, object instance, IDictionary cache)
        {
            bool newExtenders = false;
            int extenderCount = 0;
            IExtenderProvider[] existingExtenders = null;

            //CanExtend is expensive. We will remember results of CanExtend for the first 64 extenders and using "long canExtend" as a bit vector.
            // we want to avoid memory allocation as well so we don't use some more sophisticated data structure like an array of booleans
            UInt64 canExtend = 0;
            int maxCanExtendResults = 64;

            // currentExtenders is what we intend to return. If the caller passed us
            // the return value from IExtenderListService, components will already be
            // an IExtenderProvider[]. If not, then we must treat components as an
            // opaque collection. We spend a great deal of energy here to avoid
            // copying or allocating memory because this method is called every
            // time a component is asked for its properties.
            IExtenderProvider[] currentExtenders = components as IExtenderProvider[];

            if (cache != null)
            {
                existingExtenders = cache[ExtenderProviderKey] as IExtenderProvider[];
            }

            if (existingExtenders == null)
            {
                newExtenders = true;
            }

            int curIdx = 0;
            int idx = 0;

            if (currentExtenders != null)
            {
                for (curIdx = 0; curIdx < currentExtenders.Length; curIdx++)
                {
                    if (currentExtenders[curIdx].CanExtend(instance))
                    {
                        extenderCount++;
                        // Performance:We would like to call CanExtend as little as possible therefore we remember its result
                        if (curIdx < maxCanExtendResults)
                            canExtend |= (UInt64)1 << curIdx;
                        // Compare against the cached list in lock-step; any mismatch means
                        // the extender set changed since it was cached.
                        if (!newExtenders && (idx >= existingExtenders.Length || currentExtenders[curIdx] != existingExtenders[idx++]))
                        {
                            newExtenders = true;
                        }
                    }
                }
            }
            else if (components != null)
            {
                foreach (object obj in components)
                {
                    IExtenderProvider prov = obj as IExtenderProvider;
                    if (prov != null && prov.CanExtend(instance))
                    {
                        extenderCount++;
                        if (curIdx < maxCanExtendResults)
                            canExtend |= (UInt64)1 << curIdx;
                        if (!newExtenders && (idx >= existingExtenders.Length || prov != existingExtenders[idx++]))
                        {
                            newExtenders = true;
                        }
                    }
                    curIdx++;
                }
            }

            // A cached list of different length also means the set changed.
            if (existingExtenders != null && extenderCount != existingExtenders.Length)
            {
                newExtenders = true;
            }

            if (newExtenders)
            {
                if (currentExtenders == null || extenderCount != currentExtenders.Length)
                {
                    IExtenderProvider[] newExtenderArray = new IExtenderProvider[extenderCount];

                    curIdx = 0;
                    idx = 0;

                    if (currentExtenders != null && extenderCount > 0)
                    {
                        // Second pass: reuse the recorded CanExtend bits where available,
                        // re-invoking CanExtend only beyond the first 64 entries.
                        while (curIdx < currentExtenders.Length)
                        {
                            if ((curIdx < maxCanExtendResults && (canExtend & ((UInt64)1 << curIdx)) != 0) ||
                                (curIdx >= maxCanExtendResults && currentExtenders[curIdx].CanExtend(instance)))
                            {
                                Debug.Assert(idx < extenderCount, "There are more extenders than we expect");
                                newExtenderArray[idx++] = currentExtenders[curIdx];
                            }
                            curIdx++;
                        }
                        Debug.Assert(idx == extenderCount, "Wrong number of extenders");
                    }
                    else if (extenderCount > 0)
                    {
                        foreach (var component in components)
                        {
                            IExtenderProvider p = component as IExtenderProvider;
                            if (p != null && ((curIdx < maxCanExtendResults && (canExtend & ((UInt64)1 << curIdx)) != 0) ||
                                              (curIdx >= maxCanExtendResults && p.CanExtend(instance))))
                            {
                                Debug.Assert(idx < extenderCount, "There are more extenders than we expect");
                                newExtenderArray[idx++] = p;
                            }
                            curIdx++;
                        }
                        Debug.Assert(idx == extenderCount, "Wrong number of extenders");
                    }

                    currentExtenders = newExtenderArray;
                }

                if (cache != null)
                {
                    // Store the new list and invalidate the extended-property cache,
                    // which was derived from the old extender set.
                    cache[ExtenderProviderKey] = currentExtenders;
                    cache.Remove(s_extenderPropertiesKey);
                }
            }
            else
            {
                currentExtenders = existingExtenders;
            }

            return currentExtenders;
        }

        /// <summary>
        /// Retrieves the owner for a property.
        /// </summary>
        internal object GetExtendedPropertyOwner(object instance, PropertyDescriptor pd)
        {
            return GetPropertyOwner(instance.GetType(), instance, pd);
        }

        //////////////////////////////////////////////////////////
        /// <summary>
        /// Provides a type descriptor for the given object. We only support this
        /// if the object is a component that
        /// </summary>
        public override ICustomTypeDescriptor GetExtendedTypeDescriptor(object instance)
        {
            Debug.Fail("This should never be invoked. TypeDescriptionNode should wrap for us.");
            return null;
        }

        /// <summary>
        /// The name of the specified component, or null if the component has no name.
        /// In many cases this will return the same value as GetComponentName.
If the /// component resides in a nested container or has other nested semantics, it may /// return a different fully qualfied name. /// /// If not overridden, the default implementation of this method will call /// GetComponentName. /// </summary> public override string GetFullComponentName(object component) { IComponent comp = component as IComponent; INestedSite ns = comp?.Site as INestedSite; if (ns != null) { return ns.FullName; } return TypeDescriptor.GetComponentName(component); } /// <summary> /// Returns an array of types we have populated metadata for that live /// in the current module. /// </summary> internal Type[] GetPopulatedTypes(Module module) { List<Type> typeList = new List<Type>(); // Manual use of IDictionaryEnumerator instead of foreach to avoid DictionaryEntry box allocations. IDictionaryEnumerator e = _typeData.GetEnumerator(); while (e.MoveNext()) { DictionaryEntry de = e.Entry; Type type = (Type)de.Key; ReflectedTypeData typeData = (ReflectedTypeData)de.Value; if (type.Module == module && typeData.IsPopulated) { typeList.Add(type); } } return typeList.ToArray(); } /// <summary> /// Retrieves the properties for this type. /// </summary> internal PropertyDescriptorCollection GetProperties(Type type) { ReflectedTypeData td = GetTypeData(type, true); return td.GetProperties(); } /// <summary> /// Retrieves the owner for a property. /// </summary> internal object GetPropertyOwner(Type type, object instance, PropertyDescriptor pd) { return TypeDescriptor.GetAssociation(type, instance); } /// <summary> /// Returns an Type for the given type. Since type implements IReflect, /// we just return objectType. /// </summary> public override Type GetReflectionType(Type objectType, object instance) { Debug.Assert(objectType != null, "Should have arg-checked before coming in here"); return objectType; } /// <summary> /// Returns the type data for the given type, or /// null if there is no type data for the type yet and /// createIfNeeded is false. 
/// </summary> private ReflectedTypeData GetTypeData(Type type, bool createIfNeeded) { ReflectedTypeData td = null; if (_typeData != null) { td = (ReflectedTypeData)_typeData[type]; if (td != null) { return td; } } lock (s_internalSyncObject) { if (_typeData != null) { td = (ReflectedTypeData)_typeData[type]; } if (td == null && createIfNeeded) { td = new ReflectedTypeData(type); if (_typeData == null) { _typeData = new Hashtable(); } _typeData[type] = td; } } return td; } /// <summary> /// This method returns a custom type descriptor for the given type / object. /// The objectType parameter is always valid, but the instance parameter may /// be null if no instance was passed to TypeDescriptor. The method should /// return a custom type descriptor for the object. If the method is not /// interested in providing type information for the object it should /// return null. /// </summary> public override ICustomTypeDescriptor GetTypeDescriptor(Type objectType, object instance) { Debug.Fail("This should never be invoked. TypeDescriptionNode should wrap for us."); return null; } /// <summary> /// Retrieves a type from a name. /// </summary> private static Type GetTypeFromName(string typeName) { Type t = Type.GetType(typeName); if (t == null) { int commaIndex = typeName.IndexOf(','); if (commaIndex != -1) { // At design time, it's possible for us to reuse // an assembly but add new types. The app domain // will cache the assembly based on identity, however, // so it could be looking in the previous version // of the assembly and not finding the type. We work // around this by looking for the non-assembly qualified // name, which causes the domain to raise a type // resolve event. // t = Type.GetType(typeName.Substring(0, commaIndex)); } } return t; } /// <summary> /// This method returns true if the data cache in this reflection /// type descriptor has data in it. 
        /// </summary>
        internal bool IsPopulated(Type type)
        {
            // Pass createIfNeeded: false so the query itself never populates the cache.
            ReflectedTypeData td = GetTypeData(type, false);
            if (td != null)
            {
                return td.IsPopulated;
            }
            return false;
        }

        /// <summary>
        /// Static helper API around reflection to get and cache
        /// custom attributes. This does not recurse, but it will
        /// walk interfaces on the type. Interfaces are added
        /// to the end, so merging should be done from length - 1
        /// to 0.
        /// </summary>
        internal static Attribute[] ReflectGetAttributes(Type type)
        {
            // Lock-free read first; fall into double-checked locking on a miss.
            Hashtable attributeCache = AttributeCache;
            Attribute[] attrs = (Attribute[])attributeCache[type];
            if (attrs != null)
            {
                return attrs;
            }

            lock (s_internalSyncObject)
            {
                attrs = (Attribute[])attributeCache[type];
                if (attrs == null)
                {
                    // Get the type's attributes.
                    //
                    attrs = type.GetCustomAttributes(typeof(Attribute), false).OfType<Attribute>().ToArray();
                    attributeCache[type] = attrs;
                }
            }

            return attrs;
        }

        /// <summary>
        /// Static helper API around reflection to get and cache
        /// custom attributes. This does not recurse to the base class.
        /// </summary>
        internal static Attribute[] ReflectGetAttributes(MemberInfo member)
        {
            Hashtable attributeCache = AttributeCache;
            Attribute[] attrs = (Attribute[])attributeCache[member];
            if (attrs != null)
            {
                return attrs;
            }

            lock (s_internalSyncObject)
            {
                attrs = (Attribute[])attributeCache[member];
                if (attrs == null)
                {
                    // Get the member's attributes.
                    //
                    attrs = member.GetCustomAttributes(typeof(Attribute), false).OfType<Attribute>().ToArray();
                    attributeCache[member] = attrs;
                }
            }

            return attrs;
        }

        /// <summary>
        /// Static helper API around reflection to get and cache
        /// events. This does not recurse to the base class.
        /// </summary>
        private static EventDescriptor[] ReflectGetEvents(Type type)
        {
            Hashtable eventCache = EventCache;
            EventDescriptor[] events = (EventDescriptor[])eventCache[type];
            if (events != null)
            {
                return events;
            }

            lock (s_internalSyncObject)
            {
                events = (EventDescriptor[])eventCache[type];
                if (events == null)
                {
                    BindingFlags bindingFlags = BindingFlags.DeclaredOnly | BindingFlags.Public | BindingFlags.Instance;

                    // Get the type's events. Events may have their add and
                    // remove methods individually overridden in a derived
                    // class, but at some point in the base class chain both
                    // methods must exist. If we find an event that doesn't
                    // have both add and remove, we skip it here, because it
                    // will be picked up in our base class scan.
                    //
                    EventInfo[] eventInfos = type.GetEvents(bindingFlags);
                    events = new EventDescriptor[eventInfos.Length];
                    int eventCount = 0;
                    for (int idx = 0; idx < eventInfos.Length; idx++)
                    {
                        EventInfo eventInfo = eventInfos[idx];

                        // GetEvents returns events that are on nonpublic types
                        // if those types are from our assembly. Screen these.
                        //
                        if ((!(eventInfo.DeclaringType.IsPublic || eventInfo.DeclaringType.IsNestedPublic)) && (eventInfo.DeclaringType.Assembly == typeof(ReflectTypeDescriptionProvider).Assembly))
                        {
                            Debug.Fail("Hey, assumption holds true. Rip this assert.");
                            continue;
                        }

                        if (eventInfo.AddMethod != null && eventInfo.RemoveMethod != null)
                        {
                            events[eventCount++] = new ReflectEventDescriptor(type, eventInfo);
                        }
                    }

                    // Trim the array when some events were screened out above.
                    if (eventCount != events.Length)
                    {
                        EventDescriptor[] newEvents = new EventDescriptor[eventCount];
                        Array.Copy(events, 0, newEvents, 0, eventCount);
                        events = newEvents;
                    }

#if DEBUG
                    foreach (EventDescriptor dbgEvent in events)
                    {
                        Debug.Assert(dbgEvent != null, "Holes in event array for type " + type);
                    }
#endif
                    eventCache[type] = events;
                }
            }

            return events;
        }

        /// <summary>
        /// This performs the actual reflection needed to discover
        /// extender properties.
        /// If object caching is supported this
        /// will maintain a cache of property descriptors on the
        /// extender provider. Extender properties are actually two
        /// property descriptors in one. There is a chunk of per-class
        /// data in a ReflectPropertyDescriptor that defines the
        /// parameter types and get and set methods of the extended property,
        /// and there is an ExtendedPropertyDescriptor that combines this
        /// with an extender provider object to create what looks like a
        /// normal property. ReflectGetExtendedProperties maintains two
        /// separate caches for these two sets: a static one for the
        /// ReflectPropertyDescriptor values that don't change for each
        /// provider instance, and a per-provider cache that contains
        /// the ExtendedPropertyDescriptors.
        /// </summary>
        private static PropertyDescriptor[] ReflectGetExtendedProperties(IExtenderProvider provider)
        {
            IDictionary cache = TypeDescriptor.GetCache(provider);
            PropertyDescriptor[] properties;

            if (cache != null)
            {
                properties = cache[s_extenderProviderPropertiesKey] as PropertyDescriptor[];
                if (properties != null)
                {
                    return properties;
                }
            }

            // Our per-instance cache missed. We have never seen this instance of the
            // extender provider before. See if we can find our class-based
            // property store.
            //
            Type providerType = provider.GetType();
            Hashtable extendedPropertyCache = ExtendedPropertyCache;
            ReflectPropertyDescriptor[] extendedProperties = (ReflectPropertyDescriptor[])extendedPropertyCache[providerType];
            if (extendedProperties == null)
            {
                lock (s_internalSyncObject)
                {
                    extendedProperties = (ReflectPropertyDescriptor[])extendedPropertyCache[providerType];

                    // Our class-based property store failed as well, so we need to build up the set of
                    // extended properties here.
                    //
                    if (extendedProperties == null)
                    {
                        AttributeCollection attributes = TypeDescriptor.GetAttributes(providerType);
                        List<ReflectPropertyDescriptor> extendedList = new List<ReflectPropertyDescriptor>(attributes.Count);

                        foreach (Attribute attr in attributes)
                        {
                            ProvidePropertyAttribute provideAttr = attr as ProvidePropertyAttribute;

                            if (provideAttr != null)
                            {
                                Type receiverType = GetTypeFromName(provideAttr.ReceiverTypeName);

                                if (receiverType != null)
                                {
                                    // An extended property requires a public instance
                                    // Get<Name>(receiverType) method on the provider.
                                    MethodInfo getMethod = providerType.GetMethod("Get" + provideAttr.PropertyName, new Type[] { receiverType });

                                    if (getMethod != null && !getMethod.IsStatic && getMethod.IsPublic)
                                    {
                                        // The matching Set<Name> is optional; a missing or
                                        // non-public/static setter makes the property read-only.
                                        MethodInfo setMethod = providerType.GetMethod("Set" + provideAttr.PropertyName, new Type[] { receiverType, getMethod.ReturnType });

                                        if (setMethod != null && (setMethod.IsStatic || !setMethod.IsPublic))
                                        {
                                            setMethod = null;
                                        }

                                        extendedList.Add(new ReflectPropertyDescriptor(providerType, provideAttr.PropertyName, getMethod.ReturnType, receiverType, getMethod, setMethod, null));
                                    }
                                }
                            }
                        }

                        extendedProperties = new ReflectPropertyDescriptor[extendedList.Count];
                        extendedList.CopyTo(extendedProperties, 0);
                        extendedPropertyCache[providerType] = extendedProperties;
                    }
                }
            }

            // Now that we have our extended properties we can build up a list of callable properties. These can be
            // returned to the user.
            //
            properties = new PropertyDescriptor[extendedProperties.Length];
            for (int idx = 0; idx < extendedProperties.Length; idx++)
            {
                ReflectPropertyDescriptor rpd = extendedProperties[idx];

                properties[idx] = new ExtendedPropertyDescriptor(rpd, rpd.ExtenderGetReceiverType(), provider, null);
            }

            if (cache != null)
            {
                cache[s_extenderProviderPropertiesKey] = properties;
            }

            return properties;
        }

        /// <summary>
        /// Static helper API around reflection to get and cache
        /// properties. This does not recurse to the base class.
/// </summary>
private static PropertyDescriptor[] ReflectGetProperties(Type type)
{
    // Check the shared cache outside the lock first; the common case is a hit.
    Hashtable propertyCache = PropertyCache;
    PropertyDescriptor[] properties = (PropertyDescriptor[])propertyCache[type];
    if (properties != null)
    {
        return properties;
    }

    lock (s_internalSyncObject)
    {
        // Re-check under the lock in case another thread filled the
        // cache while we waited (double-checked locking).
        properties = (PropertyDescriptor[])propertyCache[type];

        if (properties == null)
        {
            // DeclaredOnly: only this type's own public instance properties;
            // callers merge in base-class properties separately.
            BindingFlags bindingFlags = BindingFlags.DeclaredOnly | BindingFlags.Public | BindingFlags.Instance;

            // Get the type's properties. Properties may have their
            // get and set methods individually overridden in a derived
            // class, so if we find a missing method we need to walk
            // down the base class chain to find it. We actually merge
            // "new" properties of the same name, so we must preserve
            // the member info for each method individually.
            //
            PropertyInfo[] propertyInfos = type.GetProperties(bindingFlags);
            properties = new PropertyDescriptor[propertyInfos.Length];
            int propertyCount = 0;
            for (int idx = 0; idx < propertyInfos.Length; idx++)
            {
                PropertyInfo propertyInfo = propertyInfos[idx];

                // Today we do not support parameterized properties.
                //
                if (propertyInfo.GetIndexParameters().Length > 0)
                {
                    continue;
                }

                MethodInfo getMethod = propertyInfo.GetMethod;
                MethodInfo setMethod = propertyInfo.SetMethod;
                string name = propertyInfo.Name;

                // If the property only overrode "set", then we don't
                // pick it up here. Rather, we just merge it in from
                // the base class list.

                // If a property had at least a get method, we consider it. We don't
                // consider write-only properties.
                //
                if (getMethod != null)
                {
                    properties[propertyCount++] = new ReflectPropertyDescriptor(type, name, propertyInfo.PropertyType, propertyInfo, getMethod, setMethod, null);
                }
            }

            // Trim the array if indexed/write-only properties were skipped.
            if (propertyCount != properties.Length)
            {
                PropertyDescriptor[] newProperties = new PropertyDescriptor[propertyCount];
                Array.Copy(properties, 0, newProperties, 0, propertyCount);
                properties = newProperties;
            }

            Debug.Assert(!properties.Any(dbgProp => dbgProp == null), $"Holes in property array for type {type}");

            propertyCache[type] = properties;
        }
    }

    return properties;
}

/// <summary>
/// Refreshes the contents of this type descriptor. This does not
/// actually requery, but it will clear our state so the next
/// query re-populates.
/// </summary>
internal void Refresh(Type type)
{
    ReflectedTypeData td = GetTypeData(type, false);
    td?.Refresh();
}

/// <summary>
/// Searches the provided intrinsic hashtable for a match with the object type.
/// At the beginning, the hashtable contains types for the various converters.
/// As this table is searched, the types for these objects
/// are replaced with instances, so we only create as needed. This method
/// does the search up the base class hierarchy and will create instances
/// for types as needed. These instances are stored back into the table
/// for the base type, and for the original component type, for fast access.
/// </summary>
private static object SearchIntrinsicTable(Hashtable table, Type callingType)
{
    object hashEntry = null;

    // We take a lock on this table. Nothing in this code calls out to
    // other methods that lock, so it should be fairly safe to grab this
    // lock. Also, this allows multiple intrinsic tables to be searched
    // at once.
    //
    lock (table)
    {
        // Walk the class hierarchy looking for a direct (non-interface) match.
        Type baseType = callingType;
        while (baseType != null && baseType != typeof(object))
        {
            hashEntry = table[baseType];

            // If the entry is a late-bound type, then try to
            // resolve it.
            //
            string typeString = hashEntry as string;
            if (typeString != null)
            {
                hashEntry = Type.GetType(typeString);
                if (hashEntry != null)
                {
                    // Replace the string with the resolved Type so the
                    // late-bound lookup only happens once.
                    table[baseType] = hashEntry;
                }
            }

            if (hashEntry != null)
            {
                break;
            }

            baseType = baseType.BaseType;
        }

        // Now make a scan through each value in the table, looking for interfaces.
        // If we find one, see if the object implements the interface.
        //
        if (hashEntry == null)
        {
            // Manual use of IDictionaryEnumerator instead of foreach to avoid DictionaryEntry box allocations.
            IDictionaryEnumerator e = table.GetEnumerator();
            while (e.MoveNext())
            {
                DictionaryEntry de = e.Entry;
                Type keyType = de.Key as Type;

                if (keyType != null && keyType.IsInterface && keyType.IsAssignableFrom(callingType))
                {
                    hashEntry = de.Value;
                    string typeString = hashEntry as string;

                    if (typeString != null)
                    {
                        hashEntry = Type.GetType(typeString);
                        if (hashEntry != null)
                        {
                            table[callingType] = hashEntry;
                        }
                    }

                    if (hashEntry != null)
                    {
                        break;
                    }
                }
            }
        }

        // Special case converters
        //
        if (hashEntry == null)
        {
            if (callingType.IsGenericType && callingType.GetGenericTypeDefinition() == typeof(Nullable<>))
            {
                // Check if it is a nullable value
                hashEntry = table[s_intrinsicNullableKey];
            }
            else if (callingType.IsInterface)
            {
                // Finally, check to see if the component type is some unknown interface.
                // We have a custom converter for that.
                hashEntry = table[s_intrinsicReferenceKey];
            }
        }

        // Interfaces do not derive from object, so we
        // must handle the case of no hash entry here.
        //
        if (hashEntry == null)
        {
            hashEntry = table[typeof(object)];
        }

        // If the entry is a type, create an instance of it and then
        // replace the entry. This way we only need to create once.
        // We can only do this if the object doesn't want a type
        // in its constructor.
        //
        Type type = hashEntry as Type;
        if (type != null)
        {
            hashEntry = CreateInstance(type, callingType);
            if (type.GetConstructor(s_typeConstructor) == null)
            {
                table[callingType] = hashEntry;
            }
        }
    }

    return hashEntry;
}
}
}
// Designer-generated UI definition for the serial port settings dialog (Form2).
// NOTE(review): this file is maintained by the Windows Forms designer; do not
// hand-edit InitializeComponent.
namespace Termie
{
    partial class Form2
    {
        /// <summary>
        /// Required designer variable.
        /// </summary>
        private System.ComponentModel.IContainer components = null;

        /// <summary>
        /// Clean up any resources being used.
        /// </summary>
        /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
        protected override void Dispose(bool disposing)
        {
            if (disposing && (components != null))
            {
                components.Dispose();
            }
            base.Dispose(disposing);
        }

        #region Windows Form Designer generated code

        /// <summary>
        /// Required method for Designer support - do not modify
        /// the contents of this method with the code editor.
        /// </summary>
        private void InitializeComponent()
        {
            this.groupBox1 = new System.Windows.Forms.GroupBox();
            this.comboBox6 = new System.Windows.Forms.ComboBox();
            this.comboBox5 = new System.Windows.Forms.ComboBox();
            this.comboBox4 = new System.Windows.Forms.ComboBox();
            this.comboBox3 = new System.Windows.Forms.ComboBox();
            this.comboBox2 = new System.Windows.Forms.ComboBox();
            this.label6 = new System.Windows.Forms.Label();
            this.label5 = new System.Windows.Forms.Label();
            this.label4 = new System.Windows.Forms.Label();
            this.label3 = new System.Windows.Forms.Label();
            this.label2 = new System.Windows.Forms.Label();
            this.label1 = new System.Windows.Forms.Label();
            this.comboBox1 = new System.Windows.Forms.ComboBox();
            this.groupBox2 = new System.Windows.Forms.GroupBox();
            this.checkBox1 = new System.Windows.Forms.CheckBox();
            this.checkBox5 = new System.Windows.Forms.CheckBox();
            this.checkBox4 = new System.Windows.Forms.CheckBox();
            this.checkBox3 = new System.Windows.Forms.CheckBox();
            this.checkBox2 = new System.Windows.Forms.CheckBox();
            this.radioButton4 = new System.Windows.Forms.RadioButton();
            this.radioButton3 = new System.Windows.Forms.RadioButton();
            this.radioButton2 = new System.Windows.Forms.RadioButton();
            this.radioButton1 = new System.Windows.Forms.RadioButton();
            this.button1 = new System.Windows.Forms.Button();
            this.button2 = new System.Windows.Forms.Button();
            this.textBox1 = new System.Windows.Forms.TextBox();
            this.button3 = new System.Windows.Forms.Button();
            this.groupBox3 = new System.Windows.Forms.GroupBox();
            this.groupBox1.SuspendLayout();
            this.groupBox2.SuspendLayout();
            this.groupBox3.SuspendLayout();
            this.SuspendLayout();
            // 
            // groupBox1 ("Port configuration": port/baud/data/stop/parity/handshake pickers)
            // 
            this.groupBox1.Controls.Add(this.comboBox6);
            this.groupBox1.Controls.Add(this.comboBox5);
            this.groupBox1.Controls.Add(this.comboBox4);
            this.groupBox1.Controls.Add(this.comboBox3);
            this.groupBox1.Controls.Add(this.comboBox2);
            this.groupBox1.Controls.Add(this.label6);
            this.groupBox1.Controls.Add(this.label5);
            this.groupBox1.Controls.Add(this.label4);
            this.groupBox1.Controls.Add(this.label3);
            this.groupBox1.Controls.Add(this.label2);
            this.groupBox1.Controls.Add(this.label1);
            this.groupBox1.Controls.Add(this.comboBox1);
            this.groupBox1.Location = new System.Drawing.Point(13, 13);
            this.groupBox1.Name = "groupBox1";
            this.groupBox1.Size = new System.Drawing.Size(172, 192);
            this.groupBox1.TabIndex = 0;
            this.groupBox1.TabStop = false;
            this.groupBox1.Text = "Port configuration";
            // 
            // comboBox6
            // 
            this.comboBox6.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList;
            this.comboBox6.FormattingEnabled = true;
            this.comboBox6.Location = new System.Drawing.Point(9, 158);
            this.comboBox6.Name = "comboBox6";
            this.comboBox6.Size = new System.Drawing.Size(136, 21);
            this.comboBox6.TabIndex = 11;
            // 
            // comboBox5
            // 
            this.comboBox5.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList;
            this.comboBox5.FormattingEnabled = true;
            this.comboBox5.Location = new System.Drawing.Point(68, 116);
            this.comboBox5.Name = "comboBox5";
            this.comboBox5.Size = new System.Drawing.Size(77, 21);
            this.comboBox5.TabIndex = 10;
            // 
            // comboBox4
            // 
            this.comboBox4.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList;
            this.comboBox4.FormattingEnabled = true;
            this.comboBox4.Location = new System.Drawing.Point(68, 92);
            this.comboBox4.Name = "comboBox4";
            this.comboBox4.Size = new System.Drawing.Size(77, 21);
            this.comboBox4.TabIndex = 9;
            // 
            // comboBox3
            // 
            this.comboBox3.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList;
            this.comboBox3.FormattingEnabled = true;
            this.comboBox3.Location = new System.Drawing.Point(68, 68);
            this.comboBox3.Name = "comboBox3";
            this.comboBox3.Size = new System.Drawing.Size(77, 21);
            this.comboBox3.TabIndex = 8;
            // 
            // comboBox2
            // 
            this.comboBox2.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList;
            this.comboBox2.FormattingEnabled = true;
            this.comboBox2.Location = new System.Drawing.Point(68, 44);
            this.comboBox2.Name = "comboBox2";
            this.comboBox2.Size = new System.Drawing.Size(77, 21);
            this.comboBox2.TabIndex = 7;
            // 
            // label6
            // 
            this.label6.AutoSize = true;
            this.label6.Location = new System.Drawing.Point(6, 143);
            this.label6.Name = "label6";
            this.label6.Size = new System.Drawing.Size(119, 13);
            this.label6.TabIndex = 6;
            this.label6.Text = "Hardware Handshaking";
            // 
            // label5
            // 
            this.label5.AutoSize = true;
            this.label5.Location = new System.Drawing.Point(6, 96);
            this.label5.Name = "label5";
            this.label5.Size = new System.Drawing.Size(33, 13);
            this.label5.TabIndex = 5;
            this.label5.Text = "Parity";
            // 
            // label4
            // 
            this.label4.AutoSize = true;
            this.label4.Location = new System.Drawing.Point(6, 119);
            this.label4.Name = "label4";
            this.label4.Size = new System.Drawing.Size(48, 13);
            this.label4.TabIndex = 4;
            this.label4.Text = "Stop bits";
            // 
            // label3
            // 
            this.label3.AutoSize = true;
            this.label3.Location = new System.Drawing.Point(6, 73);
            this.label3.Name = "label3";
            this.label3.Size = new System.Drawing.Size(49, 13);
            this.label3.TabIndex = 3;
            this.label3.Text = "Data bits";
            // 
            // label2
            // 
            this.label2.AutoSize = true;
            this.label2.Location = new System.Drawing.Point(6, 48);
            this.label2.Name = "label2";
            this.label2.Size = new System.Drawing.Size(53, 13);
            this.label2.TabIndex = 2;
            this.label2.Text = "Baud rate";
            // 
            // label1
            // 
            this.label1.AutoSize = true;
            this.label1.Location = new System.Drawing.Point(6, 23);
            this.label1.Name = "label1";
            this.label1.Size = new System.Drawing.Size(26, 13);
            this.label1.TabIndex = 1;
            this.label1.Text = "Port";
            // 
            // comboBox1
            // 
            this.comboBox1.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList;
            this.comboBox1.FormattingEnabled = true;
            this.comboBox1.Location = new System.Drawing.Point(68, 20);
            this.comboBox1.Name = "comboBox1";
            this.comboBox1.Size = new System.Drawing.Size(77, 21);
            this.comboBox1.TabIndex = 0;
            // 
            // groupBox2 ("Options": line-ending radios and display/behavior checkboxes)
            // 
            this.groupBox2.Controls.Add(this.checkBox1);
            this.groupBox2.Controls.Add(this.checkBox5);
            this.groupBox2.Controls.Add(this.checkBox4);
            this.groupBox2.Controls.Add(this.checkBox3);
            this.groupBox2.Controls.Add(this.checkBox2);
            this.groupBox2.Controls.Add(this.radioButton4);
            this.groupBox2.Controls.Add(this.radioButton3);
            this.groupBox2.Controls.Add(this.radioButton2);
            this.groupBox2.Controls.Add(this.radioButton1);
            this.groupBox2.Location = new System.Drawing.Point(201, 12);
            this.groupBox2.Name = "groupBox2";
            this.groupBox2.Size = new System.Drawing.Size(182, 193);
            this.groupBox2.TabIndex = 1;
            this.groupBox2.TabStop = false;
            this.groupBox2.Text = "Options";
            // 
            // checkBox1
            // 
            this.checkBox1.AutoSize = true;
            this.checkBox1.Location = new System.Drawing.Point(15, 102);
            this.checkBox1.Name = "checkBox1";
            this.checkBox1.Size = new System.Drawing.Size(78, 17);
            this.checkBox1.TabIndex = 8;
            this.checkBox1.Text = "Hex output";
            this.checkBox1.UseVisualStyleBackColor = true;
            // 
            // checkBox5
            // 
            this.checkBox5.AutoSize = true;
            this.checkBox5.Location = new System.Drawing.Point(15, 170);
            this.checkBox5.Name = "checkBox5";
            this.checkBox5.Size = new System.Drawing.Size(118, 17);
            this.checkBox5.TabIndex = 7;
            this.checkBox5.Text = "Filter case sensitive";
            this.checkBox5.UseVisualStyleBackColor = true;
            // 
            // checkBox4
            // 
            this.checkBox4.AutoSize = true;
            this.checkBox4.Location = new System.Drawing.Point(15, 153);
            this.checkBox4.Name = "checkBox4";
            this.checkBox4.Size = new System.Drawing.Size(80, 17);
            this.checkBox4.TabIndex = 6;
            this.checkBox4.Text = "Stay on top";
            this.checkBox4.UseVisualStyleBackColor = true;
            // 
            // checkBox3
            // 
            this.checkBox3.AutoSize = true;
            this.checkBox3.Location = new System.Drawing.Point(15, 136);
            this.checkBox3.Name = "checkBox3";
            this.checkBox3.Size = new System.Drawing.Size(79, 17);
            this.checkBox3.TabIndex = 5;
            this.checkBox3.Text = "Local echo";
            this.checkBox3.UseVisualStyleBackColor = true;
            // 
            // checkBox2
            // 
            this.checkBox2.AutoSize = true;
            this.checkBox2.Location = new System.Drawing.Point(15, 119);
            this.checkBox2.Name = "checkBox2";
            this.checkBox2.Size = new System.Drawing.Size(109, 17);
            this.checkBox2.TabIndex = 4;
            this.checkBox2.Text = "Monospaced font";
            this.checkBox2.UseVisualStyleBackColor = true;
            // 
            // radioButton4
            // 
            this.radioButton4.AutoSize = true;
            this.radioButton4.Location = new System.Drawing.Point(15, 73);
            this.radioButton4.Name = "radioButton4";
            this.radioButton4.Size = new System.Drawing.Size(95, 17);
            this.radioButton4.TabIndex = 3;
            this.radioButton4.TabStop = true;
            this.radioButton4.Text = "Append CR-LF";
            this.radioButton4.UseVisualStyleBackColor = true;
            // 
            // radioButton3
            // 
            this.radioButton3.AutoSize = true;
            this.radioButton3.Location = new System.Drawing.Point(15, 55);
            this.radioButton3.Name = "radioButton3";
            this.radioButton3.Size = new System.Drawing.Size(77, 17);
            this.radioButton3.TabIndex = 2;
            this.radioButton3.TabStop = true;
            this.radioButton3.Text = "Append LF";
            this.radioButton3.UseVisualStyleBackColor = true;
            // 
            // radioButton2
            // 
            this.radioButton2.AutoSize = true;
            this.radioButton2.Location = new System.Drawing.Point(15, 37);
            this.radioButton2.Name = "radioButton2";
            this.radioButton2.Size = new System.Drawing.Size(80, 17);
            this.radioButton2.TabIndex = 1;
            this.radioButton2.TabStop = true;
            this.radioButton2.Text = "Append CR";
            this.radioButton2.UseVisualStyleBackColor = true;
            // 
            // radioButton1
            // 
            this.radioButton1.AutoSize = true;
            this.radioButton1.Location = new System.Drawing.Point(15, 19);
            this.radioButton1.Name = "radioButton1";
            this.radioButton1.Size = new System.Drawing.Size(100, 17);
            this.radioButton1.TabIndex = 0;
            this.radioButton1.TabStop = true;
            this.radioButton1.Text = "Append nothing";
            this.radioButton1.UseVisualStyleBackColor = true;
            // 
            // button1 (OK)
            // 
            this.button1.Location = new System.Drawing.Point(400, 25);
            this.button1.Name = "button1";
            this.button1.Size = new System.Drawing.Size(75, 23);
            this.button1.TabIndex = 2;
            this.button1.Text = "OK";
            this.button1.UseVisualStyleBackColor = true;
            this.button1.Click += new System.EventHandler(this.button1_Click);
            // 
            // button2 (Cancel)
            // 
            this.button2.Location = new System.Drawing.Point(400, 62);
            this.button2.Name = "button2";
            this.button2.Size = new System.Drawing.Size(75, 23);
            this.button2.TabIndex = 3;
            this.button2.Text = "Cancel";
            this.button2.UseVisualStyleBackColor = true;
            this.button2.Click += new System.EventHandler(this.button2_Click);
            // 
            // textBox1 (read-only display of the current log file path)
            // 
            this.textBox1.Location = new System.Drawing.Point(8, 19);
            this.textBox1.Name = "textBox1";
            this.textBox1.ReadOnly = true;
            this.textBox1.Size = new System.Drawing.Size(263, 20);
            this.textBox1.TabIndex = 5;
            // 
            // button3 (Change log file)
            // 
            this.button3.Location = new System.Drawing.Point(280, 17);
            this.button3.Name = "button3";
            this.button3.Size = new System.Drawing.Size(75, 23);
            this.button3.TabIndex = 6;
            this.button3.Text = "Change";
            this.button3.UseVisualStyleBackColor = true;
            this.button3.Click += new System.EventHandler(this.button3_Click);
            // 
            // groupBox3 ("Log File")
            // 
            this.groupBox3.Controls.Add(this.textBox1);
            this.groupBox3.Controls.Add(this.button3);
            this.groupBox3.Location = new System.Drawing.Point(14, 208);
            this.groupBox3.Name = "groupBox3";
            this.groupBox3.Size = new System.Drawing.Size(368, 50);
            this.groupBox3.TabIndex = 7;
            this.groupBox3.TabStop = false;
            this.groupBox3.Text = "Log File";
            // 
            // Form2
            // 
            this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F);
            this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
            this.ClientSize = new System.Drawing.Size(492, 270);
            this.ControlBox = false;
            this.Controls.Add(this.groupBox3);
            this.Controls.Add(this.button2);
            this.Controls.Add(this.button1);
            this.Controls.Add(this.groupBox2);
            this.Controls.Add(this.groupBox1);
            this.Name = "Form2";
            this.Text = "Serial port settings";
            this.groupBox1.ResumeLayout(false);
            this.groupBox1.PerformLayout();
            this.groupBox2.ResumeLayout(false);
            this.groupBox2.PerformLayout();
            this.groupBox3.ResumeLayout(false);
            this.groupBox3.PerformLayout();
            this.ResumeLayout(false);

        }

        #endregion

        private System.Windows.Forms.GroupBox groupBox1;
        private System.Windows.Forms.ComboBox comboBox1;
        private System.Windows.Forms.GroupBox groupBox2;
        private System.Windows.Forms.Button button1;
        private System.Windows.Forms.Button button2;
        private System.Windows.Forms.Label label6;
        private System.Windows.Forms.Label label5;
        private System.Windows.Forms.Label label4;
        private System.Windows.Forms.Label label3;
        private System.Windows.Forms.Label label2;
        private System.Windows.Forms.Label label1;
        private System.Windows.Forms.ComboBox comboBox4;
        private System.Windows.Forms.ComboBox comboBox3;
        private System.Windows.Forms.ComboBox comboBox2;
        private System.Windows.Forms.ComboBox comboBox5;
        private System.Windows.Forms.CheckBox checkBox5;
        private System.Windows.Forms.CheckBox checkBox4;
        private System.Windows.Forms.CheckBox checkBox3;
        private System.Windows.Forms.CheckBox checkBox2;
        private System.Windows.Forms.RadioButton radioButton4;
        private System.Windows.Forms.RadioButton radioButton3;
        private System.Windows.Forms.RadioButton radioButton2;
        private System.Windows.Forms.RadioButton radioButton1;
        private System.Windows.Forms.ComboBox comboBox6;
        private System.Windows.Forms.CheckBox checkBox1;
        private System.Windows.Forms.TextBox textBox1;
        private System.Windows.Forms.Button button3;
        private System.Windows.Forms.GroupBox groupBox3;
    }
}
//-----------------------------------------------------------------------
// <copyright file="Serialization.cs" company="Akka.NET Project">
// Copyright (C) 2009-2016 Lightbend Inc. <http://www.lightbend.com>
// Copyright (C) 2013-2016 Akka.NET project <https://github.com/akkadotnet/akka.net>
// </copyright>
//-----------------------------------------------------------------------

using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.Serialization;
using Akka.Actor;
using Akka.Util.Internal;

namespace Akka.Serialization
{
    /// <summary>
    /// Transport information captured while serializing, so actor paths can be
    /// rendered with the correct remote address.
    /// </summary>
    public class Information
    {
        public Address Address { get; set; }
        public ActorSystem System { get; set; }
    }

    /// <summary>
    /// Maps message types to <see cref="Serializer"/> instances based on the
    /// "akka.actor.serializers" and "akka.actor.serialization-bindings" config sections.
    /// </summary>
    public class Serialization
    {
        // Thread-static so concurrent serializations on different threads do not
        // see each other's transport information.
        [ThreadStatic]
        private static Information _currentTransportInformation;

        /// <summary>
        /// Runs <paramref name="action"/> with ambient transport information set,
        /// so serialized actor paths include <paramref name="address"/>.
        /// </summary>
        public static T SerializeWithTransport<T>(ActorSystem system, Address address, Func<T> action)
        {
            _currentTransportInformation = new Information() { System = system, Address = address };
            var res = action();
            _currentTransportInformation = null;
            return res;
        }

        private readonly Serializer _nullSerializer;

        // Most-specific type -> serializer bindings; keyed lookups fall back to
        // assignability checks in FindSerializerForType.
        private readonly Dictionary<Type, Serializer> _serializerMap = new Dictionary<Type, Serializer>();
        // Serializer identifier -> serializer, used when deserializing.
        private readonly Dictionary<int, Serializer> _serializers = new Dictionary<int, Serializer>();

        /// <summary>
        /// Builds the serializer tables from the system configuration. Unresolvable
        /// serializer types or bindings are logged as warnings and skipped.
        /// </summary>
        public Serialization(ExtendedActorSystem system)
        {
            System = system;
            _nullSerializer = new NullSerializer(system);
            _serializers.Add(_nullSerializer.Identifier, _nullSerializer);

            var serializersConfig = system.Settings.Config.GetConfig("akka.actor.serializers").AsEnumerable().ToList();
            var serializerBindingConfig = system.Settings.Config.GetConfig("akka.actor.serialization-bindings").AsEnumerable().ToList();

            var namedSerializers = new Dictionary<string, Serializer>();
            foreach (var kvp in serializersConfig)
            {
                var serializerTypeName = kvp.Value.GetString();
                var serializerType = Type.GetType(serializerTypeName);
                if (serializerType == null)
                {
                    system.Log.Warning("The type name for serializer '{0}' did not resolve to an actual Type: '{1}'", kvp.Key, serializerTypeName);
                    continue;
                }
                var serializer = (Serializer)Activator.CreateInstance(serializerType, system);
                _serializers.Add(serializer.Identifier, serializer);
                namedSerializers.Add(kvp.Key, serializer);
            }

            foreach (var kvp in serializerBindingConfig)
            {
                var typename = kvp.Key;
                var serializerName = kvp.Value.GetString();
                var messageType = Type.GetType(typename);

                if (messageType == null)
                {
                    system.Log.Warning("The type name for message/serializer binding '{0}' did not resolve to an actual Type: '{1}'", serializerName, typename);
                    continue;
                }

                // BUGFIX: the original used the dictionary indexer followed by a null
                // check, but Dictionary<K,V> throws KeyNotFoundException for a missing
                // key, so the warning branch was unreachable and a binding to an
                // unknown serializer crashed system startup. TryGetValue restores the
                // intended warn-and-skip behavior.
                Serializer serializer;
                if (!namedSerializers.TryGetValue(serializerName, out serializer))
                {
                    system.Log.Warning("Serialization binding to non existing serializer: '{0}'", serializerName);
                    continue;
                }

                _serializerMap.Add(messageType, serializer);
            }
        }

        public ActorSystem System { get; private set; }

        /// <summary>Registers a serializer by its identifier.</summary>
        public void AddSerializer(Serializer serializer)
        {
            _serializers.Add(serializer.Identifier, serializer);
        }

        /// <summary>Binds <paramref name="type"/> (and assignable subtypes) to <paramref name="serializer"/>.</summary>
        public void AddSerializationMap(Type type, Serializer serializer)
        {
            _serializerMap.Add(type, serializer);
        }

        /// <summary>Deserializes <paramref name="bytes"/> into an instance of <paramref name="type"/>.</summary>
        /// <exception cref="SerializationException">
        /// This exception is thrown if the system cannot find the serializer with the given <paramref name="serializerId"/>.
        /// </exception>
        public object Deserialize(byte[] bytes, int serializerId, Type type)
        {
            Serializer serializer;
            if (!_serializers.TryGetValue(serializerId, out serializer))
                throw new SerializationException(
                    $"Cannot find serializer with id [{serializerId}]. The most probable reason" +
                    " is that the configuration entry 'akka.actor.serializers' is not in sync between the two systems.");

            return serializer.FromBinary(bytes, type);
        }

        /// <summary>Deserializes <paramref name="bytes"/> using a string manifest to resolve the target type.</summary>
        /// <exception cref="SerializationException">
        /// This exception is thrown if the system cannot find the serializer with the given <paramref name="serializerId"/>
        /// or it couldn't find the given <paramref name="manifest"/> with the given <paramref name="serializerId"/>.
        /// </exception>
        public object Deserialize(byte[] bytes, int serializerId, string manifest)
        {
            Serializer serializer;
            if (!_serializers.TryGetValue(serializerId, out serializer))
                throw new SerializationException(
                    $"Cannot find serializer with id [{serializerId}]. The most probable reason" +
                    " is that the configuration entry 'akka.actor.serializers' is not in sync between the two systems.");

            if (serializer is SerializerWithStringManifest)
                return ((SerializerWithStringManifest)serializer).FromBinary(bytes, manifest);

            if (string.IsNullOrEmpty(manifest))
                return serializer.FromBinary(bytes, null);

            Type type;
            try
            {
                // BUGFIX: Type.GetType(name) returns null (it does not throw) when the
                // name cannot be resolved, which previously let a null Type flow into
                // FromBinary. throwOnError:true makes an unresolvable manifest surface
                // as the intended SerializationException below.
                type = Type.GetType(manifest, true);
            }
            catch
            {
                throw new SerializationException($"Cannot find manifest class [{manifest}] for serializer with id [{serializerId}].");
            }

            return serializer.FromBinary(bytes, type);
        }

        /// <summary>Finds the serializer for <paramref name="obj"/>; null maps to the null serializer.</summary>
        public Serializer FindSerializerFor(object obj)
        {
            if (obj == null)
                return _nullSerializer;

            Type type = obj.GetType();
            return FindSerializerForType(type);
        }

        //cache to eliminate lots of typeof operator calls
        private readonly Type _objectType = typeof(object);

        /// <summary>Finds the serializer bound to <paramref name="objectType"/> or a base type/interface of it.</summary>
        /// <exception cref="SerializationException">
        /// This exception is thrown if the serializer of the given <paramref name="objectType"/> could not be found.
        /// </exception>
        public Serializer FindSerializerForType(Type objectType)
        {
            Type type = objectType;

            //TODO: see if we can do a better job with proper type sorting here - most specific to least specific (object serializer goes last)
            foreach (var serializerType in _serializerMap)
            {
                //force deferral of the base "object" serializer until all other higher-level types have been evaluated
                if (serializerType.Key.IsAssignableFrom(type) && serializerType.Key != _objectType)
                    return serializerType.Value;
            }

            //do a final check for the "object" serializer
            if (_serializerMap.ContainsKey(_objectType) && _objectType.IsAssignableFrom(type))
                return _serializerMap[_objectType];

            throw new SerializationException($"Serializer not found for type {objectType.Name}");
        }

        /// <summary>
        /// Renders the path of <paramref name="actorRef"/> in serialization format,
        /// including the correct address for the current transport if one is ambient.
        /// </summary>
        public static string SerializedActorPath(IActorRef actorRef)
        {
            if (Equals(actorRef, ActorRefs.NoSender))
                return String.Empty;

            var path = actorRef.Path;
            ExtendedActorSystem originalSystem = null;
            if (actorRef is ActorRefWithCell)
            {
                originalSystem = actorRef.AsInstanceOf<ActorRefWithCell>().Underlying.System.AsInstanceOf<ExtendedActorSystem>();
            }

            if (_currentTransportInformation == null)
            {
                // No transport context: fall back to the ref's own system default address.
                if (originalSystem == null)
                {
                    var res = path.ToSerializationFormat();
                    return res;
                }
                else
                {
                    var defaultAddress = originalSystem.Provider.DefaultAddress;
                    var res = path.ToSerializationFormatWithAddress(defaultAddress);
                    return res;
                }
            }

            //CurrentTransportInformation exists
            var system = _currentTransportInformation.System;
            var address = _currentTransportInformation.Address;
            if (originalSystem == null || originalSystem == system)
            {
                var res = path.ToSerializationFormatWithAddress(address);
                return res;
            }
            else
            {
                // The ref belongs to a different system: map the transport address
                // through that system's provider.
                var provider = originalSystem.Provider;
                var res = path.ToSerializationFormatWithAddress(provider.GetExternalAddressFor(address).GetOrElse(provider.DefaultAddress));
                return res;
            }
        }

        /// <summary>Looks up a serializer by identifier; throws KeyNotFoundException if absent.</summary>
        public Serializer GetSerializerById(int serializerId)
        {
            return _serializers[serializerId];
        }
    }
}
//Uncomment the next line to enable debugging (also uncomment it in AstarPath.cs)
//#define ProfileAstar
//#define UNITY_PRO_PROFILER //Requires ProfileAstar, profiles section of astar code which will show up in the Unity Pro Profiler.

using System.Collections.Generic;
using System;
using UnityEngine;

/// <summary>
/// Lightweight instrumentation profiler for the A* pathfinding code.
/// All public methods are [Conditional], so calls compile away entirely
/// unless the ProfileAstar (and optionally UNITY_PRO_PROFILER) symbols are defined.
/// Two modes are provided: string-keyed profiles (dictionary lookup per call)
/// and integer-tagged "fast" profiles (array indexed, lower overhead).
/// </summary>
public class AstarProfiler
{
    // Accumulated timing data for one profiled section.
    public struct ProfilePoint
    {
        public DateTime lastRecorded;   // timestamp of the most recent Start* call
        public TimeSpan totalTime;      // total time spent inside the section
        public int totalCalls;          // number of Start/End pairs recorded
    }

    // String-keyed profile sections (used by StartProfile/EndProfile).
    private static Dictionary<string, ProfilePoint> profiles = new Dictionary<string, ProfilePoint>();
    // Wall-clock start of the profiling session; used for "Total runtime" output.
    private static DateTime startTime = DateTime.UtcNow;

    // Integer-tagged sections; must be sized via InitializeFastProfile before use.
    public static ProfilePoint[] fastProfiles;
    public static string[] fastProfileNames;

    // Static-only utility class; prevent instantiation.
    private AstarProfiler()
    {
    }

    /// <summary>Allocates the fast-profile slots; tag values passed to
    /// StartFastProfile/EndFastProfile index into this array.</summary>
    [System.Diagnostics.Conditional ("ProfileAstar")]
    public static void InitializeFastProfile (string[] profileNames)
    {
        fastProfileNames = profileNames;
        fastProfiles = new ProfilePoint[profileNames.Length];
    }

    /// <summary>Records the start timestamp for the given fast-profile tag.</summary>
    [System.Diagnostics.Conditional ("ProfileAstar")]
    public static void StartFastProfile(int tag)
    {
        //profiles.TryGetValue(tag, out point);
        fastProfiles[tag].lastRecorded = DateTime.UtcNow;
    }

    /// <summary>Accumulates elapsed time since the matching StartFastProfile call.</summary>
    [System.Diagnostics.Conditional ("ProfileAstar")]
    public static void EndFastProfile(int tag)
    {
        DateTime now = DateTime.UtcNow;
        /*if (!profiles.ContainsKey(tag)) {
            Debug.LogError("Can only end profiling for a tag which has already been started (tag was " + tag + ")");
            return;
        }*/
        // ProfilePoint is a struct: copy out, mutate, write back.
        ProfilePoint point = fastProfiles[tag];
        point.totalTime += now - point.lastRecorded;
        point.totalCalls++;
        fastProfiles[tag] = point;
    }

    /// <summary>Closes the current Unity profiler sample (Unity Pro profiler mode only).</summary>
    [System.Diagnostics.Conditional ("UNITY_PRO_PROFILER")]
    public static void EndProfile ()
    {
        Profiler.EndSample ();
    }

    /// <summary>Starts a string-keyed section; forwards to the Unity Pro profiler when enabled.</summary>
    [System.Diagnostics.Conditional ("ProfileAstar")]
    public static void StartProfile(string tag)
    {
#if UNITY_PRO_PROFILER
        Profiler.BeginSample (tag);
#else
        //Console.WriteLine ("Profile Start - " + tag);
        // TryGetValue leaves 'point' default-initialized on a miss, which is
        // exactly the zeroed state wanted for a first-time tag.
        ProfilePoint point;

        profiles.TryGetValue(tag, out point);
        point.lastRecorded = DateTime.UtcNow;
        profiles[tag] = point;
#endif
    }

    /// <summary>Ends a string-keyed section and accumulates its elapsed time.</summary>
    [System.Diagnostics.Conditional ("ProfileAstar")]
    public static void EndProfile(string tag)
    {
#if !UNITY_PRO_PROFILER
        if (!profiles.ContainsKey(tag))
        {
            Debug.LogError("Can only end profiling for a tag which has already been started (tag was " + tag + ")");
            return;
        }
        //Console.WriteLine ("Profile End - " + tag);
        DateTime now = DateTime.UtcNow;
        ProfilePoint point = profiles[tag];
        point.totalTime += now - point.lastRecorded;
        ++point.totalCalls;
        profiles[tag] = point;
#else
        EndProfile ();
#endif
    }

    /// <summary>Clears all accumulated data and restarts the session clock.</summary>
    [System.Diagnostics.Conditional ("ProfileAstar")]
    public static void Reset()
    {
        profiles.Clear();
        startTime = DateTime.UtcNow;

        if (fastProfiles != null)
        {
            for (int i=0;i<fastProfiles.Length;i++)
            {
                fastProfiles[i] = new ProfilePoint ();
            }
        }
    }

    /// <summary>Logs a formatted table of all fast-profile sections to the Unity console.</summary>
    [System.Diagnostics.Conditional ("ProfileAstar")]
    public static void PrintFastResults()
    {
        TimeSpan endTime = DateTime.UtcNow - startTime;
        System.Text.StringBuilder output = new System.Text.StringBuilder();
        output.Append("============================\n\t\t\t\tProfile results:\n============================\n");
        output.Append ("Name | Total Time | Total Calls | Avg/Call ");
        //foreach(KeyValuePair<string, ProfilePoint> pair in profiles)
        for (int i=0;i<fastProfiles.Length;i++)
        {
            string name = fastProfileNames[i];
            ProfilePoint value = fastProfiles[i];

            double totalTime = value.totalTime.TotalMilliseconds;
            int totalCalls = value.totalCalls;
            // Skip sections that were never entered.
            if (totalCalls < 1) continue;

            output.Append ("\n").Append(name.PadLeft(10)).Append("| ");
            output.Append (totalTime.ToString("0.0").PadLeft (10)).Append ("| ");
            output.Append (totalCalls.ToString().PadLeft (10)).Append ("| ");
            output.Append ((totalTime / totalCalls).ToString("0.000").PadLeft(10));
            /* output.Append("\nProfile"); output.Append(name); output.Append(" took \t");
            output.Append(totalTime.ToString("0.0")); output.Append(" ms to complete over ");
            output.Append(totalCalls); output.Append(" iteration");
            if (totalCalls != 1) output.Append("s");
            output.Append(", averaging \t"); output.Append((totalTime / totalCalls).ToString("0.000"));
            output.Append(" ms per call"); */
        }
        output.Append("\n\n============================\n\t\tTotal runtime: ");
        output.Append(endTime.TotalSeconds.ToString("F3"));
        output.Append(" seconds\n============================");
        Debug.Log(output.ToString());
    }

    /// <summary>Logs a formatted table of all string-keyed sections to the Unity console.</summary>
    [System.Diagnostics.Conditional ("ProfileAstar")]
    public static void PrintResults()
    {
        TimeSpan endTime = DateTime.UtcNow - startTime;
        System.Text.StringBuilder output = new System.Text.StringBuilder();
        output.Append("============================\n\t\t\t\tProfile results:\n============================\n");

        // Pad the name column to the longest tag so the table lines up.
        int maxLength = 5;
        foreach(KeyValuePair<string, ProfilePoint> pair in profiles)
        {
            maxLength = Math.Max (pair.Key.Length,maxLength);
        }

        output.Append (" Name ".PadRight (maxLength)).
            Append("|").Append(" Total Time ".PadRight(20)).
            Append("|").Append(" Total Calls ".PadRight(20)).
            Append("|").Append(" Avg/Call ".PadRight(20));

        foreach(KeyValuePair<string, ProfilePoint> pair in profiles)
        {
            double totalTime = pair.Value.totalTime.TotalMilliseconds;
            int totalCalls = pair.Value.totalCalls;
            // Skip sections that were never entered.
            if (totalCalls < 1) continue;

            string name = pair.Key;

            output.Append ("\n").Append(name.PadRight(maxLength)).Append("| ");
            output.Append (totalTime.ToString("0.0").PadRight (20)).Append ("| ");
            output.Append (totalCalls.ToString().PadRight (20)).Append ("| ");
            output.Append ((totalTime / totalCalls).ToString("0.000").PadRight(20));
            /*output.Append("\nProfile "); output.Append(pair.Key); output.Append(" took ");
            output.Append(totalTime.ToString("0")); output.Append(" ms to complete over ");
            output.Append(totalCalls); output.Append(" iteration");
            if (totalCalls != 1) output.Append("s");
            output.Append(", averaging "); output.Append((totalTime / totalCalls).ToString("0.0"));
            output.Append(" ms per call");*/
        }

        output.Append("\n\n============================\n\t\tTotal runtime: ");
        output.Append(endTime.TotalSeconds.ToString("F3"));
        output.Append(" seconds\n============================");
        Debug.Log(output.ToString());
    }
}
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Reflection;
using System.Runtime.InteropServices;
using System.Text;
using System.Threading.Tasks;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using Newtonsoft.Json.Serialization;
using Plivo.Authentication;
using Plivo.Http;
using Plivo.Utilities;
using Plivo.Exception;

namespace Plivo.Client
{
    /// <summary>
    /// Client. Wraps one <see cref="System.Net.Http.HttpClient"/> per Plivo endpoint:
    /// the main REST API, the voice API (plus two fallback clients used on 5xx responses),
    /// Call Insights (stats.plivo.com) and Lookup (lookup.plivo.com).
    /// </summary>
    public class SystemHttpClient : IHttpClient
    {
        /// <summary>
        /// Gets or sets the client used for regular API requests.
        /// </summary>
        /// <value>The client.</value>
        public System.Net.Http.HttpClient _client { get; set; }

        /// <summary>Primary client for voice API requests (5 second timeout).</summary>
        public System.Net.Http.HttpClient _voiceBaseUriClient { get; set; }

        /// <summary>First fallback client used when the voice base client returns a 5xx.</summary>
        public System.Net.Http.HttpClient _voiceFallback1Client { get; set; }

        /// <summary>Second fallback client used when the first fallback also returns a 5xx.</summary>
        public System.Net.Http.HttpClient _voiceFallback2Client { get; set; }

        /// <summary>Client for Call Insights requests.</summary>
        public System.Net.Http.HttpClient _callInsightsclient { get; set; }

        /// <summary>Client for Lookup requests.</summary>
        public System.Net.Http.HttpClient _lookupClient { get; set; }

        /// <summary>
        /// Contract resolver that converts PascalCase property names to snake_case,
        /// matching the JSON conventions of the Plivo REST API.
        /// </summary>
        public class PascalCasePropertyNamesContractResolver : DefaultContractResolver
        {
            protected override string ResolvePropertyName(string propertyName)
            {
                return StringUtilities.PascalCaseToSnakeCase(propertyName);
            }
        }

        public string RuntimeVersion;

        // Serializer settings used for the first deserialization attempt (snake_case mapping).
        private JsonSerializerSettings _jsonSettings;

        /// <summary>
        /// Initializes a new instance of the <see cref="T:plivo.Client.SystemHttpClient"/> class.
        /// </summary>
        /// <param name="basicAuth">Basic auth.</param>
        /// <param name="proxyServerSettings">Proxy settings (keys: Username, Password, Address, Port).</param>
        /// <param name="baseUri">Switch between environments; defaults to the production API.</param>
        public SystemHttpClient(BasicAuth basicAuth, Dictionary<string, string> proxyServerSettings, string baseUri = null)
        {
#if NETSTANDARD2_0
            IWebProxy proxy = null;
            var networkCreds = new NetworkCredential
            {
                UserName = proxyServerSettings["Username"],
                Password = proxyServerSettings["Password"]
            };
            // Fall back to default credentials when no explicit proxy credentials were supplied.
            var useDefaultCreds = string.IsNullOrEmpty(networkCreds.UserName) &&
                                  string.IsNullOrEmpty(networkCreds.Password);
            try
            {
                var proxyAddress = new Uri($"{proxyServerSettings["Address"]}:{proxyServerSettings["Port"]}");
                if (!useDefaultCreds)
                {
                    proxy = new WebProxy()
                    {
                        Address = proxyAddress,
                        UseDefaultCredentials = useDefaultCreds,
                        Credentials = networkCreds
                    };
                }
                else
                {
                    proxy = new WebProxy()
                    {
                        Address = proxyAddress,
                        UseDefaultCredentials = useDefaultCreds
                    };
                }
            }
            catch
            {
                // Invalid/missing proxy settings: proceed without a proxy (best-effort, as before).
                proxy = null;
            }

            HttpClientHandler httpClientHandler = new HttpClientHandler()
            {
                PreAuthenticate = true,
                UseDefaultCredentials = false,
                UseProxy = proxy != null,
                Proxy = proxy
            };
#else
            HttpClientHandler httpClientHandler = new HttpClientHandler()
            {
                PreAuthenticate = true,
                UseDefaultCredentials = false
            };
#endif
            var authHeader = new AuthenticationHeaderValue(
                "Basic",
                Convert.ToBase64String(
                    Encoding.UTF8.GetBytes($"{basicAuth.AuthId}:{basicAuth.AuthToken}")));

            // All API/voice clients share the same base address; voice clients additionally
            // get a short timeout so the fallback chain kicks in quickly.
            var apiBaseAddress =
                (string.IsNullOrEmpty(baseUri) ? "https://api.plivo.com/" + Version.ApiVersion : baseUri) + "/";

            _client = CreateClient(httpClientHandler, authHeader, apiBaseAddress, null);
            _voiceBaseUriClient = CreateClient(httpClientHandler, authHeader, apiBaseAddress, TimeSpan.FromSeconds(5));
            _voiceFallback1Client = CreateClient(httpClientHandler, authHeader, apiBaseAddress, TimeSpan.FromSeconds(5));
            _voiceFallback2Client = CreateClient(httpClientHandler, authHeader, apiBaseAddress, TimeSpan.FromSeconds(5));
            _callInsightsclient = CreateClient(httpClientHandler, authHeader, "https://stats.plivo.com/v1/", null);
            _lookupClient = CreateClient(httpClientHandler, authHeader, "https://lookup.plivo.com/v1/", null);

            _jsonSettings = new JsonSerializerSettings
            {
                ContractResolver = new PascalCasePropertyNamesContractResolver(),
                NullValueHandling = NullValueHandling.Ignore
            };
        }

        /// <summary>
        /// Creates one configured HttpClient (auth header, User-Agent, base address, optional timeout).
        /// Extracted to remove the six near-identical blocks the constructor previously contained.
        /// </summary>
        private static System.Net.Http.HttpClient CreateClient(
            HttpClientHandler handler,
            AuthenticationHeaderValue authHeader,
            string baseAddress,
            TimeSpan? timeout)
        {
            var client = new System.Net.Http.HttpClient(handler);
            if (timeout.HasValue)
            {
                client.Timeout = timeout.Value;
            }
            client.DefaultRequestHeaders.Authorization = authHeader;
            client.DefaultRequestHeaders.Add("User-Agent", "plivo-dotnet/" + ThisAssembly.AssemblyFileVersion);
            client.BaseAddress = new Uri(baseAddress);
            return client;
        }

        /// <summary>
        /// Builds an <see cref="HttpRequestMessage"/> for the given HTTP method, relative URI and payload.
        /// GET payloads become a query string; POST payloads become JSON, or multipart form data
        /// when <paramref name="filesToUpload"/> is supplied (key = form field, value = file path).
        /// </summary>
        /// <exception cref="NotSupportedException">Thrown for methods other than GET/POST/DELETE.</exception>
        public HttpRequestMessage NewRequestFunc(string method, string uri, Dictionary<string, object> data, Dictionary<string, string> filesToUpload = null)
        {
            HttpRequestMessage request = null;
            switch (method)
            {
                case "GET":
                    request = new HttpRequestMessage(HttpMethod.Get, uri + AsQueryString(data));
                    request.Headers.Add("Accept", "application/json");
                    break;

                case "POST":
                    request = new HttpRequestMessage(HttpMethod.Post, uri);
                    if (filesToUpload == null)
                    {
                        request.Headers.Add("Accept", "application/json");
                        request.Content = new StringContent(
                            JsonConvert.SerializeObject(data),
                            Encoding.UTF8,
                            "application/json");
                    }
                    else
                    {
                        MultipartFormDataContent multipartContent = new MultipartFormDataContent();
                        foreach (var key in filesToUpload.Keys)
                        {
                            FileInfo fileInfo = new FileInfo(filesToUpload[key]);
                            string fileName = fileInfo.Name;
                            HttpContent fileContents = new ByteArrayContent(File.ReadAllBytes(fileInfo.FullName));
                            fileContents.Headers.Add("Content-Type", GetContentType(fileName));
                            multipartContent.Add(fileContents, "file", fileName);
                        }
                        foreach (var key in data.Keys)
                        {
                            HttpContent stringContent = new StringContent(data[key].ToString());
                            multipartContent.Add(stringContent, key);
                        }
                        request.Content = multipartContent;
                    }
                    break;

                case "DELETE":
                    request = new HttpRequestMessage(HttpMethod.Delete, uri);
                    request.Headers.Add("Accept", "application/json");
                    request.Content = new StringContent(
                        JsonConvert.SerializeObject(data),
                        Encoding.UTF8,
                        "application/json");
                    break;

                default:
                    throw new NotSupportedException(method + " not supported");
            }

            return request;
        }

        /// <summary>
        /// Maps an uploaded file name to its MIME content type.
        /// Fix: the previous implementation used <c>fileName.Split('.')[1]</c>, which takes the text
        /// after the FIRST dot and therefore mis-detects names such as "photo.final.png", and it left
        /// the header value null for unrecognized extensions.
        /// </summary>
        private static string GetContentType(string fileName)
        {
            switch (Path.GetExtension(fileName).TrimStart('.').ToLowerInvariant())
            {
                case "jpg":
                case "jpeg":
                    return "image/jpeg";
                case "png":
                    return "image/png";
                case "pdf":
                    return "application/pdf";
                default:
                    // Safe generic default instead of a null header value.
                    return "application/octet-stream";
            }
        }

        /// <summary>
        /// Sends the request.
        /// </summary>
        /// <returns>The request.</returns>
        /// <param name="method">Method.</param>
        /// <param name="uri">URI.</param>
        /// <param name="data">Data.</param>
        /// <param name="filesToUpload">Optional files for multipart POST requests.</param>
        /// <typeparam name="T">The 1st type parameter.</typeparam>
        /// <exception cref="PlivoAuthenticationException">Thrown on a 401 Unauthorized response.</exception>
        public async Task<PlivoResponse<T>> SendRequest<T>(string method, string uri, Dictionary<string, object> data, Dictionary<string, string> filesToUpload = null)
            where T : new()
        {
            HttpResponseMessage response = null;
            HttpRequestMessage request = null;
            bool isCallInsightsRequest = false;
            bool isVoiceRequest = false;
            bool isLookupRequest = false;

            // Marker keys injected by the resource layer to select an endpoint;
            // they must be stripped before the payload is serialized.
            if (data.ContainsKey("is_call_insights_request"))
            {
                isCallInsightsRequest = true;
                data.Remove("is_call_insights_request");
            }
            else if (data.ContainsKey("is_voice_request"))
            {
                isVoiceRequest = true;
                data.Remove("is_voice_request");
            }
            else if (data.ContainsKey("is_lookup_request"))
            {
                isLookupRequest = true;
                data.Remove("is_lookup_request");
            }

            request = NewRequestFunc(method, uri, data, filesToUpload);

            if (isCallInsightsRequest)
            {
                response = await _callInsightsclient.SendAsync(request).ConfigureAwait(false);
            }
            else if (isVoiceRequest)
            {
                response = await _voiceBaseUriClient.SendAsync(request).ConfigureAwait(false);
                // Fall back on server errors (>= 500). An HttpRequestMessage cannot be
                // sent twice, so the request is rebuilt for every retry.
                if ((int)response.StatusCode >= 500)
                {
                    request = NewRequestFunc(method, uri, data, filesToUpload);
                    response = await _voiceFallback1Client.SendAsync(request).ConfigureAwait(false);
                    if ((int)response.StatusCode >= 500)
                    {
                        request = NewRequestFunc(method, uri, data, filesToUpload);
                        response = await _voiceFallback2Client.SendAsync(request).ConfigureAwait(false);
                    }
                }
            }
            else if (isLookupRequest)
            {
                response = await _lookupClient.SendAsync(request).ConfigureAwait(false);
            }
            else
            {
                response = await _client.SendAsync(request).ConfigureAwait(false);
            }

            // Compare the enum directly instead of its ToString() text.
            if (response.StatusCode == HttpStatusCode.Unauthorized)
            {
                throw new PlivoAuthenticationException("Unauthorized Request. Please check credentials");
            }

            var responseContent = await response.Content.ReadAsStringAsync().ConfigureAwait(false);

            // Create the Plivo response object along with the deserialized payload; fall back
            // to default serializer settings when snake_case deserialization fails.
            PlivoResponse<T> plivoResponse;
            try
            {
                plivoResponse = new PlivoResponse<T>(
                    (uint)response.StatusCode.GetHashCode(),
                    response.Headers.Select(item => item.Key + "=" + item.Value).ToList(),
                    responseContent,
                    responseContent != string.Empty
                        ? JsonConvert.DeserializeObject<T>(responseContent, _jsonSettings)
                        : new T(),
                    new PlivoRequest(method, uri, request.Headers.ToString(), data, filesToUpload));
            }
            catch (Newtonsoft.Json.JsonReaderException)
            {
                plivoResponse = new PlivoResponse<T>(
                    (uint)response.StatusCode.GetHashCode(),
                    response.Headers.Select(item => item.Key + "=" + item.Value).ToList(),
                    responseContent,
                    responseContent != string.Empty
                        ? JsonConvert.DeserializeObject<T>(responseContent)
                        : new T(),
                    new PlivoRequest(method, uri, request.Headers.ToString(), data, filesToUpload));
            }

            return plivoResponse;
        }

        /// <summary>
        /// Renders non-null parameters as a URL-encoded query string (empty string when there are none).
        /// </summary>
        public string AsQueryString(IEnumerable<KeyValuePair<string, object>> parameters)
        {
            if (!parameters.Any()) return "";

            var builder = new StringBuilder("?");
            var separator = "";
            foreach (var kvp in parameters.Where(kvp => kvp.Value != null))
            {
                builder.AppendFormat("{0}{1}={2}",
                    separator,
                    WebUtility.UrlEncode(kvp.Key),
                    WebUtility.UrlEncode(kvp.Value.ToString()));
                separator = "&";
            }

            return builder.ToString();
        }
    }
}
/*
 PreviewLabs.PlayerPrefs

 Public Domain

 To the extent possible under law, PreviewLabs has waived all copyright and related or neighboring rights to this document. This work is published from: Belgium.

 http://www.previewlabs.com/writing-playerprefs-fast/
*/

using UnityEngine;
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Text.RegularExpressions;

namespace PreviewLabs.AnimatorAccessGenerator
{
	/// <summary>
	/// Derived from PreviewLabs PlayerPrefs (s. note at the beginning of the file). Writes key value pairs in a text
	/// file in <Application.persistentDataPath>/AnimatorAccessConfig.txt. Keys are sorted in alpha numeric order.
	/// </summary>
	public static class PlayerPrefs
	{
		// True when the backing file existed the first time this class was touched.
		public static bool fileExists = false;

		// Sorted so that the serialized file is stable and diff-friendly.
		private static SortedDictionary<string, object> playerPrefsDict = new SortedDictionary<string, object> ();

		// Set on every mutation; Flush() only writes when this is true.
		private static bool hashTableChanged = false;

		private static string serializedOutput = "";
		private static string serializedInput = "";

		// Record separator (one key/value/type triple per line).
		private const string PARAMETERS_SEPERATOR = "\n";
		// Field separator within a record: "key : value : type".
		private const string KEY_VALUE_SEPERATOR = ":";

		public static readonly string fileName = Application.persistentDataPath + "/AnimatorAccessConfig.txt";

		/// <summary>
		/// Reads the whole config file into serializedInput.
		/// Returns false when the file does not exist.
		/// </summary>
		static bool ReadFile ()
		{
			if (!File.Exists (fileName)) {
				return false;
			}
			fileExists = true;
			// using ensures the reader is closed even if ReadToEnd throws.
			using (StreamReader fileReader = new StreamReader (fileName)) {
				serializedInput = fileReader.ReadToEnd ();
			}
			return true;
		}

		static PlayerPrefs () {
			if (ReadFile ()) {
				Deserialize ();
			}
		}

		/// <summary>Whether a value is stored under the given key.</summary>
		public static bool HasKey (string key) {
			return playerPrefsDict.ContainsKey (key);
		}

		/// <summary>Stores or overwrites a string value.</summary>
		public static void SetString (string key, string value) {
			// Dictionary indexer adds or overwrites in one step.
			playerPrefsDict [key] = value;
			hashTableChanged = true;
		}

		/// <summary>Stores or overwrites an int value.</summary>
		public static void SetInt (string key, int value) {
			playerPrefsDict [key] = value;
			hashTableChanged = true;
		}

		/// <summary>Stores or overwrites a float value.</summary>
		public static void SetFloat (string key, float value) {
			playerPrefsDict [key] = value;
			hashTableChanged = true;
		}

		/// <summary>Stores or overwrites a bool value.</summary>
		public static void SetBool (string key, bool value) {
			playerPrefsDict [key] = value;
			hashTableChanged = true;
		}

		/// <summary>Returns the stored string, or null when the key is unknown.</summary>
		public static string GetString (string key) {
			object value;
			if (playerPrefsDict.TryGetValue (key, out value)) {
				return value.ToString ();
			}
			return null;
		}

		/// <summary>
		/// Returns the stored string; when the key is unknown the default is stored and returned.
		/// </summary>
		public static string GetString (string key, string defaultValue) {
			object value;
			if (playerPrefsDict.TryGetValue (key, out value)) {
				return value.ToString ();
			}
			playerPrefsDict.Add (key, defaultValue);
			hashTableChanged = true;
			return defaultValue;
		}

		/// <summary>Returns the stored int, or 0 when the key is unknown.</summary>
		public static int GetInt (string key) {
			object value;
			if (playerPrefsDict.TryGetValue (key, out value)) {
				return (int)value;
			}
			return 0;
		}

		/// <summary>
		/// Returns the stored int; when the key is unknown the default is stored and returned.
		/// </summary>
		public static int GetInt (string key, int defaultValue) {
			object value;
			if (playerPrefsDict.TryGetValue (key, out value)) {
				return (int)value;
			}
			playerPrefsDict.Add (key, defaultValue);
			hashTableChanged = true;
			return defaultValue;
		}

		/// <summary>Returns the stored float, or 0.0f when the key is unknown.</summary>
		public static float GetFloat (string key) {
			object value;
			if (playerPrefsDict.TryGetValue (key, out value)) {
				return (float)value;
			}
			return 0.0f;
		}

		/// <summary>
		/// Returns the stored float; when the key is unknown the default is stored and returned.
		/// </summary>
		public static float GetFloat (string key, float defaultValue) {
			object value;
			if (playerPrefsDict.TryGetValue (key, out value)) {
				return (float)value;
			}
			playerPrefsDict.Add (key, defaultValue);
			hashTableChanged = true;
			return defaultValue;
		}

		/// <summary>Returns the stored bool, or false when the key is unknown.</summary>
		public static bool GetBool (string key) {
			object value;
			if (playerPrefsDict.TryGetValue (key, out value)) {
				return (bool)value;
			}
			return false;
		}

		/// <summary>
		/// Returns the stored bool; when the key is unknown the default is stored and returned.
		/// </summary>
		public static bool GetBool (string key, bool defaultValue) {
			object value;
			if (playerPrefsDict.TryGetValue (key, out value)) {
				return (bool)value;
			}
			playerPrefsDict.Add (key, defaultValue);
			hashTableChanged = true;
			return defaultValue;
		}

		/// <summary>Removes the key (no-op when absent).</summary>
		public static void DeleteKey (string key) {
			playerPrefsDict.Remove (key);
			hashTableChanged = true;
		}

		/// <summary>Removes all keys.</summary>
		public static void DeleteAll () {
			playerPrefsDict.Clear ();
			hashTableChanged = true;
		}

		/// <summary>
		/// Writes all pending changes to disk. Does nothing when nothing changed since the last write.
		/// </summary>
		public static void Flush () {
			if (!hashTableChanged) {
				return;
			}
			Serialize ();
			// Fix: the previous version leaked the StreamWriter when Write threw, and
			// guarded against File.CreateText returning null (it never does — it throws).
			using (StreamWriter fileWriter = File.CreateText (fileName)) {
				fileWriter.Write (serializedOutput);
			}
			serializedOutput = "";
			// NOTE(review): hashTableChanged is intentionally left true, matching the
			// original behavior — a later Flush() rewrites the file again.
		}

		/// <summary>
		/// Appends all entries to serializedOutput as "key : value : type" lines.
		/// Uses a StringBuilder instead of the original O(n²) string concatenation.
		/// </summary>
		private static void Serialize () {
			StringBuilder output = new StringBuilder (serializedOutput);
			int i = 0;
			foreach (KeyValuePair<string, object> myEnumerator in playerPrefsDict) {
				i++;
				if (myEnumerator.Key == null || myEnumerator.Value == null) {
					// Fix: check before appending the separator so a skipped entry no
					// longer leaves an empty record behind.
					Debug.LogWarning ("Skipping line " + i + " with undefined value for [" + myEnumerator.Key + "=" + myEnumerator.Value + "]");
					continue;
				}
				if (output.Length > 0) {
					output.Append (PARAMETERS_SEPERATOR);
				}
				output.Append (EscapeNonSeperators (myEnumerator.Key.ToString ()));
				output.Append (" " + KEY_VALUE_SEPERATOR + " ");
				output.Append (EscapeNonSeperators (myEnumerator.Value.ToString ()));
				output.Append (" " + KEY_VALUE_SEPERATOR + " ");
				output.Append (myEnumerator.Value.GetType ());
			}
			serializedOutput = output.ToString ();
		}

		/// <summary>
		/// Parses serializedInput (one "key : value : type" record per line) into the dictionary.
		/// Malformed records are logged and skipped.
		/// </summary>
		private static void Deserialize () {
			string parametersSeparator = PARAMETERS_SEPERATOR;
			string[] parameters = serializedInput.Split (new string[] { parametersSeparator }, StringSplitOptions.None);
			int i = 0;
			string previous = "";
			foreach (string parameter in parameters) {
				i++;
				if (parameter.Length <= 3) {
					// Too short to contain "key : value : type"; ignore.
					continue;
				}
				string[] parameterContent = parameter.Split (new string[] { " " + KEY_VALUE_SEPERATOR + " " }, StringSplitOptions.None);
				if (parameterContent.Length != 3) {
					Debug.LogWarning (i + " (after " + previous + "): PlayerPrefs.Deserialize() parameterContent has " + parameterContent.Length + " elements ! " + (parameterContent.Length >= 1 ? parameterContent [0] : "(no name read)"));
				} else {
					playerPrefsDict.Add (DeEscapeNonSeperators (parameterContent [0], parametersSeparator), GetTypeValue (parameterContent [2], DeEscapeNonSeperators (parameterContent [1], parametersSeparator)));
					previous = parameterContent [0];
				}
			}
		}

		// Backslash-escapes the separator characters so keys/values may contain them.
		private static string EscapeNonSeperators (string inputToEscape) {
			inputToEscape = inputToEscape.Replace (KEY_VALUE_SEPERATOR, "\\" + KEY_VALUE_SEPERATOR);
			inputToEscape = inputToEscape.Replace (PARAMETERS_SEPERATOR, "\\" + PARAMETERS_SEPERATOR);
			return inputToEscape;
		}

		// Reverses EscapeNonSeperators.
		private static string DeEscapeNonSeperators (string inputToDeEscape, string parametersSeparator) {
			inputToDeEscape = inputToDeEscape.Replace ("\\" + KEY_VALUE_SEPERATOR, KEY_VALUE_SEPERATOR);
			inputToDeEscape = inputToDeEscape.Replace ("\\" + parametersSeparator, parametersSeparator);
			return inputToDeEscape;
		}

		/// <summary>
		/// Converts a serialized value back into the boxed CLR type named by typeName.
		/// Logs an error and returns null for unsupported types.
		/// </summary>
		public static object GetTypeValue (string typeName, string value) {
			if (typeName == "System.String") {
				return (object)value.ToString ();
			}
			if (typeName == "System.Int32") {
				return (object)System.Convert.ToInt32 (value);
			}
			if (typeName == "System.Boolean") {
				return (object)System.Convert.ToBoolean (value);
			}
			if (typeName == "System.Single") // -> single = float
			{
				return (object)System.Convert.ToSingle (value);
			}
			Debug.LogError ("Unsupported type: " + typeName + " value: " + value);
			return null;
		}
	}
}
/* ****************************************************************************
 *
 * Copyright (c) Microsoft Corporation.
 *
 * This source code is subject to terms and conditions of the Apache License, Version 2.0. A
 * copy of the license can be found in the License.html file at the root of this distribution. If
 * you cannot locate the Apache License, Version 2.0, please send an email to
 * vspython@microsoft.com. By using this source code in any fashion, you are agreeing to be bound
 * by the terms of the Apache License, Version 2.0.
 *
 * You must not remove this notice, or any other, from this software.
 *
 * ***************************************************************************/

using System;
using System.Diagnostics.CodeAnalysis;
using System.Runtime.InteropServices;
using EnvDTE;
using VSLangProj;

namespace Microsoft.VisualStudioTools.Project.Automation
{
    /// <summary>
    /// Automation-friendly (DTE) wrapper around a language-specific project node.
    /// Only a subset of the <see cref="VSProject"/> surface is supported; the
    /// remaining members throw <see cref="NotImplementedException"/>.
    /// </summary>
    [SuppressMessage("Microsoft.Naming", "CA1709:IdentifiersShouldBeCasedCorrectly", MessageId = "OAVS")]
    [ComVisible(true), CLSCompliant(false)]
    public class OAVSProject : VSProject
    {
        #region fields
        private readonly ProjectNode _project;
        private OAVSProjectEvents _events; // created lazily by the Events property
        #endregion

        #region ctors
        internal OAVSProject(ProjectNode project)
        {
            _project = project;
        }
        #endregion

        #region VSProject Members

        public virtual ProjectItem AddWebReference(string bstrUrl)
        {
            throw new NotImplementedException();
        }

        public virtual BuildManager BuildManager
        {
            get { throw new NotImplementedException(); }
        }

        public virtual void CopyProject(string bstrDestFolder, string bstrDestUNCPath, prjCopyProjectOption copyProjectOption, string bstrUsername, string bstrPassword)
        {
            throw new NotImplementedException();
        }

        public virtual ProjectItem CreateWebReferencesFolder()
        {
            throw new NotImplementedException();
        }

        /// <summary>The top-level automation object of the hosting IDE.</summary>
        public virtual DTE DTE
        {
            get { return (EnvDTE.DTE)_project.Site.GetService(typeof(EnvDTE.DTE)); }
        }

        /// <summary>Lazily created event source for this project.</summary>
        public virtual VSProjectEvents Events
        {
            get
            {
                if (_events == null)
                {
                    _events = new OAVSProjectEvents(this);
                }
                return _events;
            }
        }

        public virtual void Exec(prjExecCommand command, int bSuppressUI, object varIn, out object pVarOut)
        {
            throw new NotImplementedException();
        }

        public virtual void GenerateKeyPairFiles(string strPublicPrivateFile, string strPublicOnlyFile)
        {
            throw new NotImplementedException();
        }

        public virtual string GetUniqueFilename(object pDispatch, string bstrRoot, string bstrDesiredExt)
        {
            throw new NotImplementedException();
        }

        public virtual Imports Imports
        {
            get { throw new NotImplementedException(); }
        }

        /// <summary>The automation object of the wrapped project node.</summary>
        public virtual EnvDTE.Project Project
        {
            get { return _project.GetAutomationObject() as EnvDTE.Project; }
        }

        /// <summary>
        /// The project's references collection, or null when no reference container exists.
        /// </summary>
        public virtual References References
        {
            get
            {
                return _project.GetReferenceContainer() is ReferenceContainerNode referenceContainer
                    ? referenceContainer.Object as References
                    : null;
            }
        }

        public virtual void Refresh()
        {
            throw new NotImplementedException();
        }

        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Design", "CA1065:DoNotRaiseExceptionsInUnexpectedLocations")]
        public virtual string TemplatePath
        {
            get { throw new NotImplementedException(); }
        }

        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Design", "CA1065:DoNotRaiseExceptionsInUnexpectedLocations")]
        public virtual ProjectItem WebReferencesFolder
        {
            get { throw new NotImplementedException(); }
        }

        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Design", "CA1065:DoNotRaiseExceptionsInUnexpectedLocations")]
        public virtual bool WorkOffline
        {
            get { throw new NotImplementedException(); }
            set { throw new NotImplementedException(); }
        }

        #endregion
    }

    /// <summary>
    /// Provides access to language-specific project events.
    /// </summary>
    [SuppressMessage("Microsoft.Naming", "CA1709:IdentifiersShouldBeCasedCorrectly", MessageId = "OAVS")]
    [ComVisible(true), CLSCompliant(false)]
    public class OAVSProjectEvents : VSProjectEvents
    {
        #region fields
        private readonly OAVSProject _vsProject;
        #endregion

        #region ctors
        public OAVSProjectEvents(OAVSProject vsProject)
        {
            _vsProject = vsProject;
        }
        #endregion

        #region VSProjectEvents Members

        // NOTE: OAVSProject.BuildManager currently throws, so this getter throws as well.
        public virtual BuildManagerEvents BuildManagerEvents
        {
            get { return _vsProject.BuildManager as BuildManagerEvents; }
        }

        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Design", "CA1065:DoNotRaiseExceptionsInUnexpectedLocations")]
        public virtual ImportsEvents ImportsEvents
        {
            get { throw new NotImplementedException(); }
        }

        public virtual ReferencesEvents ReferencesEvents
        {
            get { return _vsProject.References as ReferencesEvents; }
        }

        #endregion
    }
}
using Orleans.Serialization.Buffers;
using Orleans.Serialization.Codecs;
using Orleans.Serialization.Session;
using Orleans.Serialization.WireProtocol;
using System;
using System.Buffers;
using System.IO;
using System.Text;

namespace Orleans.Serialization.Utilities
{
    /// <summary>
    /// Utilities for formatting an encoded bitstream in a textual manner.
    /// </summary>
    public static class BitStreamFormatter
    {
        /// <summary>
        /// Formats the provided buffer.
        /// </summary>
        /// <typeparam name="TInput">The reader input type.</typeparam>
        /// <param name="reader">The reader.</param>
        /// <returns>The formatted input.</returns>
        public static string Format<TInput>(ref Reader<TInput> reader)
        {
            var res = new StringBuilder();
            Format(ref reader, res);
            return res.ToString();
        }

        /// <summary>
        /// Formats the specified array.
        /// </summary>
        /// <param name="array">The array.</param>
        /// <param name="session">The session.</param>
        /// <returns>The formatted input.</returns>
        public static string Format(byte[] array, SerializerSession session)
        {
            var reader = Reader.Create(array, session);
            return Format(ref reader);
        }

        /// <summary>
        /// Formats the specified buffer.
        /// </summary>
        /// <param name="input">The input buffer.</param>
        /// <param name="session">The session.</param>
        /// <returns>The formatted input.</returns>
        public static string Format(ReadOnlySpan<byte> input, SerializerSession session)
        {
            var reader = Reader.Create(input, session);
            return Format(ref reader);
        }

        /// <summary>
        /// Formats the specified buffer.
        /// </summary>
        /// <param name="input">The input buffer.</param>
        /// <param name="session">The session.</param>
        /// <returns>The formatted input.</returns>
        public static string Format(ReadOnlyMemory<byte> input, SerializerSession session)
        {
            var reader = Reader.Create(input, session);
            return Format(ref reader);
        }

        /// <summary>
        /// Formats the specified buffer.
        /// </summary>
        /// <param name="input">The input buffer.</param>
        /// <param name="session">The session.</param>
        /// <returns>The formatted input.</returns>
        public static string Format(ReadOnlySequence<byte> input, SerializerSession session)
        {
            var reader = Reader.Create(input, session);
            return Format(ref reader);
        }

        /// <summary>
        /// Formats the specified buffer.
        /// </summary>
        /// <param name="input">The input buffer.</param>
        /// <param name="session">The session.</param>
        /// <returns>The formatted input.</returns>
        public static string Format(Stream input, SerializerSession session)
        {
            var reader = Reader.Create(input, session);
            return Format(ref reader);
        }

        /// <summary>
        /// Formats the specified buffer.
        /// </summary>
        /// <typeparam name="TInput">The reader input type.</typeparam>
        /// <param name="reader">The reader.</param>
        /// <param name="result">The destination string builder.</param>
        public static void Format<TInput>(ref Reader<TInput> reader, StringBuilder result)
        {
            // Read the first (root) field header, then format it and everything it contains.
            var (field, type) = reader.ReadFieldHeaderForAnalysis();
            FormatField(ref reader, field, type, field.FieldIdDelta, result, indentation: 0);
        }

        // Formats one field (header + value) at the given indentation level.
        // Tag-delimited fields recurse via FormatTagDelimitedField.
        private static void FormatField<TInput>(ref Reader<TInput> reader, Field field, string typeName, uint id, StringBuilder res, int indentation)
        {
            var indentString = new string(' ', indentation);
            AppendAddress(ref reader, res);
            res.Append(indentString);

            // References cannot themselves be referenced.
            if (field.WireType == WireType.Reference)
            {
                ReferenceCodec.MarkValueField(reader.Session);
                var refId = reader.ReadVarUInt32();
                var exists = reader.Session.ReferencedObjects.TryGetReferencedObject(refId, out var refd);
                res.Append('[');
                FormatFieldHeader(res, reader.Session, field, id, typeName);
                res.Append($" Reference: {refId} ({(exists ? $"{refd}" : "unknown")})");
                res.Append(']');
                return;
            }

            // Record a placeholder so that this field can later be correctly deserialized if it is referenced.
            ReferenceCodec.RecordObject(reader.Session, new UnknownFieldMarker(field, reader.Position));

            res.Append('[');
            FormatFieldHeader(res, reader.Session, field, id, typeName);
            res.Append(']');
            res.Append(" Value: ");
            switch (field.WireType)
            {
                case WireType.VarInt:
                    {
                        var a = reader.ReadVarUInt64();
                        // Small values are printed in decimal with the hex form in parentheses;
                        // larger values in hex only.
                        if (a < 10240)
                        {
                            res.Append($"{a} (0x{a:X})");
                        }
                        else
                        {
                            res.Append($"0x{a:X}");
                        }
                    }
                    break;
                case WireType.TagDelimited:
                    // Since tag delimited fields can be comprised of other fields, recursively consume those, too.
                    res.Append($"{{\n");
                    reader.FormatTagDelimitedField(res, indentation + 1);
                    res.AppendLine();
                    AppendAddress(ref reader, res);
                    res.Append($"{indentString}}}");
                    break;
                case WireType.LengthPrefixed:
                    {
                        var length = reader.ReadVarUInt32();
                        res.Append($"(length: {length}b) [");
                        var a = reader.ReadBytes(length);
                        FormatByteArray(res, a);
                        res.Append(']');
                    }
                    break;
                case WireType.Fixed32:
                    {
                        var a = reader.ReadUInt32();
                        if (a < 10240)
                        {
                            res.Append($"{a} (0x{a:X})");
                        }
                        else
                        {
                            res.Append($"0x{a:X}");
                        }
                    }
                    break;
                case WireType.Fixed64:
                    {
                        var a = reader.ReadUInt64();
                        if (a < 10240)
                        {
                            res.Append($"{a} (0x{a:X})");
                        }
                        else
                        {
                            res.Append($"0x{a:X}");
                        }
                    }
                    break;
                case WireType.Extended:
                    SkipFieldExtension.ThrowUnexpectedExtendedWireType(field);
                    break;
                default:
                    SkipFieldExtension.ThrowUnexpectedWireType(field);
                    break;
            }
        }

        // Prints a byte payload either as a quoted ASCII string (when every byte is < 0x7F)
        // or as space-separated hex bytes.
        private static void FormatByteArray(StringBuilder res, byte[] a)
        {
            var isAscii = true;
            foreach (var b in a)
            {
                if (b >= 0x7F)
                {
                    isAscii = false;
                }
            }

            if (isAscii)
            {
                res.Append('"');
                res.Append(Encoding.ASCII.GetString(a));
                res.Append('"');
            }
            else
            {
                bool first = true;
                foreach (var b in a)
                {
                    if (!first)
                    {
                        res.Append(' ');
                    }
                    else
                    {
                        first = false;
                    }

                    res.Append($"{b:X2}");
                }
            }
        }

        // Appends "#<refId> <wire type> [Id] [SchemaType] [RuntimeType] [extended wire type]".
        private static void FormatFieldHeader(StringBuilder res, SerializerSession session, Field field, uint id, string typeName)
        {
            _ = res
                .Append($"#{session.ReferencedObjects.CurrentReferenceId} ")
                .Append((string)field.WireType.ToString());
            if (field.HasFieldId)
            {
                _ = res.Append($" Id: {id}");
            }

            if (field.IsSchemaTypeValid)
            {
                _ = res.Append($" SchemaType: {field.SchemaType}");
            }

            if (field.HasExtendedSchemaType)
            {
                _ = res.Append($" RuntimeType: {field.FieldType} (name: {typeName})");
            }

            if (field.WireType == WireType.Extended)
            {
                _ = res.Append($": {field.ExtendedWireType}");
            }
        }

        /// <summary>
        /// Consumes a tag-delimited field.
        /// </summary>
        private static void FormatTagDelimitedField<TInput>(this ref Reader<TInput> reader, StringBuilder res, int indentation)
        {
            var id = 0U;
            var first = true;
            while (true)
            {
                var (field, type) = reader.ReadFieldHeaderForAnalysis();
                if (field.IsEndObject)
                {
                    break;
                }

                if (field.IsEndBaseFields)
                {
                    // Field ids restart at zero after the base-type section ends.
                    res.AppendLine();
                    AppendAddress(ref reader, res);
                    res.Append($"{new string(' ', indentation)}- End of base type fields -");
                    if (first)
                    {
                        first = false;
                    }

                    id = 0U;
                    continue;
                }

                // Field ids are delta-encoded relative to the previous field.
                id += field.FieldIdDelta;
                if (!first)
                {
                    res.AppendLine();
                }
                else
                {
                    first = false;
                }

                FormatField(ref reader, field, type, id, res, indentation);
            }
        }

        // Prefixes each formatted line with the reader's current position as a 4-digit hex address.
        private static void AppendAddress<TInput>(ref Reader<TInput> reader, StringBuilder res)
        {
            res.Append($"0x{reader.Position:X4} ");
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Globalization;
using System.Threading;

namespace System.ComponentModel
{
    /// <devdoc>
    ///    <para>Specifies the default value for a property.</para>
    /// </devdoc>
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Design", "CA1019:DefineAccessorsForAttributeArguments")]
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Performance", "CA1813:AvoidUnsealedAttributes")]
    [AttributeUsage(AttributeTargets.All)]
    public class DefaultValueAttribute : Attribute
    {
        /// <devdoc>
        ///     This is the default value.
        /// </devdoc>
        private object _value;

        // Delegate ad hoc created 'TypeDescriptor.ConvertFromInvariantString' reflection object cache.
        // Holds either the bound Func<Type, string, object> or a plain sentinel object when
        // TypeDescriptor is not available in the current application.
        static object s_convertFromInvariantString;

        /// <devdoc>
        /// <para>Initializes a new instance of the <see cref='System.ComponentModel.DefaultValueAttribute'/> class, converting the
        ///    specified value to the specified type, and using the U.S. English culture as the
        ///    translation context.</para>
        /// </devdoc>
        public DefaultValueAttribute(Type type, string value)
        {
            // The try/catch here is because attributes should never throw exceptions. We would fail to
            // load an otherwise normal class. On failure _value simply remains null.
            try
            {
                if (TryConvertFromInvariantString(type, value, out object convertedValue))
                {
                    _value = convertedValue;
                }
                else if (type.IsSubclassOf(typeof(Enum)))
                {
                    _value = Enum.Parse(type, value, true);
                }
                else if (type == typeof(TimeSpan))
                {
                    // Fix: parse with the invariant culture, per the documented contract above.
                    // The parameterless overload would honor the *current* culture.
                    _value = TimeSpan.Parse(value, CultureInfo.InvariantCulture);
                }
                else
                {
                    _value = Convert.ChangeType(value, type, CultureInfo.InvariantCulture);
                }

                return;

                // Looking for ad hoc created TypeDescriptor.ConvertFromInvariantString(Type, string).
                // Returns false when TypeDescriptor cannot be located, so the callers above fall
                // back to direct conversions.
                bool TryConvertFromInvariantString(Type typeToConvert, string stringValue, out object conversionResult)
                {
                    conversionResult = null;

                    // Lazy init reflection objects. A benign race may create the delegate twice;
                    // Volatile.Write publishes the fully-constructed object.
                    if (s_convertFromInvariantString == null)
                    {
                        Type typeDescriptorType = Type.GetType("System.ComponentModel.TypeDescriptor, System.ComponentModel.TypeConverter", throwOnError: false);
                        Volatile.Write(ref s_convertFromInvariantString, typeDescriptorType == null ? new object() :
                            Delegate.CreateDelegate(typeof(Func<Type, string, object>), typeDescriptorType, "ConvertFromInvariantString", ignoreCase: false));
                    }

                    if (!(s_convertFromInvariantString is Func<Type, string, object> convertFromInvariantString))
                        return false;

                    conversionResult = convertFromInvariantString(typeToConvert, stringValue);
                    return true;
                }
            }
            catch
            {
                // Intentionally swallowed: attribute construction must not throw (see note above).
            }
        }

        /// <devdoc>
        /// <para>Initializes a new instance of the <see cref='System.ComponentModel.DefaultValueAttribute'/> class using a Unicode
        ///    character.</para>
        /// </devdoc>
        public DefaultValueAttribute(char value)
        {
            _value = value;
        }

        /// <devdoc>
        /// <para>Initializes a new instance of the <see cref='System.ComponentModel.DefaultValueAttribute'/> class using an 8-bit unsigned
        ///    integer.</para>
        /// </devdoc>
        public DefaultValueAttribute(byte value)
        {
            _value = value;
        }

        /// <devdoc>
        /// <para>Initializes a new instance of the <see cref='System.ComponentModel.DefaultValueAttribute'/> class using a 16-bit signed
        ///    integer.</para>
        /// </devdoc>
        public DefaultValueAttribute(short value)
        {
            _value = value;
        }

        /// <devdoc>
        /// <para>Initializes a new instance of the <see cref='System.ComponentModel.DefaultValueAttribute'/> class using a 32-bit signed
        ///    integer.</para>
        /// </devdoc>
        public DefaultValueAttribute(int value)
        {
            _value = value;
        }

        /// <devdoc>
        /// <para>Initializes a new instance of the <see cref='System.ComponentModel.DefaultValueAttribute'/> class using a 64-bit signed
        ///    integer.</para>
        /// </devdoc>
        public DefaultValueAttribute(long value)
        {
            _value = value;
        }

        /// <devdoc>
        /// <para>Initializes a new instance of the <see cref='System.ComponentModel.DefaultValueAttribute'/> class using a
        ///    single-precision floating point number.</para>
        /// </devdoc>
        public DefaultValueAttribute(float value)
        {
            _value = value;
        }

        /// <devdoc>
        /// <para>Initializes a new instance of the <see cref='System.ComponentModel.DefaultValueAttribute'/> class using a
        ///    double-precision floating point number.</para>
        /// </devdoc>
        public DefaultValueAttribute(double value)
        {
            _value = value;
        }

        /// <devdoc>
        /// <para>Initializes a new instance of the <see cref='System.ComponentModel.DefaultValueAttribute'/> class using a <see cref='System.Boolean'/>
        /// value.</para>
        /// </devdoc>
        public DefaultValueAttribute(bool value)
        {
            _value = value;
        }

        /// <devdoc>
        /// <para>Initializes a new instance of the <see cref='System.ComponentModel.DefaultValueAttribute'/> class using a <see cref='System.String'/>.</para>
        /// </devdoc>
        public DefaultValueAttribute(string value)
        {
            _value = value;
        }

        /// <devdoc>
        /// <para>Initializes a new instance of the <see cref='System.ComponentModel.DefaultValueAttribute'/> class.</para>
        /// </devdoc>
        public DefaultValueAttribute(object value)
        {
            _value = value;
        }

        /// <devdoc>
        /// <para>Initializes a new instance of the <see cref='System.ComponentModel.DefaultValueAttribute'/> class using a <see cref='System.SByte'/>
        /// value.</para>
        /// </devdoc>
        [CLSCompliant(false)]
        public DefaultValueAttribute(sbyte value)
        {
            _value = value;
        }

        /// <devdoc>
        /// <para>Initializes a new instance of the <see cref='System.ComponentModel.DefaultValueAttribute'/> class using a <see cref='System.UInt16'/>
        /// value.</para>
        /// </devdoc>
        [CLSCompliant(false)]
        public DefaultValueAttribute(ushort value)
        {
            _value = value;
        }

        /// <devdoc>
        /// <para>Initializes a new instance of the <see cref='System.ComponentModel.DefaultValueAttribute'/> class using a <see cref='System.UInt32'/>
        /// value.</para>
        /// </devdoc>
        [CLSCompliant(false)]
        public DefaultValueAttribute(uint value)
        {
            _value = value;
        }

        /// <devdoc>
        /// <para>Initializes a new instance of the <see cref='System.ComponentModel.DefaultValueAttribute'/> class using a <see cref='System.UInt64'/>
        /// value.</para>
        /// </devdoc>
        [CLSCompliant(false)]
        public DefaultValueAttribute(ulong value)
        {
            _value = value;
        }

        /// <devdoc>
        /// <para>Gets the default value of the property this attribute is bound to.</para>
        /// </devdoc>
        public virtual object Value
        {
            get
            {
                return _value;
            }
        }

        /// <summary>Two attributes are equal when their <see cref="Value"/>s are equal (or both null).</summary>
        public override bool Equals(object obj)
        {
            if (obj == this)
            {
                return true;
            }

            if (obj is DefaultValueAttribute other)
            {
                if (Value != null)
                {
                    return Value.Equals(other.Value);
                }
                else
                {
                    return (other.Value == null);
                }
            }

            return false;
        }

        // Attribute.GetHashCode hashes the instance's fields (here, _value), so this stays
        // consistent with the value-based Equals above.
        public override int GetHashCode()
        {
            return base.GetHashCode();
        }

        /// <summary>Allows derived attributes to replace the stored default value.</summary>
        protected void SetValue(object value)
        {
            _value = value;
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ namespace Apache.Ignite.Core.Impl.Portable { using System; using System.Collections; using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; using System.IO; using Apache.Ignite.Core.Impl.Portable.IO; using Apache.Ignite.Core.Impl.Portable.Metadata; using Apache.Ignite.Core.Portable; /// <summary> /// Portable writer implementation. /// </summary> internal class PortableWriterImpl : IPortableWriter, IPortableRawWriter { /** Marshaller. */ private readonly PortableMarshaller _marsh; /** Stream. */ private readonly IPortableStream _stream; /** Builder (used only during build). */ private PortableBuilderImpl _builder; /** Handles. */ private PortableHandleDictionary<object, long> _hnds; /** Metadatas collected during this write session. */ private IDictionary<int, IPortableMetadata> _metas; /** Current type ID. */ private int _curTypeId; /** Current name converter */ private IPortableNameMapper _curConverter; /** Current mapper. */ private IPortableIdMapper _curMapper; /** Current metadata handler. */ private IPortableMetadataHandler _curMetaHnd; /** Current raw flag. */ private bool _curRaw; /** Current raw position. */ private long _curRawPos; /** Ignore handles flag. 
*/
private bool _detach;

/** Object started ignore mode. */
private bool _detachMode;

/// <summary>
/// Gets the marshaller.
/// </summary>
internal PortableMarshaller Marshaller
{
    get { return _marsh; }
}

/// <summary>
/// Write named boolean value.
/// </summary>
/// <param name="fieldName">Field name.</param>
/// <param name="val">Boolean value.</param>
public void WriteBoolean(string fieldName, bool val)
{
    // Last argument is the known payload length in bytes (excluding the type byte).
    WriteSimpleField(fieldName, PortableUtils.TypeBool, val, PortableSystemHandlers.WriteHndBoolTyped, 1);
}

/// <summary>
/// Write boolean value.
/// </summary>
/// <param name="val">Boolean value.</param>
public void WriteBoolean(bool val)
{
    _stream.WriteBool(val);
}

/// <summary>
/// Write named boolean array.
/// </summary>
/// <param name="fieldName">Field name.</param>
/// <param name="val">Boolean array.</param>
public void WriteBooleanArray(string fieldName, bool[] val)
{
    // The extra 4 bytes presumably cover the serialized array length prefix -- TODO confirm.
    WriteSimpleNullableField(fieldName, PortableUtils.TypeArrayBool, val,
        PortableSystemHandlers.WriteHndBoolArrayTyped, val != null ? val.Length + 4 : 0);
}

/// <summary>
/// Write boolean array.
/// </summary>
/// <param name="val">Boolean array.</param>
public void WriteBooleanArray(bool[] val)
{
    WriteSimpleNullableRawField(val, PortableSystemHandlers.WriteHndBoolArrayTyped);
}

/// <summary>
/// Write named byte value.
/// </summary>
/// <param name="fieldName">Field name.</param>
/// <param name="val">Byte value.</param>
public void WriteByte(string fieldName, byte val)
{
    WriteSimpleField(fieldName, PortableUtils.TypeByte, val, PortableSystemHandlers.WriteHndByteTyped, 1);
}

/// <summary>
/// Write byte value.
/// </summary>
/// <param name="val">Byte value.</param>
public void WriteByte(byte val)
{
    _stream.WriteByte(val);
}

/// <summary>
/// Write named byte array.
/// </summary>
/// <param name="fieldName">Field name.</param>
/// <param name="val">Byte array.</param>
public void WriteByteArray(string fieldName, byte[] val)
{
    WriteSimpleNullableField(fieldName, PortableUtils.TypeArrayByte, val,
        PortableSystemHandlers.WriteHndByteArrayTyped, val != null ? val.Length + 4 : 0);
}

/// <summary>
/// Write byte array.
/// </summary>
/// <param name="val">Byte array.</param>
public void WriteByteArray(byte[] val)
{
    WriteSimpleNullableRawField(val, PortableSystemHandlers.WriteHndByteArrayTyped);
}

/// <summary>
/// Write named short value.
/// </summary>
/// <param name="fieldName">Field name.</param>
/// <param name="val">Short value.</param>
public void WriteShort(string fieldName, short val)
{
    WriteSimpleField(fieldName, PortableUtils.TypeShort, val, PortableSystemHandlers.WriteHndShortTyped, 2);
}

/// <summary>
/// Write short value.
/// </summary>
/// <param name="val">Short value.</param>
public void WriteShort(short val)
{
    _stream.WriteShort(val);
}

/// <summary>
/// Write named short array.
/// </summary>
/// <param name="fieldName">Field name.</param>
/// <param name="val">Short array.</param>
public void WriteShortArray(string fieldName, short[] val)
{
    // 2 bytes per element plus the 4-byte prefix.
    WriteSimpleNullableField(fieldName, PortableUtils.TypeArrayShort, val,
        PortableSystemHandlers.WriteHndShortArrayTyped, val != null ? 2 * val.Length + 4 : 0);
}

/// <summary>
/// Write short array.
/// </summary>
/// <param name="val">Short array.</param>
public void WriteShortArray(short[] val)
{
    WriteSimpleNullableRawField(val, PortableSystemHandlers.WriteHndShortArrayTyped);
}

/// <summary>
/// Write named char value.
/// </summary>
/// <param name="fieldName">Field name.</param>
/// <param name="val">Char value.</param>
public void WriteChar(string fieldName, char val)
{
    WriteSimpleField(fieldName, PortableUtils.TypeChar, val, PortableSystemHandlers.WriteHndCharTyped, 2);
}

/// <summary>
/// Write char value.
/// </summary>
/// <param name="val">Char value.</param>
public void WriteChar(char val)
{
    _stream.WriteChar(val);
}

/// <summary>
/// Write named char array.
/// </summary>
/// <param name="fieldName">Field name.</param>
/// <param name="val">Char array.</param>
public void WriteCharArray(string fieldName, char[] val)
{
    WriteSimpleNullableField(fieldName, PortableUtils.TypeArrayChar, val,
        PortableSystemHandlers.WriteHndCharArrayTyped, val != null ? 2 * val.Length + 4 : 0);
}

/// <summary>
/// Write char array.
/// </summary>
/// <param name="val">Char array.</param>
public void WriteCharArray(char[] val)
{
    WriteSimpleNullableRawField(val, PortableSystemHandlers.WriteHndCharArrayTyped);
}

/// <summary>
/// Write named int value.
/// </summary>
/// <param name="fieldName">Field name.</param>
/// <param name="val">Int value.</param>
public void WriteInt(string fieldName, int val)
{
    WriteSimpleField(fieldName, PortableUtils.TypeInt, val, PortableSystemHandlers.WriteHndIntTyped, 4);
}

/// <summary>
/// Write int value.
/// </summary>
/// <param name="val">Int value.</param>
public void WriteInt(int val)
{
    _stream.WriteInt(val);
}

/// <summary>
/// Write named int array.
/// </summary>
/// <param name="fieldName">Field name.</param>
/// <param name="val">Int array.</param>
public void WriteIntArray(string fieldName, int[] val)
{
    WriteSimpleNullableField(fieldName, PortableUtils.TypeArrayInt, val,
        PortableSystemHandlers.WriteHndIntArrayTyped, val != null ? 4 * val.Length + 4 : 0);
}

/// <summary>
/// Write int array.
/// </summary>
/// <param name="val">Int array.</param>
public void WriteIntArray(int[] val)
{
    WriteSimpleNullableRawField(val, PortableSystemHandlers.WriteHndIntArrayTyped);
}

/// <summary>
/// Write named long value.
/// </summary>
/// <param name="fieldName">Field name.</param>
/// <param name="val">Long value.</param>
public void WriteLong(string fieldName, long val)
{
    WriteSimpleField(fieldName, PortableUtils.TypeLong, val, PortableSystemHandlers.WriteHndLongTyped, 8);
}

/// <summary>
/// Write long value.
/// </summary>
/// <param name="val">Long value.</param>
public void WriteLong(long val)
{
    _stream.WriteLong(val);
}

/// <summary>
/// Write named long array.
/// </summary>
/// <param name="fieldName">Field name.</param>
/// <param name="val">Long array.</param>
public void WriteLongArray(string fieldName, long[] val)
{
    WriteSimpleNullableField(fieldName, PortableUtils.TypeArrayLong, val,
        PortableSystemHandlers.WriteHndLongArrayTyped, val != null ? 8 * val.Length + 4 : 0);
}

/// <summary>
/// Write long array.
/// </summary>
/// <param name="val">Long array.</param>
public void WriteLongArray(long[] val)
{
    WriteSimpleNullableRawField(val, PortableSystemHandlers.WriteHndLongArrayTyped);
}

/// <summary>
/// Write named float value.
/// </summary>
/// <param name="fieldName">Field name.</param>
/// <param name="val">Float value.</param>
public void WriteFloat(string fieldName, float val)
{
    WriteSimpleField(fieldName, PortableUtils.TypeFloat, val, PortableSystemHandlers.WriteHndFloatTyped, 4);
}

/// <summary>
/// Write float value.
/// </summary>
/// <param name="val">Float value.</param>
public void WriteFloat(float val)
{
    _stream.WriteFloat(val);
}

/// <summary>
/// Write named float array.
/// </summary>
/// <param name="fieldName">Field name.</param>
/// <param name="val">Float array.</param>
public void WriteFloatArray(string fieldName, float[] val)
{
    WriteSimpleNullableField(fieldName, PortableUtils.TypeArrayFloat, val,
        PortableSystemHandlers.WriteHndFloatArrayTyped, val != null ? 4 * val.Length + 4 : 0);
}

/// <summary>
/// Write float array.
/// </summary>
/// <param name="val">Float array.</param>
public void WriteFloatArray(float[] val)
{
    WriteSimpleNullableRawField(val, PortableSystemHandlers.WriteHndFloatArrayTyped);
}

/// <summary>
/// Write named double value.
/// </summary>
/// <param name="fieldName">Field name.</param>
/// <param name="val">Double value.</param>
public void WriteDouble(string fieldName, double val)
{
    WriteSimpleField(fieldName, PortableUtils.TypeDouble, val, PortableSystemHandlers.WriteHndDoubleTyped, 8);
}

/// <summary>
/// Write double value.
/// </summary>
/// <param name="val">Double value.</param>
public void WriteDouble(double val)
{
    _stream.WriteDouble(val);
}

/// <summary>
/// Write named double array.
/// </summary>
/// <param name="fieldName">Field name.</param>
/// <param name="val">Double array.</param>
public void WriteDoubleArray(string fieldName, double[] val)
{
    WriteSimpleNullableField(fieldName, PortableUtils.TypeArrayDouble, val,
        PortableSystemHandlers.WriteHndDoubleArrayTyped, val != null ? 8 * val.Length + 4 : 0);
}

/// <summary>
/// Write double array.
/// </summary>
/// <param name="val">Double array.</param>
public void WriteDoubleArray(double[] val)
{
    WriteSimpleNullableRawField(val, PortableSystemHandlers.WriteHndDoubleArrayTyped);
}

/// <summary>
/// Write named decimal value.
/// </summary>
/// <param name="fieldName">Field name.</param>
/// <param name="val">Decimal value.</param>
public void WriteDecimal(string fieldName, decimal val)
{
    // Decimal has a variable-length encoding, so the unknown-length overload is used.
    WriteSimpleNullableField(fieldName, PortableUtils.TypeDecimal, val, PortableSystemHandlers.WriteHndDecimalTyped);
}

/// <summary>
/// Write decimal value.
/// </summary>
/// <param name="val">Decimal value.</param>
public void WriteDecimal(decimal val)
{
    WriteSimpleNullableRawField(val, PortableSystemHandlers.WriteHndDecimalTyped);
}

/// <summary>
/// Write named decimal array.
/// </summary>
/// <param name="fieldName">Field name.</param>
/// <param name="val">Decimal array.</param>
public void WriteDecimalArray(string fieldName, decimal[] val)
{
    WriteSimpleNullableField(fieldName, PortableUtils.TypeArrayDecimal, val,
        PortableSystemHandlers.WriteHndDecimalArrayTyped);
}

/// <summary>
/// Write decimal array.
/// </summary>
/// <param name="val">Decimal array.</param>
public void WriteDecimalArray(decimal[] val)
{
    WriteSimpleNullableRawField(val, PortableSystemHandlers.WriteHndDecimalArrayTyped);
}

/// <summary>
/// Write named date value.
/// </summary>
/// <param name="fieldName">Field name.</param>
/// <param name="val">Date value.</param>
public void WriteDate(string fieldName, DateTime? val)
{
    WriteSimpleNullableField(fieldName, PortableUtils.TypeDate, val,
        PortableSystemHandlers.WriteHndDateTyped, val.HasValue ? 12 : 0);
}

/// <summary>
/// Write date value.
/// </summary>
/// <param name="val">Date value.</param>
public void WriteDate(DateTime? val)
{
    WriteSimpleNullableRawField(val, PortableSystemHandlers.WriteHndDateTyped);
}

/// <summary>
/// Write named date array.
/// </summary>
/// <param name="fieldName">Field name.</param>
/// <param name="val">Date array.</param>
public void WriteDateArray(string fieldName, DateTime?[] val)
{
    WriteSimpleNullableField(fieldName, PortableUtils.TypeArrayDate, val,
        PortableSystemHandlers.WriteHndDateArrayTyped);
}

/// <summary>
/// Write date array.
/// </summary>
/// <param name="val">Date array.</param>
public void WriteDateArray(DateTime?[] val)
{
    WriteSimpleNullableRawField(val, PortableSystemHandlers.WriteHndDateArrayTyped);
}

/// <summary>
/// Write named string value.
/// </summary>
/// <param name="fieldName">Field name.</param>
/// <param name="val">String value.</param>
public void WriteString(string fieldName, string val)
{
    WriteSimpleNullableField(fieldName, PortableUtils.TypeString, val, PortableSystemHandlers.WriteHndStringTyped);
}

/// <summary>
/// Write string value.
/// </summary>
/// <param name="val">String value.</param>
public void WriteString(string val)
{
    WriteSimpleNullableRawField(val, PortableSystemHandlers.WriteHndStringTyped);
}

/// <summary>
/// Write named string array.
/// </summary>
/// <param name="fieldName">Field name.</param>
/// <param name="val">String array.</param>
public void WriteStringArray(string fieldName, string[] val)
{
    WriteSimpleNullableField(fieldName, PortableUtils.TypeArrayString, val,
        PortableSystemHandlers.WriteHndStringArrayTyped);
}

/// <summary>
/// Write string array.
/// </summary>
/// <param name="val">String array.</param>
public void WriteStringArray(string[] val)
{
    WriteSimpleNullableRawField(val, PortableSystemHandlers.WriteHndStringArrayTyped);
}

/// <summary>
/// Write named GUID value.
/// </summary>
/// <param name="fieldName">Field name.</param>
/// <param name="val">GUID value.</param>
public void WriteGuid(string fieldName, Guid? val)
{
    WriteSimpleNullableField(fieldName, PortableUtils.TypeGuid, val,
        PortableSystemHandlers.WriteHndGuidTyped, val.HasValue ? 16 : 0);
}

/// <summary>
/// Write GUID value.
/// </summary>
/// <param name="val">GUID value.</param>
public void WriteGuid(Guid? val)
{
    WriteSimpleNullableRawField(val, PortableSystemHandlers.WriteHndGuidTyped);
}

/// <summary>
/// Write named GUID array.
/// </summary>
/// <param name="fieldName">Field name.</param>
/// <param name="val">GUID array.</param>
public void WriteGuidArray(string fieldName, Guid?[] val)
{
    WriteSimpleNullableField(fieldName, PortableUtils.TypeArrayGuid, val,
        PortableSystemHandlers.WriteHndGuidArrayTyped);
}

/// <summary>
/// Write GUID array.
/// </summary>
/// <param name="val">GUID array.</param>
public void WriteGuidArray(Guid?[] val)
{
    WriteSimpleNullableRawField(val, PortableSystemHandlers.WriteHndGuidArrayTyped);
}

/// <summary>
/// Write named enum value.
/// </summary>
/// <typeparam name="T"></typeparam>
/// <param name="fieldName">Field name.</param>
/// <param name="val">Enum value.</param>
public void WriteEnum<T>(string fieldName, T val)
{
    WriteField(fieldName, PortableUtils.TypeEnum, val, PortableSystemHandlers.WriteHndEnum);
}

/// <summary>
/// Write enum value.
/// </summary>
/// <typeparam name="T"></typeparam>
/// <param name="val">Enum value.</param>
public void WriteEnum<T>(T val)
{
    Write(val, PortableSystemHandlers.WriteHndEnum);
}

/// <summary>
/// Write named enum array.
/// </summary>
/// <typeparam name="T"></typeparam>
/// <param name="fieldName">Field name.</param>
/// <param name="val">Enum array.</param>
public void WriteEnumArray<T>(string fieldName, T[] val)
{
    WriteField(fieldName, PortableUtils.TypeArrayEnum, val, PortableSystemHandlers.WriteHndEnumArray);
}

/// <summary>
/// Write enum array.
/// </summary>
/// <typeparam name="T"></typeparam>
/// <param name="val">Enum array.</param>
public void WriteEnumArray<T>(T[] val)
{
    Write(val, PortableSystemHandlers.WriteHndEnumArray);
}

/// <summary>
/// Write named object value.
/// </summary>
/// <typeparam name="T"></typeparam>
/// <param name="fieldName">Field name.</param>
/// <param name="val">Object value.</param>
public void WriteObject<T>(string fieldName, T val)
{
    // Null handler: full marshalling pipeline decides how to serialize the value.
    WriteField(fieldName, PortableUtils.TypeObject, val, null);
}

/// <summary>
/// Write object value.
/// </summary>
/// <typeparam name="T"></typeparam>
/// <param name="val">Object value.</param>
public void WriteObject<T>(T val)
{
    Write(val);
}

/// <summary>
/// Write named object array.
/// </summary>
/// <typeparam name="T"></typeparam>
/// <param name="fieldName">Field name.</param>
/// <param name="val">Object array.</param>
public void WriteObjectArray<T>(string fieldName, T[] val)
{
    WriteField(fieldName, PortableUtils.TypeArray, val, PortableSystemHandlers.WriteHndArray);
}

/// <summary>
/// Write object array.
/// </summary>
/// <typeparam name="T"></typeparam>
/// <param name="val">Object array.</param>
public void WriteObjectArray<T>(T[] val)
{
    Write(val, PortableSystemHandlers.WriteHndArray);
}

/// <summary>
/// Write named collection.
/// </summary>
/// <param name="fieldName">Field name.</param>
/// <param name="val">Collection.</param>
public void WriteCollection(string fieldName, ICollection val)
{
    WriteField(fieldName, PortableUtils.TypeCollection, val, null);
}

/// <summary>
/// Write collection.
/// </summary>
/// <param name="val">Collection.</param>
public void WriteCollection(ICollection val)
{
    Write(val);
}

/// <summary>
/// Write named generic collection.
/// </summary>
/// <typeparam name="T"></typeparam>
/// <param name="fieldName">Field name.</param>
/// <param name="val">Collection.</param>
public void WriteGenericCollection<T>(string fieldName, ICollection<T> val)
{
    WriteField(fieldName, PortableUtils.TypeCollection, val, null);
}

/// <summary>
/// Write generic collection.
/// </summary>
/// <typeparam name="T"></typeparam>
/// <param name="val">Collection.</param>
public void WriteGenericCollection<T>(ICollection<T> val)
{
    Write(val);
}

/// <summary>
/// Write named dictionary.
/// </summary>
/// <param name="fieldName">Field name.</param>
/// <param name="val">Dictionary.</param>
public void WriteDictionary(string fieldName, IDictionary val)
{
    WriteField(fieldName, PortableUtils.TypeDictionary, val, null);
}

/// <summary>
/// Write dictionary.
/// </summary>
/// <param name="val">Dictionary.</param>
public void WriteDictionary(IDictionary val)
{
    Write(val);
}

/// <summary>
/// Write named generic dictionary.
/// </summary>
/// <param name="fieldName">Field name.</param>
/// <param name="val">Dictionary.</param>
public void WriteGenericDictionary<TK, TV>(string fieldName, IDictionary<TK, TV> val)
{
    WriteField(fieldName, PortableUtils.TypeDictionary, val, null);
}

/// <summary>
/// Write generic dictionary.
/// </summary>
/// <param name="val">Dictionary.</param>
public void WriteGenericDictionary<TK, TV>(IDictionary<TK, TV> val)
{
    Write(val);
}

/// <summary>
/// Get raw writer.
/// </summary>
/// <returns>
/// Raw writer.
/// </returns>
public IPortableRawWriter RawWriter()
{
    // Switching to raw mode is one-way for the current object; remember where raw data starts.
    if (!_curRaw)
    {
        _curRaw = true;
        _curRawPos = _stream.Position;
    }

    return this;
}

/// <summary>
/// Set new builder.
/// </summary>
/// <param name="builder">Builder.</param>
/// <returns>Previous builder.</returns>
internal PortableBuilderImpl Builder(PortableBuilderImpl builder)
{
    PortableBuilderImpl ret = _builder;

    _builder = builder;

    return ret;
}

/// <summary>
/// Constructor.
/// </summary>
/// <param name="marsh">Marshaller.</param>
/// <param name="stream">Stream.</param>
internal PortableWriterImpl(PortableMarshaller marsh, IPortableStream stream)
{
    _marsh = marsh;
    _stream = stream;
}

/// <summary>
/// Write object.
/// </summary>
/// <param name="obj">Object.</param>
internal void Write<T>(T obj)
{
    Write(obj, null);
}

/// <summary>
/// Write object.
/// </summary>
/// <param name="obj">Object.</param>
/// <param name="handler">Optional write handler.</param>
[SuppressMessage("ReSharper", "FunctionComplexityOverflow")]
internal void Write<T>(T obj, object handler)
{
    // Apply detach mode if needed: the next object is written with a fresh handle
    // table so it does not share back-references with the enclosing object graph.
    PortableHandleDictionary<object, long> oldHnds = null;
    bool resetDetach = false;

    if (_detach)
    {
        _detach = false;
        _detachMode = true;
        resetDetach = true;

        oldHnds = _hnds;
        _hnds = null;
    }

    try
    {
        // Write null.
        if (obj == null)
        {
            _stream.WriteByte(PortableUtils.HdrNull);

            return;
        }

        if (_builder != null)
        {
            // Special case for portable object during build.
            PortableUserObject portObj = obj as PortableUserObject;

            if (portObj != null)
            {
                if (!WriteHandle(_stream.Position, portObj))
                    _builder.ProcessPortable(_stream, portObj);

                return;
            }

            // Special case for builder during build.
            PortableBuilderImpl portBuilder = obj as PortableBuilderImpl;

            if (portBuilder != null)
            {
                if (!WriteHandle(_stream.Position, portBuilder))
                    _builder.ProcessBuilder(_stream, portBuilder);

                return;
            }
        }

        // Try writing as well-known type.
        if (InvokeHandler(handler, handler as PortableSystemWriteDelegate, obj))
            return;

        Type type = obj.GetType();

        IPortableTypeDescriptor desc = _marsh.Descriptor(type);

        object typedHandler;
        PortableSystemWriteDelegate untypedHandler;

        if (desc == null)
        {
            typedHandler = null;
            untypedHandler = PortableSystemHandlers.WriteHandler(type);
        }
        else
        {
            typedHandler = desc.TypedHandler;
            untypedHandler = desc.UntypedHandler;
        }

        if (InvokeHandler(typedHandler, untypedHandler, obj))
            return;

        if (desc == null)
        {
            if (!type.IsSerializable)
                // If neither handler, nor descriptor exist, and not serializable, this is an exception.
                throw new PortableException("Unsupported object type [type=" + type + ", object=" + obj + ']');

            // Fall back to BCL serialization wrapped in a holder.
            Write(new SerializableObjectHolder(obj));

            return;
        }

        int pos = _stream.Position;

        // Dealing with handles: system-type serializers never participate in the handle table.
        if (!(desc.Serializer is IPortableSystemTypeSerializer) && WriteHandle(pos, obj))
            return;

        // Write header.
        _stream.WriteByte(PortableUtils.HdrFull);

        _stream.WriteBool(desc.UserType);
        _stream.WriteInt(desc.TypeId);
        _stream.WriteInt(obj.GetHashCode());

        // Skip length as it is not known in the first place.
        _stream.Seek(8, SeekOrigin.Current);

        // Preserve old frame (Write can be re-entered for nested objects).
        int oldTypeId = _curTypeId;
        IPortableNameMapper oldConverter = _curConverter;
        IPortableIdMapper oldMapper = _curMapper;
        IPortableMetadataHandler oldMetaHnd = _curMetaHnd;
        bool oldRaw = _curRaw;
        long oldRawPos = _curRawPos;

        // Push new frame.
        _curTypeId = desc.TypeId;
        _curConverter = desc.NameConverter;
        _curMapper = desc.Mapper;
        _curMetaHnd = desc.MetadataEnabled ? _marsh.MetadataHandler(desc) : null;
        _curRaw = false;
        _curRawPos = 0;

        // Write object fields.
        desc.Serializer.WritePortable(obj, this);

        // Calculate and write length back into the header slot skipped above.
        int retPos = _stream.Position;

        _stream.Seek(pos + 10, SeekOrigin.Begin);

        int len = retPos - pos;

        _stream.WriteInt(len);

        if (_curRawPos != 0)
            // When set, it is difference between object head and raw position.
            _stream.WriteInt((int)(_curRawPos - pos));
        else
            // When not set, it is equal to object length.
            _stream.WriteInt(len);

        _stream.Seek(retPos, SeekOrigin.Begin);

        // Collect metadata.
        if (_curMetaHnd != null)
        {
            IDictionary<string, int> meta = _curMetaHnd.OnObjectWriteFinished();

            if (meta != null)
                SaveMetadata(_curTypeId, desc.TypeName, desc.AffinityKeyFieldName, meta);
        }

        // Restore old frame.
        _curTypeId = oldTypeId;
        _curConverter = oldConverter;
        _curMapper = oldMapper;
        _curMetaHnd = oldMetaHnd;
        _curRaw = oldRaw;
        _curRawPos = oldRawPos;
    }
    finally
    {
        // Restore handles if needed.
        if (resetDetach)
        {
            // Add newly recorded handles without overriding already existing ones.
            if (_hnds != null)
            {
                if (oldHnds == null)
                    oldHnds = _hnds;
                else
                    oldHnds.Merge(_hnds);
            }

            _hnds = oldHnds;

            _detachMode = false;
        }
    }
}

/// <summary>
/// Add handle to handles map.
/// </summary>
/// <param name="pos">Position in stream.</param>
/// <param name="obj">Object.</param>
/// <returns><c>true</c> if object was written as handle.</returns>
private bool WriteHandle(long pos, object obj)
{
    if (_hnds == null)
    {
        // Cache absolute handle position.
        _hnds = new PortableHandleDictionary<object, long>(obj, pos);

        return false;
    }

    long hndPos;

    if (!_hnds.TryGetValue(obj, out hndPos))
    {
        // Cache absolute handle position.
        _hnds.Add(obj, pos);

        return false;
    }

    _stream.WriteByte(PortableUtils.HdrHnd);

    // Handle is written as difference between position before header and handle position.
    _stream.WriteInt((int)(pos - hndPos));

    return true;
}

/// <summary>
/// Try invoking predefined handler on object.
/// </summary> /// <param name="typedHandler">Handler</param> /// <param name="untypedHandler">Not typed handler.</param> /// <param name="obj">Object.</param> /// <returns>True if handler was called.</returns> private bool InvokeHandler<T>(object typedHandler, PortableSystemWriteDelegate untypedHandler, T obj) { var typedHandler0 = typedHandler as PortableSystemTypedWriteDelegate<T>; if (typedHandler0 != null) { typedHandler0.Invoke(_stream, obj); return true; } if (untypedHandler != null) { untypedHandler.Invoke(this, obj); return true; } return false; } /// <summary> /// Write simple field with known length. /// </summary> /// <param name="fieldId">Field ID.</param> /// <param name="val">Value.</param> /// <param name="handler">Handler.</param> /// <param name="len">Length.</param> private void WriteSimpleField<T>(int fieldId, T val, PortableSystemTypedWriteDelegate<T> handler, int len) { CheckNotRaw(); _stream.WriteInt(fieldId); _stream.WriteInt(1 + len); // Additional byte for field type. handler(_stream, val); } /// <summary> /// Write simple nullable field with unknown length. /// </summary> /// <param name="fieldId">Field ID.</param> /// <param name="val">Value.</param> /// <param name="handler">Handler.</param> private void WriteSimpleNullableField<T>(int fieldId, T val, PortableSystemTypedWriteDelegate<T> handler) { CheckNotRaw(); _stream.WriteInt(fieldId); if (val == null) { _stream.WriteInt(1); _stream.WriteByte(PortableUtils.HdrNull); } else { int pos = _stream.Position; _stream.Seek(4, SeekOrigin.Current); handler(_stream, val); WriteFieldLength(_stream, pos); } } /// <summary> /// Write simple nullable field with known length. 
/// </summary>
/// <param name="fieldId">Field ID.</param>
/// <param name="val">Value.</param>
/// <param name="handler">Handler.</param>
/// <param name="len">Length.</param>
private void WriteSimpleNullableField<T>(int fieldId, T val, PortableSystemTypedWriteDelegate<T> handler, int len)
{
    CheckNotRaw();

    _stream.WriteInt(fieldId);

    if (val == null)
    {
        // Null value: length 1, single null-header byte.
        _stream.WriteInt(1);

        _stream.WriteByte(PortableUtils.HdrNull);
    }
    else
    {
        // Known length: no back-patching needed.
        _stream.WriteInt(1 + len);

        handler(_stream, val);
    }
}

/// <summary>
/// Write field. Payload length is unknown up front, so the length slot is
/// reserved and patched once the value has been written.
/// </summary>
/// <param name="fieldId">Field ID.</param>
/// <param name="val">Value.</param>
/// <param name="handler">Handler.</param>
private void WriteField(int fieldId, object val, PortableSystemWriteDelegate handler)
{
    CheckNotRaw();

    _stream.WriteInt(fieldId);

    int pos = _stream.Position;

    _stream.Seek(4, SeekOrigin.Current);

    Write(val, handler);

    WriteFieldLength(_stream, pos);
}

/// <summary>
/// Enable detach mode for the next object.
/// </summary>
internal void DetachNext()
{
    // Ignored when we are already inside a detached write.
    if (!_detachMode)
        _detach = true;
}

/// <summary>
/// Stream.
/// </summary>
internal IPortableStream Stream
{
    get { return _stream; }
}

/// <summary>
/// Gets collected metadatas.
/// </summary>
/// <returns>Collected metadatas (if any).</returns>
internal IDictionary<int, IPortableMetadata> Metadata()
{
    return _metas;
}

/// <summary>
/// Check whether the given object is portable, i.e. it can be
/// serialized with portable marshaller.
/// </summary>
/// <param name="obj">Object.</param>
/// <returns>True if portable.</returns>
internal bool IsPortable(object obj)
{
    if (obj != null)
    {
        Type type = obj.GetType();

        // We assume object as portable only in case it has descriptor.
        // Collections, Enums and non-primitive arrays do not have descriptors
        // and this is fine here because we cannot know whether their members
        // are portable.
        return _marsh.Descriptor(type) != null;
    }

    // Null is always considered portable.
    return true;
}

/// <summary>
/// Write simple field with known length.
/// </summary>
/// <param name="fieldName">Field name.</param>
/// <param name="typeId">Type ID.</param>
/// <param name="val">Value.</param>
/// <param name="handler">Handler.</param>
/// <param name="len">Length.</param>
private void WriteSimpleField<T>(string fieldName, byte typeId, T val,
    PortableSystemTypedWriteDelegate<T> handler, int len)
{
    // Resolve the numeric field ID from the name, then delegate to the ID-based overload.
    int fieldId = PortableUtils.FieldId(_curTypeId, fieldName, _curConverter, _curMapper);

    WriteSimpleField(fieldId, val, handler, len);

    // Record the field in the metadata handler for this session, if any.
    if (_curMetaHnd != null)
        _curMetaHnd.OnFieldWrite(fieldId, fieldName, typeId);
}

/// <summary>
/// Write simple nullable field with unknown length.
/// </summary>
/// <param name="fieldName">Field name.</param>
/// <param name="typeId">Type ID.</param>
/// <param name="val">Value.</param>
/// <param name="handler">Handler.</param>
private void WriteSimpleNullableField<T>(string fieldName, byte typeId, T val,
    PortableSystemTypedWriteDelegate<T> handler)
{
    // Resolve the numeric field ID from the name, then delegate to the ID-based overload.
    int fieldId = PortableUtils.FieldId(_curTypeId, fieldName, _curConverter, _curMapper);

    WriteSimpleNullableField(fieldId, val, handler);

    // Record the field in the metadata handler for this session, if any.
    if (_curMetaHnd != null)
        _curMetaHnd.OnFieldWrite(fieldId, fieldName, typeId);
}

/// <summary>
/// Write simple nullable field with known length.
/// </summary>
/// <param name="fieldName">Field name.</param>
/// <param name="typeId">Type ID.</param>
/// <param name="val">Value.</param>
/// <param name="handler">Handler.</param>
/// <param name="len">Length.</param>
private void WriteSimpleNullableField<T>(string fieldName, byte typeId, T val,
    PortableSystemTypedWriteDelegate<T> handler, int len)
{
    // Resolve the numeric field ID from the name, then delegate to the ID-based overload.
    int fieldId = PortableUtils.FieldId(_curTypeId, fieldName, _curConverter, _curMapper);

    WriteSimpleNullableField(fieldId, val, handler, len);

    // Record the field in the metadata handler for this session, if any.
    if (_curMetaHnd != null)
        _curMetaHnd.OnFieldWrite(fieldId, fieldName, typeId);
}

/// <summary>
/// Write nullable raw field.
/// </summary>
/// <param name="val">Value.</param>
/// <param name="handler">Handler.</param>
private void WriteSimpleNullableRawField<T>(T val, PortableSystemTypedWriteDelegate<T> handler)
{
    // Raw fields carry no field ID or length prefix: just a null header or the payload.
    if (val == null)
        _stream.WriteByte(PortableUtils.HdrNull);
    else
        handler(_stream, val);
}

/// <summary>
/// Write field.
/// </summary>
/// <param name="fieldName">Field name.</param>
/// <param name="typeId">Type ID.</param>
/// <param name="val">Value.</param>
/// <param name="handler">Handler.</param>
private void WriteField(string fieldName, byte typeId, object val, PortableSystemWriteDelegate handler)
{
    // Resolve the numeric field ID from the name, then delegate to the ID-based overload.
    int fieldId = PortableUtils.FieldId(_curTypeId, fieldName, _curConverter, _curMapper);

    WriteField(fieldId, val, handler);

    // Record the field in the metadata handler for this session, if any.
    if (_curMetaHnd != null)
        _curMetaHnd.OnFieldWrite(fieldId, fieldName, typeId);
}

/// <summary>
/// Write field length. Patches the 4-byte length slot reserved at <paramref name="pos"/>
/// with the number of bytes written since, then restores the stream position.
/// </summary>
/// <param name="stream">Stream.</param>
/// <param name="pos">Position where length should reside.</param>
private static void WriteFieldLength(IPortableStream stream, int pos)
{
    int retPos = stream.Position;

    stream.Seek(pos, SeekOrigin.Begin);

    // Length excludes the 4 bytes of the length slot itself.
    stream.WriteInt(retPos - pos - 4);

    stream.Seek(retPos, SeekOrigin.Begin);
}

/// <summary>
/// Ensure that we are not in raw mode.
/// </summary>
private void CheckNotRaw()
{
    if (_curRaw)
        throw new PortableException("Cannot write named fields after raw data is written.");
}

/// <summary>
/// Saves metadata for this session.
/// </summary>
/// <param name="typeId">Type ID.</param>
/// <param name="typeName">Type name.</param>
/// <param name="affKeyFieldName">Affinity key field name.</param>
/// <param name="fields">Fields metadata.</param>
internal void SaveMetadata(int typeId, string typeName, string affKeyFieldName, IDictionary<string, int> fields)
{
    if (_metas == null)
    {
        // First metadata recorded in this session: create the map lazily.
        PortableMetadataImpl meta = new PortableMetadataImpl(typeId, typeName, fields, affKeyFieldName);

        _metas = new Dictionary<int, IPortableMetadata>(1);

        _metas[typeId] = meta;
    }
    else
    {
        IPortableMetadata meta;

        if (_metas.TryGetValue(typeId, out meta))
        {
            // Type already known: merge in only the fields we have not seen yet.
            IDictionary<string, int> existingFields = ((PortableMetadataImpl)meta).FieldsMap();

            foreach (KeyValuePair<string, int> field in fields)
            {
                if (!existingFields.ContainsKey(field.Key))
                    existingFields[field.Key] = field.Value;
            }
        }
        else
            _metas[typeId] = new PortableMetadataImpl(typeId, typeName, fields, affKeyFieldName);
    }
}
}
}
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Text;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Input;
using Topics.Radical.Windows.Controls.Automation.Peers;

namespace Topics.Radical.Windows.Controls
{
    /// <summary>
    /// A <see cref="ContentControl"/> whose content can be resized by the user via a
    /// grip element (template part <c>PART_Grip</c>). Resizing is driven by four
    /// routed commands: start, update, end and auto-size.
    /// </summary>
    [TemplatePart(Name = Resizer._gripName, Type = typeof(FrameworkElement))]
    public class Resizer : ContentControl
    {
        // Element used as the coordinate context for the current resize operation;
        // non-null only while a resize is in progress.
        private FrameworkElement _frameworkElement;
        // Screen coordinates of the pointer when the resize started.
        private Point _resizeOrigin;
        // Size of the control when the resize started; deltas are applied to these.
        private double _originalWidth;
        private double _originalHeight;
        private static RoutedCommand _startResizeCommand;
        private static RoutedCommand _updateSizeCommand;
        private static RoutedCommand _endResizeCommand;
        private static RoutedCommand _autoSizeCommand;

        /// <summary>
        /// Identifies the <see cref="IsGripEnabled"/> dependency property.
        /// </summary>
        public static readonly DependencyProperty IsGripEnabledProperty = DependencyProperty.Register("IsGripEnabled",
            typeof(bool),
            typeof(Resizer),
            new FrameworkPropertyMetadata(true));

        /// <summary>
        /// Identifies the <see cref="IsGripVisible"/> dependency property.
        /// </summary>
        public static readonly DependencyProperty IsGripVisibleProperty = DependencyProperty.Register("IsGripVisible",
            typeof(bool),
            typeof(Resizer),
            new FrameworkPropertyMetadata(true, IsGripVisible_Changed));

        /// <summary>
        /// Identifies the <see cref="IsAutoSizeEnabled"/> dependency property.
        /// </summary>
        public static readonly DependencyProperty IsAutoSizeEnabledProperty = DependencyProperty.Register("IsAutoSizeEnabled",
            typeof(bool),
            typeof(Resizer),
            new FrameworkPropertyMetadata(true));

        /// <summary>
        /// Identifies the <see cref="ResizeDirection"/> dependency property.
        /// </summary>
        public static readonly DependencyProperty ResizeDirectionProperty = DependencyProperty.Register("ResizeDirection",
            typeof(ResizeDirection),
            typeof(Resizer),
            new FrameworkPropertyMetadata(ResizeDirection.SouthEast));

        private const string _gripName = "PART_Grip";

        /// <summary>
        /// Gets or sets a value indicating whether the grip is enabled.
        /// </summary>
        public bool IsGripEnabled
        {
            get { return (bool) GetValue(IsGripEnabledProperty); }
            set { SetValue(IsGripEnabledProperty, value); }
        }

        /// <summary>
        /// Gets or sets a value indicating whether the grip is visible.
        /// </summary>
        public bool IsGripVisible
        {
            get { return (bool) GetValue(IsGripVisibleProperty); }
            set { SetValue(IsGripVisibleProperty, value); }
        }

        /// <summary>
        /// Gets or sets a value indicating whether the <c>Resizer</c> can be auto-sized (by double-clicking on the grip in the default template).
        /// </summary>
        public bool IsAutoSizeEnabled
        {
            get { return (bool) GetValue(IsAutoSizeEnabledProperty); }
            set { SetValue(IsAutoSizeEnabledProperty, value); }
        }

        /// <summary>
        /// Gets or sets a value indicating the direction in which resizing takes place.
        /// </summary>
        public ResizeDirection ResizeDirection
        {
            get { return (ResizeDirection) GetValue(ResizeDirectionProperty); }
            set { SetValue(ResizeDirectionProperty, value); }
        }

        /// <summary>
        /// Gets the command used to start a resize operation.
        /// </summary>
        /// <remarks>
        /// The parameter passed to the command must be a <see cref="FrameworkElement"/>, which is used as a context for the resizing operation.
        /// </remarks>
        public static RoutedCommand StartResizeCommand
        {
            get { return _startResizeCommand; }
        }

        /// <summary>
        /// Gets the command used to update the size of the <c>Resizer</c>.
        /// </summary>
        public static RoutedCommand UpdateSizeCommand
        {
            get { return _updateSizeCommand; }
        }

        /// <summary>
        /// Gets the command used to end a resize operation.
        /// </summary>
        public static RoutedCommand EndResizeCommand
        {
            get { return _endResizeCommand; }
        }

        /// <summary>
        /// Gets the command used to automatically size the <c>Resizer</c> according to its content.
        /// </summary>
        public static RoutedCommand AutoSizeCommand
        {
            get { return _autoSizeCommand; }
        }

        static Resizer()
        {
            DefaultStyleKeyProperty.OverrideMetadata(typeof(Resizer), new FrameworkPropertyMetadata(typeof(Resizer)));
            HorizontalContentAlignmentProperty.OverrideMetadata(typeof(Resizer), new FrameworkPropertyMetadata(HorizontalAlignment.Stretch));
            VerticalContentAlignmentProperty.OverrideMetadata(typeof(Resizer), new FrameworkPropertyMetadata(VerticalAlignment.Stretch));

            //hook up commands
            _startResizeCommand = new RoutedCommand("StartResize", typeof(Resizer));
            CommandManager.RegisterClassCommandBinding(typeof(Resizer), new CommandBinding(_startResizeCommand, OnStartResizeCommand));
            _updateSizeCommand = new RoutedCommand("UpdateSize", typeof(Resizer));
            CommandManager.RegisterClassCommandBinding(typeof(Resizer), new CommandBinding(_updateSizeCommand, OnUpdateSizeCommand));
            _endResizeCommand = new RoutedCommand("EndResize", typeof(Resizer));
            CommandManager.RegisterClassCommandBinding(typeof(Resizer), new CommandBinding(_endResizeCommand, OnEndResizeCommand));
            _autoSizeCommand = new RoutedCommand("AutoSize", typeof(Resizer));
            CommandManager.RegisterClassCommandBinding(typeof(Resizer), new CommandBinding(_autoSizeCommand, OnAutoSizeCommand));
        }

        /// <summary>
        /// Creates the automation peer for this control.
        /// </summary>
        protected override System.Windows.Automation.Peers.AutomationPeer OnCreateAutomationPeer()
        {
            return new ResizerAutomationPeer(this);
        }

        /// <summary>
        /// Property-changed callback for <see cref="IsGripVisibleProperty"/>: toggles the
        /// visibility of the grip template part.
        /// </summary>
        private static void IsGripVisible_Changed(object sender, DependencyPropertyChangedEventArgs e)
        {
            Resizer resizer = sender as Resizer;
            Debug.Assert(resizer != null);

            // FIX: the property can change before the control's template has been
            // applied, in which case Template is null and FindName would throw.
            if (resizer.Template == null)
            {
                return;
            }

            FrameworkElement grip = resizer.Template.FindName(_gripName, resizer) as FrameworkElement;

            if (grip != null)
            {
                grip.Visibility = resizer.IsGripVisible ? Visibility.Visible : Visibility.Hidden;
            }
        }

        /// <summary>
        /// Begins a resize operation: records the pointer origin (in screen coordinates)
        /// and the control's current size.
        /// </summary>
        /// <exception cref="InvalidOperationException">The command parameter is not a <see cref="FrameworkElement"/>.</exception>
        private static void OnStartResizeCommand(object sender, ExecutedRoutedEventArgs e)
        {
            Resizer resizer = sender as Resizer;
            Debug.Assert(resizer != null);

            resizer._frameworkElement = e.Parameter as FrameworkElement;

            if (resizer._frameworkElement == null)
            {
                throw new InvalidOperationException("Parameter to this command must be a FrameworkElement (normally the control being used to represent the Grip).");
            }

            resizer._resizeOrigin = resizer._frameworkElement.PointToScreen(Mouse.GetPosition(resizer._frameworkElement));
            resizer._originalWidth = resizer.ActualWidth;
            resizer._originalHeight = resizer.ActualHeight;
            e.Handled = true;
        }

        /// <summary>
        /// Updates the control size during a resize operation, interpreting the pointer
        /// delta according to <see cref="ResizeDirection"/>.
        /// </summary>
        private static void OnUpdateSizeCommand(object sender, ExecutedRoutedEventArgs e)
        {
            Resizer resizer = sender as Resizer;
            Debug.Assert(resizer != null);

            // Ignore updates when no resize operation is in progress.
            if (resizer._frameworkElement != null)
            {
                Point point = resizer._frameworkElement.PointToScreen(Mouse.GetPosition(resizer._frameworkElement));
                double widthDelta = 0;
                double heightDelta = 0;

                switch (resizer.ResizeDirection)
                {
                    case ResizeDirection.NorthEast:
                        widthDelta = point.X - resizer._resizeOrigin.X;
                        heightDelta = resizer._resizeOrigin.Y - point.Y;
                        break;
                    case ResizeDirection.NorthWest:
                        widthDelta = resizer._resizeOrigin.X - point.X;
                        heightDelta = resizer._resizeOrigin.Y - point.Y;
                        break;
                    case ResizeDirection.SouthEast:
                        widthDelta = point.X - resizer._resizeOrigin.X;
                        heightDelta = point.Y - resizer._resizeOrigin.Y;
                        break;
                    case ResizeDirection.SouthWest:
                        widthDelta = resizer._resizeOrigin.X - point.X;
                        heightDelta = point.Y - resizer._resizeOrigin.Y;
                        break;
                    default:
                        Debug.Fail("Unexpected ResizeDirection: " + resizer.ResizeDirection);
                        break;
                }

                //update the width and height, making sure we don't set to below zero
                resizer.Width = Math.Max(0, resizer._originalWidth + widthDelta);
                resizer.Height = Math.Max(0, resizer._originalHeight + heightDelta);
            }

            e.Handled = true;
        }

        /// <summary>
        /// Ends the current resize operation by releasing the context element.
        /// </summary>
        private static void OnEndResizeCommand(object sender, ExecutedRoutedEventArgs e)
        {
            Resizer resizer = sender as Resizer;
            Debug.Assert(resizer != null);

            if (resizer._frameworkElement != null)
            {
                resizer._frameworkElement = null;
            }

            e.Handled = true;
        }

        /// <summary>
        /// Reverts the control to automatic sizing (Width/Height = NaN), if enabled.
        /// </summary>
        private static void OnAutoSizeCommand(object sender, ExecutedRoutedEventArgs e)
        {
            Resizer resizer = sender as Resizer;

            if (resizer != null && resizer.IsAutoSizeEnabled)
            {
                resizer.Width = double.NaN;
                resizer.Height = double.NaN;
                e.Handled = true;
            }
        }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System;
using System.Collections;
using System.Xml;

using Microsoft.Build.BuildEngine.Shared;

using error = Microsoft.Build.BuildEngine.Shared.ErrorUtilities;

namespace Microsoft.Build.BuildEngine
{
    /// <summary>
    /// Class representing a When block (also used to represent the Otherwise
    /// block on a Choose).
    /// </summary>
    internal class When
    {
        #region Member Data

        // Selects whether this instance parses a <When> or an <Otherwise> element.
        public enum Options
        {
            ProcessWhen,
            ProcessOtherwise,
        };

        // Property groups, item groups and nested Chooses contained in this block.
        private GroupingCollection propertyAndItemLists = null;
        private Project parentProject = null;

        // This is the "Condition" attribute on the <When> element (null for <Otherwise>).
        private XmlAttribute conditionAttribute = null;

        #endregion

        #region Constructors

        /// <summary>
        /// Constructor for the When block. Parses the contents of the When block (property
        /// groups, item groups, and nested chooses) and stores them.
        /// </summary>
        /// <remarks>
        /// For a &lt;When&gt; element the Condition attribute is required; an
        /// &lt;Otherwise&gt; element must carry no attributes at all.
        /// </remarks>
        /// <owner>DavidLe</owner>
        /// <param name="parentProject"></param>
        /// <param name="parentGroupingCollection"></param>
        /// <param name="whenElement"></param>
        /// <param name="importedFromAnotherProject"></param>
        /// <param name="options">Whether to treat the element as a When or an Otherwise.</param>
        /// <param name="nestingDepth">stack overflow guard</param>
        internal When(
            Project parentProject,
            GroupingCollection parentGroupingCollection,
            XmlElement whenElement,
            bool importedFromAnotherProject,
            Options options,
            int nestingDepth
        )
        {
            // Make sure the <When> node has been given to us.
            error.VerifyThrow(whenElement != null, "Need valid (non-null) <When> element.");

            // Make sure this really is the <When> node.
            error.VerifyThrow(whenElement.Name == XMakeElements.when || whenElement.Name == XMakeElements.otherwise,
                "Expected <{0}> or <{1}> element; received <{2}> element.",
                XMakeElements.when, XMakeElements.otherwise, whenElement.Name);

            this.propertyAndItemLists = new GroupingCollection(parentGroupingCollection);
            this.parentProject = parentProject;

            string elementName = ((options == Options.ProcessWhen) ? XMakeElements.when : XMakeElements.otherwise);

            if (options == Options.ProcessWhen)
            {
                // <When> requires a Condition attribute and allows nothing else.
                conditionAttribute = ProjectXmlUtilities.GetConditionAttribute(whenElement, /*verify sole attribute*/ true);
                ProjectErrorUtilities.VerifyThrowInvalidProject(conditionAttribute != null, whenElement, "MissingCondition", XMakeElements.when);
            }
            else
            {
                // <Otherwise> must not carry any attributes.
                ProjectXmlUtilities.VerifyThrowProjectNoAttributes(whenElement);
            }

            ProcessWhenChildren(whenElement, parentProject, importedFromAnotherProject, nestingDepth);
        }

        #endregion

        #region Properties

        /// <summary>
        /// Property containing the condition for the When clause.
        /// Empty for an Otherwise clause (which has no condition).
        /// </summary>
        /// <remarks>
        /// </remarks>
        /// <owner>DavidLe</owner>
        /// <returns>string</returns>
        internal string Condition
        {
            get { return (this.conditionAttribute == null) ? String.Empty : this.conditionAttribute.Value; }
        }

        /// <summary>
        /// Property containing the raw Condition XML attribute for the When clause
        /// (null for an Otherwise clause).
        /// </summary>
        /// <remarks>
        /// </remarks>
        /// <owner>DavidLe</owner>
        /// <returns>XmlAttribute</returns>
        internal XmlAttribute ConditionAttribute
        {
            get { return this.conditionAttribute; }
        }

        #endregion

        /// <summary>
        /// The collection of all sub-groups (item/property groups and chooses) inside this When
        /// </summary>
        internal GroupingCollection PropertyAndItemLists
        {
            get { return this.propertyAndItemLists; }
        }

        #region Methods

        /// <summary>
        /// Helper method for processing the children of a When. Only parses Choose,
        /// PropertyGroup, and ItemGroup. All other tags result in an error.
        /// </summary>
        /// <remarks>
        /// </remarks>
        /// <owner>DavidLe</owner>
        /// <param name="parentNode"></param>
        /// <param name="parentProjectForChildren"></param>
        /// <param name="importedFromAnotherProject"></param>
        /// <param name="nestingDepth">Number of parent &lt;Choose&gt; elements this is nested inside</param>
        private void ProcessWhenChildren
        (
            XmlElement parentNode, Project parentProjectForChildren, bool importedFromAnotherProject,
            int nestingDepth
        )
        {
            // Loop through the child nodes of the <When> element.
            foreach (XmlNode whenChildNode in parentNode)
            {
                switch (whenChildNode.NodeType)
                {
                    // Handle XML comments under the <When> node (just ignore them).
                    case XmlNodeType.Comment:
                    // fall through
                    case XmlNodeType.Whitespace:
                        // ignore whitespace
                        break;

                    case XmlNodeType.Element:
                        {
                            // Make sure this element doesn't have a custom namespace
                            ProjectXmlUtilities.VerifyThrowProjectValidNamespace((XmlElement)whenChildNode);

                            // The only three types of child nodes that a <When> element can contain
                            // are <PropertyGroup>, <ItemGroup> and <Choose>.
                            switch (whenChildNode.Name)
                            {
                                case XMakeElements.itemGroup:
                                    BuildItemGroup newItemGroup = new BuildItemGroup((XmlElement)whenChildNode, importedFromAnotherProject, parentProjectForChildren);
                                    this.propertyAndItemLists.InsertAtEnd(newItemGroup);
                                    break;

                                // Process the <PropertyGroup> element.
                                case XMakeElements.propertyGroup:
                                    BuildPropertyGroup newPropertyGroup = new BuildPropertyGroup(parentProjectForChildren, (XmlElement)whenChildNode, importedFromAnotherProject);
                                    newPropertyGroup.EnsureNoReservedProperties();
                                    this.propertyAndItemLists.InsertAtEnd(newPropertyGroup);
                                    break;

                                // Process the <Choose> element.
                                case XMakeElements.choose:
                                    Choose newChoose = new Choose(parentProjectForChildren, this.PropertyAndItemLists, (XmlElement)whenChildNode,
                                        importedFromAnotherProject, nestingDepth);
                                    this.propertyAndItemLists.InsertAtEnd(newChoose);
                                    break;

                                default:
                                    {
                                        // Any other element is invalid inside a <When>.
                                        ProjectXmlUtilities.ThrowProjectInvalidChildElement(whenChildNode);
                                        break;
                                    }
                            }
                        }
                        break;

                    default:
                        {
                            // Any other node type (text, CDATA, etc.) is invalid inside a <When>.
                            ProjectXmlUtilities.ThrowProjectInvalidChildElement(whenChildNode);
                            break;
                        }
                }
            }
        }

        /// <summary>
        /// Evaluates the condition on this When clause. An Otherwise clause (no condition)
        /// always evaluates to true. Returns true if the When clause applies
        /// (because the condition is true), false otherwise.
        /// </summary>
        /// <remarks>
        /// </remarks>
        /// <owner>DavidLe</owner>
        /// <param name="parentPropertyBag"></param>
        /// <param name="conditionedPropertiesTable"></param>
        /// <returns>bool</returns>
        internal bool EvaluateCondition
        (
            BuildPropertyGroup parentPropertyBag,
            Hashtable conditionedPropertiesTable
        )
        {
            if (
                    (this.Condition != null)
                    &&
                    !Utilities.EvaluateCondition(this.Condition, this.ConditionAttribute,
                        new Expander(parentPropertyBag, parentProject.EvaluatedItemsByName), conditionedPropertiesTable,
                        ParserOptions.AllowProperties, this.parentProject.ParentEngine.LoggingServices, this.parentProject.ProjectBuildEventContext)
                )
            {
                return false;
            }

            return true;
        }

        /// <summary>
        /// Applies the contents of this When clause for the given pass: property groups
        /// on pass 1, item groups on pass 2, and nested Chooses on either pass.
        /// The caller is expected to have already checked <see cref="EvaluateCondition"/>.
        /// </summary>
        /// <remarks>
        /// </remarks>
        /// <owner>DavidLe</owner>
        /// <param name="parentPropertyBag"></param>
        /// <param name="ignoreCondition"></param>
        /// <param name="honorCondition"></param>
        /// <param name="conditionedPropertiesTable"></param>
        /// <param name="pass"></param>
        internal void Evaluate
        (
            BuildPropertyGroup parentPropertyBag,
            bool ignoreCondition,
            bool honorCondition,
            Hashtable conditionedPropertiesTable,
            ProcessingPass pass
        )
        {
            foreach (IItemPropertyGrouping propOrItem in this.propertyAndItemLists)
            {
                // This is where we selectively evaluate PropertyGroups or Itemgroups during their respective passes.
                // Once we go to a one-pass model, we'll simple spin through all the children and evaluate.
                if (propOrItem is BuildPropertyGroup &&
                    pass == ProcessingPass.Pass1)
                {
                    ((BuildPropertyGroup) propOrItem).Evaluate(parentPropertyBag, conditionedPropertiesTable, pass);
                }
                else if (propOrItem is BuildItemGroup &&
                    pass == ProcessingPass.Pass2)
                {
                    ((BuildItemGroup) propOrItem).Evaluate(parentPropertyBag, parentProject.EvaluatedItemsByName, ignoreCondition, honorCondition, pass);
                }
                else if (propOrItem is Choose)
                {
                    ((Choose) propOrItem).Evaluate(parentPropertyBag, ignoreCondition, honorCondition, conditionedPropertiesTable, pass);
                }
            }
        }
        #endregion
    }
}
// Copyright 2011 The Noda Time Authors. All rights reserved.
// Use of this source code is governed by the Apache License 2.0,
// as found in the LICENSE.txt file.

using System.Globalization;
using NodaTime.Text;
using NUnit.Framework;

namespace NodaTime.Test.Text
{
    /// <summary>
    /// Tests for <see cref="ValueCursor"/>: matching, digit parsing, Int64 parsing
    /// and ordinal comparison. Uses the shared cursor behavior tests from
    /// <see cref="TextCursorTestBase"/>.
    /// </summary>
    [TestFixture, Category("Formatting"), Category("Parse")]
    public class ValueCursorTest : TextCursorTestBase
    {
        internal override TextCursor MakeCursor(string value)
        {
            return new ValueCursor(value);
        }

        [Test]
        public void Match_Char()
        {
            var value = new ValueCursor("abc");
            Assert.True(value.MoveNext(), "GetNext() 1");
            Assert.True(value.Match('a'), "First character");
            Assert.True(value.Match('b'), "Second character");
            Assert.True(value.Match('c'), "Third character");
            Assert.False(value.MoveNext(), "GetNext() end");
        }

        [Test]
        public void Match_String()
        {
            var value = new ValueCursor("abc");
            Assert.True(value.MoveNext(), "GetNext() 1");
            Assert.True(value.Match("abc"));
            Assert.False(value.MoveNext(), "GetNext() end");
        }

        [Test]
        public void Match_StringNotMatched()
        {
            var value = new ValueCursor("xabcdef");
            Assert.True(value.MoveNext(), "GetNext() 1");
            Assert.False(value.Match("abc"));
            ValidateCurrentCharacter(value, 0, 'x');
        }

        [Test]
        public void Match_StringOverLongStringToMatch()
        {
            var value = new ValueCursor("x");
            Assert.True(value.MoveNext());
            Assert.False(value.Match("long string"));
            ValidateCurrentCharacter(value, 0, 'x');
        }

        [Test]
        public void MatchCaseInsensitive_MatchAndMove()
        {
            var value = new ValueCursor("abcd");
            Assert.True(value.MoveNext(), "GetNext() 1");
            Assert.True(value.MatchCaseInsensitive("AbC", CultureInfo.InvariantCulture.CompareInfo, true));
            ValidateCurrentCharacter(value, 3, 'd');
        }

        [Test]
        public void MatchCaseInsensitive_MatchWithoutMoving()
        {
            var value = new ValueCursor("abcd");
            Assert.True(value.MoveNext(), "GetNext() 1");
            Assert.True(value.MatchCaseInsensitive("AbC", CultureInfo.InvariantCulture.CompareInfo, false));
            // We're still looking at the start
            ValidateCurrentCharacter(value, 0, 'a');
        }

        [Test]
        public void MatchCaseInsensitive_StringNotMatched()
        {
            var value = new ValueCursor("xabcdef");
            Assert.True(value.MoveNext(), "GetNext() 1");
            Assert.False(value.MatchCaseInsensitive("abc", CultureInfo.InvariantCulture.CompareInfo, true));
            ValidateCurrentCharacter(value, 0, 'x');
        }

        [Test]
        public void MatchCaseInsensitive_StringOverLongStringToMatch()
        {
            var value = new ValueCursor("x");
            Assert.True(value.MoveNext());
            Assert.False(value.MatchCaseInsensitive("long string", CultureInfo.InvariantCulture.CompareInfo, true));
            ValidateCurrentCharacter(value, 0, 'x');
        }

        [Test]
        public void Match_StringPartial()
        {
            var value = new ValueCursor("abcdef");
            Assert.True(value.MoveNext(), "GetNext() 1");
            Assert.True(value.Match("abc"));
            ValidateCurrentCharacter(value, 3, 'd');
        }

        [Test]
        public void ParseDigits_TooFewDigits()
        {
            var value = new ValueCursor("a12b");
            Assert.True(value.MoveNext());
            ValidateCurrentCharacter(value, 0, 'a');
            Assert.True(value.MoveNext());
            Assert.False(value.ParseDigits(3, 3, out _));
            ValidateCurrentCharacter(value, 1, '1');
        }

        [Test]
        public void ParseDigits_NoNumber()
        {
            var value = new ValueCursor("abc");
            Assert.True(value.MoveNext());
            Assert.False(value.ParseDigits(1, 2, out _));
            ValidateCurrentCharacter(value, 0, 'a');
        }

        [Test]
        public void ParseDigits_Maximum()
        {
            var value = new ValueCursor("12");
            Assert.True(value.MoveNext());
            Assert.True(value.ParseDigits(1, 2, out int actual));
            Assert.AreEqual(12, actual);
        }

        [Test]
        public void ParseDigits_MaximumMoreDigits()
        {
            var value = new ValueCursor("1234");
            Assert.True(value.MoveNext());
            Assert.True(value.ParseDigits(1, 2, out int actual));
            Assert.AreEqual(12, actual);
            ValidateCurrentCharacter(value, 2, '3');
        }

        [Test]
        public void ParseDigits_Minimum()
        {
            var value = new ValueCursor("1");
            value.MoveNext();
            Assert.True(value.ParseDigits(1, 2, out int actual));
            Assert.AreEqual(1, actual);
            ValidateEndOfString(value);
        }

        [Test]
        public void ParseDigits_MinimumNonDigits()
        {
            var value = new ValueCursor("1abc");
            Assert.True(value.MoveNext());
            Assert.True(value.ParseDigits(1, 2, out int actual));
            Assert.AreEqual(1, actual);
            ValidateCurrentCharacter(value, 1, 'a');
        }

        [Test]
        public void ParseDigits_NonAscii_NeverMatches()
        {
            // Arabic-Indic digits 0 and 1. See
            // http://www.unicode.org/charts/PDF/U0600.pdf
            var value = new ValueCursor("\u0660\u0661");
            Assert.True(value.MoveNext());
            Assert.False(value.ParseDigits(1, 2, out _));
        }

        [Test]
        public void ParseInt64Digits_TooFewDigits()
        {
            var value = new ValueCursor("a12b");
            Assert.True(value.MoveNext());
            ValidateCurrentCharacter(value, 0, 'a');
            Assert.True(value.MoveNext());
            Assert.False(value.ParseInt64Digits(3, 3, out _));
            ValidateCurrentCharacter(value, 1, '1');
        }

        [Test]
        public void ParseInt64Digits_NoNumber()
        {
            var value = new ValueCursor("abc");
            Assert.True(value.MoveNext());
            Assert.False(value.ParseInt64Digits(1, 2, out _));
            ValidateCurrentCharacter(value, 0, 'a');
        }

        [Test]
        public void ParseInt64Digits_Maximum()
        {
            var value = new ValueCursor("12");
            Assert.True(value.MoveNext());
            Assert.True(value.ParseInt64Digits(1, 2, out long actual));
            Assert.AreEqual(12, actual);
        }

        [Test]
        public void ParseInt64Digits_MaximumMoreDigits()
        {
            var value = new ValueCursor("1234");
            Assert.True(value.MoveNext());
            Assert.True(value.ParseInt64Digits(1, 2, out long actual));
            Assert.AreEqual(12, actual);
            ValidateCurrentCharacter(value, 2, '3');
        }

        [Test]
        public void ParseInt64Digits_Minimum()
        {
            var value = new ValueCursor("1");
            value.MoveNext();
            Assert.True(value.ParseInt64Digits(1, 2, out long actual));
            Assert.AreEqual(1, actual);
            ValidateEndOfString(value);
        }

        [Test]
        public void ParseInt64Digits_MinimumNonDigits()
        {
            var value = new ValueCursor("1abc");
            Assert.True(value.MoveNext());
            Assert.True(value.ParseInt64Digits(1, 2, out long actual));
            Assert.AreEqual(1, actual);
            ValidateCurrentCharacter(value, 1, 'a');
        }

        [Test]
        public void ParseInt64Digits_NonAscii_NeverMatches()
        {
            // Arabic-Indic digits 0 and 1. See
            // http://www.unicode.org/charts/PDF/U0600.pdf
            var value = new ValueCursor("\u0660\u0661");
            Assert.True(value.MoveNext());
            Assert.False(value.ParseInt64Digits(1, 2, out _));
        }

        [Test]
        public void ParseInt64Digits_LargeNumber()
        {
            var value = new ValueCursor("9999999999999");
            Assert.True(value.MoveNext());
            Assert.True(value.ParseInt64Digits(1, 13, out long actual));
            // FIX: NUnit's Assert.AreEqual takes (expected, actual); the original
            // call had the arguments swapped, which produces a misleading failure message.
            Assert.AreEqual(9999999999999L, actual);
            // Sanity check that the parsed value really exceeds Int32 range.
            Assert.Greater(9999999999999L, int.MaxValue);
        }

        [Test]
        public void ParseFraction_NonAscii_NeverMatches()
        {
            // Arabic-Indic digits 0 and 1. See
            // http://www.unicode.org/charts/PDF/U0600.pdf
            var value = new ValueCursor("\u0660\u0661");
            Assert.True(value.MoveNext());
            Assert.False(value.ParseFraction(2, 2, out _, 2));
        }

        [Test]
        public void ParseInt64_Simple()
        {
            var value = new ValueCursor("56x");
            Assert.True(value.MoveNext());
            Assert.IsNull(value.ParseInt64<string>(out long result));
            Assert.AreEqual(56L, result);
            // Cursor ends up post-number
            Assert.AreEqual(2, value.Index);
        }

        [Test]
        public void ParseInt64_Negative()
        {
            var value = new ValueCursor("-56x");
            Assert.True(value.MoveNext());
            Assert.IsNull(value.ParseInt64<string>(out long result));
            Assert.AreEqual(-56L, result);
        }

        [Test]
        public void ParseInt64_NonNumber()
        {
            var value = new ValueCursor("xyz");
            Assert.True(value.MoveNext());
            Assert.IsNotNull(value.ParseInt64<string>(out _));
            // Cursor has not moved
            Assert.AreEqual(0, value.Index);
        }

        [Test]
        public void ParseInt64_DoubleNegativeSign()
        {
            var value = new ValueCursor("--10xyz");
            Assert.True(value.MoveNext());
            Assert.IsNotNull(value.ParseInt64<string>(out _));
            // Cursor has not moved
            Assert.AreEqual(0, value.Index);
        }

        [Test]
        public void ParseInt64_NegativeThenNonDigit()
        {
            var value = new ValueCursor("-x");
            Assert.True(value.MoveNext());
            Assert.IsNotNull(value.ParseInt64<string>(out _));
            // Cursor has not moved
            Assert.AreEqual(0, value.Index);
        }

        [Test]
        public void ParseInt64_NumberOutOfRange_LowLeadingDigits()
        {
            var value = new ValueCursor("1000000000000000000000000");
            Assert.True(value.MoveNext());
            Assert.IsNotNull(value.ParseInt64<string>(out _));
            // Cursor has not moved
            Assert.AreEqual(0, value.Index);
        }

        [Test]
        public void ParseInt64_NumberOutOfRange_HighLeadingDigits()
        {
            var value = new ValueCursor("999999999999999999999999");
            Assert.True(value.MoveNext());
            Assert.IsNotNull(value.ParseInt64<string>(out _));
            // Cursor has not moved
            Assert.AreEqual(0, value.Index);
        }

        [Test]
        public void ParseInt64_NumberOutOfRange_MaxValueLeadingDigits()
        {
            var value = new ValueCursor("9223372036854775808");
            Assert.True(value.MoveNext());
            Assert.IsNotNull(value.ParseInt64<string>(out _));
            // Cursor has not moved
            Assert.AreEqual(0, value.Index);
        }

        [Test]
        public void ParseInt64_NumberOutOfRange_MinValueLeadingDigits()
        {
            var value = new ValueCursor("-9223372036854775809");
            Assert.True(value.MoveNext());
            Assert.IsNotNull(value.ParseInt64<string>(out _));
            // Cursor has not moved
            Assert.AreEqual(0, value.Index);
        }

        [Test]
        public void ParseInt64_MaxValue()
        {
            var value = new ValueCursor("9223372036854775807");
            Assert.True(value.MoveNext());
            Assert.IsNull(value.ParseInt64<string>(out long result));
            Assert.AreEqual(long.MaxValue, result);
        }

        [Test]
        public void ParseInt64_MinValue()
        {
            var value = new ValueCursor("-9223372036854775808");
            Assert.True(value.MoveNext());
            Assert.IsNull(value.ParseInt64<string>(out long result));
            Assert.AreEqual(long.MinValue, result);
        }

        [Test]
        public void CompareOrdinal_ExactMatchToEndOfValue()
        {
            var value = new ValueCursor("xabc");
            value.Move(1);
            Assert.AreEqual(0, value.CompareOrdinal("abc"));
            Assert.AreEqual(1, value.Index); // Cursor hasn't moved
        }

        [Test]
        public void CompareOrdinal_ExactMatchValueContinues()
        {
            var value = new ValueCursor("xabc");
            value.Move(1);
            Assert.AreEqual(0, value.CompareOrdinal("ab"));
            Assert.AreEqual(1, value.Index); // Cursor hasn't moved
        }

        [Test]
        public void CompareOrdinal_ValueIsEarlier()
        {
            var value = new ValueCursor("xabc");
            value.Move(1);
            Assert.Less(value.CompareOrdinal("mm"), 0);
            Assert.AreEqual(1, value.Index); // Cursor hasn't moved
        }

        [Test]
        public void CompareOrdinal_ValueIsLater()
        {
            var value = new ValueCursor("xabc");
            value.Move(1);
            Assert.Greater(value.CompareOrdinal("aa"), 0);
            Assert.AreEqual(1, value.Index); // Cursor hasn't moved
        }

        [Test]
        public void CompareOrdinal_LongMatch_EqualToEnd()
        {
            var value = new ValueCursor("xabc");
            value.Move(1);
            Assert.Less(value.CompareOrdinal("abcd"), 0);
            Assert.AreEqual(1, value.Index); // Cursor hasn't moved
        }

        [Test]
        public void CompareOrdinal_LongMatch_ValueIsEarlier()
        {
            var value = new ValueCursor("xabc");
            value.Move(1);
            Assert.Less(value.CompareOrdinal("cccc"), 0);
            Assert.AreEqual(1, value.Index); // Cursor hasn't moved
        }

        [Test]
        public void CompareOrdinal_LongMatch_ValueIsLater()
        {
            var value = new ValueCursor("xabc");
            value.Move(1);
            Assert.Greater(value.CompareOrdinal("aaaa"), 0);
            Assert.AreEqual(1, value.Index); // Cursor hasn't moved
        }

        [Test]
        public void ParseInt64_TooManyDigits()
        {
            // We can cope as far as 9223372036854775807, but the trailing 1 causes a failure.
            var value = new ValueCursor("92233720368547758071");
            value.Move(0);
            var parseResult = value.ParseInt64<string>(out long result);
            Assert.IsFalse(parseResult!.Success);
            Assert.IsInstanceOf<UnparsableValueException>(parseResult.Exception);
            Assert.AreEqual(0, value.Index); // Cursor hasn't moved
        }
    }
}
// (c) Copyright Microsoft Corporation.
// This source is subject to the Microsoft Public License (Ms-PL).
// Please see http://go.microsoft.com/fwlink/?LinkID=131993 for details.
// All other rights reserved.

using Avalonia.Data;
using Avalonia.Interactivity;
using Avalonia.VisualTree;
using Avalonia.Collections;
using Avalonia.Utilities;
using System;
using System.ComponentModel;
using System.Linq;
using System.Diagnostics;
using Avalonia.Controls.Templates;
using Avalonia.Controls.Utils;
using Avalonia.Layout;
using Avalonia.Markup.Xaml.MarkupExtensions;

namespace Avalonia.Controls
{
    /// <summary>
    /// Base class for a column of a <see cref="T:Avalonia.Controls.DataGrid"/>.
    /// Owns the column's sizing state (Width/MinWidth/MaxWidth and their coercion),
    /// its header cell, its display position, and the hooks used by derived column
    /// types to generate display and editing elements for cells.
    /// </summary>
    public abstract class DataGridColumn : AvaloniaObject
    {
        internal const int DATAGRIDCOLUMN_maximumWidth = 65536;
        private const bool DATAGRIDCOLUMN_defaultIsReadOnly = false;

        private DataGridLength? _width; // Null by default, null means inherit the Width from the DataGrid
        private bool? _isReadOnly;
        private double? _maxWidth;
        private double? _minWidth;
        private bool _settingWidthInternally;
        private int _displayIndexWithFiller;
        private object _header;
        private IDataTemplate _headerTemplate;
        private DataGridColumnHeader _headerCell;
        private IControl _editingElement;
        private ICellEditBinding _editBinding;
        private IBinding _clipboardContentBinding;
        private readonly Classes _cellStyleClasses = new Classes();

        /// <summary>
        /// Initializes a new instance of the <see cref="T:Avalonia.Controls.DataGridColumn" /> class.
        /// </summary>
        protected internal DataGridColumn()
        {
            // -1 means "no display position assigned yet"; the grid assigns one when the column is added.
            _displayIndexWithFiller = -1;
            IsInitialDesiredWidthDetermined = false;
            InheritsWidth = true;
        }

        // The grid this column belongs to; null until the column is added to a grid.
        internal DataGrid OwningGrid
        {
            get;
            set;
        }

        // Index of this column within the grid's column collection.
        internal int Index
        {
            get;
            set;
        }

        // Tri-state local overrides; null means "defer to the owning grid's setting".
        internal bool? CanUserReorderInternal
        {
            get;
            set;
        }

        internal bool? CanUserResizeInternal
        {
            get;
            set;
        }

        internal bool? CanUserSortInternal
        {
            get;
            set;
        }

        // Effective resizability: false when unowned, grid disallows resizing,
        // or this is the internal filler column; otherwise local override or true.
        internal bool ActualCanUserResize
        {
            get
            {
                if (OwningGrid == null || OwningGrid.CanUserResizeColumns == false || this is DataGridFillerColumn)
                {
                    return false;
                }
                return CanUserResizeInternal ?? true;
            }
        }

        // MaxWidth from local setting or DataGrid setting
        internal double ActualMaxWidth
        {
            get
            {
                return _maxWidth ?? OwningGrid?.MaxColumnWidth ?? double.PositiveInfinity;
            }
        }

        // MinWidth from local setting or DataGrid setting
        internal double ActualMinWidth
        {
            get
            {
                double minWidth = _minWidth ?? OwningGrid?.MinColumnWidth ?? 0;
                if (Width.IsStar)
                {
                    // Star columns are never allowed to collapse below the grid's star minimum.
                    return Math.Max(DataGrid.DATAGRID_minimumStarColumnWidth, minWidth);
                }
                return minWidth;
            }
        }

        internal bool DisplayIndexHasChanged
        {
            get;
            set;
        }

        // Display index including the hidden row-group spacer column (see DisplayIndex getter).
        internal int DisplayIndexWithFiller
        {
            get
            {
                return _displayIndexWithFiller;
            }
            set
            {
                _displayIndexWithFiller = value;
            }
        }

        internal bool HasHeaderCell
        {
            get
            {
                return _headerCell != null;
            }
        }

        // Lazily creates the header cell on first access.
        internal DataGridColumnHeader HeaderCell
        {
            get
            {
                if (_headerCell == null)
                {
                    _headerCell = CreateHeader();
                }
                return _headerCell;
            }
        }

        /// <summary>
        /// Tracks whether or not this column inherits its Width value from the DataGrid.
        /// </summary>
        internal bool InheritsWidth { get; private set; }

        /// <summary>
        /// When a column is initially added, we won't know its initial desired value
        /// until all rows have been measured.  We use this variable to track whether or
        /// not the column has been fully measured.
        /// </summary>
        internal bool IsInitialDesiredWidthDetermined
        {
            get;
            set;
        }

        // Width after layout rounding; see ComputeLayoutRoundedWidth.
        internal double LayoutRoundedWidth
        {
            get;
            private set;
        }

        internal ICellEditBinding CellEditBinding
        {
            get => _editBinding;
        }

        /// <summary>
        /// Defines the <see cref="IsVisible"/> property.
        /// </summary>
        // NOTE(review): this field is not readonly, unlike typical AvaloniaProperty
        // fields — confirm nothing reassigns it before sealing it with readonly.
        public static StyledProperty<bool> IsVisibleProperty =
            Control.IsVisibleProperty.AddOwner<DataGridColumn>();

        /// <summary>
        /// Determines whether or not this column is visible.
        /// </summary>
        public bool IsVisible
        {
            get => GetValue(IsVisibleProperty);
            set => SetValue(IsVisibleProperty, value);
        }

        protected override void OnPropertyChanged<T>(AvaloniaPropertyChangedEventArgs<T> change)
        {
            base.OnPropertyChanged(change);

            if (change.Property == IsVisibleProperty)
            {
                OwningGrid?.OnColumnVisibleStateChanging(this);
                // Safe cast: the property identity check above guarantees T == bool here.
                var isVisible = (change as AvaloniaPropertyChangedEventArgs<bool>).NewValue.Value;

                if (_headerCell != null)
                {
                    _headerCell.IsVisible = isVisible;
                }

                OwningGrid?.OnColumnVisibleStateChanged(this);
                NotifyPropertyChanged(change.Property.Name);
            }
        }

        /// <summary>
        /// Actual visible width after Width, MinWidth, and MaxWidth setting at the Column level and DataGrid level
        /// have been taken into account
        /// </summary>
        public double ActualWidth
        {
            get
            {
                if (OwningGrid == null || double.IsNaN(Width.DisplayValue))
                {
                    return ActualMinWidth;
                }
                return Width.DisplayValue;
            }
        }

        /// <summary>
        /// Gets or sets a value that indicates whether the user can change the column display position by
        /// dragging the column header.
        /// </summary>
        /// <returns>
        /// true if the user can drag the column header to a new position; otherwise, false. The default is the current <see cref="P:Avalonia.Controls.DataGrid.CanUserReorderColumns" /> property value.
        /// </returns>
        public bool CanUserReorder
        {
            get
            {
                return CanUserReorderInternal ?? OwningGrid?.CanUserReorderColumns ?? DataGrid.DATAGRID_defaultCanUserResizeColumns;
            }
            set
            {
                CanUserReorderInternal = value;
            }
        }

        /// <summary>
        /// Gets or sets a value that indicates whether the user can adjust the column width using the mouse.
        /// </summary>
        /// <returns>
        /// true if the user can resize the column; false if the user cannot resize the column. The default is the current <see cref="P:Avalonia.Controls.DataGrid.CanUserResizeColumns" /> property value.
        /// </returns>
        public bool CanUserResize
        {
            get
            {
                return CanUserResizeInternal ?? OwningGrid?.CanUserResizeColumns ?? DataGrid.DATAGRID_defaultCanUserResizeColumns;
            }
            set
            {
                CanUserResizeInternal = value;
                OwningGrid?.OnColumnCanUserResizeChanged(this);
            }
        }

        /// <summary>
        /// Gets or sets a value that indicates whether the user can sort the column by clicking the column header.
        /// </summary>
        /// <returns>
        /// true if the user can sort the column; false if the user cannot sort the column. The default is the current <see cref="P:Avalonia.Controls.DataGrid.CanUserSortColumns" /> property value.
        /// </returns>
        public bool CanUserSort
        {
            get
            {
                if (CanUserSortInternal.HasValue)
                {
                    return CanUserSortInternal.Value;
                }
                else if (OwningGrid != null)
                {
                    string propertyPath = GetSortPropertyName();
                    Type propertyType = OwningGrid.DataConnection.DataType.GetNestedPropertyType(propertyPath);

                    // if the type is nullable, then we will compare the non-nullable type
                    if (TypeHelper.IsNullableType(propertyType))
                    {
                        propertyType = TypeHelper.GetNonNullableType(propertyType);
                    }

                    // return whether or not the property type can be compared
                    return (typeof(IComparable).IsAssignableFrom(propertyType)) ? true : false;
                }
                else
                {
                    return DataGrid.DATAGRID_defaultCanUserSortColumns;
                }
            }
            set
            {
                CanUserSortInternal = value;
            }
        }

        /// <summary>
        /// Gets or sets the display position of the column relative to the other columns in the <see cref="T:Avalonia.Controls.DataGrid" />.
        /// </summary>
        /// <returns>
        /// The zero-based position of the column as it is displayed in the associated <see cref="T:Avalonia.Controls.DataGrid" />. The default is the index of the corresponding <see cref="P:System.Collections.ObjectModel.Collection`1.Item(System.Int32)" /> in the <see cref="P:Avalonia.Controls.DataGrid.Columns" /> collection.
        /// </returns>
        /// <exception cref="T:System.ArgumentOutOfRangeException">
        /// When setting this property, the specified value is less than -1 or equal to <see cref="F:System.Int32.MaxValue" />.
        ///
        /// -or-
        ///
        /// When setting this property on a column in a <see cref="T:Avalonia.Controls.DataGrid" />, the specified value is less than zero or greater than or equal to the number of columns in the <see cref="T:Avalonia.Controls.DataGrid" />.
        /// </exception>
        /// <exception cref="T:System.InvalidOperationException">
        /// When setting this property, the <see cref="T:Avalonia.Controls.DataGrid" /> is already making <see cref="P:Avalonia.Controls.DataGridColumn.DisplayIndex" /> adjustments. For example, this exception is thrown when you attempt to set <see cref="P:Avalonia.Controls.DataGridColumn.DisplayIndex" /> in a <see cref="E:Avalonia.Controls.DataGrid.ColumnDisplayIndexChanged" /> event handler.
        ///
        /// -or-
        ///
        /// When setting this property, the specified value would result in a frozen column being displayed in the range of unfrozen columns, or an unfrozen column being displayed in the range of frozen columns.
        /// </exception>
        public int DisplayIndex
        {
            get
            {
                // The hidden row-group spacer column occupies slot 0 internally, so the
                // public index is shifted down by one when that column is present.
                if (OwningGrid != null && OwningGrid.ColumnsInternal.RowGroupSpacerColumn.IsRepresented)
                {
                    return _displayIndexWithFiller - 1;
                }
                else
                {
                    return _displayIndexWithFiller;
                }
            }
            set
            {
                if (value == Int32.MaxValue)
                {
                    throw DataGridError.DataGrid.ValueMustBeLessThan(nameof(value), nameof(DisplayIndex), Int32.MaxValue);
                }
                if (OwningGrid != null)
                {
                    if (OwningGrid.ColumnsInternal.RowGroupSpacerColumn.IsRepresented)
                    {
                        // Translate the public index back to the internal (spacer-inclusive) index.
                        value++;
                    }
                    if (_displayIndexWithFiller != value)
                    {
                        if (value < 0 || value >= OwningGrid.ColumnsItemsInternal.Count)
                        {
                            throw DataGridError.DataGrid.ValueMustBeBetween(nameof(value), nameof(DisplayIndex), 0, true, OwningGrid.Columns.Count, false);
                        }
                        // Will throw an error if a visible frozen column is placed inside a non-frozen area or vice-versa.
                        OwningGrid.OnColumnDisplayIndexChanging(this, value);
                        _displayIndexWithFiller = value;
                        try
                        {
                            // Guard flag prevents reentrant DisplayIndex changes from event handlers.
                            OwningGrid.InDisplayIndexAdjustments = true;
                            OwningGrid.OnColumnDisplayIndexChanged(this);
                            OwningGrid.OnColumnDisplayIndexChanged_PostNotification();
                        }
                        finally
                        {
                            OwningGrid.InDisplayIndexAdjustments = false;
                        }
                    }
                }
                else
                {
                    if (value < -1)
                    {
                        throw DataGridError.DataGrid.ValueMustBeGreaterThanOrEqualTo(nameof(value), nameof(DisplayIndex), -1);
                    }
                    _displayIndexWithFiller = value;
                }
            }
        }

        /// <summary>
        /// Style classes applied to every cell of this column. Setting replaces the
        /// contents of the existing collection rather than the collection instance.
        /// </summary>
        public Classes CellStyleClasses
        {
            get => _cellStyleClasses;
            set
            {
                if (_cellStyleClasses != value)
                {
                    _cellStyleClasses.Replace(value);
                }
            }
        }

        /// <summary>
        /// Defines the <see cref="Header"/> property.
        /// </summary>
        public static readonly DirectProperty<DataGridColumn, object> HeaderProperty =
            AvaloniaProperty.RegisterDirect<DataGridColumn, object>(
                nameof(Header),
                o => o.Header,
                (o, v) => o.Header = v);

        /// <summary>
        /// Gets or sets the <see cref="DataGridColumnHeader"/> content
        /// </summary>
        public object Header
        {
            get { return _header; }
            set { SetAndRaise(HeaderProperty, ref _header, value); }
        }

        /// <summary>
        /// Defines the <see cref="HeaderTemplate"/> property.
        /// </summary>
        public static readonly DirectProperty<DataGridColumn, IDataTemplate> HeaderTemplateProperty =
            AvaloniaProperty.RegisterDirect<DataGridColumn, IDataTemplate>(
                nameof(HeaderTemplate),
                o => o.HeaderTemplate,
                (o, v) => o.HeaderTemplate = v);

        /// <summary>
        /// Gets or sets an <see cref="IDataTemplate"/> for the <see cref="Header"/>
        /// </summary>
        public IDataTemplate HeaderTemplate
        {
            get { return _headerTemplate; }
            set { SetAndRaise(HeaderTemplateProperty, ref _headerTemplate, value); }
        }

        public bool IsAutoGenerated
        {
            get;
            internal set;
        }

        public bool IsFrozen
        {
            get;
            internal set;
        }

        /// <summary>
        /// Whether cells of this column can be edited. Resolution order: local value
        /// (OR-ed with the grid's IsReadOnly), then the grid's per-column state,
        /// then the class default.
        /// </summary>
        public bool IsReadOnly
        {
            get
            {
                if (OwningGrid == null)
                {
                    return _isReadOnly ?? DATAGRIDCOLUMN_defaultIsReadOnly;
                }
                if (_isReadOnly != null)
                {
                    return _isReadOnly.Value || OwningGrid.IsReadOnly;
                }
                return OwningGrid.GetColumnReadOnlyState(this, DATAGRIDCOLUMN_defaultIsReadOnly);
            }
            set
            {
                if (value != _isReadOnly)
                {
                    OwningGrid?.OnColumnReadOnlyStateChanging(this, value);
                    _isReadOnly = value;
                }
            }
        }

        /// <summary>
        /// Gets or sets the column's maximum width in pixels.
        /// Must be non-negative and not smaller than the effective minimum width.
        /// </summary>
        public double MaxWidth
        {
            get
            {
                return _maxWidth ?? double.PositiveInfinity;
            }
            set
            {
                if (value < 0)
                {
                    throw DataGridError.DataGrid.ValueMustBeGreaterThanOrEqualTo("value", "MaxWidth", 0);
                }
                if (value < ActualMinWidth)
                {
                    throw DataGridError.DataGrid.ValueMustBeGreaterThanOrEqualTo("value", "MaxWidth", "MinWidth");
                }
                if (!_maxWidth.HasValue || _maxWidth.Value != value)
                {
                    double oldValue = ActualMaxWidth;
                    _maxWidth = value;
                    if (OwningGrid != null && OwningGrid.ColumnsInternal != null)
                    {
                        OwningGrid.OnColumnMaxWidthChanged(this, oldValue);
                    }
                }
            }
        }

        /// <summary>
        /// Gets or sets the column's minimum width in pixels.
        /// Must be a finite, non-negative value no greater than the effective maximum width.
        /// </summary>
        public double MinWidth
        {
            get
            {
                return _minWidth ?? 0;
            }
            set
            {
                if (double.IsNaN(value))
                {
                    throw DataGridError.DataGrid.ValueCannotBeSetToNAN("MinWidth");
                }
                if (value < 0)
                {
                    throw DataGridError.DataGrid.ValueMustBeGreaterThanOrEqualTo("value", "MinWidth", 0);
                }
                if (double.IsPositiveInfinity(value))
                {
                    throw DataGridError.DataGrid.ValueCannotBeSetToInfinity("MinWidth");
                }
                if (value > ActualMaxWidth)
                {
                    throw DataGridError.DataGrid.ValueMustBeLessThanOrEqualTo("value", "MinWidth", "MaxWidth");
                }
                if (!_minWidth.HasValue || _minWidth.Value != value)
                {
                    double oldValue = ActualMinWidth;
                    _minWidth = value;
                    if (OwningGrid != null && OwningGrid.ColumnsInternal != null)
                    {
                        OwningGrid.OnColumnMinWidthChanged(this, oldValue);
                    }
                }
            }
        }

        /// <summary>
        /// Gets or sets the column's width. Falls back to the owning grid's ColumnWidth,
        /// then to Auto, when no local value is set.
        /// </summary>
        public DataGridLength Width
        {
            get
            {
                return _width ??
                    OwningGrid?.ColumnWidth ??
                    // We don't have a good choice here because we don't want to make this property nullable, see DevDiv Bugs 196581
                    DataGridLength.Auto;
            }
            set
            {
                if (!_width.HasValue || _width.Value != value)
                {
                    if (!_settingWidthInternally)
                    {
                        // An explicit user/app assignment breaks inheritance from the grid.
                        InheritsWidth = false;
                    }
                    if (OwningGrid != null)
                    {
                        DataGridLength width = CoerceWidth(value);
                        if (width.IsStar != Width.IsStar)
                        {
                            // If a column has changed either from or to a star value, we want to recalculate all
                            // star column widths.  They are recalculated during Measure based off what the value we set here.
                            SetWidthInternalNoCallback(width);
                            IsInitialDesiredWidthDetermined = false;
                            OwningGrid.OnColumnWidthChanged(this);
                        }
                        else
                        {
                            // If a column width's value is simply changing, we resize it (to the right only).
                            Resize(width.Value, width.UnitType, width.DesiredValue, width.DisplayValue, false);
                        }
                    }
                    else
                    {
                        SetWidthInternalNoCallback(value);
                    }
                }
            }
        }

        /// <summary>
        /// The binding that will be used to get or set cell content for the clipboard.
        /// </summary>
        public virtual IBinding ClipboardContentBinding
        {
            get
            {
                return _clipboardContentBinding;
            }
            set
            {
                _clipboardContentBinding = value;
            }
        }

        /// <summary>
        /// Gets the value of a cell according to the specified binding.
        /// </summary>
        /// <param name="item">The item associated with a cell.</param>
        /// <param name="binding">The binding to get the value of.</param>
        /// <returns>The resultant cell value.</returns>
        internal object GetCellValue(object item, IBinding binding)
        {
            Debug.Assert(OwningGrid != null);

            object content = null;
            if (binding != null)
            {
                // Evaluate the binding through a scratch control owned by the grid,
                // then immediately tear the binding down again.
                OwningGrid.ClipboardContentControl.DataContext = item;
                var sub = OwningGrid.ClipboardContentControl.Bind(ContentControl.ContentProperty, binding);
                content = OwningGrid.ClipboardContentControl.GetValue(ContentControl.ContentProperty);
                sub.Dispose();
            }
            return content;
        }

        /// <summary>
        /// Gets the cell content of this column for the given row, or null if the row
        /// does not belong to the owning grid or has no cell for this column.
        /// </summary>
        public IControl GetCellContent(DataGridRow dataGridRow)
        {
            Contract.Requires<ArgumentNullException>(dataGridRow != null);
            if (OwningGrid == null)
            {
                throw DataGridError.DataGrid.NoOwningGrid(GetType());
            }
            if (dataGridRow.OwningGrid == OwningGrid)
            {
                DataGridCell dataGridCell = dataGridRow.Cells[Index];
                if (dataGridCell != null)
                {
                    return dataGridCell.Content as IControl;
                }
            }
            return null;
        }

        /// <summary>
        /// Gets the cell content of this column for the row displaying the given data
        /// item, or null if the item has no realized row.
        /// </summary>
        public IControl GetCellContent(object dataItem)
        {
            Contract.Requires<ArgumentNullException>(dataItem != null);
            if (OwningGrid == null)
            {
                throw DataGridError.DataGrid.NoOwningGrid(GetType());
            }
            DataGridRow dataGridRow = OwningGrid.GetRowFromItem(dataItem);
            if (dataGridRow == null)
            {
                return null;
            }
            return GetCellContent(dataGridRow);
        }

        /// <summary>
        /// Returns the column which contains the given element
        /// </summary>
        /// <param name="element">element contained in a column</param>
        /// <returns>Column that contains the element, or null if not found
        /// </returns>
        public static DataGridColumn GetColumnContainingElement(IControl element)
        {
            // Walk up the tree to find the DataGridCell or DataGridColumnHeader that contains the element
            IVisual parent = element;
            while (parent != null)
            {
                if (parent is DataGridCell cell)
                {
                    return cell.OwningColumn;
                }
                if (parent is DataGridColumnHeader columnHeader)
                {
                    return columnHeader.OwningColumn;
                }
                parent = parent.GetVisualParent();
            }
            return null;
        }

        /// <summary>
        /// Clears the current sort direction
        /// </summary>
        public void ClearSort()
        {
            //InvokeProcessSort is already validating if sorting is possible
            _headerCell?.InvokeProcessSort(Input.KeyModifiers.Control);
        }

        /// <summary>
        /// Switches the current state of sort direction
        /// </summary>
        public void Sort()
        {
            //InvokeProcessSort is already validating if sorting is possible
            _headerCell?.InvokeProcessSort(Input.KeyModifiers.None);
        }

        /// <summary>
        /// Changes the sort direction of this column
        /// </summary>
        /// <param name="direction">New sort direction</param>
        public void Sort(ListSortDirection direction)
        {
            //InvokeProcessSort is already validating if sorting is possible
            _headerCell?.InvokeProcessSort(Input.KeyModifiers.None, direction);
        }

        /// <summary>
        /// When overridden in a derived class, causes the column cell being edited to revert to the unedited value.
        /// </summary>
        /// <param name="editingElement">
        /// The element that the column displays for a cell in editing mode.
        /// </param>
        /// <param name="uneditedValue">
        /// The previous, unedited value in the cell being edited.
        /// </param>
        protected virtual void CancelCellEdit(IControl editingElement, object uneditedValue)
        { }

        /// <summary>
        /// When overridden in a derived class, gets an editing element that is bound to the column's <see cref="P:Avalonia.Controls.DataGridBoundColumn.Binding" /> property value.
        /// </summary>
        /// <param name="cell">
        /// The cell that will contain the generated element.
        /// </param>
        /// <param name="dataItem">
        /// The data item represented by the row that contains the intended cell.
        /// </param>
        /// <param name="binding">When the method returns, contains the applied binding.</param>
        /// <returns>
        /// A new editing element that is bound to the column's <see cref="P:Avalonia.Controls.DataGridBoundColumn.Binding" /> property value.
        /// </returns>
        protected abstract IControl GenerateEditingElement(DataGridCell cell, object dataItem, out ICellEditBinding binding);

        /// <summary>
        /// When overridden in a derived class, gets a read-only element that is bound to the column's
        /// <see cref="P:Avalonia.Controls.DataGridBoundColumn.Binding" /> property value.
        /// </summary>
        /// <param name="cell">
        /// The cell that will contain the generated element.
        /// </param>
        /// <param name="dataItem">
        /// The data item represented by the row that contains the intended cell.
        /// </param>
        /// <returns>
        /// A new, read-only element that is bound to the column's <see cref="P:Avalonia.Controls.DataGridBoundColumn.Binding" /> property value.
        /// </returns>
        protected abstract IControl GenerateElement(DataGridCell cell, object dataItem);

        /// <summary>
        /// Called by a specific column type when one of its properties changed,
        /// and its current cells need to be updated.
        /// </summary>
        /// <param name="propertyName">Indicates which property changed and caused this call</param>
        protected void NotifyPropertyChanged(string propertyName)
        {
            OwningGrid?.RefreshColumnElements(this, propertyName);
        }

        /// <summary>
        /// When overridden in a derived class, called when a cell in the column enters editing mode.
        /// </summary>
        /// <param name="editingElement">
        /// The element that the column displays for a cell in editing mode.
        /// </param>
        /// <param name="editingEventArgs">
        /// Information about the user gesture that is causing a cell to enter editing mode.
        /// </param>
        /// <returns>
        /// The unedited value.
        /// </returns>
        protected abstract object PrepareCellForEdit(IControl editingElement, RoutedEventArgs editingEventArgs);

        /// <summary>
        /// Called by the DataGrid control when a column asked for its
        /// elements to be refreshed, typically because one of its properties changed.
        /// </summary>
        /// <param name="element">Indicates the element that needs to be refreshed</param>
        /// <param name="propertyName">Indicates which property changed and caused this call</param>
        protected internal virtual void RefreshCellContent(IControl element, string propertyName)
        { }

        // Non-virtual internal entry point so the grid can invoke the protected hook.
        internal void CancelCellEditInternal(IControl editingElement, object uneditedValue)
        {
            CancelCellEdit(editingElement, uneditedValue);
        }

        /// <summary>
        /// Coerces a DataGridLength to a valid value.  If any value components are double.NaN, this method
        /// coerces them to a proper initial value.  For star columns, the desired width is calculated based
        /// on the rest of the star columns.  For pixel widths, the desired value is based on the pixel value.
        /// For auto widths, the desired value is initialized as the column's minimum width.
        /// </summary>
        /// <param name="width">The DataGridLength to coerce.</param>
        /// <returns>The resultant (coerced) DataGridLength.</returns>
        internal DataGridLength CoerceWidth(DataGridLength width)
        {
            double desiredValue = width.DesiredValue;
            if (double.IsNaN(desiredValue))
            {
                if (width.IsStar && OwningGrid != null && OwningGrid.ColumnsInternal != null)
                {
                    double totalStarValues = 0;
                    double totalStarDesiredValues = 0;
                    double totalNonStarDisplayWidths = 0;
                    foreach (DataGridColumn column in OwningGrid.ColumnsInternal.GetDisplayedColumns(c => c.IsVisible && c != this && !double.IsNaN(c.Width.DesiredValue)))
                    {
                        if (column.Width.IsStar)
                        {
                            totalStarValues += column.Width.Value;
                            totalStarDesiredValues += column.Width.DesiredValue;
                        }
                        else
                        {
                            totalNonStarDisplayWidths += column.ActualWidth;
                        }
                    }
                    if (totalStarValues == 0)
                    {
                        // Compute the new star column's desired value based on the available space if there are no other visible star columns
                        desiredValue = Math.Max(ActualMinWidth, OwningGrid.CellsWidth - totalNonStarDisplayWidths);
                    }
                    else
                    {
                        // Otherwise, compute its desired value based on those of other visible star columns
                        desiredValue = totalStarDesiredValues * width.Value / totalStarValues;
                    }
                }
                else if (width.IsAbsolute)
                {
                    desiredValue = width.Value;
                }
                else
                {
                    desiredValue = ActualMinWidth;
                }
            }

            double displayValue = width.DisplayValue;
            if (double.IsNaN(displayValue))
            {
                displayValue = desiredValue;
            }
            // Clamp the final display value into [ActualMinWidth, ActualMaxWidth].
            displayValue = Math.Max(ActualMinWidth, Math.Min(ActualMaxWidth, displayValue));

            return new DataGridLength(width.Value, width.UnitType, desiredValue, displayValue);
        }

        /// <summary>
        /// If the DataGrid is using layout rounding, the pixel snapping will force all widths to
        /// whole numbers. Since the column widths aren't visual elements, they don't go through the normal
        /// rounding process, so we need to do it ourselves.  If we don't, then we'll end up with some
        /// pixel gaps and/or overlaps between columns.
        /// </summary>
        /// <param name="leftEdge"></param>
        internal void ComputeLayoutRoundedWidth(double leftEdge)
        {
            if (OwningGrid != null && OwningGrid.UseLayoutRounding)
            {
                var scale = LayoutHelper.GetLayoutScale(HeaderCell);
                // Round the column's right edge, then subtract the left edge back out
                // so adjacent columns snap to the same pixel boundary.
                var roundSize = LayoutHelper.RoundLayoutSize(new Size(leftEdge + ActualWidth, 1), scale, scale);
                LayoutRoundedWidth = roundSize.Width - leftEdge;
            }
            else
            {
                LayoutRoundedWidth = ActualWidth;
            }
        }

        //TODO Styles
        internal virtual DataGridColumnHeader CreateHeader()
        {
            var result = new DataGridColumnHeader
            {
                OwningColumn = this
            };
            // Bind the header cell's content/template to this column's Header/HeaderTemplate.
            result[!ContentControl.ContentProperty] = this[!HeaderProperty];
            result[!ContentControl.ContentTemplateProperty] = this[!HeaderTemplateProperty];
            //result.EnsureStyle(null);

            return result;
        }

        /// <summary>
        /// Ensures that this column's width has been coerced to a valid value.
        /// </summary>
        internal void EnsureWidth()
        {
            SetWidthInternalNoCallback(CoerceWidth(Width));
        }

        internal IControl GenerateElementInternal(DataGridCell cell, object dataItem)
        {
            return GenerateElement(cell, dataItem);
        }

        internal object PrepareCellForEditInternal(IControl editingElement, RoutedEventArgs editingEventArgs)
        {
            var result = PrepareCellForEdit(editingElement, editingEventArgs);
            editingElement.Focus();
            return result;
        }

        /// <summary>
        /// Attempts to resize the column's width to the desired DisplayValue, but limits the final size
        /// to the column's minimum and maximum values.  If star sizing is being used, then the column
        /// can only decrease in size by the amount that the columns after it can increase in size.
        /// Likewise, the column can only increase in size if other columns can spare the width.
        /// </summary>
        /// <param name="value">The new Value.</param>
        /// <param name="unitType">The new UnitType.</param>
        /// <param name="desiredValue">The new DesiredValue.</param>
        /// <param name="displayValue">The new DisplayValue.</param>
        /// <param name="userInitiated">Whether or not this resize was initiated by a user action.</param>
        internal void Resize(double value, DataGridLengthUnitType unitType, double desiredValue, double displayValue, bool userInitiated)
        {
            double newValue = value;
            double newDesiredValue = desiredValue;
            double newDisplayValue = Math.Max(ActualMinWidth, Math.Min(ActualMaxWidth, displayValue));
            DataGridLengthUnitType newUnitType = unitType;

            int starColumnsCount = 0;
            double totalDisplayWidth = 0;
            foreach (DataGridColumn column in OwningGrid.ColumnsInternal.GetVisibleColumns())
            {
                column.EnsureWidth();
                totalDisplayWidth += column.ActualWidth;
                starColumnsCount += (column != this && column.Width.IsStar) ? 1 : 0;
            }
            bool hasInfiniteAvailableWidth = !OwningGrid.RowsPresenterAvailableSize.HasValue || double.IsPositiveInfinity(OwningGrid.RowsPresenterAvailableSize.Value.Width);

            // If we're using star sizing, we can only resize the column as much as the columns to the
            // right will allow (i.e. until they hit their max or min widths).
            if (!hasInfiniteAvailableWidth && (starColumnsCount > 0 || (unitType == DataGridLengthUnitType.Star && Width.IsStar && userInitiated)))
            {
                double limitedDisplayValue = Width.DisplayValue;
                double availableIncrease = Math.Max(0, OwningGrid.CellsWidth - totalDisplayWidth);
                double desiredChange = newDisplayValue - Width.DisplayValue;
                if (desiredChange > availableIncrease)
                {
                    // The desired change is greater than the amount of available space,
                    // so we need to decrease the widths of columns to the right to make room.
                    desiredChange -= availableIncrease;
                    double actualChange = desiredChange + OwningGrid.DecreaseColumnWidths(DisplayIndex + 1, -desiredChange, userInitiated);
                    limitedDisplayValue += availableIncrease + actualChange;
                }
                else if (desiredChange > 0)
                {
                    // The desired change is positive but less than the amount of available space,
                    // so there's no need to decrease the widths of columns to the right.
                    limitedDisplayValue += desiredChange;
                }
                else
                {
                    // The desired change is negative, so we need to increase the widths of columns to the right.
                    limitedDisplayValue += desiredChange + OwningGrid.IncreaseColumnWidths(DisplayIndex + 1, -desiredChange, userInitiated);
                }
                if (ActualCanUserResize || (Width.IsStar && !userInitiated))
                {
                    newDisplayValue = limitedDisplayValue;
                }
            }

            if (userInitiated)
            {
                newDesiredValue = newDisplayValue;
                if (!Width.IsStar)
                {
                    InheritsWidth = false;
                    newValue = newDisplayValue;
                    newUnitType = DataGridLengthUnitType.Pixel;
                }
                else if (starColumnsCount > 0 && !hasInfiniteAvailableWidth)
                {
                    // Recalculate star weight of this column based on the new desired value
                    InheritsWidth = false;
                    newValue = (Width.Value * newDisplayValue) / ActualWidth;
                }
            }

            DataGridLength oldWidth = Width;
            SetWidthInternalNoCallback(new DataGridLength(Math.Min(double.MaxValue, newValue), newUnitType, newDesiredValue, newDisplayValue));
            if (Width != oldWidth)
            {
                OwningGrid.OnColumnWidthChanged(this);
            }
        }

        /// <summary>
        /// Sets the column's Width to a new DataGridLength with a different DesiredValue.
        /// </summary>
        /// <param name="desiredValue">The new DesiredValue.</param>
        internal void SetWidthDesiredValue(double desiredValue)
        {
            SetWidthInternalNoCallback(new DataGridLength(Width.Value, Width.UnitType, desiredValue, Width.DisplayValue));
        }

        /// <summary>
        /// Sets the column's Width to a new DataGridLength with a different DisplayValue.
        /// </summary>
        /// <param name="displayValue">The new DisplayValue.</param>
        internal void SetWidthDisplayValue(double displayValue)
        {
            SetWidthInternalNoCallback(new DataGridLength(Width.Value, Width.UnitType, Width.DesiredValue, displayValue));
        }

        /// <summary>
        /// Set the column's Width without breaking inheritance.
        /// </summary>
        /// <param name="width">The new Width.</param>
        internal void SetWidthInternal(DataGridLength width)
        {
            bool originalValue = _settingWidthInternally;
            _settingWidthInternally = true;
            try
            {
                Width = width;
            }
            finally
            {
                _settingWidthInternally = originalValue;
            }
        }

        /// <summary>
        /// Sets the column's Width directly, without any callback effects.
        /// </summary>
        /// <param name="width">The new Width.</param>
        internal void SetWidthInternalNoCallback(DataGridLength width)
        {
            _width = width;
        }

        /// <summary>
        /// Set the column's star value.  Whenever the star value changes, width inheritance is broken.
        /// </summary>
        /// <param name="value">The new star value.</param>
        internal void SetWidthStarValue(double value)
        {
            InheritsWidth = false;
            SetWidthInternalNoCallback(new DataGridLength(value, Width.UnitType, Width.DesiredValue, Width.DisplayValue));
        }

        //TODO Binding
        // Lazily creates and caches the editing element for this column.
        internal IControl GenerateEditingElementInternal(DataGridCell cell, object dataItem)
        {
            if (_editingElement == null)
            {
                _editingElement = GenerateEditingElement(cell, dataItem, out _editBinding);
            }

            return _editingElement;
        }

        /// <summary>
        /// Clears the cached editing element.
        /// </summary>
        //TODO Binding
        internal void RemoveEditingElement()
        {
            _editingElement = null;
        }

        /// <summary>
        /// Holds the name of the member to use for sorting, if not using the default.
        /// </summary>
        public string SortMemberPath
        {
            get;
            set;
        }

        /// <summary>
        /// Holds a Comparer to use for sorting, if not using the default.
        /// </summary>
        public System.Collections.IComparer CustomSortComparer
        {
            get;
            set;
        }

        /// <summary>
        /// We get the sort description from the data source.  We don't worry whether we can modify sort -- perhaps the sort description
        /// describes an unchangeable sort that exists on the data.
        /// </summary>
        internal DataGridSortDescription GetSortDescription()
        {
            if (OwningGrid != null
                && OwningGrid.DataConnection != null
                && OwningGrid.DataConnection.SortDescriptions != null)
            {
                if (CustomSortComparer != null)
                {
                    // NOTE(review): "Desctiption" spelling matches the declared type name elsewhere in the project.
                    return OwningGrid.DataConnection.SortDescriptions
                                .OfType<DataGridComparerSortDesctiption>()
                                .FirstOrDefault(s => s.SourceComparer == CustomSortComparer);
                }

                string propertyName = GetSortPropertyName();
                return OwningGrid.DataConnection.SortDescriptions.FirstOrDefault(s => s.HasPropertyPath && s.PropertyPath == propertyName);
            }

            return null;
        }

        // Resolves the property path used for sorting: SortMemberPath wins, otherwise
        // fall back to the bound column's binding path (classic or compiled binding).
        internal string GetSortPropertyName()
        {
            string result = SortMemberPath;

            if (String.IsNullOrEmpty(result))
            {
                if (this is DataGridBoundColumn boundColumn)
                {
                    if (boundColumn.Binding is Binding binding)
                    {
                        result = binding.Path;
                    }
                    else if (boundColumn.Binding is CompiledBindingExtension compiledBinding)
                    {
                        result = compiledBinding.Path.ToString();
                    }
                }
            }

            return result;
        }
    }
}
#region Using

using System;
using System.Collections.Generic;
using System.Reflection;

using FlatRedBall.Graphics;
using FlatRedBall.ManagedSpriteGroups;
using FlatRedBall.Math;
using FlatRedBall.Math.Geometry;
using FlatRedBall.Gui;
using FlatRedBall.Utilities;
using FlatRedBall.IO;
using System.Linq;

#if !SILVERLIGHT
#endif

#if WINDOWS_PHONE
using System.IO.IsolatedStorage;
using Microsoft.Phone.Shell;
#endif

#endregion

namespace FlatRedBall.Screens
{
    // Static manager responsible for the lifetime of Screen instances:
    // it runs the current Screen's activity and swaps to the next Screen
    // when the current one reports that it is finished.
    public static partial class ScreenManager
    {
        #region Fields

        // Saved engine timing state, restored after a screen transition completes.
        static bool? mWasFixedTimeStep = null;
        static double? mLastTimeFactor = null;

        private static Screen mCurrentScreen;

        private static bool mSuppressStatePush = false;

        private static bool mWarnIfNotEmptyBetweenScreens = true;

        private static int mNumberOfFramesSinceLastScreenLoad = 0;

        // Layer the next screen will be loaded onto; exposed through NextScreenLayer.
        private static Layer mNextScreenLayer;

        // The ScreenManager can be told to ignore certain objects which
        // we recognize will persist from screen to screen.  This should
        // NOT be used as a solution to get around the ScreenManager's check.
        private static PositionedObjectList<Camera> mPersistentCameras = new PositionedObjectList<Camera>();

        private static PositionedObjectList<SpriteFrame> mPersistentSpriteFrames = new PositionedObjectList<SpriteFrame>();

        private static PositionedObjectList<Text> mPersistentTexts = new PositionedObjectList<Text>();

        private static List<IDrawableBatch> mPersistentDrawableBatches = new List<IDrawableBatch>();

        // Callback invoked for the next loaded screen; presumably cleared after use — TODO confirm in MoveToScreen.
        private static Action<Screen> nextCallback;

        // Whether the manager is currently running in (level editor) edit mode;
        // edit mode changes how Activity is dispatched (see Activity below).
        public static bool IsInEditMode { get; private set; }

        public static bool IsNextScreenInEditMode { get; set; }

        #endregion

        #region Properties

        public static Assembly MainAssembly { get; private set; }

        // The screen currently receiving Activity calls; null before the first screen loads.
        public static Screen CurrentScreen
        {
            get { return mCurrentScreen; }
        }

        public static Layer NextScreenLayer
        {
            get { return mNextScreenLayer; }
        }

        public static PositionedObjectList<Camera> PersistentCameras
        {
            get { return mPersistentCameras; }
        }

        public static PositionedObjectList<SpriteFrame> PersistentSpriteFrames
        {
            get { return mPersistentSpriteFrames; }
        }

        public static PositionedObjectList<Sprite> PersistentSprites
        {
            get; private set;
        } = new PositionedObjectList<Sprite>();

        public static PositionedObjectList<PositionedObject> PersistentPositionedObjects
        {
            get; private set;
        } = new PositionedObjectList<PositionedObject>();

        /// <summary>
        /// A list of IDrawableBatch instances which persist inbetween screens. Items in this list
        /// do not result in exceptions if they are not cleaned up inbetween screens.
        /// </summary>
        public static List<IDrawableBatch> PersistentDrawableBatches
        {
            get { return mPersistentDrawableBatches; }
        }

        public static PositionedObjectList<Text> PersistentTexts
        {
            get { return mPersistentTexts; }
        }

        public static List<IWindow> PersistentWindows
        {
            get; private set;
        } = new List<IWindow>();

        public static PositionedObjectList<AxisAlignedRectangle> PersistentAxisAlignedRectangles
        {
            get; private set;
        } = new PositionedObjectList<AxisAlignedRectangle>();

        public static PositionedObjectList<Line> PersistentLines
        {
            get; private set;
        } = new PositionedObjectList<Line>();

        // When true, the manager warns if engine-managed objects remain alive
        // between screens — presumably checked during screen teardown; confirm in the cleanup code.
        public static bool WarnIfNotEmptyBetweenScreens
        {
            get { return mWarnIfNotEmptyBetweenScreens; }
            set { mWarnIfNotEmptyBetweenScreens = value; }
        }

        public static bool ShouldActivateScreen
        {
            get; set;
        }

        #endregion

        // Exception captured while loading in GlueView/edit mode; reported each frame by Activity.
        static Exception GlueViewLoadException;

        public static Action<string> RehydrateAction { get; set; }

        /// <summary>
        /// Event to run before a screen's CustomInitialize is run, allowing systems (like the level editor) to
        /// run code before the user's custom code.
        /// </summary>
        public static event Action<Screen> BeforeScreenCustomInitialize;

        public static event Action<Screen> ScreenLoaded;

        #region Methods

        #region Public Methods

        #region XML Docs
        /// <summary>
        /// Calls activity on the current screen and checks to see if screen
        /// activity is finished.  If activity is finished, the current Screen's
        /// NextScreen is loaded.
/// </summary> #endregion public static void Activity() { /////////////////Early Out/////////////////////////// if (mCurrentScreen == null) return; //////////////End Early Out////////////////////////// if(!IsInEditMode) { mCurrentScreen.Activity(mCurrentScreen.ActivityCallCount == 0); mCurrentScreen.ActivityCallCount++; } else { if(!mCurrentScreen.IsActivityFinished) { mCurrentScreen.ActivityEditMode(); } if(GlueViewLoadException != null) { FlatRedBall.Debugging.Debugger.Write(GlueViewLoadException.ToString()); } } if (mCurrentScreen.ActivityCallCount == 1 && mWasFixedTimeStep.HasValue) { FlatRedBallServices.Game.IsFixedTimeStep = mWasFixedTimeStep.Value; TimeManager.TimeFactor = mLastTimeFactor.Value; } if (mCurrentScreen.IsActivityFinished) { string type = mCurrentScreen.NextScreen; #if DEBUG if(string.IsNullOrWhiteSpace(type)) { var message = $"The current screen ({mCurrentScreen.GetType()}) just ended but didn't specify a NextScreen. " + $"You can specify the next screen by calling MoveToScreen or by manually setting the NextScreen"; } #endif var isFullyQualified = type.Contains("."); if(!isFullyQualified) { // try to prepend the current type to make the next screen fully qualified: var prepend = mCurrentScreen.GetType().Namespace; type = prepend + "." + type; } Screen asyncLoadedScreen = mCurrentScreen.mNextScreenToLoadAsync; // October 10, 2020 // We used to set the time and values before calling // destroy. Now we want to call Destroy on the screen first // in case Destroy happens to change the time factor: mCurrentScreen.Destroy(); mWasFixedTimeStep = FlatRedBallServices.Game.IsFixedTimeStep; mLastTimeFactor = TimeManager.TimeFactor; FlatRedBallServices.Game.IsFixedTimeStep = false; TimeManager.TimeFactor = 0; GuiManager.Cursor.IgnoreInputThisFrame = true; if(Input.InputManager.InputReceiver != null) { Input.InputManager.InputReceiver = null; } Instructions.InstructionManager.ObjectsIgnoringPausing.Clear(); // Added Nov 15 2020 - do we want this here? 
If not we may get // silent accumulation. Do we warn or just destroy? Instructions.InstructionManager.Instructions.Clear(); FlatRedBallServices.singleThreadSynchronizationContext.Clear(); //mCurrentScreen.Destroy(); // check to see if there is any leftover data CheckAndWarnIfNotEmpty(mCurrentScreen); // Let's perform a GC here. GC.Collect(); // Not sure why this started to freeze on Android in the automated test project // on April 22, 2015. I'm commenting it out because I don't think we need to wait // for finalizers, and we can just continue on. Maybe try to bring the code back // on Android in the future too. // March 16, 2017 - Desktop GL too, not sure why... #if !ANDROID && !DESKTOP_GL GC.WaitForPendingFinalizers(); #endif if (asyncLoadedScreen == null) { // Loads the Screen, suspends input for one frame, and // calls Activity on the Screen. // The Activity call is required for objects like SpriteGrids // which need to be managed internally. // No need to assign mCurrentScreen - this is done by the 4th argument "true" //mCurrentScreen = LoadScreen(type); } else { mCurrentScreen = asyncLoadedScreen; nextCallback?.Invoke(mCurrentScreen); nextCallback = null; mCurrentScreen.AddToManagers(); } mNumberOfFramesSinceLastScreenLoad = 0; } else { mNumberOfFramesSinceLastScreenLoad++; } } public static void Start<T>() where T : Screen, new() { var type = typeof(T); Start(type); } /// <summary> /// Ends the current screen and moves to the next screen. /// </summary> /// <param name="screenType">The screen to move to.</param> /// <param name="screenCreatedCallback">An event to call after the screen has been created.</param> /// <remarks> /// This method provides an alternative to the screen managing its own flow through its MoveMoveToScreen method. /// This method can be used by objects outside of screens managing flow. 
        /// </remarks>
        public static void MoveToScreen(Type screenType, Action<Screen> screenCreatedCallback = null)
        {
            if(mCurrentScreen != null)
            {
                // The actual transition happens later in Activity when the current
                // screen reports IsActivityFinished.
                mCurrentScreen.MoveToScreen(screenType);
                nextCallback = screenCreatedCallback;
            }
            else
            {
                throw new Exception("There is no current screen to move from. Call Start to create the first screen.");
            }
        }

        /// <summary>
        /// Ends the current screen and moves to the screen with the given (possibly
        /// unqualified) type name. See the Type-based overload for details.
        /// </summary>
        public static void MoveToScreen(string screenType, Action<Screen> screenCreatedCallback = null)
        {
            if (mCurrentScreen != null)
            {
                mCurrentScreen.MoveToScreen(screenType);
                nextCallback = screenCreatedCallback;
            }
            else
            {
                throw new Exception("There is no current screen to move from. Call Start to create the first screen.");
            }
        }

        /// <summary>
        /// Loads a screen.  Should only be called once during initialization.
        /// </summary>
        /// <param name="screenToStartWithType">Qualified name of the class to load.</param>
        public static void Start(Type screenToStartWithType)
        {
            // MainAssembly is used by LoadScreen to resolve screen types by name.
#if WINDOWS_8 || UWP
            MainAssembly = screenToStartWithType.GetTypeInfo().Assembly;
#else
            MainAssembly = screenToStartWithType.Assembly;
#endif
            string screenToStartWith = screenToStartWithType.FullName;
            if (mCurrentScreen != null)
            {
                throw new InvalidOperationException("You can't call Start if there is already a Screen. Did you call Start twice?");
            }
            else
            {
                StateManager.Current.Initialize();

                if (ShouldActivateScreen && RehydrateAction != null)
                {
                    // External rehydration (e.g. restoring a tombstoned app) takes over
                    // creating the screen.
                    RehydrateAction(screenToStartWith);
                }
                else
                {
                    mCurrentScreen = LoadScreen(screenToStartWith);
                    ShouldActivateScreen = false;
                }
            }
        }

        // Hides object.ToString (class is static, so "new" is required).
        public static new string ToString()
        {
            if (mCurrentScreen != null) return mCurrentScreen.ToString();
            else return "No Current Screen";
        }

        #endregion

        #region Internal Methods

        // Only flags the screen as having been drawn; actual rendering happens elsewhere.
        internal static void Draw()
        {
            if(mCurrentScreen != null)
            {
                mCurrentScreen.HasDrawBeenCalled = true;
            }
        }

        internal static void UpdateDependencies()
        {
            mCurrentScreen?.UpdateDependencies(TimeManager.CurrentTime);
        }

        #endregion

        #region Private Methods

        /// <summary>
        /// Instantiates, initializes, and activates the screen with the given
        /// fully-qualified type name, assigning it to mCurrentScreen.
        /// </summary>
        /// <param name="screen">Fully-qualified screen type name, resolved against MainAssembly.</param>
        /// <returns>The newly-created screen, or null if the type is abstract.</returns>
        private static Screen LoadScreen(string screen)
        {
            mNextScreenLayer = null;

            Screen newScreen = null;

            Type typeOfScreen = MainAssembly.GetType(screen);

            if (typeOfScreen == null)
            {
                throw new System.ArgumentException("There is no " + screen + " class defined in your project or linked assemblies.");
            }

            // Edit-mode status is a one-shot flag consumed per screen load.
            IsInEditMode = IsNextScreenInEditMode;
            IsNextScreenInEditMode = false;

            if (screen != null && screen != "")
            {
                if(typeOfScreen.IsAbstract == false)
                {
                    newScreen = (Screen)Activator.CreateInstance(typeOfScreen, new object[0]);
                }
            }

            if (newScreen != null)
            {
                FlatRedBall.Input.InputManager.CurrentFrameInputSuspended = true;

                // We do this so that new Screens are the CurrentScreen in Activity.
                // This is useful in custom logic.
                mCurrentScreen = newScreen;

                if(BeforeScreenCustomInitialize != null)
                {
                    mCurrentScreen.BeforeCustomInitialize += () => BeforeScreenCustomInitialize(mCurrentScreen);
                }

                if(IsInEditMode)
                {
                    GlueViewLoadException = null;
                    try
                    {
                        // in edit mode, we tolerate crashes on initialize since this is common
                        newScreen.Initialize(true);

                        TimeManager.SetNextFrameTimeTo0 = true;
                        newScreen.ApplyRestartVariables();

                        // stop everything:
                        foreach (var item in SpriteManager.ManagedPositionedObjects)
                        {
                            item.Velocity = Microsoft.Xna.Framework.Vector3.Zero;
                            item.Acceleration = Microsoft.Xna.Framework.Vector3.Zero;
                        }
                        foreach(var shape in ShapeManager.AutomaticallyUpdatedShapes)
                        {
                            shape.Velocity = Microsoft.Xna.Framework.Vector3.Zero;
                            shape.Acceleration = Microsoft.Xna.Framework.Vector3.Zero;
                        }
                    }
                    catch(Exception e)
                    {
                        // I guess do nothing?
                        // The exception is surfaced each frame by Activity's edit-mode branch.
                        GlueViewLoadException = e;
                    }
                }
                else
                {
                    newScreen.Initialize(true);

                    TimeManager.SetNextFrameTimeTo0 = true;
                    newScreen.ApplyRestartVariables();
                }

                mSuppressStatePush = false;

                nextCallback?.Invoke(newScreen);
                nextCallback = null;

                // Dec 28, 2020
                // I thought we called
                // Activity immediately
                // when a new Screen was
                // created/added. If we don't
                // then a single frame will pass
                // without activity, and objects may
                // not be positioned correclty.
                if (!IsInEditMode)
                {
                    mCurrentScreen.Activity(mCurrentScreen.ActivityCallCount == 0);
                    mCurrentScreen.ActivityCallCount++;
                }

                // We want to set time factor back to non-zero if in edit mode so objects can move and update
                // NOTE(review): if IsInEditMode is true but mWasFixedTimeStep has never been
                // set (no prior transition), mWasFixedTimeStep.Value will throw - confirm
                // edit mode is only entered after at least one screen transition.
                if (IsInEditMode || ( mCurrentScreen.ActivityCallCount == 1 && mWasFixedTimeStep.HasValue))
                {
                    FlatRedBallServices.Game.IsFixedTimeStep = mWasFixedTimeStep.Value;
                    TimeManager.TimeFactor = mLastTimeFactor.Value;
                }

                ScreenLoaded?.Invoke(mCurrentScreen);

                if (IsInEditMode)
                {
                    // stop everything:
                    foreach (var item in SpriteManager.ManagedPositionedObjects)
                    {
                        // this prevents flickering due to the upate dependencies call
                        // having been run earlier in the frame.
                        item.ForceUpdateDependenciesDeep();
                    }
                }
            }
            return newScreen;
        }

        /// <summary>
        /// Throws an Exception describing any engine-managed objects (Cameras, Sprites,
        /// Texts, shapes, windows, etc.) left behind by the screen that was just
        /// unloaded, excluding objects registered in the Persistent* lists.
        /// Does nothing when WarnIfNotEmptyBetweenScreens is false.
        /// </summary>
        /// <param name="screen">Unused; the error message reads mCurrentScreen instead.
        /// NOTE(review): presumably kept for API compatibility - confirm.</param>
        public static void CheckAndWarnIfNotEmpty(Screen screen = null)
        {
            if (WarnIfNotEmptyBetweenScreens)
            {
                List<string> messages = new List<string>();
                // the user wants to make sure that the Screens have cleaned up everything
                // after being destroyed.  Check the data to make sure it's all empty.

                // Currently we're not checking the GuiManager - do we want to?

                #region Make sure there's only 1 non-persistent Camera left
                if (SpriteManager.Cameras.Count > 1)
                {
                    int count = SpriteManager.Cameras.Count;

                    foreach (Camera camera in mPersistentCameras)
                    {
                        if (SpriteManager.Cameras.Contains(camera))
                        {
                            count--;
                        }
                    }

                    if (count > 1)
                    {
                        messages.Add("There are " + count + " Cameras in the SpriteManager (excluding ignored Cameras). There should only be 1. See \"FlatRedBall.SpriteManager.Cameras\"");
                    }
                }
                #endregion

                #region Make sure that the Camera doesn't have any extra layers
                if (SpriteManager.Camera.Layers.Count > 1)
                {
                    messages.Add("There are " + SpriteManager.Camera.Layers.Count + " Layers on the default Camera. There should only be 1. See \"FlatRedBall.SpriteManager.Camera.Layers\"");
                }
                #endregion

                #region Automatically updated Sprites
                if (SpriteManager.AutomaticallyUpdatedSprites.Count != 0)
                {
                    int spriteCount = SpriteManager.AutomaticallyUpdatedSprites.Count;

                    // Discount Sprites owned by persistent SpriteFrames...
                    foreach (var spriteFrame in mPersistentSpriteFrames)
                    {
                        foreach (Sprite sprite in SpriteManager.AutomaticallyUpdatedSprites)
                        {
                            if (spriteFrame.IsSpriteComponentOfThis(sprite))
                            {
                                spriteCount--;
                            }
                        }
                    }
                    // ...and explicitly persistent Sprites.
                    foreach(var sprite in PersistentSprites)
                    {
                        if(sprite.ListsBelongingTo.Contains(SpriteManager.mAutomaticallyUpdatedSprites))
                        {
                            spriteCount--;
                        }
                    }

                    if (spriteCount != 0)
                    {
                        messages.Add("There are " + spriteCount + " AutomaticallyUpdatedSprites in the SpriteManager. See \"FlatRedBall.SpriteManager.AutomaticallyUpdatedSprites\"");
                    }
                }
                #endregion

                #region Manually updated Sprites
                if (SpriteManager.ManuallyUpdatedSpriteCount != 0)
                {
                    messages.Add("There are " + SpriteManager.ManuallyUpdatedSpriteCount + " ManuallyUpdatedSprites in the SpriteManager. See \"SpriteManager.ManuallyUpdatedSpriteCount\"");
                }
                #endregion

                #region Ordered by distance Sprites
                if (SpriteManager.OrderedSprites.Count != 0)
                {
                    int spriteCount = SpriteManager.OrderedSprites.Count;

                    foreach (SpriteFrame spriteFrame in mPersistentSpriteFrames)
                    {
                        foreach (Sprite sprite in SpriteManager.OrderedSprites)
                        {
                            if (spriteFrame.IsSpriteComponentOfThis(sprite))
                            {
                                spriteCount--;
                            }
                        }
                    }

                    if (spriteCount != 0)
                    {
                        messages.Add("There are " + spriteCount + " Ordered (Drawn) Sprites in the SpriteManager. See \"FlatRedBall.SpriteManager.OrderedSprites\"");
                    }
                }
                #endregion

                #region Drawable Batches
                if (SpriteManager.DrawableBatches.Count != 0)
                {
                    int drawableBatchCount = 0;
                    foreach(var item in SpriteManager.DrawableBatches)
                    {
                        if(!PersistentDrawableBatches.Contains(item))
                        {
                            drawableBatchCount++;
                        }
                    }
                    if (drawableBatchCount > 0)
                    {
                        messages.Add("There are " + drawableBatchCount + " DrawableBatches in the SpriteManager. " +
                            "See \"FlatRedBall.SpriteManager.DrawableBatches\"");
                    }
                }
                #endregion

                #region Managed Positionedobjects
                if (SpriteManager.ManagedPositionedObjects.Count != 0)
                {
                    var count = SpriteManager.ManagedPositionedObjects.Count;

                    foreach(var persistentPositionedObject in PersistentPositionedObjects)
                    {
                        if(persistentPositionedObject.ListsBelongingTo.Contains(SpriteManager.mManagedPositionedObjects))
                        {
                            count--;
                        }
                    }

                    // NOTE(review): this tolerates exactly one leftover (count > 1) while
                    // the Sprite checks tolerate none - confirm this asymmetry is intended.
                    if(count > 1)
                    {
                        messages.Add("There are " + count + " Managed PositionedObjects in the SpriteManager. See \"FlatRedBall.SpriteManager.ManagedPositionedObjects\"");

                        var firstPositionedObject = SpriteManager.ManagedPositionedObjects.Except(PersistentPositionedObjects).FirstOrDefault();
                        var type = firstPositionedObject.GetType();
                        if (type.FullName.Contains(".Entities."))
                        {
                            string message;
                            if(string.IsNullOrWhiteSpace(firstPositionedObject.Name))
                            {
                                message = $"The first is an unnnamed entity of type {type.Name}";
                            }
                            else
                            {
                                message = $"The first is an entity of type {type.Name} named {firstPositionedObject.Name}";
                            }
                            messages.Add(message);
                        }
                    }
                }
                #endregion

                #region Layers
                if (SpriteManager.LayerCount != 0)
                {
                    messages.Add("There are " + SpriteManager.LayerCount + " Layers in the SpriteManager. See \"FlatRedBall.SpriteManager.Layers\"");
                }
                #endregion

                #region TopLayer
                if (SpriteManager.TopLayer.Sprites.Count != 0)
                {
                    var count = SpriteManager.TopLayer.Sprites.Count;

                    foreach (var sprite in PersistentSprites)
                    {
                        if(sprite.ListsBelongingTo.Contains(SpriteManager.TopLayer.mSprites))
                        {
                            count--;
                        }
                    }

                    if (count > 0)
                    {
                        messages.Add("There are " + count + " Sprites in the SpriteManager's TopLayer. See \"FlatRedBall.SpriteManager.TopLayer.Sprites\"");
                    }
                }
                #endregion

                #region Particles
                if (SpriteManager.ParticleCount != 0)
                {
                    messages.Add("There are " + SpriteManager.ParticleCount + " Particle Sprites in the SpriteManager. See \"FlatRedBall.SpriteManager.AutomaticallyUpdatedSprites\"");
                }
                #endregion

                #region SpriteFrames
                if (SpriteManager.SpriteFrames.Count != 0)
                {
                    int spriteFrameCount = SpriteManager.SpriteFrames.Count;

                    foreach (SpriteFrame spriteFrame in mPersistentSpriteFrames)
                    {
                        if (SpriteManager.SpriteFrames.Contains(spriteFrame))
                        {
                            spriteFrameCount--;
                        }
                    }

                    if (spriteFrameCount != 0)
                    {
                        messages.Add("There are " + spriteFrameCount + " SpriteFrames in the SpriteManager. See \"FlatRedBall.SpriteManager.SpriteFrames\"");
                    }
                }
                #endregion

                #region Text objects
                if (TextManager.AutomaticallyUpdatedTexts.Count != 0)
                {
                    int textCount = TextManager.AutomaticallyUpdatedTexts.Count;

                    foreach (Text text in mPersistentTexts)
                    {
                        if (TextManager.AutomaticallyUpdatedTexts.Contains(text))
                        {
                            textCount--;
                        }
                    }

                    if (textCount != 0)
                    {
                        var textError = "There are " + textCount + " automatically updated Texts in the TextManager. See \"FlatRedBall.Graphics.TextManager.AutomaticallyUpdatedTexts\"";
                        messages.Add(textError);
                    }
                }
                #endregion

                #region Managed Shapes
                if (ShapeManager.AutomaticallyUpdatedShapes.Count != 0)
                {
                    messages.Add("There are " + ShapeManager.AutomaticallyUpdatedShapes.Count + " Automatically Updated Shapes in the ShapeManager. See \"FlatRedBall.Math.Geometry.ShapeManager.AutomaticallyUpdatedShapes\"");
                }
                #endregion

                #region Visible Circles
                if (ShapeManager.VisibleCircles.Count != 0)
                {
                    messages.Add("There are " + ShapeManager.VisibleCircles.Count + " visible Circles in the ShapeManager. See \"FlatRedBall.Math.Geometry.ShapeManager.VisibleCircles\"");
                }
                #endregion

                #region Visible Rectangles
                if (ShapeManager.VisibleRectangles.Count != 0)
                {
                    var rectangleCount = ShapeManager.VisibleRectangles.Count;

                    foreach(var rectangle in PersistentAxisAlignedRectangles)
                    {
                        if(ShapeManager.VisibleRectangles.Contains(rectangle))
                        {
                            rectangleCount--;
                        }
                    }

                    if(rectangleCount != 0)
                    {
                        messages.Add($"There are {rectangleCount}" + " visible AxisAlignedRectangles in the VisibleRectangles. See \"FlatRedBall.Math.Geometry.ShapeManager.VisibleRectangles\"");
                    }
                }
                #endregion

                #region Visible Polygons
                if (ShapeManager.VisiblePolygons.Count != 0)
                {
                    messages.Add("There are " + ShapeManager.VisiblePolygons.Count + " visible Polygons in the ShapeManager. See \"FlatRedBall.Math.Geometry.ShapeManager.VisiblePolygons\"");
                }
                #endregion

                #region Visible Lines
                if (ShapeManager.VisibleLines.Count != 0)
                {
                    var lineListCopy = ShapeManager.VisibleLines
                        .Except(PersistentLines)
                        .ToList();

                    if(lineListCopy.Count > 0)
                    {
                        messages.Add("There are " + lineListCopy.Count + " visible Lines in the ShapeManager. See \"FlatRedBall.Math.Geometry.ShapeManager.VisibleLines\"");
                    }
                }
                #endregion

                #region IWindows
                if (GuiManager.Windows.Count != 0)
                {
                    var remainingWindows = GuiManager.Windows.Except(PersistentWindows).ToArray();

                    if(remainingWindows.Length > 0)
                    {
                        var message = "The GuiManager has " + remainingWindows.Length + " windows.\n";
                        message += $"The first is of type {remainingWindows[0].GetType()} named {remainingWindows[0].Name}\n";
                        message += "See \"FlatRedBall.Gui.GuiManager.Windows\" or add the window to PersistentWindows if it should persist between screens";
                        messages.Add(message);
                    }
                }
                #endregion

                if (messages.Count != 0)
                {
                    string errorString = "The Screen that was just unloaded did not clean up after itself:";
                    if(mCurrentScreen != null)
                    {
                        errorString = $"The Screen that was just unloaded ({mCurrentScreen.GetType().Name}) did not clean up after itself:";
                    }
                    foreach (string s in messages)
                        errorString += "\n" + s;

                    throw new System.Exception(errorString);
                }
            }
        }

        #endregion

        #endregion
    }
}
using System;
using System.IO;
using System.Net;
using System.Net.Sockets;
using System.Text;
using System.Threading.Tasks;
using SuperSocket.ProtoBase;
using Xunit;
using Xunit.Abstractions;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.DependencyInjection;
using SuperSocket;
using System.Linq;
using System.Reflection;
using SuperSocket.Channel;

namespace SuperSocket.Tests
{
    /// <summary>
    /// Tests for IAppSession lifecycle: state transitions, Connected/Closed events,
    /// per-session item storage, Reset, close reasons, and server plumbing.
    /// </summary>
    [Trait("Category", "Basic")]
    public class SessionTest : TestClassBase
    {
        public SessionTest(ITestOutputHelper outputHelper)
            : base(outputHelper)
        {

        }

        /// <summary>
        /// Verifies session state/event behavior across connect, close, and Reset:
        /// items survive close, event handler lists are cleared by Reset.
        /// Uses fixed 1s delays to let the server observe socket state changes.
        /// </summary>
        [Fact]
        public async Task TestSessionEvents()
        {
            var hostConfigurator = new RegularHostConfigurator();
            var connected = false;

            IAppSession session = null;

            using (var server = CreateSocketServerBuilder<TextPackageInfo, LinePipelineFilter>(hostConfigurator)
                .UseSessionHandler((s) =>
                {
                    // Captures the server-side session object for assertions below.
                    connected = true;
                    session = s;
                    return new ValueTask();
                })
                .BuildAsServer())
            {
                Assert.Equal("TestServer", server.Name);

                Assert.True(await server.StartAsync());
                OutputHelper.WriteLine("Started.");

                var client = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp);
                await client.ConnectAsync(hostConfigurator.GetServerEndPoint());
                OutputHelper.WriteLine("Connected.");

                // Give the server time to accept and run the session handler.
                await Task.Delay(1000);

                session.Closed += (s, e) =>
                {
                    connected = false;
                    // DataContext set here should be cleared later by Reset.
                    session.DataContext = "Hello, my love!";
                    return new ValueTask();
                };

                // Subscribed after the connection was made, so this handler never runs;
                // it only contributes to the invocation-list count checked below.
                session.Connected += async (s, e) =>
                {
                    OutputHelper.WriteLine("Session.Connected event was triggered");
                    await Task.CompletedTask;
                };

                var itemKey = "GirlFriend";
                var itemValue = "Who?";

                // Per-session item storage via the indexer.
                session[itemKey] = itemValue;

                Assert.Equal(SessionState.Connected, session.State);
                Assert.True(connected);

                client.Shutdown(SocketShutdown.Both);
                client.Close();

                await Task.Delay(1000);

                // Remote close should flip state and fire the Closed handler.
                Assert.Equal(SessionState.Closed, session.State);
                Assert.False(connected);

                // Items persist through close.
                Assert.Equal(itemValue, session[itemKey]);

                // One handler registered on each event above.
                Assert.Equal(1, GetEventInvocationCount(session, nameof(session.Closed)));
                Assert.Equal(1, GetEventInvocationCount(session, nameof(session.Connected)));

                session.Reset();

                // Reset clears channel, context, state, items, and event handlers.
                Assert.Null(session.Channel);
                Assert.Null(session.DataContext);
                Assert.Equal(SessionState.None, session.State);
                Assert.Null(session[itemKey]);
                Assert.Equal(0, GetEventInvocationCount(session, nameof(session.Closed)));
                Assert.Equal(0, GetEventInvocationCount(session, nameof(session.Connected)));

                await server.StopAsync();
            }
        }

        /// <summary>
        /// Counts the delegates currently subscribed to the named event by reading
        /// its compiler-generated backing field via reflection. Returns 0 when no
        /// handler is attached (field value is null).
        /// </summary>
        private int GetEventInvocationCount(object objectWithEvent, string eventName)
        {
            var allBindings = BindingFlags.Static | BindingFlags.NonPublic | BindingFlags.Instance | BindingFlags.Public | BindingFlags.FlattenHierarchy;

            var type = objectWithEvent.GetType();

            var fieldInfo = type.GetField(eventName, allBindings);

            var value = fieldInfo.GetValue(objectWithEvent);

            var handler = value as Delegate;

            if (handler == null)
                return 0;

            return handler.GetInvocationList().Length;
        }

        /// <summary>
        /// Verifies CloseEventArgs.Reason: RemoteClosing when the client shuts the
        /// socket down, LocalClosing when the server calls CloseAsync.
        /// </summary>
        [Fact]
        public async Task TestCloseReason()
        {
            var hostConfigurator = new RegularHostConfigurator();

            IAppSession session = null;

            using (var server = CreateSocketServerBuilder<TextPackageInfo, LinePipelineFilter>(hostConfigurator)
                .UseSessionHandler((s) =>
                {
                    session = s;
                    return new ValueTask();
                })
                .BuildAsServer())
            {
                Assert.Equal("TestServer", server.Name);

                Assert.True(await server.StartAsync());
                OutputHelper.WriteLine("Started.");

                CloseReason closeReason = CloseReason.Unknown;

                // First connection: closed from the client side.
                var client = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp);
                await client.ConnectAsync(hostConfigurator.GetServerEndPoint());
                OutputHelper.WriteLine("Connected.");

                await Task.Delay(1000);

                session.Closed += (s, e) =>
                {
                    closeReason = e.Reason;
                    return new ValueTask();
                };

                client.Shutdown(SocketShutdown.Both);
                client.Close();

                await Task.Delay(1000);

                Assert.Equal(SessionState.Closed, session.State);
                Assert.Equal(CloseReason.RemoteClosing, closeReason);

                closeReason = CloseReason.Unknown;

                // Second connection: closed from the server side via CloseAsync.
                client = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp);
                await client.ConnectAsync(hostConfigurator.GetServerEndPoint());
                OutputHelper.WriteLine("Connected.");

                await Task.Delay(1000);

                session.Closed += (s, e) =>
                {
                    closeReason = e.Reason;
                    return new ValueTask();
                };

                await session.CloseAsync(CloseReason.LocalClosing);

                await Task.Delay(1000);

                Assert.Equal(SessionState.Closed, session.State);
                Assert.Equal(CloseReason.LocalClosing, closeReason);

                await server.StopAsync();
            }
        }

        /// <summary>
        /// Round-trips one line through the line-based pipeline: the server echoes
        /// "Hello World" back for each received text package.
        /// </summary>
        [Fact]
        public async Task TestConsoleProtocol()
        {
            var hostConfigurator = new RegularHostConfigurator();
            using (var server = CreateSocketServerBuilder<TextPackageInfo, LinePipelineFilter>(hostConfigurator)
                .UsePackageHandler(async (IAppSession s, TextPackageInfo p) =>
                {
                    await s.SendAsync(Utf8Encoding.GetBytes("Hello World\r\n"));
                }).BuildAsServer() as IServer)
            {
                Assert.True(await server.StartAsync());
                Assert.Equal(0, server.SessionCount);

                var client = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp);
                await client.ConnectAsync(hostConfigurator.GetServerEndPoint());

                using (var stream = await hostConfigurator.GetClientStream(client))
                using (var streamReader = new StreamReader(stream, Utf8Encoding, true))
                using (var streamWriter = new StreamWriter(stream, Utf8Encoding, 1024 * 1024 * 4))
                {
                    await streamWriter.WriteAsync("Hello World\r\n");
                    await streamWriter.FlushAsync();
                    var line = await streamReader.ReadLineAsync();
                    Assert.Equal("Hello World", line);
                }

                await server.StopAsync();
            }
        }

        /// <summary>
        /// Verifies services registered in ConfigureServices are resolvable from the
        /// server's ServiceProvider.
        /// </summary>
        [Fact]
        public async Task TestServiceProvider()
        {
            using (var server = CreateSocketServerBuilder<TextPackageInfo, LinePipelineFilter>()
                .ConfigureServices((ctx, services) =>
                {
                    services.AddSingleton<IHostConfigurator, RegularHostConfigurator>();
                }).BuildAsServer() as IServer)
            {
                Assert.True(await server.StartAsync());

                Assert.IsType<RegularHostConfigurator>(server.ServiceProvider.GetService<IHostConfigurator>());

                await server.StopAsync();
            }
        }
    }
}
using System;
using System.Collections.Generic;
using System.Text;
using System.ComponentModel;
using System.Collections;
using System.Globalization;
using Microsoft.Msagl.Core.DataStructures;
using Microsoft.Msagl.Core.Geometry;
using Microsoft.Msagl.Core.Layout;
using P2=Microsoft.Msagl.Core.Geometry.Point;
using Microsoft.Msagl.Drawing;

namespace Microsoft.Msagl.Drawing {
    /// <summary>
    /// Keep the information related to an object label
    /// </summary>
    [Serializable]
    public class Label : DrawingObject {
        ///<summary>
        /// The drawing object (an Edge or a Node) this label belongs to.
        ///</summary>
        public DrawingObject Owner { get; set; }

        /// <summary>
        /// an empty constructor
        /// </summary>
        public Label() {
        }

        /// <summary>
        /// a constructor with text
        /// </summary>
        /// <param name="textPar"></param>
        public Label(string textPar) {
            // Assigns the backing field directly, bypassing the escape handling
            // performed by the Text setter.
            this.labelText = textPar;
        }

        ///<summary>
        /// The label center, taken from the owner's geometry. Returns the default
        /// point when there is no owner.
        ///</summary>
        public Point Center {
            get {
                if (Owner == null)
                    return new Point();
                var ownerEdge = Owner as Edge;
                return ownerEdge != null
                    ? ownerEdge.GeometryEdge.Label.Center
                    : ((Node)Owner).GeometryNode.Center;
            }
        }

        // Fallback dimensions used only while no geometry label is attached.
        double localWidth;
        double localHeight;

        /// <summary>
        /// the width of the label
        /// </summary>
        public double Width {
            get {
                var geometry = GeometryLabel;
                return geometry != null ? geometry.Width : localWidth;
            }
            set {
                var geometry = GeometryLabel;
                if (geometry != null)
                    geometry.Width = value;
                else
                    localWidth = value;
            }
        }

        /// <summary>
        /// the height of the label
        /// </summary>
        public double Height {
            get {
                var geometry = GeometryLabel;
                return geometry != null ? geometry.Height : localHeight;
            }
            set {
                var geometry = GeometryLabel;
                if (geometry != null)
                    geometry.Height = value;
                else
                    localHeight = value;
            }
        }

        /// <summary>
        /// left coordinate
        /// </summary>
        public double Left {
            get { return Center.X - Width / 2; }
        }

        /// <summary>
        /// top coordinate (Y grows upward in this coordinate system)
        /// </summary>
        public double Top {
            get { return Center.Y + Height / 2; }
        }

        /// <summary>
        /// right coordinate
        /// </summary>
        public double Right {
            get { return Center.X + Width / 2; }
        }

        /// <summary>
        /// bottom coordinate
        /// </summary>
        public double Bottom {
            get { return Center.Y - Height / 2; }
        }

        /// <summary>
        /// gets the left top corner
        /// </summary>
        public P2 LeftTop {
            get { return new P2(Left, Top); }
        }

        /// <summary>
        /// gets the right bottom corner
        /// </summary>
        public P2 RightBottom {
            get { return new P2(Right, Bottom); }
        }

        /// <summary>
        /// returns the bounding box of the label
        /// </summary>
        override public Rectangle BoundingBox {
            get { return new Rectangle(LeftTop, RightBottom); }
        }

        /// <summary>
        /// gets or sets the label size
        /// </summary>
        virtual public Size Size {
            get { return new Size(Width, Height); }
            set {
                Width = value.Width;
                Height = value.Height;
            }
        }

        internal Color fontcolor = Color.Black;

        ///<summary>
        ///Label font color.
        ///</summary>
        [Description("type face color")]
        public Color FontColor {
            get { return fontcolor; }
            set { fontcolor = value; }
        }

        ///<summary>
        ///Type face font.
        ///</summary>
        string typefaceName = "";

        ///<summary>
        ///Type face font; falls back to DefaultFontName when unset.
        ///</summary>
        [Description("type face font"), DefaultValue("")]
        public string FontName {
            get { return String.IsNullOrEmpty(typefaceName) ? DefaultFontName : typefaceName; }
            set { typefaceName = value; }
        }

        string labelText;

        /// <summary>
        /// A label of the entity. The label is rendered opposite to the ID.
        /// Setting a non-null value converts literal "\n" sequences into newlines;
        /// setting null stores the empty string.
        /// </summary>
        public string Text {
            get { return labelText; }
            set { labelText = value == null ? "" : value.Replace("\\n", "\n"); }
        }

        internal double fontsize = DefaultFontSize;

        ///<summary>
        ///The point size of the id.
        ///</summary>
        public double FontSize {
            get { return fontsize; }
            set { fontsize = value; }
        }

        internal static string defaultFontName = "Times-Roman";

        /// <summary>
        /// the name of the defaul font
        /// </summary>
        public static string DefaultFontName {
            get { return defaultFontName; }
            set { defaultFontName = value; }
        }

        static int defaultSizeInPoints = 12;

        /// <summary>
        /// the default font size
        /// </summary>
        static public int DefaultFontSize {
            get { return defaultSizeInPoints; }
            set { defaultSizeInPoints = value; }
        }

        Core.Layout.Label layoutLabel = new Core.Layout.Label();

        /// <summary>
        /// gets or set geometry label
        /// </summary>
        public Core.Layout.Label GeometryLabel {
            get { return layoutLabel; }
            set { layoutLabel = value; }
        }

        /// <summary>
        /// gets the geometry of the label
        /// </summary>
        public override GeometryObject GeometryObject {
            get { return GeometryLabel; }
            set { GeometryLabel = (Core.Layout.Label)value; }
        }
    }
}
/****************************************************************************** * Spine Runtimes Software License v2.5 * * Copyright (c) 2013-2016, Esoteric Software * All rights reserved. * * You are granted a perpetual, non-exclusive, non-sublicensable, and * non-transferable license to use, install, execute, and perform the Spine * Runtimes software and derivative works solely for personal or internal * use. Without the written permission of Esoteric Software (see Section 2 of * the Spine Software License Agreement), you may not (a) modify, translate, * adapt, or develop new applications using the Spine Runtimes or otherwise * create derivative works or improvements of the Spine Runtimes or (b) remove, * delete, alter, or obscure any trademarks or any copyright, trademark, patent, * or other intellectual property or proprietary rights notices on or in the * Software, including any copy thereof. Redistributions in binary or source * form must include this license and terms. * * THIS SOFTWARE IS PROVIDED BY ESOTERIC SOFTWARE "AS IS" AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO * EVENT SHALL ESOTERIC SOFTWARE BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES, BUSINESS INTERRUPTION, OR LOSS OF * USE, DATA, OR PROFITS) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER * IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
*****************************************************************************/

using System;

namespace Spine {
	/// <summary>
	/// Constrains a chain of bones to follow a <see cref="PathAttachment"/> on the target slot.
	/// Positions (and optionally rotations/scale) of the constrained bones are interpolated
	/// along the path according to the configured spacing and mix values.
	/// </summary>
	public class PathConstraint : IConstraint {
		// Sentinel values for the previously-evaluated curve index.
		const int NONE = -1, BEFORE = -2, AFTER = -3;

		internal PathConstraintData data;
		internal ExposedList<Bone> bones;
		internal Slot target;
		internal float position, spacing, rotateMix, translateMix;

		// Scratch buffers reused across updates to avoid per-frame allocation.
		internal ExposedList<float> spaces = new ExposedList<float>(), positions = new ExposedList<float>();
		internal ExposedList<float> world = new ExposedList<float>(), curves = new ExposedList<float>(), lengths = new ExposedList<float>();
		internal float[] segments = new float[10];

		public int Order { get { return data.order; } }
		public float Position { get { return position; } set { position = value; } }
		public float Spacing { get { return spacing; } set { spacing = value; } }
		public float RotateMix { get { return rotateMix; } set { rotateMix = value; } }
		public float TranslateMix { get { return translateMix; } set { translateMix = value; } }
		public ExposedList<Bone> Bones { get { return bones; } }
		public Slot Target { get { return target; } set { target = value; } }
		public PathConstraintData Data { get { return data; } }

		/// <summary>Creates a constraint from its setup-pose data, resolving bone/slot names against the skeleton.</summary>
		/// <exception cref="ArgumentNullException">If <paramref name="data"/> or <paramref name="skeleton"/> is null.</exception>
		public PathConstraint (PathConstraintData data, Skeleton skeleton) {
			if (data == null) throw new ArgumentNullException("data", "data cannot be null.");
			if (skeleton == null) throw new ArgumentNullException("skeleton", "skeleton cannot be null.");
			this.data = data;
			bones = new ExposedList<Bone>(data.Bones.Count);
			foreach (BoneData boneData in data.bones)
				bones.Add(skeleton.FindBone(boneData.name));
			target = skeleton.FindSlot(data.target.name);
			position = data.position;
			spacing = data.spacing;
			rotateMix = data.rotateMix;
			translateMix = data.translateMix;
		}

		public void Apply () {
			Update();
		}

		/// <summary>Applies the constraint to the constrained bones for the current pose.</summary>
		public void Update () {
			PathAttachment attachment = target.Attachment as PathAttachment;
			if (attachment == null) return;

			float rotateMix = this.rotateMix, translateMix = this.translateMix;
			bool translate = translateMix > 0, rotate = rotateMix > 0;
			if (!translate && !rotate) return;

			PathConstraintData data = this.data;
			SpacingMode spacingMode = data.spacingMode;
			bool lengthSpacing = spacingMode == SpacingMode.Length;
			RotateMode rotateMode = data.rotateMode;
			bool tangents = rotateMode == RotateMode.Tangent, scale = rotateMode == RotateMode.ChainScale;
			int boneCount = this.bones.Count, spacesCount = tangents ? boneCount : boneCount + 1;
			Bone[] bonesItems = this.bones.Items;
			ExposedList<float> spaces = this.spaces.Resize(spacesCount), lengths = null;
			float spacing = this.spacing;

			// Compute the spacing between each bone along the path.
			if (scale || lengthSpacing) {
				if (scale) lengths = this.lengths.Resize(boneCount);
				for (int i = 0, n = spacesCount - 1; i < n;) {
					Bone bone = bonesItems[i];
					float setupLength = bone.data.length, x = setupLength * bone.a, y = setupLength * bone.c;
					float length = (float)Math.Sqrt(x * x + y * y);
					if (scale) lengths.Items[i] = setupLength;
					// ++i: space for bone i+1 is derived from bone i's current world length.
					spaces.Items[++i] = (lengthSpacing ? Math.Max(0, setupLength + spacing) : spacing) * length / setupLength;
				}
			} else {
				for (int i = 1; i < spacesCount; i++)
					spaces.Items[i] = spacing;
			}

			float[] positions = ComputeWorldPositions(attachment, spacesCount, tangents,
				data.positionMode == PositionMode.Percent, spacingMode == SpacingMode.Percent);
			float boneX = positions[0], boneY = positions[1], offsetRotation = data.offsetRotation;
			bool tip;
			if (offsetRotation == 0) {
				tip = rotateMode == RotateMode.Chain;
			} else {
				tip = false;
				Bone p = target.bone;
				// Flip the offset when the target bone's world transform is mirrored (negative determinant).
				offsetRotation *= p.a * p.d - p.b * p.c > 0 ? MathUtils.DegRad : -MathUtils.DegRad;
			}

			for (int i = 0, p = 3; i < boneCount; i++, p += 3) {
				Bone bone = bonesItems[i];
				bone.worldX += (boneX - bone.worldX) * translateMix;
				bone.worldY += (boneY - bone.worldY) * translateMix;
				float x = positions[p], y = positions[p + 1], dx = x - boneX, dy = y - boneY;
				if (scale) {
					float length = lengths.Items[i];
					if (length != 0) {
						float s = ((float)Math.Sqrt(dx * dx + dy * dy) / length - 1) * rotateMix + 1;
						bone.a *= s;
						bone.c *= s;
					}
				}
				boneX = x;
				boneY = y;
				if (rotate) {
					float a = bone.a, b = bone.b, c = bone.c, d = bone.d, r, cos, sin;
					if (tangents)
						r = positions[p - 1];
					else if (spaces.Items[i + 1] == 0)
						r = positions[p + 2];
					else
						r = MathUtils.Atan2(dy, dx);
					r -= MathUtils.Atan2(c, a);
					if (tip) {
						cos = MathUtils.Cos(r);
						sin = MathUtils.Sin(r);
						float length = bone.data.length;
						boneX += (length * (cos * a - sin * c) - dx) * rotateMix;
						boneY += (length * (sin * a + cos * c) - dy) * rotateMix;
					} else {
						r += offsetRotation;
					}
					// Wrap the rotation delta into (-PI, PI] before mixing.
					// BUG FIX: a stray "//" in the flattened source commented out the
					// "r += MathUtils.PI2;" wrap (and everything after it on the line),
					// leaving the else-if branch consuming the next statement.
					if (r > MathUtils.PI)
						r -= MathUtils.PI2;
					else if (r < -MathUtils.PI)
						r += MathUtils.PI2;
					r *= rotateMix;
					cos = MathUtils.Cos(r);
					sin = MathUtils.Sin(r);
					bone.a = cos * a - sin * c;
					bone.b = cos * b - sin * d;
					bone.c = sin * a + cos * c;
					bone.d = sin * b + cos * d;
				}
				bone.appliedValid = false;
			}
		}

		/// <summary>
		/// Computes the world (x, y, rotation) triples along the path for each space, writing
		/// them into the reusable <c>positions</c> buffer. Handles both constant-speed and
		/// non-constant-speed paths, open and closed.
		/// </summary>
		float[] ComputeWorldPositions (PathAttachment path, int spacesCount, bool tangents, bool percentPosition,
			bool percentSpacing) {
			Slot target = this.target;
			float position = this.position;
			float[] spacesItems = this.spaces.Items, output = this.positions.Resize(spacesCount * 3 + 2).Items, world;
			bool closed = path.Closed;
			int verticesLength = path.WorldVerticesLength, curveCount = verticesLength / 6, prevCurve = NONE;
			float pathLength;

			if (!path.ConstantSpeed) {
				// Non-constant speed: use the precomputed per-curve lengths from the attachment.
				float[] lengths = path.Lengths;
				curveCount -= closed ? 1 : 2;
				pathLength = lengths[curveCount];
				if (percentPosition) position *= pathLength;
				if (percentSpacing) {
					for (int i = 0; i < spacesCount; i++)
						spacesItems[i] *= pathLength;
				}
				world = this.world.Resize(8).Items;
				for (int i = 0, o = 0, curve = 0; i < spacesCount; i++, o += 3) {
					float space = spacesItems[i];
					position += space;
					float p = position;
					if (closed) {
						p %= pathLength;
						if (p < 0) p += pathLength;
						curve = 0;
					} else if (p < 0) {
						if (prevCurve != BEFORE) {
							prevCurve = BEFORE;
							path.ComputeWorldVertices(target, 2, 4, world, 0);
						}
						AddBeforePosition(p, world, 0, output, o);
						continue;
					} else if (p > pathLength) {
						if (prevCurve != AFTER) {
							prevCurve = AFTER;
							path.ComputeWorldVertices(target, verticesLength - 6, 4, world, 0);
						}
						AddAfterPosition(p - pathLength, world, 0, output, o);
						continue;
					}
					// Determine curve containing position.
					for (;; curve++) {
						float length = lengths[curve];
						if (p > length) continue;
						if (curve == 0)
							p /= length;
						else {
							float prev = lengths[curve - 1];
							p = (p - prev) / (length - prev);
						}
						break;
					}
					if (curve != prevCurve) {
						prevCurve = curve;
						if (closed && curve == curveCount) {
							// Closing segment wraps from the last vertex back to the first.
							path.ComputeWorldVertices(target, verticesLength - 4, 4, world, 0);
							path.ComputeWorldVertices(target, 0, 4, world, 4);
						} else
							path.ComputeWorldVertices(target, curve * 6 + 2, 8, world, 0);
					}
					AddCurvePosition(p, world[0], world[1], world[2], world[3], world[4], world[5], world[6], world[7], output, o,
						tangents || (i > 0 && space == 0));
				}
				return output;
			}

			// World vertices.
			if (closed) {
				verticesLength += 2;
				world = this.world.Resize(verticesLength).Items;
				path.ComputeWorldVertices(target, 2, verticesLength - 4, world, 0);
				path.ComputeWorldVertices(target, 0, 2, world, verticesLength - 4);
				world[verticesLength - 2] = world[0];
				world[verticesLength - 1] = world[1];
			} else {
				curveCount--;
				verticesLength -= 4;
				world = this.world.Resize(verticesLength).Items;
				path.ComputeWorldVertices(target, 2, verticesLength, world, 0);
			}

			// Curve lengths: approximate each cubic Bezier by 4 chord samples.
			float[] curves = this.curves.Resize(curveCount).Items;
			pathLength = 0;
			float x1 = world[0], y1 = world[1], cx1 = 0, cy1 = 0, cx2 = 0, cy2 = 0, x2 = 0, y2 = 0;
			float tmpx, tmpy, dddfx, dddfy, ddfx, ddfy, dfx, dfy;
			for (int i = 0, w = 2; i < curveCount; i++, w += 6) {
				cx1 = world[w];
				cy1 = world[w + 1];
				cx2 = world[w + 2];
				cy2 = world[w + 3];
				x2 = world[w + 4];
				y2 = world[w + 5];
				tmpx = (x1 - cx1 * 2 + cx2) * 0.1875f;
				tmpy = (y1 - cy1 * 2 + cy2) * 0.1875f;
				dddfx = ((cx1 - cx2) * 3 - x1 + x2) * 0.09375f;
				dddfy = ((cy1 - cy2) * 3 - y1 + y2) * 0.09375f;
				ddfx = tmpx * 2 + dddfx;
				ddfy = tmpy * 2 + dddfy;
				dfx = (cx1 - x1) * 0.75f + tmpx + dddfx * 0.16666667f;
				dfy = (cy1 - y1) * 0.75f + tmpy + dddfy * 0.16666667f;
				pathLength += (float)Math.Sqrt(dfx * dfx + dfy * dfy);
				dfx += ddfx;
				dfy += ddfy;
				ddfx += dddfx;
				ddfy += dddfy;
				pathLength += (float)Math.Sqrt(dfx * dfx + dfy * dfy);
				dfx += ddfx;
				dfy += ddfy;
				pathLength += (float)Math.Sqrt(dfx * dfx + dfy * dfy);
				dfx += ddfx + dddfx;
				dfy += ddfy + dddfy;
				pathLength += (float)Math.Sqrt(dfx * dfx + dfy * dfy);
				curves[i] = pathLength;
				x1 = x2;
				y1 = y2;
			}
			if (percentPosition) position *= pathLength;
			if (percentSpacing) {
				for (int i = 0; i < spacesCount; i++)
					spacesItems[i] *= pathLength;
			}

			float[] segments = this.segments;
			float curveLength = 0;
			for (int i = 0, o = 0, curve = 0, segment = 0; i < spacesCount; i++, o += 3) {
				float space = spacesItems[i];
				position += space;
				float p = position;
				if (closed) {
					p %= pathLength;
					if (p < 0) p += pathLength;
					curve = 0;
				} else if (p < 0) {
					AddBeforePosition(p, world, 0, output, o);
					continue;
				} else if (p > pathLength) {
					AddAfterPosition(p - pathLength, world, verticesLength - 4, output, o);
					continue;
				}
				// Determine curve containing position.
				for (;; curve++) {
					float length = curves[curve];
					if (p > length) continue;
					if (curve == 0)
						p /= length;
					else {
						float prev = curves[curve - 1];
						p = (p - prev) / (length - prev);
					}
					break;
				}
				// Curve segment lengths: refine the containing curve into 10 segments.
				if (curve != prevCurve) {
					prevCurve = curve;
					int ii = curve * 6;
					x1 = world[ii];
					y1 = world[ii + 1];
					cx1 = world[ii + 2];
					cy1 = world[ii + 3];
					cx2 = world[ii + 4];
					cy2 = world[ii + 5];
					x2 = world[ii + 6];
					y2 = world[ii + 7];
					tmpx = (x1 - cx1 * 2 + cx2) * 0.03f;
					tmpy = (y1 - cy1 * 2 + cy2) * 0.03f;
					dddfx = ((cx1 - cx2) * 3 - x1 + x2) * 0.006f;
					dddfy = ((cy1 - cy2) * 3 - y1 + y2) * 0.006f;
					ddfx = tmpx * 2 + dddfx;
					ddfy = tmpy * 2 + dddfy;
					dfx = (cx1 - x1) * 0.3f + tmpx + dddfx * 0.16666667f;
					dfy = (cy1 - y1) * 0.3f + tmpy + dddfy * 0.16666667f;
					curveLength = (float)Math.Sqrt(dfx * dfx + dfy * dfy);
					segments[0] = curveLength;
					for (ii = 1; ii < 8; ii++) {
						dfx += ddfx;
						dfy += ddfy;
						ddfx += dddfx;
						ddfy += dddfy;
						curveLength += (float)Math.Sqrt(dfx * dfx + dfy * dfy);
						segments[ii] = curveLength;
					}
					dfx += ddfx;
					dfy += ddfy;
					curveLength += (float)Math.Sqrt(dfx * dfx + dfy * dfy);
					segments[8] = curveLength;
					dfx += ddfx + dddfx;
					dfy += ddfy + dddfy;
					curveLength += (float)Math.Sqrt(dfx * dfx + dfy * dfy);
					segments[9] = curveLength;
					segment = 0;
				}
				// Weight by segment length.
				p *= curveLength;
				for (;; segment++) {
					float length = segments[segment];
					if (p > length) continue;
					if (segment == 0)
						p /= length;
					else {
						float prev = segments[segment - 1];
						p = segment + (p - prev) / (length - prev);
					}
					break;
				}
				AddCurvePosition(p * 0.1f, x1, y1, cx1, cy1, cx2, cy2, x2, y2, output, o, tangents || (i > 0 && space == 0));
			}
			return output;
		}

		/// <summary>Extrapolates a position before the start of an open path along its initial tangent.</summary>
		static void AddBeforePosition (float p, float[] temp, int i, float[] output, int o) {
			float x1 = temp[i], y1 = temp[i + 1], dx = temp[i + 2] - x1, dy = temp[i + 3] - y1, r = MathUtils.Atan2(dy, dx);
			output[o] = x1 + p * MathUtils.Cos(r);
			output[o + 1] = y1 + p * MathUtils.Sin(r);
			output[o + 2] = r;
		}

		/// <summary>Extrapolates a position past the end of an open path along its final tangent.</summary>
		static void AddAfterPosition (float p, float[] temp, int i, float[] output, int o) {
			float x1 = temp[i + 2], y1 = temp[i + 3], dx = x1 - temp[i], dy = y1 - temp[i + 1], r = MathUtils.Atan2(dy, dx);
			output[o] = x1 + p * MathUtils.Cos(r);
			output[o + 1] = y1 + p * MathUtils.Sin(r);
			output[o + 2] = r;
		}

		/// <summary>Evaluates the cubic Bezier at parameter <paramref name="p"/>; optionally also writes the tangent angle.</summary>
		static void AddCurvePosition (float p, float x1, float y1, float cx1, float cy1, float cx2, float cy2, float x2, float y2,
			float[] output, int o, bool tangents) {
			if (p == 0 || float.IsNaN(p)) p = 0.0001f; // avoid a degenerate/NaN parameter
			float tt = p * p, ttt = tt * p, u = 1 - p, uu = u * u, uuu = uu * u;
			float ut = u * p, ut3 = ut * 3, uut3 = u * ut3, utt3 = ut3 * p;
			float x = x1 * uuu + cx1 * uut3 + cx2 * utt3 + x2 * ttt, y = y1 * uuu + cy1 * uut3 + cy2 * utt3 + y2 * ttt;
			output[o] = x;
			output[o + 1] = y;
			if (tangents)
				output[o + 2] = (float)Math.Atan2(y - (y1 * uu + cy1 * ut * 2 + cy2 * tt), x - (x1 * uu + cx1 * ut * 2 + cx2 * tt));
		}
	}
}
using System;
using System.Data;
using System.Configuration;
using System.Collections;
using System.Web;
using System.Web.Security;
using System.Web.UI;
using System.Web.UI.WebControls;
using System.Web.UI.WebControls.WebParts;
using System.Web.UI.HtmlControls;
using Vevo;
using Vevo.Domain;
using Vevo.Domain.Marketing;
using Vevo.Domain.Shipping;
using Vevo.Domain.Users;
using Vevo.Shared.Utilities;
using Vevo.Base.Domain;
using Vevo.Deluxe.Domain;
using Vevo.Deluxe.Domain.GiftRegistry;
using Vevo.Shared.DataAccess;

/// <summary>
/// User control for creating (Add mode) or updating (Edit mode) a gift registry,
/// including its event details and shipping address.
/// </summary>
public partial class Components_GiftRegistryDetail : Vevo.WebUI.International.BaseLanguageUserControl
{
    #region Private

    private enum Mode { Add, Edit };

    private Mode _mode = Mode.Add;

    // Registry being edited; only meaningful in Edit mode (null/empty in Add mode).
    private string GiftRegistryID
    {
        get { return Request.QueryString["GiftRegistryID"]; }
    }

    // Pre-fills the form with the logged-in customer's shipping address (Add mode).
    private void PopulateUserAddress()
    {
        uxEventDateCalendarPopup.SelectedDate = DateTime.Now;

        string customerID = DataAccessContext.CustomerRepository.GetIDFromUserName(
            Page.User.Identity.Name );
        Customer customer = DataAccessContext.CustomerRepository.GetOne( customerID );

        uxCompany.Text = customer.ShippingAddress.Company;
        uxAddress1.Text = customer.ShippingAddress.Address1;
        uxAddress2.Text = customer.ShippingAddress.Address2;
        uxCity.Text = customer.ShippingAddress.City;
        uxZip.Text = customer.ShippingAddress.Zip;
        uxCountryState.CurrentCountry = customer.ShippingAddress.Country;
        uxCountryState.CurrentState = customer.ShippingAddress.State;
        uxPhone.Text = customer.ShippingAddress.Phone;
        uxFax.Text = customer.ShippingAddress.Fax;
        uxResidentialDrop.SelectedValue = customer.ShippingAddress.Residential.ToString();
    }

    // Loads the existing registry into the form (Edit mode).
    private void PopulateControls()
    {
        GiftRegistry giftRegistry =
            DataAccessContextDeluxe.GiftRegistryRepository.GetOne( GiftRegistryID );

        uxEventName.Text = giftRegistry.EventName;
        uxEventDateCalendarPopup.SelectedDate = giftRegistry.EventDate;
        uxCompany.Text = giftRegistry.ShippingAddress.Company;
        uxAddress1.Text = giftRegistry.ShippingAddress.Address1;
        uxAddress2.Text = giftRegistry.ShippingAddress.Address2;
        uxCity.Text = giftRegistry.ShippingAddress.City;
        uxZip.Text = giftRegistry.ShippingAddress.Zip;
        uxCountryState.CurrentCountry = giftRegistry.ShippingAddress.Country;
        uxCountryState.CurrentState = giftRegistry.ShippingAddress.State;
        uxPhone.Text = giftRegistry.ShippingAddress.Phone;
        uxFax.Text = giftRegistry.ShippingAddress.Fax;
        uxResidentialDrop.SelectedValue = giftRegistry.ShippingAddress.Residential.ToString();
        uxHideAddressCheck.Checked = giftRegistry.HideAddress;
        uxHideEventCheck.Checked = giftRegistry.HideEvent;
        uxNotifyNewOrderCheck.Checked = giftRegistry.NotifyNewOrder;
    }

    private void Redirect()
    {
        if (IsEditMode())
            Response.Redirect( "GiftRegistryList.aspx" );
        else
            Response.Redirect( "GiftRegistryComplete.aspx" );
    }

    // Copies the form fields onto the given registry and returns it.
    // The registry owner is always the currently logged-in customer.
    private GiftRegistry SetUpGiftRegistry( GiftRegistry giftRegistry )
    {
        string customerID = DataAccessContext.CustomerRepository.GetIDFromUserName(
            Page.User.Identity.Name );
        Customer customer = DataAccessContext.CustomerRepository.GetOne( customerID );

        giftRegistry.EventName = uxEventName.Text;
        giftRegistry.EventDate = uxEventDateCalendarPopup.SelectedDate;
        giftRegistry.CustomerID = customerID;
        giftRegistry.UserName = customer.UserName;
        // First/last name come from the customer's billing address; the rest from the form.
        giftRegistry.ShippingAddress = new ShippingAddress(
            new Address(
                customer.BillingAddress.FirstName,
                customer.BillingAddress.LastName,
                uxCompany.Text,
                uxAddress1.Text,
                uxAddress2.Text,
                uxCity.Text,
                uxCountryState.CurrentState,
                uxZip.Text,
                uxCountryState.CurrentCountry,
                uxPhone.Text,
                uxFax.Text ),
            ConvertUtilities.ToBoolean( uxResidentialDrop.SelectedValue ) );
        giftRegistry.HideAddress = uxHideAddressCheck.Checked;
        giftRegistry.HideEvent = uxHideEventCheck.Checked;
        giftRegistry.NotifyNewOrder = uxNotifyNewOrderCheck.Checked;
        giftRegistry.StoreID = DataAccessContext.StoreRetriever.GetCurrentStoreID();
        return giftRegistry;
    }

    // Validates the page, saves a new registry, and redirects on success.
    private void AddNewAndRedirect()
    {
        if (Page.IsValid)
        {
            bool validateCountry, validateState;
            if (!uxCountryState.Validate( out validateCountry, out validateState ))
            {
                uxSummaryLiteral.Text = uxCountryState.FormatErrorHtml(
                    "Please correct the following errors:", validateCountry, validateState );
                return;
            }
            GiftRegistry giftRegistry = new GiftRegistry();
            giftRegistry = SetUpGiftRegistry( giftRegistry );
            // FIX: the saved registry's ID was previously stored in an unused local; removed.
            DataAccessContextDeluxe.GiftRegistryRepository.Save( giftRegistry );
            Redirect();
        }
    }

    // Validates the page, updates the existing registry, and redirects on success.
    private void EditAndRedirect()
    {
        if (Page.IsValid)
        {
            bool validateCountry, validateState;
            if (!uxCountryState.Validate( out validateCountry, out validateState ))
            {
                uxSummaryLiteral.Text = uxCountryState.FormatErrorHtml(
                    "Please correct the following errors:", validateCountry, validateState );
                return;
            }
            GiftRegistry giftRegistry =
                DataAccessContextDeluxe.GiftRegistryRepository.GetOne( GiftRegistryID );
            giftRegistry = SetUpGiftRegistry( giftRegistry );
            DataAccessContextDeluxe.GiftRegistryRepository.Save( giftRegistry );
            Redirect();
        }
    }

    // Shows the residential-status row only when the shipping policy requires it.
    private void ShowHideShippingResidential()
    {
        ShippingPolicy shippingPolicy = new ShippingPolicy();
        bool visible = shippingPolicy.RequiresResidentialStatus();
        uxResidentialLabelDiv.Visible = visible;
        uxResidentialDataDiv.Visible = visible;
    }

    #endregion

    #region Protected

    protected void Page_Load( object sender, EventArgs e )
    {
        // The gift registry feature is only available with a Deluxe license.
        if (!KeyUtilities.IsDeluxeLicense(
            DataAccessHelper.DomainRegistrationkey, DataAccessHelper.DomainName ))
        {
            this.Visible = false;
            return;
        }

        if (AdminConfig.CurrentTestMode == AdminConfig.TestMode.Test)
            uxEventDateCalendarPopup.SelectedDate = DateTime.Today.AddMonths( 1 );

        if (IsEditMode())
        {
            if (!IsPostBack)
            {
                ShowHideShippingResidential();
                PopulateControls();
                uxAddLinkButton.Visible = false;
                uxEditLinkButton.Visible = true;
            }
        }
        else
        {
            if (!IsPostBack)
            {
                PopulateUserAddress();
                // New registries default to a hidden event with order notifications on.
                uxHideEventCheck.Checked = true;
                uxNotifyNewOrderCheck.Checked = true;
            }
            uxAddLinkButton.Visible = true;
            uxEditLinkButton.Visible = false;
        }
    }

    protected void Page_PreRender( object sender, EventArgs e )
    {
    }

    protected void uxAddLinkButton_Click( object sender, EventArgs e )
    {
        AddNewAndRedirect();
    }

    protected void uxEditLinkButton_Click( object sender, EventArgs e )
    {
        EditAndRedirect();
    }

    #endregion

    #region Public Methods

    public bool IsEditMode()
    {
        return (_mode == Mode.Edit);
    }

    public void SetEditMode()
    {
        _mode = Mode.Edit;
    }

    #endregion
}
/*
   Copyright 2011 - 2022 Adrian Popescu

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
*/

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.Xml;
using System.Xml.Serialization;
using Newtonsoft.Json;
using Redmine.Net.Api.Extensions;
using Redmine.Net.Api.Internals;

namespace Redmine.Net.Api.Types
{
    /// <summary>
    /// A Redmine custom field definition, deserializable from both XML and JSON payloads.
    /// </summary>
    [DebuggerDisplay("{" + nameof(DebuggerDisplay) + ",nq}")]
    [XmlRoot(RedmineKeys.CUSTOM_FIELD)]
    public sealed class CustomField : IdentifiableName, IEquatable<CustomField>
    {
        #region Properties
        /// <summary>Type of object this field is attached to (e.g. issue, project).</summary>
        public string CustomizedType { get; internal set; }

        /// <summary>Field format (e.g. string, list, date).</summary>
        public string FieldFormat { get; internal set; }

        /// <summary>Validation regular expression, if any.</summary>
        public string Regexp { get; internal set; }

        /// <summary>Minimum allowed value length, if constrained.</summary>
        public int? MinLength { get; internal set; }

        /// <summary>Maximum allowed value length, if constrained.</summary>
        public int? MaxLength { get; internal set; }

        /// <summary>Whether a value is mandatory.</summary>
        public bool IsRequired { get; internal set; }

        /// <summary>Whether the field can be used as a filter.</summary>
        public bool IsFilter { get; internal set; }

        /// <summary>Whether the field is searchable.</summary>
        public bool Searchable { get; internal set; }

        /// <summary>Whether multiple values may be selected.</summary>
        public bool Multiple { get; internal set; }

        /// <summary>Default value as a string.</summary>
        public string DefaultValue { get; internal set; }

        /// <summary>Whether the field is visible.</summary>
        public bool Visible { get; internal set; }

        /// <summary>Allowed values for list-like formats; null when absent from the payload.</summary>
        public IList<CustomFieldPossibleValue> PossibleValues { get; internal set; }

        /// <summary>Trackers the field applies to; null when absent from the payload.</summary>
        public IList<TrackerCustomField> Trackers { get; internal set; }

        /// <summary>Roles the field is restricted to; null when absent from the payload.</summary>
        public IList<CustomFieldRole> Roles { get; internal set; }
        #endregion

        #region Implementation of IXmlSerializable
        /// <summary>
        /// Populates this instance from an XML reader positioned on a custom_field element.
        /// Unknown elements are skipped.
        /// </summary>
        /// <param name="reader">The XML reader to consume.</param>
        public override void ReadXml(XmlReader reader)
        {
            reader.Read();
            while (!reader.EOF)
            {
                if (reader.IsEmptyElement && !reader.HasAttributes)
                {
                    reader.Read();
                    continue;
                }

                switch (reader.Name)
                {
                    case RedmineKeys.ID: Id = reader.ReadElementContentAsInt(); break;
                    case RedmineKeys.CUSTOMIZED_TYPE: CustomizedType = reader.ReadElementContentAsString(); break;
                    case RedmineKeys.DEFAULT_VALUE: DefaultValue = reader.ReadElementContentAsString(); break;
                    case RedmineKeys.FIELD_FORMAT: FieldFormat = reader.ReadElementContentAsString(); break;
                    case RedmineKeys.IS_FILTER: IsFilter = reader.ReadElementContentAsBoolean(); break;
                    case RedmineKeys.IS_REQUIRED: IsRequired = reader.ReadElementContentAsBoolean(); break;
                    case RedmineKeys.MAX_LENGTH: MaxLength = reader.ReadElementContentAsNullableInt(); break;
                    case RedmineKeys.MIN_LENGTH: MinLength = reader.ReadElementContentAsNullableInt(); break;
                    case RedmineKeys.MULTIPLE: Multiple = reader.ReadElementContentAsBoolean(); break;
                    case RedmineKeys.NAME: Name = reader.ReadElementContentAsString(); break;
                    case RedmineKeys.POSSIBLE_VALUES: PossibleValues = reader.ReadElementContentAsCollection<CustomFieldPossibleValue>(); break;
                    case RedmineKeys.REGEXP: Regexp = reader.ReadElementContentAsString(); break;
                    case RedmineKeys.ROLES: Roles = reader.ReadElementContentAsCollection<CustomFieldRole>(); break;
                    case RedmineKeys.SEARCHABLE: Searchable = reader.ReadElementContentAsBoolean(); break;
                    case RedmineKeys.TRACKERS: Trackers = reader.ReadElementContentAsCollection<TrackerCustomField>(); break;
                    case RedmineKeys.VISIBLE: Visible = reader.ReadElementContentAsBoolean(); break;
                    default: reader.Read(); break;
                }
            }
        }
        #endregion

        #region Implementation of IJsonSerialization
        /// <summary>
        /// Populates this instance from a JSON reader positioned inside a custom_field object.
        /// Stops at the matching end-object token; unknown properties are skipped.
        /// </summary>
        /// <param name="reader">The JSON reader to consume.</param>
        public override void ReadJson(JsonReader reader)
        {
            while (reader.Read())
            {
                if (reader.TokenType == JsonToken.EndObject)
                {
                    return;
                }

                if (reader.TokenType != JsonToken.PropertyName)
                {
                    continue;
                }

                switch (reader.Value)
                {
                    case RedmineKeys.ID: Id = reader.ReadAsInt(); break;
                    case RedmineKeys.CUSTOMIZED_TYPE: CustomizedType = reader.ReadAsString(); break;
                    case RedmineKeys.DEFAULT_VALUE: DefaultValue = reader.ReadAsString(); break;
                    case RedmineKeys.FIELD_FORMAT: FieldFormat = reader.ReadAsString(); break;
                    case RedmineKeys.IS_FILTER: IsFilter = reader.ReadAsBool(); break;
                    case RedmineKeys.IS_REQUIRED: IsRequired = reader.ReadAsBool(); break;
                    case RedmineKeys.MAX_LENGTH: MaxLength = reader.ReadAsInt32(); break;
                    case RedmineKeys.MIN_LENGTH: MinLength = reader.ReadAsInt32(); break;
                    case RedmineKeys.MULTIPLE: Multiple = reader.ReadAsBool(); break;
                    case RedmineKeys.NAME: Name = reader.ReadAsString(); break;
                    case RedmineKeys.POSSIBLE_VALUES: PossibleValues = reader.ReadAsCollection<CustomFieldPossibleValue>(); break;
                    case RedmineKeys.REGEXP: Regexp = reader.ReadAsString(); break;
                    case RedmineKeys.ROLES: Roles = reader.ReadAsCollection<CustomFieldRole>(); break;
                    case RedmineKeys.SEARCHABLE: Searchable = reader.ReadAsBool(); break;
                    case RedmineKeys.TRACKERS: Trackers = reader.ReadAsCollection<TrackerCustomField>(); break;
                    case RedmineKeys.VISIBLE: Visible = reader.ReadAsBool(); break;
                    default: reader.Read(); break;
                }
            }
        }
        #endregion

        #region Implementation of IEquatable<CustomField>
        /// <summary>Value equality over all deserialized properties.</summary>
        /// <param name="other">The custom field to compare with.</param>
        /// <returns>true if all properties are equal; otherwise false.</returns>
        public bool Equals(CustomField other)
        {
            if (other == null) return false;
            return Id == other.Id
                && IsFilter == other.IsFilter
                && IsRequired == other.IsRequired
                && Multiple == other.Multiple
                && Searchable == other.Searchable
                && Visible == other.Visible
                && string.Equals(CustomizedType, other.CustomizedType, StringComparison.OrdinalIgnoreCase)
                && string.Equals(DefaultValue, other.DefaultValue, StringComparison.OrdinalIgnoreCase)
                && string.Equals(FieldFormat, other.FieldFormat, StringComparison.OrdinalIgnoreCase)
                && MaxLength == other.MaxLength
                && MinLength == other.MinLength
                && string.Equals(Name, other.Name, StringComparison.OrdinalIgnoreCase)
                && string.Equals(Regexp, other.Regexp, StringComparison.OrdinalIgnoreCase)
                // FIX: the collection properties are null when absent from the payload;
                // the previous direct .Equals() calls threw NullReferenceException.
                // static object.Equals handles null on either side.
                && Equals(PossibleValues, other.PossibleValues)
                && Equals(Roles, other.Roles)
                && Equals(Trackers, other.Trackers);
        }

        /// <summary>Overridden object equality delegating to <see cref="Equals(CustomField)"/>.</summary>
        /// <param name="obj">The object to compare with.</param>
        /// <returns>true if equal; otherwise false.</returns>
        public override bool Equals(object obj)
        {
            if (ReferenceEquals(null, obj)) return false;
            if (ReferenceEquals(this, obj)) return true;
            if (obj.GetType() != GetType()) return false;
            return Equals(obj as CustomField);
        }

        /// <summary>Hash code consistent with <see cref="Equals(CustomField)"/>.</summary>
        /// <returns>The combined hash code.</returns>
        public override int GetHashCode()
        {
            unchecked
            {
                var hashCode = 13;
                hashCode = HashCodeHelper.GetHashCode(Id, hashCode);
                hashCode = HashCodeHelper.GetHashCode(IsFilter, hashCode);
                hashCode = HashCodeHelper.GetHashCode(IsRequired, hashCode);
                hashCode = HashCodeHelper.GetHashCode(Multiple, hashCode);
                hashCode = HashCodeHelper.GetHashCode(Searchable, hashCode);
                hashCode = HashCodeHelper.GetHashCode(Visible, hashCode);
                hashCode = HashCodeHelper.GetHashCode(CustomizedType, hashCode);
                hashCode = HashCodeHelper.GetHashCode(DefaultValue, hashCode);
                hashCode = HashCodeHelper.GetHashCode(FieldFormat, hashCode);
                hashCode = HashCodeHelper.GetHashCode(MaxLength, hashCode);
                hashCode = HashCodeHelper.GetHashCode(MinLength, hashCode);
                hashCode = HashCodeHelper.GetHashCode(Name, hashCode);
                hashCode = HashCodeHelper.GetHashCode(Regexp, hashCode);
                hashCode = HashCodeHelper.GetHashCode(PossibleValues, hashCode);
                hashCode = HashCodeHelper.GetHashCode(Roles, hashCode);
                hashCode = HashCodeHelper.GetHashCode(Trackers, hashCode);
                return hashCode;
            }
        }
        #endregion

        // Debugger-only rendering of all properties.
        private string DebuggerDisplay => $@"[{nameof(CustomField)}: {ToString()} , CustomizedType={CustomizedType} , FieldFormat={FieldFormat} , Regexp={Regexp} , MinLength={MinLength?.ToString(CultureInfo.InvariantCulture)} , MaxLength={MaxLength?.ToString(CultureInfo.InvariantCulture)} , IsRequired={IsRequired.ToString(CultureInfo.InvariantCulture)} , IsFilter={IsFilter.ToString(CultureInfo.InvariantCulture)} , Searchable={Searchable.ToString(CultureInfo.InvariantCulture)} , Multiple={Multiple.ToString(CultureInfo.InvariantCulture)} , DefaultValue={DefaultValue} , Visible={Visible.ToString(CultureInfo.InvariantCulture)} , PossibleValues={PossibleValues.Dump()} , Trackers={Trackers.Dump()} , Roles={Roles.Dump()}]";
    }
}
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Composition;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.Text;
using Microsoft.Extensions.Logging;
using OmniSharp.FileWatching;
using OmniSharp.Roslyn;
using OmniSharp.Roslyn.Utilities;
using OmniSharp.Utilities;

namespace OmniSharp
{
    /// <summary>
    /// The Roslyn <see cref="Workspace"/> used by OmniSharp. Exposes mutation helpers for
    /// projects/documents and tracks "miscellaneous" files (files not belonging to any
    /// known project) in per-language throwaway projects.
    /// </summary>
    [Export, Shared]
    public class OmniSharpWorkspace : Workspace
    {
        public bool Initialized { get; set; }

        public BufferManager BufferManager { get; private set; }

        private readonly ILogger<OmniSharpWorkspace> _logger;

        // One synthetic project per language holding miscellaneous (orphan) files.
        private readonly ConcurrentDictionary<string, ProjectInfo> miscDocumentsProjectInfos = new ConcurrentDictionary<string, ProjectInfo>();

        [ImportingConstructor]
        public OmniSharpWorkspace(HostServicesAggregator aggregator, ILoggerFactory loggerFactory, IFileSystemWatcher fileSystemWatcher)
            : base(aggregator.CreateHostServices(), "Custom")
        {
            BufferManager = new BufferManager(this, fileSystemWatcher);
            _logger = loggerFactory.CreateLogger<OmniSharpWorkspace>();
        }

        public override bool CanOpenDocuments => true;

        /// <summary>Marks the document as open, backed by its current text container.</summary>
        public override void OpenDocument(DocumentId documentId, bool activate = true)
        {
            var doc = this.CurrentSolution.GetDocument(documentId);
            if (doc != null)
            {
                var text = doc.GetTextAsync(CancellationToken.None).WaitAndGetResult(CancellationToken.None);
                this.OnDocumentOpened(documentId, text.Container, activate);
            }
        }

        /// <summary>Marks the document as closed, snapshotting its current text/version as the loader.</summary>
        public override void CloseDocument(DocumentId documentId)
        {
            var doc = this.CurrentSolution.GetDocument(documentId);
            if (doc != null)
            {
                var text = doc.GetTextAsync(CancellationToken.None).WaitAndGetResult(CancellationToken.None);
                var version = doc.GetTextVersionAsync(CancellationToken.None).WaitAndGetResult(CancellationToken.None);
                var loader = TextLoader.From(TextAndVersion.Create(text, version, doc.FilePath));
                this.OnDocumentClosed(documentId, loader);
            }
        }

        public void AddProject(ProjectInfo projectInfo)
        {
            OnProjectAdded(projectInfo);
        }

        public void AddProjectReference(ProjectId projectId, ProjectReference projectReference)
        {
            OnProjectReferenceAdded(projectId, projectReference);
        }

        public void RemoveProjectReference(ProjectId projectId, ProjectReference projectReference)
        {
            OnProjectReferenceRemoved(projectId, projectReference);
        }

        public void AddMetadataReference(ProjectId projectId, MetadataReference metadataReference)
        {
            OnMetadataReferenceAdded(projectId, metadataReference);
        }

        public void RemoveMetadataReference(ProjectId projectId, MetadataReference metadataReference)
        {
            OnMetadataReferenceRemoved(projectId, metadataReference);
        }

        public void AddDocument(DocumentInfo documentInfo)
        {
            // if the file has already been added as a misc file,
            // because of a possible race condition between the updates of the project systems,
            // remove the misc file and add the document as required
            TryRemoveMiscellaneousDocument(documentInfo.FilePath);
            OnDocumentAdded(documentInfo);
        }

        /// <summary>
        /// Adds the file to the per-language miscellaneous-files project unless the
        /// workspace already knows it. Returns the new document id, or null if skipped.
        /// </summary>
        public DocumentId TryAddMiscellaneousDocument(string filePath, string language)
        {
            if (GetDocument(filePath) != null)
                return null; //if the workspace already knows about this document then it is not a miscellaneous document

            var projectInfo = miscDocumentsProjectInfos.GetOrAdd(language, (lang) => CreateMiscFilesProject(lang));
            var documentId = AddDocument(projectInfo.Id, filePath);
            _logger.LogInformation($"Miscellaneous file: {filePath} added to workspace");
            return documentId;
        }

        /// <summary>Removes the file from its miscellaneous project, if that is where it lives.</summary>
        public bool TryRemoveMiscellaneousDocument(string filePath)
        {
            var documentId = GetDocumentId(filePath);
            if (documentId == null || !IsMiscellaneousDocument(documentId))
                return false;

            RemoveDocument(documentId);
            _logger.LogDebug($"Miscellaneous file: {filePath} removed from workspace");
            return true;
        }

        /// <summary>
        /// Moves any miscellaneous documents that belong (by directory) to the given
        /// project into that project.
        /// </summary>
        public void TryPromoteMiscellaneousDocumentsToProject(Project project)
        {
            if (project == null)
            {
                throw new ArgumentNullException(nameof(project));
            }

            var miscProjectInfos = miscDocumentsProjectInfos.Values.ToArray();
            for (var i = 0; i < miscProjectInfos.Length; i++)
            {
                var miscProject = CurrentSolution.GetProject(miscProjectInfos[i].Id);
                var documents = miscProject.Documents.ToArray();

                for (var j = 0; j < documents.Length; j++)
                {
                    var document = documents[j];
                    if (FileBelongsToProject(document.FilePath, project))
                    {
                        var textLoader = new DelegatingTextLoader(document);
                        var documentId = DocumentId.CreateNewId(project.Id);
                        var documentInfo = DocumentInfo.Create(
                            documentId,
                            document.FilePath,
                            filePath: document.FilePath,
                            loader: textLoader);

                        // This transitively will remove the document from the misc project.
                        AddDocument(documentInfo);
                    }
                }
            }
        }

        // Creates (and registers) the synthetic project that hosts miscellaneous files
        // for a single language.
        private ProjectInfo CreateMiscFilesProject(string language)
        {
            string assemblyName = Guid.NewGuid().ToString("N");
            var projectInfo = ProjectInfo.Create(
                id: ProjectId.CreateNewId(),
                version: VersionStamp.Create(),
                name: "MiscellaneousFiles.csproj",
                metadataReferences: DefaultMetadataReferenceHelper.GetDefaultMetadataReferenceLocations()
                    .Select(loc => MetadataReference.CreateFromFile(loc)),
                assemblyName: assemblyName,
                language: language);

            AddProject(projectInfo);
            return projectInfo;
        }

        public DocumentId AddDocument(ProjectId projectId, string filePath, SourceCodeKind sourceCodeKind = SourceCodeKind.Regular)
        {
            var documentId = DocumentId.CreateNewId(projectId);
            this.AddDocument(documentId, projectId, filePath, sourceCodeKind);
            return documentId;
        }

        public DocumentId AddDocument(DocumentId documentId, ProjectId projectId, string filePath, SourceCodeKind sourceCodeKind = SourceCodeKind.Regular)
        {
            var loader = new OmniSharpTextLoader(filePath);
            var documentInfo = DocumentInfo.Create(documentId, filePath, filePath: filePath, loader: loader, sourceCodeKind: sourceCodeKind);
            this.AddDocument(documentInfo);
            return documentId;
        }

        public void RemoveDocument(DocumentId documentId)
        {
            OnDocumentRemoved(documentId);
        }

        public void RemoveProject(ProjectId projectId)
        {
            OnProjectRemoved(projectId);
        }

        public void SetCompilationOptions(ProjectId projectId, CompilationOptions options)
        {
            OnCompilationOptionsChanged(projectId, options);
        }

        public void SetParseOptions(ProjectId projectId, ParseOptions parseOptions)
        {
            OnParseOptionsChanged(projectId, parseOptions);
        }

        public void OnDocumentChanged(DocumentId documentId, SourceText text)
        {
            OnDocumentTextChanged(documentId, text, PreservationMode.PreserveIdentity);
        }

        public DocumentId GetDocumentId(string filePath)
        {
            var documentIds = CurrentSolution.GetDocumentIdsWithFilePath(filePath);
            return documentIds.FirstOrDefault();
        }

        public IEnumerable<Document> GetDocuments(string filePath)
        {
            return CurrentSolution
                .GetDocumentIdsWithFilePath(filePath)
                .Select(id => CurrentSolution.GetDocument(id));
        }

        public Document GetDocument(string filePath)
        {
            if (string.IsNullOrWhiteSpace(filePath)) return null;

            var documentId = GetDocumentId(filePath);
            if (documentId == null)
            {
                return null;
            }

            return CurrentSolution.GetDocument(documentId);
        }

        public override bool CanApplyChange(ApplyChangesKind feature)
        {
            return true;
        }

        // True when fileName lives in (or below) the project's directory.
        internal bool FileBelongsToProject(string fileName, Project project)
        {
            if (string.IsNullOrWhiteSpace(project.FilePath) || string.IsNullOrWhiteSpace(fileName))
            {
                return false;
            }

            var fileDirectory = new FileInfo(fileName).Directory;
            var projectPath = project.FilePath;
            var projectDirectory = new FileInfo(projectPath).Directory.FullName;

            while (fileDirectory != null)
            {
                if (string.Equals(fileDirectory.FullName, projectDirectory, StringComparison.OrdinalIgnoreCase))
                {
                    return true;
                }

                fileDirectory = fileDirectory.Parent;
            }

            return false;
        }

        protected override void ApplyDocumentRemoved(DocumentId documentId)
        {
            var document = this.CurrentSolution.GetDocument(documentId);
            if (document != null)
            {
                DeleteDocumentFile(document.Id, document.FilePath);
                this.OnDocumentRemoved(documentId);
            }
        }

        private void DeleteDocumentFile(DocumentId id, string fullPath)
        {
            try
            {
                File.Delete(fullPath);
            }
            catch (IOException e)
            {
                LogDeletionException(e, fullPath);
            }
            catch (NotSupportedException e)
            {
                LogDeletionException(e, fullPath);
            }
            catch (UnauthorizedAccessException e)
            {
                LogDeletionException(e, fullPath);
            }
        }

        private void LogDeletionException(Exception e, string filePath)
        {
            _logger.LogError(e, $"Error deleting file {filePath}");
        }

        protected override void ApplyDocumentAdded(DocumentInfo info, SourceText text)
        {
            var project = this.CurrentSolution.GetProject(info.Id.ProjectId);
            var fullPath = info.FilePath;

            this.OnDocumentAdded(info);

            if (text != null)
            {
                this.SaveDocumentText(info.Id, fullPath, text, text.Encoding ?? Encoding.UTF8);
            }
        }

        private void SaveDocumentText(DocumentId id, string fullPath, SourceText newText, Encoding encoding)
        {
            try
            {
                var dir = Path.GetDirectoryName(fullPath);
                if (!Directory.Exists(dir))
                {
                    Directory.CreateDirectory(dir);
                }

                using (var writer = new StreamWriter(fullPath, append: false, encoding: encoding))
                {
                    newText.Write(writer);
                }
            }
            catch (IOException e)
            {
                _logger.LogError(e, $"Error saving document {fullPath}");
            }
        }

        /// <summary>Miscellaneous documents lack project context, so semantic diagnostics are unreliable for them.</summary>
        public bool IsCapableOfSemanticDiagnostics(Document document)
        {
            return !IsMiscellaneousDocument(document.Id);
        }

        private bool IsMiscellaneousDocument(DocumentId documentId)
        {
            // FIX (CA1827): use Any(predicate) instead of Where(...).Any().
            return miscDocumentsProjectInfos.Any(p => p.Value.Id == documentId.ProjectId);
        }

        // Text loader that serves the live text/version of an existing document,
        // used when promoting a misc document into a real project.
        private class DelegatingTextLoader : TextLoader
        {
            private readonly Document _fromDocument;

            public DelegatingTextLoader(Document fromDocument)
            {
                _fromDocument = fromDocument ?? throw new ArgumentNullException(nameof(fromDocument));
            }

            public override async Task<TextAndVersion> LoadTextAndVersionAsync(
                Workspace workspace,
                DocumentId documentId,
                CancellationToken cancellationToken)
            {
                var sourceText = await _fromDocument.GetTextAsync();
                var version = await _fromDocument.GetTextVersionAsync();
                var textAndVersion = TextAndVersion.Create(sourceText, version);
                return textAndVersion;
            }
        }
    }
}
// <copyright file="Segment.cs" company="Fubar Development Junker">
// Copyright (c) 2016 Fubar Development Junker. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
// </copyright>

using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;

using BeanIO.Internal.Util;

namespace BeanIO.Internal.Parser
{
    /// <summary>
    /// A parser component that groups child parsers (fields and nested segments) and
    /// optionally binds them to a single <see cref="IProperty"/>.
    /// </summary>
    internal class Segment : ParserComponent
    {
        // Per-context scratch list of required children that were absent from the
        // current record; populated and cleared on every Unmarshal call.
        private readonly ParserLocal<IList<IParser>> _missing = new ParserLocal<IList<IParser>>(() => new List<IParser>());

        private bool _optional;

        private int _size;

        /// <summary>
        /// Gets or sets the <see cref="IProperty"/> mapped to this component, or null if there is no property mapping.
        /// </summary>
        public IProperty Property { get; set; }

        /// <summary>
        /// Gets or sets a value indicating whether the existence is known to be true when unmarshal is called
        /// </summary>
        public bool IsExistencePredetermined { get; set; }

        /// <summary>
        /// Gets or sets a value indicating whether the segment repeats
        /// </summary>
        public bool IsRepeating { get; set; }

        /// <summary>
        /// Gets the size of a single occurrence of this element, which is used to offset
        /// field positions for repeating segments and fields.
        /// </summary>
        /// <remarks>
        /// The concept of size is dependent on the stream format. The size of an element in a fixed
        /// length stream format is determined by the length of the element in characters, while other
        /// stream formats calculate size based on the number of fields. Some stream formats,
        /// such as XML, may ignore size settings.
        /// </remarks>
        public override int? Size => _size;

        /// <summary>
        /// Gets or sets a value indicating whether this parser or any descendant of this parser is used to identify
        /// a record during unmarshalling.
        /// </summary>
        public override bool IsIdentifier { get; set; }

        /// <summary>
        /// Gets a value indicating whether this node is optional, i.e. need not exist during unmarshalling.
        /// </summary>
        public override bool IsOptional => _optional;

        /// <summary>
        /// Returns whether this parser and its children match a record being unmarshalled.
        /// </summary>
        /// <param name="context">The <see cref="UnmarshallingContext"/></param>
        /// <returns>true if matched, false otherwise</returns>
        public override bool Matches(UnmarshallingContext context)
        {
            // A non-identifying segment matches anything; otherwise every child must match.
            return !IsIdentifier || Children.Cast<IParser>().All(x => x.Matches(context));
        }

        /// <summary>
        /// Unmarshals a record
        /// </summary>
        /// <param name="context">The <see cref="UnmarshallingContext"/></param>
        /// <returns>true if this component was present in the unmarshalled record, or false otherwise</returns>
        public override bool Unmarshal(UnmarshallingContext context)
        {
            var missing = _missing.Get(context);

            // unmarshals all children and determine existence,
            // if a child exists, the segment must exist
            // existence may also be predetermined in any tag based format (such as XML)
            var exists = IsExistencePredetermined;
            foreach (var parser in Children.Cast<IParser>())
            {
                if (parser.Unmarshal(context))
                {
                    exists = true;
                }
                else if (!parser.IsOptional)
                {
                    missing.Add(parser);
                }
            }

            // validate all required children are present if either the segment
            // exists or the segment itself is required
            if (exists || !IsOptional)
            {
                // validate there are no missing children
                if (missing.Count == 0)
                {
                    // if the segment valid and bound to a property, create the property value
                    Property?.CreateValue(context);
                }
                else
                {
                    // otherwise create appropriate field errors for missing children
                    foreach (var parser in missing)
                    {
                        context.AddFieldError(parser.Name, null, "minOccurs", 1);
                    }
                }
            }

            // reset the shared per-context scratch list for the next record
            missing.Clear();

            return exists;
        }

        /// <summary>
        /// Marshals a record
        /// </summary>
        /// <param name="context">The <see cref="MarshallingContext"/></param>
        /// <returns>whether a value was marshalled</returns>
        public override bool Marshal(MarshallingContext context)
        {
            // skip an optional, non-repeating segment that has nothing to write
            if (IsOptional && !IsRepeating)
            {
                if (!HasContent(context))
                    return false;
            }

            foreach (var parser in Children.Cast<IParser>())
            {
                parser.Marshal(context);
            }

            return true;
        }

        /// <summary>
        /// Returns whether this parser or any of its descendant have content for marshalling.
        /// </summary>
        /// <param name="context">The <see cref="ParsingContext"/></param>
        /// <returns>true if there is content for marshalling, false otherwise</returns>
        public override bool HasContent(ParsingContext context)
        {
            if (Property != null)
                return !ReferenceEquals(Property.GetValue(context), Value.Missing);
            return Children.Cast<IParser>().Any(x => x.HasContent(context));
        }

        /// <summary>
        /// Clears the current property value.
        /// </summary>
        /// <param name="context">The <see cref="ParsingContext"/></param>
        public override void ClearValue(ParsingContext context)
        {
            Property?.ClearValue(context);
        }

        /// <summary>
        /// Sets the property value for marshaling.
        /// </summary>
        /// <param name="context">The <see cref="ParsingContext"/></param>
        /// <param name="value">the property value</param>
        public override void SetValue(ParsingContext context, object value)
        {
            Property?.SetValue(context, value);
        }

        /// <summary>
        /// Returns the unmarshalled property value.
        /// </summary>
        /// <param name="context">The <see cref="ParsingContext"/></param>
        /// <returns>the property value</returns>
        public override object GetValue(ParsingContext context)
        {
            return Property?.GetValue(context);
        }

        /// <summary>
        /// Called by a stream to register variables stored in the parsing context.
        /// </summary>
        /// <remarks>
        /// This method should be overridden by subclasses that need to register
        /// one or more parser context variables.
        /// </remarks>
        /// <param name="locals">set of local variables</param>
        public override void RegisterLocals(ISet<IParserLocal> locals)
        {
            ((Component)Property)?.RegisterLocals(locals);

            // only recurse into the base once; Add returning false means we already registered
            if (locals.Add(_missing))
                base.RegisterLocals(locals);
        }

        /// <summary>
        /// Sets the size of a single occurrence of this element, which is used to offset
        /// field positions for repeating segments and fields.
        /// </summary>
        /// <param name="size">the size of a single occurrence of this element</param>
        public void SetSize(int size)
        {
            _size = size;
        }

        /// <summary>
        /// Sets a value indicating whether this node is optional, i.e. need not exist during unmarshalling.
        /// </summary>
        /// <param name="optional">a value indicating whether this node is optional during unmarshalling</param>
        public void SetOptional(bool optional)
        {
            _optional = optional;
        }

        /// <summary>
        /// Called by <see cref="TreeNode{T}.ToString"/> to append node parameters to the output
        /// </summary>
        /// <param name="s">The output to append</param>
        protected override void ToParamString(StringBuilder s)
        {
            base.ToParamString(s);
            s
                .AppendFormat(", size={0}", Size == null || Size == int.MaxValue ? "unbounded" : Size.ToString())
                .AppendFormat(", {0}", DebugUtil.FormatOption("rid", IsIdentifier))
                .AppendFormat(", {0}", DebugUtil.FormatOption("repeating", IsRepeating))
                .AppendFormat(", {0}", DebugUtil.FormatOption("optional", IsOptional));
            if (Property != null)
            {
                if (Property is Field)
                {
                    s.AppendFormat(", property=${0}", Property.Name);
                }
                else
                {
                    s.AppendFormat(", property={0}", Property);
                }
            }
        }
    }
}
// ****************************************************************
// Copyright 2009, Charlie Poole
// This is free software licensed under the NUnit license. You may
// obtain a copy of the license at http://nunit.org
// ****************************************************************

using System;
using System.Collections;

namespace NUnit.Framework.Constraints
{
    /// <summary>
    /// ConstraintExpression represents a compound constraint in the
    /// process of being constructed from a series of syntactic elements.
    ///
    /// Individual elements are appended to the expression as they are
    /// recognized. Once an actual Constraint is appended, the expression
    /// returns a resolvable Constraint.
    /// </summary>
    public class ConstraintExpression : ConstraintExpressionBase
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="T:ConstraintExpression"/> class.
        /// </summary>
        public ConstraintExpression() { }

        /// <summary>
        /// Initializes a new instance of the <see cref="T:ConstraintExpression"/>
        /// class passing in a ConstraintBuilder, which may be pre-populated.
        /// </summary>
        /// <param name="builder">The builder.</param>
        public ConstraintExpression(ConstraintBuilder builder)
            : base( builder ) { }

        #region Not

        /// <summary>
        /// Returns a ConstraintExpression that negates any
        /// following constraint.
        /// </summary>
        public ConstraintExpression Not
        {
            get { return this.Append(new NotOperator()); }
        }

        /// <summary>
        /// Returns a ConstraintExpression that negates any
        /// following constraint.
        /// </summary>
        public ConstraintExpression No
        {
            get { return this.Append(new NotOperator()); }
        }

        #endregion

        #region All

        /// <summary>
        /// Returns a ConstraintExpression, which will apply
        /// the following constraint to all members of a collection,
        /// succeeding if all of them succeed.
        /// </summary>
        public ConstraintExpression All
        {
            get { return this.Append(new AllOperator()); }
        }

        #endregion

        #region Some

        /// <summary>
        /// Returns a ConstraintExpression, which will apply
        /// the following constraint to all members of a collection,
        /// succeeding if at least one of them succeeds.
        /// </summary>
        public ConstraintExpression Some
        {
            get { return this.Append(new SomeOperator()); }
        }

        #endregion

        #region None

        /// <summary>
        /// Returns a ConstraintExpression, which will apply
        /// the following constraint to all members of a collection,
        /// succeeding if all of them fail.
        /// </summary>
        public ConstraintExpression None
        {
            get { return this.Append(new NoneOperator()); }
        }

        #endregion

        #region Exactly(n)

        /// <summary>
        /// Returns a ConstraintExpression, which will apply
        /// the following constraint to all members of a collection,
        /// succeeding only if a specified number of them succeed.
        /// </summary>
        public ConstraintExpression Exactly(int expectedCount)
        {
            return this.Append(new ExactCountOperator(expectedCount));
        }

        #endregion

        #region Property

        /// <summary>
        /// Returns a new PropertyConstraintExpression, which will either
        /// test for the existence of the named property on the object
        /// being tested or apply any following constraint to that property.
        /// </summary>
        public ResolvableConstraintExpression Property(string name)
        {
            return this.Append(new PropOperator(name));
        }

        #endregion

        #region Length

        /// <summary>
        /// Returns a new ConstraintExpression, which will apply the following
        /// constraint to the Length property of the object being tested.
        /// </summary>
        public ResolvableConstraintExpression Length
        {
            get { return Property("Length"); }
        }

        #endregion

        #region Count

        /// <summary>
        /// Returns a new ConstraintExpression, which will apply the following
        /// constraint to the Count property of the object being tested.
        /// </summary>
        public ResolvableConstraintExpression Count
        {
            get { return Property("Count"); }
        }

        #endregion

        #region Message

        /// <summary>
        /// Returns a new ConstraintExpression, which will apply the following
        /// constraint to the Message property of the object being tested.
        /// </summary>
        public ResolvableConstraintExpression Message
        {
            get { return Property("Message"); }
        }

        #endregion

        #region InnerException

        /// <summary>
        /// Returns a new ConstraintExpression, which will apply the following
        /// constraint to the InnerException property of the object being tested.
        /// </summary>
        public ResolvableConstraintExpression InnerException
        {
            get { return Property("InnerException"); }
        }

        #endregion

        #region With

        /// <summary>
        /// With is currently a NOP - reserved for future use.
        /// </summary>
        public ConstraintExpression With
        {
            get { return this.Append(new WithOperator()); }
        }

        #endregion

        #region Matches

        /// <summary>
        /// Returns the constraint provided as an argument - used to allow
        /// custom constraints to easily participate in the syntax.
        /// </summary>
        public Constraint Matches(Constraint constraint)
        {
            return this.Append(constraint);
        }

#if CLR_2_0 || CLR_4_0
        /// <summary>
        /// Returns the constraint provided as an argument - used to allow
        /// custom constraints to easily participate in the syntax.
        /// </summary>
        public Constraint Matches<T>(Predicate<T> predicate)
        {
            return this.Append(new PredicateConstraint<T>(predicate));
        }
#endif

        #endregion

        #region Null

        /// <summary>
        /// Returns a constraint that tests for null
        /// </summary>
        public NullConstraint Null
        {
            get { return (NullConstraint)this.Append(new NullConstraint()); }
        }

        #endregion

        #region True

        /// <summary>
        /// Returns a constraint that tests for True
        /// </summary>
        public TrueConstraint True
        {
            get { return (TrueConstraint)this.Append(new TrueConstraint()); }
        }

        #endregion

        #region False

        /// <summary>
        /// Returns a constraint that tests for False
        /// </summary>
        public FalseConstraint False
        {
            get { return (FalseConstraint)this.Append(new FalseConstraint()); }
        }

        #endregion

        #region Positive

        /// <summary>
        /// Returns a constraint that tests for a positive value
        /// </summary>
        public GreaterThanConstraint Positive
        {
            get { return (GreaterThanConstraint)this.Append(new GreaterThanConstraint(0)); }
        }

        #endregion

        #region Negative

        /// <summary>
        /// Returns a constraint that tests for a negative value
        /// </summary>
        public LessThanConstraint Negative
        {
            get { return (LessThanConstraint)this.Append(new LessThanConstraint(0)); }
        }

        #endregion

        #region NaN

        /// <summary>
        /// Returns a constraint that tests for NaN
        /// </summary>
        public NaNConstraint NaN
        {
            get { return (NaNConstraint)this.Append(new NaNConstraint()); }
        }

        #endregion

        #region Empty

        /// <summary>
        /// Returns a constraint that tests for empty
        /// </summary>
        public EmptyConstraint Empty
        {
            get { return (EmptyConstraint)this.Append(new EmptyConstraint()); }
        }

        #endregion

        #region Unique

        /// <summary>
        /// Returns a constraint that tests whether a collection
        /// contains all unique items.
        /// </summary>
        public UniqueItemsConstraint Unique
        {
            get { return (UniqueItemsConstraint)this.Append(new UniqueItemsConstraint()); }
        }

        #endregion

        #region EqualTo

        /// <summary>
        /// Returns a constraint that tests two items for equality
        /// </summary>
        public EqualConstraint EqualTo(object expected)
        {
            return (EqualConstraint)this.Append(new EqualConstraint(expected));
        }

        #endregion

        #region SameAs

        /// <summary>
        /// Returns a constraint that tests that two references are the same object
        /// </summary>
        public SameAsConstraint SameAs(object expected)
        {
            return (SameAsConstraint)this.Append(new SameAsConstraint(expected));
        }

        #endregion

        #region GreaterThan

        /// <summary>
        /// Returns a constraint that tests whether the
        /// actual value is greater than the supplied argument
        /// </summary>
        public GreaterThanConstraint GreaterThan(object expected)
        {
            return (GreaterThanConstraint)this.Append(new GreaterThanConstraint(expected));
        }

        #endregion

        #region GreaterThanOrEqualTo

        /// <summary>
        /// Returns a constraint that tests whether the
        /// actual value is greater than or equal to the supplied argument
        /// </summary>
        public GreaterThanOrEqualConstraint GreaterThanOrEqualTo(object expected)
        {
            return (GreaterThanOrEqualConstraint)this.Append(new GreaterThanOrEqualConstraint(expected));
        }

        /// <summary>
        /// Returns a constraint that tests whether the
        /// actual value is greater than or equal to the supplied argument
        /// </summary>
        public GreaterThanOrEqualConstraint AtLeast(object expected)
        {
            return (GreaterThanOrEqualConstraint)this.Append(new GreaterThanOrEqualConstraint(expected));
        }

        #endregion

        #region LessThan

        /// <summary>
        /// Returns a constraint that tests whether the
        /// actual value is less than the supplied argument
        /// </summary>
        public LessThanConstraint LessThan(object expected)
        {
            return (LessThanConstraint)this.Append(new LessThanConstraint(expected));
        }

        #endregion

        #region LessThanOrEqualTo

        /// <summary>
        /// Returns a constraint that tests whether the
        /// actual value is less than or equal to the supplied argument
        /// </summary>
        public LessThanOrEqualConstraint LessThanOrEqualTo(object expected)
        {
            return (LessThanOrEqualConstraint)this.Append(new LessThanOrEqualConstraint(expected));
        }

        /// <summary>
        /// Returns a constraint that tests whether the
        /// actual value is less than or equal to the supplied argument
        /// </summary>
        public LessThanOrEqualConstraint AtMost(object expected)
        {
            return (LessThanOrEqualConstraint)this.Append(new LessThanOrEqualConstraint(expected));
        }

        #endregion

        #region TypeOf

        /// <summary>
        /// Returns a constraint that tests whether the actual
        /// value is of the exact type supplied as an argument.
        /// </summary>
        public ExactTypeConstraint TypeOf(Type expectedType)
        {
            return (ExactTypeConstraint)this.Append(new ExactTypeConstraint(expectedType));
        }

#if CLR_2_0 || CLR_4_0
        /// <summary>
        /// Returns a constraint that tests whether the actual
        /// value is of the exact type supplied as an argument.
        /// </summary>
        public ExactTypeConstraint TypeOf<T>()
        {
            return (ExactTypeConstraint)this.Append(new ExactTypeConstraint(typeof(T)));
        }
#endif

        #endregion

        #region InstanceOf

        /// <summary>
        /// Returns a constraint that tests whether the actual value
        /// is of the type supplied as an argument or a derived type.
        /// </summary>
        public InstanceOfTypeConstraint InstanceOf(Type expectedType)
        {
            return (InstanceOfTypeConstraint)this.Append(new InstanceOfTypeConstraint(expectedType));
        }

#if CLR_2_0 || CLR_4_0
        /// <summary>
        /// Returns a constraint that tests whether the actual value
        /// is of the type supplied as an argument or a derived type.
        /// </summary>
        public InstanceOfTypeConstraint InstanceOf<T>()
        {
            return (InstanceOfTypeConstraint)this.Append(new InstanceOfTypeConstraint(typeof(T)));
        }
#endif

        /// <summary>
        /// Returns a constraint that tests whether the actual value
        /// is of the type supplied as an argument or a derived type.
        /// </summary>
        [Obsolete("Use InstanceOf(expectedType)")]
        public InstanceOfTypeConstraint InstanceOfType(Type expectedType)
        {
            return (InstanceOfTypeConstraint)this.Append(new InstanceOfTypeConstraint(expectedType));
        }

#if CLR_2_0 || CLR_4_0
        /// <summary>
        /// Returns a constraint that tests whether the actual value
        /// is of the type supplied as an argument or a derived type.
        /// </summary>
        [Obsolete("Use InstanceOf<T>()")]
        public InstanceOfTypeConstraint InstanceOfType<T>()
        {
            return (InstanceOfTypeConstraint)this.Append(new InstanceOfTypeConstraint(typeof(T)));
        }
#endif

        #endregion

        #region AssignableFrom

        /// <summary>
        /// Returns a constraint that tests whether the actual value
        /// is assignable from the type supplied as an argument.
        /// </summary>
        public AssignableFromConstraint AssignableFrom(Type expectedType)
        {
            return (AssignableFromConstraint)this.Append(new AssignableFromConstraint(expectedType));
        }

#if CLR_2_0 || CLR_4_0
        /// <summary>
        /// Returns a constraint that tests whether the actual value
        /// is assignable from the type supplied as an argument.
        /// </summary>
        public AssignableFromConstraint AssignableFrom<T>()
        {
            return (AssignableFromConstraint)this.Append(new AssignableFromConstraint(typeof(T)));
        }
#endif

        #endregion

        #region AssignableTo

        /// <summary>
        /// Returns a constraint that tests whether the actual value
        /// is assignable to the type supplied as an argument.
        /// </summary>
        public AssignableToConstraint AssignableTo(Type expectedType)
        {
            return (AssignableToConstraint)this.Append(new AssignableToConstraint(expectedType));
        }

#if CLR_2_0 || CLR_4_0
        /// <summary>
        /// Returns a constraint that tests whether the actual value
        /// is assignable to the type supplied as an argument.
        /// </summary>
        public AssignableToConstraint AssignableTo<T>()
        {
            return (AssignableToConstraint)this.Append(new AssignableToConstraint(typeof(T)));
        }
#endif

        #endregion

        #region EquivalentTo

        /// <summary>
        /// Returns a constraint that tests whether the actual value
        /// is a collection containing the same elements as the
        /// collection supplied as an argument.
        /// </summary>
        public CollectionEquivalentConstraint EquivalentTo(IEnumerable expected)
        {
            return (CollectionEquivalentConstraint)this.Append(new CollectionEquivalentConstraint(expected));
        }

        #endregion

        #region SubsetOf

        /// <summary>
        /// Returns a constraint that tests whether the actual value
        /// is a subset of the collection supplied as an argument.
        /// </summary>
        public CollectionSubsetConstraint SubsetOf(IEnumerable expected)
        {
            return (CollectionSubsetConstraint)this.Append(new CollectionSubsetConstraint(expected));
        }

        #endregion

        #region Ordered

        /// <summary>
        /// Returns a constraint that tests whether a collection is ordered
        /// </summary>
        public CollectionOrderedConstraint Ordered
        {
            get { return (CollectionOrderedConstraint)this.Append(new CollectionOrderedConstraint()); }
        }

        #endregion

        #region Member

        /// <summary>
        /// Returns a new CollectionContainsConstraint checking for the
        /// presence of a particular object in the collection.
        /// </summary>
        public CollectionContainsConstraint Member(object expected)
        {
            return (CollectionContainsConstraint)this.Append(new CollectionContainsConstraint(expected));
        }

        /// <summary>
        /// Returns a new CollectionContainsConstraint checking for the
        /// presence of a particular object in the collection.
        /// </summary>
        public CollectionContainsConstraint Contains(object expected)
        {
            return (CollectionContainsConstraint)this.Append(new CollectionContainsConstraint(expected));
        }

        #endregion

        #region Contains

        /// <summary>
        /// Returns a new ContainsConstraint. This constraint
        /// will, in turn, make use of the appropriate second-level
        /// constraint, depending on the type of the actual argument.
        /// This overload is only used if the item sought is a string,
        /// since any other type implies that we are looking for a
        /// collection member.
        /// </summary>
        public ContainsConstraint Contains(string expected)
        {
            return (ContainsConstraint)this.Append(new ContainsConstraint(expected));
        }

        #endregion

        #region StringContaining

        /// <summary>
        /// Returns a constraint that succeeds if the actual
        /// value contains the substring supplied as an argument.
        /// </summary>
        public SubstringConstraint StringContaining(string expected)
        {
            return (SubstringConstraint)this.Append(new SubstringConstraint(expected));
        }

        /// <summary>
        /// Returns a constraint that succeeds if the actual
        /// value contains the substring supplied as an argument.
        /// </summary>
        public SubstringConstraint ContainsSubstring(string expected)
        {
            return (SubstringConstraint)this.Append(new SubstringConstraint(expected));
        }

        #endregion

        #region StartsWith

        /// <summary>
        /// Returns a constraint that succeeds if the actual
        /// value starts with the substring supplied as an argument.
        /// </summary>
        public StartsWithConstraint StartsWith(string expected)
        {
            return (StartsWithConstraint)this.Append(new StartsWithConstraint(expected));
        }

        /// <summary>
        /// Returns a constraint that succeeds if the actual
        /// value starts with the substring supplied as an argument.
        /// </summary>
        public StartsWithConstraint StringStarting(string expected)
        {
            return (StartsWithConstraint)this.Append(new StartsWithConstraint(expected));
        }

        #endregion

        #region EndsWith

        /// <summary>
        /// Returns a constraint that succeeds if the actual
        /// value ends with the substring supplied as an argument.
        /// </summary>
        public EndsWithConstraint EndsWith(string expected)
        {
            return (EndsWithConstraint)this.Append(new EndsWithConstraint(expected));
        }

        /// <summary>
        /// Returns a constraint that succeeds if the actual
        /// value ends with the substring supplied as an argument.
        /// </summary>
        public EndsWithConstraint StringEnding(string expected)
        {
            return (EndsWithConstraint)this.Append(new EndsWithConstraint(expected));
        }

        #endregion

        #region Matches

        /// <summary>
        /// Returns a constraint that succeeds if the actual
        /// value matches the Regex pattern supplied as an argument.
        /// </summary>
        public RegexConstraint Matches(string pattern)
        {
            return (RegexConstraint)this.Append(new RegexConstraint(pattern));
        }

        /// <summary>
        /// Returns a constraint that succeeds if the actual
        /// value matches the Regex pattern supplied as an argument.
        /// </summary>
        public RegexConstraint StringMatching(string pattern)
        {
            return (RegexConstraint)this.Append(new RegexConstraint(pattern));
        }

        #endregion

        #region InRange

#if !CLR_2_0 && !CLR_4_0
        /// <summary>
        /// Returns a constraint that tests whether the actual value falls
        /// within a specified range.
        /// </summary>
        public RangeConstraint InRange(IComparable from, IComparable to)
        {
            return (RangeConstraint)this.Append(new RangeConstraint(from, to));
        }
#endif

        #endregion

        #region InRange<T>

#if CLR_2_0 || CLR_4_0
        /// <summary>
        /// Returns a constraint that tests whether the actual value falls
        /// within a specified range.
        /// </summary>
        public RangeConstraint<T> InRange<T>(T from, T to) where T : IComparable<T>
        {
            return (RangeConstraint<T>)this.Append(new RangeConstraint<T>(from, to));
        }
#endif

        #endregion
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

/*============================================================
**
** Class:  EqualityComparer<T>
**
===========================================================*/

using System;
using System.Collections;

namespace System.Collections.Generic
{
    /// <summary>
    /// Base class for generic equality comparers. <see cref="Default"/> lazily selects a
    /// type-specific comparer for the common primitive types, falling back to
    /// <c>LastResortEqualityComparer&lt;T&gt;</c> (which defers to <c>Object.Equals</c>).
    /// </summary>
    public abstract class EqualityComparer<T> : IEqualityComparer, IEqualityComparer<T>
    {
        protected EqualityComparer()
        {
        }

        public static EqualityComparer<T> Default
        {
            get
            {
                // Lazy initialization; the branch structure below is deliberate and must
                // not be restructured (the compiler relies on it — see notes).
                if (_default == null)
                {
                    object comparer;

                    // NUTC compiler is able to statically evaluate the conditions and only put the necessary branches in final binary code,
                    // even casting to EqualityComparer<T> can be removed.
                    //
                    // For example: for Byte, the code generated is
                    //     if (_default == null) _default = new EqualityComparerForByte();
                    //     return _default;
                    //
                    // For classes, due to generic sharing, the code generated is:
                    //     if (_default == null) { if (handle == typeof(string).RuntimeTypeHandle) comparer = new EqualityComparerForString(); else comparer = new LastResortEqualityComparer<T>; ... }

                    if (typeof(T) == typeof(SByte))
                        comparer = new EqualityComparerForSByte();
                    else if (typeof(T) == typeof(Byte))
                        comparer = new EqualityComparerForByte();
                    else if (typeof(T) == typeof(Int16))
                        comparer = new EqualityComparerForInt16();
                    else if (typeof(T) == typeof(UInt16))
                        comparer = new EqualityComparerForUInt16();
                    else if (typeof(T) == typeof(Int32))
                        comparer = new EqualityComparerForInt32();
                    else if (typeof(T) == typeof(UInt32))
                        comparer = new EqualityComparerForUInt32();
                    else if (typeof(T) == typeof(Int64))
                        comparer = new EqualityComparerForInt64();
                    else if (typeof(T) == typeof(UInt64))
                        comparer = new EqualityComparerForUInt64();
                    else if (typeof(T) == typeof(IntPtr))
                        comparer = new EqualityComparerForIntPtr();
                    else if (typeof(T) == typeof(UIntPtr))
                        comparer = new EqualityComparerForUIntPtr();
                    else if (typeof(T) == typeof(Single))
                        comparer = new EqualityComparerForSingle();
                    else if (typeof(T) == typeof(Double))
                        comparer = new EqualityComparerForDouble();
                    else if (typeof(T) == typeof(Decimal))
                        comparer = new EqualityComparerForDecimal();
                    else if (typeof(T) == typeof(String))
                        comparer = new EqualityComparerForString();
                    else
                        comparer = new LastResortEqualityComparer<T>();

                    _default = (EqualityComparer<T>)comparer;
                }

                return _default;
            }
        }

        // volatile: the lazy init in Default may race across threads; a stale read only
        // causes a benign duplicate allocation.
        private static volatile EqualityComparer<T> _default;

        public abstract bool Equals(T x, T y);
        public abstract int GetHashCode(T obj);

        int IEqualityComparer.GetHashCode(object obj)
        {
            if (obj == null)
                return 0;
            if (obj is T)
                return GetHashCode((T)obj);
            throw new ArgumentException(SR.Argument_InvalidArgumentForComparison);
        }

        bool IEqualityComparer.Equals(object x, object y)
        {
            if (x == y)
                return true;
            if (x == null || y == null)
                return false;
            if ((x is T) && (y is T))
                return Equals((T)x, (T)y);
            throw new ArgumentException(SR.Argument_InvalidArgumentForComparison);
        }
    }

    //
    // ProjectN compatibility notes:
    //
    //    Unlike the full desktop, we make no attempt to use the IEquatable<T> interface on T. Because we can't generate
    //    code at runtime, we derive no performance benefit from using the type-specific Equals(). We can't even
    //    perform the check for IEquatable<> at the time the type-specific constructor is created (due to the removal of Type.IsAssignableFrom).
    //    We would thus be incurring an interface cast check on each call to Equals() for no performance gain.
    //
    //    This should not cause a compat problem unless some type implements an IEquatable.Equals() that is semantically
    //    incompatible with Object.Equals(). That goes specifically against the documented guidelines (and would in any case,
    //    break any hashcode-dependent collection.)
    //

    // Fallback comparer: null-safe delegation to Object.Equals/Object.GetHashCode.
    internal sealed class LastResortEqualityComparer<T> : EqualityComparer<T>
    {
        public LastResortEqualityComparer()
        {
        }

        public sealed override bool Equals(T x, T y)
        {
            if (x == null)
                return y == null;
            if (y == null)
                return false;
            return x.Equals(y);
        }

        public sealed override int GetHashCode(T obj)
        {
            if (obj == null)
                return 0;
            return obj.GetHashCode();
        }
    }

    // Type-specific comparers below use == (or Equals where == has the wrong NaN
    // semantics) so the NUTC compiler can select a branch-free implementation.

    internal sealed class EqualityComparerForSByte : EqualityComparer<SByte>
    {
        public override bool Equals(SByte x, SByte y)
        {
            return x == y;
        }

        public override int GetHashCode(SByte x)
        {
            return x.GetHashCode();
        }
    }

    internal sealed class EqualityComparerForByte : EqualityComparer<Byte>
    {
        public override bool Equals(Byte x, Byte y)
        {
            return x == y;
        }

        public override int GetHashCode(Byte x)
        {
            return x.GetHashCode();
        }
    }

    internal sealed class EqualityComparerForInt16 : EqualityComparer<Int16>
    {
        public override bool Equals(Int16 x, Int16 y)
        {
            return x == y;
        }

        public override int GetHashCode(Int16 x)
        {
            return x.GetHashCode();
        }
    }

    internal sealed class EqualityComparerForUInt16 : EqualityComparer<UInt16>
    {
        public override bool Equals(UInt16 x, UInt16 y)
        {
            return x == y;
        }

        public override int GetHashCode(UInt16 x)
        {
            return x.GetHashCode();
        }
    }

    internal sealed class EqualityComparerForInt32 : EqualityComparer<Int32>
    {
        public override bool Equals(Int32 x, Int32 y)
        {
            return x == y;
        }

        public override int GetHashCode(Int32 x)
        {
            return x.GetHashCode();
        }
    }

    internal sealed class EqualityComparerForUInt32 : EqualityComparer<UInt32>
    {
        public override bool Equals(UInt32 x, UInt32 y)
        {
            return x == y;
        }

        public override int GetHashCode(UInt32 x)
        {
            return x.GetHashCode();
        }
    }

    internal sealed class EqualityComparerForInt64 : EqualityComparer<Int64>
    {
        public override bool Equals(Int64 x, Int64 y)
        {
            return x == y;
        }

        public override int GetHashCode(Int64 x)
        {
            return x.GetHashCode();
        }
    }

    internal sealed class EqualityComparerForUInt64 : EqualityComparer<UInt64>
    {
        public override bool Equals(UInt64 x, UInt64 y)
        {
            return x == y;
        }

        public override int GetHashCode(UInt64 x)
        {
            return x.GetHashCode();
        }
    }

    internal sealed class EqualityComparerForIntPtr : EqualityComparer<IntPtr>
    {
        public override bool Equals(IntPtr x, IntPtr y)
        {
            return x == y;
        }

        public override int GetHashCode(IntPtr x)
        {
            return x.GetHashCode();
        }
    }

    internal sealed class EqualityComparerForUIntPtr : EqualityComparer<UIntPtr>
    {
        public override bool Equals(UIntPtr x, UIntPtr y)
        {
            return x == y;
        }

        public override int GetHashCode(UIntPtr x)
        {
            return x.GetHashCode();
        }
    }

    internal sealed class EqualityComparerForSingle : EqualityComparer<Single>
    {
        public override bool Equals(Single x, Single y)
        {
            // == has the wrong semantic for NaN for Single
            return x.Equals(y);
        }

        public override int GetHashCode(Single x)
        {
            return x.GetHashCode();
        }
    }

    internal sealed class EqualityComparerForDouble : EqualityComparer<Double>
    {
        public override bool Equals(Double x, Double y)
        {
            // == has the wrong semantic for NaN for Double
            return x.Equals(y);
        }

        public override int GetHashCode(Double x)
        {
            return x.GetHashCode();
        }
    }

    internal sealed class EqualityComparerForDecimal : EqualityComparer<Decimal>
    {
        public override bool Equals(Decimal x, Decimal y)
        {
            return x == y;
        }

        public override int GetHashCode(Decimal x)
        {
            return x.GetHashCode();
        }
    }

    internal sealed class EqualityComparerForString : EqualityComparer<String>
    {
        public override bool Equals(String x, String y)
        {
            // String's operator== performs ordinal value comparison (null-safe).
            return x == y;
        }

        public override int GetHashCode(String x)
        {
            if (x == null)
                return 0;
            return x.GetHashCode();
        }
    }
}
/////////////////////////////////////////////////////////////////////////////////////////////// // // This File is Part of the CallButler Open Source PBX (http://www.codeplex.com/callbutler // // Copyright (c) 2005-2008, Jim Heising // All rights reserved. // // Redistribution and use in source and binary forms, with or without modification, // are permitted provided that the following conditions are met: // // * Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // // * Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation and/or // other materials provided with the distribution. // // * Neither the name of Jim Heising nor the names of its contributors may be // used to endorse or promote products derived from this software without specific prior // written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND // ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED // WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. // IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, // INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT // NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, // WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE // POSSIBILITY OF SUCH DAMAGE. 
// ///////////////////////////////////////////////////////////////////////////////////////////////
/*=====================================================================
  File:      PipeClientChannel.cs
=====================================================================*/

using System;
using System.Collections;
using System.IO;
using System.Reflection;
using System.Runtime.Remoting;
using System.Runtime.Remoting.Messaging;
using System.Runtime.Remoting.Channels;
using System.Threading;

namespace NET.Remoting
{
    /// <summary>
    /// Client side of a named-pipe .NET Remoting channel ("pipe://" URLs).
    /// Builds the client sink chain (formatter + pipe transport) and hands out
    /// message sinks for outgoing remoting calls.
    /// </summary>
    public class PipeClientChannel: IChannelSender
    {
        // pipe:// prefix
        private const String ChannelScheme = "pipe";
        private const int DefaultChannelPriority = 1;
        private const String DefaultChannelName = "Pipe";

        private int m_ChannelPriority;      // priority reported to the remoting infrastructure
        private String m_ChannelName;       // channel name reported to the remoting infrastructure
        private String m_pipeName = null;   // pipe name taken from the "pipe" config property
        private IClientChannelSinkProvider _clientSinkProvider; // client sink chain provider

        /// <summary>Creates a channel with default name/priority and a binary formatter.</summary>
        public PipeClientChannel()
        {
            InitDefaults();
            InitProperties(null);
            InitProviders(null);
        }

        /// <summary>Creates a channel from config properties and an optional custom provider chain.</summary>
        /// <param name="properties">Supported keys: "name", "priority", "pipe".</param>
        /// <param name="clientProviderChain">Formatter chain; a BinaryClientFormatterSinkProvider is used when null.</param>
        public PipeClientChannel( IDictionary properties, IClientChannelSinkProvider clientProviderChain )
        {
            InitDefaults();
            InitProperties(properties);
            InitProviders(clientProviderChain);
        }

        internal void InitDefaults()
        {
            m_ChannelPriority = DefaultChannelPriority;
            m_ChannelName = DefaultChannelName;
        }

        // Reads the supported configuration entries; unrecognized keys are silently ignored.
        internal void InitProperties(IDictionary properties)
        {
            if(properties != null)
            {
                foreach (DictionaryEntry entry in properties)
                {
                    switch ((String) entry.Key)
                    {
                        case "name":
                            m_ChannelName = (String) entry.Value;
                            break;
                        case "priority":
                            m_ChannelPriority = Convert.ToInt32(entry.Value);
                            break;
                        case "pipe":
                            m_pipeName = (String) entry.Value;
                            break;
                    }
                }
            }
        }

        // Ensures a formatter provider exists, then appends the pipe transport
        // provider to the very end of the provider chain.
        void InitProviders(IClientChannelSinkProvider clientProviderChain)
        {
            _clientSinkProvider = clientProviderChain;
            if(_clientSinkProvider == null)
            {
                _clientSinkProvider = new BinaryClientFormatterSinkProvider();
            }
            IClientChannelSinkProvider tempSinkProvider = _clientSinkProvider;
            // Move to the end of provider list
            while (tempSinkProvider.Next != null)
                tempSinkProvider = tempSinkProvider.Next;
            // Append transport sink provider to end
            tempSinkProvider.Next = new PipeClientTransportSinkProvider();
        }

        // IChannel
        public String ChannelName
        {
            get { return(m_ChannelName); }
        }

        public int ChannelPriority
        {
            get { return(m_ChannelPriority); }
        }

        /// <summary>Splits a pipe URL into channel URI (return value) and object URI (out param).
        /// Delegates to PipeConnection.Parse; returns null for non-pipe URLs.</summary>
        public String Parse(String url, out string uri)
        {
            return(PipeConnection.Parse(url, out uri));
        }

        // IChannelSender
        /// <summary>
        /// Creates a message sink for the given URL or channel data. Returns null
        /// when the target is not addressed by this channel (not a pipe URL).
        /// </summary>
        public IMessageSink CreateMessageSink(String url, Object data, out String objuri)
        {
            DBG.Info(null, "CreateMessageSink: url = " + url);
            // Set the out parameters
            objuri = null;
            String chanuri = null;
            if (url != null) // Is this a well known object?
            {
                /*
                String urlCompare = String.ToLower(url);
                // Starts with pipe:// ?
                if (urlCompare.StartsWith(ChannelScheme) == false)
                {
                    return null;
                }
                */
                // Parse returns null if this is not one of the pipe channel url's
                chanuri = Parse(url, out objuri);
            }
            else if(data != null)
            {
                // No URL: try to extract one from the channel data store of a
                // marshalled object reference.
                IChannelDataStore cds = data as IChannelDataStore;
                if(cds != null)
                {
                    DBG.Info(null, "ChannelUris[0] = " + cds.ChannelUris[0]);
                    //Console.WriteLine("Channel Uri {0}", cds.ChannelUris[0]);
                    chanuri = Parse(cds.ChannelUris[0], out objuri);
                    DBG.Info(null, "CreateMessageSink: chanuri = " + chanuri + ", objuri = " + objuri);
                    if(chanuri != null)
                        url = cds.ChannelUris[0];
                }
            }
            if (null != chanuri)
            {
                if (url == null)
                    url = chanuri;
                DBG.Info(null, "CreateMessageSink: delegating w/ url = " + url);
                //Console.WriteLine("CreateMessageSink: delegating w/ url = {0}", url);
                // Let the formatter chain build the sink; our transport provider sits at its end.
                return (IMessageSink)_clientSinkProvider.CreateSink(this, url, data);
            }
            DBG.Info(null, "CreateMessageSink: ignoring request...");
            return null;
        } // CreateMessageSink

        public void Dispose()
        {
            // Nothing to do
        }
    }

    /// <summary>Terminal provider that creates the pipe transport sink for a URL.</summary>
    internal class PipeClientTransportSinkProvider : IClientChannelSinkProvider
    {
        internal PipeClientTransportSinkProvider()
        {
        }

        public IClientChannelSink CreateSink(IChannelSender channel, String url, Object data)
        {
            return new PipeClientTransportSink(url);
        }

        // Transport sink is always last in the chain: Next is fixed at null.
        public IClientChannelSinkProvider Next
        {
            get
            {
                return null;
            }
            set
            {
                throw new NotSupportedException();
            }
        }
    }

    /// <summary>
    /// Transport sink: writes serialized request messages to a named pipe and
    /// reads the responses, pooling pipe connections per pipe name.
    /// </summary>
    internal class PipeClientTransportSink : IClientChannelSink
    {
        private String _pipeName;                                // channel URI parsed from the sink URL
        private PipeConnectionPool _pipeConnectionPool=null;     // shared pool for this pipe name
        private int _defaultRetryCount = 2;                      // write attempts for seekable request streams
        private WaitCallback callback;                           // cached delegate for async responses

        internal PipeClientTransportSink(String url)
        {
            String objuri = null;
            String chanuri = PipeConnection.Parse(url, out objuri);
            DBG.Info(null, "PipeClientTransportSink: creating pipe on uri: " + chanuri);
            //Console.WriteLine("PipeClientTransportSink {0}", chanuri);
            _pipeName = chanuri;
            _pipeConnectionPool = PipeConnectionPoolManager.LookupPool(_pipeName);
            callback = new WaitCallback(this.ReceiveCallback);
        }

        public IDictionary Properties
        {
            get { return(null); }
        }

        /// <summary>Sends the request; for two-way calls the response is read on a thread-pool thread.</summary>
        public void AsyncProcessRequest(IClientChannelSinkStack stack, IMessage msg, ITransportHeaders headers, Stream stream)
        {
            DBG.Info(null, "Async: Send the message across the pipe");
            // NOTE(review): SendWithRetry can return null after exhausting
            // retries; the two-way path below passes it on unchecked and relies
            // on ReceiveCallback's catch to dispatch the resulting failure.
            PipeConnection _pipe = SendWithRetry(msg, headers, stream);
            IMethodCallMessage mcm = (IMethodCallMessage)msg;
            MethodBase methodBase = mcm.MethodBase;
            bool oneway = RemotingServices.IsOneWay(methodBase);
            if (oneway)
            {
                // Fire-and-forget: no response expected, recycle the connection now.
                if (_pipeConnectionPool != null)
                {
                    _pipeConnectionPool.ReturnToPool(_pipe);
                }
                _pipe = null;
            }
            else
            {
                PipeConnectionCookie cookie = new PipeConnectionCookie();
                cookie.pipe = _pipe;
                cookie.sinkStack = stack;
                //TODO Switch to use Completion port
                ThreadPool.QueueUserWorkItem(callback, cookie);
            }
        }

        // Thread-pool worker: reads the response for an async two-way call and
        // pushes it back up the sink stack.
        private void ReceiveCallback(Object state)
        {
            //Console.WriteLine("ReceiveCallback TID {0}", Thread.CurrentThread.GetHashCode());
            PipeConnectionCookie cookie = (PipeConnectionCookie)state;
            PipeConnection _pipe = cookie.pipe;
            IClientChannelSinkStack sinkStack = cookie.sinkStack;
            try
            {
                // Read response
                //
                _pipe.BeginReadMessage();
                ITransportHeaders responseHeaders = _pipe.ReadHeaders();
                // Force full exception details through to the client.
                responseHeaders["__CustomErrorsEnabled"] = false;
                Stream responseStream = _pipe.ReadStream();
                _pipe.EndReadMessage();
                if (_pipeConnectionPool != null)
                {
                    _pipeConnectionPool.ReturnToPool(_pipe);
                }
                _pipe = null;
                sinkStack.AsyncProcessResponse(responseHeaders, responseStream);
            }
            catch (Exception e)
            {
                try
                {
                    // Route the failure to the caller's async reply sink.
                    if (sinkStack != null)
                        sinkStack.DispatchException(e);
                }
                catch(Exception )
                {
                    // Fatal Error.. ignore
                }
            }
        } // ReceiveCallback

        public void AsyncProcessResponse(IClientResponseChannelSinkStack stack, Object obj, ITransportHeaders headers, Stream stream)
        {
            // Transport sink is the chain terminus; responses never flow down to it.
            throw new NotSupportedException();
        }

        public Stream GetRequestStream(IMessage msg, ITransportHeaders headers)
        {
            // we don't do any serialization here.
            return(null);
        }

        /// <summary>Synchronous request/response over the pipe.</summary>
        public void ProcessMessage(IMessage msg, ITransportHeaders reqHead, Stream reqStm,
                                   out ITransportHeaders respHead, out Stream respStm)
        {
            DBG.Info(null, "Being asked to process the serialized message!");
            //Console.WriteLine("ProcessMessage TID {0}", Thread.CurrentThread.GetHashCode());
            // Send the message across the pipe.
            PipeConnection _pipe = SendWithRetry(msg, reqHead, reqStm);
            respHead = null;
            respStm = null;
            // Read response
            if (_pipe != null)
            {
                _pipe.BeginReadMessage();
                respHead = _pipe.ReadHeaders();
                respHead["__CustomErrorsEnabled"] = false;
                respStm = _pipe.ReadStream();
                _pipe.EndReadMessage();
                if (_pipeConnectionPool != null)
                {
                    _pipeConnectionPool.ReturnToPool(_pipe);
                }
                _pipe = null;
            }
        }

        IClientChannelSink IClientChannelSink.NextChannelSink
        {
            get { return(null); }
        }

        /// <summary>
        /// Writes the request, retrying on PipeIOException with a fresh connection.
        /// Returns the connection that holds the pending response, or null when all
        /// attempts failed.
        /// </summary>
        // NOTE(review): on total failure the last PipeIOException is swallowed and
        // null is returned — the caller sees a missing response, not the cause.
        // NOTE(review): for a non-seekable request stream (tryCount forced to 1),
        // the catch still assigns reqStm.Position = -1, which would itself throw
        // on such a stream — confirm intended.
        public PipeConnection SendWithRetry(IMessage msg, ITransportHeaders reqHead, Stream reqStm)
        {
            IMethodCallMessage mcm = (IMethodCallMessage)msg;
            String uri = mcm.Uri;
            PipeConnection _pipe = null;
            int tryCount = _defaultRetryCount;
            long reqStmPosition = -1;
            // Only seekable streams can be rewound for a retry.
            if (reqStm.CanSeek == false)
                tryCount=1;
            else
                reqStmPosition = reqStm.Position;
            while ( tryCount>0)
            {
                try
                {
                    if (_pipeConnectionPool != null)
                    {
                        DBG.Info(null, "Look in pipe connection in pool");
                        _pipe = (PipeConnection)_pipeConnectionPool.Obtain();
                    }
                    // otherwise create a new connection
                    if (_pipe == null)
                    {
                        _pipe = new PipeConnection(_pipeName, false, IntPtr.Zero);
                    }
                    //Send with Retry
                    _pipe.BeginWriteMessage();
                    _pipe.WriteHeaders(uri, reqHead);
                    _pipe.Write(reqStm);
                    _pipe.EndWriteMessage();
                    tryCount=0;
                }
                catch(PipeIOException pe)
                {
                    pe=pe; // self-assignment only silences the unused-variable warning
                    // Pooled connection may be stale (server restarted): drop it,
                    // rewind the request and try again with a fresh connection.
                    if (_pipe != null)
                    {
                        _pipe.Dispose();
                        _pipe = null;
                    }
                    tryCount--;
                    reqStm.Position = reqStmPosition;
                }
            }
            return _pipe;
        }
    }
}
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: ContactUpdate.proto
//
// NOTE(review): generated code — executable tokens are preserved exactly;
// only comments have been added. To change behavior, edit ContactUpdate.proto
// and regenerate.
#pragma warning disable 1591, 0612, 3021
#region Designer generated code

using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;

namespace TeamNote.Protocol {

  /// <summary>Holder for reflection information generated from ContactUpdate.proto</summary>
  public static partial class ContactUpdateReflection {

    #region Descriptor
    /// <summary>File descriptor for ContactUpdate.proto</summary>
    public static pbr::FileDescriptor Descriptor {
      get { return descriptor; }
    }
    private static pbr::FileDescriptor descriptor;

    static ContactUpdateReflection() {
      // Base64-encoded serialized FileDescriptorProto of ContactUpdate.proto.
      byte[] descriptorData = global::System.Convert.FromBase64String(
          string.Concat(
            "ChNDb250YWN0VXBkYXRlLnByb3RvIo4BCg1Db250YWN0VXBkYXRlEiIKA0Fk",
            "ZBgBIAMoCzIVLkNvbnRhY3RVcGRhdGUuQ2xpZW50Eg4KBlJlbW92ZRgCIAMo",
            "AxpJCgZDbGllbnQSEAoIQ2xpZW50SWQYASABKAMSDgoGT25saW5lGAIgASgI",
            "EgwKBE5hbWUYAyABKAkSDwoHU3VybmFtZRgEIAEoCUIWSAGqAhFUZWFtTm90",
            "ZS5Qcm90b2NvbGIGcHJvdG8z"));
      descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
          new pbr::FileDescriptor[] { },
          new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] {
            new pbr::GeneratedClrTypeInfo(typeof(global::TeamNote.Protocol.ContactUpdate), global::TeamNote.Protocol.ContactUpdate.Parser, new[]{ "Add", "Remove" }, null, null, new pbr::GeneratedClrTypeInfo[] { new pbr::GeneratedClrTypeInfo(typeof(global::TeamNote.Protocol.ContactUpdate.Types.Client), global::TeamNote.Protocol.ContactUpdate.Types.Client.Parser, new[]{ "ClientId", "Online", "Name", "Surname" }, null, null, null)})
          }));
    }
    #endregion

  }

  #region Messages
  /// <summary>
  /// Contact-list delta: clients to add (with presence data) and client ids to remove.
  /// </summary>
  public sealed partial class ContactUpdate : pb::IMessage<ContactUpdate> {
    private static readonly pb::MessageParser<ContactUpdate> _parser = new pb::MessageParser<ContactUpdate>(() => new ContactUpdate());

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pb::MessageParser<ContactUpdate> Parser { get { return _parser; } }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pbr::MessageDescriptor Descriptor {
      get { return global::TeamNote.Protocol.ContactUpdateReflection.Descriptor.MessageTypes[0]; }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    pbr::MessageDescriptor pb::IMessage.Descriptor {
      get { return Descriptor; }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public ContactUpdate() {
      OnConstruction();
    }

    partial void OnConstruction();

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public ContactUpdate(ContactUpdate other) : this() {
      // Copy constructor: deep-clones both repeated fields.
      add_ = other.add_.Clone();
      remove_ = other.remove_.Clone();
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public ContactUpdate Clone() {
      return new ContactUpdate(this);
    }

    /// <summary>Field number for the "Add" field.</summary>
    public const int AddFieldNumber = 1;
    private static readonly pb::FieldCodec<global::TeamNote.Protocol.ContactUpdate.Types.Client> _repeated_add_codec
        = pb::FieldCodec.ForMessage(10, global::TeamNote.Protocol.ContactUpdate.Types.Client.Parser);
    private readonly pbc::RepeatedField<global::TeamNote.Protocol.ContactUpdate.Types.Client> add_ = new pbc::RepeatedField<global::TeamNote.Protocol.ContactUpdate.Types.Client>();
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public pbc::RepeatedField<global::TeamNote.Protocol.ContactUpdate.Types.Client> Add {
      get { return add_; }
    }

    /// <summary>Field number for the "Remove" field.</summary>
    public const int RemoveFieldNumber = 2;
    private static readonly pb::FieldCodec<long> _repeated_remove_codec
        = pb::FieldCodec.ForInt64(18);
    private readonly pbc::RepeatedField<long> remove_ = new pbc::RepeatedField<long>();
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public pbc::RepeatedField<long> Remove {
      get { return remove_; }
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override bool Equals(object other) {
      return Equals(other as ContactUpdate);
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public bool Equals(ContactUpdate other) {
      if (ReferenceEquals(other, null)) {
        return false;
      }
      if (ReferenceEquals(other, this)) {
        return true;
      }
      if(!add_.Equals(other.add_)) return false;
      if(!remove_.Equals(other.remove_)) return false;
      return true;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override int GetHashCode() {
      int hash = 1;
      hash ^= add_.GetHashCode();
      hash ^= remove_.GetHashCode();
      return hash;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override string ToString() {
      return pb::JsonFormatter.ToDiagnosticString(this);
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void WriteTo(pb::CodedOutputStream output) {
      add_.WriteTo(output, _repeated_add_codec);
      remove_.WriteTo(output, _repeated_remove_codec);
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public int CalculateSize() {
      int size = 0;
      size += add_.CalculateSize(_repeated_add_codec);
      size += remove_.CalculateSize(_repeated_remove_codec);
      return size;
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(ContactUpdate other) {
      if (other == null) {
        return;
      }
      // Repeated fields merge by concatenation, per proto3 semantics.
      add_.Add(other.add_);
      remove_.Add(other.remove_);
    }

    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(pb::CodedInputStream input) {
      uint tag;
      while ((tag = input.ReadTag()) != 0) {
        switch(tag) {
          default:
            // Unknown field: skip (proto3 generated code drops unknowns here).
            input.SkipLastField();
            break;
          case 10: {
            add_.AddEntriesFrom(input, _repeated_add_codec);
            break;
          }
          // Tag 18 = packed encoding, tag 16 = one value per tag; both are
          // accepted for the repeated int64 "Remove" field.
          case 18:
          case 16: {
            remove_.AddEntriesFrom(input, _repeated_remove_codec);
            break;
          }
        }
      }
    }

    #region Nested types
    /// <summary>Container for nested types declared in the ContactUpdate message type.</summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static partial class Types {
      /// <summary>A contact entry: identity plus presence and display names.</summary>
      public sealed partial class Client : pb::IMessage<Client> {
        private static readonly pb::MessageParser<Client> _parser = new pb::MessageParser<Client>(() => new Client());

        [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
        public static pb::MessageParser<Client> Parser { get { return _parser; } }

        [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
        public static pbr::MessageDescriptor Descriptor {
          get { return global::TeamNote.Protocol.ContactUpdate.Descriptor.NestedTypes[0]; }
        }

        [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
        pbr::MessageDescriptor pb::IMessage.Descriptor {
          get { return Descriptor; }
        }

        [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
        public Client() {
          OnConstruction();
        }

        partial void OnConstruction();

        [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
        public Client(Client other) : this() {
          clientId_ = other.clientId_;
          online_ = other.online_;
          name_ = other.name_;
          surname_ = other.surname_;
        }

        [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
        public Client Clone() {
          return new Client(this);
        }

        /// <summary>Field number for the "ClientId" field.</summary>
        public const int ClientIdFieldNumber = 1;
        private long clientId_;
        [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
        public long ClientId {
          get { return clientId_; }
          set { clientId_ = value; }
        }

        /// <summary>Field number for the "Online" field.</summary>
        public const int OnlineFieldNumber = 2;
        private bool online_;
        [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
        public bool Online {
          get { return online_; }
          set { online_ = value; }
        }

        /// <summary>Field number for the "Name" field.</summary>
        public const int NameFieldNumber = 3;
        private string name_ = "";
        [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
        public string Name {
          get { return name_; }
          // proto3 string fields are never null; setter enforces it.
          set { name_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); }
        }

        /// <summary>Field number for the "Surname" field.</summary>
        public const int SurnameFieldNumber = 4;
        private string surname_ = "";
        [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
        public string Surname {
          get { return surname_; }
          set { surname_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); }
        }

        [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
        public override bool Equals(object other) {
          return Equals(other as Client);
        }

        [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
        public bool Equals(Client other) {
          if (ReferenceEquals(other, null)) {
            return false;
          }
          if (ReferenceEquals(other, this)) {
            return true;
          }
          if (ClientId != other.ClientId) return false;
          if (Online != other.Online) return false;
          if (Name != other.Name) return false;
          if (Surname != other.Surname) return false;
          return true;
        }

        [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
        public override int GetHashCode() {
          // Fields at their proto3 default values do not contribute to the hash.
          int hash = 1;
          if (ClientId != 0L) hash ^= ClientId.GetHashCode();
          if (Online != false) hash ^= Online.GetHashCode();
          if (Name.Length != 0) hash ^= Name.GetHashCode();
          if (Surname.Length != 0) hash ^= Surname.GetHashCode();
          return hash;
        }

        [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
        public override string ToString() {
          return pb::JsonFormatter.ToDiagnosticString(this);
        }

        [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
        public void WriteTo(pb::CodedOutputStream output) {
          // Default-valued fields are omitted on the wire (proto3).
          if (ClientId != 0L) {
            output.WriteRawTag(8);
            output.WriteInt64(ClientId);
          }
          if (Online != false) {
            output.WriteRawTag(16);
            output.WriteBool(Online);
          }
          if (Name.Length != 0) {
            output.WriteRawTag(26);
            output.WriteString(Name);
          }
          if (Surname.Length != 0) {
            output.WriteRawTag(34);
            output.WriteString(Surname);
          }
        }

        [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
        public int CalculateSize() {
          int size = 0;
          if (ClientId != 0L) {
            size += 1 + pb::CodedOutputStream.ComputeInt64Size(ClientId);
          }
          if (Online != false) {
            size += 1 + 1;
          }
          if (Name.Length != 0) {
            size += 1 + pb::CodedOutputStream.ComputeStringSize(Name);
          }
          if (Surname.Length != 0) {
            size += 1 + pb::CodedOutputStream.ComputeStringSize(Surname);
          }
          return size;
        }

        [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
        public void MergeFrom(Client other) {
          if (other == null) {
            return;
          }
          // Singular fields: only non-default values overwrite.
          if (other.ClientId != 0L) {
            ClientId = other.ClientId;
          }
          if (other.Online != false) {
            Online = other.Online;
          }
          if (other.Name.Length != 0) {
            Name = other.Name;
          }
          if (other.Surname.Length != 0) {
            Surname = other.Surname;
          }
        }

        [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
        public void MergeFrom(pb::CodedInputStream input) {
          uint tag;
          while ((tag = input.ReadTag()) != 0) {
            switch(tag) {
              default:
                input.SkipLastField();
                break;
              case 8: {
                ClientId = input.ReadInt64();
                break;
              }
              case 16: {
                Online = input.ReadBool();
                break;
              }
              case 26: {
                Name = input.ReadString();
                break;
              }
              case 34: {
                Surname = input.ReadString();
                break;
              }
            }
          }
        }

      }
    }
    #endregion

  }
  #endregion

}

#endregion Designer generated code
using System;
using System.Collections;
using System.Collections.Generic;
using System.Reflection;

namespace FileHelpers
{
    /// <summary>Indicates the <see cref="ConverterKind"/> used for read/write operations.</summary>
    /// <remarks>See the <a href="http://www.filehelpers.net/mustread">Complete attributes list</a> for more information and examples of each one.</remarks>
    [AttributeUsage(AttributeTargets.Field | AttributeTargets.Property)]
    public sealed class FieldConverterAttribute : Attribute
    {
        #region " Constructors "

        /// <summary>Indicates the <see cref="ConverterKind"/> used for read/write operations. </summary>
        /// <param name="converter">The <see cref="ConverterKind"/> used for the transformations.</param>
        public FieldConverterAttribute(ConverterKind converter)
            : this(converter, new string[] {}) {}

        /// <summary>Indicates the <see cref="ConverterKind"/> used for read/write operations. </summary>
        /// <param name="converter">The <see cref="ConverterKind"/> used for the transformations.</param>
        /// <param name="arg1">The first param passed directly to the Converter Constructor.</param>
        public FieldConverterAttribute(ConverterKind converter, string arg1)
            : this(converter, new string[] {arg1}) {}

        /// <summary>Indicates the <see cref="ConverterKind"/> used for read/write operations. </summary>
        /// <param name="converter">The <see cref="ConverterKind"/> used for the transformations.</param>
        /// <param name="arg1">The first param passed directly to the Converter Constructor.</param>
        /// <param name="arg2">The second param passed directly to the Converter Constructor.</param>
        public FieldConverterAttribute(ConverterKind converter, string arg1, string arg2)
            : this(converter, new string[] {arg1, arg2}) {}

        /// <summary>Indicates the <see cref="ConverterKind"/> used for read/write operations. </summary>
        /// <param name="converter">The <see cref="ConverterKind"/> used for the transformations.</param>
        /// <param name="arg1">The first param passed directly to the Converter Constructor.</param>
        /// <param name="arg2">The second param passed directly to the Converter Constructor.</param>
        /// <param name="arg3">The third param passed directly to the Converter Constructor.</param>
        public FieldConverterAttribute(ConverterKind converter, string arg1, string arg2, string arg3)
            : this(converter, new string[] {arg1, arg2, arg3}) {}

        /// <summary>
        /// Indicates the <see cref="ConverterKind"/> used for read/write operations.
        /// </summary>
        /// <param name="converter">The <see cref="ConverterKind"/> used for the transformations.</param>
        /// <param name="args">An array of parameters passed directly to the Converter</param>
        private FieldConverterAttribute(ConverterKind converter, params string[] args)
        {
            Kind = converter;

            // Map each well-known kind to its concrete converter implementation.
            Type convType;

            switch (converter) {
                case ConverterKind.Date:
                    convType = typeof (ConvertHelpers.DateTimeConverter);
                    break;
                case ConverterKind.DateMultiFormat:
                    convType = typeof (ConvertHelpers.DateTimeMultiFormatConverter);
                    break;
                case ConverterKind.Byte:
                    convType = typeof (ConvertHelpers.ByteConverter);
                    break;
                case ConverterKind.SByte:
                    convType = typeof (ConvertHelpers.SByteConverter);
                    break;
                case ConverterKind.Int16:
                    convType = typeof (ConvertHelpers.Int16Converter);
                    break;
                case ConverterKind.Int32:
                    convType = typeof (ConvertHelpers.Int32Converter);
                    break;
                case ConverterKind.Int64:
                    convType = typeof (ConvertHelpers.Int64Converter);
                    break;
                case ConverterKind.UInt16:
                    convType = typeof (ConvertHelpers.UInt16Converter);
                    break;
                case ConverterKind.UInt32:
                    convType = typeof (ConvertHelpers.UInt32Converter);
                    break;
                case ConverterKind.UInt64:
                    convType = typeof (ConvertHelpers.UInt64Converter);
                    break;
                case ConverterKind.Decimal:
                    convType = typeof (ConvertHelpers.DecimalConverter);
                    break;
                case ConverterKind.Double:
                    convType = typeof (ConvertHelpers.DoubleConverter);
                    break;
                // Added by Shreyas Narasimhan 17 March 2010
                case ConverterKind.PercentDouble:
                    convType = typeof (ConvertHelpers.PercentDoubleConverter);
                    break;
                case ConverterKind.Single:
                    convType = typeof (ConvertHelpers.SingleConverter);
                    break;
                case ConverterKind.Boolean:
                    convType = typeof (ConvertHelpers.BooleanConverter);
                    break;
                // Added by Alexander Obolonkov 2007.11.08
                case ConverterKind.Char:
                    convType = typeof (ConvertHelpers.CharConverter);
                    break;
                // Added by Alexander Obolonkov 2007.11.08
                case ConverterKind.Guid:
                    convType = typeof (ConvertHelpers.GuidConverter);
                    break;
                default:
                    throw new BadUsageException("Converter '" + converter.ToString() +
                                                "' not found, you must specify a valid converter.");
            }

            CreateConverter(convType, args);
        }

        /// <summary>Indicates a custom <see cref="ConverterBase"/> implementation.</summary>
        /// <param name="customConverter">The Type of your custom converter.</param>
        /// <param name="arg1">The first param passed directly to the Converter Constructor.</param>
        public FieldConverterAttribute(Type customConverter, string arg1)
            : this(customConverter, new string[] {arg1}) {}

        /// <summary>Indicates a custom <see cref="ConverterBase"/> implementation.</summary>
        /// <param name="customConverter">The Type of your custom converter.</param>
        /// <param name="arg1">The first param passed directly to the Converter Constructor.</param>
        /// <param name="arg2">The second param passed directly to the Converter Constructor.</param>
        public FieldConverterAttribute(Type customConverter, string arg1, string arg2)
            : this(customConverter, new string[] {arg1, arg2}) {}

        /// <summary>Indicates a custom <see cref="ConverterBase"/> implementation.</summary>
        /// <param name="customConverter">The Type of your custom converter.</param>
        /// <param name="arg1">The first param passed directly to the Converter Constructor.</param>
        /// <param name="arg2">The second param passed directly to the Converter Constructor.</param>
        /// <param name="arg3">The third param passed directly to the Converter Constructor.</param>
        public FieldConverterAttribute(Type customConverter, string arg1, string arg2, string arg3)
            : this(customConverter, new string[] {arg1, arg2, arg3}) {}

        /// <summary>Indicates a custom <see cref="ConverterBase"/> implementation.</summary>
        /// <param name="customConverter">The Type of your custom converter.</param>
        /// <param name="args">A list of params passed directly to your converter constructor.</param>
        public FieldConverterAttribute(Type customConverter, params object[] args)
        {
            CreateConverter(customConverter, args);
        }

        /// <summary>Indicates a custom <see cref="ConverterBase"/> implementation.</summary>
        /// <param name="customConverter">The Type of your custom converter.</param>
        public FieldConverterAttribute(Type customConverter)
        {
            CreateConverter(customConverter, new object[] {});
        }

        #endregion

        #region " Converter "

        /// <summary>The final concrete converter used for FieldToString and StringToField operations </summary>
        public ConverterBase Converter { get; private set; }

        /// <summary>The <see cref="ConverterKind"/> if a default converter is used </summary>
        public ConverterKind Kind { get; private set; }

        #endregion

        #region " CreateConverter "

        // Instantiates the converter via the constructor matching the given args
        // (public or non-public). Enum types get the built-in EnumConverter.
        private void CreateConverter(Type convType, object[] args)
        {
            if (typeof (ConverterBase).IsAssignableFrom(convType)) {
                ConstructorInfo constructor;
                constructor = convType.GetConstructor(
                    BindingFlags.Public | BindingFlags.Instance | BindingFlags.NonPublic,
                    null,
                    ArgsToTypes(args),
                    null);

                if (constructor == null) {
                    if (args.Length == 0) {
                        throw new BadUsageException("Empty constructor for converter: " + convType.Name +
                                                    " was not found. You must add a constructor without args (can be public or private)");
                    }
                    else {
                        throw new BadUsageException("Constructor for converter: " + convType.Name +
                                                    " with these arguments: (" + ArgsDesc(args) +
                                                    ") was not found. You must add a constructor with this signature (can be public or private)");
                    }
                }

                try {
                    Converter = (ConverterBase) constructor.Invoke(args);
                }
                catch (TargetInvocationException ex) {
                    // Surface the converter's own exception rather than the reflection wrapper.
                    throw ex.InnerException;
                }
            }
            else if (convType.IsEnum)
                Converter = new EnumConverter(convType);
            else
                throw new BadUsageException("The custom converter must inherit from ConverterBase");
        }

        #endregion

        #region " ArgsToTypes "

        // Builds the Type[] used for constructor lookup; a null element is
        // treated as System.Object so it can match any reference parameter.
        private static Type[] ArgsToTypes(object[] args)
        {
            if (args == null) {
                // Fixed: the previous message stated the opposite of the enforced rule.
                throw new BadUsageException(
                    "The args to the constructor can't be null. Pass an empty array if you do not want to pass anything into them.");
            }

            var res = new Type[args.Length];

            for (int i = 0; i < args.Length; i++) {
                if (args[i] == null)
                    res[i] = typeof (object);
                else
                    res[i] = args[i].GetType();
            }

            return res;
        }

        // Human-readable description of the argument types, used in error messages.
        // Precondition: args is non-empty (only called from the non-empty branch above).
        private static string ArgsDesc(object[] args)
        {
            string res = DisplayType(args[0]);

            for (int i = 1; i < args.Length; i++)
                res += ", " + DisplayType(args[i]);

            return res;
        }

        private static string DisplayType(object o)
        {
            if (o == null)
                return "Object";
            else
                return o.GetType().Name;
        }

        #endregion

        // Verifies that the selected built-in converter matches the annotated
        // field's type (unwrapping Nullable<T> first); throws BadUsageException
        // on mismatch. Custom converters (Kind == None) are not checked here.
        internal void ValidateTypes(FieldInfo fi)
        {
            bool valid = false;

            Type fieldType = fi.FieldType;

            // For Nullable<T> fields, validate against T.
            if (fieldType.IsValueType &&
                fieldType.IsGenericType &&
                fieldType.GetGenericTypeDefinition() == typeof (Nullable<>))
                fieldType = fieldType.GetGenericArguments()[0];

            switch (Kind) {
                case ConverterKind.None:
                    valid = true;
                    break;

                case ConverterKind.Date:
                case ConverterKind.DateMultiFormat:
                    valid = typeof (DateTime) == fieldType;
                    break;

                case ConverterKind.Byte:
                case ConverterKind.SByte:
                case ConverterKind.Int16:
                case ConverterKind.Int32:
                case ConverterKind.Int64:
                case ConverterKind.UInt16:
                case ConverterKind.UInt32:
                case ConverterKind.UInt64:
                case ConverterKind.Decimal:
                case ConverterKind.Double:
                case ConverterKind.Single:
                case ConverterKind.Boolean:
                case ConverterKind.Char:
                case ConverterKind.Guid:
                    // The ConverterKind names mirror the CLR type names exactly.
                    valid = Kind.ToString() == fieldType.UnderlyingSystemType.Name;
                    break;

                case ConverterKind.PercentDouble:
                    valid = typeof (double) == fieldType;
                    break;
            }

            if (valid == false) {
                throw new BadUsageException(
                    "The converter of the field: '" + fi.Name + "' is wrong. The field is of Type: " + fieldType.Name +
                    " and the converter is for type: " + Kind.ToString());
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/*============================================================
**
**
**
**
**
** Purpose: Searches for resources in Assembly manifest, used
** for assembly-based resource lookup.
**
** ===========================================================*/
namespace System.Resources
{
    using System;
    using System.Collections;
    using System.Collections.Generic;
    using System.Globalization;
    using System.IO;
    using System.Reflection;
    using System.Runtime.InteropServices;
    using System.Runtime.CompilerServices;
    using System.Runtime.Versioning;
    using System.Text;
    using System.Threading;
    using System.Diagnostics;
    using Microsoft.Win32;

    //
    // Note: this type is integral to the construction of exception objects,
    // and sometimes this has to be done in low memory situtations (OOM) or
    // to create TypeInitializationExceptions due to failure of a static class
    // constructor. This type needs to be extremely careful and assume that
    // any type it references may have previously failed to construct, so statics
    // belonging to that type may not be initialized. FrameworkEventSource.Log
    // is one such example.
    //
    internal class ManifestBasedResourceGroveler : IResourceGroveler
    {
        // Shared lookup state (main assembly, fallback location, cached
        // satellite-contract version, etc.) owned by the ResourceManager.
        private ResourceManager.ResourceManagerMediator _mediator;

        public ManifestBasedResourceGroveler(ResourceManager.ResourceManagerMediator mediator)
        {
            // here and below: convert asserts to preconditions where appropriate when we get
            // contracts story in place.
            Debug.Assert(mediator != null, "mediator shouldn't be null; check caller");
            _mediator = mediator;
        }

        // Core lookup: picks the satellite (or main) assembly appropriate for
        // the requested culture, finds the .resources blob in that assembly's
        // manifest, and — when createIfNotExists is set — materializes a
        // ResourceSet from the stream. Returns null when nothing was found and
        // no error condition applies.
        public ResourceSet GrovelForResourceSet(CultureInfo culture, Dictionary<String, ResourceSet> localResourceSets, bool tryParents, bool createIfNotExists, ref StackCrawlMark stackMark)
        {
            Debug.Assert(culture != null, "culture shouldn't be null; check caller");
            Debug.Assert(localResourceSets != null, "localResourceSets shouldn't be null; check caller");
            ResourceSet rs = null;
            Stream stream = null;
            RuntimeAssembly satellite = null;
            // 1. Fixups for ultimate fallbacks
            CultureInfo lookForCulture = UltimateFallbackFixup(culture);
            // 2. Look for satellite assembly or main assembly, as appropriate
            if (lookForCulture.HasInvariantCultureName && _mediator.FallbackLoc == UltimateResourceFallbackLocation.MainAssembly)
            {
                // don't bother looking in satellites in this case
                satellite = _mediator.MainAssembly;
            }
#if RESOURCE_SATELLITE_CONFIG
            // If our config file says the satellite isn't here, don't ask for it.
            else if (!lookForCulture.HasInvariantCultureName && !_mediator.TryLookingForSatellite(lookForCulture))
            {
                satellite = null;
            }
#endif
            else
            {
                satellite = GetSatelliteAssembly(lookForCulture, ref stackMark);
                if (satellite == null)
                {
                    bool raiseException = (culture.HasInvariantCultureName && (_mediator.FallbackLoc == UltimateResourceFallbackLocation.Satellite));
                    // didn't find satellite, give error if necessary
                    if (raiseException)
                    {
                        HandleSatelliteMissing();
                    }
                }
            }
            // get resource file name we'll search for. Note, be careful if you're moving this statement
            // around because lookForCulture may be modified from originally requested culture above.
            String fileName = _mediator.GetResourceFileName(lookForCulture);
            // 3. If we identified an assembly to search; look in manifest resource stream for resource file
            if (satellite != null)
            {
                // Handle case in here where someone added a callback for assembly load events.
                // While no other threads have called into GetResourceSet, our own thread can!
                // At that point, we could already have an RS in our hash table, and we don't
                // want to add it twice.
                lock (localResourceSets)
                {
                    localResourceSets.TryGetValue(culture.Name, out rs);
                }
                stream = GetManifestResourceStream(satellite, fileName, ref stackMark);
            }
            // 4a. Found a stream; create a ResourceSet if possible
            if (createIfNotExists && stream != null && rs == null)
            {
                rs = CreateResourceSet(stream, satellite);
            }
            else if (stream == null && tryParents)
            {
                // 4b. Didn't find stream; give error if necessary
                bool raiseException = culture.HasInvariantCultureName;
                if (raiseException)
                {
                    HandleResourceStreamMissing(fileName);
                }
            }
            return rs;
        }

        // Redirects the probe culture for the two "ultimate fallback" cases:
        // probing the neutral-resources culture when the main assembly holds
        // those resources collapses to the invariant culture, and probing the
        // invariant culture when the fallback lives in a satellite redirects
        // to the neutral-resources culture.
        private CultureInfo UltimateFallbackFixup(CultureInfo lookForCulture)
        {
            CultureInfo returnCulture = lookForCulture;
            // If our neutral resources were written in this culture AND we know the main assembly
            // does NOT contain neutral resources, don't probe for this satellite.
            if (lookForCulture.Name == _mediator.NeutralResourcesCulture.Name && _mediator.FallbackLoc == UltimateResourceFallbackLocation.MainAssembly)
            {
                returnCulture = CultureInfo.InvariantCulture;
            }
            else if (lookForCulture.HasInvariantCultureName && _mediator.FallbackLoc == UltimateResourceFallbackLocation.Satellite)
            {
                returnCulture = _mediator.NeutralResourcesCulture;
            }
            return returnCulture;
        }

        // Reads the assembly's NeutralResourcesLanguageAttribute via a QCall
        // into the runtime. When the attribute is absent, defaults to
        // (MainAssembly, invariant culture). Throws ArgumentException for an
        // out-of-range fallback location or (for non-corelib assemblies) a
        // malformed culture name.
        internal static CultureInfo GetNeutralResourcesLanguage(Assembly a, ref UltimateResourceFallbackLocation fallbackLocation)
        {
            Debug.Assert(a != null, "assembly != null");
            string cultureName = null;
            short fallback = 0;
            if (GetNeutralResourcesLanguageAttribute(((RuntimeAssembly)a).GetNativeHandle(), JitHelpers.GetStringHandleOnStack(ref cultureName), out fallback))
            {
                if ((UltimateResourceFallbackLocation)fallback < UltimateResourceFallbackLocation.MainAssembly || (UltimateResourceFallbackLocation)fallback > UltimateResourceFallbackLocation.Satellite)
                {
                    throw new ArgumentException(SR.Format(SR.Arg_InvalidNeutralResourcesLanguage_FallbackLoc, fallback));
                }
                fallbackLocation = (UltimateResourceFallbackLocation)fallback;
            }
            else
            {
                fallbackLocation = UltimateResourceFallbackLocation.MainAssembly;
                return CultureInfo.InvariantCulture;
            }
            try
            {
                CultureInfo c = CultureInfo.GetCultureInfo(cultureName);
                return c;
            }
            catch (ArgumentException e)
            {
                // we should catch ArgumentException only.
                // Note we could go into infinite loops if mscorlib's
                // NeutralResourcesLanguageAttribute is mangled. If this assert
                // fires, please fix the build process for the BCL directory.
                if (a == typeof(Object).Assembly)
                {
                    Debug.Assert(false, System.CoreLib.Name + "'s NeutralResourcesLanguageAttribute is a malformed culture name! name: \"" + cultureName + "\" Exception: " + e);
                    return CultureInfo.InvariantCulture;
                }
                throw new ArgumentException(SR.Format(SR.Arg_InvalidNeutralResourcesLanguage_Asm_Culture, a.ToString(), cultureName), e);
            }
        }

        // Constructs a new ResourceSet for a given file name.
        // Use the assembly to resolve assembly manifest resource references.
        // Note that is can be null, but probably shouldn't be.
        // This method could use some refactoring. One thing at a time.
        internal ResourceSet CreateResourceSet(Stream store, Assembly assembly)
        {
            Debug.Assert(store != null, "I need a Stream!");
            // Check to see if this is a Stream the ResourceManager understands,
            // and check for the correct resource reader type.
            if (store.CanSeek && store.Length > 4)
            {
                long startPos = store.Position;
                // not disposing because we want to leave stream open
                BinaryReader br = new BinaryReader(store);
                // Look for our magic number as a little endian Int32.
                int bytes = br.ReadInt32();
                if (bytes == ResourceManager.MagicNumber)
                {
                    int resMgrHeaderVersion = br.ReadInt32();
                    String readerTypeName = null, resSetTypeName = null;
                    if (resMgrHeaderVersion == ResourceManager.HeaderVersionNumber)
                    {
                        br.ReadInt32();    // We don't want the number of bytes to skip.
                        readerTypeName = System.CoreLib.FixupCoreLibName(br.ReadString());
                        resSetTypeName = System.CoreLib.FixupCoreLibName(br.ReadString());
                    }
                    else if (resMgrHeaderVersion > ResourceManager.HeaderVersionNumber)
                    {
                        // Assume that the future ResourceManager headers will
                        // have two strings for us - the reader type name and
                        // resource set type name. Read those, then use the num
                        // bytes to skip field to correct our position.
                        int numBytesToSkip = br.ReadInt32();
                        long endPosition = br.BaseStream.Position + numBytesToSkip;
                        readerTypeName = System.CoreLib.FixupCoreLibName(br.ReadString());
                        resSetTypeName = System.CoreLib.FixupCoreLibName(br.ReadString());
                        br.BaseStream.Seek(endPosition, SeekOrigin.Begin);
                    }
                    else
                    {
                        // resMgrHeaderVersion is older than this ResMgr version.
                        // We should add in backwards compatibility support here.
                        throw new NotSupportedException(SR.Format(SR.NotSupported_ObsoleteResourcesFile, _mediator.MainAssembly.GetSimpleName()));
                    }
                    // Rewind so the reader we hand the stream to sees the full blob.
                    store.Position = startPos;
                    // Perf optimization - Don't use Reflection for our defaults.
                    // Note there are two different sets of strings here - the
                    // assembly qualified strings emitted by ResourceWriter, and
                    // the abbreviated ones emitted by InternalResGen.
                    if (CanUseDefaultResourceClasses(readerTypeName, resSetTypeName))
                    {
                        return new RuntimeResourceSet(store);
                    }
                    else
                    {
                        // we do not want to use partial binding here.
                        Type readerType = Type.GetType(readerTypeName, true);
                        Object[] args = new Object[1];
                        args[0] = store;
                        IResourceReader reader = (IResourceReader)Activator.CreateInstance(readerType, args);
                        Object[] resourceSetArgs = new Object[1];
                        resourceSetArgs[0] = reader;
                        Type resSetType;
                        if (_mediator.UserResourceSet == null)
                        {
                            Debug.Assert(resSetTypeName != null, "We should have a ResourceSet type name from the custom resource file here.");
                            resSetType = Type.GetType(resSetTypeName, true, false);
                        }
                        else
                            resSetType = _mediator.UserResourceSet;
                        ResourceSet rs = (ResourceSet)Activator.CreateInstance(resSetType, BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.CreateInstance, null, resourceSetArgs, null, null);
                        return rs;
                    }
                }
                else
                {
                    // Not our magic number; rewind and fall through to the
                    // user-supplied / default ResourceSet path below.
                    store.Position = startPos;
                }
            }
            if (_mediator.UserResourceSet == null)
            {
                return new RuntimeResourceSet(store);
            }
            else
            {
                Object[] args = new Object[2];
                args[0] = store;
                args[1] = assembly;
                try
                {
                    ResourceSet rs = null;
                    // Add in a check for a constructor taking in an assembly first.
                    try
                    {
                        rs = (ResourceSet)Activator.CreateInstance(_mediator.UserResourceSet, args);
                        return rs;
                    }
                    catch (MissingMethodException) { }
                    args = new Object[1];
                    args[0] = store;
                    rs = (ResourceSet)Activator.CreateInstance(_mediator.UserResourceSet, args);
                    return rs;
                }
                catch (MissingMethodException e)
                {
                    throw new InvalidOperationException(SR.Format(SR.InvalidOperation_ResMgrBadResSet_Type, _mediator.UserResourceSet.AssemblyQualifiedName), e);
                }
            }
        }

        // Opens the named manifest resource stream from the given assembly,
        // falling back to a (slow) case-insensitive manifest scan when the
        // exact-case lookup misses. Returns null if neither finds it.
        private Stream GetManifestResourceStream(RuntimeAssembly satellite, String fileName, ref StackCrawlMark stackMark)
        {
            Debug.Assert(satellite != null, "satellite shouldn't be null; check caller");
            Debug.Assert(fileName != null, "fileName shouldn't be null; check caller");
            // If we're looking in the main assembly AND if the main assembly was the person who
            // created the ResourceManager, skip a security check for private manifest resources.
            bool canSkipSecurityCheck = (_mediator.MainAssembly == satellite) && (_mediator.CallingAssembly == _mediator.MainAssembly);
            Stream stream = satellite.GetManifestResourceStream(_mediator.LocationInfo, fileName, canSkipSecurityCheck, ref stackMark);
            if (stream == null)
            {
                stream = CaseInsensitiveManifestResourceStreamLookup(satellite, fileName);
            }
            return stream;
        }

        // Looks up a .resources file in the assembly manifest using
        // case-insensitive lookup rules. Yes, this is slow. The metadata
        // dev lead refuses to make all assembly manifest resource lookups case-insensitive,
        // even optionally case-insensitive.
        // Throws MissingManifestResourceException if more than one manifest
        // name matches case-insensitively (the match would be ambiguous).
        [System.Security.DynamicSecurityMethod] // Methods containing StackCrawlMark local var has to be marked DynamicSecurityMethod
        private Stream CaseInsensitiveManifestResourceStreamLookup(RuntimeAssembly satellite, String name)
        {
            Debug.Assert(satellite != null, "satellite shouldn't be null; check caller");
            Debug.Assert(name != null, "name shouldn't be null; check caller");
            StringBuilder sb = new StringBuilder();
            if (_mediator.LocationInfo != null)
            {
                String nameSpace = _mediator.LocationInfo.Namespace;
                if (nameSpace != null)
                {
                    sb.Append(nameSpace);
                    if (name != null)
                        sb.Append(Type.Delimiter);
                }
            }
            sb.Append(name);
            String givenName = sb.ToString();
            CompareInfo comparer = CultureInfo.InvariantCulture.CompareInfo;
            String canonicalName = null;
            foreach (String existingName in satellite.GetManifestResourceNames())
            {
                if (comparer.Compare(existingName, givenName, CompareOptions.IgnoreCase) == 0)
                {
                    if (canonicalName == null)
                    {
                        canonicalName = existingName;
                    }
                    else
                    {
                        throw new MissingManifestResourceException(SR.Format(SR.MissingManifestResource_MultipleBlobs, givenName, satellite.ToString()));
                    }
                }
            }
            if (canonicalName == null)
            {
                return null;
            }
            // If we're looking in the main assembly AND if the main
            // assembly was the person who created the ResourceManager,
            // skip a security check for private manifest resources.
            bool canSkipSecurityCheck = _mediator.MainAssembly == satellite && _mediator.CallingAssembly == _mediator.MainAssembly;
            StackCrawlMark stackMark = StackCrawlMark.LookForMyCaller;
            return satellite.GetManifestResourceStream(canonicalName, ref stackMark, canSkipSecurityCheck);
        }

        // Loads the "<simple-name>.resources" satellite assembly for the given
        // culture, caching the satellite-contract version on first use.
        // Swallows ACCESS_DENIED FileLoadExceptions and zero-length/corrupt
        // satellites (BadImageFormatException) so callers can fall back;
        // returns null in those cases.
        private RuntimeAssembly GetSatelliteAssembly(CultureInfo lookForCulture, ref StackCrawlMark stackMark)
        {
            if (!_mediator.LookedForSatelliteContractVersion)
            {
                _mediator.SatelliteContractVersion = _mediator.ObtainSatelliteContractVersion(_mediator.MainAssembly);
                _mediator.LookedForSatelliteContractVersion = true;
            }
            RuntimeAssembly satellite = null;
            String satAssemblyName = GetSatelliteAssemblyName();
            // Look up the satellite assembly, but don't let problems
            // like a partially signed satellite assembly stop us from
            // doing fallback and displaying something to the user.
            // Yet also somehow log this error for a developer.
            try
            {
                satellite = _mediator.MainAssembly.InternalGetSatelliteAssembly(satAssemblyName, lookForCulture, _mediator.SatelliteContractVersion, false, ref stackMark);
            }
            // Jun 08: for cases other than ACCESS_DENIED, we'll assert instead of throw to give release builds more opportunity to fallback.
            catch (FileLoadException fle)
            {
                // Ignore cases where the loader gets an access
                // denied back from the OS. This showed up for
                // href-run exe's at one point.
                int hr = fle._HResult;
                if (hr != Win32Marshal.MakeHRFromErrorCode(Win32Native.ERROR_ACCESS_DENIED))
                {
                    Debug.Assert(false, "[This assert catches satellite assembly build/deployment problems - report this message to your build lab & loc engineer]" + Environment.NewLine + "GetSatelliteAssembly failed for culture " + lookForCulture.Name + " and version " + (_mediator.SatelliteContractVersion == null ? _mediator.MainAssembly.GetVersion().ToString() : _mediator.SatelliteContractVersion.ToString()) + " of assembly " + _mediator.MainAssembly.GetSimpleName() + " with error code 0x" + hr.ToString("X", CultureInfo.InvariantCulture) + Environment.NewLine + "Exception: " + fle);
                }
            }
            // Don't throw for zero-length satellite assemblies, for compat with v1
            catch (BadImageFormatException bife)
            {
                Debug.Assert(false, "[This assert catches satellite assembly build/deployment problems - report this message to your build lab & loc engineer]" + Environment.NewLine + "GetSatelliteAssembly failed for culture " + lookForCulture.Name + " and version " + (_mediator.SatelliteContractVersion == null ? _mediator.MainAssembly.GetVersion().ToString() : _mediator.SatelliteContractVersion.ToString()) + " of assembly " + _mediator.MainAssembly.GetSimpleName() + Environment.NewLine + "Exception: " + bife);
            }
            return satellite;
        }

        // Perf optimization - Don't use Reflection for most cases with
        // our .resources files. This makes our code run faster and we can
        // creating a ResourceReader via Reflection. This would incur
        // a security check (since the link-time check on the constructor that
        // takes a String is turned into a full demand with a stack walk)
        // and causes partially trusted localized apps to fail.
        // Returns true only when no user ResourceSet is configured and both
        // type names match the built-in reader/set names.
        private bool CanUseDefaultResourceClasses(String readerTypeName, String resSetTypeName)
        {
            Debug.Assert(readerTypeName != null, "readerTypeName shouldn't be null; check caller");
            Debug.Assert(resSetTypeName != null, "resSetTypeName shouldn't be null; check caller");
            if (_mediator.UserResourceSet != null)
                return false;
            // Ignore the actual version of the ResourceReader and
            // RuntimeResourceSet classes. Let those classes deal with
            // versioning themselves.
            AssemblyName mscorlib = new AssemblyName(ResourceManager.MscorlibName);
            if (readerTypeName != null)
            {
                if (!ResourceManager.CompareNames(readerTypeName, ResourceManager.ResReaderTypeName, mscorlib))
                    return false;
            }
            if (resSetTypeName != null)
            {
                if (!ResourceManager.CompareNames(resSetTypeName, ResourceManager.ResSetTypeName, mscorlib))
                    return false;
            }
            return true;
        }

        // "<main assembly simple name>.resources" — the satellite's simple name.
        private String GetSatelliteAssemblyName()
        {
            String satAssemblyName = _mediator.MainAssembly.GetSimpleName();
            satAssemblyName += ".resources";
            return satAssemblyName;
        }

        // Builds a best-guess display name for the missing satellite
        // (name, contract version, public key token) and throws
        // MissingSatelliteAssemblyException with it.
        private void HandleSatelliteMissing()
        {
            String satAssemName = _mediator.MainAssembly.GetSimpleName() + ".resources.dll";
            if (_mediator.SatelliteContractVersion != null)
            {
                satAssemName += ", Version=" + _mediator.SatelliteContractVersion.ToString();
            }
            AssemblyName an = new AssemblyName();
            an.SetPublicKey(_mediator.MainAssembly.GetPublicKey());
            byte[] token = an.GetPublicKeyToken();
            int iLen = token.Length;
            StringBuilder publicKeyTok = new StringBuilder(iLen * 2);
            for (int i = 0; i < iLen; i++)
            {
                // lower-case hex, two chars per byte
                publicKeyTok.Append(token[i].ToString("x", CultureInfo.InvariantCulture));
            }
            satAssemName += ", PublicKeyToken=" + publicKeyTok;
            String missingCultureName = _mediator.NeutralResourcesCulture.Name;
            if (missingCultureName.Length == 0)
            {
                missingCultureName = "<invariant>";
            }
            throw new MissingSatelliteAssemblyException(SR.Format(SR.MissingSatelliteAssembly_Culture_Name, _mediator.NeutralResourcesCulture, satAssemName), missingCultureName);
        }

        // Throws MissingManifestResourceException for a missing neutral
        // resource file — except for corelib's own resources, where it
        // FailFasts because exception/CultureInfo machinery would be broken.
        private void HandleResourceStreamMissing(String fileName)
        {
            // Keep people from bothering me about resources problems
            if (_mediator.MainAssembly == typeof(Object).Assembly && _mediator.BaseName.Equals(System.CoreLib.Name))
            {
                // This would break CultureInfo & all our exceptions.
                Debug.Assert(false, "Couldn't get " + System.CoreLib.Name + ResourceManager.ResFileExtension + " from " + System.CoreLib.Name + "'s assembly" + Environment.NewLine + Environment.NewLine + "Are you building the runtime on your machine? Chances are the BCL directory didn't build correctly. Type 'build -c' in the BCL directory. If you get build errors, look at buildd.log. If you then can't figure out what's wrong (and you aren't changing the assembly-related metadata code), ask a BCL dev.\n\nIf you did NOT build the runtime, you shouldn't be seeing this and you've found a bug.");
                // We cannot continue further - simply FailFast.
                string mesgFailFast = System.CoreLib.Name + ResourceManager.ResFileExtension + " couldn't be found! Large parts of the BCL won't work!";
                System.Environment.FailFast(mesgFailFast);
            }
            // We really don't think this should happen - we always
            // expect the neutral locale's resources to be present.
            String resName = String.Empty;
            if (_mediator.LocationInfo != null && _mediator.LocationInfo.Namespace != null)
                resName = _mediator.LocationInfo.Namespace + Type.Delimiter;
            resName += fileName;
            throw new MissingManifestResourceException(SR.Format(SR.MissingManifestResource_NoNeutralAsm, resName, _mediator.MainAssembly.GetSimpleName()));
        }

        // QCall into the runtime: reads the NeutralResourcesLanguageAttribute
        // data (culture name + fallback location) directly from assembly
        // metadata. Returns false when the attribute is absent.
        [DllImport(JitHelpers.QCall, CharSet = CharSet.Unicode)]
        [System.Security.SuppressUnmanagedCodeSecurity]
        [return: MarshalAs(UnmanagedType.Bool)]
        internal static extern bool GetNeutralResourcesLanguageAttribute(RuntimeAssembly assemblyHandle, StringHandleOnStack cultureName, out short fallbackLocation);
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using Flunet.Extensions;
using Flunet.Attributes;
using Flunet.Automata.Interfaces;
using Flunet.Automata.Language;

namespace Flunet.TypeAnalyzer
{
    /// <summary>
    /// Builds an automata that validates that every method carrying a
    /// <see cref="UniqueInScopeAttribute"/> is used at most once within its scope.
    /// </summary>
    public class UniquenessScopeValidationAutomataBuilder : ScopeValidationAutomataBuilder
    {
        #region Constructor

        /// <summary>
        /// Creates a new instance of <see cref="UniquenessScopeValidationAutomataBuilder"/>
        /// in respect to the given type.
        /// </summary>
        /// <param name="type">The given type.</param>
        public UniquenessScopeValidationAutomataBuilder(Type type)
            : base(type)
        {
        }

        #endregion

        #region Public Methods

        /// <summary>
        /// <see cref="ITypeAutomataBuilder.Build"/>.
        /// Builds a uniqueness validation automata and intersects it with the
        /// aggregated automata built so far.
        /// </summary>
        public override IDeterministicAutomata<MethodInfo> Build(IDeterministicAutomata<MethodInfo> aggregatedAutomata)
        {
            IDictionary<string, ICollection<MethodInfo>> methodsByAlias = GatherAliasesToMethods();
            IDictionary<string, ICollection<string>> constraintsByScope = GatherScopeConstraints();
            ICollection<MethodInfo> alphabet = aggregatedAutomata.Alphabet;

            // One uniqueness-validation automata per scope, keyed by scope name.
            var validationsByScope = new Dictionary<string, IDeterministicAutomata<MethodInfo>>();
            foreach (KeyValuePair<string, ICollection<string>> scope in constraintsByScope)
            {
                validationsByScope[scope.Key] = BuildScopeValidation(
                    scope.Key,
                    scope.Value.Select(alias => methodsByAlias[alias]),
                    alphabet);
            }

            IDeterministicAutomata<MethodInfo> linkedScopes = LinkScopes(validationsByScope);
            IExtendableDeterministicAutomata<MethodInfo> result = linkedScopes.Intersect(aggregatedAutomata);
            result.Alphabet.AddRange(aggregatedAutomata.Alphabet);
            return result;
        }

        #endregion

        #region Private Methods

        /// <summary>
        /// Resolves the alias of a method: the <see cref="AliasAttribute"/> value
        /// when present, otherwise the method's own name.
        /// </summary>
        /// <param name="method">The method whose alias is resolved.</param>
        /// <returns>The method's alias.</returns>
        private static string ResolveAlias(MethodInfo method)
        {
            AliasAttribute aliasAttribute = method.GetAttribute<AliasAttribute>();
            return aliasAttribute != null ? aliasAttribute.Alias : method.Name;
        }

        /// <summary>
        /// Gathers a mapping between alias names and all the methods carrying
        /// that alias.
        /// </summary>
        /// <returns>The alias-to-methods mapping.</returns>
        private IDictionary<string, ICollection<MethodInfo>> GatherAliasesToMethods()
        {
            var result = new Dictionary<string, ICollection<MethodInfo>>();
            GatherAliasesToMethods(this.Type, result);
            return result;
        }

        /// <summary>
        /// Inner implementation of <see cref="GatherAliasesToMethods()"/>;
        /// walks the fluent interface by following method return types.
        /// </summary>
        /// <param name="type">The current state type to scan.</param>
        /// <param name="aliasesToMethods">The mapping being filled.</param>
        private static void GatherAliasesToMethods(Type type, IDictionary<string, ICollection<MethodInfo>> aliasesToMethods)
        {
            foreach (MethodInfo method in type.GetMethods())
            {
                if (method.HasAttribute<UniqueInScopeAttribute>())
                {
                    aliasesToMethods.Add(ResolveAlias(method), method);
                }

                // Recurse into the state type this method transitions to.
                // NOTE(review): termination relies on the fluent graph being free of
                // cycles other than self-returning methods — confirm against usage.
                if (method.ReturnType != type)
                {
                    GatherAliasesToMethods(method.ReturnType, aliasesToMethods);
                }
            }
        }

        /// <summary>
        /// Gathers a mapping between scope names and the aliases that must be
        /// unique within them.
        /// </summary>
        /// <returns>The scope-to-aliases mapping.</returns>
        private IDictionary<string, ICollection<string>> GatherScopeConstraints()
        {
            var result = new Dictionary<string, ICollection<string>>();
            GatherScopeConstraints(this.Type, result);
            return result;
        }

        /// <summary>
        /// Inner implementation of <see cref="GatherScopeConstraints()"/>;
        /// scans for <see cref="UniqueInScopeAttribute"/> attributes.
        /// </summary>
        /// <param name="type">The current state type to scan.</param>
        /// <param name="scopesToAliases">The mapping being filled.</param>
        private static void GatherScopeConstraints(Type type, IDictionary<string, ICollection<string>> scopesToAliases)
        {
            foreach (MethodInfo method in type.GetMethods())
            {
                UniqueInScopeAttribute uniqueInScope = method.GetAttribute<UniqueInScopeAttribute>();
                if (uniqueInScope != null)
                {
                    scopesToAliases.Add(uniqueInScope.Scope, ResolveAlias(method));
                }

                if (method.ReturnType != type)
                {
                    GatherScopeConstraints(method.ReturnType, scopesToAliases);
                }
            }
        }

        /// <summary>
        /// Builds the validation automata for one scope: the intersection of a
        /// "zero or one occurrences" automata per unique-method constraint.
        /// </summary>
        /// <param name="scopeName">The scope's name.</param>
        /// <param name="constraints">The method groups unique in the scope.</param>
        /// <param name="alphabet">The automata's alphabet.</param>
        /// <returns>The scope's validation automata.</returns>
        private static IDeterministicAutomata<MethodInfo> BuildScopeValidation(string scopeName, IEnumerable<ICollection<MethodInfo>> constraints, IEnumerable<MethodInfo> alphabet)
        {
            List<MethodInfo> resetTokens = GetScopeResetTokens(scopeName, alphabet);
            var validation = FundamentalAutomatas.True("Root", new ToStringComparer<MethodInfo>());

            foreach (ICollection<MethodInfo> uniqueMethods in constraints)
            {
                var atMostOnce = FundamentalAutomatas.ZeroOrOneTimesSymbol(uniqueMethods, resetTokens, new ToStringComparer<MethodInfo>());
                validation = validation.Intersect(atMostOnce);
            }

            return validation;
        }

        #endregion
    }
}
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Generated code. DO NOT EDIT!
// NOTE(review): this file is emitted by the GAPIC generator; change the
// generator configuration rather than hand-editing these classes.

using gax = Google.Api.Gax;
using gciv = Google.Cloud.Iot.V1;
using sys = System;

namespace Google.Cloud.Iot.V1
{
    /// <summary>Resource name for the <c>Device</c> resource.</summary>
    public sealed partial class DeviceName : gax::IResourceName, sys::IEquatable<DeviceName>
    {
        /// <summary>The possible contents of <see cref="DeviceName"/>.</summary>
        public enum ResourceNameType
        {
            /// <summary>An unparsed resource name.</summary>
            Unparsed = 0,

            /// <summary>
            /// A resource name with pattern
            /// <c>projects/{project}/locations/{location}/registries/{registry}/devices/{device}</c>.
            /// </summary>
            ProjectLocationRegistryDevice = 1,
        }

        private static gax::PathTemplate s_projectLocationRegistryDevice = new gax::PathTemplate("projects/{project}/locations/{location}/registries/{registry}/devices/{device}");

        /// <summary>Creates a <see cref="DeviceName"/> containing an unparsed resource name.</summary>
        /// <param name="unparsedResourceName">The unparsed resource name. Must not be <c>null</c>.</param>
        /// <returns>
        /// A new instance of <see cref="DeviceName"/> containing the provided <paramref name="unparsedResourceName"/>.
        /// </returns>
        public static DeviceName FromUnparsed(gax::UnparsedResourceName unparsedResourceName) =>
            new DeviceName(ResourceNameType.Unparsed, gax::GaxPreconditions.CheckNotNull(unparsedResourceName, nameof(unparsedResourceName)));

        /// <summary>
        /// Creates a <see cref="DeviceName"/> with the pattern
        /// <c>projects/{project}/locations/{location}/registries/{registry}/devices/{device}</c>.
        /// </summary>
        /// <param name="projectId">The <c>Project</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="locationId">The <c>Location</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="registryId">The <c>Registry</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="deviceId">The <c>Device</c> ID. Must not be <c>null</c> or empty.</param>
        /// <returns>A new instance of <see cref="DeviceName"/> constructed from the provided ids.</returns>
        public static DeviceName FromProjectLocationRegistryDevice(string projectId, string locationId, string registryId, string deviceId) =>
            new DeviceName(ResourceNameType.ProjectLocationRegistryDevice, projectId: gax::GaxPreconditions.CheckNotNullOrEmpty(projectId, nameof(projectId)), locationId: gax::GaxPreconditions.CheckNotNullOrEmpty(locationId, nameof(locationId)), registryId: gax::GaxPreconditions.CheckNotNullOrEmpty(registryId, nameof(registryId)), deviceId: gax::GaxPreconditions.CheckNotNullOrEmpty(deviceId, nameof(deviceId)));

        /// <summary>
        /// Formats the IDs into the string representation of this <see cref="DeviceName"/> with pattern
        /// <c>projects/{project}/locations/{location}/registries/{registry}/devices/{device}</c>.
        /// Delegates to <see cref="FormatProjectLocationRegistryDevice"/>.
        /// </summary>
        /// <param name="projectId">The <c>Project</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="locationId">The <c>Location</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="registryId">The <c>Registry</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="deviceId">The <c>Device</c> ID. Must not be <c>null</c> or empty.</param>
        /// <returns>The formatted resource-name string.</returns>
        public static string Format(string projectId, string locationId, string registryId, string deviceId) =>
            FormatProjectLocationRegistryDevice(projectId, locationId, registryId, deviceId);

        /// <summary>
        /// Formats the IDs into the string representation of this <see cref="DeviceName"/> with pattern
        /// <c>projects/{project}/locations/{location}/registries/{registry}/devices/{device}</c>.
        /// </summary>
        /// <param name="projectId">The <c>Project</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="locationId">The <c>Location</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="registryId">The <c>Registry</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="deviceId">The <c>Device</c> ID. Must not be <c>null</c> or empty.</param>
        /// <returns>The formatted resource-name string.</returns>
        public static string FormatProjectLocationRegistryDevice(string projectId, string locationId, string registryId, string deviceId) =>
            s_projectLocationRegistryDevice.Expand(gax::GaxPreconditions.CheckNotNullOrEmpty(projectId, nameof(projectId)), gax::GaxPreconditions.CheckNotNullOrEmpty(locationId, nameof(locationId)), gax::GaxPreconditions.CheckNotNullOrEmpty(registryId, nameof(registryId)), gax::GaxPreconditions.CheckNotNullOrEmpty(deviceId, nameof(deviceId)));

        /// <summary>Parses the given resource name string into a new <see cref="DeviceName"/> instance.</summary>
        /// <remarks>
        /// To parse successfully, the resource name must be formatted as
        /// <c>projects/{project}/locations/{location}/registries/{registry}/devices/{device}</c>.
        /// </remarks>
        /// <param name="deviceName">The resource name in string form. Must not be <c>null</c>.</param>
        /// <returns>The parsed <see cref="DeviceName"/> if successful.</returns>
        public static DeviceName Parse(string deviceName) => Parse(deviceName, false);

        /// <summary>
        /// Parses the given resource name string into a new <see cref="DeviceName"/> instance; optionally allowing an
        /// unparseable resource name.
        /// </summary>
        /// <remarks>
        /// To parse successfully, the resource name must be formatted as
        /// <c>projects/{project}/locations/{location}/registries/{registry}/devices/{device}</c>,
        /// or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
        /// </remarks>
        /// <param name="deviceName">The resource name in string form. Must not be <c>null</c>.</param>
        /// <param name="allowUnparsed">
        /// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
        /// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
        /// specified.
        /// </param>
        /// <returns>The parsed <see cref="DeviceName"/> if successful.</returns>
        public static DeviceName Parse(string deviceName, bool allowUnparsed) =>
            TryParse(deviceName, allowUnparsed, out DeviceName result) ? result : throw new sys::ArgumentException("The given resource-name matches no pattern.");

        /// <summary>
        /// Tries to parse the given resource name string into a new <see cref="DeviceName"/> instance.
        /// </summary>
        /// <remarks>
        /// To parse successfully, the resource name must be formatted as
        /// <c>projects/{project}/locations/{location}/registries/{registry}/devices/{device}</c>.
        /// </remarks>
        /// <param name="deviceName">The resource name in string form. Must not be <c>null</c>.</param>
        /// <param name="result">
        /// When this method returns, the parsed <see cref="DeviceName"/>, or <c>null</c> if parsing failed.
        /// </param>
        /// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
        public static bool TryParse(string deviceName, out DeviceName result) => TryParse(deviceName, false, out result);

        /// <summary>
        /// Tries to parse the given resource name string into a new <see cref="DeviceName"/> instance; optionally
        /// allowing an unparseable resource name.
        /// </summary>
        /// <remarks>
        /// To parse successfully, the resource name must be formatted as
        /// <c>projects/{project}/locations/{location}/registries/{registry}/devices/{device}</c>,
        /// or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
        /// </remarks>
        /// <param name="deviceName">The resource name in string form. Must not be <c>null</c>.</param>
        /// <param name="allowUnparsed">
        /// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
        /// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
        /// specified.
        /// </param>
        /// <param name="result">
        /// When this method returns, the parsed <see cref="DeviceName"/>, or <c>null</c> if parsing failed.
        /// </param>
        /// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
        public static bool TryParse(string deviceName, bool allowUnparsed, out DeviceName result)
        {
            gax::GaxPreconditions.CheckNotNull(deviceName, nameof(deviceName));
            gax::TemplatedResourceName resourceName;
            if (s_projectLocationRegistryDevice.TryParseName(deviceName, out resourceName))
            {
                result = FromProjectLocationRegistryDevice(resourceName[0], resourceName[1], resourceName[2], resourceName[3]);
                return true;
            }
            if (allowUnparsed)
            {
                if (gax::UnparsedResourceName.TryParse(deviceName, out gax::UnparsedResourceName unparsedResourceName))
                {
                    result = FromUnparsed(unparsedResourceName);
                    return true;
                }
            }
            result = null;
            return false;
        }

        private DeviceName(ResourceNameType type, gax::UnparsedResourceName unparsedResourceName = null, string deviceId = null, string locationId = null, string projectId = null, string registryId = null)
        {
            Type = type;
            UnparsedResource = unparsedResourceName;
            DeviceId = deviceId;
            LocationId = locationId;
            ProjectId = projectId;
            RegistryId = registryId;
        }

        /// <summary>
        /// Constructs a new instance of a <see cref="DeviceName"/> class from the component parts of pattern
        /// <c>projects/{project}/locations/{location}/registries/{registry}/devices/{device}</c>
        /// </summary>
        /// <param name="projectId">The <c>Project</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="locationId">The <c>Location</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="registryId">The <c>Registry</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="deviceId">The <c>Device</c> ID. Must not be <c>null</c> or empty.</param>
        public DeviceName(string projectId, string locationId, string registryId, string deviceId) : this(ResourceNameType.ProjectLocationRegistryDevice, projectId: gax::GaxPreconditions.CheckNotNullOrEmpty(projectId, nameof(projectId)), locationId: gax::GaxPreconditions.CheckNotNullOrEmpty(locationId, nameof(locationId)), registryId: gax::GaxPreconditions.CheckNotNullOrEmpty(registryId, nameof(registryId)), deviceId: gax::GaxPreconditions.CheckNotNullOrEmpty(deviceId, nameof(deviceId)))
        {
        }

        /// <summary>The <see cref="ResourceNameType"/> of the contained resource name.</summary>
        public ResourceNameType Type { get; }

        /// <summary>
        /// The contained <see cref="gax::UnparsedResourceName"/>. Only non-<c>null</c> if this instance contains an
        /// unparsed resource name.
        /// </summary>
        public gax::UnparsedResourceName UnparsedResource { get; }

        /// <summary>
        /// The <c>Device</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
        /// </summary>
        public string DeviceId { get; }

        /// <summary>
        /// The <c>Location</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
        /// </summary>
        public string LocationId { get; }

        /// <summary>
        /// The <c>Project</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
        /// </summary>
        public string ProjectId { get; }

        /// <summary>
        /// The <c>Registry</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
        /// </summary>
        public string RegistryId { get; }

        /// <summary>Whether this instance contains a resource name with a known pattern.</summary>
        public bool IsKnownPattern => Type != ResourceNameType.Unparsed;

        /// <summary>The string representation of the resource name.</summary>
        /// <returns>The string representation of the resource name.</returns>
        public override string ToString()
        {
            switch (Type)
            {
                case ResourceNameType.Unparsed: return UnparsedResource.ToString();
                case ResourceNameType.ProjectLocationRegistryDevice: return s_projectLocationRegistryDevice.Expand(ProjectId, LocationId, RegistryId, DeviceId);
                default: throw new sys::InvalidOperationException("Unrecognized resource-type.");
            }
        }

        /// <summary>Returns a hash code for this resource name.</summary>
        public override int GetHashCode() => ToString().GetHashCode();

        /// <inheritdoc/>
        public override bool Equals(object obj) => Equals(obj as DeviceName);

        /// <inheritdoc/>
        public bool Equals(DeviceName other) => ToString() == other?.ToString();

        /// <inheritdoc/>
        public static bool operator ==(DeviceName a, DeviceName b) => ReferenceEquals(a, b) || (a?.Equals(b) ?? false);

        /// <inheritdoc/>
        public static bool operator !=(DeviceName a, DeviceName b) => !(a == b);
    }

    /// <summary>Resource name for the <c>Registry</c> resource.</summary>
    public sealed partial class RegistryName : gax::IResourceName, sys::IEquatable<RegistryName>
    {
        /// <summary>The possible contents of <see cref="RegistryName"/>.</summary>
        public enum ResourceNameType
        {
            /// <summary>An unparsed resource name.</summary>
            Unparsed = 0,

            /// <summary>
            /// A resource name with pattern <c>projects/{project}/locations/{location}/registries/{registry}</c>.
            /// </summary>
            ProjectLocationRegistry = 1,
        }

        private static gax::PathTemplate s_projectLocationRegistry = new gax::PathTemplate("projects/{project}/locations/{location}/registries/{registry}");

        /// <summary>Creates a <see cref="RegistryName"/> containing an unparsed resource name.</summary>
        /// <param name="unparsedResourceName">The unparsed resource name. Must not be <c>null</c>.</param>
        /// <returns>
        /// A new instance of <see cref="RegistryName"/> containing the provided <paramref name="unparsedResourceName"/>
        /// .
        /// </returns>
        public static RegistryName FromUnparsed(gax::UnparsedResourceName unparsedResourceName) =>
            new RegistryName(ResourceNameType.Unparsed, gax::GaxPreconditions.CheckNotNull(unparsedResourceName, nameof(unparsedResourceName)));

        /// <summary>
        /// Creates a <see cref="RegistryName"/> with the pattern
        /// <c>projects/{project}/locations/{location}/registries/{registry}</c>.
        /// </summary>
        /// <param name="projectId">The <c>Project</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="locationId">The <c>Location</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="registryId">The <c>Registry</c> ID. Must not be <c>null</c> or empty.</param>
        /// <returns>A new instance of <see cref="RegistryName"/> constructed from the provided ids.</returns>
        public static RegistryName FromProjectLocationRegistry(string projectId, string locationId, string registryId) =>
            new RegistryName(ResourceNameType.ProjectLocationRegistry, projectId: gax::GaxPreconditions.CheckNotNullOrEmpty(projectId, nameof(projectId)), locationId: gax::GaxPreconditions.CheckNotNullOrEmpty(locationId, nameof(locationId)), registryId: gax::GaxPreconditions.CheckNotNullOrEmpty(registryId, nameof(registryId)));

        /// <summary>
        /// Formats the IDs into the string representation of this <see cref="RegistryName"/> with pattern
        /// <c>projects/{project}/locations/{location}/registries/{registry}</c>.
        /// Delegates to <see cref="FormatProjectLocationRegistry"/>.
        /// </summary>
        /// <param name="projectId">The <c>Project</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="locationId">The <c>Location</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="registryId">The <c>Registry</c> ID. Must not be <c>null</c> or empty.</param>
        /// <returns>The formatted resource-name string.</returns>
        public static string Format(string projectId, string locationId, string registryId) =>
            FormatProjectLocationRegistry(projectId, locationId, registryId);

        /// <summary>
        /// Formats the IDs into the string representation of this <see cref="RegistryName"/> with pattern
        /// <c>projects/{project}/locations/{location}/registries/{registry}</c>.
        /// </summary>
        /// <param name="projectId">The <c>Project</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="locationId">The <c>Location</c> ID. Must not be <c>null</c> or empty.</param>
        /// <param name="registryId">The <c>Registry</c> ID. Must not be <c>null</c> or empty.</param>
        /// <returns>
        /// The string representation of this <see cref="RegistryName"/> with pattern
        /// <c>projects/{project}/locations/{location}/registries/{registry}</c>.
/// </returns> public static string FormatProjectLocationRegistry(string projectId, string locationId, string registryId) => s_projectLocationRegistry.Expand(gax::GaxPreconditions.CheckNotNullOrEmpty(projectId, nameof(projectId)), gax::GaxPreconditions.CheckNotNullOrEmpty(locationId, nameof(locationId)), gax::GaxPreconditions.CheckNotNullOrEmpty(registryId, nameof(registryId))); /// <summary>Parses the given resource name string into a new <see cref="RegistryName"/> instance.</summary> /// <remarks> /// To parse successfully, the resource name must be formatted as one of the following: /// <list type="bullet"> /// <item> /// <description><c>projects/{project}/locations/{location}/registries/{registry}</c></description> /// </item> /// </list> /// </remarks> /// <param name="registryName">The resource name in string form. Must not be <c>null</c>.</param> /// <returns>The parsed <see cref="RegistryName"/> if successful.</returns> public static RegistryName Parse(string registryName) => Parse(registryName, false); /// <summary> /// Parses the given resource name string into a new <see cref="RegistryName"/> instance; optionally allowing an /// unparseable resource name. /// </summary> /// <remarks> /// To parse successfully, the resource name must be formatted as one of the following: /// <list type="bullet"> /// <item> /// <description><c>projects/{project}/locations/{location}/registries/{registry}</c></description> /// </item> /// </list> /// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>. /// </remarks> /// <param name="registryName">The resource name in string form. Must not be <c>null</c>.</param> /// <param name="allowUnparsed"> /// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/> /// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is /// specified. 
/// </param> /// <returns>The parsed <see cref="RegistryName"/> if successful.</returns> public static RegistryName Parse(string registryName, bool allowUnparsed) => TryParse(registryName, allowUnparsed, out RegistryName result) ? result : throw new sys::ArgumentException("The given resource-name matches no pattern."); /// <summary> /// Tries to parse the given resource name string into a new <see cref="RegistryName"/> instance. /// </summary> /// <remarks> /// To parse successfully, the resource name must be formatted as one of the following: /// <list type="bullet"> /// <item> /// <description><c>projects/{project}/locations/{location}/registries/{registry}</c></description> /// </item> /// </list> /// </remarks> /// <param name="registryName">The resource name in string form. Must not be <c>null</c>.</param> /// <param name="result"> /// When this method returns, the parsed <see cref="RegistryName"/>, or <c>null</c> if parsing failed. /// </param> /// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns> public static bool TryParse(string registryName, out RegistryName result) => TryParse(registryName, false, out result); /// <summary> /// Tries to parse the given resource name string into a new <see cref="RegistryName"/> instance; optionally /// allowing an unparseable resource name. /// </summary> /// <remarks> /// To parse successfully, the resource name must be formatted as one of the following: /// <list type="bullet"> /// <item> /// <description><c>projects/{project}/locations/{location}/registries/{registry}</c></description> /// </item> /// </list> /// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>. /// </remarks> /// <param name="registryName">The resource name in string form. 
Must not be <c>null</c>.</param> /// <param name="allowUnparsed"> /// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/> /// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is /// specified. /// </param> /// <param name="result"> /// When this method returns, the parsed <see cref="RegistryName"/>, or <c>null</c> if parsing failed. /// </param> /// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns> public static bool TryParse(string registryName, bool allowUnparsed, out RegistryName result) { gax::GaxPreconditions.CheckNotNull(registryName, nameof(registryName)); gax::TemplatedResourceName resourceName; if (s_projectLocationRegistry.TryParseName(registryName, out resourceName)) { result = FromProjectLocationRegistry(resourceName[0], resourceName[1], resourceName[2]); return true; } if (allowUnparsed) { if (gax::UnparsedResourceName.TryParse(registryName, out gax::UnparsedResourceName unparsedResourceName)) { result = FromUnparsed(unparsedResourceName); return true; } } result = null; return false; } private RegistryName(ResourceNameType type, gax::UnparsedResourceName unparsedResourceName = null, string locationId = null, string projectId = null, string registryId = null) { Type = type; UnparsedResource = unparsedResourceName; LocationId = locationId; ProjectId = projectId; RegistryId = registryId; } /// <summary> /// Constructs a new instance of a <see cref="RegistryName"/> class from the component parts of pattern /// <c>projects/{project}/locations/{location}/registries/{registry}</c> /// </summary> /// <param name="projectId">The <c>Project</c> ID. Must not be <c>null</c> or empty.</param> /// <param name="locationId">The <c>Location</c> ID. Must not be <c>null</c> or empty.</param> /// <param name="registryId">The <c>Registry</c> ID. 
Must not be <c>null</c> or empty.</param> public RegistryName(string projectId, string locationId, string registryId) : this(ResourceNameType.ProjectLocationRegistry, projectId: gax::GaxPreconditions.CheckNotNullOrEmpty(projectId, nameof(projectId)), locationId: gax::GaxPreconditions.CheckNotNullOrEmpty(locationId, nameof(locationId)), registryId: gax::GaxPreconditions.CheckNotNullOrEmpty(registryId, nameof(registryId))) { } /// <summary>The <see cref="ResourceNameType"/> of the contained resource name.</summary> public ResourceNameType Type { get; } /// <summary> /// The contained <see cref="gax::UnparsedResourceName"/>. Only non-<c>null</c> if this instance contains an /// unparsed resource name. /// </summary> public gax::UnparsedResourceName UnparsedResource { get; } /// <summary> /// The <c>Location</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name. /// </summary> public string LocationId { get; } /// <summary> /// The <c>Project</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name. /// </summary> public string ProjectId { get; } /// <summary> /// The <c>Registry</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name. 
/// </summary> public string RegistryId { get; } /// <summary>Whether this instance contains a resource name with a known pattern.</summary> public bool IsKnownPattern => Type != ResourceNameType.Unparsed; /// <summary>The string representation of the resource name.</summary> /// <returns>The string representation of the resource name.</returns> public override string ToString() { switch (Type) { case ResourceNameType.Unparsed: return UnparsedResource.ToString(); case ResourceNameType.ProjectLocationRegistry: return s_projectLocationRegistry.Expand(ProjectId, LocationId, RegistryId); default: throw new sys::InvalidOperationException("Unrecognized resource-type."); } } /// <summary>Returns a hash code for this resource name.</summary> public override int GetHashCode() => ToString().GetHashCode(); /// <inheritdoc/> public override bool Equals(object obj) => Equals(obj as RegistryName); /// <inheritdoc/> public bool Equals(RegistryName other) => ToString() == other?.ToString(); /// <inheritdoc/> public static bool operator ==(RegistryName a, RegistryName b) => ReferenceEquals(a, b) || (a?.Equals(b) ?? false); /// <inheritdoc/> public static bool operator !=(RegistryName a, RegistryName b) => !(a == b); } public partial class Device { /// <summary> /// <see cref="gciv::DeviceName"/>-typed view over the <see cref="Name"/> resource name property. /// </summary> public gciv::DeviceName DeviceName { get => string.IsNullOrEmpty(Name) ? null : gciv::DeviceName.Parse(Name, allowUnparsed: true); set => Name = value?.ToString() ?? ""; } } public partial class DeviceRegistry { /// <summary> /// <see cref="gciv::RegistryName"/>-typed view over the <see cref="Name"/> resource name property. /// </summary> public gciv::RegistryName RegistryName { get => string.IsNullOrEmpty(Name) ? null : gciv::RegistryName.Parse(Name, allowUnparsed: true); set => Name = value?.ToString() ?? ""; } } }
// Copyright 2011 Microsoft Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

namespace Microsoft.Data.OData.Metadata
{
    #region Namespaces
    using System;
    using System.Collections.Generic;
    using System.Data.Services.Common;
    using System.Diagnostics;
    using System.Linq;
    using Microsoft.Data.Edm;
    using Microsoft.Data.OData;
    #endregion Namespaces

    /// <summary>
    /// Annotation stored on an entity type to hold entity property mapping information.
    /// </summary>
    internal sealed class ODataEntityPropertyMappingCache
    {
        /// <summary>
        /// A list of the EPM mappings this cache was constructed for.
        /// Used to determine whether the cache is dirty or not.
        /// </summary>
        private readonly ODataEntityPropertyMappingCollection mappings;

        /// <summary>
        /// Inherited EntityPropertyMapping attributes.
        /// </summary>
        private readonly List<EntityPropertyMappingAttribute> mappingsForInheritedProperties;

        /// <summary>
        /// Own EntityPropertyMapping attributes.
        /// </summary>
        private readonly List<EntityPropertyMappingAttribute> mappingsForDeclaredProperties;

        /// <summary>
        /// EPM source tree for the type this annotation belongs to.
        /// </summary>
        private readonly EpmSourceTree epmSourceTree;

        /// <summary>
        /// EPM target tree for the type this annotation belongs to.
        /// </summary>
        private readonly EpmTargetTree epmTargetTree;

        /// <summary>
        /// EDM model.
        /// </summary>
        private readonly IEdmModel model;

        /// <summary>The total number of entity property mappings for the entity type that this cache is created for (on the type itself and all its base types).</summary>
        private readonly int totalMappingCount;

        /// <summary>
        /// Constructor.
        /// </summary>
        /// <param name="mappings">The EPM mappings to create the cache for.</param>
        /// <param name="model">The EDM model.</param>
        /// <param name="totalMappingCount">The total number of entity property mappings
        /// for the entity type that this cache is created for (on the type itself and all its base types).</param>
        internal ODataEntityPropertyMappingCache(ODataEntityPropertyMappingCollection mappings, IEdmModel model, int totalMappingCount)
        {
            DebugUtils.CheckNoExternalCallers();
            Debug.Assert(model.IsUserModel(), "model.IsUserModel()");

            this.mappings = mappings;
            this.model = model;
            this.totalMappingCount = totalMappingCount;

            // Note that we new up everything here eagerly because we will only create the EPM annotation for types
            // for which we know for sure that they have EPM and thus we will need all of these anyway.
            // Declared mappings start out as a copy of the supplied collection; BuildEpmForType later moves
            // entries that turn out to describe inherited properties into mappingsForInheritedProperties.
            this.mappingsForInheritedProperties = new List<EntityPropertyMappingAttribute>();
            this.mappingsForDeclaredProperties = mappings == null
                ? new List<EntityPropertyMappingAttribute>()
                : new List<EntityPropertyMappingAttribute>(mappings);

            // The source tree writes into the target tree, so the target tree must exist first.
            this.epmTargetTree = new EpmTargetTree();
            this.epmSourceTree = new EpmSourceTree(this.epmTargetTree);
        }

        /// <summary>
        /// Inherited EntityPropertyMapping attributes.
        /// </summary>
        internal List<EntityPropertyMappingAttribute> MappingsForInheritedProperties
        {
            get
            {
                DebugUtils.CheckNoExternalCallers();
                return this.mappingsForInheritedProperties;
            }
        }

        /// <summary>
        /// Own EntityPropertyMapping attributes.
        /// </summary>
        internal List<EntityPropertyMappingAttribute> MappingsForDeclaredProperties
        {
            get
            {
                DebugUtils.CheckNoExternalCallers();
                return this.mappingsForDeclaredProperties;
            }
        }

        /// <summary>
        /// EPM source tree for the type this annotation belongs to.
        /// </summary>
        internal EpmSourceTree EpmSourceTree
        {
            get
            {
                DebugUtils.CheckNoExternalCallers();
                return this.epmSourceTree;
            }
        }

        /// <summary>
        /// EPM target tree for the type this annotation belongs to.
        /// </summary>
        internal EpmTargetTree EpmTargetTree
        {
            get
            {
                DebugUtils.CheckNoExternalCallers();
                return this.epmTargetTree;
            }
        }

        /// <summary>
        /// All EntityPropertyMapping attributes.
        /// </summary>
        internal IEnumerable<EntityPropertyMappingAttribute> AllMappings
        {
            get
            {
                DebugUtils.CheckNoExternalCallers();
                return this.MappingsForDeclaredProperties.Concat(this.MappingsForInheritedProperties);
            }
        }

        /// <summary>
        /// The total number of entity property mappings for the entity type that this cache is created for (on the type itself and all its base types).
        /// </summary>
        internal int TotalMappingCount
        {
            get
            {
                DebugUtils.CheckNoExternalCallers();
                return this.totalMappingCount;
            }
        }

        /// <summary>
        /// Initializes the EPM annotation with EPM information from the specified type.
        /// </summary>
        /// <param name="definingEntityType">Entity type to use the EPM information from.</param>
        /// <param name="affectedEntityType">Entity type for this the EPM information is being built.</param>
        internal void BuildEpmForType(IEdmEntityType definingEntityType, IEdmEntityType affectedEntityType)
        {
            DebugUtils.CheckNoExternalCallers();
            Debug.Assert(definingEntityType != null, "definingEntityType != null");
            Debug.Assert(affectedEntityType != null, "affectedEntityType != null");

            // Recurse base-type-first so mappings declared closer to the root are added
            // to the source tree before those of derived types.
            if (definingEntityType.BaseType != null)
            {
                this.BuildEpmForType(definingEntityType.BaseEntityType(), affectedEntityType);
            }

            ODataEntityPropertyMappingCollection mappingsForType = this.model.GetEntityPropertyMappings(definingEntityType);
            if (mappingsForType == null)
            {
                return;
            }

            foreach (EntityPropertyMappingAttribute mapping in mappingsForType)
            {
                this.epmSourceTree.Add(new EntityPropertyMappingInfo(mapping, definingEntityType, affectedEntityType));

                // Only on the type the cache is built for: if the mapped source property is not
                // declared on that type, reclassify the mapping as "inherited" rather than "declared".
                if (definingEntityType == affectedEntityType)
                {
                    if (!PropertyExistsOnType(affectedEntityType, mapping))
                    {
                        this.MappingsForInheritedProperties.Add(mapping);
                        this.MappingsForDeclaredProperties.Remove(mapping);
                    }
                }
            }
        }

        /// <summary>
        /// Checks whether the current cache is dirty with respect to the <paramref name="propertyMappings"/>.
        /// </summary>
        /// <param name="propertyMappings">The EPM mappings to check this cache against.</param>
        /// <returns>true if the <paramref name="propertyMappings"/> are not the same as the ones the cache has been created for (or have changed).</returns>
        internal bool IsDirty(ODataEntityPropertyMappingCollection propertyMappings)
        {
            DebugUtils.CheckNoExternalCallers();

            // NOTE: we only allow adding more mappings to an existing collection; so if the
            // references of the collections are the same and the counts are the same there has been no change.
            if (this.mappings == null && propertyMappings == null)
            {
                return false;
            }

            // Different collection instance than the one the cache was built from -> dirty.
            if (!object.ReferenceEquals(this.mappings, propertyMappings))
            {
                return true;
            }

            // Same instance: dirty only if mappings were appended since the cache was built.
            return this.mappings.Count != propertyMappings.Count;
        }

        /// <summary>
        /// Does given property in the attribute exist in the specified type.
        /// </summary>
        /// <param name="structuredType">The type to inspect.</param>
        /// <param name="epmAttribute">Attribute which has PropertyName.</param>
        /// <returns>true if property exists in the specified type, false otherwise.</returns>
        private static bool PropertyExistsOnType(IEdmStructuredType structuredType, EntityPropertyMappingAttribute epmAttribute)
        {
            Debug.Assert(structuredType != null, "structuredType != null");
            Debug.Assert(epmAttribute != null, "epmAttribute != null");

            // Only the first segment of a '/'-separated source path can be a property
            // declared directly on the type; inspect just that segment.
            int indexOfSeparator = epmAttribute.SourcePath.IndexOf('/');
            String propertyToLookFor = indexOfSeparator == -1
                ? epmAttribute.SourcePath
                : epmAttribute.SourcePath.Substring(0, indexOfSeparator);
            return structuredType.DeclaredProperties.Any(p => p.Name == propertyToLookFor);
        }
    }
}
//
// MassStorageDevice.cs
//
// Author:
//   Aaron Bockover <abockover@novell.com>
//
// Copyright (C) 2008 Novell, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//

using System;
using System.IO;
using System.Collections.Generic;

using Mono.Unix;

using Hyena;

using Banshee.Base;
using Banshee.Hardware;
using Banshee.Collection;
using Banshee.Collection.Database;

namespace Banshee.Dap.MassStorage
{
    // Describes the media capabilities of a generic USB mass-storage player.
    // Defaults can be overridden by an ".is_audio_player" key/value file at the
    // volume root, or (with hard-coded values) by the presence of a Rockbox
    // configuration file.
    public class MassStorageDevice : IDeviceMediaCapabilities
    {
        // The source this device is bound to; provides access to the mounted volume.
        private MassStorageSource source;
        public MassStorageSource Source {
            get { return source; }
            set { source = value; }
        }

        public MassStorageDevice ()
        {
        }

        public MassStorageDevice (MassStorageSource source)
        {
            Source = source;
        }

        // Hook for subclasses; called when the source is initialized. No-op here.
        public virtual void SourceInitialize ()
        {
        }

        // Hook for subclasses to veto/augment track deletion. Always allows deletion here.
        public virtual bool DeleteTrackHook (DatabaseTrackInfo track)
        {
            return true;
        }

        // Loads device configuration from ".is_audio_player" and/or a Rockbox
        // ".rockbox/config.cfg" on the mounted volume.
        // Returns false when neither file exists; otherwise applies the settings
        // found and returns true.
        public virtual bool LoadDeviceConfiguration ()
        {
            string path = IsAudioPlayerPath;
            string path_rockbox = System.IO.Path.Combine (source.Volume.MountPoint, ".rockbox/config.cfg");
            StreamReader reader = null;

            if (!File.Exists (path) && !File.Exists (path_rockbox) ) {
                return false;
            }

            // Rockbox devices get a fixed set of defaults; an ".is_audio_player"
            // file (handled below) can still override any of them.
            if (File.Exists (path_rockbox) ) {
                Hyena.Log.DebugFormat ("Found RockBox Device");
                name = Catalog.GetString ("Rockbox Device");
                audio_folders = new string [] {"Music/","Videos/"};
                video_folders = new string [] {"Videos/"};
                folder_depth = 2;
                playback_mime_types = new string [] {"application/ogg","audio/x-ms-wma","audio/mpeg","audio/mp4","audio/x-wav"};
                playlist_formats = new string [] {"audio/x-mpegurl"};
                playlist_path = "Playlists/";
                cover_art_file_name = "cover.jpg";
                cover_art_file_type = "jpeg";
                cover_art_size = 320;
            }

            if (File.Exists (path)) {
                try {
                    // Note: `reader` is assigned inside the foreach expression so the
                    // finally block below can dispose it even if parsing throws.
                    foreach (KeyValuePair<string, string []> item in new KeyValueParser (reader = new StreamReader (path))) {
                        try {
                            switch (item.Key) {
                                case "name":
                                    name = item.Value[0];
                                    break;

                                case "cover_art_file_type":
                                    cover_art_file_type = item.Value[0].ToLower ();
                                    break;

                                case "cover_art_file_name":
                                    cover_art_file_name = item.Value[0];
                                    break;

                                case "cover_art_size":
                                    Int32.TryParse (item.Value[0], out cover_art_size);
                                    break;

                                case "audio_folders":
                                    audio_folders = item.Value;
                                    break;

                                case "video_folders":
                                    video_folders = item.Value;
                                    break;

                                case "output_formats":
                                    playback_mime_types = item.Value;
                                    break;

                                case "playlist_format":
                                    playlist_formats = item.Value;
                                    break;

                                case "playlist_path":
                                    playlist_path = item.Value[0];
                                    break;

                                case "folder_depth":
                                    if (!Int32.TryParse (item.Value[0], out folder_depth)) {
                                        folder_depth = -1;
                                    }
                                    Hyena.Log.DebugFormat ("MassStorageDevice.LoadDeviceConfiguration {0}", folder_depth);
                                    break;

                                // Unknown keys are reported via the catch below; the throw is
                                // deliberately used as per-item control flow, not a real failure.
                                default:
                                    throw new ApplicationException ("unsupported key");
                            }
                        } catch (Exception e) {
                            // Logs and skips the bad item; parsing continues with the next key.
                            Log.Exception ("Invalid .is_audio_player item " + item.Key, e);
                        }
                    }
                } catch (Exception e) {
                    Log.Exception ("Error parsing " + path, e);
                } finally {
                    if (reader != null) {
                        reader.Dispose ();
                    }
                }
            }

            // NOTE(review): this is set even when only the Rockbox config was found and no
            // ".is_audio_player" file exists — confirm HasIsAudioPlayerFile is meant to be
            // true in that case.
            has_is_audio_player_file = true;
            return true;
        }

        // Hook for subclasses that know where a given track lives on the device.
        // Base implementation reports no knowledge (returns false, path = null).
        public virtual bool GetTrackPath (TrackInfo track, out string path)
        {
            path = null;
            return false;
        }

        private bool has_is_audio_player_file;
        public bool HasIsAudioPlayerFile {
            get { return has_is_audio_player_file; }
        }

        // Full path of the ".is_audio_player" marker/config file on the volume.
        private string IsAudioPlayerPath {
            get { return System.IO.Path.Combine (source.Volume.MountPoint, ".is_audio_player"); }
        }

        private string name;
        // Configured device name, falling back to the volume name when unset.
        public virtual string Name {
            get { return name ?? source.Volume.Name; }
        }

        // Cover art edge size in pixels; 0 unless configured.
        private int cover_art_size;
        public virtual int CoverArtSize {
            get { return cover_art_size; }
        }

        // Maximum folder nesting depth for transfers; -1 means "unspecified".
        private int folder_depth = -1;
        public virtual int FolderDepth {
            get { return folder_depth; }
        }

        private string [] audio_folders = new string[0];
        public virtual string [] AudioFolders {
            get { return audio_folders; }
        }

        private string [] video_folders = new string[0];
        public virtual string [] VideoFolders {
            get { return video_folders; }
        }

        private string cover_art_file_type;
        public virtual string CoverArtFileType {
            get { return cover_art_file_type; }
        }

        private string cover_art_file_name;
        public virtual string CoverArtFileName {
            get { return cover_art_file_name; }
        }

        // NOTE(review): the following arrays stay null when the device has no config
        // file — presumably callers treat null as "no restriction"; verify.
        private string [] playlist_formats;
        public virtual string [] PlaylistFormats {
            get { return playlist_formats; }
        }

        private string playlist_path;
        public virtual string PlaylistPath {
            get { return playlist_path; }
        }

        private string [] playback_mime_types;
        public virtual string [] PlaybackMimeTypes {
            get { return playback_mime_types; }
        }

        public virtual string DeviceType {
            get { return "mass-storage"; }
        }

        // Hook for subclasses to supply themed icon names; none by default.
        public virtual string [] GetIconNames ()
        {
            return null;
        }

        public bool IsType (string type)
        {
            return type == DeviceType;
        }
    }
}
using System; using System.Collections.Generic; using System.IO; using System.Runtime.Serialization; using System.Runtime.Serialization.Formatters.Binary; using Knowledge.Prospector.Common; using Knowledge.Prospector.Data.Entities; using Knowledge.Prospector.Data.Relationships; namespace Knowledge.Prospector.Data.Collections { /// <summary> /// Entity graph used for storing knowledges obtained from article. /// </summary> [Serializable] public class EntityGraph { public EntityGraph() { _relationships.OnAdd += new SetEvent<IRelationship>(_relationships_OnAdd); _subclassRelationships.OnAdd += new SetEvent<SubclassRelationship>(_subclassRelationships_OnAdd); _propertyRelationships.OnAdd += new SetEvent<PropertyRelationship>(_propertyRelationships_OnAdd); _equivalenceRelationships.OnAdd += new SetEvent<EquivalenceRelationship>(_equivalenceRelationships_OnAdd); _ConditionalRuleRelationships.OnAdd += new SetEvent<ConditionalRuleRelationship>(_ConditionalRuleRelationships_OnAdd); } public void Dump(string fileName) { IFormatter formatter = new BinaryFormatter(); Stream stream = new FileStream(fileName, FileMode.Create, FileAccess.Write, FileShare.None); formatter.Serialize(stream, this); stream.Close(); } public static EntityGraph LoadDump(string fileName) { IFormatter formatter = new BinaryFormatter(); Stream stream = new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.Read); EntityGraph result = (EntityGraph)formatter.Deserialize(stream); stream.Close(); return result; } bool _relationships_OnAdd(IRelationship item) { return BeforeAddNewRelationship(item); } bool _equivalenceRelationships_OnAdd(EquivalenceRelationship item) { return BeforeAddNewRelationship(item); } bool _propertyRelationships_OnAdd(PropertyRelationship item) { return BeforeAddNewRelationship(item); } bool _subclassRelationships_OnAdd(SubclassRelationship item) { return BeforeAddNewRelationship(item); } bool _ConditionalRuleRelationships_OnAdd(ConditionalRuleRelationship item) { return 
BeforeAddNewRelationship(item); } /// <summary> /// Adds the relationship to the graph. Also adds the entities from the relationship to the graph. /// Execute IRelationship.GenerateId() before adding. /// </summary> /// <param name="item">The relationship to add to the IEntityGraph.</param> bool BeforeAddNewRelationship(IRelationship item) { if (item == null || item.Entities == null || item.Entities.Count == 0) return false; foreach (ITrueEntity entity in item.Entities) Add(entity); return true; } #region Properties #region Entities IEntitySet<IClassEntity> _classes = new EntitySet<IClassEntity>(); IEntitySet<IPropertyEntity> _properties = new EntitySet<IPropertyEntity>(); IEntitySet<IIndividualEntity> _individuals = new EntitySet<IIndividualEntity>(); IEntitySet<IDatatypeEntity> _datatypes = new EntitySet<IDatatypeEntity>(); /// <summary> /// Gets set of all IClassEntity entities contained in the graph. /// </summary> public IEntitySet<IClassEntity> Classes { get { return _classes; } } /// <summary> /// Gets set of all IPropertyEntity entities contained in the graph. /// </summary> public IEntitySet<IPropertyEntity> Properties { get { return _properties; } } /// <summary> /// Gets set of all IIndividualEntity entities contained in the graph. /// </summary> public IEntitySet<IIndividualEntity> Individuals { get { return _individuals; } } /// <summary> /// Gets set of all IDatatypeEntity entities contained in the graph. 
/// </summary> public IEntitySet<IDatatypeEntity> Datatypes { get { return _datatypes; } } #endregion #region Relationships IRelationshipSet<IRelationship> _relationships = new RelationshipSet<IRelationship>(); IRelationshipSet<SubclassRelationship> _subclassRelationships = new RelationshipSet<SubclassRelationship>(); IRelationshipSet<PropertyRelationship> _propertyRelationships = new RelationshipSet<PropertyRelationship>(); IRelationshipSet<SubpropertyRelationship> _subpropertyRelationships = new RelationshipSet<SubpropertyRelationship>(); IRelationshipSet<EquivalenceRelationship> _equivalenceRelationships = new RelationshipSet<EquivalenceRelationship>(); IRelationshipSet<ConditionalRuleRelationship> _ConditionalRuleRelationships = new RelationshipSet<ConditionalRuleRelationship>(); public IRelationshipSet<IRelationship> Relationships { get { return _relationships; } } /// <summary> /// Gets all SubclassRelationship contained in the graph. /// </summary> public IRelationshipSet<SubclassRelationship> SubclassRelationships { get { return _subclassRelationships; } set { _subclassRelationships = value; } } /// <summary> /// Gets all PropertyRelationship contained in the graph. /// </summary> public IRelationshipSet<PropertyRelationship> PropertyRelationships { get { return _propertyRelationships; } set { _propertyRelationships = value; } } /// <summary> /// Gets all SubpropertyRelationship contained in the graph. /// </summary> public IRelationshipSet<SubpropertyRelationship> SubpropertyRelationships { get { return _subpropertyRelationships; } } /// <summary> /// Gets all EquivalenceRelationship contained in the graph. /// </summary> public IRelationshipSet<EquivalenceRelationship> EquivalenceRelationships { get { return _equivalenceRelationships; } } /// <summary> /// Gets all ConditionalRuleRelationship contained in the graph. 
/// </summary> public IRelationshipSet<ConditionalRuleRelationship> ConditionalRuleRelationships { get { return _ConditionalRuleRelationships; } } #endregion #endregion #region Add functions /// <summary> /// Adds ITrueEntity to the graph. /// </summary> /// <param name="trueEntity"></param> public void Add(ITrueEntity trueEntity) { if (trueEntity is IClassEntity) Add(trueEntity as IClassEntity); else if (trueEntity is IPropertyEntity) Add(trueEntity as IPropertyEntity); else if (trueEntity is IIndividualEntity) Add(trueEntity as IIndividualEntity); else if (trueEntity is IDatatypeEntity) Add(trueEntity as IDatatypeEntity); else throw new ArgumentException("Unsopported subtype of ITrueEntity : " + trueEntity.GetType(), "trueEntity"); } /// <summary> /// Adds IClassEntity to the graph. /// </summary> /// <param name="classEntity"></param> public void Add(IClassEntity classEntity) { Classes.Add(classEntity); } /// <summary> /// Adds IPropertyEntity to the graph. /// </summary> /// <param name="propertyEntity"></param> public void Add(IPropertyEntity propertyEntity) { Properties.Add(propertyEntity); } /// <summary> /// Adds IIndividualEntity to the graph. /// </summary> /// <param name="individualEntity"></param> public void Add(IIndividualEntity individualEntity) { Individuals.Add(individualEntity); } /// <summary> /// Adds IDatatypeEntity to the graph. /// </summary> /// <param name="datatypeEntity"></param> public void Add(IDatatypeEntity datatypeEntity) { Datatypes.Add(datatypeEntity); } /// <summary> /// Adds IRelationship to the graph. 
/// </summary>
/// <param name="relationship">The relationship to add to the general-purpose set.</param>
public void Add(IRelationship relationship)
{
    // Removed long-dead commented-out dispatch code. There is deliberately no
    // runtime dispatch to the typed overloads here: callers holding a concrete
    // relationship type already bind to those overloads at compile time.
    Relationships.Add(relationship);
}

/// <summary>
/// Adds SubclassRelationship to the graph.
/// </summary>
/// <param name="subclassRelationship">The subclass relationship to add.</param>
public void Add(SubclassRelationship subclassRelationship)
{
    SubclassRelationships.Add(subclassRelationship);
}

/// <summary>
/// Adds PropertyRelationship to the graph.
/// </summary>
/// <param name="propertyRelationship">The property relationship to add.</param>
public void Add(PropertyRelationship propertyRelationship)
{
    PropertyRelationships.Add(propertyRelationship);
}

/// <summary>
/// Adds SubpropertyRelationship to the graph.
/// </summary>
/// <param name="subpropertyRelationship">The subproperty relationship to add.</param>
public void Add(SubpropertyRelationship subpropertyRelationship)
{
    SubpropertyRelationships.Add(subpropertyRelationship);
}

/// <summary>
/// Adds EquivalenceRelationship to the graph.
/// </summary>
/// <param name="equivalenceRelationship">The equivalence relationship to add.</param>
public void Add(EquivalenceRelationship equivalenceRelationship)
{
    EquivalenceRelationships.Add(equivalenceRelationship);
}

/// <summary>
/// Adds ConditionalRuleRelationship to the graph.
/// </summary>
/// <param name="conditionalRuleRelationship">The conditional rule relationship to add.</param>
public void Add(ConditionalRuleRelationship conditionalRuleRelationship)
{
    ConditionalRuleRelationships.Add(conditionalRuleRelationship);
}

#endregion

#region Get.. functions

/// <summary>
/// Gets set of all ITrueEntity entities contained in the graph.
/// </summary>
public IEntitySet<ITrueEntity> GetAllEntities()
{
    // Union of the four typed entity sets; the result is a fresh set, so
    // mutating it does not affect the graph.
    IEntitySet<ITrueEntity> all = new EntitySet<ITrueEntity>();
    all.Add<IClassEntity>(Classes);
    all.Add<IPropertyEntity>(Properties);
    all.Add<IIndividualEntity>(Individuals);
    all.Add<IDatatypeEntity>(Datatypes);
    return all;
}

/// <summary>
/// Gets all IRelationship contained in the graph.
/// </summary>
// NOTE(review): despite the summary, SubpropertyRelationships and
// ConditionalRuleRelationships are excluded from both the size estimate and the
// result — confirm whether this omission is intentional or an oversight from
// when those sets were added.
public IRelationship[] GetAllRelationships()
{
    // "size" is only a capacity hint for the list; it does not limit the result.
    int size = Relationships.Count + SubclassRelationships.Count + PropertyRelationships.Count + EquivalenceRelationships.Count;
    List<IRelationship> all = new List<IRelationship>(size);
    // NOTE(review): iterating the IRelationship set as the concrete
    // Relationship type will throw InvalidCastException if the set ever holds
    // another IRelationship implementation — verify all stored items derive
    // from Relationship.
    foreach (Relationship r in Relationships.GetItems())
        all.Add(r);
    foreach (SubclassRelationship sr in SubclassRelationships.GetItems())
        all.Add(sr);
    foreach (PropertyRelationship pr in PropertyRelationships.GetItems())
        all.Add(pr);
    foreach (EquivalenceRelationship er in EquivalenceRelationships.GetItems())
        all.Add(er);
    return all.ToArray();
}

/// <summary>
/// Returns all parent classes of class entity from graph.
/// </summary>
/// <param name="entity">Class entity whose direct parents are requested.</param>
/// <returns>All parent classes.</returns>
public IEntitySet<IClassEntity> GetParents(IClassEntity entity)
{
    IEntitySet<IClassEntity> temp = new EntitySet<IClassEntity>();
    // A class's parents are the "Parent" ends of subclass relationships in
    // which it appears as the "Descendant"; entities are matched by Identity.
    foreach (SubclassRelationship sr in SubclassRelationships.GetItems())
    {
        if (sr.Descendant.Identity == entity.Identity)
            temp.Add(sr.Parent);
    }
    return temp;
}

/// <summary>
/// Returns all parent properties of property entity from graph.
/// </summary>
/// <param name="entity">Property entity whose parents are requested.</param>
/// <returns>All parent properties.</returns>
// NOTE(review): this overload is a stub — it always returns an empty set and
// never consults SubpropertyRelationships. Presumably it should mirror
// GetParents(IClassEntity); confirm before relying on it.
public IEntitySet<IPropertyEntity> GetParents(IPropertyEntity entity)
{
    return new EntitySet<IPropertyEntity>();
}

/// <summary>
/// Returns all properties from graph related with classEntity by PropertyRelationship.
/// </summary>
/// <param name="entity">Class entity whose related properties are requested.</param>
/// <returns>Set of properties linked to the class by a PropertyRelationship.</returns>
public IEntitySet<IPropertyEntity> GetProperties(IClassEntity entity)
{
    IEntitySet<IPropertyEntity> result = new EntitySet<IPropertyEntity>();

    // Scan every class/property link and keep the property side of those
    // anchored at the requested class (matched by Identity).
    foreach (PropertyRelationship relation in PropertyRelationships.GetItems())
    {
        if (relation.Class.Identity != entity.Identity)
            continue;

        result.Add(relation.Property);
    }

    return result;
}

/// <summary>
/// Returns all classes from graph related with propertyEntity by PropertyRelationship.
/// </summary>
/// <param name="entity">Property entity whose related classes are requested.</param>
/// <returns>Set of classes linked to the property by a PropertyRelationship.</returns>
public IEntitySet<IClassEntity> GetClasses(IPropertyEntity entity)
{
    IEntitySet<IClassEntity> result = new EntitySet<IClassEntity>();

    // Mirror image of GetProperties: match on the property side, collect the class side.
    foreach (PropertyRelationship relation in PropertyRelationships.GetItems())
    {
        if (relation.Property.Identity != entity.Identity)
            continue;

        result.Add(relation.Class);
    }

    return result;
}

#endregion
}
}
// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.

using osu.Framework.Allocation;
using osu.Framework.Bindables;
using osu.Framework.Extensions.Color4Extensions;
using osu.Framework.Graphics;
using osu.Framework.Graphics.Colour;
using osu.Framework.Graphics.Containers;
using osu.Framework.Graphics.Effects;
using osu.Framework.Graphics.Shapes;
using osu.Framework.Graphics.Sprites;
using osu.Game.Beatmaps;
using osu.Game.Graphics;
using osu.Game.Graphics.Containers;
using osu.Game.Graphics.Sprites;
using osu.Game.Online.Chat;
using osu.Game.Online.Rooms;
using osu.Game.Overlays;
using osu.Game.Screens.OnlinePlay.Components;
using osuTK;
using osuTK.Graphics;

namespace osu.Game.Screens.OnlinePlay.Lounge.Components
{
    /// <summary>
    /// A lounge panel displaying a single multiplayer/playlists <see cref="Room"/>:
    /// status pills, name, current beatmap, participant avatars and (when set)
    /// a password-protection badge. The heavy content is delay-loaded.
    /// </summary>
    public class DrawableRoom : CompositeDrawable
    {
        protected const float CORNER_RADIUS = 10;
        private const float height = 100;

        public readonly Room Room;

        // NOTE(review): resolved but not referenced in this class's visible code —
        // confirm it is still needed (possibly used by subclasses).
        [Resolved]
        private BeatmapManager beatmaps { get; set; }

        protected Container ButtonsContainer { get; private set; }

        // Local bindables bound to the room's model bindables once delayed load completes.
        private readonly Bindable<MatchType> roomType = new Bindable<MatchType>();
        private readonly Bindable<RoomCategory> roomCategory = new Bindable<RoomCategory>();
        private readonly Bindable<bool> hasPassword = new Bindable<bool>();

        private DrawableRoomParticipantsList drawableRoomParticipantsList;
        private RoomSpecialCategoryPill specialCategoryPill;
        private PasswordProtectedIcon passwordIcon;
        private EndDateInfo endDateInfo;

        private DelayedLoadWrapper wrapper;

        public DrawableRoom(Room room)
        {
            Room = room;

            RelativeSizeAxes = Axes.X;
            Height = height;

            Masking = true;
            CornerRadius = CORNER_RADIUS;
            EdgeEffect = new EdgeEffectParameters
            {
                Type = EdgeEffectType.Shadow,
                Colour = Color4.Black.Opacity(40),
                Radius = 5,
            };
        }

        [BackgroundDependencyLoader]
        private void load(OverlayColourProvider colours)
        {
            ButtonsContainer = new Container
            {
                Anchor = Anchor.CentreRight,
                Origin = Anchor.CentreRight,
                RelativeSizeAxes = Axes.Y,
                AutoSizeAxes = Axes.X
            };

            InternalChildren = new[]
            {
                // This resolves internal 1px gaps due to applying the (parenting) corner radius and masking across multiple filling background sprites.
                new Box
                {
                    RelativeSizeAxes = Axes.Both,
                    Colour = colours.Background5,
                },
                CreateBackground().With(d =>
                {
                    d.RelativeSizeAxes = Axes.Both;
                }),
                wrapper = new DelayedLoadWrapper(() =>
                    new Container
                    {
                        Name = @"Room content",
                        RelativeSizeAxes = Axes.Both,
                        // This negative padding resolves 1px gaps between this background and the background above.
                        Padding = new MarginPadding { Left = 20, Vertical = -0.5f },
                        Child = new Container
                        {
                            RelativeSizeAxes = Axes.Both,
                            Masking = true,
                            CornerRadius = CORNER_RADIUS,
                            Children = new Drawable[]
                            {
                                new GridContainer
                                {
                                    RelativeSizeAxes = Axes.Both,
                                    ColumnDimensions = new[]
                                    {
                                        new Dimension(GridSizeMode.Relative, 0.2f)
                                    },
                                    Content = new[]
                                    {
                                        new Drawable[]
                                        {
                                            new Box
                                            {
                                                RelativeSizeAxes = Axes.Both,
                                                Colour = colours.Background5,
                                            },
                                            new Box
                                            {
                                                RelativeSizeAxes = Axes.Both,
                                                Colour = ColourInfo.GradientHorizontal(colours.Background5, colours.Background5.Opacity(0.3f))
                                            },
                                        }
                                    }
                                },
                                new Container
                                {
                                    Name = @"Left details",
                                    RelativeSizeAxes = Axes.Both,
                                    Padding = new MarginPadding
                                    {
                                        Left = 20,
                                        Vertical = 5
                                    },
                                    Children = new Drawable[]
                                    {
                                        new FillFlowContainer
                                        {
                                            RelativeSizeAxes = Axes.X,
                                            AutoSizeAxes = Axes.Y,
                                            Direction = FillDirection.Vertical,
                                            Children = new Drawable[]
                                            {
                                                new FillFlowContainer
                                                {
                                                    AutoSizeAxes = Axes.Both,
                                                    Direction = FillDirection.Horizontal,
                                                    Spacing = new Vector2(5),
                                                    Children = new Drawable[]
                                                    {
                                                        new RoomStatusPill
                                                        {
                                                            Anchor = Anchor.CentreLeft,
                                                            Origin = Anchor.CentreLeft
                                                        },
                                                        specialCategoryPill = new RoomSpecialCategoryPill
                                                        {
                                                            Anchor = Anchor.CentreLeft,
                                                            Origin = Anchor.CentreLeft
                                                        },
                                                        endDateInfo = new EndDateInfo
                                                        {
                                                            Anchor = Anchor.CentreLeft,
                                                            Origin = Anchor.CentreLeft,
                                                        },
                                                    }
                                                },
                                                new FillFlowContainer
                                                {
                                                    RelativeSizeAxes = Axes.X,
                                                    AutoSizeAxes = Axes.Y,
                                                    Padding = new MarginPadding { Top = 3 },
                                                    Direction = FillDirection.Vertical,
                                                    Children = new Drawable[]
                                                    {
                                                        new RoomNameText(),
                                                        new RoomStatusText()
                                                    }
                                                }
                                            },
                                        },
                                        new FillFlowContainer
                                        {
                                            Anchor = Anchor.BottomLeft,
                                            Origin = Anchor.BottomLeft,
                                            AutoSizeAxes = Axes.Both,
                                            Direction = FillDirection.Horizontal,
                                            Spacing = new Vector2(5),
                                            Children = new Drawable[]
                                            {
                                                new PlaylistCountPill
                                                {
                                                    Anchor = Anchor.CentreLeft,
                                                    Origin = Anchor.CentreLeft,
                                                },
                                                new StarRatingRangeDisplay
                                                {
                                                    Anchor = Anchor.CentreLeft,
                                                    Origin = Anchor.CentreLeft,
                                                    Scale = new Vector2(0.8f)
                                                }
                                            }
                                        }
                                    }
                                },
                                new FillFlowContainer
                                {
                                    Name = "Right content",
                                    Anchor = Anchor.CentreRight,
                                    Origin = Anchor.CentreRight,
                                    AutoSizeAxes = Axes.X,
                                    RelativeSizeAxes = Axes.Y,
                                    Spacing = new Vector2(5),
                                    Padding = new MarginPadding
                                    {
                                        Right = 10,
                                        Vertical = 20,
                                    },
                                    Children = new Drawable[]
                                    {
                                        ButtonsContainer,
                                        drawableRoomParticipantsList = new DrawableRoomParticipantsList
                                        {
                                            Anchor = Anchor.CentreRight,
                                            Origin = Anchor.CentreRight,
                                            NumberOfCircles = NumberOfAvatars
                                        }
                                    }
                                },
                                passwordIcon = new PasswordProtectedIcon { Alpha = 0 }
                            },
                        },
                    }, 0)
                {
                    RelativeSizeAxes = Axes.Both,
                }
            };
        }

        protected override void LoadComplete()
        {
            base.LoadComplete();

            // Bindings are deferred until the delayed content exists, since the
            // callbacks touch drawables constructed inside the wrapper.
            wrapper.DelayedLoadComplete += _ =>
            {
                wrapper.FadeInFromZero(200);

                roomCategory.BindTo(Room.Category);
                roomCategory.BindValueChanged(c =>
                {
                    if (c.NewValue == RoomCategory.Spotlight)
                        specialCategoryPill.Show();
                    else
                        specialCategoryPill.Hide();
                }, true);

                roomType.BindTo(Room.Type);
                roomType.BindValueChanged(t =>
                {
                    // End-date countdown is only meaningful for playlists rooms.
                    endDateInfo.Alpha = t.NewValue == MatchType.Playlists ? 1 : 0;
                }, true);

                hasPassword.BindTo(Room.HasPassword);
                hasPassword.BindValueChanged(v => passwordIcon.Alpha = v.NewValue ? 1 : 0, true);
            };
        }

        protected override IReadOnlyDependencyContainer CreateChildDependencies(IReadOnlyDependencyContainer parent)
        {
            // Expose this room as a cached model so nested components (pills,
            // status text, etc.) can [Resolved]-bind to its bindables.
            return new CachedModelDependencyContainer<Room>(base.CreateChildDependencies(parent))
            {
                Model = { Value = Room }
            };
        }

        private int numberOfAvatars = 7;

        /// <summary>
        /// The maximum number of participant avatars shown; forwarded to the
        /// participants list once it has been constructed.
        /// </summary>
        public int NumberOfAvatars
        {
            get => numberOfAvatars;
            set
            {
                numberOfAvatars = value;

                if (drawableRoomParticipantsList != null)
                    drawableRoomParticipantsList.NumberOfCircles = value;
            }
        }

        protected virtual Drawable CreateBackground() => new OnlinePlayBackgroundSprite();

        private class RoomNameText : OsuSpriteText
        {
            [Resolved(typeof(Room), nameof(Online.Rooms.Room.Name))]
            private Bindable<string> name { get; set; }

            public RoomNameText()
            {
                Font = OsuFont.GetFont(size: 28);
            }

            [BackgroundDependencyLoader]
            private void load()
            {
                Current = name;
            }
        }

        private class RoomStatusText : OnlinePlayComposite
        {
            [Resolved]
            private OsuColour colours { get; set; }

            private SpriteText statusText;
            private LinkFlowContainer beatmapText;

            public RoomStatusText()
            {
                RelativeSizeAxes = Axes.X;
                AutoSizeAxes = Axes.Y;
                Width = 0.5f;
            }

            [BackgroundDependencyLoader]
            private void load()
            {
                InternalChild = new GridContainer
                {
                    RelativeSizeAxes = Axes.X,
                    AutoSizeAxes = Axes.Y,
                    ColumnDimensions = new[]
                    {
                        new Dimension(GridSizeMode.AutoSize),
                    },
                    RowDimensions = new[]
                    {
                        new Dimension(GridSizeMode.AutoSize)
                    },
                    Content = new[]
                    {
                        new Drawable[]
                        {
                            statusText = new OsuSpriteText
                            {
                                Font = OsuFont.Default.With(size: 16),
                                Colour = colours.Lime1
                            },
                            beatmapText = new LinkFlowContainer(s =>
                            {
                                s.Font = OsuFont.Default.With(size: 16);
                                s.Colour = colours.Lime1;
                            })
                            {
                                RelativeSizeAxes = Axes.X,
                                // workaround to ensure only the first line of text shows, emulating truncation (but without ellipsis at the end).
                                // TODO: remove when text/link flow can support truncation with ellipsis natively.
                                Height = 16,
                                Masking = true
                            }
                        }
                    }
                };
            }

            protected override void LoadComplete()
            {
                base.LoadComplete();
                SelectedItem.BindValueChanged(onSelectedItemChanged, true);
            }

            private void onSelectedItemChanged(ValueChangedEvent<PlaylistItem> item)
            {
                beatmapText.Clear();

                if (Type.Value == MatchType.Playlists)
                {
                    statusText.Text = "Ready to play";
                    return;
                }

                if (item.NewValue?.Beatmap.Value != null)
                {
                    statusText.Text = "Currently playing ";
                    beatmapText.AddLink(item.NewValue.Beatmap.Value.GetDisplayTitleRomanisable(),
                        LinkAction.OpenBeatmap,
                        item.NewValue.Beatmap.Value.OnlineID.ToString(),
                        creationParameters: s =>
                        {
                            s.Truncate = true;
                        });
                }
            }
        }

        /// <summary>
        /// Corner badge (diagonal banner plus lock icon) shown when the room has a password.
        /// </summary>
        public class PasswordProtectedIcon : CompositeDrawable
        {
            [BackgroundDependencyLoader]
            private void load(OsuColour colours)
            {
                Anchor = Anchor.TopRight;
                Origin = Anchor.TopRight;

                Size = new Vector2(32);

                InternalChildren = new Drawable[]
                {
                    new Box
                    {
                        Anchor = Anchor.TopRight,
                        Origin = Anchor.TopCentre,
                        Colour = colours.Gray5,
                        Rotation = 45,
                        RelativeSizeAxes = Axes.Both,
                        Width = 2,
                    },
                    new SpriteIcon
                    {
                        Icon = FontAwesome.Solid.Lock,
                        Anchor = Anchor.TopRight,
                        Origin = Anchor.TopRight,
                        Margin = new MarginPadding(6),
                        Size = new Vector2(14),
                    }
                };
            }
        }
    }
}
// (c) Copyright Esri, 2010 - 2016
// This source is subject to the Apache 2.0 License.
// Please see http://www.apache.org/licenses/LICENSE-2.0.html for details.
// All other rights reserved.

using System;
using System.Collections.Generic;
using System.Text;
using System.Runtime.InteropServices;
using ESRI.ArcGIS.esriSystem;
using ESRI.ArcGIS.Framework;
using ESRI.ArcGIS.ADF.CATIDs;
using ESRI.ArcGIS.Editor;
using ESRI.ArcGIS.Geodatabase;
using ESRI.ArcGIS.Geometry;
using ESRI.ArcGIS.OSM.GeoProcessing;
using ESRI.ArcGIS.Carto;
using ESRI.ArcGIS.EditorExt;
using System.IO;
using System.Windows.Forms;
using System.Resources;
using System.Xml;
using System.Reflection;
using System.Linq;
using ESRI.ArcGIS.ADF;

namespace ESRI.ArcGIS.OSM.Editor
{
    /// <summary>
    /// ArcMap editor extension that guards OpenStreetMap feature classes during
    /// edit sessions: it hooks editor events to warn about (and abort) edits or
    /// deletes that would break OSM relation membership, and it persists the
    /// OSM editor configuration (base URL, domain/feature-property file paths).
    /// </summary>
    [Guid("faa799f0-bdc7-4ca4-af0c-a8d591c22058")]
    [ClassInterface(ClassInterfaceType.None)]
    [ProgId("OSMEditor.OSMEditorExtension")]
    public class OSMEditorExtension : IExtension
    {
        // NOTE(review): ShowWindow and the SW_* constants are not used in the
        // code shown here — they may be referenced further down the file; if not,
        // they are candidates for removal.
        [DllImport("user32.dll", CharSet = CharSet.Ansi)]
        private static extern int ShowWindow(int hWnd, int nCmdShow);
        private const short SW_SHOW = 5;
        private const short SW_HIDE = 0;

        #region COM Registration Function(s)
        [ComRegisterFunction()]
        [ComVisible(false)]
        static void RegisterFunction(Type registerType)
        {
            // Required for ArcGIS Component Category Registrar support
            ArcGISCategoryRegistration(registerType);
        }

        [ComUnregisterFunction()]
        [ComVisible(false)]
        static void UnregisterFunction(Type registerType)
        {
            // Required for ArcGIS Component Category Registrar support
            ArcGISCategoryUnregistration(registerType);
        }

        #region ArcGIS Component Category Registrar generated code
        /// <summary>
        /// Required method for ArcGIS Component Category registration -
        /// Do not modify the contents of this method with the code editor.
        /// </summary>
        private static void ArcGISCategoryRegistration(Type registerType)
        {
            string regKey = string.Format("HKEY_CLASSES_ROOT\\CLSID\\{{{0}}}", registerType.GUID);
            MxExtension.Register(regKey);
        }

        /// <summary>
        /// Required method for ArcGIS Component Category unregistration -
        /// Do not modify the contents of this method with the code editor.
        /// </summary>
        private static void ArcGISCategoryUnregistration(Type registerType)
        {
            string regKey = string.Format("HKEY_CLASSES_ROOT\\CLSID\\{{{0}}}", registerType.GUID);
            MxExtension.Unregister(regKey);
        }
        #endregion
        #endregion

        private IApplication m_application;
        private IEditEvents2_Event m_editEvents2;
        private IEditEvents_Event m_editEvents;
        private IEditor3 m_editor3 = null;
        // NOTE(review): m_wgs84 is assigned in Startup but not read in the code
        // shown here — confirm it is used elsewhere in this file.
        private ISpatialReference m_wgs84 = null;
        private ResourceManager resourceManager;
        private string m_osmbaseurl = String.Empty;
        private string m_osmDomainsFilePath = String.Empty;
        private string m_osmFeaturePropertiesFilePath = String.Empty;
        private bool m_isSettingsUpdateRequired = false;
        private bool m_LicenseAlertShownOnce = false;
        // NOTE(review): the four fields below are not referenced in the code
        // shown here; they may be used further down the file.
        private ESRI.ArcGIS.Editor.IEditor m_editor = null;
        private ESRI.ArcGIS.Editor.IEnumRow m_enumRow = null;
        Dictionary<string, string> m_editorConfigurationSettings = null;
        OSMFeatureInspectorUI m_osmFeatureInspector = null;
        ESRI.ArcGIS.Editor.IObjectInspector m_inspector = null;
        ESRI.ArcGIS.OSM.OSMClassExtension.OSMUtility _osmUtility = null;

        #region IExtension Members
        /// <summary>
        /// Name of extension. Do not exceed 31 characters
        /// </summary>
        public string Name
        {
            get { return "OSMEditorExtension"; }
        }

        // NOTE(review): if Startup bailed out early (m_application == null) the
        // event fields are never set, and this would throw a NullReferenceException
        // — confirm ArcMap never calls Shutdown in that situation.
        public void Shutdown()
        {
            // persist osm editor specific information like the osm base url, etc.
            OSMGPFactory.StoreOSMEditorSettings(m_editorConfigurationSettings);

            m_editEvents.OnDeleteFeature -= new IEditEvents_OnDeleteFeatureEventHandler(m_editEvents_OnDeleteFeature);
            m_editEvents.OnChangeFeature -= new IEditEvents_OnChangeFeatureEventHandler(m_editEvents_OnChangeFeature);
            m_editEvents.OnCreateFeature -= new IEditEvents_OnCreateFeatureEventHandler(m_editEvents_OnCreateFeature);
            m_editEvents.OnStartEditing -= new IEditEvents_OnStartEditingEventHandler(m_editEvents_OnStartEditing);

            m_editEvents2 = null;
            m_editEvents = null;
            m_editor3 = null;
            m_application = null;
        }

        /// <summary>
        /// Base URL of the OSM server; read re-loads the settings file, write
        /// persists the settings file immediately.
        /// </summary>
        public string OSMBaseURL
        {
            get
            {
                m_editorConfigurationSettings = OSMGPFactory.ReadOSMEditorSettings();
                if (m_editorConfigurationSettings.ContainsKey("osmbaseurl") == true)
                {
                    m_osmbaseurl = m_editorConfigurationSettings["osmbaseurl"];
                }
                return m_osmbaseurl;
            }
            set
            {
                if (String.IsNullOrEmpty(value) == false)
                {
                    // NOTE(review): when the current value is empty this guard skips the
                    // assignment entirely, so a first-time value can never be stored via
                    // this setter — looks like a bug; confirm intended behaviour.
                    if (String.IsNullOrEmpty(m_osmbaseurl) == false)
                    {
                        if (value.Equals(m_osmbaseurl) == false)
                        {
                            m_osmbaseurl = value;
                            if (m_editorConfigurationSettings.ContainsKey("osmbaseurl"))
                            {
                                m_editorConfigurationSettings["osmbaseurl"] = m_osmbaseurl;
                            }
                            else
                            {
                                m_editorConfigurationSettings.Add("osmbaseurl", m_osmbaseurl);
                            }
                            OSMGPFactory.StoreOSMEditorSettings(m_editorConfigurationSettings);
                            m_isSettingsUpdateRequired = true;
                        }
                    }
                }
            }
        }

        /// <summary>
        /// Path of the OSM domains XML file; same read/write pattern (and the
        /// same empty-current-value guard) as OSMBaseURL.
        /// </summary>
        public string OSMDomainsXmlFilePath
        {
            get
            {
                if (m_editorConfigurationSettings.ContainsKey("osmdomainsfilepath"))
                {
                    m_osmDomainsFilePath = m_editorConfigurationSettings["osmdomainsfilepath"];
                }
                return m_osmDomainsFilePath;
            }
            set
            {
                if (String.IsNullOrEmpty(value) == false)
                {
                    if (String.IsNullOrEmpty(m_osmDomainsFilePath) == false)
                    {
                        if (value.Equals(m_osmDomainsFilePath) == false)
                        {
                            m_osmDomainsFilePath = value;
                            if (m_editorConfigurationSettings.ContainsKey("osmdomainsfilepath"))
                            {
                                m_editorConfigurationSettings["osmdomainsfilepath"] = m_osmDomainsFilePath;
                            }
                            else
                            {
                                m_editorConfigurationSettings.Add("osmdomainsfilepath", m_osmDomainsFilePath);
                            }
                            OSMGPFactory.StoreOSMEditorSettings(m_editorConfigurationSettings);
                            m_isSettingsUpdateRequired = true;
                        }
                    }
                }
            }
        }

        /// <summary>
        /// Path of the OSM feature-properties XML file; same read/write pattern
        /// (and the same empty-current-value guard) as OSMBaseURL.
        /// </summary>
        public string OSMFeaturePropertiesXmlFilePath
        {
            get
            {
                if (m_editorConfigurationSettings.ContainsKey("osmfeaturepropertiesfilepath"))
                {
                    m_osmFeaturePropertiesFilePath = m_editorConfigurationSettings["osmfeaturepropertiesfilepath"];
                }
                return m_osmFeaturePropertiesFilePath;
            }
            set
            {
                if (String.IsNullOrEmpty(value) == false)
                {
                    if (String.IsNullOrEmpty(m_osmFeaturePropertiesFilePath) == false)
                    {
                        if (value.Equals(m_osmFeaturePropertiesFilePath) == false)
                        {
                            m_osmFeaturePropertiesFilePath = value;
                            if (m_editorConfigurationSettings.ContainsKey("osmfeaturepropertiesfilepath"))
                            {
                                m_editorConfigurationSettings["osmfeaturepropertiesfilepath"] = m_osmFeaturePropertiesFilePath;
                            }
                            else
                            {
                                m_editorConfigurationSettings.Add("osmfeaturepropertiesfilepath", m_osmFeaturePropertiesFilePath);
                            }
                            OSMGPFactory.StoreOSMEditorSettings(m_editorConfigurationSettings);
                            m_isSettingsUpdateRequired = true;
                        }
                    }
                }
            }
        }

        /// <summary>
        /// True once any of the settings properties stored a changed value.
        /// </summary>
        public bool IsSettingsUpdateRequired
        {
            get
            {
                return m_isSettingsUpdateRequired;
            }
        }

        public OSMEditorExtension()
        {
        }

        public void Startup(ref object initializationData)
        {
            try
            {
                m_application = initializationData as IApplication;
                if (m_application == null)
                    return;

                ISpatialReferenceFactory spatialReferenceFactory = new SpatialReferenceEnvironmentClass() as ISpatialReferenceFactory;
                m_wgs84 = spatialReferenceFactory.CreateGeographicCoordinateSystem((int)esriSRGeoCSType.esriSRGeoCS_WGS1984) as ISpatialReference;

                //Get the editor.
                UID editorUid = new UID();
                editorUid.Value = "esriEditor.Editor";
                m_editor3 = m_application.FindExtensionByCLSID(editorUid) as IEditor3;
                m_editEvents2 = m_editor3 as IEditEvents2_Event;
                m_editEvents = m_editor3 as IEditEvents_Event;

                m_editEvents.OnCreateFeature += new IEditEvents_OnCreateFeatureEventHandler(m_editEvents_OnCreateFeature);
                m_editEvents.OnChangeFeature += new IEditEvents_OnChangeFeatureEventHandler(m_editEvents_OnChangeFeature);
                m_editEvents.OnDeleteFeature += new IEditEvents_OnDeleteFeatureEventHandler(m_editEvents_OnDeleteFeature);
                m_editEvents.OnStartEditing += new IEditEvents_OnStartEditingEventHandler(m_editEvents_OnStartEditing);

                resourceManager = new ResourceManager("ESRI.ArcGIS.OSM.Editor.OSMFeatureInspectorStrings", this.GetType().Assembly);

                _osmUtility = new OSMClassExtension.OSMUtility();

                // retrieve osm editor specific information
                m_editorConfigurationSettings = OSMGPFactory.ReadOSMEditorSettings();
            }
            catch { }
        }

        void m_editEvents_OnStartEditing()
        {
            // only show the license alert during a session of ArcMap once
            if (testEditorContentforOSMLicense() && m_LicenseAlertShownOnce == false)
            {
                LicenseAlertDialog osmLicenseAlert = new LicenseAlertDialog();
                // show the alert about the license and remind the user about the nature of the OSM data
                if (osmLicenseAlert.ShowDialog() != DialogResult.OK)
                {
                    // if the user doesn't agree with the statement, end the edit session
                    m_editor3.StopEditing(false);
                }
                m_LicenseAlertShownOnce = true;
            }
            // acquire an exclusive lock on the revision table as well for the current workspace as well
        }

        // Returns true when any feature layer in the current map is backed by a
        // feature class carrying the OSM class-extension CLSID.
        private bool testEditorContentforOSMLicense()
        {
            bool osmDataExists = false;
            try
            {
                IEnumLayer enumLayer = m_editor3.Map.get_Layers(null, true);
                enumLayer.Reset();
                ILayer layer = enumLayer.Next();
                while (layer != null)
                {
                    if (layer is IFeatureLayer)
                    {
                        IFeatureClass featureClass = ((IFeatureLayer)layer).FeatureClass;
                        // check if the current feature class being edited is actually an OpenStreetMap feature class
                        // all other feature classes should not be touched by this extension
                        UID osmFeatureClassExtensionCLSID = featureClass.EXTCLSID;
                        if (osmFeatureClassExtensionCLSID != null)
                        {
                            if (osmFeatureClassExtensionCLSID.Value.ToString().Equals("{65CA4847-8661-45eb-8E1E-B2985CA17C78}", StringComparison.InvariantCultureIgnoreCase) == true)
                            {
                                osmDataExists = true;
                                break;
                            }
                        }
                    }
                    layer = enumLayer.Next();
                }
            }
            catch {}
            return osmDataExists;
        }

        /// <summary>
        /// Method to persist changes into the configuration file, if required.
        /// </summary>
        public void PersistOSMSettings()
        {
            if (m_isSettingsUpdateRequired)
            {
                OSMGPFactory.StoreOSMEditorSettings(m_editorConfigurationSettings);
            }
        }

        void m_editEvents_OnDeleteFeature(IObject obj)
        {
            // check if the deleted feature participates in a relation
            // applies to point, lines, and polygons
            // if it does participate in a relation ask the user if it is ok to delete
            IFeatureClass currentObjectFeatureClass = obj.Class as IFeatureClass;
            if ((currentObjectFeatureClass == null) || (currentObjectFeatureClass.EXTCLSID == null))
                return;

            // check if the current feature class being edited is actually an OpenStreetMap feature class
            // all other feature class should not be touched by this extension
            UID osmEditorExtensionCLSID = currentObjectFeatureClass.EXTCLSID;
            if (osmEditorExtensionCLSID.Value.ToString().Equals("{65CA4847-8661-45eb-8E1E-B2985CA17C78}", StringComparison.InvariantCultureIgnoreCase) == false)
            {
                return;
            }

            // at this point we are only handling geometry types
            // relation types are using a separate UI
            IFeature deletedFeature = obj as IFeature;
            if (deletedFeature == null)
                return;

            // block changing features that are supporting features for multi-part geometries (relations)
            if (deletedFeature.Shape is IPolygon || deletedFeature.Shape is IPolyline)
            {
                int memberOFFieldIndex = deletedFeature.Fields.FindField("osmMemberOf");
                int membersFieldIndex = deletedFeature.Fields.FindField("osmMembers");
                int osmIDFieldIndex = deletedFeature.Fields.FindField("OSMID");
                int osmID = 0;
                if (osmIDFieldIndex > -1)
                {
                    object osmIDValue = deletedFeature.get_Value(osmIDFieldIndex);
                    if (osmIDValue != DBNull.Value)
                    {
                        osmID = Convert.ToInt32(osmIDValue);
                    }
                }

                // the feature itself has members -> it is the parent of a multi-part geometry
                if (membersFieldIndex > -1)
                {
                    ESRI.ArcGIS.OSM.OSMClassExtension.member[] relationMembers = _osmUtility.retrieveMembers(deletedFeature, membersFieldIndex);
                    if (relationMembers != null)
                    {
                        if (relationMembers.Length > 0)
                        {
                            string abortMessage = String.Format(resourceManager.GetString("OSMEditor_FeatureInspector_multipartdeleteparentconflictmessage"), osmID);
                            MessageBox.Show(abortMessage, resourceManager.GetString("OSMEditor_FeatureInspector_relationconflictcaption"), MessageBoxButtons.OK, MessageBoxIcon.Stop);
                            m_editor3.AbortOperation();
                        }
                    }
                }

                // the feature is itself a member of one or more parents -> list them and abort
                if (memberOFFieldIndex > -1)
                {
                    List<string> isMemberOfList = _osmUtility.retrieveIsMemberOf(deletedFeature, memberOFFieldIndex);
                    Dictionary<string, string> dictofParentsAndTypes = _osmUtility.parseIsMemberOfList(isMemberOfList);

                    // build a "type: id" list of the referencing parents for the message
                    StringBuilder typeAndIDString = new StringBuilder();
                    foreach (var item in dictofParentsAndTypes)
                    {
                        switch (item.Value)
                        {
                            case "rel":
                                typeAndIDString.Append(resourceManager.GetString("OSMEditor_FeatureInspector_relationidtext") + item.Key + ",");
                                break;
                            case "ply":
                                typeAndIDString.Append(resourceManager.GetString("OSMEditor_FeatureInspector_polygonidtext") + item.Key + ",");
                                break;
                            case "ln":
                                typeAndIDString.Append(resourceManager.GetString("OSMEditor_FeatureInspector_polylineidtext") + item.Key + ",");
                                break;
                            case "pt":
                                typeAndIDString.Append(resourceManager.GetString("OSMEditor_FeatureInspector_pointidtext") + item.Key + ",");
                                break;
                            default:
                                break;
                        }
                    }

                    if (typeAndIDString.Length > 0)
                    {
                        // drop the trailing comma
                        string parentsString = typeAndIDString.ToString(0, typeAndIDString.Length - 1);
                        string abortMessage = String.Format(resourceManager.GetString("OSMEditor_FeatureInspector_relationsconflictmessage"), osmID, parentsString);
                        MessageBox.Show(abortMessage, resourceManager.GetString("OSMEditor_FeatureInspector_relationconflictcaption"), MessageBoxButtons.OK, MessageBoxIcon.Stop);
                        m_editor3.AbortOperation();
                        return;
                    }
                }
            }
            else if (deletedFeature.Shape is IPoint)
            {
                // if we are dealing with points to be deleted then we'll determine the connectedness via a spatial query and then the attributes indicating that
                // the higher order feature is part of a relation
                IFeatureClass lineFeatureClass = ESRI.ArcGIS.OSM.OSMClassExtension.OpenStreetMapClassExtension.findMatchingFeatureClass(deletedFeature, esriGeometryType.esriGeometryPolyline);

                ISpatialFilter searchPointFilter = new SpatialFilterClass();
                searchPointFilter.Geometry = deletedFeature.Shape;
                searchPointFilter.SpatialRel = esriSpatialRelEnum.esriSpatialRelTouches;

                TestRelationMembership(deletedFeature, lineFeatureClass, searchPointFilter);

                IFeatureClass polygonFeatureClass = ESRI.ArcGIS.OSM.OSMClassExtension.OpenStreetMapClassExtension.findMatchingFeatureClass(deletedFeature, esriGeometryType.esriGeometryPolygon);
                TestRelationMembership(deletedFeature, polygonFeatureClass, searchPointFilter);
            }

            string featureClassName = ((IDataset)obj.Class).Name;
            // find the corresponding relation table
            int baseIndex = featureClassName.IndexOf("_osm_");
            int deleteOSMIDFieldIndex = obj.Fields.FindField("OSMID");
            int deleteIsMemberOfFieldIndex = obj.Fields.FindField("osmMemberOf");

            if (baseIndex > -1)
            {
                string relationTableName = featureClassName.Substring(0, baseIndex) + "_osm_relation";
                IFeatureWorkspace featureWorkspace = m_editor3.EditWorkspace as IFeatureWorkspace;
                ITable relationTable = featureWorkspace.OpenTable(relationTableName);
                // NOTE(review): relationOSMIDFieldIndex is computed but never used
                // below — candidate for removal; confirm.
                int relationOSMIDFieldIndex = relationTable.Fields.FindField("OSMID");

                List<string> memberOfList = _osmUtility.retrieveIsMemberOf(deletedFeature, deleteIsMemberOfFieldIndex);
                Dictionary<string, string> isMemberOfIdsAndTypes = _osmUtility.parseIsMemberOfList(memberOfList);

                if (memberOfList.Count > 0)
                {
                    // the deleted feature is referenced by a relation
                    // check with the user if it is ok to delete
                    // if OK then we are dealing with the delete upon stop editing, if cancel undo the delete
                    string relationsString = String.Empty;
                    int relationCount = 0;
                    foreach (var memberOfItem in isMemberOfIdsAndTypes)
                    {
                        if (memberOfItem.Value == "rel")
                        {
                            relationCount = relationCount + 1;
                            relationsString = relationsString + memberOfItem.Key + ",";
                        }
                    }

                    string errorMessage = String.Empty;
                    if (relationCount > 1)
                    {
                        errorMessage = string.Format(resourceManager.GetString("OSMEditor_FeatureInspector_relationsconflictmessage"), deletedFeature.get_Value(deleteOSMIDFieldIndex), relationsString.Substring(0, relationsString.Length - 1));
                    }
                    else
                    {
                        errorMessage = string.Format(resourceManager.GetString("OSMEditor_FeatureInspector_relationconflictmessage"), deletedFeature.get_Value(deleteOSMIDFieldIndex), relationsString.Substring(0, relationsString.Length - 1));
                    }

                    if (MessageBox.Show(errorMessage, resourceManager.GetString("OSMEditor_FeatureInspector_relationconflictcaption"), MessageBoxButtons.OKCancel, MessageBoxIcon.Warning) == DialogResult.Cancel)
                    {
                        m_editor3.AbortOperation();
                    }
                }
            }
        }

        /// <summary>
        /// Searches <paramref name="testFeatureClass"/> with the supplied spatial filter and
        /// aborts the current edit operation (after alerting the user) when a touching
        /// feature has relation members or is itself a member of a relation/multi-part parent.
        /// </summary>
        /// <param name="deletedFeature">Feature being deleted. NOTE(review): currently unused inside this method — confirm whether it can be dropped.</param>
        /// <param name="testFeatureClass">Feature class (line or polygon) to search for touching features.</param>
        /// <param name="searchPointFilter">Spatial filter built around the deleted point's shape.</param>
        private void TestRelationMembership(IFeature deletedFeature, IFeatureClass testFeatureClass, ISpatialFilter searchPointFilter)
        {
            using (ESRI.ArcGIS.OSM.OSMClassExtension.ComReleaser comReleaser = new ESRI.ArcGIS.OSM.OSMClassExtension.ComReleaser())
            {
                int testMemberOfFieldIndex = testFeatureClass.Fields.FindField("osmMemberOf");
                int testMembersFieldIndex = testFeatureClass.Fields.FindField("osmMembers");
                int testOsmIDFieldIndex = testFeatureClass.Fields.FindField("OSMID");

                IFeatureCursor searchCursor = testFeatureClass.Search(searchPointFilter, false);
                comReleaser.ManageLifetime(searchCursor);

                IFeature touchedFeature = searchCursor.NextFeature();
                while (touchedFeature != null)
                {
                    long osmID = 0;
                    if (testOsmIDFieldIndex > -1)
                    {
                        object osmIDValue = touchedFeature.get_Value(testOsmIDFieldIndex);
                        if (osmIDValue != DBNull.Value)
                        {
                            osmID = Convert.ToInt64(osmIDValue);
                        }
                    }

                    // the touching feature is itself a relation/multi-part parent
                    if (testMembersFieldIndex > -1)
                    {
                        ESRI.ArcGIS.OSM.OSMClassExtension.member[] relationMembers = _osmUtility.retrieveMembers(touchedFeature, testMembersFieldIndex);
                        if (relationMembers != null)
                        {
                            if (relationMembers.Length > 0)
                            {
                                string abortMessage = String.Format(resourceManager.GetString("OSMEditor_FeatureInspector_pointmemberofrelation"), osmID);
                                MessageBox.Show(abortMessage, resourceManager.GetString("OSMEditor_FeatureInspector_relationconflictcaption"), MessageBoxButtons.OK, MessageBoxIcon.Stop);
                                m_editor3.AbortOperation();
                                return;
                            }
                        }
                    }

                    // the touching feature is a member of one or more parents
                    if (testMemberOfFieldIndex > -1)
                    {
                        List<string> isMemberOfList = _osmUtility.retrieveIsMemberOf(touchedFeature, testMemberOfFieldIndex);
                        Dictionary<string, string> dictofParentsAndTypes = _osmUtility.parseIsMemberOfList(isMemberOfList);

                        StringBuilder typeAndIDString = new StringBuilder();
                        foreach (var item in dictofParentsAndTypes)
                        {
                            switch (item.Value)
                            {
                                case "rel":
                                    typeAndIDString.Append(resourceManager.GetString("OSMEditor_FeatureInspector_relationidtext") + item.Key + ",");
                                    break;
                                case "ply":
                                    typeAndIDString.Append(resourceManager.GetString("OSMEditor_FeatureInspector_polygonidtext") + item.Key + ",");
                                    break;
                                case "ln":
                                    typeAndIDString.Append(resourceManager.GetString("OSMEditor_FeatureInspector_polylineidtext") + item.Key + ",");
                                    break;
                                case "pt":
                                    typeAndIDString.Append(resourceManager.GetString("OSMEditor_FeatureInspector_pointidtext") + item.Key + ",");
                                    break;
                                default:
                                    break;
                            }
                        }

                        if (typeAndIDString.Length > 0)
                        {
                            string parentsString = typeAndIDString.ToString(0, typeAndIDString.Length - 1);
                            string abortMessage = String.Format(resourceManager.GetString("OSMEditor_FeatureInspector_pointmemberofrelation"), parentsString);
                            MessageBox.Show(abortMessage, resourceManager.GetString("OSMEditor_FeatureInspector_relationconflictcaption"), MessageBoxButtons.OK, MessageBoxIcon.Stop);
                            m_editor3.AbortOperation();
                            return;
                        }
                    }

                    touchedFeature = searchCursor.NextFeature();
                }
            }
        }
/// <summary>
/// Editor OnChangeFeature event handler for OSM feature classes.
/// Blocks shape edits on line/polygon features that participate in relations
/// (multi-part geometries) by aborting the edit operation, and densifies any
/// non-linear segments into straight lines before the change is stored.
/// </summary>
/// <param name="obj">The object (feature) that was changed in the edit session.</param>
void m_editEvents_OnChangeFeature(IObject obj)
{
    // check if feature contains more than 2000 nodes/vertices
    // applies to lines and polygons
    // notify the user and offer a split

    // check if feature geometry is multi-part
    // applies to lines and polygons
    // notify the user and offer a conversion to relation
    IFeatureClass currentObjectFeatureClass = obj.Class as IFeatureClass;
    if ((currentObjectFeatureClass == null) || (currentObjectFeatureClass.EXTCLSID == null))
        return;

    // check if the current feature class being edited is actually an OpenStreetMap feature class
    // all other feature classes should not be touched by this extension
    UID osmEditorExtensionCLSID = currentObjectFeatureClass.EXTCLSID;

    // the GUID identifies the OSM class extension; anything else is left alone
    if (osmEditorExtensionCLSID.Value.ToString().Equals("{65CA4847-8661-45eb-8E1E-B2985CA17C78}", StringComparison.InvariantCultureIgnoreCase) == false)
    {
        return;
    }

    IFeature currentFeature = obj as IFeature;
    if (currentFeature == null)
        return;

    // only point-collection based geometries (lines/polygons) are of interest here;
    // the collection itself is not used beyond this existence check
    IPointCollection pointCollection = currentFeature.Shape as IPointCollection;
    if (pointCollection == null)
    {
        return;
    }

    // block changing features that are supporting features for multi-part geometries (relations)
    if (currentFeature.Shape is IPolygon || currentFeature.Shape is IPolyline)
    {
        // only react when the geometry itself changed, not on attribute-only edits
        if (((IFeatureChanges)currentFeature).ShapeChanged == true)
        {
            int memberOFFieldIndex = currentFeature.Fields.FindField("osmMemberOf");
            int membersFieldIndex = currentFeature.Fields.FindField("osmMembers");
            int osmIDFieldIndex = currentFeature.Fields.FindField("OSMID");

            // OSM ID of the edited feature, used in the user-facing conflict messages
            long osmID = 0;
            if (osmIDFieldIndex > -1)
            {
                object osmIDValue = currentFeature.get_Value(osmIDFieldIndex);
                if (osmIDValue != DBNull.Value)
                {
                    osmID = Convert.ToInt64(osmIDValue);
                }
            }

            // if this feature itself has relation members it is a relation parent —
            // changing its shape is not allowed, abort the edit operation
            if (membersFieldIndex > -1)
            {
                ESRI.ArcGIS.OSM.OSMClassExtension.member[] relationMembers = _osmUtility.retrieveMembers(currentFeature, membersFieldIndex);
                if (relationMembers != null)
                {
                    if (relationMembers.Length > 0)
                    {
                        string abortMessage = String.Format(resourceManager.GetString("OSMEditor_FeatureInspector_multipartchangeparentconflictmessage"), osmID);
                        MessageBox.Show(abortMessage, resourceManager.GetString("OSMEditor_FeatureInspector_relationconflictcaption"), MessageBoxButtons.OK, MessageBoxIcon.Stop);
                        m_editor3.AbortOperation();
                    }
                }
            }

            // if this feature is a member of other relations, list the parents
            // (relation/polygon/polyline/point) in the abort message
            if (memberOFFieldIndex > -1)
            {
                List<string> isMemberOfList = _osmUtility.retrieveIsMemberOf(currentFeature, memberOFFieldIndex);
                Dictionary<string, string> dictofParentsAndTypes = _osmUtility.parseIsMemberOfList(isMemberOfList);

                // build a comma-separated "type: id" list of all parents
                StringBuilder typeAndIDString = new StringBuilder();
                foreach (var item in dictofParentsAndTypes)
                {
                    switch (item.Value)
                    {
                        case "rel":
                            typeAndIDString.Append(resourceManager.GetString("OSMEditor_FeatureInspector_relationidtext") + item.Key + ",");
                            break;
                        case "ply":
                            typeAndIDString.Append(resourceManager.GetString("OSMEditor_FeatureInspector_polygonidtext") + item.Key + ",");
                            break;
                        case "ln":
                            typeAndIDString.Append(resourceManager.GetString("OSMEditor_FeatureInspector_polylineidtext") + item.Key + ",");
                            break;
                        case "pt":
                            typeAndIDString.Append(resourceManager.GetString("OSMEditor_FeatureInspector_pointidtext") + item.Key + ",");
                            break;
                        default:
                            break;
                    }
                }

                if (typeAndIDString.Length > 0)
                {
                    // ToString(0, Length - 1) strips the trailing comma
                    string parentsString = typeAndIDString.ToString(0, typeAndIDString.Length - 1);
                    string abortMessage = String.Format(resourceManager.GetString("OSMEditor_FeatureInspector_multipartchangeconflictmessage"), osmID, parentsString);
                    MessageBox.Show(abortMessage, resourceManager.GetString("OSMEditor_FeatureInspector_relationconflictcaption"), MessageBoxButtons.OK, MessageBoxIcon.Stop);
                    m_editor3.AbortOperation();
                }
            }
        }
    }

    // OSM only supports straight segments: if any segment is not a Line
    // (e.g. an arc), densify the whole curve into line segments
    ISegmentCollection segmentCollection = currentFeature.Shape as ISegmentCollection;
    bool densifyRequired = false;
    for (int segmentIndex = 0; segmentIndex < segmentCollection.SegmentCount; segmentIndex++)
    {
        ISegment segment = segmentCollection.get_Segment(segmentIndex);
        if (!(segment is Line))
        {
            densifyRequired = true;
            break;
        }
    }

    if (densifyRequired)
    {
        // use the geometry environment's auto-densify tolerances for the conversion
        IGeometryEnvironment4 geometryEnvironment = new GeometryEnvironmentClass() as IGeometryEnvironment4;
        double densifyTolerance = geometryEnvironment.AutoDensifyTolerance;
        double deviationTolerance = geometryEnvironment.DeviationAutoDensifyTolerance;
        IPolycurve polycurve = currentFeature.Shape as IPolycurve;
        polycurve.Densify(densifyTolerance, deviationTolerance);
        currentFeature.Shape = polycurve;
        // persist the densified geometry
        obj.Store();
    }
}

/// <summary>
/// Editor OnCreateFeature event handler for OSM feature classes.
/// Densifies non-linear segments of a newly created feature into straight
/// lines so the geometry is representable in OSM.
/// </summary>
/// <param name="obj">The object (feature) that was created in the edit session.</param>
void m_editEvents_OnCreateFeature(IObject obj)
{
    // check if feature contains more than 2000 nodes/vertices
    // applies to lines and polygons
    // notify the user and offer a split

    // check if feature geometry is multi-part
    // applies to lines and polygons
    // notify the user and offer a conversion to relation
    IFeatureClass currentObjectFeatureClass = obj.Class as IFeatureClass;
    if ((currentObjectFeatureClass == null) || (currentObjectFeatureClass.EXTCLSID == null))
        return;

    // check if the current feature class being edited is actually an OpenStreetMap feature class
    // all other feature classes should not be touched by this extension
    UID osmEditorExtensionCLSID = currentObjectFeatureClass.EXTCLSID;
    if (osmEditorExtensionCLSID.Value.ToString().Equals("{65CA4847-8661-45eb-8E1E-B2985CA17C78}", StringComparison.InvariantCultureIgnoreCase) == false)
    {
        return;
    }

    IFeature currentFeature = obj as IFeature;
    if (currentFeature == null)
        return;

    // points have no segment collection; the null check keeps them out of the densify path
    ISegmentCollection segmentCollection = currentFeature.Shape as ISegmentCollection;
    bool densifyRequired = false;
    if (segmentCollection != null)
    {
        for (int segmentIndex = 0; segmentIndex < segmentCollection.SegmentCount; segmentIndex++)
        {
            ISegment segment = segmentCollection.get_Segment(segmentIndex);
            if (!(segment is Line))
            {
                densifyRequired = true;
                break;
            }
        }
    }

    if (densifyRequired)
    {
        IGeometryEnvironment4 geometryEnvironment = new GeometryEnvironmentClass() as IGeometryEnvironment4;
        double densifyTolerance = geometryEnvironment.AutoDensifyTolerance;
        double deviationTolerance = geometryEnvironment.DeviationAutoDensifyTolerance;
        IPolycurve polycurve = currentFeature.Shape as IPolycurve;
        polycurve.Densify(densifyTolerance, deviationTolerance);
        currentFeature.Shape = polycurve;
        obj.Store();
    }
}
#endregion

#region IObjectInspector Members
/// <summary>
/// Clears the wrapped standard feature inspector (created lazily on first use).
/// </summary>
public void Clear()
{
    if (m_inspector == null)
    {
        m_inspector = new FeatureInspectorClass();
    }
    m_inspector.Clear();
}

/// <summary>
/// Copies attributes of the given row via the wrapped standard feature inspector.
/// </summary>
/// <param name="srcRow">Source row to copy from.</param>
public void Copy(ESRI.ArcGIS.Geodatabase.IRow srcRow)
{
    if (m_inspector == null)
    {
        m_inspector = new FeatureInspectorClass();
    }
    m_inspector.Copy(srcRow);
}

/// <summary>
/// Window handle of the OSM inspector UI; creates the UI on first access.
/// </summary>
public int HWND
{
    get
    {
        if (m_osmFeatureInspector == null)
        {
            m_osmFeatureInspector = new OSMFeatureInspectorUI();
        }
        return m_osmFeatureInspector.Handle.ToInt32();
    }
}

/// <summary>
/// Tab window handle; returns the same handle as <see cref="HWND"/>.
/// </summary>
public int tabHwnd
{
    get
    {
        if (m_osmFeatureInspector == null)
        {
            m_osmFeatureInspector = new OSMFeatureInspectorUI();
        }
        return m_osmFeatureInspector.Handle.ToInt32();
    }
}

/// <summary>
/// Inspects the currently selected rows: delegates to the standard inspector,
/// then populates the OSM attribute grid for the selected feature(s).
/// Any exception clears the grid and is logged to the debug output.
/// </summary>
/// <param name="objects">Rows selected for inspection.</param>
/// <param name="Editor">The active editor instance.</param>
public void Inspect(ESRI.ArcGIS.Editor.IEnumRow objects, ESRI.ArcGIS.Editor.IEditor Editor)
{
    try
    {
        if (m_osmFeatureInspector == null)
        {
            m_osmFeatureInspector = new OSMFeatureInspectorUI();
        }
        if (m_inspector == null)
        {
            m_inspector = new FeatureInspectorClass();
        }
        // show the standard inspector window alongside the OSM grid
        ShowWindow(m_inspector.HWND, SW_SHOW);
        m_inspector.Inspect(objects, Editor);
        if (Editor == null)
        {
            return;
        }
        if (objects == null)
        {
            return;
        }
        if (m_osmFeatureInspector.IsInitialized == false)
            m_osmFeatureInspector.Init(Editor);
        m_editor = Editor;
        m_enumRow = objects;
        // NOTE(review): featureCount is computed from the edit selection but never
        // used afterwards — looks like leftover diagnostics code
        IEnumFeature enumFeatures = Editor.EditSelection;
        enumFeatures.Reset();
        int featureCount = 0;
        while (enumFeatures.Next() != null)
        {
            featureCount = featureCount + 1;
        }
        IEnumRow enumRow = objects;
        enumRow.Reset();
        IRow row = enumRow.Next();
        IFeature inspFeature = (IFeature)row;
        //user selected the layer name instead of a feature.
        if (objects.Count > 1)
        {
            m_osmFeatureInspector.prepareGrid4Features(objects);
        }
        else
        {
            m_osmFeatureInspector.prepareGrid4Feature(inspFeature);
        }
        m_osmFeatureInspector.currentlyEditedRows = enumRow;
    }
    catch (Exception ex)
    {
        // best-effort recovery: clear the grid and log; inspection must not crash the editor
        if (m_osmFeatureInspector != null)
        {
            m_osmFeatureInspector.ClearGrid();
        }
        System.Diagnostics.Debug.WriteLine(ex.Message);
    }
}
#endregion
}
}
#region Copyright
//
// Nini Configuration Project.
// Copyright (C) 2006 Brent R. Matzelle.  All rights reserved.
//
// This software is published under the terms of the MIT X11 license, a copy of
// which has been included with this distribution in the LICENSE.txt file.
//
#endregion

using System;
using System.Collections;

namespace Nini.Config
{
	#region ConfigEventHandler class
	/// <include file='ConfigEventArgs.xml' path='//Delegate[@name="ConfigEventHandler"]/docs/*' />
	public delegate void ConfigEventHandler (object sender, ConfigEventArgs e);

	/// <include file='ConfigEventArgs.xml' path='//Class[@name="ConfigEventArgs"]/docs/*' />
	public class ConfigEventArgs : EventArgs
	{
		IConfig config = null;

		/// <include file='ConfigEventArgs.xml' path='//Constructor[@name="ConstructorIConfig"]/docs/*' />
		public ConfigEventArgs (IConfig config)
		{
			this.config = config;
		}

		/// <include file='ConfigEventArgs.xml' path='//Property[@name="Config"]/docs/*' />
		public IConfig Config
		{
			get { return config; }
		}
	}
	#endregion

	/// <include file='ConfigCollection.xml' path='//Class[@name="ConfigCollection"]/docs/*' />
	public class ConfigCollection : ICollection, IEnumerable, IList
	{
		#region Private variables
		ArrayList configList = new ArrayList ();
		ConfigSourceBase owner = null;
		#endregion

		#region Constructors
		/// <include file='ConfigCollection.xml' path='//Constructor[@name="Constructor"]/docs/*' />
		public ConfigCollection (ConfigSourceBase owner)
		{
			this.owner = owner;
		}
		#endregion

		#region Public properties
		/// <include file='ConfigCollection.xml' path='//Property[@name="Count"]/docs/*' />
		public int Count
		{
			get { return configList.Count; }
		}

		/// <include file='ConfigCollection.xml' path='//Property[@name="IsSynchronized"]/docs/*' />
		public bool IsSynchronized
		{
			get { return false; }
		}

		/// <include file='ConfigCollection.xml' path='//Property[@name="SyncRoot"]/docs/*' />
		public object SyncRoot
		{
			get { return this; }
		}

		/// <include file='ConfigCollection.xml' path='//Property[@name="ItemIndex"]/docs/*' />
		public IConfig this[int index]
		{
			get { return (IConfig)configList[index]; }
		}

		/// <include file='ConfigCollection.xml' path='//Property[@name="ItemIndex"]/docs/*' />
		object IList.this[int index]
		{
			get { return configList[index]; }
			// setting by index is deliberately unsupported; the setter is a no-op
			set { }
		}

		/// <include file='ConfigCollection.xml' path='//Property[@name="ItemName"]/docs/*' />
		public IConfig this[string configName]
		{
			get
			{
				// linear search by name; returns null when no config matches
				IConfig result = null;

				foreach (IConfig config in configList)
				{
					if (config.Name == configName)
					{
						result = config;
						break;
					}
				}

				return result;
			}
		}

		/// <include file='ConfigCollection.xml' path='//Property[@name="IsFixedSize"]/docs/*' />
		public bool IsFixedSize
		{
			get { return false; }
		}

		/// <include file='ConfigCollection.xml' path='//Property[@name="IsReadOnly"]/docs/*' />
		public bool IsReadOnly
		{
			get { return false; }
		}
		#endregion

		#region Public methods
		/// <include file='ConfigCollection.xml' path='//Method[@name="Add"]/docs/*' />
		public void Add (IConfig config)
		{
			if (configList.Contains (config)) {
				throw new ArgumentException ("IConfig already exists");
			}
			IConfig existingConfig = this[config.Name];

			if (existingConfig != null) {
				// A config of the same name exists: merge by copying all
				// keys of the new config into the existing one instead of
				// adding a duplicate entry.
				string[] keys = config.GetKeys ();
				for (int i = 0; i < keys.Length; i++)
				{
					existingConfig.Set (keys[i], config.Get (keys[i]));
				}
			} else {
				configList.Add (config);
				OnConfigAdded (new ConfigEventArgs (config));
			}
		}

		/// <include file='ConfigCollection.xml' path='//Method[@name="Add"]/docs/*' />
		int IList.Add (object config)
		{
			IConfig newConfig = config as IConfig;

			if (newConfig == null) {
				// FIX: was "throw new Exception (...)" — never raise the
				// non-specific System.Exception (CA2201); an invalid argument
				// type is an ArgumentException.
				throw new ArgumentException ("Must be an IConfig", "config");
			} else {
				this.Add (newConfig);
				return IndexOf (newConfig);
			}
		}

		/// <include file='ConfigCollection.xml' path='//Method[@name="AddName"]/docs/*' />
		public IConfig Add (string name)
		{
			ConfigBase result = null;

			if (this[name] == null) {
				result = new ConfigBase (name, owner);
				configList.Add (result);
				OnConfigAdded (new ConfigEventArgs (result));
			} else {
				throw new ArgumentException ("An IConfig of that name already exists");
			}

			return result;
		}

		/// <include file='ConfigCollection.xml' path='//Method[@name="Remove"]/docs/*' />
		public void Remove (IConfig config)
		{
			configList.Remove (config);
			OnConfigRemoved (new ConfigEventArgs (config));
		}

		/// <include file='ConfigCollection.xml' path='//Method[@name="Remove"]/docs/*' />
		public void Remove (object config)
		{
			configList.Remove (config);
			OnConfigRemoved (new ConfigEventArgs ((IConfig)config));
		}

		/// <include file='ConfigCollection.xml' path='//Method[@name="RemoveAt"]/docs/*' />
		public void RemoveAt (int index)
		{
			IConfig config = (IConfig)configList[index];
			configList.RemoveAt (index);
			OnConfigRemoved (new ConfigEventArgs (config));
		}

		/// <include file='ConfigCollection.xml' path='//Method[@name="Clear"]/docs/*' />
		public void Clear ()
		{
			// NOTE: does not raise ConfigRemoved for the cleared entries.
			configList.Clear ();
		}

		/// <include file='ConfigCollection.xml' path='//Method[@name="GetEnumerator"]/docs/*' />
		public IEnumerator GetEnumerator ()
		{
			return configList.GetEnumerator ();
		}

		/// <include file='ConfigCollection.xml' path='//Method[@name="CopyTo"]/docs/*' />
		public void CopyTo (Array array, int index)
		{
			configList.CopyTo (array, index);
		}

		/// <include file='ConfigCollection.xml' path='//Method[@name="CopyToStrong"]/docs/*' />
		public void CopyTo (IConfig[] array, int index)
		{
			((ICollection)configList).CopyTo (array, index);
		}

		/// <include file='ConfigCollection.xml' path='//Method[@name="Contains"]/docs/*' />
		public bool Contains (object config)
		{
			return configList.Contains (config);
		}

		/// <include file='ConfigCollection.xml' path='//Method[@name="IndexOf"]/docs/*' />
		public int IndexOf (object config)
		{
			return configList.IndexOf (config);
		}

		/// <include file='ConfigCollection.xml' path='//Method[@name="Insert"]/docs/*' />
		public void Insert (int index, object config)
		{
			// NOTE: bypasses the duplicate/merge logic of Add and raises no event.
			configList.Insert (index, config);
		}
		#endregion

		#region Public events
		/// <include file='ConfigCollection.xml' path='//Event[@name="ConfigAdded"]/docs/*' />
		public event ConfigEventHandler ConfigAdded;

		/// <include file='ConfigCollection.xml' path='//Event[@name="ConfigRemoved"]/docs/*' />
		public event ConfigEventHandler ConfigRemoved;
		#endregion

		#region Protected methods
		/// <include file='ConfigCollection.xml' path='//Method[@name="OnConfigAdded"]/docs/*' />
		protected void OnConfigAdded (ConfigEventArgs e)
		{
			if (ConfigAdded != null) {
				ConfigAdded (this, e);
			}
		}

		/// <include file='ConfigCollection.xml' path='//Method[@name="OnConfigRemoved"]/docs/*' />
		protected void OnConfigRemoved (ConfigEventArgs e)
		{
			if (ConfigRemoved != null) {
				ConfigRemoved (this, e);
			}
		}
		#endregion

		#region Private methods
		#endregion
	}
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

namespace Bond.IO.Unsafe
{
    using System;
    using System.Diagnostics;
    using System.IO;

    /// <summary>
    /// Implements IInputStream on top of System.Stream
    /// </summary>
    public class InputStream : InputBuffer, ICloneable<InputStream>
    {
        // Default setting for maximum incremental allocation chunk size before reading from stream
        public const int DefaultAllocationChunk = 128 * 1024 * 1024;

        // Active setting for maximum incremental allocation chunk size before reading from stream
        public static int ActiveAllocationChunk
        {
            get
            {
                return activeAllocationChunk;
            }
            set
            {
                // a non-positive chunk size would make the chunked read loop meaningless
                if (value <= 0)
                {
                    throw new ArgumentOutOfRangeException(nameof(value), "Value must be positive.");
                }
                activeAllocationChunk = value;
            }
        }

        // shared sentinel so the common (non-chunked) path allocates nothing
        static readonly byte[][] EmptyTempBuffers = new byte[0][];
        static int activeAllocationChunk;
        readonly Stream stream;
        readonly int bufferLength;

        static InputStream()
        {
            ActiveAllocationChunk = DefaultAllocationChunk;
        }

        // When we read more data from the stream we can overwrite the
        // existing buffer only if it hasn't been exposed via ReadBytes or
        // Clone. Otherwise a new buffer has to be allocated.
        bool canReuseBuffer;

        public override long Length
        {
            get { return stream.Length; }
        }

        // Logical position: the underlying stream position minus the bytes
        // buffered but not yet consumed (end - position are fields of InputBuffer).
        public override long Position
        {
            get { return stream.Position - (end - position); }
            // setter adjusts only the in-buffer cursor relative to the stream
            // position; may place it past 'end', which EndOfStream later resolves
            set { position = checked ((int)(value - stream.Position)) + end; }
        }

        public InputStream(Stream stream, int bufferLength = 64 * 1024)
            : base(new byte[bufferLength], 0, 0)
        {
            this.stream = stream;
            this.bufferLength = bufferLength;
            canReuseBuffer = true;
        }

        // Cloning shares no buffer: the clone must never overwrite bytes the
        // original (or other clones) may still expose.
        InputStream(InputStream that)
            : base(that)
        {
            stream = that.stream.Clone();
            bufferLength = that.bufferLength;
            canReuseBuffer = false;
        }

        /// <summary>
        /// Create a clone of the current state of the buffer
        /// </summary>
        public new InputStream Clone()
        {
            canReuseBuffer = false;
            return new InputStream(this);
        }

        /// <summary>
        /// Read an array of bytes verbatim
        /// </summary>
        /// <param name="count">Number of bytes to read</param>
        /// <exception cref="EndOfStreamException"/>
        public override ArraySegment<byte> ReadBytes(int count)
        {
            var result = base.ReadBytes(count);
            // the returned segment aliases the internal buffer, so it may no
            // longer be overwritten on the next refill
            canReuseBuffer = false;
            return result;
        }

        // Refills the buffer so that at least 'count' bytes are available,
        // reading from the underlying stream in ActiveAllocationChunk-sized
        // pieces to avoid one huge up-front allocation. Falls through to
        // base.EndOfStream when the stream cannot supply enough bytes.
        internal override void EndOfStream(int count)
        {
            // The unread bytes left in the buffer. May be negative, which
            // indicates that this stream has been advanced beyond where we
            // are in the underlying stream and some bytes will need to be
            // skipped.
            var remaining = end - position;
            bool failed = false;
            byte[][] tempBuffers = EmptyTempBuffers;

            // Check whether we need to read in chunks to avoid allocating a
            // ton of memory ahead of time.
            if ((count > buffer.Length && (count - buffer.Length > ActiveAllocationChunk)))
            {
                // Calculate number of temp buffers; we round down since the
                // last chunk is read directly into final buffer. Note:
                // Difference is adjusted by -1 to round down correctly in
                // cases where the difference is exactly a multiple of the
                // allocation chunk size.
                int numTempBuffers = (count - buffer.Length - 1) / ActiveAllocationChunk;

                tempBuffers = new byte[numTempBuffers][];

                for (int i = 0; i < tempBuffers.Length; i++)
                {
                    tempBuffers[i] = new byte[ActiveAllocationChunk];

                    if (remaining < 0)
                    {
                        // We need to skip ahead in the underlying stream.
                        // Borrow the buffer to do the skipping before we do
                        // the real read.
                        // Only should happen for the first iteration, as we
                        // reset remaining.
                        Debug.Assert(i == 0);
                        AdvanceUnderlyingStream(-remaining, tempBuffers[i]);
                        remaining = 0;
                    }

                    var bytesRead = stream.Read(tempBuffers[i], 0, ActiveAllocationChunk);
                    if (bytesRead != ActiveAllocationChunk)
                    {
                        // short read: not enough data; skip the copy phase and
                        // report end-of-stream below
                        failed = true;
                        break;
                    }
                }
            }

            if (!failed)
            {
                var oldBuffer = buffer;

                // reallocate when the buffer was exposed (ReadBytes/Clone) or too small
                if (!canReuseBuffer || count > buffer.Length)
                {
                    buffer = new byte[Math.Max(bufferLength, count)];
                    canReuseBuffer = true;
                }

                int offset;
                if (remaining > 0)
                {
                    // Copy any remaining bytes from the old buffer into the
                    // final buffer. This may just move the bytes to the
                    // beginning of the buffer.
                    Buffer.BlockCopy(oldBuffer, position, buffer, 0, remaining);
                    offset = remaining;
                }
                else if (remaining < 0)
                {
                    // Nothing in the old buffer, but we need to skip ahead
                    // in the underlying stream.
                    AdvanceUnderlyingStream(-remaining, buffer);
                    offset = 0;
                }
                else
                {
                    // The stars are aligned, so just start at the beginning
                    // of the final buffer.
                    offset = 0;
                }

                // Copy from any temp buffers into the final buffer. In the
                // common case, there are no temp buffers.
                foreach (byte[] tempBuffer in tempBuffers)
                {
                    Buffer.BlockCopy(
                        tempBuffer,
                        0,
                        buffer,
                        offset,
                        tempBuffer.Length);
                    offset += tempBuffer.Length;
                }

                // Read the final block; update valid length and position.
                end = offset + stream.Read(buffer, offset, buffer.Length - offset);
                position = 0;
            }

            // still short of the request: let the base class raise the error
            if (count > end)
            {
                base.EndOfStream(count - end);
            }
        }

        /// <summary>
        /// Advances the underlying stream by <paramref name="count"/> bytes.
        /// </summary>
        /// <remarks>Correctly handles streams that cannot Seek.</remarks>
        /// <param name="count">The number of bytes to advance.</param>
        /// <param name="scratchBuffer">
        /// An already allocated buffer to use if dummy reads need to be
        /// performed.
        /// </param>
        void AdvanceUnderlyingStream(int count, byte[] scratchBuffer)
        {
            Debug.Assert(scratchBuffer != null);

            if (stream.CanSeek)
            {
                stream.Seek(count, SeekOrigin.Current);
            }
            else
            {
                // non-seekable stream: consume bytes by reading into scratch
                while (count > 0)
                {
                    int bytesRead = stream.Read(
                        scratchBuffer,
                        offset: 0,
                        count: Math.Min(scratchBuffer.Length, count));
                    count -= bytesRead;

                    // zero bytes read means the stream ended before the skip completed
                    if (bytesRead == 0)
                    {
                        base.EndOfStream(count);
                    }
                }
            }
        }
    }
}
//BSD, 2014-present, WinterDev
//----------------------------------------------------------------------------
// Anti-Grain Geometry - Version 2.4
// Copyright (C) 2002-2005 Maxim Shemanarev (http://www.antigrain.com)
//
// C# Port port by: Lars Brubaker
//                  larsbrubaker@gmail.com
// Copyright (C) 2007
//
// Permission to copy, use, modify, sell and distribute this software
// is granted provided this copyright notice appears in all copies.
// This software is provided "as is" without express or implied
// warranty, and with no claim as to its suitability for any purpose.
//
//----------------------------------------------------------------------------
// Contact: mcseem@antigrain.com
//          mcseemagg@yahoo.com
//          http://www.antigrain.com
//----------------------------------------------------------------------------
//
// Viewport transformer - simple orthogonal conversions from world coordinates
// to screen (device) ones.
//
//----------------------------------------------------------------------------
using System;

namespace PixelFarm.CpuBlit.VertexProcessing
{
    //----------------------------------------------------------trans_viewport
    /// <summary>
    /// Orthogonal world-to-device coordinate transformer (AGG trans_viewport).
    /// Maps an axis-aligned world rectangle onto a device rectangle, optionally
    /// preserving the aspect ratio (Meet/Slice) with configurable alignment.
    /// </summary>
    public sealed class Viewport
    {
        // requested world rectangle
        double _world_x1;
        double _world_y1;
        double _world_x2;
        double _world_y2;
        // requested device rectangle
        double _device_x1;
        double _device_y1;
        double _device_x2;
        double _device_y2;
        AspectRatio _aspect;
        bool _is_valid;
        // alignment of the preserved-aspect world window, 0..1 in each axis
        double _align_x;
        double _align_y;
        // actual (aspect-adjusted) world rectangle used for the mapping
        double _wx1;
        double _wy1;
        double _wx2;
        double _wy2;
        // device origin and per-axis scale factors
        double _dx1;
        double _dy1;
        double _kx;
        double _ky;

        public enum AspectRatio
        {
            //aspect_ratio_e
            Stretch, // ignore aspect ratio, scale each axis independently
            Meet,    // uniform scale, whole world rect visible inside device rect
            Slice    // uniform scale, device rect fully covered (world may be clipped)
        }

        //-------------------------------------------------------------------
        /// <summary>
        /// Creates an identity viewport: unit world rect mapped to unit device rect.
        /// </summary>
        public Viewport()
        {
            _world_x1 = 0.0;
            _world_y1 = 0.0;
            _world_x2 = 1.0;
            _world_y2 = 1.0;
            _device_x1 = 0.0;
            _device_y1 = 0.0;
            _device_x2 = 1.0;
            _device_y2 = 1.0;
            _aspect = AspectRatio.Stretch;
            _is_valid = true;
            _align_x = 0.5;
            _align_y = 0.5;
            _wx1 = 0.0;
            _wy1 = 0.0;
            _wx2 = 1.0;
            _wy2 = 1.0;
            _dx1 = 0.0;
            _dy1 = 0.0;
            _kx = 1.0;
            _ky = 1.0;
        }

        //-------------------------------------------------------------------
        /// <summary>
        /// Sets aspect-ratio mode and the alignment (0..1) of the world window
        /// inside the device rectangle, then recomputes the mapping.
        /// </summary>
        public void preserve_aspect_ratio(double alignx,
                                          double aligny,
                                          AspectRatio aspect)
        {
            _align_x = alignx;
            _align_y = aligny;
            _aspect = aspect;
            update();
        }

        //-------------------------------------------------------------------
        /// <summary>Sets the target device rectangle and recomputes the mapping.</summary>
        public void device_viewport(double x1, double y1, double x2, double y2)
        {
            _device_x1 = x1;
            _device_y1 = y1;
            _device_x2 = x2;
            _device_y2 = y2;
            update();
        }

        //-------------------------------------------------------------------
        /// <summary>Sets the source world rectangle and recomputes the mapping.</summary>
        public void world_viewport(double x1, double y1, double x2, double y2)
        {
            _world_x1 = x1;
            _world_y1 = y1;
            _world_x2 = x2;
            _world_y2 = y2;
            update();
        }

        //-------------------------------------------------------------------
        /// <summary>Returns the device rectangle as set by device_viewport.</summary>
        public void device_viewport(out double x1, out double y1, out double x2, out double y2)
        {
            x1 = _device_x1;
            y1 = _device_y1;
            x2 = _device_x2;
            y2 = _device_y2;
        }

        //-------------------------------------------------------------------
        /// <summary>Returns the world rectangle as set by world_viewport.</summary>
        public void world_viewport(out double x1, out double y1, out double x2, out double y2)
        {
            x1 = _world_x1;
            y1 = _world_y1;
            x2 = _world_x2;
            y2 = _world_y2;
        }

        //-------------------------------------------------------------------
        /// <summary>
        /// Returns the actual world rectangle used for the mapping, i.e. the
        /// requested one adjusted for the aspect-ratio mode.
        /// </summary>
        public void world_viewport_actual(out double x1, out double y1,
                                          out double x2, out double y2)
        {
            x1 = _wx1;
            y1 = _wy1;
            x2 = _wx2;
            y2 = _wy2;
        }

        //-------------------------------------------------------------------
        /// <summary>False when either rectangle was degenerate at the last update.</summary>
        public bool is_valid() => _is_valid;
        public double align_x() => _align_x;
        public double align_y() => _align_y;
        public AspectRatio aspect_ratio() => _aspect;

        //-------------------------------------------------------------------
        /// <summary>Transforms a point from world to device coordinates in place.</summary>
        public void transform(ref double x, ref double y)
        {
            x = (x - _wx1) * _kx + _dx1;
            y = (y - _wy1) * _ky + _dy1;
        }

        //-------------------------------------------------------------------
        /// <summary>Applies only the scale part of the transform (no translation).</summary>
        public void transform_scale_only(ref double x, ref double y)
        {
            x *= _kx;
            y *= _ky;
        }

        //-------------------------------------------------------------------
        /// <summary>Transforms a point from device back to world coordinates in place.</summary>
        public void inverse_transform(ref double x, ref double y)
        {
            x = (x - _dx1) / _kx + _wx1;
            y = (y - _dy1) / _ky + _wy1;
        }

        //-------------------------------------------------------------------
        /// <summary>Applies only the inverse scale (no translation).</summary>
        public void inverse_transform_scale_only(ref double x, ref double y)
        {
            x /= _kx;
            y /= _ky;
        }

        //-------------------------------------------------------------------
        // net device-space translation components of the full transform
        public double device_dx() => _dx1 - _wx1 * _kx;
        public double device_dy() => _dy1 - _wy1 * _ky;

        //-------------------------------------------------------------------
        public double scale_x() => _kx;
        //-------------------------------------------------------------------
        public double scale_y() => _ky;
        //-------------------------------------------------------------------
        /// <summary>Average of the two axis scales.</summary>
        public double scale() => (_kx + _ky) * 0.5;

        //-------------------------------------------------------------------
        /// <summary>The full mapping as an affine matrix: translate, scale, translate.</summary>
        public Affine to_affine()
        {
            Affine mtx = Affine.NewTranslation(-_wx1, -_wy1);
            mtx *= Affine.NewScaling(_kx, _ky);
            mtx *= Affine.NewTranslation(_dx1, _dy1);
            return mtx;
        }

        //-------------------------------------------------------------------
        /// <summary>Only the scale part of the mapping as an affine matrix.</summary>
        public Affine to_affine_scale_only() => Affine.NewScaling(_kx, _ky);

        /// <summary>
        /// Recomputes the actual world window, device origin and scale factors
        /// from the requested rectangles, alignment and aspect mode.
        /// </summary>
        void update()
        {
            double epsilon = 1e-30;
            if (Math.Abs(_world_x1 - _world_x2) < epsilon ||
                Math.Abs(_world_y1 - _world_y2) < epsilon ||
                Math.Abs(_device_x1 - _device_x2) < epsilon ||
                Math.Abs(_device_y1 - _device_y2) < epsilon)
            {
                // Degenerate input: fall back to a unit-sized world window and
                // identity scale, and flag the viewport as invalid.
                // FIX: was "_wy2 = _world_y2 + 1.0" — asymmetric with the x axis
                // (which uses _world_x1 + 1.0). Both axes now span exactly 1.0
                // from their lower corner.
                _wx1 = _world_x1;
                _wy1 = _world_y1;
                _wx2 = _world_x1 + 1.0;
                _wy2 = _world_y1 + 1.0;
                _dx1 = _device_x1;
                _dy1 = _device_y1;
                _kx = 1.0;
                _ky = 1.0;
                _is_valid = false;
                return;
            }

            double world_x1 = _world_x1;
            double world_y1 = _world_y1;
            double world_x2 = _world_x2;
            double world_y2 = _world_y2;
            double device_x1 = _device_x1;
            double device_y1 = _device_y1;
            double device_x2 = _device_x2;
            double device_y2 = _device_y2;

            if (_aspect != AspectRatio.Stretch)
            {
                // Enlarge (Meet) or shrink (Slice) the world window on one axis
                // so both axes share the same scale, positioned by _align_x/_align_y.
                double d;
                _kx = (device_x2 - device_x1) / (world_x2 - world_x1);
                _ky = (device_y2 - device_y1) / (world_y2 - world_y1);
                if ((_aspect == AspectRatio.Meet) == (_kx < _ky))
                {
                    d = (world_y2 - world_y1) * _ky / _kx;
                    world_y1 += (world_y2 - world_y1 - d) * _align_y;
                    world_y2 = world_y1 + d;
                }
                else
                {
                    d = (world_x2 - world_x1) * _kx / _ky;
                    world_x1 += (world_x2 - world_x1 - d) * _align_x;
                    world_x2 = world_x1 + d;
                }
            }

            _wx1 = world_x1;
            _wy1 = world_y1;
            _wx2 = world_x2;
            _wy2 = world_y2;
            _dx1 = device_x1;
            _dy1 = device_y1;
            _kx = (device_x2 - device_x1) / (world_x2 - world_x1);
            _ky = (device_y2 - device_y1) / (world_y2 - world_y1);
            _is_valid = true;
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections;

/// <summary>
/// Tests for Array.Sort(Array, Array, int, int, IComparer): positive cases
/// with random byte keys/items and negative cases for null keys, negative
/// length, and an out-of-range sort window.
/// </summary>
public class ArrayBinarySort2
{
    private const int c_MIN_SIZE   = 64;
    private const int c_MAX_SIZE   = 1024;
    private const int c_NUM_LOOPS  = 50;

    public static int Main()
    {
        ArrayBinarySort2 ac = new ArrayBinarySort2();

        TestLibrary.TestFramework.BeginTestCase("Array.Sort(Array, Array, int, int, IComparer)");

        if (ac.RunTests())
        {
            TestLibrary.TestFramework.EndTestCase();
            TestLibrary.TestFramework.LogInformation("PASS");
            return 100;
        }
        else
        {
            TestLibrary.TestFramework.EndTestCase();
            TestLibrary.TestFramework.LogInformation("FAIL");
            return 0;
        }
    }

    public bool RunTests()
    {
        bool retVal = true;

        TestLibrary.TestFramework.LogInformation("[Positive]");
        retVal = PosTest1() && retVal;
        retVal = PosTest2() && retVal;

        TestLibrary.TestFramework.LogInformation("");
        TestLibrary.TestFramework.LogInformation("[Negative]");
        retVal = NegTest1() && retVal;
        retVal = NegTest2() && retVal;
        retVal = NegTest3() && retVal;

        return retVal;
    }

    public bool PosTest1()
    {
        bool      retVal = true;
        Array     keys;
        Array     items;
        int       length;
        IComparer myc;
        byte      element;

        TestLibrary.TestFramework.BeginScenario("PosTest1: Array.Sort(Array, Array, int, int, IComparer) ");

        try
        {
            myc = new MyComparer();

            for (int j=0; j<c_NUM_LOOPS; j++)
            {
                // create the array
                length = (TestLibrary.Generator.GetInt32(-55) % (c_MAX_SIZE-c_MIN_SIZE)) + c_MIN_SIZE;
                keys   = Array.CreateInstance(typeof(byte), length);
                items  = Array.CreateInstance(typeof(byte), length);

                // fill the array; items mirror keys so both can be validated after the sort
                for (int i=0; i<keys.Length; i++)
                {
                    keys.SetValue((object)TestLibrary.Generator.GetByte(-55), i);
                    items.SetValue(keys.GetValue(i), i);
                }

                Array.Sort(keys, items, 0, length, myc);

                // ensure that all the elements are sorted (pairwise non-decreasing)
                element = (byte)keys.GetValue(0);
                for(int i=0; i<keys.Length; i++)
                {
                    if (element > (byte)keys.GetValue(i))
                    {
                        TestLibrary.TestFramework.LogError("000", "Unexpected key: Element (" + element + ") is greater than (" + (byte)keys.GetValue(i) + ")");
                        retVal = false;
                    }
                    // FIX: advance the comparison element each iteration;
                    // previously only keys[0] <= keys[i] was verified, which
                    // misses any unsorted pair beyond the first element.
                    element = (byte)keys.GetValue(i);
                    if ((byte)items.GetValue(i) != (byte)keys.GetValue(i))
                    {
                        TestLibrary.TestFramework.LogError("001", "Unexpected item: Expected(" + (byte)keys.GetValue(i) + ") Actual(" + (byte)items.GetValue(i) + ")");
                        retVal = false;
                    }
                }
            }
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("002", "Unexpected exception: " + e);
            retVal = false;
        }

        return retVal;
    }

    public bool PosTest2()
    {
        bool      retVal = true;
        Array     keys;
        Array     items;
        int       length;
        IComparer myc;
        byte      element;

        TestLibrary.TestFramework.BeginScenario("PosTest2: Array.Sort(Array, Array, int, int, IComparer) items is null");

        try
        {
            myc = new MyComparer();

            for (int j=0; j<c_NUM_LOOPS; j++)
            {
                // create the array; a null items array is legal for this overload
                length = (TestLibrary.Generator.GetInt32(-55) % (c_MAX_SIZE-c_MIN_SIZE)) + c_MIN_SIZE;
                keys   = Array.CreateInstance(typeof(byte), length);
                items  = null;

                // fill the array
                for (int i=0; i<keys.Length; i++)
                {
                    keys.SetValue((object)TestLibrary.Generator.GetByte(-55), i);
                }

                Array.Sort(keys, items, 0, length, myc);

                // ensure that all the elements are sorted (pairwise non-decreasing)
                element = (byte)keys.GetValue(0);
                for(int i=0; i<keys.Length; i++)
                {
                    if (element > (byte)keys.GetValue(i))
                    {
                        TestLibrary.TestFramework.LogError("003", "Unexpected key: Element (" + element + ") is greater than (" + (byte)keys.GetValue(i) + ")");
                        retVal = false;
                    }
                    // FIX: advance the comparison element (same defect as PosTest1)
                    element = (byte)keys.GetValue(i);
                }
            }
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("004", "Unexpected exception: " + e);
            retVal = false;
        }

        return retVal;
    }

    public bool NegTest1()
    {
        bool      retVal = true;
        Array     keys;
        Array     items;
        IComparer myc;

        TestLibrary.TestFramework.BeginScenario("NegTest1: Array.Sort(Array, Array, int, int, IComparer) keys is null");

        try
        {
            keys  = null;
            items = null;
            myc   = new MyComparer();

            Array.Sort(keys, items, 0, 0, myc);

            TestLibrary.TestFramework.LogError("005", "Exception expected.");
            retVal = false;
        }
        catch (ArgumentNullException)
        {
            // expected
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("006", "Unexpected exception: " + e);
            retVal = false;
        }

        return retVal;
    }

    public bool NegTest2()
    {
        bool      retVal = true;
        Array     keys;
        Array     items;
        IComparer myc;
        int       length;

        TestLibrary.TestFramework.BeginScenario("NegTest2: Array.Sort(Array, Array, int, int, IComparer) length < 0");

        try
        {
            // create the array
            length = (TestLibrary.Generator.GetInt32(-55) % (c_MAX_SIZE-c_MIN_SIZE)) + c_MIN_SIZE;
            keys   = Array.CreateInstance(typeof(byte), length);
            items  = Array.CreateInstance(typeof(byte), length);
            myc    = new MyComparer();

            Array.Sort(keys, items, 0, -1, myc);

            TestLibrary.TestFramework.LogError("007", "Exception expected.");
            retVal = false;
        }
        catch (ArgumentOutOfRangeException)
        {
            // expected
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("008", "Unexpected exception: " + e);
            retVal = false;
        }

        return retVal;
    }

    public bool NegTest3()
    {
        bool      retVal = true;
        Array     keys;
        Array     items;
        IComparer myc;
        int       length;

        TestLibrary.TestFramework.BeginScenario("NegTest3: Array.Sort(Array, Array, int, int, IComparer) length too long");

        try
        {
            // create the array; index starts past the end so the window is invalid
            length = (TestLibrary.Generator.GetInt32(-55) % (c_MAX_SIZE-c_MIN_SIZE)) + c_MIN_SIZE;
            keys   = Array.CreateInstance(typeof(byte), length);
            items  = Array.CreateInstance(typeof(byte), length);
            myc    = new MyComparer();

            Array.Sort(keys, items, length+10, length, myc);

            TestLibrary.TestFramework.LogError("009", "Exception expected.");
            retVal = false;
        }
        catch (ArgumentException)
        {
            // expected
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("010", "Unexpected exception: " + e);
            retVal = false;
        }

        return retVal;
    }

    /// <summary>Ascending byte comparer used by every scenario.</summary>
    public class MyComparer : IComparer
    {
        public int Compare(object obj1, object obj2)
        {
            if ((byte)obj1 == (byte)obj2) return 0;
            return ((byte)obj1 < (byte)obj2) ? -1 : 1;
        }
    }
}
using Orleans.Persistence.AdoNet.Storage;
using Orleans.Providers;
using Orleans.Runtime;
using Orleans.Serialization;
using System;
using System.Collections.Generic;
using System.Data;
using System.Data.Common;
using System.Diagnostics;
using System.Globalization;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Orleans.Configuration;
using Orleans.Hosting;

namespace Orleans.Storage
{
    /// <summary>
    /// Logging codes used by <see cref="AdoNetStorageProvider"/>.
    /// </summary>
    /// <remarks> These are taken from <em>Orleans.Providers.ProviderErrorCode</em> and <em>Orleans.Providers.AzureProviderErrorCode</em>.</remarks>
    internal enum RelationalStorageProviderCodes
    {
        //These is from Orleans.Providers.ProviderErrorCode and Orleans.Providers.AzureProviderErrorCode.
        ProvidersBase = 200000,

        RelationalProviderBase = ProvidersBase + 400,
        RelationalProviderDeleteError = RelationalProviderBase + 8,
        RelationalProviderInitProvider = RelationalProviderBase + 9,
        RelationalProviderNoDeserializer = RelationalProviderBase + 10,
        RelationalProviderNoStateFound = RelationalProviderBase + 11,
        RelationalProviderClearing = RelationalProviderBase + 12,
        RelationalProviderCleared = RelationalProviderBase + 13,
        RelationalProviderReading = RelationalProviderBase + 14,
        RelationalProviderRead = RelationalProviderBase + 15,
        RelationalProviderReadError = RelationalProviderBase + 16,
        RelationalProviderWriting = RelationalProviderBase + 17,
        RelationalProviderWrote = RelationalProviderBase + 18,
        RelationalProviderWriteError = RelationalProviderBase + 19
    }

    /// <summary>
    /// Factory used by the hosting layer to resolve a named <see cref="AdoNetGrainStorage"/>
    /// instance from the container, binding the options snapshot registered under that name.
    /// </summary>
    public static class AdoNetGrainStorageFactory
    {
        /// <summary>Creates a named grain storage instance with its named options.</summary>
        /// <param name="services">The service provider to resolve dependencies from.</param>
        /// <param name="name">The provider name; also selects the named options instance.</param>
        public static IGrainStorage Create(IServiceProvider services, string name)
        {
            IOptionsSnapshot<AdoNetGrainStorageOptions> optionsSnapshot = services.GetRequiredService<IOptionsSnapshot<AdoNetGrainStorageOptions>>();
            return ActivatorUtilities.CreateInstance<AdoNetGrainStorage>(services, Options.Create(optionsSnapshot.Get(name)), name);
        }
    }

    /// <summary>
    /// A storage provider for writing grain state data to relational storage.
    /// </summary>
    /// <remarks>
    /// <para>
    /// Required configuration params: <c>DataConnectionString</c>
    /// </para>
    /// <para>
    /// Optional configuration params:
    /// <c>AdoInvariant</c> -- defaults to <c>System.Data.SqlClient</c>
    /// <c>UseJsonFormat</c> -- defaults to <c>false</c>
    /// <c>UseXmlFormat</c> -- defaults to <c>false</c>
    /// <c>UseBinaryFormat</c> -- defaults to <c>true</c>
    /// </para>
    /// </remarks>
    [DebuggerDisplay("Name = {Name}, ConnectionString = {Storage.ConnectionString}")]
    public class AdoNetGrainStorage: IGrainStorage, ILifecycleParticipant<ISiloLifecycle>
    {
        // Resolved lazily in Init (not the constructor) because the silo lifecycle controls ordering.
        private SerializationManager serializationManager;

        /// <summary>
        /// Tag for BinaryFormatSerializer
        /// </summary>
        public const string BinaryFormatSerializerTag = "BinaryFormatSerializer";

        /// <summary>
        /// Tag for JsonFormatSerializer
        /// </summary>
        public const string JsonFormatSerializerTag = "JsonFormatSerializer";

        /// <summary>
        /// Tag for XmlFormatSerializer
        /// </summary>
        public const string XmlFormatSerializerTag = "XmlFormatSerializer";

        /// <summary>
        /// The Service ID for which this relational provider is used.
        /// </summary>
        private readonly string serviceId;

        private ILogger logger;

        /// <summary>
        /// The storage used for back-end operations.
        /// </summary>
        private IRelationalStorage Storage { get; set; }

        /// <summary>
        /// These chars are delimiters when used to extract a class base type from a class
        /// that is either <see cref="Type.AssemblyQualifiedName"/> or <see cref="Type.FullName"/>.
        /// <see cref="ExtractBaseClass(string)"/>.
        /// </summary>
        private static char[] BaseClassExtractionSplitDelimeters { get; } = new[] { '[', ']' };

        /// <summary>
        /// The default query to initialize this structure from the Orleans database.
        /// </summary>
        public const string DefaultInitializationQuery = "SELECT QueryKey, QueryText FROM OrleansQuery WHERE QueryKey = 'WriteToStorageKey' OR QueryKey = 'ReadFromStorageKey' OR QueryKey = 'ClearStorageKey'";

        /// <summary>
        /// The queries currently used. When this is updated, the new queries will take effect immediately.
        /// </summary>
        public RelationalStorageProviderQueries CurrentOperationalQueries { get; set; }

        /// <summary>
        /// A strategy to pick a serializer or a deserializer for storage operations. This can be used to:
        /// 1) Add a custom serializer or deserializer for use in storage provider operations.
        /// 2) In combination with serializer or deserializer to update stored object version.
        /// 3) Per-grain storage format selection
        /// 4) Switch storage format first by reading using the save format and then writing in the new format.
        /// </summary>
        public IStorageSerializationPicker StorageSerializationPicker { get; set; }

        /// <summary>
        /// The hash generator used to hash natural keys, grain ID and grain type to a more narrow index.
        /// </summary>
        public IStorageHasherPicker HashPicker { get; set; } = new StorageHasherPicker(new[] { new OrleansDefaultHasher() });

        private AdoNetGrainStorageOptions options;
        private IProviderRuntime providerRuntime;
        private string name;

        /// <summary>
        /// Constructs the provider. Heavy initialization (DB access, query loading) is deferred
        /// to <see cref="Init"/>, which runs at the configured silo lifecycle stage.
        /// </summary>
        /// <param name="logger">Logger for provider diagnostics.</param>
        /// <param name="providerRuntime">Runtime used to resolve services during Init.</param>
        /// <param name="options">The (already named) storage options for this instance.</param>
        /// <param name="clusterOptions">Cluster options; supplies the service id.</param>
        /// <param name="name">The provider name this instance was registered under.</param>
        public AdoNetGrainStorage(
            ILogger<AdoNetGrainStorage> logger,
            IProviderRuntime providerRuntime,
            IOptions<AdoNetGrainStorageOptions> options,
            IOptions<ClusterOptions> clusterOptions,
            string name)
        {
            this.options = options.Value;
            this.providerRuntime = providerRuntime;
            this.name = name;
            this.logger = logger;
            this.serviceId = clusterOptions.Value.ServiceId.ToString();
        }

        /// <summary>Registers Init/Close with the silo lifecycle at the configured stage.</summary>
        public void Participate(ISiloLifecycle lifecycle)
        {
            lifecycle.Subscribe(this.options.InitStage, Init, Close);
        }

        /// <summary>Clear state data function for this storage provider.</summary>
        /// <see cref="IStorageProvider.ClearStateAsync(string, GrainReference, IGrainState)"/>.
        public async Task ClearStateAsync(string grainType, GrainReference grainReference, IGrainState grainState)
        {
            //It assumed these parameters are always valid. If not, an exception will be thrown,
            //even if not as clear as when using explicitly checked parameters.
            var grainId = GrainIdAndExtensionAsString(grainReference);
            var baseGrainType = ExtractBaseClass(grainType);
            if(logger.IsEnabled(LogLevel.Trace))
            {
                logger.Trace((int)RelationalStorageProviderCodes.RelationalProviderClearing, LogString("Clearing grain state", serviceId, this.name, grainState.ETag, baseGrainType, grainId.ToString()));
            }

            string storageVersion = null;
            try
            {
                // Hashes narrow the (potentially long) grain id / type strings into indexable columns.
                var grainIdHash = HashPicker.PickHasher(serviceId, this.name, baseGrainType, grainReference, grainState).Hash(grainId.GetHashBytes());
                var grainTypeHash = HashPicker.PickHasher(serviceId, this.name, baseGrainType, grainReference, grainState).Hash(Encoding.UTF8.GetBytes(baseGrainType));
                var clearRecord = (await Storage.ReadAsync(CurrentOperationalQueries.ClearState, command =>
                {
                    command.AddParameter("GrainIdHash", grainIdHash);
                    command.AddParameter("GrainIdN0", grainId.N0Key);
                    command.AddParameter("GrainIdN1", grainId.N1Key);
                    command.AddParameter("GrainTypeHash", grainTypeHash);
                    command.AddParameter("GrainTypeString", baseGrainType);
                    command.AddParameter("GrainIdExtensionString", grainId.StringKey);
                    command.AddParameter("ServiceId", serviceId);
                    // A missing/blank ETag maps to a NULL version parameter (first write / no prior state).
                    command.AddParameter("GrainStateVersion", !string.IsNullOrWhiteSpace(grainState.ETag) ? int.Parse(grainState.ETag, CultureInfo.InvariantCulture) : default(int?));
                }, (selector, resultSetCount, token) => Task.FromResult(selector.GetValue(0).ToString()), CancellationToken.None).ConfigureAwait(false));
                storageVersion = clearRecord.SingleOrDefault();
            }
            catch(Exception ex)
            {
                logger.Error((int)RelationalStorageProviderCodes.RelationalProviderDeleteError, LogString("Error clearing grain state", serviceId, this.name, grainState.ETag, baseGrainType, grainId.ToString(), ex.Message), ex);
                throw;
            }

            // The version returned by the database is compared against the grain's ETag to detect
            // a concurrent writer (see CheckVersionInconsistency remarks).
            const string OperationString = "ClearState";
            var inconsistentStateException = CheckVersionInconsistency(OperationString, serviceId, this.name, storageVersion, grainState.ETag, baseGrainType, grainId.ToString());
            if(inconsistentStateException != null)
            {
                throw inconsistentStateException;
            }

            //No errors found, the version of the state held by the grain can be updated and also the state.
            grainState.ETag = storageVersion;
            if(logger.IsEnabled(LogLevel.Trace))
            {
                logger.Trace((int)RelationalStorageProviderCodes.RelationalProviderCleared, LogString("Cleared grain state", serviceId, this.name, grainState.ETag, baseGrainType, grainId.ToString()));
            }
        }

        /// <summary> Read state data function for this storage provider.</summary>
        /// <see cref="IStorageProvider.ReadStateAsync(string, GrainReference, IGrainState)"/>.
        public async Task ReadStateAsync(string grainType, GrainReference grainReference, IGrainState grainState)
        {
            //It assumed these parameters are always valid. If not, an exception will be thrown, even if not as clear
            //as with explicitly checked parameters.
            var grainId = GrainIdAndExtensionAsString(grainReference);
            var baseGrainType = ExtractBaseClass(grainType);
            if (logger.IsEnabled(LogLevel.Trace))
            {
                logger.Trace((int)RelationalStorageProviderCodes.RelationalProviderReading, LogString("Reading grain state", serviceId, this.name, grainState.ETag, baseGrainType, grainId.ToString()));
            }

            try
            {
                // The picker decides both the deserializer and whether to stream the payload.
                SerializationChoice choice = StorageSerializationPicker.PickDeserializer(serviceId, this.name, baseGrainType, grainReference, grainState, null);
                if(choice.Deserializer == null)
                {
                    var errorString = LogString("No deserializer found", serviceId, this.name, grainState.ETag, baseGrainType, grainId.ToString());
                    logger.Error((int)RelationalStorageProviderCodes.RelationalProviderNoDeserializer, errorString);
                    throw new InvalidOperationException(errorString);
                }

                var commandBehavior = choice.PreferStreaming ? CommandBehavior.SequentialAccess : CommandBehavior.Default;
                var grainStateType = grainState.State.GetType();
                var grainIdHash = HashPicker.PickHasher(serviceId, this.name, baseGrainType, grainReference, grainState).Hash(grainId.GetHashBytes());
                var grainTypeHash = HashPicker.PickHasher(serviceId, this.name, baseGrainType, grainReference, grainState).Hash(Encoding.UTF8.GetBytes(baseGrainType));
                var readRecords = (await Storage.ReadAsync(CurrentOperationalQueries.ReadFromStorage, (command =>
                {
                    command.AddParameter("GrainIdHash", grainIdHash);
                    command.AddParameter("GrainIdN0", grainId.N0Key);
                    command.AddParameter("GrainIdN1", grainId.N1Key);
                    command.AddParameter("GrainTypeHash", grainTypeHash);
                    command.AddParameter("GrainTypeString", baseGrainType);
                    command.AddParameter("GrainIdExtensionString", grainId.StringKey);
                    command.AddParameter("ServiceId", serviceId);
                }), async (selector, resultSetCount, token) =>
                {
                    object storageState = null;
                    int? version;
                    if(choice.PreferStreaming)
                    {
                        //When streaming via ADO.NET, using CommandBehavior.SequentialAccess, the order of
                        //the columns on how they are read needs to be exactly this.
                        const int binaryColumnPositionInSelect = 0;
                        const int xmlColumnPositionInSelect = 1;
                        const int jsonColumnPositionInSelect = 2;
                        var streamSelector = (DbDataReader)selector;
                        if(!(await streamSelector.IsDBNullAsync(binaryColumnPositionInSelect)))
                        {
                            using(var downloadStream = streamSelector.GetStream(binaryColumnPositionInSelect, Storage))
                            {
                                storageState = choice.Deserializer.Deserialize(downloadStream, grainStateType);
                            }
                        }

                        if(!(await streamSelector.IsDBNullAsync(xmlColumnPositionInSelect)))
                        {
                            using(var downloadStream = streamSelector.GetTextReader(xmlColumnPositionInSelect))
                            {
                                storageState = choice.Deserializer.Deserialize(downloadStream, grainStateType);
                            }
                        }

                        if(!(await streamSelector.IsDBNullAsync(jsonColumnPositionInSelect)))
                        {
                            using(var downloadStream = streamSelector.GetTextReader(jsonColumnPositionInSelect))
                            {
                                storageState = choice.Deserializer.Deserialize(downloadStream, grainStateType);
                            }
                        }

                        version = await streamSelector.GetValueAsync<int?>("Version");
                    }
                    else
                    {
                        //All but one of these should be null. All will be read and an appropriate deserializer picked.
                        //NOTE: When streaming will be implemented, it is worthwhile to optimize this so that the defined
                        //serializer will be picked and then streaming tried according to its tag.
                        object payload;
                        payload = selector.GetValueOrDefault<byte[]>("PayloadBinary");
                        if(payload == null)
                        {
                            payload = selector.GetValueOrDefault<string>("PayloadXml");
                        }

                        if(payload == null)
                        {
                            payload = selector.GetValueOrDefault<string>("PayloadJson");
                        }

                        if(payload != null)
                        {
                            storageState = choice.Deserializer.Deserialize(payload, grainStateType);
                        }

                        version = selector.GetNullableInt32("Version");
                    }

                    return Tuple.Create(storageState, version?.ToString(CultureInfo.InvariantCulture));
                }, CancellationToken.None, commandBehavior).ConfigureAwait(false)).SingleOrDefault();

                object state = readRecords != null ? readRecords.Item1 : null;
                string etag = readRecords != null ? readRecords.Item2 : null;
                // No row (or NULL payload) means no prior state: hand back a fresh default instance.
                if(state == null)
                {
                    logger.Info((int)RelationalStorageProviderCodes.RelationalProviderNoStateFound, LogString("Null grain state read (default will be instantiated)", serviceId, this.name, grainState.ETag, baseGrainType, grainId.ToString()));
                    state = Activator.CreateInstance(grainStateType);
                }

                grainState.State = state;
                grainState.ETag = etag;
                if (logger.IsEnabled(LogLevel.Trace))
                {
                    logger.Trace((int)RelationalStorageProviderCodes.RelationalProviderRead, LogString("Read grain state", serviceId, this.name, grainState.ETag, baseGrainType, grainId.ToString()));
                }
            }
            catch(Exception ex)
            {
                logger.Error((int)RelationalStorageProviderCodes.RelationalProviderReadError, LogString("Error reading grain state", serviceId, this.name, grainState.ETag, baseGrainType, grainId.ToString(), ex.Message), ex);
                throw;
            }
        }

        /// <summary> Write state data function for this storage provider.</summary>
        /// <see cref="IStorageProvider.WriteStateAsync"/>
        public async Task WriteStateAsync(string grainType, GrainReference grainReference, IGrainState grainState)
        {
            //It assumed these parameters are always valid. If not, an exception will be thrown, even if not as clear
            //as with explicitly checked parameters.
            var data = grainState.State;
            var grainId = GrainIdAndExtensionAsString(grainReference);
            var baseGrainType = ExtractBaseClass(grainType);
            if (logger.IsEnabled(LogLevel.Trace))
            {
                logger.Trace((int)RelationalStorageProviderCodes.RelationalProviderWriting, LogString("Writing grain state", serviceId, this.name, grainState.ETag, baseGrainType, grainId.ToString()));
            }

            string storageVersion = null;
            try
            {
                var grainIdHash = HashPicker.PickHasher(serviceId, this.name, baseGrainType, grainReference, grainState).Hash(grainId.GetHashBytes());
                var grainTypeHash = HashPicker.PickHasher(serviceId, this.name, baseGrainType, grainReference, grainState).Hash(Encoding.UTF8.GetBytes(baseGrainType));
                var writeRecord = await Storage.ReadAsync(CurrentOperationalQueries.WriteToStorage, command =>
                {
                    command.AddParameter("GrainIdHash", grainIdHash);
                    command.AddParameter("GrainIdN0", grainId.N0Key);
                    command.AddParameter("GrainIdN1", grainId.N1Key);
                    command.AddParameter("GrainTypeHash", grainTypeHash);
                    command.AddParameter("GrainTypeString", baseGrainType);
                    command.AddParameter("GrainIdExtensionString", grainId.StringKey);
                    command.AddParameter("ServiceId", serviceId);
                    command.AddParameter("GrainStateVersion", !string.IsNullOrWhiteSpace(grainState.ETag) ? int.Parse(grainState.ETag, CultureInfo.InvariantCulture) : default(int?));

                    // Exactly one payload column is non-null; the serializer's tag selects which.
                    SerializationChoice serializer = StorageSerializationPicker.PickSerializer(serviceId, this.name, baseGrainType, grainReference, grainState);
                    command.AddParameter("PayloadBinary", (byte[])(serializer.Serializer.Tag == BinaryFormatSerializerTag ? serializer.Serializer.Serialize(data) : null));
                    command.AddParameter("PayloadJson", (string)(serializer.Serializer.Tag == JsonFormatSerializerTag ? serializer.Serializer.Serialize(data) : null));
                    command.AddParameter("PayloadXml", (string)(serializer.Serializer.Tag == XmlFormatSerializerTag ? serializer.Serializer.Serialize(data) : null));
                }, (selector, resultSetCount, token) =>
                {
                    return Task.FromResult(selector.GetNullableInt32("NewGrainStateVersion").ToString());
                }, CancellationToken.None).ConfigureAwait(false);
                storageVersion = writeRecord.SingleOrDefault();
            }
            catch(Exception ex)
            {
                logger.Error((int)RelationalStorageProviderCodes.RelationalProviderWriteError, LogString("Error writing grain state", serviceId, this.name, grainState.ETag, baseGrainType, grainId.ToString(), ex.Message), ex);
                throw;
            }

            const string OperationString = "WriteState";
            var inconsistentStateException = CheckVersionInconsistency(OperationString, serviceId, this.name, storageVersion, grainState.ETag, baseGrainType, grainId.ToString());
            if(inconsistentStateException != null)
            {
                throw inconsistentStateException;
            }

            //No errors found, the version of the state held by the grain can be updated.
            grainState.ETag = storageVersion;
            if (logger.IsEnabled(LogLevel.Trace))
            {
                logger.Trace((int)RelationalStorageProviderCodes.RelationalProviderWrote, LogString("Wrote grain state", serviceId, this.name, grainState.ETag, baseGrainType, grainId.ToString()));
            }
        }

        /// <summary> Initialization function for this storage provider. </summary>
        private async Task Init(CancellationToken cancellationToken)
        {
            this.serializationManager = providerRuntime.ServiceProvider.GetRequiredService<SerializationManager>();

            //NOTE: StorageSerializationPicker should be defined outside and given as a parameter in constructor or via Init in IProviderConfiguration perhaps.
            //Currently this limits one's options to much to the current situation of providing only one serializer for serialization and deserialization
            //with no regard to state update or serializer changes. Maybe have this serialized as a JSON in props and read via a key?
            StorageSerializationPicker = new DefaultRelationalStoragePicker(this.ConfigureDeserializers(options, providerRuntime), this.ConfigureSerializers(options, providerRuntime));

            Storage = RelationalStorage.CreateInstance(options.Invariant, options.ConnectionString);

            // The operational SQL text lives in the OrleansQuery table; load the three queries up front.
            var queries = await Storage.ReadAsync(DefaultInitializationQuery, command => { }, (selector, resultSetCount, token) =>
            {
                return Task.FromResult(Tuple.Create(selector.GetValue<string>("QueryKey"), selector.GetValue<string>("QueryText")));
            }).ConfigureAwait(false);

            CurrentOperationalQueries = new RelationalStorageProviderQueries(
                queries.Single(i => i.Item1 == "WriteToStorageKey").Item2,
                queries.Single(i => i.Item1 == "ReadFromStorageKey").Item2,
                queries.Single(i => i.Item1 == "ClearStorageKey").Item2);

            logger.Info((int)RelationalStorageProviderCodes.RelationalProviderInitProvider, $"Initialized storage provider: ServiceId={serviceId} ProviderName={this.name} Invariant={Storage.InvariantName} ConnectionString={Storage.ConnectionString}.");
        }

        /// <summary>
        /// Close this provider
        /// </summary>
        private Task Close(CancellationToken token)
        {
            return Task.CompletedTask;
        }

        /// <summary>
        /// Checks for version inconsistency as defined in the database scripts.
        /// </summary>
        /// <param name="serviceId">Service Id.</param>
        /// <param name="providerName">The name of this storage provider.</param>
        /// <param name="operation">The operation attempted.</param>
        /// <param name="storageVersion">The version from storage.</param>
        /// <param name="grainVersion">The grain version.</param>
        /// <param name="normalizedGrainType">Grain type without generics information.</param>
        /// <param name="grainId">The grain ID.</param>
        /// <returns>An exception for throwing or <em>null</em> if no violation was detected.</returns>
        /// <remarks>This means that the version was not updated in the database or the version storage version was something else than null
        /// when the grain version was null, meaning effectively a double activation and save.</remarks>
        private static InconsistentStateException CheckVersionInconsistency(string operation, string serviceId, string providerName, string storageVersion, string grainVersion, string normalizedGrainType, string grainId)
        {
            //If these are the same, it means no row was inserted or updated in the storage.
            //Effectively it means the UPDATE or INSERT conditions failed due to ETag violation.
            //Also if grainState.ETag storageVersion is null and storage comes back as null,
            //it means two grains were activated an the other one succeeded in writing its state.
            //
            //NOTE: the storage could return also the new and old ETag (Version), but currently it doesn't.
            if(storageVersion == grainVersion || storageVersion == string.Empty)
            {
                //TODO: Note that this error message should be canonical across back-ends.
                return new InconsistentStateException($"Version conflict ({operation}): ServiceId={serviceId} ProviderName={providerName} GrainType={normalizedGrainType} GrainId={grainId} ETag={grainVersion}.");
            }

            return null;
        }

        /// <summary>
        /// Writes a consistent log message from the given parameters.
        /// </summary>
        /// <param name="operationProlog">A free form prolog information to log.</param>
        /// <param name="serviceId">Service Id.</param>
        /// <param name="providerName">The name of this storage provider.</param>
        /// <param name="version">The grain version.</param>
        /// <param name="normalizedGrainType">Grain type without generics information.</param>
        /// <param name="grainId">The grain ID.</param>
        /// <param name="exceptionMessage">An optional exception message information to log.</param>
        /// <returns>A log string to be printed.</returns>
        private string LogString(string operationProlog, string serviceId, string providerName, string version, string normalizedGrainType, string grainId, string exceptionMessage = null)
        {
            const string Exception = " Exception=";
            return $"{operationProlog}: ServiceId={serviceId} ProviderName={providerName} GrainType={normalizedGrainType} GrainId={grainId} ETag={version}{(exceptionMessage != null ? Exception + exceptionMessage : string.Empty)}.";
        }

        /// <summary>
        /// Extracts a grain ID as a string and appends the key extension with '#' infix is present.
        /// </summary>
        /// <param name="grainReference">The reference from which to extract the ID.</param>
        /// <returns>The grain ID as a string.</returns>
        /// <remarks>This likely should exist in Orleans core in more optimized form.</remarks>
        private static AdoGrainKey GrainIdAndExtensionAsString(GrainReference grainReference)
        {
            //Kudos for https://github.com/tsibelman for the algorithm. See more at https://github.com/dotnet/orleans/issues/1905.
            string keyExtension;
            AdoGrainKey key;
            if(grainReference.IsPrimaryKeyBasedOnLong())
            {
                key = new AdoGrainKey(grainReference.GetPrimaryKeyLong(out keyExtension), keyExtension);
            }
            else
            {
                key = new AdoGrainKey(grainReference.GetPrimaryKey(out keyExtension), keyExtension);
            }

            return key;
        }

        /// <summary>
        /// Extracts a base class from a string that is either <see cref="Type.AssemblyQualifiedName"/> or
        /// <see cref="Type.FullName"/> or returns the one given as a parameter if no type is given.
        /// </summary>
        /// <param name="typeName">The base class name to give.</param>
        /// <returns>The extracted base class or the one given as a parameter if it didn't have a generic part.</returns>
        private static string ExtractBaseClass(string typeName)
        {
            // '`' marks the arity of a generic type in reflection type names; its absence means
            // the name can be used as-is.
            var genericPosition = typeName.IndexOf("`", StringComparison.OrdinalIgnoreCase);
            if(genericPosition != -1)
            {
                //The following relies the generic argument list to be in form as described
                //at https://msdn.microsoft.com/en-us/library/w3f99sx1.aspx.
                // NOTE(review): this strips assembly/version/culture noise from each generic argument,
                // keeping only "[Namespace.Type, Assembly]" pieces — verify against nested-generic inputs.
                var split = typeName.Split(BaseClassExtractionSplitDelimeters, StringSplitOptions.RemoveEmptyEntries);
                return split[0] + string.Format($"[{string.Join(",", split.Skip(1).Where(i => i.Length > 1 && i[0] != ',').Select(i => string.Format($"[{i.Substring(0, i.IndexOf(',', i.IndexOf(',') + 1))}]")))}]");
            }

            return typeName;
        }

        /// <summary>
        /// Builds the deserializer list from the options; binary is the fallback when neither
        /// JSON nor XML is enabled.
        /// </summary>
        private ICollection<IStorageDeserializer> ConfigureDeserializers(AdoNetGrainStorageOptions options, IProviderRuntime providerRuntime)
        {
            var deserializers = new List<IStorageDeserializer>();
            if(options.UseJsonFormat)
            {
                var typeResolver = providerRuntime.ServiceProvider.GetRequiredService<ITypeResolver>();
                var jsonSettings = OrleansJsonSerializer.UpdateSerializerSettings(OrleansJsonSerializer.GetDefaultSerializerSettings(typeResolver, providerRuntime.GrainFactory), options.UseFullAssemblyNames, options.IndentJson, options.TypeNameHandling);
                deserializers.Add(new OrleansStorageDefaultJsonDeserializer(jsonSettings, JsonFormatSerializerTag));
            }

            if(options.UseXmlFormat)
            {
                deserializers.Add(new OrleansStorageDefaultXmlDeserializer(XmlFormatSerializerTag));
            }

            //if none are set true, configure binary format serializer by default
            if(!options.UseXmlFormat && !options.UseJsonFormat)
            {
                deserializers.Add(new OrleansStorageDefaultBinaryDeserializer(this.serializationManager, BinaryFormatSerializerTag));
            }

            return deserializers;
        }

        /// <summary>
        /// Builds the serializer list from the options; mirrors <see cref="ConfigureDeserializers"/>.
        /// </summary>
        private ICollection<IStorageSerializer> ConfigureSerializers(AdoNetGrainStorageOptions options, IProviderRuntime providerRuntime)
        {
            var serializers = new List<IStorageSerializer>();
            if(options.UseJsonFormat)
            {
                var typeResolver = providerRuntime.ServiceProvider.GetRequiredService<ITypeResolver>();
                var jsonSettings = OrleansJsonSerializer.UpdateSerializerSettings(OrleansJsonSerializer.GetDefaultSerializerSettings(typeResolver, providerRuntime.GrainFactory), options.UseFullAssemblyNames, options.IndentJson, options.TypeNameHandling);
                serializers.Add(new OrleansStorageDefaultJsonSerializer(jsonSettings, JsonFormatSerializerTag));
            }

            if(options.UseXmlFormat)
            {
                serializers.Add(new OrleansStorageDefaultXmlSerializer(XmlFormatSerializerTag));
            }

            //if none are set true, configure binary format serializer by default
            if (!options.UseXmlFormat && !options.UseJsonFormat)
            {
                serializers.Add(new OrleansStorageDefaultBinarySerializer(this.serializationManager, BinaryFormatSerializerTag));
            }

            return serializers;
        }
    }
}
namespace Microsoft.Protocols.TestSuites.MS_OXCFOLD { using System; using System.Collections.Generic; using System.Threading; using Microsoft.Protocols.TestSuites.Common; using Microsoft.Protocols.TestTools; /// <summary> /// Implementation of the MS-OXCFOLD adapter. /// </summary> public partial class MS_OXCFOLDAdapter : ManagedAdapterBase, IMS_OXCFOLDAdapter { #region Variables /// <summary> /// The OxcropsClient instance. /// </summary> private OxcropsClient oxcropsClient; /// <summary> /// Original bytes array. /// </summary> private byte[] rawData; #endregion Variables /// <summary> /// Overrides IAdapter's Initialize method, to set testSite.DefaultProtocolDocShortName. /// </summary> /// <param name="testSite">Transfer ITestSite into adapter, make adapter can use ITestSite's function.</param> public override void Initialize(ITestSite testSite) { base.Initialize(testSite); Site.DefaultProtocolDocShortName = "MS-OXCFOLD"; Common.MergeConfiguration(testSite); this.oxcropsClient = new OxcropsClient(MapiContext.GetDefaultRpcContext(this.Site)); } #region MS_OXCFOLDAdapter methods /// <summary> /// Connect to the server for RPC calling. /// </summary> /// <param name="connectionType">The type of connection</param> /// <returns>If the behavior of connecting server is successful, the server will return true; otherwise, return false.</returns> public bool DoConnect(ConnectionType connectionType) { return this.oxcropsClient.Connect( Common.GetConfigurationPropertyValue("SutComputerName", this.Site), connectionType, Common.GetConfigurationPropertyValue("AdminUserEssdn", this.Site), Common.GetConfigurationPropertyValue("Domain", this.Site), Common.GetConfigurationPropertyValue("AdminUserName", this.Site), Common.GetConfigurationPropertyValue("AdminUserPassword", this.Site)); } /// <summary> /// Connect to the server for RPC calling. 
/// </summary> /// <param name="server">Server to connect.</param> /// <param name="connectionType">the type of connection</param> /// <param name="userDN">UserDN used to connect server.</param> /// <param name="domain">The domain the server is deployed.</param> /// <param name="userName">The domain account name.</param> /// <param name="password">Password value.</param> /// <returns>If the behavior of connecting server is successful, the server will return true; otherwise, return false.</returns> public bool DoConnect(string server, ConnectionType connectionType, string userDN, string domain, string userName, string password) { return this.oxcropsClient.Connect( server, connectionType, userDN, domain, userName, password); } /// <summary> /// Client calls it to disconnect the connection with Server. /// </summary> /// <returns>The return value indicates the disconnection state.</returns> public bool DoDisconnect() { return this.oxcropsClient.Disconnect(); } /// <summary> /// Sends ROP request with single operation and single input object handle with expected SuccessResponse. /// </summary> /// <param name="ropRequest">ROP request object.</param> /// <param name="insideObjHandle">Server object handle in request.</param> /// <param name="ropResponse">ROP response object.</param> /// <param name="responseSOHTable">Server objects handles in response.</param> /// <returns>An unsigned integer get from server. 0 indicates success, other values indicate failure.</returns> public uint DoRopCall(ISerializable ropRequest, uint insideObjHandle, ref object ropResponse, ref List<List<uint>> responseSOHTable) { return this.ExcuteRopCall(ropRequest, insideObjHandle, ref ropResponse, ref responseSOHTable, ref this.rawData); } /// <summary> /// Sends ROP request with single operation and multiple input object handles with expected SuccessResponse. 
/// </summary> /// <param name="ropRequest">ROP request object.</param> /// <param name="insideObjHandle">The list of server object handles in request.</param> /// <param name="ropResponse">ROP response object.</param> /// <param name="responseSOHTable">Server objects handles in response.</param> /// <returns>An unsigned integer get from server. 0 indicates success, other values indicate failure.</returns> public uint DoRopCall(ISerializable ropRequest, List<uint> insideObjHandle, ref object ropResponse, ref List<List<uint>> responseSOHTable) { return this.ExcuteRopCall(ropRequest, insideObjHandle, ref ropResponse, ref responseSOHTable, ref this.rawData); } /// <summary> /// Creates either public folders or private mailbox folders. /// </summary> /// <param name="ropCreateFolderRequest">RopCreateFolderRequest object.</param> /// <param name="insideObjHandle">Server object handle in RopCreateFolderRequest.</param> /// <param name="responseSOHTable">Server objects handles in RopCreateFolderResponse.</param> /// <returns>RopCreateFolderResponse object.</returns> public RopCreateFolderResponse CreateFolder(RopCreateFolderRequest ropCreateFolderRequest, uint insideObjHandle, ref List<List<uint>> responseSOHTable) { object temp = new object(); this.ExcuteRopCall((ISerializable)ropCreateFolderRequest, insideObjHandle, ref temp, ref responseSOHTable, ref this.rawData); RopCreateFolderResponse ropCreateFolderResponse = (RopCreateFolderResponse)temp; #region Capture Code // The ReturnValue equal to 0x00000000 indicate ROP operation success if (0x00000000 == ropCreateFolderResponse.ReturnValue) { this.VerifyRopCreateFolder(ropCreateFolderResponse); } #endregion return ropCreateFolderResponse; } /// <summary> /// Opens an existing folder. 
/// </summary>
/// <param name="ropOpenFolderRequest">RopOpenFolderRequest object.</param>
/// <param name="insideObjHandle">Server object handle in RopOpenFolderRequest.</param>
/// <param name="responseSOHTable">Server objects handles in RopOpenFolderResponse.</param>
/// <returns>RopOpenFolderResponse object.</returns>
public RopOpenFolderResponse OpenFolder(RopOpenFolderRequest ropOpenFolderRequest, uint insideObjHandle, ref List<List<uint>> responseSOHTable)
{
    object rawResponse = new object();

    // Normalize the common ROP header fields on the request before sending it.
    ropOpenFolderRequest.RopId = (byte)RopId.RopOpenFolder;
    ropOpenFolderRequest.LogonId = Constants.CommonLogonId;
    ropOpenFolderRequest.InputHandleIndex = Constants.CommonInputHandleIndex;
    ropOpenFolderRequest.OutputHandleIndex = Constants.CommonOutputHandleIndex;

    this.ExcuteRopCall((ISerializable)ropOpenFolderRequest, insideObjHandle, ref rawResponse, ref responseSOHTable, ref this.rawData);
    RopOpenFolderResponse ropOpenFolderResponse = (RopOpenFolderResponse)rawResponse;

    #region Capture Code
    // A ReturnValue of 0x00000000 indicates that the ROP operation succeeded.
    if (ropOpenFolderResponse.ReturnValue == 0x00000000)
    {
        this.VerifyRopOpenFolder(ropOpenFolderResponse);
    }
    #endregion

    return ropOpenFolderResponse;
}

/// <summary>
/// Removes a subfolder.
/// </summary>
/// <param name="ropDeleteFolderRequest">RopDeleteFolderRequest object.</param>
/// <param name="insideObjHandle">Server object handle in RopDeleteFolderRequest.</param>
/// <param name="responseSOHTable">Server objects handles in RopDeleteFolderResponse.</param>
/// <returns>RopDeleteFolderResponse object.</returns>
public RopDeleteFolderResponse DeleteFolder(RopDeleteFolderRequest ropDeleteFolderRequest, uint insideObjHandle, ref List<List<uint>> responseSOHTable)
{
    object rawResponse = new object();
    this.ExcuteRopCall((ISerializable)ropDeleteFolderRequest, insideObjHandle, ref rawResponse, ref responseSOHTable, ref this.rawData);
    RopDeleteFolderResponse ropDeleteFolderResponse = (RopDeleteFolderResponse)rawResponse;

    #region Capture Code
    // A ReturnValue of 0x00000000 indicates that the ROP operation succeeded.
    if (ropDeleteFolderResponse.ReturnValue == 0x00000000)
    {
        this.VerifyRopDeleteFolder(ropDeleteFolderResponse);
    }
    #endregion

    return ropDeleteFolderResponse;
}

/// <summary>
/// Establishes search criteria for a search folder.
/// </summary>
/// <param name="ropSetSearchCriteriaRequest">RopSetSearchCriteriaRequest object.</param>
/// <param name="insideObjHandle">Server object handle in RopSetSearchCriteriaRequest.</param>
/// <param name="responseSOHTable">Server objects handles in RopSetSearchCriteriaResponse.</param>
/// <returns>RopSetSearchCriteriaResponse object.</returns>
public RopSetSearchCriteriaResponse SetSearchCriteria(RopSetSearchCriteriaRequest ropSetSearchCriteriaRequest, uint insideObjHandle, ref List<List<uint>> responseSOHTable)
{
    object temp = new object();
    int count = 0;
    bool setSearchCriteriaComplete = false;
    int waitTime = int.Parse(Common.GetConfigurationPropertyValue("WaitTime", this.Site));
    int retryCount = int.Parse(Common.GetConfigurationPropertyValue("RetryCount", this.Site));
    RopSetSearchCriteriaResponse ropSetSearchCriteriaResponse;
    do
    {
        this.ExcuteRopCall((ISerializable)ropSetSearchCriteriaRequest, insideObjHandle, ref temp, ref responseSOHTable, ref this.rawData);
        ropSetSearchCriteriaResponse = (RopSetSearchCriteriaResponse)temp;

        // Error code 0x499 indicates the server is not ready to initialize a search now, the client should wait for a while and try again later.
        if (0x499 == ropSetSearchCriteriaResponse.ReturnValue)
        {
            Thread.Sleep(waitTime);
        }
        else
        {
            setSearchCriteriaComplete = true;
        }

        // BUGFIX: only fail when the retries are exhausted AND the server is still not ready.
        // Previously this check ran unconditionally, so a SUCCESSFUL final attempt
        // (setSearchCriteriaComplete == true) could still trip the assert once count exceeded retryCount.
        if (!setSearchCriteriaComplete && count > retryCount)
        {
            Site.Assert.Fail("The server failed to initialize a search!");
        }

        count++;
    }
    while (!setSearchCriteriaComplete);

    #region Capture Code
    // The ReturnValue equal to 0x00000000 indicate ROP operation success
    if (0x00000000 == ropSetSearchCriteriaResponse.ReturnValue)
    {
        this.VerifyRopSetSearchCriteria(ropSetSearchCriteriaResponse);
    }
    #endregion Capture Code

    return ropSetSearchCriteriaResponse;
}

/// <summary>
/// Obtains the search criteria and the status of a search for a search folder.
/// </summary>
/// <param name="ropGetSearchCriteriaRequest">RopGetSearchCriteriaRequest object.</param>
/// <param name="insideObjHandle">Server object handle in RopGetSearchCriteriaRequest.</param>
/// <param name="responseSOHTable">Server objects handles in RopGetSearchCriteriaResponse.</param>
/// <returns>RopGetSearchCriteriaResponse object.</returns>
public RopGetSearchCriteriaResponse GetSearchCriteria(RopGetSearchCriteriaRequest ropGetSearchCriteriaRequest, uint insideObjHandle, ref List<List<uint>> responseSOHTable)
{
    object rawResponse = new object();
    this.ExcuteRopCall((ISerializable)ropGetSearchCriteriaRequest, insideObjHandle, ref rawResponse, ref responseSOHTable, ref this.rawData);
    RopGetSearchCriteriaResponse ropGetSearchCriteriaResponse = (RopGetSearchCriteriaResponse)rawResponse;

    #region Capture Code
    // A ReturnValue of 0x00000000 indicates that the ROP operation succeeded.
    if (ropGetSearchCriteriaResponse.ReturnValue == 0x00000000)
    {
        this.VerifyRopGetSearchCriteria(ropGetSearchCriteriaResponse);
    }
    #endregion

    return ropGetSearchCriteriaResponse;
}

/// <summary>
/// Moves or copies messages from a source folder to a destination folder.
/// </summary>
/// <param name="ropMoveCopyMessagesRequest">RopMoveCopyMessagesRequest object.</param>
/// <param name="insideObjHandle">Server object handles in RopMoveCopyMessagesRequest.</param>
/// <param name="responseSOHTable">Server objects handles in RopMoveCopyMessagesResponse.</param>
/// <returns>RopMoveCopyMessagesResponse object.</returns>
public RopMoveCopyMessagesResponse MoveCopyMessages(RopMoveCopyMessagesRequest ropMoveCopyMessagesRequest, List<uint> insideObjHandle, ref List<List<uint>> responseSOHTable)
{
    object rawResponse = new object();
    this.ExcuteRopCall((ISerializable)ropMoveCopyMessagesRequest, insideObjHandle, ref rawResponse, ref responseSOHTable, ref this.rawData);
    RopMoveCopyMessagesResponse ropMoveCopyMessagesResponse = (RopMoveCopyMessagesResponse)rawResponse;

    #region Capture Code
    // A ReturnValue of 0x00000000 indicates that the ROP operation succeeded.
    if (ropMoveCopyMessagesResponse.ReturnValue == 0x00000000)
    {
        this.VerifyRopMoveCopyMessages(ropMoveCopyMessagesResponse);
    }
    #endregion

    return ropMoveCopyMessagesResponse;
}

/// <summary>
/// Moves a folder from one parent to another.
/// </summary>
/// <param name="ropMoveFolderRequest">RopMoveFolderRequest object.</param>
/// <param name="insideObjHandle">Server object handles in RopMoveFolderRequest.</param>
/// <param name="responseSOHTable">Server objects handles in RopMoveFolderResponse.</param>
/// <returns>RopMoveFolderResponse object.</returns>
public RopMoveFolderResponse MoveFolder(RopMoveFolderRequest ropMoveFolderRequest, List<uint> insideObjHandle, ref List<List<uint>> responseSOHTable)
{
    object rawResponse = new object();
    this.ExcuteRopCall((ISerializable)ropMoveFolderRequest, insideObjHandle, ref rawResponse, ref responseSOHTable, ref this.rawData);
    RopMoveFolderResponse ropMoveFolderResponse = (RopMoveFolderResponse)rawResponse;

    #region Capture Code
    // A ReturnValue of 0x00000000 indicates that the ROP operation succeeded.
    if (ropMoveFolderResponse.ReturnValue == 0x00000000)
    {
        this.VerifyRopMoveFolder(ropMoveFolderResponse);
    }
    #endregion

    return ropMoveFolderResponse;
}

/// <summary>
/// Creates a new folder on the destination parent folder, copying the properties and content of the source folder to the new folder.
/// </summary>
/// <param name="ropCopyFolderRequest">RopCopyFolderRequest object.</param>
/// <param name="insideObjHandle">Server object handles in RopCopyFolderRequest.</param>
/// <param name="responseSOHTable">Server objects handles in RopCopyFolderResponse.</param>
/// <returns>RopCopyFolderResponse object.</returns>
public RopCopyFolderResponse CopyFolder(RopCopyFolderRequest ropCopyFolderRequest, List<uint> insideObjHandle, ref List<List<uint>> responseSOHTable)
{
    object rawResponse = new object();
    this.ExcuteRopCall((ISerializable)ropCopyFolderRequest, insideObjHandle, ref rawResponse, ref responseSOHTable, ref this.rawData);
    RopCopyFolderResponse ropCopyFolderResponse = (RopCopyFolderResponse)rawResponse;

    #region Capture Code
    // A ReturnValue of 0x00000000 indicates that the ROP operation succeeded.
    if (ropCopyFolderResponse.ReturnValue == 0x00000000)
    {
        this.VerifyRopCopyFolder(ropCopyFolderResponse);
    }
    #endregion

    return ropCopyFolderResponse;
}

/// <summary>
/// Soft deletes all messages and subfolders from a folder without deleting the folder itself.
/// </summary>
/// <param name="ropEmptyFolderRequest">RopEmptyFolderRequest object.</param>
/// <param name="insideObjHandle">Server object handle in EmptyFolderRequest.</param>
/// <param name="responseSOHTable">Server objects handles in RopEmptyFolderResponse.</param>
/// <returns>RopEmptyFolderResponse object.</returns>
public RopEmptyFolderResponse EmptyFolder(RopEmptyFolderRequest ropEmptyFolderRequest, uint insideObjHandle, ref List<List<uint>> responseSOHTable)
{
    object rawResponse = new object();
    this.ExcuteRopCall((ISerializable)ropEmptyFolderRequest, insideObjHandle, ref rawResponse, ref responseSOHTable, ref this.rawData);
    RopEmptyFolderResponse ropEmptyFolderResponse = (RopEmptyFolderResponse)rawResponse;

    #region Capture Code
    // A ReturnValue of 0x00000000 indicates that the ROP operation succeeded.
    if (ropEmptyFolderResponse.ReturnValue == 0x00000000)
    {
        this.VerifyRopEmptyFolder(ropEmptyFolderResponse);
    }
    #endregion

    return ropEmptyFolderResponse;
}

/// <summary>
/// Hard deletes all messages and subfolders from a folder without deleting the folder itself.
/// </summary>
/// <param name="ropHardDeleteMessagesAndSubfoldersRequest">RopHardDeleteMessagesAndSubfoldersRequest object.</param>
/// <param name="insideObjHandle">Server object handle in RopHardDeleteMessagesAndSubfolders.</param>
/// <param name="responseSOHTable">Server objects handles in RopHardDeleteMessagesAndSubfoldersResponse.</param>
/// <returns>RopHardDeleteMessagesAndSubfoldersResponse object.</returns>
public RopHardDeleteMessagesAndSubfoldersResponse HardDeleteMessagesAndSubfolders(
    RopHardDeleteMessagesAndSubfoldersRequest ropHardDeleteMessagesAndSubfoldersRequest,
    uint insideObjHandle,
    ref List<List<uint>> responseSOHTable)
{
    object rawResponse = new object();
    this.ExcuteRopCall((ISerializable)ropHardDeleteMessagesAndSubfoldersRequest, insideObjHandle, ref rawResponse, ref responseSOHTable, ref this.rawData);
    RopHardDeleteMessagesAndSubfoldersResponse ropHardDeleteMessagesAndSubfoldersResponse = (RopHardDeleteMessagesAndSubfoldersResponse)rawResponse;

    #region Capture Code
    // A ReturnValue of 0x00000000 indicates that the ROP operation succeeded.
    if (ropHardDeleteMessagesAndSubfoldersResponse.ReturnValue == 0x00000000)
    {
        this.VerifyRopHardDeleteMessagesAndSubfolders(ropHardDeleteMessagesAndSubfoldersResponse);
    }
    #endregion

    return ropHardDeleteMessagesAndSubfoldersResponse;
}

/// <summary>
/// Deletes one or more messages from a folder.
/// </summary>
/// <param name="ropDeleteMessagesRequest">RopDeleteMessagesRequest object.</param>
/// <param name="insideObjHandle">Server object handle in RopDeleteMessages.</param>
/// <param name="responseSOHTable">Server objects handles in RopDeleteMessagesResponse.</param>
/// <returns>RopDeleteMessagesResponse object.</returns>
public RopDeleteMessagesResponse DeleteMessages(RopDeleteMessagesRequest ropDeleteMessagesRequest, uint insideObjHandle, ref List<List<uint>> responseSOHTable)
{
    object rawResponse = new object();
    this.ExcuteRopCall((ISerializable)ropDeleteMessagesRequest, insideObjHandle, ref rawResponse, ref responseSOHTable, ref this.rawData);
    RopDeleteMessagesResponse ropDeleteMessagesResponse = (RopDeleteMessagesResponse)rawResponse;

    #region Capture Code
    // A ReturnValue of 0x00000000 indicates that the ROP operation succeeded.
    if (ropDeleteMessagesResponse.ReturnValue == 0x00000000)
    {
        this.VerifyRopDeleteMessages(ropDeleteMessagesResponse);
    }
    #endregion

    return ropDeleteMessagesResponse;
}

/// <summary>
/// Hard deletes one or more messages that are listed in the request buffer.
/// </summary>
/// <param name="ropHardDeleteMessagesRequest">RopHardDeleteMessagesRequest object.</param>
/// <param name="insideObjHandle">Server object handle in RopHardDeleteMessages.</param>
/// <param name="responseSOHTable">Server objects handles in RopHardDeleteMessagesResponse.</param>
/// <returns>RopHardDeleteMessagesResponse object.</returns>
public RopHardDeleteMessagesResponse HardDeleteMessages(RopHardDeleteMessagesRequest ropHardDeleteMessagesRequest, uint insideObjHandle, ref List<List<uint>> responseSOHTable)
{
    object rawResponse = new object();
    this.ExcuteRopCall((ISerializable)ropHardDeleteMessagesRequest, insideObjHandle, ref rawResponse, ref responseSOHTable, ref this.rawData);
    RopHardDeleteMessagesResponse ropHardDeleteMessagesResponse = (RopHardDeleteMessagesResponse)rawResponse;

    #region Capture Code
    // A ReturnValue of 0x00000000 indicates that the ROP operation succeeded.
    if (ropHardDeleteMessagesResponse.ReturnValue == 0x00000000)
    {
        this.VerifyRopHardDeleteMessages(ropHardDeleteMessagesResponse);
    }
    #endregion

    return ropHardDeleteMessagesResponse;
}

/// <summary>
/// Retrieves the hierarchy table for a folder.
/// </summary>
/// <param name="ropGetHierarchyTableRequest">RopGetHierarchyTableRequest object.</param>
/// <param name="insideObjHandle">Server object handle RopGetHierarchyTable.</param>
/// <param name="responseSOHTable">Server objects handles in RopGetHierarchyTableResponse.</param>
/// <returns>RopGetHierarchyTableResponse object.</returns>
public RopGetHierarchyTableResponse GetHierarchyTable(RopGetHierarchyTableRequest ropGetHierarchyTableRequest, uint insideObjHandle, ref List<List<uint>> responseSOHTable)
{
    object rawResponse = new object();
    this.ExcuteRopCall((ISerializable)ropGetHierarchyTableRequest, insideObjHandle, ref rawResponse, ref responseSOHTable, ref this.rawData);
    RopGetHierarchyTableResponse ropGetHierarchyTableResponse = (RopGetHierarchyTableResponse)rawResponse;

    #region Capture Code
    // A ReturnValue of 0x00000000 indicates that the ROP operation succeeded.
    if (ropGetHierarchyTableResponse.ReturnValue == 0x00000000)
    {
        this.VerifyRopGetHierarchyTable(ropGetHierarchyTableResponse);
    }
    #endregion

    return ropGetHierarchyTableResponse;
}

/// <summary>
/// Retrieves the contents table for a folder.
/// </summary>
/// <param name="ropGetContentsTableRequest">RopGetContentsTableRequest object.</param>
/// <param name="insideObjHandle">Server object handle in RopGetContentsTable.</param>
/// <param name="responseSOHTable">Server objects handles in RopGetContentsTableResponse.</param>
/// <returns>RopGetContentsTableResponse object.</returns>
public RopGetContentsTableResponse GetContentsTable(RopGetContentsTableRequest ropGetContentsTableRequest, uint insideObjHandle, ref List<List<uint>> responseSOHTable)
{
    // 4294965994 == 0xFFFFF96A: the contents table was locked by another database operation (JET_errTableLocked).
    const uint JetErrTableLocked = 4294965994;

    object rawResponse = new object();
    RopGetContentsTableResponse ropGetContentsTableResponse;
    bool contentsTableLocked = true;
    int attempt = 0;
    int waitTime = int.Parse(Common.GetConfigurationPropertyValue("WaitTime", this.Site));
    int retryCount = int.Parse(Common.GetConfigurationPropertyValue("RetryCount", this.Site));

    // Retry while the table is locked, sleeping between attempts; give up after retryCount attempts.
    do
    {
        this.ExcuteRopCall((ISerializable)ropGetContentsTableRequest, insideObjHandle, ref rawResponse, ref responseSOHTable, ref this.rawData);
        ropGetContentsTableResponse = (RopGetContentsTableResponse)rawResponse;

        if (ropGetContentsTableResponse.ReturnValue == JetErrTableLocked)
        {
            Site.Log.Add(LogEntryKind.Comment, " JET_errTableLocked:" + ropGetContentsTableResponse.ReturnValue);
            Thread.Sleep(waitTime);
        }
        else
        {
            contentsTableLocked = false;
        }

        if (attempt > retryCount)
        {
            break;
        }

        attempt++;
    }
    while (contentsTableLocked);

    #region Capture Code
    // A ReturnValue of 0x00000000 indicates that the ROP operation succeeded.
    if (ropGetContentsTableResponse.ReturnValue == 0x00000000)
    {
        this.VerifyRopGetContentsTable(ropGetContentsTableResponse);
    }
    #endregion

    return ropGetContentsTableResponse;
}

/// <summary>
/// Set folder object properties.
/// </summary>
/// <param name="ropSetPropertiesRequest">RopSetPropertiesRequest object.</param>
/// <param name="insideObjHandle">Server object handle in SetProperties.</param>
/// <param name="responseSOHTable">Server objects handles in RopSetPropertiesResponse.</param>
/// <returns>RopSetPropertiesResponse object.</returns>
public RopSetPropertiesResponse SetFolderObjectProperties(RopSetPropertiesRequest ropSetPropertiesRequest, uint insideObjHandle, ref List<List<uint>> responseSOHTable)
{
    // No capture-code verification is defined for RopSetProperties; just execute and cast.
    object rawResponse = new object();
    this.ExcuteRopCall((ISerializable)ropSetPropertiesRequest, insideObjHandle, ref rawResponse, ref responseSOHTable, ref this.rawData);
    return (RopSetPropertiesResponse)rawResponse;
}

/// <summary>
/// Get folder object specific properties.
/// </summary>
/// <param name="ropGetPropertiesSpecificRequest">RopGetPropertiesSpecificRequest object</param>
/// <param name="insideObjHandle">Server object handle in GetPropertiesSpecific.</param>
/// <param name="responseSOHTable">Server objects handles in RopGetPropertiesSpecificResponse.</param>
/// <returns>RopGetPropertiesSpecificResponse object.</returns>
public RopGetPropertiesSpecificResponse GetFolderObjectSpecificProperties(RopGetPropertiesSpecificRequest ropGetPropertiesSpecificRequest, uint insideObjHandle, ref List<List<uint>> responseSOHTable)
{
    object rawResponse = new object();
    this.ExcuteRopCall((ISerializable)ropGetPropertiesSpecificRequest, insideObjHandle, ref rawResponse, ref responseSOHTable, ref this.rawData);
    RopGetPropertiesSpecificResponse response = (RopGetPropertiesSpecificResponse)rawResponse;

    // A ReturnValue of 0 means the ROP succeeded, which implies the property tags
    // supplied in the request were accepted by the server; verify them.
    if (response.ReturnValue == 0x00000000)
    {
        this.VerifyGetFolderSpecificProperties(ropGetPropertiesSpecificRequest.PropertyTags);
    }

    return response;
}

/// <summary>
/// Get all properties of a folder object.
/// </summary>
/// <param name="inputHandle">The handle specified the folder RopGetPropertiesAll Rop operation performs on.</param>
/// <param name="responseSOHTable">Server objects handles in RopGetPropertiesSpecificResponse.</param>
/// <returns>RopGetPropertiesAllResponse object.</returns>
public RopGetPropertiesAllResponse GetFolderPropertiesAll(uint inputHandle, ref List<List<uint>> responseSOHTable)
{
    object rawResponse = new object();

    // Build the request locally with the common header values; WantUnicode asks for Unicode strings.
    RopGetPropertiesAllRequest request = new RopGetPropertiesAllRequest
    {
        RopId = (byte)RopId.RopGetPropertiesAll,
        LogonId = Constants.CommonLogonId,
        InputHandleIndex = 0,
        PropertySizeLimit = ushort.MaxValue,
        WantUnicode = 0x01
    };

    this.ExcuteRopCall((ISerializable)request, inputHandle, ref rawResponse, ref responseSOHTable, ref this.rawData);
    RopGetPropertiesAllResponse response = (RopGetPropertiesAllResponse)rawResponse;

    // A ReturnValue of 0 means the ROP succeeded; run the capture-code verification.
    if (response.ReturnValue == 0x00000000)
    {
        this.VerifyGetFolderPropertiesAll(response);
    }

    return response;
}
#endregion

#region Help methods
/// <summary>
/// Execute a ROP call.
/// </summary>
/// <param name="ropRequest">ROP request objects</param>
/// <param name="insideObjHandle">Server object handle in request.</param>
/// <param name="response">ROP response object.</param>
/// <param name="responseSOHTable">Server objects handles in response.</param>
/// <param name="rawData">The original ROP response payload.</param>
/// <returns>An unsigned integer get from server. 0 indicates success, other values indicate failure.</returns>
private uint ExcuteRopCall(ISerializable ropRequest, uint insideObjHandle, ref object response, ref List<List<uint>> responseSOHTable, ref byte[] rawData)
{
    List<ISerializable> requestRops = new List<ISerializable> { ropRequest };
    List<uint> requestSOH = new List<uint> { insideObjHandle };

    if (Common.IsOutputHandleInRopRequest(ropRequest))
    {
        // Add an element for server output object handle, set default value to 0xFFFFFFFF
        requestSOH.Add(0xFFFFFFFF);
    }

    List<IDeserializable> responses = new List<IDeserializable>();
    responseSOHTable = new List<List<uint>>();

    // 0x10008 specifies the maximum size of the rgbOut buffer to place in Response.
    uint returnValue = this.oxcropsClient.RopCall(requestRops, requestSOH, ref responses, ref responseSOHTable, ref rawData, 0x10008);
    if (returnValue == OxcRpcErrorCode.ECRpcFormat)
    {
        throw new FormatException("Error RPC Format");
    }

    // Surface the first ROP response (if any); a null response list means no response object.
    if (responses == null)
    {
        response = null;
    }
    else if (responses.Count > 0)
    {
        response = responses[0];
        this.VerifyTransport();
        this.VerifyRPCLayerRequirement();
    }

    return returnValue;
}

/// <summary>
/// Execute a ROP call.
/// </summary>
/// <param name="ropRequest">ROP request objects</param>
/// <param name="insideObjHandle">Server object handles in request.</param>
/// <param name="response">ROP response object.</param>
/// <param name="responseSOHTable">Server objects handles in response.</param>
/// <param name="rawData">The original ROP response payload.</param>
/// <returns>An unsigned integer get from server. 0 indicates success, other values indicate failure.</returns>
private uint ExcuteRopCall(ISerializable ropRequest, List<uint> insideObjHandle, ref object response, ref List<List<uint>> responseSOHTable, ref byte[] rawData)
{
    List<ISerializable> requestRops = new List<ISerializable> { ropRequest };
    List<IDeserializable> responses = new List<IDeserializable>();
    responseSOHTable = new List<List<uint>>();

    // 0x10008 specifies the maximum size of the rgbOut buffer to place in Response.
    uint returnValue = this.oxcropsClient.RopCall(requestRops, insideObjHandle, ref responses, ref responseSOHTable, ref rawData, 0x10008);
    if (returnValue == OxcRpcErrorCode.ECRpcFormat)
    {
        throw new FormatException("Error RPC Format");
    }

    // Surface the first ROP response (if any); a null response list means no response object.
    if (responses == null)
    {
        response = null;
    }
    else if (responses.Count > 0)
    {
        response = responses[0];
        this.VerifyTransport();
        this.VerifyRPCLayerRequirement();
    }

    return returnValue;
}
#endregion
}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.

using System;
using Xunit;

namespace Microsoft.AspNetCore.Razor.Language.Legacy
{
    // Parser tests covering the transitions from C# code back into markup
    // ("@:", real tags, and the <text> pseudo tag) inside code blocks and
    // control-flow statements. Each test feeds a Razor document string to
    // ParseDocumentTest, which compares the parse tree against a baseline.
    public class CSharpToMarkupSwitchTest : ParserTestBase
    {
        [Fact]
        public void SingleAngleBracketDoesNotCauseSwitchIfOuterBlockIsTerminated()
        {
            ParseDocumentTest("@{ List< }");
        }

        [Fact]
        public void GivesSpacesToCodeOnAtTagTemplateTransitionInDesignTimeMode()
        {
            ParseDocumentTest("@Foo( @<p>Foo</p> )", designTime: true);
        }

        [Fact]
        public void GivesSpacesToCodeOnAtColonTemplateTransitionInDesignTimeMode()
        {
            ParseDocumentTest("@Foo( " + Environment.NewLine + "@:<p>Foo</p> " + Environment.NewLine + ")", designTime: true);
        }

        [Fact]
        public void GivesSpacesToCodeOnTagTransitionInDesignTimeMode()
        {
            ParseDocumentTest("@{" + Environment.NewLine + " <p>Foo</p> " + Environment.NewLine + "}", designTime: true);
        }

        [Fact]
        public void GivesSpacesToCodeOnInvalidAtTagTransitionInDesignTimeMode()
        {
            ParseDocumentTest("@{" + Environment.NewLine + " @<p>Foo</p> " + Environment.NewLine + "}", designTime: true);
        }

        [Fact]
        public void GivesSpacesToCodeOnAtColonTransitionInDesignTimeMode()
        {
            ParseDocumentTest("@{" + Environment.NewLine + " @:<p>Foo</p> " + Environment.NewLine + "}", designTime: true);
        }

        [Fact]
        public void ShouldSupportSingleLineMarkupContainingStatementBlock()
        {
            ParseDocumentTest("@Repeat(10," + Environment.NewLine + " @: @{}" + Environment.NewLine + ")");
        }

        [Fact]
        public void ShouldSupportMarkupWithoutPreceedingWhitespace()
        {
            ParseDocumentTest("@foreach(var file in files){" + Environment.NewLine + Environment.NewLine + Environment.NewLine + "@:Baz" + Environment.NewLine + "<br/>" + Environment.NewLine + "<a>Foo</a>" + Environment.NewLine + "@:Bar" + Environment.NewLine + "}");
        }

        [Fact]
        public void GivesAllWhitespaceOnSameLineWithTrailingNewLineToMarkupExclPreceedingNewline()
        {
            // ParseBlockGivesAllWhitespaceOnSameLineExcludingPreceedingNewlineButIncludingTrailingNewLineToMarkup
            ParseDocumentTest("@if(foo) {" + Environment.NewLine + " var foo = \"After this statement there are 10 spaces\"; " + Environment.NewLine + " <p>" + Environment.NewLine + " Foo" + Environment.NewLine + " @bar" + Environment.NewLine + " </p>" + Environment.NewLine + " @:Hello!" + Environment.NewLine + " var biz = boz;" + Environment.NewLine + "}");
        }

        [Fact]
        public void AllowsMarkupInIfBodyWithBraces()
        {
            ParseDocumentTest("@if(foo) { <p>Bar</p> } else if(bar) { <p>Baz</p> } else { <p>Boz</p> }");
        }

        [Fact]
        public void AllowsMarkupInIfBodyWithBracesWithinCodeBlock()
        {
            ParseDocumentTest("@{ if(foo) { <p>Bar</p> } else if(bar) { <p>Baz</p> } else { <p>Boz</p> } }");
        }

        [Fact]
        public void SupportsMarkupInCaseAndDefaultBranchesOfSwitch()
        {
            // Arrange
            ParseDocumentTest("@switch(foo) {" + Environment.NewLine + " case 0:" + Environment.NewLine + " <p>Foo</p>" + Environment.NewLine + " break;" + Environment.NewLine + " case 1:" + Environment.NewLine + " <p>Bar</p>" + Environment.NewLine + " return;" + Environment.NewLine + " case 2:" + Environment.NewLine + " {" + Environment.NewLine + " <p>Baz</p>" + Environment.NewLine + " <p>Boz</p>" + Environment.NewLine + " }" + Environment.NewLine + " default:" + Environment.NewLine + " <p>Biz</p>" + Environment.NewLine + "}");
        }

        [Fact]
        public void SupportsMarkupInCaseAndDefaultBranchesOfSwitchInCodeBlock()
        {
            // Arrange
            ParseDocumentTest("@{ switch(foo) {" + Environment.NewLine + " case 0:" + Environment.NewLine + " <p>Foo</p>" + Environment.NewLine + " break;" + Environment.NewLine + " case 1:" + Environment.NewLine + " <p>Bar</p>" + Environment.NewLine + " return;" + Environment.NewLine + " case 2:" + Environment.NewLine + " {" + Environment.NewLine + " <p>Baz</p>" + Environment.NewLine + " <p>Boz</p>" + Environment.NewLine + " }" + Environment.NewLine + " default:" + Environment.NewLine + " <p>Biz</p>" + Environment.NewLine + "} }");
        }

        [Fact]
        public void ParsesMarkupStatementOnOpenAngleBracket()
        {
            ParseDocumentTest("@for(int i = 0; i < 10; i++) { <p>Foo</p> }");
        }

        [Fact]
        public void ParsesMarkupStatementOnOpenAngleBracketInCodeBlock()
        {
            ParseDocumentTest("@{ for(int i = 0; i < 10; i++) { <p>Foo</p> } }");
        }

        [Fact]
        public void ParsesMarkupStatementOnSwitchCharacterFollowedByColon()
        {
            // Arrange
            ParseDocumentTest("@if(foo) { @:Bar" + Environment.NewLine + "} zoop");
        }

        [Fact]
        public void ParsesMarkupStatementOnSwitchCharacterFollowedByDoubleColon()
        {
            // Arrange
            ParseDocumentTest("@if(foo) { @::Sometext" + Environment.NewLine + "}");
        }

        [Fact]
        public void ParsesMarkupStatementOnSwitchCharacterFollowedByTripleColon()
        {
            // Arrange
            ParseDocumentTest("@if(foo) { @:::Sometext" + Environment.NewLine + "}");
        }

        [Fact]
        public void ParsesMarkupStatementOnSwitchCharacterFollowedByColonInCodeBlock()
        {
            // Arrange
            ParseDocumentTest("@{ if(foo) { @:Bar" + Environment.NewLine + "} } zoop");
        }

        [Fact]
        public void CorrectlyReturnsFromMarkupBlockWithPseudoTag()
        {
            ParseDocumentTest("@if (i > 0) { <text>;</text> }");
        }

        [Fact]
        public void CorrectlyReturnsFromMarkupBlockWithPseudoTagInCodeBlock()
        {
            ParseDocumentTest("@{ if (i > 0) { <text>;</text> } }");
        }

        [Fact]
        public void SupportsAllKindsOfImplicitMarkupInCodeBlock()
        {
            ParseDocumentTest("@{" + Environment.NewLine + " if(true) {" + Environment.NewLine + " @:Single Line Markup" + Environment.NewLine + " }" + Environment.NewLine + " foreach (var p in Enumerable.Range(1, 10)) {" + Environment.NewLine + " <text>The number is @p</text>" + Environment.NewLine + " }" + Environment.NewLine + " if(!false) {" + Environment.NewLine + " <p>A real tag!</p>" + Environment.NewLine + " }" + Environment.NewLine + "}");
        }
    }
}
//
// RangeIndex.cs
//
// Author:
//       Henning Rauch <Henning@RauchEntwicklung.biz>
//
// Copyright (c) 2011-2015 Henning Rauch
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.

#region Usings

using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
using Framework.Serialization;
using NoSQL.GraphDB.Error;
using NoSQL.GraphDB.Helper;
using NoSQL.GraphDB.Log;
using NoSQL.GraphDB.Model;

#endregion

namespace NoSQL.GraphDB.Index.Range
{
    /// <summary>
    ///   Fallen8 range index: maps IComparable keys to the graph elements indexed
    ///   under them and supports ordered queries (LowerThan / GreaterThan / Between).
    ///   All public members acquire the read/write resource of AThreadSafeElement
    ///   and throw <see cref="CollisionException"/> when the lock cannot be taken.
    /// </summary>
    public sealed class RangeIndex : AThreadSafeElement, IRangeIndex
    {
        #region Data

        /// <summary>
        ///   The index dictionary. Key -> all graph elements indexed under that key.
        /// </summary>
        private Dictionary<IComparable, List<AGraphElement>> _idx;

        /// <summary>
        ///   The description of the plugin.
        /// </summary>
        private String _description = "A very very simple range index";

        #endregion

        #region Constructor

        /// <summary>
        ///   Initializes a new instance of the RangeIndex class.
        ///   The index dictionary is created in <see cref="Initialize"/>, not here.
        /// </summary>
        public RangeIndex ()
        {
        }

        #endregion

        #region IDisposable implementation

        /// <summary>
        ///   Clears and releases the index dictionary. Not guarded by the resource
        ///   lock; callers must not use the index concurrently with disposal.
        /// </summary>
        public void Dispose ()
        {
            _idx.Clear();
            _idx = null;
        }

        #endregion

        #region IPlugin implementation

        /// <summary>
        ///   Plugin entry point: (re)creates an empty index. The parameters are unused.
        /// </summary>
        public void Initialize (Fallen8 fallen8, IDictionary<string, object> parameter)
        {
            _idx = new Dictionary<IComparable, List<AGraphElement>>();
        }

        public string PluginName
        {
            get { return "RangeIndex"; }
        }

        public Type PluginCategory
        {
            get { return typeof(IIndex); }
        }

        public string Description
        {
            get { return _description; }
        }

        public string Manufacturer
        {
            get { return "Henning Rauch"; }
        }

        #endregion

        #region IFallen8Serializable implementation

        /// <summary>
        ///   Serializes the index as: parameter placeholder (0), key count, then per
        ///   key the key object, value count and the ids of the referenced elements.
        /// </summary>
        public void Save (SerializationWriter writer)
        {
            if (ReadResource())
            {
                writer.Write(0);//parameter
                writer.Write(_idx.Count);
                foreach (var aKV in _idx)
                {
                    writer.WriteObject(aKV.Key);
                    writer.Write(aKV.Value.Count);
                    foreach (var aItem in aKV.Value)
                    {
                        writer.Write(aItem.Id);
                    }
                }

                FinishReadResource();

                return;
            }

            throw new CollisionException(this);
        }

        /// <summary>
        ///   Deserializes the index written by <see cref="Save"/>, resolving element
        ///   ids against the given Fallen8 instance. Missing elements are logged and
        ///   skipped rather than failing the whole load.
        /// </summary>
        public void Load (SerializationReader reader, Fallen8 fallen8)
        {
            if (WriteResource())
            {
                reader.ReadInt32();//parameter
                var keyCount = reader.ReadInt32();
                _idx = new Dictionary<IComparable, List<AGraphElement>>(keyCount);

                for (var i = 0; i < keyCount; i++)
                {
                    var key = reader.ReadObject();
                    var value = new List<AGraphElement>();
                    var valueCount = reader.ReadInt32();
                    for (var j = 0; j < valueCount; j++)
                    {
                        var graphElementId = reader.ReadInt32();
                        AGraphElement graphElement;
                        if(fallen8.TryGetGraphElement(out graphElement, graphElementId))
                        {
                            value.Add(graphElement);
                        }
                        else
                        {
                            Logger.LogError(String.Format("Error while deserializing the index. Could not find the graph element \"{0}\"", graphElementId));
                        }
                    }
                    _idx.Add((IComparable)key, value);
                }

                FinishWriteResource();

                return;
            }

            throw new CollisionException(this);
        }

        #endregion

        #region IIndex implementation

        /// <summary>Number of distinct keys in the index.</summary>
        public Int32 CountOfKeys()
        {
            if (ReadResource())
            {
                var keyCount = _idx.Keys.Count;

                FinishReadResource();

                return keyCount;
            }

            throw new CollisionException(this);
        }

        /// <summary>Total number of indexed values across all keys (with duplicates).</summary>
        public Int32 CountOfValues()
        {
            if (ReadResource())
            {
                var valueCount = _idx.Values.SelectMany(_ => _).Count();

                FinishReadResource();

                return valueCount;
            }

            throw new CollisionException(this);
        }

        /// <summary>
        ///   Adds the graph element under the given key; silently ignores keys that
        ///   are not IComparable (IndexHelper.CheckObject filter).
        /// </summary>
        public void AddOrUpdate(Object keyObject, AGraphElement graphElement)
        {
            IComparable key;
            if (!IndexHelper.CheckObject(out key, keyObject))
            {
                return;
            }

            if (WriteResource())
            {
                List<AGraphElement> values;
                if (_idx.TryGetValue(key, out values))
                {
                    values.Add(graphElement);
                }
                else
                {
                    values = new List<AGraphElement> { graphElement };
                    _idx.Add(key, values);
                }

                FinishWriteResource();

                return;
            }

            throw new CollisionException(this);
        }

        /// <summary>Removes a key and all of its values. Returns false for unknown/unusable keys.</summary>
        public bool TryRemoveKey (Object keyObject)
        {
            IComparable key;
            if (!IndexHelper.CheckObject(out key, keyObject))
            {
                return false;
            }

            if (WriteResource())
            {
                var foundSth = _idx.Remove(key);

                FinishWriteResource();

                return foundSth;
            }

            throw new CollisionException(this);
        }

        /// <summary>
        ///   Removes the element from every key's value list and drops keys that
        ///   become empty as a result.
        /// </summary>
        public void RemoveValue(AGraphElement graphElement)
        {
            if (WriteResource())
            {
                var toBeRemovedKeys = new List<IComparable>();

                foreach (var aKv in _idx)
                {
                    aKv.Value.Remove(graphElement);
                    if (aKv.Value.Count == 0)
                    {
                        toBeRemovedKeys.Add(aKv.Key);
                    }
                }

                toBeRemovedKeys.ForEach(_ => _idx.Remove(_));

                FinishWriteResource();

                return;
            }

            throw new CollisionException(this);
        }

        /// <summary>Removes every entry from the index.</summary>
        public void Wipe ()
        {
            if (WriteResource())
            {
                _idx.Clear();

                FinishWriteResource();

                return;
            }

            throw new CollisionException(this);
        }

        /// <summary>Returns a snapshot copy of all keys.</summary>
        public IEnumerable<Object> GetKeys ()
        {
            if (ReadResource())
            {
                var keys = new List<IComparable>(_idx.Keys);

                FinishReadResource();

                return keys;
            }

            throw new CollisionException(this);
        }

        /// <summary>
        ///   Lazily yields key/value pairs with read-only value wrappers.
        ///   NOTE(review): as an iterator method, ReadResource() is not taken until
        ///   the first MoveNext(), and the lock is held across the whole (caller
        ///   controlled) enumeration — confirm this matches the locking contract.
        /// </summary>
        public IEnumerable<KeyValuePair<object, ReadOnlyCollection<AGraphElement>>> GetKeyValues()
        {
            if (ReadResource())
            {
                try
                {
                    foreach (var aKv in _idx)
                        yield return new KeyValuePair<object, ReadOnlyCollection<AGraphElement>>(aKv.Key, new ReadOnlyCollection<AGraphElement>(aKv.Value));
                }
                finally
                {
                    FinishReadResource();
                }

                yield break;
            }

            throw new CollisionException(this);
        }

        /// <summary>
        ///   Tries to fetch the values for a key. Returns false (result null) when the
        ///   key is unusable or absent.
        /// </summary>
        public bool TryGetValue(out ReadOnlyCollection<AGraphElement> result, Object keyObject)
        {
            IComparable key;
            if (!IndexHelper.CheckObject(out key, keyObject))
            {
                result = null;
                return false;
            }

            if (ReadResource())
            {
                List<AGraphElement> graphElements;
                var foundSth = _idx.TryGetValue(key, out graphElements);

                result = foundSth ? new ReadOnlyCollection<AGraphElement>(graphElements) : null;

                FinishReadResource();

                return foundSth;
            }

            throw new CollisionException(this);
        }

        #endregion

        #region IRangeIndex implementation

        /// <summary>All elements whose key is lower than (or equal to, if includeKey) the given key.</summary>
        public bool LowerThan (out ReadOnlyCollection<AGraphElement> result, IComparable key, bool includeKey)
        {
            if (ReadResource())
            {
                var listOfMatchingGraphElements = _idx
                    .AsParallel()
                    .Where(aKV => includeKey
                        ? aKV.Key.CompareTo(key) <= 0
                        : aKV.Key.CompareTo(key) < 0)
                    .Select(aRelevantKV => aRelevantKV.Value)
                    .SelectMany(_ => _)
                    .ToList();

                result = listOfMatchingGraphElements.Count > 0
                             ? new ReadOnlyCollection<AGraphElement>(listOfMatchingGraphElements)
                             : null;

                FinishReadResource();

                return result != null;
            }

            throw new CollisionException(this);
        }

        /// <summary>All elements whose key is greater than (or equal to, if includeKey) the given key.</summary>
        public bool GreaterThan (out ReadOnlyCollection<AGraphElement> result, IComparable key, bool includeKey)
        {
            if (ReadResource())
            {
                var listOfMatchingGraphElements = _idx
                    .AsParallel()
                    .Where(aKV => includeKey
                        ? aKV.Key.CompareTo(key) >= 0
                        : aKV.Key.CompareTo(key) > 0)
                    .Select(aRelevantKV => aRelevantKV.Value)
                    .SelectMany(_ => _)
                    .ToList();

                result = listOfMatchingGraphElements.Count > 0
                             ? new ReadOnlyCollection<AGraphElement>(listOfMatchingGraphElements)
                             : null;

                FinishReadResource();

                return result != null;
            }

            throw new CollisionException(this);
        }

        /// <summary>
        ///   All elements whose key lies between lowerLimit and upperLimit, with
        ///   per-bound inclusivity flags.
        /// </summary>
        public bool Between (out ReadOnlyCollection<AGraphElement> result, IComparable lowerLimit, IComparable upperLimit, bool includeLowerLimit, bool includeUpperLimit)
        {
            if (ReadResource())
            {
                // BUGFIX: the comparisons were inverted (lower bound used <= / <,
                // upper bound used >= / >), which selected keys OUTSIDE the range.
                // A key k is in range when k >= lowerLimit (or > when exclusive)
                // AND k <= upperLimit (or < when exclusive) — consistent with
                // GreaterThan/LowerThan above.
                var listOfMatchingGraphElements = _idx
                    .AsParallel()
                    .Where(aKV =>
                        (includeLowerLimit
                            ? aKV.Key.CompareTo(lowerLimit) >= 0
                            : aKV.Key.CompareTo(lowerLimit) > 0)
                        && (includeUpperLimit
                            ? aKV.Key.CompareTo(upperLimit) <= 0
                            : aKV.Key.CompareTo(upperLimit) < 0))
                    .Select(aRelevantKV => aRelevantKV.Value)
                    .SelectMany(_ => _)
                    .ToList();

                result = listOfMatchingGraphElements.Count > 0
                             ? new ReadOnlyCollection<AGraphElement>(listOfMatchingGraphElements)
                             : null;

                FinishReadResource();

                return result != null;
            }

            throw new CollisionException(this);
        }

        #endregion
    }
}
/*
   Copyright 2012 Michael Edwards

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
*/
//-CRE-

using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Glass.Mapper.Sc.Dynamic;
using Glass.Mapper.Sc.FakeDb.Infrastructure.Pipelines.RenderField;
using NUnit.Framework;
using Sitecore.Data;
using Sitecore.Data.Items;
using Sitecore.FakeDb;
using Sitecore.FakeDb.Sites;
using Sitecore.Links;

namespace Glass.Mapper.Sc.Integration.Dynamic
{
    // Integration tests for DynamicItem: a dynamic wrapper over a Sitecore Item.
    // Each test builds an in-memory item tree with Sitecore.FakeDb, wraps the
    // item in a DynamicItem, and checks that dynamic member access maps to the
    // corresponding Item property or rendered field.
    [TestFixture]
    public class DynamicItemFixture
    {
        #region INTERFACE TEST

        // DynamicItem is usable through the IDynamicItem interface: field access
        // goes through the render-field pipeline (SimpleRenderField replaces the
        // key prefix), while interface members read item metadata directly.
        [Test]
        public void InterfaceTest()
        {
            //Assign
            using (var database = new Db
            {
                new DbItem("Target")
                {
                    new DbField("DateTime")
                    {
                        Value = SimpleRenderField.ReplacementKey+"20120204T150015"
                    },
                    new DbField("SingleLineText")
                    {
                        Value = "some awesome dynamic content"
                    },
                    new DbItem("Child1"),
                    new DbItem("Child2"),
                    new DbItem("Child3")
                }
            })
            {
                Item item = database.GetItem("/sitecore/content/Target");

                using (new Sitecore.Sites.SiteContextSwitcher(new FakeSiteContext("Test")))
                {
                    dynamic d = new DynamicItem(item);
                    IDynamicItem i = d as IDynamicItem;

                    //Act
                    string result = d.DateTime;
                    string path = i.Path;

                    //Assert
                    Assert.AreEqual(SimpleRenderField.ReplacementValue + "20120204T150015", result);
                    Assert.AreEqual(item.Paths.Path, path);
                }
            }
        }

        #endregion

        #region PROPERTY ContentPath

        // d.ContentPath maps to Item.Paths.ContentPath.
        [Test]
        public void ContentPath_ReturnsContentPath()
        {
            //Assign
            using (var database = new Db
            {
                new DbItem("Target")
                {
                    new DbItem("Child1"),
                    new DbItem("Child2"),
                    new DbItem("Child3")
                }
            })
            {
                Item item = database.GetItem("/sitecore/content/Target");
                dynamic d = new DynamicItem(item);

                //Act
                var result = d.ContentPath;

                //Assert
                Assert.AreEqual(item.Paths.ContentPath, result);
            }
        }

        #endregion

        #region PROPERTY DisplayName

        // d.DisplayName maps to the item's DisplayName field.
        [Test]
        public void DisplayName_ReturnsDisplayName()
        {
            //Assign
            using (var database = new Db
            {
                new DbItem("Target")
                {
                    new DbItem("Child1"),
                    new DbItem("Child2"),
                    new DbItem("Child3")
                }
            })
            {
                Item item = database.GetItem("/sitecore/content/Target");
                dynamic d = new DynamicItem(item);

                //Act
                var result = d.DisplayName;

                //Assert
                Assert.AreEqual(item["DisplayName"], result);
            }
        }

        #endregion

        #region PROPERTY FullPath

        // d.FullPath maps to Item.Paths.FullPath.
        [Test]
        public void FullPath_ReturnsFullPath()
        {
            //Assign
            using (var database = new Db
            {
                new DbItem("Target")
                {
                    new DbItem("Child1"),
                    new DbItem("Child2"),
                    new DbItem("Child3")
                }
            })
            {
                Item item = database.GetItem("/sitecore/content/Target");
                dynamic d = new DynamicItem(item);

                //Act
                var result = d.FullPath;

                //Assert
                Assert.AreEqual(item.Paths.FullPath, result);
            }
        }

        #endregion

        #region PROPERTY Key

        // d.Key maps to Item.Key (lower-cased item name).
        [Test]
        public void Key_ReturnsKey()
        {
            //Assign
            using (var database = new Db
            {
                new DbItem("Target")
                {
                    new DbItem("Child1"),
                    new DbItem("Child2"),
                    new DbItem("Child3")
                }
            })
            {
                Item item = database.GetItem("/sitecore/content/Target");
                dynamic d = new DynamicItem(item);

                //Act
                var result = d.Key;

                //Assert
                Assert.AreEqual(item.Key, result);
            }
        }

        #endregion

        #region PROPERTY MediaUrl

        // d.MediaUrl maps to MediaManager.GetMediaUrl for the item.
        [Test]
        public void MediaUrl_ReturnsMediaUrl()
        {
            //Assign
            using (var database = new Db
            {
                new DbItem("Target")
                {
                    new DbItem("Child1"),
                    new DbItem("Child2"),
                    new DbItem("Child3")
                }
            })
            {
                Item item = database.GetItem("/sitecore/content/Target");
                dynamic d = new DynamicItem(item);

                //Act
                var result = d.MediaUrl;

                //Assert
                Assert.AreEqual(
                    Sitecore.Resources.Media.MediaManager.GetMediaUrl(new Sitecore.Data.Items.MediaItem(item)),
                    result);
            }
        }

        #endregion

        #region PROPERTY Path

        // d.Path maps to Item.Paths.Path.
        [Test]
        public void Path_ReturnsPath()
        {
            //Assign
            using (var database = new Db
            {
                new DbItem("Target")
                {
                    new DbItem("Child1"),
                    new DbItem("Child2"),
                    new DbItem("Child3")
                }
            })
            {
                Item item = database.GetItem("/sitecore/content/Target");
                dynamic d = new DynamicItem(item);

                //Act
                var result = d.Path;

                //Assert
                Assert.AreEqual(item.Paths.Path, result);
            }
        }

        #endregion

        #region PROPERTY TemplateId

        // d.TemplateId maps to Item.TemplateID.Guid (a Guid, not a Sitecore ID).
        [Test]
        public void TemplateId_ReturnsTemplateId()
        {
            //Assign
            using (var database = new Db
            {
                new DbItem("Target")
                {
                    new DbItem("Child1"),
                    new DbItem("Child2"),
                    new DbItem("Child3")
                }
            })
            {
                Item item = database.GetItem("/sitecore/content/Target");
                dynamic d = new DynamicItem(item);

                //Act
                var result = d.TemplateId;

                //Assert
                Assert.AreEqual(item.TemplateID.Guid, result);
            }
        }

        #endregion

        #region PROPERTY TemplateName

        // d.TemplateName maps to Item.TemplateName.
        [Test]
        public void TemplateName_ReturnsTemplateName()
        {
            //Assign
            using (var database = new Db
            {
                new DbItem("Target")
                {
                    new DbItem("Child1"),
                    new DbItem("Child2"),
                    new DbItem("Child3")
                }
            })
            {
                Item item = database.GetItem("/sitecore/content/Target");
                dynamic d = new DynamicItem(item);

                //Act
                var result = d.TemplateName;

                //Assert
                Assert.AreEqual(item.TemplateName, result);
            }
        }

        #endregion

        #region PROPERTY Url

        // d.Url maps to LinkManager.GetItemUrl.
        [Test]
        public void Url_ReturnsUrl()
        {
            //Assign
            using (var database = new Db
            {
                new DbItem("Target")
                {
                    new DbItem("Child1"),
                    new DbItem("Child2"),
                    new DbItem("Child3")
                }
            })
            {
                Item item = database.GetItem("/sitecore/content/Target");
                dynamic d = new DynamicItem(item);

                //Act
                var result = d.Url;

                //Assert
                Assert.AreEqual(LinkManager.GetItemUrl(item), result);
            }
        }

        #endregion

        #region PROPERTY Version

        // d.Version maps to Item.Version.Number (an int, not the Version object).
        [Test]
        public void Version_ReturnsVersion()
        {
            //Assign
            using (var database = new Db
            {
                new DbItem("Target")
                {
                    new DbItem("Child1"),
                    new DbItem("Child2"),
                    new DbItem("Child3")
                }
            })
            {
                Item item = database.GetItem("/sitecore/content/Target");
                dynamic d = new DynamicItem(item);

                //Act
                var result = d.Version;

                //Assert
                Assert.AreEqual(item.Version.Number, result);
            }
        }

        #endregion

        #region PROPERTY Name

        // d.Name maps to Item.Name.
        [Test]
        public void Name_ReturnsName()
        {
            //Assign
            using (var database = new Db
            {
                new DbItem("Target")
                {
                    new DbItem("Child1"),
                    new DbItem("Child2"),
                    new DbItem("Child3")
                }
            })
            {
                Item item = database.GetItem("/sitecore/content/Target");
                dynamic d = new DynamicItem(item);

                //Act
                var result = d.Name;

                //Assert
                Assert.AreEqual(item.Name, result);
            }
        }

        #endregion

        #region PROPERTY Language

        // d.Language maps to Item.Language.
        [Test]
        public void Language_ReturnsLanguage()
        {
            //Assign
            using (var database = new Db
            {
                new DbItem("Target")
                {
                    new DbItem("Child1"),
                    new DbItem("Child2"),
                    new DbItem("Child3")
                }
            })
            {
                Item item = database.GetItem("/sitecore/content/Target");
                dynamic d = new DynamicItem(item);

                //Act
                var result = d.Language;

                //Assert
                Assert.AreEqual(item.Language, result);
            }
        }

        #endregion

        #region PROPERTY Language

        // NOTE(review): this test duplicates Language_ReturnsLanguage and its name
        // ("ReturnsContentPath") does not match its body — likely a copy/paste
        // leftover; consider removing or renaming it.
        [Test]
        public void Language_ReturnsContentPath()
        {
            //Assign
            using (var database = new Db
            {
                new DbItem("Target")
                {
                    new DbItem("Child1"),
                    new DbItem("Child2"),
                    new DbItem("Child3")
                }
            })
            {
                Item item = database.GetItem("/sitecore/content/Target");
                dynamic d = new DynamicItem(item);

                //Act
                var result = d.Language;

                //Assert
                Assert.AreEqual(item.Language, result);
            }
        }

        #endregion

        #region PROPERTY BaseTemplateIds

        // d.BaseTemplateIds yields the template inheritance chain as Guids.
        // With two explicit base templates the count is 3 (presumably including
        // the Standard Template — TODO confirm against DynamicItem's implementation).
        [Test]
        public void BaseTemplateIds_ReturnsBaseTemplateIds()
        {
            //Assign
            ID templateId = ID.NewID;
            ID baseTemplateId1 = ID.NewID;
            ID baseTemplateId2 = ID.NewID;

            using (var database = new Db
            {
                new Sitecore.FakeDb.DbTemplate("temp", templateId)
                {
                    new DbField("__Base template")
                    {
                        // Pipe-separated list of base template IDs, as Sitecore stores them.
                        Value = "{0}|{1}".Formatted(baseTemplateId1.ToString(), baseTemplateId2.ToString())
                    }
                },
                new Sitecore.FakeDb.DbTemplate("base1", baseTemplateId1),
                new Sitecore.FakeDb.DbTemplate("base2", baseTemplateId2),
                new DbItem("Target",ID.NewID, templateId)
                {
                    new DbItem("Child1"),
                    new DbItem("Child2"),
                    new DbItem("Child3")
                }
            })
            {
                Item item = database.GetItem("/sitecore/content/Target");
                dynamic d = new DynamicItem(item);

                //Act
                var result = d.BaseTemplateIds as IEnumerable<Guid>;

                //Assert
                Assert.AreEqual(result.Count(), 3);
            }
        }

        #endregion

        #region

        // Field access on the dynamic item runs through the render-field pipeline
        // for every field type (SimpleRenderField swaps the key for the value).
        [Test]
        public void DynamicFields_ReturnsFields()
        {
            //Assign
            using (var database = new Db
            {
                new DbItem("Target")
                {
                    new DbField("DateTime")
                    {
                        Value = SimpleRenderField.ReplacementKey+"20120204T150015"
                    },
                    new DbField("SingleLineText")
                    {
                        Value = SimpleRenderField.ReplacementKey+"some awesome dynamic content"
                    },
                    new DbItem("Child1"),
                    new DbItem("Child2"),
                    new DbItem("Child3")
                }
            })
            {
                using (new Sitecore.Sites.SiteContextSwitcher(new FakeSiteContext("Test")))
                {
                    Item item = database.GetItem("/sitecore/content/Target");
                    dynamic d = new DynamicItem(item);

                    //Act
                    string dateTime = d.DateTime;
                    string text = d.SingleLineText;

                    //Assert
                    Assert.AreEqual(SimpleRenderField.ReplacementValue+"some awesome dynamic content", text);
                    Assert.AreEqual(SimpleRenderField.ReplacementValue + "20120204T150015", dateTime);
                }
            }
        }

        // Item metadata (Path/Name) is readable as strings via dynamic members.
        [Test]
        public void DynamicInfo_ReturnsItemInfo()
        {
            //Assign
            using (var database = new Db
            {
                new DbItem("Target")
                {
                    new DbItem("Child1"),
                    new DbItem("Child2"),
                    new DbItem("Child3")
                }
            })
            {
                Item item = database.GetItem("/sitecore/content/Target");
                dynamic d = new DynamicItem(item);

                //Act
                string path = d.Path;
                string name = d.Name;

                //Assert
                Assert.AreEqual("/sitecore/content/Target", path);
                Assert.AreEqual("Target", name);
            }
        }

        // d.Parent is itself a dynamic item wrapping the parent Sitecore item.
        [Test]
        public void DynamicParent_ReturnsParent()
        {
            //Assign
            using (var database = new Db
            {
                new DbItem("Target")
                {
                    new DbItem("Child1"),
                    new DbItem("Child2"),
                    new DbItem("Child3")
                }
            })
            {
                Item item = database.GetItem("/sitecore/content/Target");
                dynamic d = new DynamicItem(item);

                //Act
                var parent = d.Parent;
                string path = parent.Path;

                //Assert
                Assert.AreEqual(item.Parent.Paths.FullPath, path);
            }
        }

        // d.Children enumerates dynamic wrappers for each child item.
        [Test]
        public void DynamicParent_ReturnsChildren()
        {
            //Assign
            using (var database = new Db
            {
                new DbItem("Target")
                {
                    new DbItem("Child1"),
                    new DbItem("Child2"),
                    new DbItem("Child3")
                }
            })
            {
                Item item = database.GetItem("/sitecore/content/Target");
                dynamic d = new DynamicItem(item);

                //Act
                var children = d.Children;

                //Assert
                Assert.AreEqual(3, children.Count());

                foreach (var child in d.Children)
                {
                    string path = child.Path;
                    Assert.IsTrue(path.StartsWith("/sitecore/content/Target"));
                }
            }
        }

        // LINQ operators (Last) work on the dynamic children enumerable.
        [Test]
        public void DynamicParent_ReturnsLastChild()
        {
            //Assign
            using (var database = new Db
            {
                new DbItem("Target")
                {
                    new DbItem("Child1"),
                    new DbItem("Child2"),
                    new DbItem("Child3")
                }
            })
            {
                Item item = database.GetItem("/sitecore/content/Target");
                dynamic d = new DynamicItem(item);

                //Act
                var children = d.Children;

                //Assert
                Assert.AreEqual(3, children.Count());

                var child = d.Children.Last();
                string path = child.Path;
                Assert.IsTrue(path.StartsWith("/sitecore/content/Target"));
            }
        }

        #endregion
    }

    // Marker interface used to demonstrate typed access on top of IDynamicItem.
    public interface IDynamicTitle : IDynamicItem
    {
        string Title { get; set; }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

// 

namespace System.Reflection.Emit
{
    using System.Text;
    using System;
    using CultureInfo = System.Globalization.CultureInfo;
    using System.Diagnostics.SymbolStore;
    using System.Reflection;
    using System.Security;
    using System.Collections;
    using System.Collections.Generic;
    using System.Security.Permissions;
    using System.Runtime.InteropServices;
    using System.Diagnostics.Contracts;

    // Defines a method (or global function) on a TypeBuilder/ModuleBuilder and
    // accumulates everything needed to bake it into metadata: name, attributes,
    // signature, IL body, exception handlers and debug symbol information.
    [HostProtection(MayLeakOnAbort = true)]
    [ClassInterface(ClassInterfaceType.None)]
    [ComDefaultInterface(typeof(_MethodBuilder))]
    [System.Runtime.InteropServices.ComVisible(true)]
    public sealed class MethodBuilder : MethodInfo, _MethodBuilder
    {
        #region Private Data Members
        // Identity
        internal String m_strName; // The name of the method
        private MethodToken m_tkMethod; // The token of this method
        private ModuleBuilder m_module;
        internal TypeBuilder m_containingType;

        // IL
        private int[] m_mdMethodFixups; // The location of all of the token fixups. Null means no fixups.
        private byte[] m_localSignature; // Local signature if set explicitly via DefineBody. Null otherwise.
        internal LocalSymInfo m_localSymInfo; // keep track debugging local information
        internal ILGenerator m_ilGenerator; // Null if not used.
        private byte[] m_ubBody; // The IL for the method
        private ExceptionHandler[] m_exceptions; // Exception handlers or null if there are none.
        private const int DefaultMaxStack = 16;
        private int m_maxStack = DefaultMaxStack;

        // Flags
        internal bool m_bIsBaked;               // true once the body has been handed to the runtime; the builder is then frozen
        private bool m_bIsGlobalMethod;
        private bool m_fInitLocals; // indicating if the method stack frame will be zero initialized or not.
        // Attributes
        private MethodAttributes m_iAttributes;
        private CallingConventions m_callingConvention;
        private MethodImplAttributes m_dwMethodImplFlags;

        // Parameters
        private SignatureHelper m_signature;
        internal Type[] m_parameterTypes;
        private ParameterBuilder m_retParam;
        private Type m_returnType;
        private Type[] m_returnTypeRequiredCustomModifiers;
        private Type[] m_returnTypeOptionalCustomModifiers;
        private Type[][] m_parameterTypeRequiredCustomModifiers;
        private Type[][] m_parameterTypeOptionalCustomModifiers;

        // Generics
        private GenericTypeParameterBuilder[] m_inst;   // non-null once DefineGenericParameters has run
        private bool m_bIsGenMethDef;
        #endregion

        #region Constructor
        // Convenience overload without custom modifiers; forwards to Init.
        internal MethodBuilder(String name, MethodAttributes attributes, CallingConventions callingConvention,
            Type returnType, Type[] parameterTypes, ModuleBuilder mod, TypeBuilder type, bool bIsGlobalMethod)
        {
            Init(name, attributes, callingConvention,
                returnType, null, null,
                parameterTypes, null, null,
                mod, type, bIsGlobalMethod);
        }

        // Full overload carrying required/optional custom modifiers for the return
        // type and each parameter; forwards to Init.
        internal MethodBuilder(String name, MethodAttributes attributes, CallingConventions callingConvention,
            Type returnType, Type[] returnTypeRequiredCustomModifiers, Type[] returnTypeOptionalCustomModifiers,
            Type[] parameterTypes, Type[][] parameterTypeRequiredCustomModifiers, Type[][] parameterTypeOptionalCustomModifiers,
            ModuleBuilder mod, TypeBuilder type, bool bIsGlobalMethod)
        {
            Init(name, attributes, callingConvention,
                returnType, returnTypeRequiredCustomModifiers, returnTypeOptionalCustomModifiers,
                parameterTypes, parameterTypeRequiredCustomModifiers, parameterTypeOptionalCustomModifiers,
                mod, type, bIsGlobalMethod);
        }

        // Validates the arguments and records the method's identity, signature data
        // and default flags. Does not touch metadata yet — that happens when the
        // containing type is baked.
        private void Init(String name, MethodAttributes attributes, CallingConventions callingConvention,
            Type returnType, Type[] returnTypeRequiredCustomModifiers, Type[] returnTypeOptionalCustomModifiers,
            Type[] parameterTypes, Type[][] parameterTypeRequiredCustomModifiers, Type[][] parameterTypeOptionalCustomModifiers,
            ModuleBuilder mod, TypeBuilder type, bool bIsGlobalMethod)
        {
            if (name == null)
                throw new ArgumentNullException(nameof(name));

            if (name.Length == 0)
                throw new ArgumentException(Environment.GetResourceString("Argument_EmptyName"), nameof(name));

            // Names may not start with an embedded NUL (metadata restriction).
            if (name[0] == '\0')
                throw new ArgumentException(Environment.GetResourceString("Argument_IllegalName"), nameof(name));

            if (mod == null)
                throw new ArgumentNullException(nameof(mod));
            Contract.EndContractBlock();

            if (parameterTypes != null)
            {
                foreach(Type t in parameterTypes)
                {
                    if (t == null)
                        throw new ArgumentNullException(nameof(parameterTypes));
                }
            }

            m_strName = name;
            m_module = mod;
            m_containingType = type;

            // 
            //if (returnType == null)
            //{
            //    m_returnType = typeof(void);
            //}
            //else
            {
                m_returnType = returnType;
            }

            if ((attributes & MethodAttributes.Static) == 0)
            {
                // turn on the has this calling convention
                callingConvention = callingConvention | CallingConventions.HasThis;
            }
            else if ((attributes & MethodAttributes.Virtual) != 0)
            {
                // A method can't be both static and virtual
                throw new ArgumentException(Environment.GetResourceString("Arg_NoStaticVirtual"));
            }

            if ((attributes & MethodAttributes.SpecialName) != MethodAttributes.SpecialName)
            {
                if ((type.Attributes & TypeAttributes.Interface) == TypeAttributes.Interface)
                {
                    // methods on interface have to be abstract + virtual except special name methods such as type initializer
                    if ((attributes & (MethodAttributes.Abstract | MethodAttributes.Virtual)) !=
                        (MethodAttributes.Abstract | MethodAttributes.Virtual) &&
                        (attributes & MethodAttributes.Static) == 0)
                        throw new ArgumentException(Environment.GetResourceString("Argument_BadAttributeOnInterfaceMethod"));
                }
            }

            m_callingConvention = callingConvention;

            // Defensive copy so a caller mutating its array cannot change the signature later.
            if (parameterTypes != null)
            {
                m_parameterTypes = new Type[parameterTypes.Length];
                Array.Copy(parameterTypes, 0, m_parameterTypes, 0, parameterTypes.Length);
            }
            else
            {
                m_parameterTypes = null;
            }

            m_returnTypeRequiredCustomModifiers = returnTypeRequiredCustomModifiers;
            m_returnTypeOptionalCustomModifiers = returnTypeOptionalCustomModifiers;
            m_parameterTypeRequiredCustomModifiers = parameterTypeRequiredCustomModifiers;
            m_parameterTypeOptionalCustomModifiers = parameterTypeOptionalCustomModifiers;

            // The signature is built lazily in GetMethodSignature so that generic
            // arity (m_inst) is known at that point.
            // m_signature = SignatureHelper.GetMethodSigHelper(mod, callingConvention,
            //     returnType, returnTypeRequiredCustomModifiers, returnTypeOptionalCustomModifiers,
            //     parameterTypes, parameterTypeRequiredCustomModifiers, parameterTypeOptionalCustomModifiers);

            m_iAttributes = attributes;
            m_bIsGlobalMethod = bIsGlobalMethod;
            m_bIsBaked = false;
            m_fInitLocals = true;

            m_localSymInfo = new LocalSymInfo();
            m_ubBody = null;
            m_ilGenerator = null;

            // Default is managed IL. Manged IL has bit flag 0x0020 set off
            m_dwMethodImplFlags = MethodImplAttributes.IL;
        }
        #endregion

        #region Internal Members
        internal void CheckContext(params Type[][] typess)
        {
            m_module.CheckContext(typess);
        }

        internal void CheckContext(params Type[] types)
        {
            m_module.CheckContext(types);
        }

        // Bakes the IL produced by the given ILGenerator into this builder:
        // captures the byte body, token fixups and the exception-handler table,
        // then (if the module is a debug module) emits symbol information.
        [System.Security.SecurityCritical]  // auto-generated
        internal void CreateMethodBodyHelper(ILGenerator il)
        {
            // Sets the IL of the method.  An ILGenerator is passed as an argument and the method
            // queries this instance to get all of the information which it needs.
            if (il == null)
            {
                throw new ArgumentNullException(nameof(il));
            }
            Contract.EndContractBlock();

            __ExceptionInfo[] excp;
            int counter=0;
            int[] filterAddrs;
            int[] catchAddrs;
            int[] catchEndAddrs;
            Type[] catchClass;
            int[] type;
            int numCatch;
            int start, end;
            ModuleBuilder dynMod = (ModuleBuilder) m_module;

            m_containingType.ThrowIfCreated();

            if (m_bIsBaked)
            {
                throw new InvalidOperationException(Environment.GetResourceString("InvalidOperation_MethodHasBody"));
            }

            if (il.m_methodBuilder != this && il.m_methodBuilder != null)
            {
                // you don't need to call DefineBody when you get your ILGenerator
                // through MethodBuilder::GetILGenerator.
                //
                throw new InvalidOperationException(Environment.GetResourceString("InvalidOperation_BadILGeneratorUsage"));
            }

            ThrowIfShouldNotHaveBody();

            if (il.m_ScopeTree.m_iOpenScopeCount != 0)
            {
                // There are still unclosed local scope
                throw new InvalidOperationException(Environment.GetResourceString("InvalidOperation_OpenLocalVariableScope"));
            }

            m_ubBody = il.BakeByteArray();

            m_mdMethodFixups = il.GetTokenFixups();

            //Okay, now the fun part.  Calculate all of the exceptions.
            excp = il.GetExceptions();
            int numExceptions = CalculateNumberOfExceptions(excp);
            if (numExceptions > 0)
            {
                m_exceptions = new ExceptionHandler[numExceptions];

                for (int i = 0; i < excp.Length; i++)
                {
                    filterAddrs = excp[i].GetFilterAddresses();
                    catchAddrs = excp[i].GetCatchAddresses();
                    catchEndAddrs = excp[i].GetCatchEndAddresses();
                    catchClass = excp[i].GetCatchClass();

                    numCatch = excp[i].GetNumberOfCatches();
                    start = excp[i].GetStartAddress();
                    end = excp[i].GetEndAddress();
                    type = excp[i].GetExceptionTypes();
                    for (int j = 0; j < numCatch; j++)
                    {
                        int tkExceptionClass = 0;
                        if (catchClass[j] != null)
                        {
                            tkExceptionClass = dynMod.GetTypeTokenInternal(catchClass[j]).Token;
                        }

                        switch (type[j])
                        {
                            case __ExceptionInfo.None:
                            case __ExceptionInfo.Fault:
                            case __ExceptionInfo.Filter:
                                m_exceptions[counter++] = new ExceptionHandler(start, end, filterAddrs[j], catchAddrs[j], catchEndAddrs[j], type[j], tkExceptionClass);
                                break;

                            case __ExceptionInfo.Finally:
                                // finally clauses use the finally-end address as the
                                // protected region's end instead of the try end.
                                m_exceptions[counter++] = new ExceptionHandler(start, excp[i].GetFinallyEndAddress(), filterAddrs[j], catchAddrs[j], catchEndAddrs[j], type[j], tkExceptionClass);
                                break;
                        }
                    }
                }
            }

            m_bIsBaked=true;

            if (dynMod.GetSymWriter() != null)
            {
                // set the debugging information such as scope and line number
                // if it is in a debug module
                //
                SymbolToken tk = new SymbolToken(MetadataTokenInternal);
                ISymbolWriter symWriter = dynMod.GetSymWriter();

                // call OpenMethod to make this method the current method
                symWriter.OpenMethod(tk);

                // call OpenScope because OpenMethod no longer implicitly creates
                // the top-level method scope
                //
                symWriter.OpenScope(0);

                if (m_symCustomAttrs != null)
                {
                    foreach(SymCustomAttr symCustomAttr in m_symCustomAttrs)
                        dynMod.GetSymWriter().SetSymAttribute(
                        new SymbolToken (MetadataTokenInternal),
                            symCustomAttr.m_name,
                            symCustomAttr.m_data);
                }

                if (m_localSymInfo != null)
                    m_localSymInfo.EmitLocalSymInfo(symWriter);
                il.m_ScopeTree.EmitScopeTree(symWriter);
                il.m_LineNumberInfo.EmitLineNumberInfo(symWriter);
                symWriter.CloseScope(il.ILOffset);
                symWriter.CloseMethod();
            }
        }

        // This is only called from TypeBuilder.CreateType after the method has been created
        internal void ReleaseBakedStructures()
        {
            if (!m_bIsBaked)
            {
                // We don't need to do anything here if we didn't baked the method body
                return;
            }

            m_ubBody = null;
            m_localSymInfo = null;
            m_mdMethodFixups = null;
            m_localSignature = null;
            m_exceptions = null;
        }

        internal override Type[] GetParameterTypes()
        {
            // Normalize a never-set parameter list to an empty array.
            if (m_parameterTypes == null)
                m_parameterTypes = EmptyArray<Type>.Value;

            return m_parameterTypes;
        }

        // Return type of a MethodBase: MethodInfo.ReturnType for methods,
        // GetReturnType() for constructors.
        internal static Type GetMethodBaseReturnType(MethodBase method)
        {
            MethodInfo mi = null;
            ConstructorInfo ci = null;

            if ( (mi = method as MethodInfo) != null )
            {
                return mi.ReturnType;
            }
            else if ( (ci = method as ConstructorInfo) != null)
            {
                return ci.GetReturnType();
            }
            else
            {
                Contract.Assert(false, "We should never get here!");
                return null;
            }
        }

        internal void SetToken(MethodToken token)
        {
            m_tkMethod = token;
        }

        internal byte[] GetBody()
        {
            // Returns the il bytes of this method.
            // This il is not valid until somebody has called BakeByteArray
            return m_ubBody;
        }

        internal int[] GetTokenFixups()
        {
            return m_mdMethodFixups;
        }

        // Builds (and caches in m_signature) the full method signature, defaulting
        // a null return type to void and including generic arity.
        [System.Security.SecurityCritical]  // auto-generated
        internal SignatureHelper GetMethodSignature()
        {
            if (m_parameterTypes == null)
                m_parameterTypes = EmptyArray<Type>.Value;

            m_signature = SignatureHelper.GetMethodSigHelper (m_module, m_callingConvention, m_inst != null ? m_inst.Length : 0,
                m_returnType == null ? typeof(void) : m_returnType, m_returnTypeRequiredCustomModifiers, m_returnTypeOptionalCustomModifiers,
                m_parameterTypes, m_parameterTypeRequiredCustomModifiers, m_parameterTypeOptionalCustomModifiers);

            return m_signature;
        }

        // Returns a buffer whose initial signatureLength bytes contain encoded local signature.
        internal byte[] GetLocalSignature(out int signatureLength)
        {
            // An explicit signature (DefineBody) wins over the ILGenerator's locals.
            if (m_localSignature != null)
            {
                signatureLength = m_localSignature.Length;
                return m_localSignature;
            }

            if (m_ilGenerator != null)
            {
                if (m_ilGenerator.m_localCount != 0)
                {
                    // If user is using ILGenerator::DeclareLocal, then get local signaturefrom there.
                    return m_ilGenerator.m_localSignature.InternalGetSignature(out signatureLength);
                }
            }

            // No locals at all: an empty local-var signature.
            return SignatureHelper.GetLocalVarSigHelper(m_module).InternalGetSignature(out signatureLength);
        }

        internal int GetMaxStack()
        {
            if (m_ilGenerator != null)
            {
                return m_ilGenerator.GetMaxStackSize() + ExceptionHandlerCount;
            }
            else
            {
                // this is the case when client provide an array of IL byte stream rather than going through ILGenerator.
                return m_maxStack;
            }
        }

        internal ExceptionHandler[] GetExceptionHandlers()
        {
            return m_exceptions;
        }

        internal int ExceptionHandlerCount
        {
            get { return m_exceptions != null ? m_exceptions.Length : 0; }
        }

        // Total number of catch/fault/filter/finally clauses across all
        // __ExceptionInfo entries; 0 for a null array.
        internal int CalculateNumberOfExceptions(__ExceptionInfo[] excp)
        {
            int num=0;

            if (excp==null)
            {
                return 0;
            }

            for (int i=0; i<excp.Length; i++)
            {
                num+=excp[i].GetNumberOfCatches();
            }

            return num;
        }

        internal bool IsTypeCreated()
        {
            return (m_containingType != null && m_containingType.IsCreated());
        }

        internal TypeBuilder GetTypeBuilder()
        {
            return m_containingType;
        }

        internal ModuleBuilder GetModuleBuilder()
        {
            return m_module;
        }
        #endregion

        #region Object Overrides
        // Equality is name + attributes + full signature (NOT reference identity).
        [System.Security.SecuritySafeCritical]  // auto-generated
        public override bool Equals(Object obj)
        {
            if (!(obj is MethodBuilder))
            {
                return false;
            }
            if (!(this.m_strName.Equals(((MethodBuilder)obj).m_strName)))
            {
                return false;
            }

            if (m_iAttributes!=(((MethodBuilder)obj).m_iAttributes))
            {
                return false;
            }

            SignatureHelper thatSig = ((MethodBuilder)obj).GetMethodSignature();
            if (thatSig.Equals(GetMethodSignature()))
            {
                return true;
            }

            return false;
        }

        // Hash only on the name — consistent with Equals (equal builders share a
        // name), though coarser.
        public override int GetHashCode()
        {
            return this.m_strName.GetHashCode();
        }

        [System.Security.SecuritySafeCritical]  // auto-generated
        public override String ToString()
        {
            StringBuilder sb = new StringBuilder(1000);
            sb.Append("Name: " + m_strName + " " + Environment.NewLine);
            sb.Append("Attributes: " + (int)m_iAttributes + Environment.NewLine);
            sb.Append("Method Signature: " + GetMethodSignature() + Environment.NewLine);
            sb.Append(Environment.NewLine);
            return sb.ToString();
        }

        #endregion

        #region MemberInfo Overrides
        public override String Name
        {
            get { return m_strName; }
        }

        internal int MetadataTokenInternal
        {
            get { return GetToken().Token; }
        }

        public override Module Module
        {
            get { return m_containingType.Module; }
        }

        public override Type DeclaringType
        {
            get
            {
                // Global (module-level) methods report no declaring type.
                if (m_containingType.m_isHiddenGlobalType == true)
                    return null;
                return m_containingType;
            }
        }

        public override ICustomAttributeProvider ReturnTypeCustomAttributes
        {
            get { return null; }
        }

        public override Type ReflectedType
        {
            get { return DeclaringType; }
        }

        #endregion

        #region MethodBase Overrides
        // A MethodBuilder is a definition under construction, not an invokable runtime method.
        public override Object Invoke(Object obj, BindingFlags invokeAttr, Binder binder, Object[] parameters, CultureInfo culture)
        {
            throw new NotSupportedException(Environment.GetResourceString("NotSupported_DynamicModule"));
        }

        public override MethodImplAttributes GetMethodImplementationFlags()
        {
            return m_dwMethodImplFlags;
        }

        public override MethodAttributes Attributes
        {
            get { return m_iAttributes; }
        }

        public override CallingConventions CallingConvention
        {
            get { return m_callingConvention; }
        }

        public override RuntimeMethodHandle MethodHandle
        {
            get { throw new NotSupportedException(Environment.GetResourceString("NotSupported_DynamicModule")); }
        }

        // On CoreCLR all dynamic code is critical/opaque; on desktop the transparency
        // of an unbaked method is unknowable, so these throw.
        public override bool IsSecurityCritical
        {
#if FEATURE_CORECLR
            get { return true; }
#else
            get { throw new NotSupportedException(Environment.GetResourceString("NotSupported_DynamicModule")); }
#endif
        }

        public override bool IsSecuritySafeCritical
        {
#if FEATURE_CORECLR
            get { return false; }
#else
            get { throw new NotSupportedException(Environment.GetResourceString("NotSupported_DynamicModule")); }
#endif
        }

        public override bool IsSecurityTransparent
        {
#if FEATURE_CORECLR
            get { return false; }
#else
            get { throw new NotSupportedException(Environment.GetResourceString("NotSupported_DynamicModule")); }
#endif
        }
        #endregion

        #region MethodInfo Overrides
        public override MethodInfo GetBaseDefinition()
        {
            return this;
        }

        public override Type ReturnType
        {
            get { return m_returnType; }
        }

        // Parameter info is only available after the declaring type has been baked;
        // it is fetched back off the resulting runtime method.
        [Pure]
        public override ParameterInfo[] GetParameters()
        {
            // NOTE(review): throws NotSupportedException here but ReturnParameter throws
            // InvalidOperationException for the same condition — long-standing inconsistency.
            if (!m_bIsBaked || m_containingType == null || m_containingType.BakedRuntimeType == null)
                throw new NotSupportedException(Environment.GetResourceString("InvalidOperation_TypeNotCreated"));

            MethodInfo rmi = m_containingType.GetMethod(m_strName, m_parameterTypes);

            return rmi.GetParameters();
        }

        public override ParameterInfo ReturnParameter
        {
            get
            {
                if (!m_bIsBaked || m_containingType == null || m_containingType.BakedRuntimeType == null)
                    throw new InvalidOperationException(Environment.GetResourceString("InvalidOperation_TypeNotCreated"));

                MethodInfo rmi = m_containingType.GetMethod(m_strName, m_parameterTypes);

                return rmi.ReturnParameter;
            }
        }
        #endregion

        #region ICustomAttributeProvider Implementation
        public override Object[] GetCustomAttributes(bool inherit)
        {
            throw new NotSupportedException(Environment.GetResourceString("NotSupported_DynamicModule"));
        }

        public override Object[] GetCustomAttributes(Type attributeType, bool inherit)
        {
            throw new NotSupportedException(Environment.GetResourceString("NotSupported_DynamicModule"));
        }

        public override bool IsDefined(Type attributeType, bool inherit)
        {
            throw new NotSupportedException(Environment.GetResourceString("NotSupported_DynamicModule"));
        }

        #endregion

        #region Generic Members
        public override bool IsGenericMethodDefinition
        {
            get { return m_bIsGenMethDef; }
        }

        public override bool ContainsGenericParameters
        {
            get { throw new NotSupportedException(); }
        }

        public override MethodInfo GetGenericMethodDefinition()
        {
            if (!IsGenericMethod)
                throw new InvalidOperationException();

            return this;
        }

        // A method is generic exactly when generic parameters have been defined on it.
        public override bool IsGenericMethod
        {
            get { return m_inst != null; }
        }

        public override Type[] GetGenericArguments()
        {
            return m_inst;
        }

        public override MethodInfo MakeGenericMethod(params Type[] typeArguments)
        {
            return MethodBuilderInstantiation.MakeGenericMethod(this, typeArguments);
        }

        // Defines the generic type parameters for this method; may only be called once,
        // and only before the method has been tokenized.
        public GenericTypeParameterBuilder[] DefineGenericParameters (params string[] names)
        {
            if (names == null)
                throw new ArgumentNullException(nameof(names));

            if (names.Length == 0)
                throw new ArgumentException(Environment.GetResourceString("Arg_EmptyArray"), nameof(names));
            Contract.EndContractBlock();

            if (m_inst != null)
                throw new InvalidOperationException(Environment.GetResourceString("InvalidOperation_GenericParametersAlreadySet"));

            for (int i = 0; i < names.Length; i ++)
                if (names[i] == null)
                    throw new ArgumentNullException(nameof(names));

            if (m_tkMethod.Token != 0)
                throw new InvalidOperationException(Environment.GetResourceString("InvalidOperation_MethodBuilderBaked"));

            m_bIsGenMethDef = true;
            m_inst = new GenericTypeParameterBuilder[names.Length];
            for (int i = 0; i < names.Length; i ++)
                m_inst[i] = new GenericTypeParameterBuilder(new TypeBuilder(names[i], i, this));

            return m_inst;
        }

        // Guards operations that are illegal on an instantiated (non-definition) generic method.
        internal void ThrowIfGeneric () { if (IsGenericMethod && !IsGenericMethodDefinition) throw new InvalidOperationException (); }
        #endregion

        #region Public Members
        [System.Security.SecuritySafeCritical]  // auto-generated
        public MethodToken GetToken()
        {
            // We used to always "tokenize" a MethodBuilder when it is constructed. After change list 709498
            // we only "tokenize" a method when requested. But the order in which the methods are tokenized
            // didn't change: the same order the MethodBuilders are constructed. The recursion introduced
            // will overflow the stack when there are many methods on the same type (10000 in my experiment).
            // The change also introduced race conditions. Before the code change GetToken is called from
            // the MethodBuilder .ctor which is protected by lock(ModuleBuilder.SyncRoot). Now it
            // could be called more than once on the the same method introducing duplicate (invalid) tokens.
            // I don't fully understand this change. So I will keep the logic and only fix the recursion and
            // the race condition.

            if (m_tkMethod.Token != 0)
            {
                return m_tkMethod;
            }

            MethodBuilder currentMethod = null;
            MethodToken currentToken = new MethodToken(0);
            int i;

            // We need to lock here to prevent a method from being "tokenized" twice.
            // We don't need to synchronize this with Type.DefineMethod because it only appends newly
            // constructed MethodBuilders to the end of m_listMethods
            lock (m_containingType.m_listMethods)
            {
                if (m_tkMethod.Token != 0)
                {
                    return m_tkMethod;
                }

                // If m_tkMethod is still 0 when we obtain the lock, m_lastTokenizedMethod must be smaller
                // than the index of the current method. Tokenize every earlier method first so metadata
                // token order matches construction order.
                for (i = m_containingType.m_lastTokenizedMethod + 1; i < m_containingType.m_listMethods.Count; ++i)
                {
                    currentMethod = m_containingType.m_listMethods[i];

                    currentToken = currentMethod.GetTokenNoLock();

                    if (currentMethod == this)
                        break;
                }

                m_containingType.m_lastTokenizedMethod = i;
            }

            Contract.Assert(currentMethod == this, "We should have found this method in m_containingType.m_listMethods");
            Contract.Assert(currentToken.Token != 0, "The token should not be 0");

            return currentToken;
        }

        // Emits this method's metadata definition; caller must hold m_listMethods lock.
        [System.Security.SecurityCritical]  // auto-generated
        private MethodToken GetTokenNoLock()
        {
            Contract.Assert(m_tkMethod.Token == 0, "m_tkMethod should not have been initialized");

            int sigLength;
            byte[] sigBytes = GetMethodSignature().InternalGetSignature(out sigLength);

            int token = TypeBuilder.DefineMethod(m_module.GetNativeHandle(), m_containingType.MetadataTokenInternal, m_strName, sigBytes, sigLength, Attributes);
            m_tkMethod = new MethodToken(token);

            // Bake any generic parameter TypeBuilders now that the owning method has a token.
            if (m_inst != null)
                foreach (GenericTypeParameterBuilder tb in m_inst)
                    if (!tb.m_type.IsCreated())
                        tb.m_type.CreateType();

            TypeBuilder.SetMethodImpl(m_module.GetNativeHandle(), token, m_dwMethodImplFlags);

            return m_tkMethod;
        }

        public void SetParameters (params Type[] parameterTypes)
        {
            CheckContext(parameterTypes);

            SetSignature (null, null, null, parameterTypes, null, null);
        }

        public void SetReturnType (Type returnType)
        {
            CheckContext(returnType);

            SetSignature (returnType, null, null, null, null, null);
        }

        public void SetSignature(
            Type returnType, Type[] returnTypeRequiredCustomModifiers, Type[] returnTypeOptionalCustomModifiers,
            Type[] parameterTypes, Type[][] parameterTypeRequiredCustomModifiers, Type[][] parameterTypeOptionalCustomModifiers)
        {
            // We should throw InvalidOperation_MethodBuilderBaked here if the method signature has been baked.
            // But we cannot because that would be a breaking change from V2.
            // Silently ignored once the method has been tokenized (kept for V2 compatibility; see above).
            if (m_tkMethod.Token != 0)
                return;

            CheckContext(returnType);
            CheckContext(returnTypeRequiredCustomModifiers, returnTypeOptionalCustomModifiers, parameterTypes);
            CheckContext(parameterTypeRequiredCustomModifiers);
            CheckContext(parameterTypeOptionalCustomModifiers);

            ThrowIfGeneric();

            if (returnType != null)
            {
                m_returnType = returnType;
            }

            if (parameterTypes != null)
            {
                // Defensive copy so later caller mutation of the array cannot change the signature.
                m_parameterTypes = new Type[parameterTypes.Length];
                Array.Copy (parameterTypes, 0, m_parameterTypes, 0, parameterTypes.Length);
            }

            m_returnTypeRequiredCustomModifiers = returnTypeRequiredCustomModifiers;
            m_returnTypeOptionalCustomModifiers = returnTypeOptionalCustomModifiers;
            m_parameterTypeRequiredCustomModifiers = parameterTypeRequiredCustomModifiers;
            m_parameterTypeOptionalCustomModifiers = parameterTypeOptionalCustomModifiers;
        }

        // Defines metadata (name/attributes) for the parameter at the given 1-based position;
        // position 0 denotes the return value.
        [System.Security.SecuritySafeCritical]  // auto-generated
        public ParameterBuilder DefineParameter(int position, ParameterAttributes attributes, String strParamName)
        {
            if (position < 0)
                throw new ArgumentOutOfRangeException(Environment.GetResourceString("ArgumentOutOfRange_ParamSequence"));
            Contract.EndContractBlock();

            ThrowIfGeneric();
            m_containingType.ThrowIfCreated ();

            if (position > 0 && (m_parameterTypes == null || position > m_parameterTypes.Length))
                throw new ArgumentOutOfRangeException(Environment.GetResourceString("ArgumentOutOfRange_ParamSequence"));

            // Reserved bits are owned by the runtime and must not be caller-settable.
            attributes = attributes & ~ParameterAttributes.ReservedMask;
            return new ParameterBuilder(this, position, attributes, strParamName);
        }

        [System.Security.SecuritySafeCritical]  // auto-generated
        [Obsolete("An alternate API is available: Emit the MarshalAs custom attribute instead. http://go.microsoft.com/fwlink/?linkid=14202")]
        public void SetMarshal(UnmanagedMarshal unmanagedMarshal)
        {
            ThrowIfGeneric ();

            // set Marshal info for the return type

            m_containingType.ThrowIfCreated();

            if (m_retParam == null)
            {
                m_retParam = new ParameterBuilder(this, 0, 0, null);
            }

            m_retParam.SetMarshal(unmanagedMarshal);
        }

        // Symbol-store custom attributes recorded via SetSymCustomAttribute, flushed to the SymWriter later.
        private List<SymCustomAttr> m_symCustomAttrs;
        private struct SymCustomAttr
        {
            public SymCustomAttr(String name, byte[] data)
            {
                m_name = name;
                m_data = data;
            }
            public String m_name;
            public byte[] m_data;
        }

        public void SetSymCustomAttribute(String name, byte[] data)
        {
            // Note that this API is rarely used. Support for custom attributes in PDB files was added in
            // Whidbey and as of 8/2007 the only known user is the C# compiler. There seems to be little
            // value to this for Reflection.Emit users since they can always use metadata custom attributes.
            // Some versions of the symbol writer used in the CLR will ignore these entirely. This API has
            // been removed from the Silverlight API surface area, but we should also consider removing it
            // from future desktop product versions as well.

            ThrowIfGeneric ();

            // This is different from CustomAttribute. This is stored into the SymWriter.
            m_containingType.ThrowIfCreated();

            ModuleBuilder dynMod = (ModuleBuilder) m_module;
            if ( dynMod.GetSymWriter() == null)
            {
                // Cannot SetSymCustomAttribute when it is not a debug module
                throw new InvalidOperationException(Environment.GetResourceString("InvalidOperation_NotADebugModule"));
            }

            if (m_symCustomAttrs == null)
                m_symCustomAttrs = new List<SymCustomAttr>();

            m_symCustomAttrs.Add(new SymCustomAttr(name, data));
        }

#if FEATURE_CAS_POLICY
        // Attaches a declarative security action/permission-set pair to this method's metadata.
        [System.Security.SecuritySafeCritical]  // auto-generated
        public void AddDeclarativeSecurity(SecurityAction action, PermissionSet pset)
        {
            if (pset == null)
                throw new ArgumentNullException(nameof(pset));
            Contract.EndContractBlock();

            ThrowIfGeneric ();

#pragma warning disable 618
            if (!Enum.IsDefined(typeof(SecurityAction), action) ||
                action == SecurityAction.RequestMinimum ||
                action == SecurityAction.RequestOptional ||
                action == SecurityAction.RequestRefuse)
            {
                throw new ArgumentOutOfRangeException(nameof(action));
            }
#pragma warning restore 618

            // cannot declarative security after type is created
            m_containingType.ThrowIfCreated();

            // Translate permission set into serialized format (uses standard binary serialization format).
            byte[] blob = null;
            int length = 0;
            if (!pset.IsEmpty())
            {
                blob = pset.EncodeXml();
                length = blob.Length;
            }

            // Write the blob into the metadata.
            TypeBuilder.AddDeclarativeSecurity(m_module.GetNativeHandle(), MetadataTokenInternal, action, blob, length);
        }
#endif // FEATURE_CAS_POLICY

        // Installs a complete, pre-built method body (IL, locals, handlers, fixups),
        // replacing and discarding any ILGenerator state.
#if FEATURE_CORECLR
        [System.Security.SecurityCritical] // auto-generated
#endif
        public void SetMethodBody(byte[] il, int maxStack, byte[] localSignature, IEnumerable<ExceptionHandler> exceptionHandlers, IEnumerable<int> tokenFixups)
        {
            if (il == null)
            {
                throw new ArgumentNullException(nameof(il), Environment.GetResourceString("ArgumentNull_Array"));
            }

            if (maxStack < 0)
            {
                throw new ArgumentOutOfRangeException(nameof(maxStack), Environment.GetResourceString("ArgumentOutOfRange_NeedNonNegNum"));
            }
            Contract.EndContractBlock();

            if (m_bIsBaked)
            {
                throw new InvalidOperationException(Environment.GetResourceString("InvalidOperation_MethodBaked"));
            }

            m_containingType.ThrowIfCreated();
            ThrowIfGeneric();

            // All inputs are cloned/validated before any field is assigned, so a failed
            // call leaves the builder unchanged.
            byte[] newLocalSignature = null;
            ExceptionHandler[] newHandlers = null;
            int[] newTokenFixups = null;

            byte[] newIL = (byte[])il.Clone();

            if (localSignature != null)
            {
                newLocalSignature = (byte[])localSignature.Clone();
            }

            if (exceptionHandlers != null)
            {
                newHandlers = ToArray(exceptionHandlers);
                CheckExceptionHandlerRanges(newHandlers, newIL.Length);

                // Note: Fixup entries for type tokens stored in ExceptionHandlers are added by the method body emitter.
            }

            if (tokenFixups != null)
            {
                newTokenFixups = ToArray(tokenFixups);
                // A token occupies 4 bytes, so the last valid fixup offset is length - 4.
                int maxTokenOffset = newIL.Length - 4;

                for (int i = 0; i < newTokenFixups.Length; i++)
                {
                    // Check that fixups are within the range of this method's IL, otherwise some random memory might get "fixed up".
                    if (newTokenFixups[i] < 0 || newTokenFixups[i] > maxTokenOffset)
                    {
                        throw new ArgumentOutOfRangeException("tokenFixups[" + i + "]", Environment.GetResourceString("ArgumentOutOfRange_Range", 0, maxTokenOffset));
                    }
                }
            }

            m_ubBody = newIL;
            m_localSignature = newLocalSignature;
            m_exceptions = newHandlers;
            m_mdMethodFixups = newTokenFixups;
            m_maxStack = maxStack;

            // discard IL generator, all information stored in it is now irrelevant
            m_ilGenerator = null;
            m_bIsBaked = true;
        }

        // Snapshot of a sequence as a fresh array (clones even when the input already is an array).
        private static T[] ToArray<T>(IEnumerable<T> sequence)
        {
            T[] array = sequence as T[];
            if (array != null)
            {
                return (T[])array.Clone();
            }

            return new List<T>(sequence).ToArray();
        }

        private static void CheckExceptionHandlerRanges(ExceptionHandler[] exceptionHandlers, int maxOffset)
        {
            // Basic checks that the handler ranges are within the method body (ranges are end-exclusive).
            // Doesn't verify that the ranges are otherwise correct - it is very well possible to emit invalid IL.
            for (int i = 0; i < exceptionHandlers.Length; i++)
            {
                var handler = exceptionHandlers[i];
                if (handler.m_filterOffset > maxOffset || handler.m_tryEndOffset > maxOffset || handler.m_handlerEndOffset > maxOffset)
                {
                    throw new ArgumentOutOfRangeException("exceptionHandlers[" + i + "]", Environment.GetResourceString("ArgumentOutOfRange_Range", 0, maxOffset));
                }

                // Type token might be 0 if the ExceptionHandler was created via a default constructor.
                // Other tokens might also be invalid. We only check nil tokens as the implementation (SectEH_Emit in corhlpr.cpp) requires it,
                // and we can't check for valid tokens until the module is baked.
                if (handler.Kind == ExceptionHandlingClauseOptions.Clause && handler.ExceptionTypeToken == 0)
                {
                    throw new ArgumentException(Environment.GetResourceString("Argument_InvalidTypeToken", handler.ExceptionTypeToken), "exceptionHandlers[" + i + "]");
                }
            }
        }

        /// <summary>
        /// Obsolete.
        /// </summary>
#if FEATURE_CORECLR
        [System.Security.SecurityCritical] // auto-generated
#endif
        public void CreateMethodBody(byte[] il, int count)
        {
            ThrowIfGeneric();

            // Note that when user calls this function, there are a few information that client is
            // not able to supply: local signature, exception handlers, max stack size, a list of Token fixup, a list of RVA fixup

            if (m_bIsBaked)
            {
                throw new InvalidOperationException(Environment.GetResourceString("InvalidOperation_MethodBaked"));
            }

            m_containingType.ThrowIfCreated();

            if (il != null && (count < 0 || count > il.Length))
            {
                throw new ArgumentOutOfRangeException(nameof(count), Environment.GetResourceString("ArgumentOutOfRange_Index"));
            }

            // A null IL array clears any existing body without baking the method.
            if (il == null)
            {
                m_ubBody = null;
                return;
            }

            m_ubBody = new byte[count];
            Buffer.BlockCopy(il, 0, m_ubBody, 0, count);

            m_localSignature = null;
            m_exceptions = null;
            m_mdMethodFixups = null;
            m_maxStack = DefaultMaxStack;

            m_bIsBaked = true;
        }

        // Sets implementation flags and pushes them straight into the method's metadata.
        [System.Security.SecuritySafeCritical]  // auto-generated
        public void SetImplementationFlags(MethodImplAttributes attributes)
        {
            ThrowIfGeneric ();

            m_containingType.ThrowIfCreated ();

            m_dwMethodImplFlags = attributes;

            m_canBeRuntimeImpl = true;

            TypeBuilder.SetMethodImpl(m_module.GetNativeHandle(), MetadataTokenInternal, attributes);
        }

        // Lazily creates the ILGenerator; repeated calls return the same instance.
        public ILGenerator GetILGenerator()
        {
            Contract.Ensures(Contract.Result<ILGenerator>() != null);

            ThrowIfGeneric();
            ThrowIfShouldNotHaveBody();

            if (m_ilGenerator == null)
                m_ilGenerator = new ILGenerator(this);
            return m_ilGenerator;
        }

        // Overload with an initial IL stream capacity hint.
        public ILGenerator GetILGenerator(int size)
        {
            Contract.Ensures(Contract.Result<ILGenerator>() != null);

            ThrowIfGeneric ();
            ThrowIfShouldNotHaveBody();

            if (m_ilGenerator == null)
                m_ilGenerator = new ILGenerator(this, size);
            return m_ilGenerator;
        }

        // Methods that are not managed IL (native/unmanaged/P-Invoke/DllImport) must not get a body.
        private void ThrowIfShouldNotHaveBody()
        {
            if ((m_dwMethodImplFlags & MethodImplAttributes.CodeTypeMask) != MethodImplAttributes.IL ||
                (m_dwMethodImplFlags & MethodImplAttributes.Unmanaged) != 0 ||
                (m_iAttributes & MethodAttributes.PinvokeImpl) != 0 ||
                m_isDllImport)
            {
                // cannot attach method body if methodimpl is marked not marked as managed IL
                //
                throw new InvalidOperationException(Environment.GetResourceString("InvalidOperation_ShouldNotHaveMethodBody"));
            }
        }

        public bool InitLocals
        {
            // Property is set to true if user wishes to have zero initialized stack frame for this method. Default to false.
            get { ThrowIfGeneric (); return m_fInitLocals; }
            set { ThrowIfGeneric (); m_fInitLocals = value; }
        }

        public Module GetModule()
        {
            return GetModuleBuilder();
        }

        public String Signature
        {
            [System.Security.SecuritySafeCritical]  // auto-generated
            get
            {
                return GetMethodSignature().ToString();
            }
        }

#if FEATURE_CORECLR
        [System.Security.SecurityCritical] // auto-generated
#else
        [System.Security.SecuritySafeCritical]
#endif
        [System.Runtime.InteropServices.ComVisible(true)]
        public void SetCustomAttribute(ConstructorInfo con, byte[] binaryAttribute)
        {
            if (con == null)
                throw new ArgumentNullException(nameof(con));
            if (binaryAttribute == null)
                throw new ArgumentNullException(nameof(binaryAttribute));
            Contract.EndContractBlock();

            ThrowIfGeneric();

            TypeBuilder.DefineCustomAttribute(m_module, MetadataTokenInternal,
                ((ModuleBuilder)m_module).GetConstructorToken(con).Token,
                binaryAttribute,
                false, false);

            // Some attributes (MethodImpl, DllImport) also change builder state; handle them here.
            if (IsKnownCA(con))
                ParseCA(con, binaryAttribute);
        }

        [System.Security.SecuritySafeCritical]  // auto-generated
        public void SetCustomAttribute(CustomAttributeBuilder customBuilder)
        {
            if (customBuilder == null)
                throw new ArgumentNullException(nameof(customBuilder));
            Contract.EndContractBlock();

            ThrowIfGeneric();

            customBuilder.CreateCustomAttribute((ModuleBuilder)m_module, MetadataTokenInternal);

            if (IsKnownCA(customBuilder.m_con))
                ParseCA(customBuilder.m_con, customBuilder.m_blob);
        }

        // this method should return true for any and every ca that requires more work
        // than just setting the ca
        private bool IsKnownCA(ConstructorInfo con)
        {
            Type caType = con.DeclaringType;
            if (caType == typeof(System.Runtime.CompilerServices.MethodImplAttribute))
                return true;
            else if (caType == typeof(DllImportAttribute))
                return true;
            else
                return false;
        }

        private void ParseCA(ConstructorInfo con, byte[] blob)
        {
            Type caType = con.DeclaringType;
            if (caType == typeof(System.Runtime.CompilerServices.MethodImplAttribute))
            {
                // dig through the blob looking for the MethodImplAttributes flag
                // that must be in the MethodCodeType field

                // for now we simply set a flag that relaxes the check when saving and
                // allows this method to have no body when any kind of MethodImplAttribute is present
                m_canBeRuntimeImpl = true;
            }
            else if (caType == typeof(DllImportAttribute))
            {
                m_canBeRuntimeImpl = true;
                m_isDllImport = true;
            }
        }

        internal bool m_canBeRuntimeImpl = false;
        internal bool m_isDllImport = false;

        #endregion

#if !FEATURE_CORECLR
        // Classic-COM plumbing for the _MethodBuilder interface; never implemented in managed code.
        void _MethodBuilder.GetTypeInfoCount(out uint pcTInfo)
        {
            throw new NotImplementedException();
        }

        void _MethodBuilder.GetTypeInfo(uint iTInfo, uint lcid, IntPtr ppTInfo)
        {
            throw new NotImplementedException();
        }

        void _MethodBuilder.GetIDsOfNames([In] ref Guid riid, IntPtr rgszNames, uint cNames, uint lcid, IntPtr rgDispId)
        {
            throw new NotImplementedException();
        }

        void _MethodBuilder.Invoke(uint dispIdMember, [In] ref Guid riid, uint lcid, short wFlags, IntPtr pDispParams, IntPtr pVarResult, IntPtr pExcepInfo, IntPtr puArgErr)
        {
            throw new NotImplementedException();
        }
#endif
    }

    internal class LocalSymInfo
    {
        // This class tracks the local variable's debugging information
        // and namespace information with a given active lexical scope.
        #region Internal Data Members
        // Parallel arrays, one entry per declared local; m_iLocalSymCount is the live count.
        internal String[]       m_strName;
        internal byte[][]       m_ubSignature;
        internal int[]          m_iLocalSlot;
        internal int[]          m_iStartOffset;
        internal int[]          m_iEndOffset;
        internal int            m_iLocalSymCount;         // how many entries in the arrays are occupied
        internal String[]       m_namespace;
        internal int            m_iNameSpaceCount;
        internal const int      InitialSize = 16;
        #endregion

        #region Constructor
        internal LocalSymInfo()
        {
            // initialize data variables
            m_iLocalSymCount = 0;
            m_iNameSpaceCount = 0;
        }
        #endregion

        #region Private Members
        // Grows (doubling) the namespace array on demand; allocates it on first use.
        private void EnsureCapacityNamespace()
        {
            if (m_iNameSpaceCount == 0)
            {
                m_namespace = new String[InitialSize];
            }
            else if (m_iNameSpaceCount == m_namespace.Length)
            {
                String [] strTemp = new String [checked(m_iNameSpaceCount * 2)];
                Array.Copy(m_namespace, 0, strTemp, 0, m_iNameSpaceCount);
                m_namespace = strTemp;
            }
        }

        // Grows all five parallel local-symbol arrays in lock-step.
        private void EnsureCapacity()
        {
            if (m_iLocalSymCount == 0)
            {
                // First time. Allocate the arrays.
                m_strName = new String[InitialSize];
                m_ubSignature = new byte[InitialSize][];
                m_iLocalSlot = new int[InitialSize];
                m_iStartOffset = new int[InitialSize];
                m_iEndOffset = new int[InitialSize];
            }
            else if (m_iLocalSymCount == m_strName.Length)
            {
                // the arrays are full. Enlarge the arrays
                // why aren't we just using lists here?
                int newSize = checked(m_iLocalSymCount * 2);
                int[] temp = new int [newSize];
                Array.Copy(m_iLocalSlot, 0, temp, 0, m_iLocalSymCount);
                m_iLocalSlot = temp;

                temp = new int [newSize];
                Array.Copy(m_iStartOffset, 0, temp, 0, m_iLocalSymCount);
                m_iStartOffset = temp;

                temp = new int [newSize];
                Array.Copy(m_iEndOffset, 0, temp, 0, m_iLocalSymCount);
                m_iEndOffset = temp;

                String [] strTemp = new String [newSize];
                Array.Copy(m_strName, 0, strTemp, 0, m_iLocalSymCount);
                m_strName = strTemp;

                byte[][] ubTemp = new byte[newSize][];
                Array.Copy(m_ubSignature, 0, ubTemp, 0, m_iLocalSymCount);
                m_ubSignature = ubTemp;
            }
        }
        #endregion

        #region Internal Members
        // Records one local variable's debug info (name, signature, slot, IL live range).
        internal void AddLocalSymInfo(String strName,byte[] signature,int slot,int startOffset,int endOffset)
        {
            // make sure that arrays are large enough to hold addition info
            EnsureCapacity();
            m_iStartOffset[m_iLocalSymCount] = startOffset;
            m_iEndOffset[m_iLocalSymCount] = endOffset;
            m_iLocalSlot[m_iLocalSymCount] = slot;
            m_strName[m_iLocalSymCount] = strName;
            m_ubSignature[m_iLocalSymCount] = signature;
            checked {m_iLocalSymCount++; }
        }

        // Records a "using namespace" entry for this lexical scope.
        internal void AddUsingNamespace(String strNamespace)
        {
            EnsureCapacityNamespace();
            m_namespace[m_iNameSpaceCount] = strNamespace;
            checked { m_iNameSpaceCount++; }
        }

        // Flushes all recorded locals and namespaces to the symbol writer.
#if FEATURE_CORECLR
        [System.Security.SecurityCritical] // auto-generated
#endif
        internal virtual void EmitLocalSymInfo(ISymbolWriter symWriter)
        {
            int         i;

            for (i = 0; i < m_iLocalSymCount; i ++)
            {
                symWriter.DefineLocalVariable(
                            m_strName[i],
                            FieldAttributes.PrivateScope,
                            m_ubSignature[i],
                            SymAddressKind.ILOffset,
                            m_iLocalSlot[i],
                            0,          // addr2 is not used yet
                            0,          // addr3 is not used
                            m_iStartOffset[i],
                            m_iEndOffset[i]);
            }
            for (i = 0; i < m_iNameSpaceCount; i ++)
            {
                symWriter.UsingNamespace(m_namespace[i]);
            }
        }
        #endregion
    }

    /// <summary>
    /// Describes exception handler in a method body.
/// </summary> [StructLayout(LayoutKind.Sequential)] [ComVisible(false)] public struct ExceptionHandler : IEquatable<ExceptionHandler> { // Keep in sync with unmanged structure. internal readonly int m_exceptionClass; internal readonly int m_tryStartOffset; internal readonly int m_tryEndOffset; internal readonly int m_filterOffset; internal readonly int m_handlerStartOffset; internal readonly int m_handlerEndOffset; internal readonly ExceptionHandlingClauseOptions m_kind; public int ExceptionTypeToken { get { return m_exceptionClass; } } public int TryOffset { get { return m_tryStartOffset; } } public int TryLength { get { return m_tryEndOffset - m_tryStartOffset; } } public int FilterOffset { get { return m_filterOffset; } } public int HandlerOffset { get { return m_handlerStartOffset; } } public int HandlerLength { get { return m_handlerEndOffset - m_handlerStartOffset; } } public ExceptionHandlingClauseOptions Kind { get { return m_kind; } } #region Constructors /// <summary> /// Creates a description of an exception handler. /// </summary> /// <param name="tryOffset">The offset of the first instruction protected by this handler.</param> /// <param name="tryLength">The number of bytes protected by this handler.</param> /// <param name="filterOffset">The filter code begins at the specified offset and ends at the first instruction of the handler block. Specify 0 if not applicable (this is not a filter handler).</param> /// <param name="handlerOffset">The offset of the first instruction of this handler.</param> /// <param name="handlerLength">The number of bytes of the handler.</param> /// <param name="kind">The kind of handler, the handler might be a catch handler, filter handler, fault handler, or finally handler.</param> /// <param name="exceptionTypeToken">The token of the exception type handled by this handler. 
Specify 0 if not applicable (this is finally handler).</param> /// <exception cref="ArgumentOutOfRangeException"> /// Some of the instruction offset is negative, /// the end offset of specified range is less than its start offset, /// or <paramref name="kind"/> has an invalid value. /// </exception> public ExceptionHandler(int tryOffset, int tryLength, int filterOffset, int handlerOffset, int handlerLength, ExceptionHandlingClauseOptions kind, int exceptionTypeToken) { if (tryOffset < 0) { throw new ArgumentOutOfRangeException(nameof(tryOffset), Environment.GetResourceString("ArgumentOutOfRange_NeedNonNegNum")); } if (tryLength < 0) { throw new ArgumentOutOfRangeException(nameof(tryLength), Environment.GetResourceString("ArgumentOutOfRange_NeedNonNegNum")); } if (filterOffset < 0) { throw new ArgumentOutOfRangeException(nameof(filterOffset), Environment.GetResourceString("ArgumentOutOfRange_NeedNonNegNum")); } if (handlerOffset < 0) { throw new ArgumentOutOfRangeException(nameof(handlerOffset), Environment.GetResourceString("ArgumentOutOfRange_NeedNonNegNum")); } if (handlerLength < 0) { throw new ArgumentOutOfRangeException(nameof(handlerLength), Environment.GetResourceString("ArgumentOutOfRange_NeedNonNegNum")); } if ((long)tryOffset + tryLength > Int32.MaxValue) { throw new ArgumentOutOfRangeException(nameof(tryLength), Environment.GetResourceString("ArgumentOutOfRange_Range", 0, Int32.MaxValue - tryOffset)); } if ((long)handlerOffset + handlerLength > Int32.MaxValue) { throw new ArgumentOutOfRangeException(nameof(handlerLength), Environment.GetResourceString("ArgumentOutOfRange_Range", 0, Int32.MaxValue - handlerOffset)); } // Other tokens migth also be invalid. We only check nil tokens as the implementation (SectEH_Emit in corhlpr.cpp) requires it, // and we can't check for valid tokens until the module is baked. 
if (kind == ExceptionHandlingClauseOptions.Clause && (exceptionTypeToken & 0x00FFFFFF) == 0) { throw new ArgumentException(Environment.GetResourceString("Argument_InvalidTypeToken", exceptionTypeToken), nameof(exceptionTypeToken)); } Contract.EndContractBlock(); if (!IsValidKind(kind)) { throw new ArgumentOutOfRangeException(nameof(kind), Environment.GetResourceString("ArgumentOutOfRange_Enum")); } m_tryStartOffset = tryOffset; m_tryEndOffset = tryOffset + tryLength; m_filterOffset = filterOffset; m_handlerStartOffset = handlerOffset; m_handlerEndOffset = handlerOffset + handlerLength; m_kind = kind; m_exceptionClass = exceptionTypeToken; } internal ExceptionHandler(int tryStartOffset, int tryEndOffset, int filterOffset, int handlerStartOffset, int handlerEndOffset, int kind, int exceptionTypeToken) { Contract.Assert(tryStartOffset >= 0); Contract.Assert(tryEndOffset >= 0); Contract.Assert(filterOffset >= 0); Contract.Assert(handlerStartOffset >= 0); Contract.Assert(handlerEndOffset >= 0); Contract.Assert(IsValidKind((ExceptionHandlingClauseOptions)kind)); Contract.Assert(kind != (int)ExceptionHandlingClauseOptions.Clause || (exceptionTypeToken & 0x00FFFFFF) != 0); m_tryStartOffset = tryStartOffset; m_tryEndOffset = tryEndOffset; m_filterOffset = filterOffset; m_handlerStartOffset = handlerStartOffset; m_handlerEndOffset = handlerEndOffset; m_kind = (ExceptionHandlingClauseOptions)kind; m_exceptionClass = exceptionTypeToken; } private static bool IsValidKind(ExceptionHandlingClauseOptions kind) { switch (kind) { case ExceptionHandlingClauseOptions.Clause: case ExceptionHandlingClauseOptions.Filter: case ExceptionHandlingClauseOptions.Finally: case ExceptionHandlingClauseOptions.Fault: return true; default: return false; } } #endregion #region Equality public override int GetHashCode() { return m_exceptionClass ^ m_tryStartOffset ^ m_tryEndOffset ^ m_filterOffset ^ m_handlerStartOffset ^ m_handlerEndOffset ^ (int)m_kind; } public override bool Equals(Object obj) { 
return obj is ExceptionHandler && Equals((ExceptionHandler)obj); } public bool Equals(ExceptionHandler other) { return other.m_exceptionClass == m_exceptionClass && other.m_tryStartOffset == m_tryStartOffset && other.m_tryEndOffset == m_tryEndOffset && other.m_filterOffset == m_filterOffset && other.m_handlerStartOffset == m_handlerStartOffset && other.m_handlerEndOffset == m_handlerEndOffset && other.m_kind == m_kind; } public static bool operator ==(ExceptionHandler left, ExceptionHandler right) { return left.Equals(right); } public static bool operator !=(ExceptionHandler left, ExceptionHandler right) { return !left.Equals(right); } #endregion } }
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections;
using System.ComponentModel;
using System.Diagnostics;
using System.Reflection;

namespace System.Configuration
{
    /// <summary>
    /// Base settings class for client applications.
    /// Reflects over the derived type's properties (those carrying a SettingAttribute),
    /// builds the SettingsProperty/SettingsProvider collections lazily, and raises
    /// change/load/save notification events around the SettingsBase plumbing.
    /// </summary>
    public abstract class ApplicationSettingsBase : SettingsBase, INotifyPropertyChanged
    {
        // True once a SettingsSerializeAsAttribute was seen at class level;
        // used as the starting value for each property's serialization decision.
        private bool _explicitSerializeOnClass = false;
        // Custom attributes found on the derived settings class (set in EnsureInitialized).
        private object[] _classAttributes;
        // Optional "owner" component supplied by the IComponent constructor overloads.
        private readonly IComponent _owner;
        private PropertyChangedEventHandler _onPropertyChanged;
        // Lazily-created context passed to providers; also mutated by the SettingsKey setter.
        private SettingsContext _context;
        // Cached template property returned by the Initializer property.
        private SettingsProperty _init;
        private SettingsPropertyCollection _settings;
        private SettingsProviderCollection _providers;
        private SettingChangingEventHandler _onSettingChanging;
        private SettingsLoadedEventHandler _onSettingsLoaded;
        private SettingsSavingEventHandler _onSettingsSaving;
        private string _settingsKey = string.Empty;
        // Guards the one-time Upgrade() call for first-run ClickOnce apps (see GetPropertyValue).
        private bool _firstLoad = true;
        private bool _initialized = false;

        /// <summary>
        /// Default constructor without a concept of "owner" component.
        /// </summary>
        protected ApplicationSettingsBase() : base()
        {
        }

        /// <summary>
        /// Constructor that takes an IComponent. The IComponent acts as the "owner" of this settings class. One
        /// of the things we do is query the component's site to see if it has a SettingsProvider service. If it
        /// does, we allow it to override the providers specified in the metadata.
        /// </summary>
        protected ApplicationSettingsBase(IComponent owner) : this(owner, string.Empty)
        {
        }

        /// <summary>
        /// Convenience overload that takes the settings key
        /// </summary>
        protected ApplicationSettingsBase(string settingsKey)
        {
            _settingsKey = settingsKey;
        }

        /// <summary>
        /// Convenience overload that takes the owner component and settings key.
        /// </summary>
        protected ApplicationSettingsBase(IComponent owner, string settingsKey) : this(settingsKey)
        {
            if (owner == null)
            {
                throw new ArgumentNullException(nameof(owner));
            }

            _owner = owner;

            if (owner.Site != null)
            {
                ISettingsProviderService providerService = owner.Site.GetService(typeof(ISettingsProviderService)) as ISettingsProviderService;
                if (providerService != null)
                {
                    // The component's site has a settings provider service. We pass each SettingsProperty to it
                    // to see if it wants to override the current provider.
                    foreach (SettingsProperty property in Properties)
                    {
                        SettingsProvider provider = providerService.GetSettingsProvider(property);
                        if (provider != null)
                        {
                            property.Provider = provider;
                        }
                    }

                    // Providers may have changed per-property above, so rebuild the provider collection.
                    ResetProviders();
                }
            }
        }

        /// <summary>
        /// The Context to pass on to the provider. Currently, this will just contain the settings group name.
        /// </summary>
        [Browsable(false)]
        public override SettingsContext Context
        {
            get
            {
                if (_context == null)
                {
                    if (IsSynchronized)
                    {
                        // Double-checked lazy initialization keyed on IsSynchronized.
                        // NOTE(review): locks on 'this', matching SettingsBase's synchronization
                        // convention — confirm before changing to a private gate object.
                        lock (this)
                        {
                            if (_context == null)
                            {
                                _context = new SettingsContext();
                                EnsureInitialized();
                            }
                        }
                    }
                    else
                    {
                        _context = new SettingsContext();
                        EnsureInitialized();
                    }
                }

                return _context;
            }
        }

        /// <summary>
        /// The SettingsBase class queries this to get the collection of SettingsProperty objects. We reflect over
        /// the properties defined on the current object's type and use the metadata on those properties to form
        /// this collection.
        /// </summary>
        [Browsable(false)]
        public override SettingsPropertyCollection Properties
        {
            get
            {
                if (_settings == null)
                {
                    if (IsSynchronized)
                    {
                        // Same double-checked lazy-init pattern as Context.
                        lock (this)
                        {
                            if (_settings == null)
                            {
                                _settings = new SettingsPropertyCollection();
                                EnsureInitialized();
                            }
                        }
                    }
                    else
                    {
                        _settings = new SettingsPropertyCollection();
                        EnsureInitialized();
                    }
                }

                return _settings;
            }
        }

        /// <summary>
        /// Just overriding to add attributes.
        /// </summary>
        [Browsable(false)]
        public override SettingsPropertyValueCollection PropertyValues
        {
            get
            {
                return base.PropertyValues;
            }
        }

        /// <summary>
        /// Provider collection
        /// </summary>
        [Browsable(false)]
        public override SettingsProviderCollection Providers
        {
            get
            {
                if (_providers == null)
                {
                    if (IsSynchronized)
                    {
                        // Same double-checked lazy-init pattern as Context/Properties.
                        lock (this)
                        {
                            if (_providers == null)
                            {
                                _providers = new SettingsProviderCollection();
                                EnsureInitialized();
                            }
                        }
                    }
                    else
                    {
                        _providers = new SettingsProviderCollection();
                        EnsureInitialized();
                    }
                }

                return _providers;
            }
        }

        /// <summary>
        /// Derived classes should use this to uniquely identify separate instances of settings classes.
        /// </summary>
        [Browsable(false)]
        public string SettingsKey
        {
            get
            {
                return _settingsKey;
            }
            set
            {
                _settingsKey = value;
                // Keep the provider-visible context in sync with the new key.
                Context["SettingsKey"] = _settingsKey;
            }
        }

        /// <summary>
        /// Fires when the value of a setting is changed. (INotifyPropertyChanged implementation.)
        /// </summary>
        public event PropertyChangedEventHandler PropertyChanged
        {
            add
            {
                _onPropertyChanged += value;
            }
            remove
            {
                _onPropertyChanged -= value;
            }
        }

        /// <summary>
        /// Fires when the value of a setting is about to change. This is a cancellable event.
        /// </summary>
        public event SettingChangingEventHandler SettingChanging
        {
            add
            {
                _onSettingChanging += value;
            }
            remove
            {
                _onSettingChanging -= value;
            }
        }

        /// <summary>
        /// Fires when settings are retrieved from a provider. It fires once for each provider.
        /// </summary>
        public event SettingsLoadedEventHandler SettingsLoaded
        {
            add
            {
                _onSettingsLoaded += value;
            }
            remove
            {
                _onSettingsLoaded -= value;
            }
        }

        /// <summary>
        /// Fires when Save() is called. This is a cancellable event.
        /// </summary>
        public event SettingsSavingEventHandler SettingsSaving
        {
            add
            {
                _onSettingsSaving += value;
            }
            remove
            {
                _onSettingsSaving -= value;
            }
        }

        /// <summary>
        /// Used in conjunction with Upgrade - retrieves the previous value of a setting from the provider.
        /// Provider must implement IApplicationSettingsProvider to support this.
        /// Returns null when the provider does not support it or has no previous value.
        /// </summary>
        public object GetPreviousVersion(string propertyName)
        {
            if (Properties.Count == 0)
                throw new SettingsPropertyNotFoundException();

            SettingsProperty sp = Properties[propertyName];
            SettingsPropertyValue value = null;

            if (sp == null)
                throw new SettingsPropertyNotFoundException();

            IApplicationSettingsProvider clientProv = sp.Provider as IApplicationSettingsProvider;
            if (clientProv != null)
            {
                value = clientProv.GetPreviousVersion(Context, sp);
            }

            if (value != null)
            {
                return value.PropertyValue;
            }

            return null;
        }

        /// <summary>
        /// Fires the PropertyChanged event.
        /// </summary>
        protected virtual void OnPropertyChanged(object sender, PropertyChangedEventArgs e)
        {
            _onPropertyChanged?.Invoke(this, e);
        }

        /// <summary>
        /// Fires the SettingChanging event.
        /// </summary>
        protected virtual void OnSettingChanging(object sender, SettingChangingEventArgs e)
        {
            _onSettingChanging?.Invoke(this, e);
        }

        /// <summary>
        /// Fires the SettingsLoaded event.
        /// </summary>
        protected virtual void OnSettingsLoaded(object sender, SettingsLoadedEventArgs e)
        {
            _onSettingsLoaded?.Invoke(this, e);
        }

        /// <summary>
        /// Fires the SettingsSaving event.
        /// </summary>
        protected virtual void OnSettingsSaving(object sender, CancelEventArgs e)
        {
            _onSettingsSaving?.Invoke(this, e);
        }

        /// <summary>
        /// Causes a reload to happen on next setting access, by clearing the cached values.
        /// Also raises PropertyChanged for every known setting so bound UI refreshes.
        /// </summary>
        public void Reload()
        {
            if (PropertyValues != null)
            {
                PropertyValues.Clear();
            }

            foreach (SettingsProperty sp in Properties)
            {
                PropertyChangedEventArgs pe = new PropertyChangedEventArgs(sp.Name);
                OnPropertyChanged(this, pe);
            }
        }

        /// <summary>
        /// Calls Reset on the providers.
        /// Providers must implement IApplicationSettingsProvider to support this.
        /// </summary>
        public void Reset()
        {
            if (Properties != null)
            {
                foreach (SettingsProvider provider in Providers)
                {
                    IApplicationSettingsProvider clientProv = provider as IApplicationSettingsProvider;
                    if (clientProv != null)
                    {
                        clientProv.Reset(Context);
                    }
                }
            }

            Reload();
        }

        /// <summary>
        /// Overridden from SettingsBase to support validation event.
        /// SettingsSaving handlers may cancel the save.
        /// </summary>
        public override void Save()
        {
            CancelEventArgs e = new CancelEventArgs(false);
            OnSettingsSaving(this, e);
            if (!e.Cancel)
            {
                base.Save();
            }
        }

        /// <summary>
        /// Overridden from SettingsBase to support validation event.
        /// The getter fires SettingsLoaded on first retrieval (via GetPropertyValue);
        /// the setter fires the cancellable SettingChanging before writing, then PropertyChanged.
        /// </summary>
        public override object this[string propertyName]
        {
            get
            {
                if (IsSynchronized)
                {
                    lock (this)
                    {
                        return GetPropertyValue(propertyName);
                    }
                }
                else
                {
                    return GetPropertyValue(propertyName);
                }
            }
            set
            {
                SettingChangingEventArgs e = new SettingChangingEventArgs(propertyName, this.GetType().FullName, SettingsKey, value, false);
                OnSettingChanging(this, e);
                if (!e.Cancel)
                {
                    base[propertyName] = value;
                    // CONSIDER: Should we call this even if canceled?
                    PropertyChangedEventArgs pe = new PropertyChangedEventArgs(propertyName);
                    OnPropertyChanged(this, pe);
                }
            }
        }

        /// <summary>
        /// Called when the app is upgraded so that we can instruct the providers to upgrade their settings.
        /// Providers must implement IApplicationSettingsProvider to support this.
        /// </summary>
        public virtual void Upgrade()
        {
            if (Properties != null)
            {
                foreach (SettingsProvider provider in Providers)
                {
                    IApplicationSettingsProvider clientProv = provider as IApplicationSettingsProvider;
                    if (clientProv != null)
                    {
                        clientProv.Upgrade(Context, GetPropertiesForProvider(provider));
                    }
                }
            }

            Reload();
        }

        /// <summary>
        /// Creates a SettingsProperty object using the metadata on the given property
        /// and returns it.
        /// Recognized attributes: DefaultSettingValueAttribute, ReadOnlyAttribute,
        /// SettingsProviderAttribute and SettingsSerializeAsAttribute; anything else
        /// is passed through in the property's Attributes collection.
        /// </summary>
        private SettingsProperty CreateSetting(PropertyInfo propertyInfo)
        {
            // Initialization method -
            // be careful not to access properties here to prevent stack overflow.

            object[] attributes = propertyInfo.GetCustomAttributes(false);
            SettingsProperty settingsProperty = new SettingsProperty(Initializer);
            bool explicitSerialize = _explicitSerializeOnClass;

            settingsProperty.Name = propertyInfo.Name;
            settingsProperty.PropertyType = propertyInfo.PropertyType;

            for (int i = 0; i < attributes.Length; i++)
            {
                Attribute attribute = attributes[i] as Attribute;
                if (attribute == null)
                    continue;

                if (attribute is DefaultSettingValueAttribute)
                {
                    settingsProperty.DefaultValue = ((DefaultSettingValueAttribute)attribute).Value;
                }
                else if (attribute is ReadOnlyAttribute)
                {
                    settingsProperty.IsReadOnly = true;
                }
                else if (attribute is SettingsProviderAttribute)
                {
                    string providerTypeName = ((SettingsProviderAttribute)attribute).ProviderTypeName;
                    Type providerType = Type.GetType(providerTypeName);
                    if (providerType == null)
                    {
                        throw new ConfigurationErrorsException(SR.Format(SR.ProviderTypeLoadFailed, providerTypeName));
                    }

                    SettingsProvider settingsProvider = TypeUtil.CreateInstance(providerType) as SettingsProvider;
                    if (settingsProvider == null)
                    {
                        throw new ConfigurationErrorsException(SR.Format(SR.ProviderInstantiationFailed, providerTypeName));
                    }

                    settingsProvider.Initialize(null, null);
                    settingsProvider.ApplicationName = ConfigurationManagerInternalFactory.Instance.ExeProductName;

                    // See if we already have a provider of the same name in our collection. If so,
                    // re-use the existing instance, since we cannot have multiple providers of the same name.
                    SettingsProvider existing = _providers[settingsProvider.Name];
                    if (existing != null)
                    {
                        settingsProvider = existing;
                    }

                    settingsProperty.Provider = settingsProvider;
                }
                else if (attribute is SettingsSerializeAsAttribute)
                {
                    settingsProperty.SerializeAs = ((SettingsSerializeAsAttribute)attribute).SerializeAs;
                    explicitSerialize = true;
                }
                else
                {
                    // This isn't an attribute we care about, so simply pass it on
                    // to the SettingsProvider.
                    //
                    // NOTE: The key is the type. So if an attribute was found at class
                    // level and also property level, the latter overrides the former
                    // for a given setting. This is exactly the behavior we want.

                    settingsProperty.Attributes.Add(attribute.GetType(), attribute);
                }
            }

            if (!explicitSerialize)
            {
                // Serialization method was not explicitly attributed.

                TypeConverter tc = TypeDescriptor.GetConverter(propertyInfo.PropertyType);
                if (tc.CanConvertTo(typeof(string)) && tc.CanConvertFrom(typeof(string)))
                {
                    // We can use string
                    settingsProperty.SerializeAs = SettingsSerializeAs.String;
                }
                else
                {
                    // Fallback is Xml
                    settingsProperty.SerializeAs = SettingsSerializeAs.Xml;
                }
            }

            return settingsProperty;
        }

        /// <summary>
        /// Ensures this class is initialized. Initialization involves reflecting over properties and building
        /// a list of SettingsProperty's.
        /// Populates _context, _settings and _providers; safe to call more than once
        /// (guarded by _initialized).
        /// </summary>
        private void EnsureInitialized()
        {
            // Initialization method -
            // be careful not to access properties here to prevent stack overflow.

            if (!_initialized)
            {
                _initialized = true;

                Type type = GetType();

                if (_context == null)
                {
                    _context = new SettingsContext();
                }

                _context["GroupName"] = type.FullName;
                _context["SettingsKey"] = SettingsKey;
                _context["SettingsClassType"] = type;

                PropertyInfo[] properties = SettingsFilter(type.GetProperties(BindingFlags.Instance | BindingFlags.Public));
                _classAttributes = type.GetCustomAttributes(false);

                if (_settings == null)
                {
                    _settings = new SettingsPropertyCollection();
                }

                if (_providers == null)
                {
                    _providers = new SettingsProviderCollection();
                }

                for (int i = 0; i < properties.Length; i++)
                {
                    SettingsProperty sp = CreateSetting(properties[i]);
                    if (sp != null)
                    {
                        _settings.Add(sp);

                        if (sp.Provider != null && _providers[sp.Provider.Name] == null)
                        {
                            _providers.Add(sp.Provider);
                        }
                    }
                }
            }
        }

        /// <summary>
        /// Returns a SettingsProperty used to initialize settings. We initialize a setting with values
        /// derived from class level attributes, if present. Otherwise, we initialize to
        /// reasonable defaults.
        /// </summary>
        private SettingsProperty Initializer
        {
            // Initialization method -
            // be careful not to access properties here to prevent stack overflow.

            get
            {
                if (_init == null)
                {
                    _init = new SettingsProperty("");
                    _init.DefaultValue = null;
                    _init.IsReadOnly = false;
                    _init.PropertyType = null;

                    // LocalFileSettingsProvider is the default unless a class-level
                    // SettingsProviderAttribute names a different provider type.
                    SettingsProvider provider = new LocalFileSettingsProvider();

                    if (_classAttributes != null)
                    {
                        for (int i = 0; i < _classAttributes.Length; i++)
                        {
                            Attribute attr = _classAttributes[i] as Attribute;
                            if (attr != null)
                            {
                                if (attr is ReadOnlyAttribute)
                                {
                                    _init.IsReadOnly = true;
                                }
                                else if (attr is SettingsGroupNameAttribute)
                                {
                                    if (_context == null)
                                    {
                                        _context = new SettingsContext();
                                    }
                                    _context["GroupName"] = ((SettingsGroupNameAttribute)attr).GroupName;
                                }
                                else if (attr is SettingsProviderAttribute)
                                {
                                    string providerTypeName = ((SettingsProviderAttribute)attr).ProviderTypeName;
                                    Type providerType = Type.GetType(providerTypeName);
                                    if (providerType != null)
                                    {
                                        SettingsProvider spdr = TypeUtil.CreateInstance(providerType) as SettingsProvider;
                                        if (spdr != null)
                                        {
                                            provider = spdr;
                                        }
                                        else
                                        {
                                            throw new ConfigurationErrorsException(SR.Format(SR.ProviderInstantiationFailed, providerTypeName));
                                        }
                                    }
                                    else
                                    {
                                        throw new ConfigurationErrorsException(SR.Format(SR.ProviderTypeLoadFailed, providerTypeName));
                                    }
                                }
                                else if (attr is SettingsSerializeAsAttribute)
                                {
                                    _init.SerializeAs = ((SettingsSerializeAsAttribute)attr).SerializeAs;
                                    _explicitSerializeOnClass = true;
                                }
                                else
                                {
                                    // This isn't an attribute we care about, so simply pass it on
                                    // to the SettingsProvider.
                                    // NOTE: The key is the type. So if an attribute was found at class
                                    // level and also property level, the latter overrides the former
                                    // for a given setting. This is exactly the behavior we want.

                                    _init.Attributes.Add(attr.GetType(), attr);
                                }
                            }
                        }
                    }

                    //Initialize the SettingsProvider
                    provider.Initialize(null, null);
                    provider.ApplicationName = ConfigurationManagerInternalFactory.Instance.ExeProductName;
                    _init.Provider = provider;
                }

                return _init;
            }
        }

        /// <summary>
        /// Gets all the settings properties for this provider.
        /// </summary>
        private SettingsPropertyCollection GetPropertiesForProvider(SettingsProvider provider)
        {
            SettingsPropertyCollection properties = new SettingsPropertyCollection();
            foreach (SettingsProperty sp in Properties)
            {
                if (sp.Provider == provider)
                {
                    properties.Add(sp);
                }
            }

            return properties;
        }

        /// <summary>
        /// Retrieves the value of a setting. We need this method so we can fire the SettingsLoaded event
        /// when settings are loaded from the providers.Ideally, this should be fired from SettingsBase,
        /// but unfortunately that will not happen in Whidbey. Instead, we check to see if the value has already
        /// been retrieved. If not, we fire the load event, since we expect SettingsBase to load all the settings
        /// from this setting's provider.
        /// </summary>
        private object GetPropertyValue(string propertyName)
        {
            if (PropertyValues[propertyName] == null)
            {
                // If this is our first load and we are part of a Clickonce app, call Upgrade.
                if (_firstLoad)
                {
                    _firstLoad = false;

                    if (IsFirstRunOfClickOnceApp())
                    {
                        Upgrade();
                    }
                }

                // First access forces the base class to load from the provider;
                // capture the provider so the SettingsLoaded event can report it.
                object temp = base[propertyName];
                SettingsProperty setting = Properties[propertyName];
                SettingsProvider provider = setting != null ? setting.Provider : null;

                Debug.Assert(provider != null, "Could not determine provider from which settings were loaded");

                SettingsLoadedEventArgs e = new SettingsLoadedEventArgs(provider);
                OnSettingsLoaded(this, e);

                // Note: we need to requery the value here in case someone changed it while
                // handling SettingsLoaded.
                return base[propertyName];
            }
            else
            {
                return base[propertyName];
            }
        }

        /// <summary>
        /// Returns true if this is a clickonce deployed app and this is the first run of the app
        /// since deployment or last upgrade.
        /// </summary>
        private bool IsFirstRunOfClickOnceApp()
        {
            // Never ClickOnce app in CoreFX
            return false;
        }

        /// <summary>
        /// Returns true if this is a clickonce deployed app.
        /// </summary>
        internal static bool IsClickOnceDeployed(AppDomain appDomain)
        {
            // Never ClickOnce app in CoreFX
            return false;
        }

        /// <summary>
        /// Only those settings class properties that have a SettingAttribute on them are
        /// treated as settings. This routine filters out other properties.
        /// </summary>
        private PropertyInfo[] SettingsFilter(PropertyInfo[] allProps)
        {
            ArrayList settingProps = new ArrayList();
            object[] attributes;
            Attribute attr;

            for (int i = 0; i < allProps.Length; i++)
            {
                attributes = allProps[i].GetCustomAttributes(false);
                for (int j = 0; j < attributes.Length; j++)
                {
                    attr = attributes[j] as Attribute;
                    if (attr is SettingAttribute)
                    {
                        settingProps.Add(allProps[i]);
                        break;
                    }
                }
            }

            return (PropertyInfo[])settingProps.ToArray(typeof(PropertyInfo));
        }

        /// <summary>
        /// Resets the provider collection. This needs to be called when providers change after
        /// first being set.
        /// </summary>
        private void ResetProviders()
        {
            Providers.Clear();

            foreach (SettingsProperty sp in Properties)
            {
                if (Providers[sp.Provider.Name] == null)
                {
                    Providers.Add(sp.Provider);
                }
            }
        }
    }
}
// // Copyright (c) 2004-2020 Jaroslaw Kowalski <jaak@jkowalski.net>, Kim Christensen, Julian Verdurmen // // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions // are met: // // * Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // // * Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // // * Neither the name of Jaroslaw Kowalski nor the names of its // contributors may be used to endorse or promote products derived from this // software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE // LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF // SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF // THE POSSIBILITY OF SUCH DAMAGE. 
// #if !MONO namespace NLog.UnitTests.Config { using System; using System.IO; using System.Threading; using System.Xml; using NLog.Config; using Xunit; public class ReloadTests : NLogTestBase { public ReloadTests() { if (LogManager.LogFactory != null) { LogManager.LogFactory.ResetLoggerCache(); } } [Theory] [InlineData(true)] [InlineData(false)] public void TestNoAutoReload(bool useExplicitFileLoading) { string config1 = @"<nlog> <targets><target name='debug' type='Debug' layout='${message}' /></targets> <rules><logger name='*' minlevel='Debug' writeTo='debug' /></rules> </nlog>"; string config2 = @"<nlog> <targets><target name='debug' type='Debug' layout='[${message}]' /></targets> <rules><logger name='*' minlevel='Debug' writeTo='debug' /></rules> </nlog>"; string tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString()); Directory.CreateDirectory(tempPath); string configFilePath = Path.Combine(tempPath, "noreload.nlog"); WriteConfigFile(configFilePath, config1); try { SetLogManagerConfiguration(useExplicitFileLoading, configFilePath); Assert.False(((XmlLoggingConfiguration)LogManager.Configuration).AutoReload); var logger = LogManager.GetLogger("A"); logger.Debug("aaa"); AssertDebugLastMessage("debug", "aaa"); ChangeAndReloadConfigFile(configFilePath, config2, assertDidReload: false); logger.Debug("bbb"); // Assert that config1 is still loaded. 
AssertDebugLastMessage("debug", "bbb"); } finally { if (Directory.Exists(tempPath)) Directory.Delete(tempPath, true); } } private static void SetLogManagerConfiguration(bool useExplicitFileLoading, string configFilePath) { if (useExplicitFileLoading) LogManager.Configuration = new XmlLoggingConfiguration(configFilePath); else LogManager.LogFactory.SetCandidateConfigFilePaths(new string[] { configFilePath }); } [Theory] [InlineData(true)] [InlineData(false)] public void TestAutoReloadOnFileChange(bool useExplicitFileLoading) { #if NETSTANDARD if (IsTravis()) { Console.WriteLine("[SKIP] ReloadTests.TestAutoReloadOnFileChange because we are running in Travis"); return; } #endif string config1 = @"<nlog autoReload='true'> <targets><target name='debug' type='Debug' layout='${message}' /></targets> <rules><logger name='*' minlevel='Debug' writeTo='debug' /></rules> </nlog>"; string config2 = @"<nlog autoReload='true'> <targets><target name='debug' type='Debug' layout='[${message}]' /></targets> <rules><logger name='*' minlevel='Debug' writeTo='debug' /></rules> </nlog>"; string badConfig = @"<nlog autoReload='true'> <targets><target name='debug' type='Debug' layout='(${message})' /></targets> <rules><logger name='*' minlevel='Debug' writeTo='debug' /></rules>"; string tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString()); Directory.CreateDirectory(tempPath); string configFilePath = Path.Combine(tempPath, "reload.nlog"); WriteConfigFile(configFilePath, config1); try { SetLogManagerConfiguration(useExplicitFileLoading, configFilePath); Assert.True(((XmlLoggingConfiguration)LogManager.Configuration).AutoReload); var logger = LogManager.GetLogger("A"); logger.Debug("aaa"); AssertDebugLastMessage("debug", "aaa"); ChangeAndReloadConfigFile(configFilePath, badConfig, assertDidReload: false); logger.Debug("bbb"); // Assert that config1 is still loaded. 
AssertDebugLastMessage("debug", "bbb"); ChangeAndReloadConfigFile(configFilePath, config2); logger.Debug("ccc"); // Assert that config2 is loaded. AssertDebugLastMessage("debug", "[ccc]"); } finally { if (Directory.Exists(tempPath)) Directory.Delete(tempPath, true); } } [Theory] [InlineData(true)] [InlineData(false)] public void TestAutoReloadOnFileMove(bool useExplicitFileLoading) { string config1 = @"<nlog autoReload='true'> <targets><target name='debug' type='Debug' layout='${message}' /></targets> <rules><logger name='*' minlevel='Debug' writeTo='debug' /></rules> </nlog>"; string config2 = @"<nlog autoReload='true'> <targets><target name='debug' type='Debug' layout='[${message}]' /></targets> <rules><logger name='*' minlevel='Debug' writeTo='debug' /></rules> </nlog>"; string tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString()); Directory.CreateDirectory(tempPath); string configFilePath = Path.Combine(tempPath, "reload.nlog"); WriteConfigFile(configFilePath, config1); string otherFilePath = Path.Combine(tempPath, "other.nlog"); try { SetLogManagerConfiguration(useExplicitFileLoading, configFilePath); var logger = LogManager.GetLogger("A"); logger.Debug("aaa"); AssertDebugLastMessage("debug", "aaa"); using (var reloadWaiter = new ConfigurationReloadWaiter()) { File.Move(configFilePath, otherFilePath); reloadWaiter.WaitForReload(); } logger.Debug("bbb"); // Assert that config1 is still loaded. AssertDebugLastMessage("debug", "bbb"); WriteConfigFile(otherFilePath, config2); using (var reloadWaiter = new ConfigurationReloadWaiter()) { File.Move(otherFilePath, configFilePath); reloadWaiter.WaitForReload(); Assert.True(reloadWaiter.DidReload); } logger.Debug("ccc"); // Assert that config2 is loaded. 
AssertDebugLastMessage("debug", "[ccc]"); } finally { if (Directory.Exists(tempPath)) Directory.Delete(tempPath, true); } } [Theory] [InlineData(true)] [InlineData(false)] public void TestAutoReloadOnFileCopy(bool useExplicitFileLoading) { #if NETSTANDARD if (IsTravis()) { Console.WriteLine("[SKIP] ReloadTests.TestAutoReloadOnFileCopy because we are running in Travis"); return; } #endif string config1 = @"<nlog autoReload='true'> <targets><target name='debug' type='Debug' layout='${message}' /></targets> <rules><logger name='*' minlevel='Debug' writeTo='debug' /></rules> </nlog>"; string config2 = @"<nlog autoReload='true'> <targets><target name='debug' type='Debug' layout='[${message}]' /></targets> <rules><logger name='*' minlevel='Debug' writeTo='debug' /></rules> </nlog>"; string tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString()); Directory.CreateDirectory(tempPath); string configFilePath = Path.Combine(tempPath, "reload.nlog"); WriteConfigFile(configFilePath, config1); string otherFilePath = Path.Combine(tempPath, "other.nlog"); try { SetLogManagerConfiguration(useExplicitFileLoading, configFilePath); var logger = LogManager.GetLogger("A"); logger.Debug("aaa"); AssertDebugLastMessage("debug", "aaa"); using (var reloadWaiter = new ConfigurationReloadWaiter()) { File.Delete(configFilePath); reloadWaiter.WaitForReload(); } logger.Debug("bbb"); // Assert that config1 is still loaded. AssertDebugLastMessage("debug", "bbb"); WriteConfigFile(otherFilePath, config2); using (var reloadWaiter = new ConfigurationReloadWaiter()) { File.Copy(otherFilePath, configFilePath); File.Delete(otherFilePath); reloadWaiter.WaitForReload(); Assert.True(reloadWaiter.DidReload); } logger.Debug("ccc"); // Assert that config2 is loaded. 
AssertDebugLastMessage("debug", "[ccc]"); } finally { if (Directory.Exists(tempPath)) Directory.Delete(tempPath, true); } } [Theory] [InlineData(true)] [InlineData(false)] public void TestIncludedConfigNoReload(bool useExplicitFileLoading) { #if NETSTANDARD if (IsTravis()) { Console.WriteLine("[SKIP] ReloadTests.TestIncludedConfigNoReload because we are running in Travis"); return; } #endif string mainConfig1 = @"<nlog> <include file='included.nlog' /> <rules><logger name='*' minlevel='Debug' writeTo='debug' /></rules> </nlog>"; string mainConfig2 = @"<nlog> <include file='included.nlog' /> <rules><logger name='*' minlevel='Info' writeTo='debug' /></rules> </nlog>"; string includedConfig1 = @"<nlog> <targets><target name='debug' type='Debug' layout='${message}' /></targets> </nlog>"; string includedConfig2 = @"<nlog> <targets><target name='debug' type='Debug' layout='[${message}]' /></targets> </nlog>"; string tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString()); Directory.CreateDirectory(tempPath); string mainConfigFilePath = Path.Combine(tempPath, "main.nlog"); WriteConfigFile(mainConfigFilePath, mainConfig1); string includedConfigFilePath = Path.Combine(tempPath, "included.nlog"); WriteConfigFile(includedConfigFilePath, includedConfig1); try { SetLogManagerConfiguration(useExplicitFileLoading, mainConfigFilePath); var logger = LogManager.GetLogger("A"); logger.Debug("aaa"); AssertDebugLastMessage("debug", "aaa"); ChangeAndReloadConfigFile(mainConfigFilePath, mainConfig2, assertDidReload: false); logger.Debug("bbb"); // Assert that mainConfig1 is still loaded. AssertDebugLastMessage("debug", "bbb"); WriteConfigFile(mainConfigFilePath, mainConfig1); ChangeAndReloadConfigFile(includedConfigFilePath, includedConfig2, assertDidReload: false); logger.Debug("ccc"); // Assert that includedConfig1 is still loaded. 
AssertDebugLastMessage("debug", "ccc"); } finally { if (Directory.Exists(tempPath)) Directory.Delete(tempPath, true); } } [Theory] [InlineData(true)] [InlineData(false)] public void TestIncludedConfigReload(bool useExplicitFileLoading) { #if NETSTANDARD if (IsTravis()) { Console.WriteLine("[SKIP] ReloadTests.TestIncludedConfigNoReload because we are running in Travis"); return; } #endif string mainConfig1 = @"<nlog> <include file='included.nlog' /> <rules><logger name='*' minlevel='Debug' writeTo='debug' /></rules> </nlog>"; string mainConfig2 = @"<nlog> <include file='included.nlog' /> <rules><logger name='*' minlevel='Info' writeTo='debug' /></rules> </nlog>"; string includedConfig1 = @"<nlog autoReload='true'> <targets><target name='debug' type='Debug' layout='${message}' /></targets> </nlog>"; string includedConfig2 = @"<nlog autoReload='true'> <targets><target name='debug' type='Debug' layout='[${message}]' /></targets> </nlog>"; string tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString()); Directory.CreateDirectory(tempPath); string mainConfigFilePath = Path.Combine(tempPath, "main.nlog"); WriteConfigFile(mainConfigFilePath, mainConfig1); string includedConfigFilePath = Path.Combine(tempPath, "included.nlog"); WriteConfigFile(includedConfigFilePath, includedConfig1); try { SetLogManagerConfiguration(useExplicitFileLoading, mainConfigFilePath); var logger = LogManager.GetLogger("A"); logger.Debug("aaa"); AssertDebugLastMessage("debug", "aaa"); ChangeAndReloadConfigFile(mainConfigFilePath, mainConfig2, assertDidReload: false); logger.Debug("bbb"); // Assert that mainConfig1 is still loaded. AssertDebugLastMessage("debug", "bbb"); WriteConfigFile(mainConfigFilePath, mainConfig1); ChangeAndReloadConfigFile(includedConfigFilePath, includedConfig2); logger.Debug("ccc"); // Assert that includedConfig2 is loaded. 
AssertDebugLastMessage("debug", "[ccc]"); } finally { if (Directory.Exists(tempPath)) Directory.Delete(tempPath, true); } } [Theory] [InlineData(true)] [InlineData(false)] public void TestMainConfigReload(bool useExplicitFileLoading) { #if NETSTANDARD if (IsTravis()) { Console.WriteLine("[SKIP] ReloadTests.TestMainConfigReload because we are running in Travis"); return; } #endif string mainConfig1 = @"<nlog autoReload='true'> <include file='included.nlog' /> <rules><logger name='*' minlevel='Debug' writeTo='debug' /></rules> </nlog>"; string mainConfig2 = @"<nlog autoReload='true'> <include file='included2.nlog' /> <rules><logger name='*' minlevel='Debug' writeTo='debug' /></rules> </nlog>"; string included1Config = @"<nlog> <targets><target name='debug' type='Debug' layout='${message}' /></targets> </nlog>"; string included2Config1 = @"<nlog> <targets><target name='debug' type='Debug' layout='[${message}]' /></targets> </nlog>"; string included2Config2 = @"<nlog> <targets><target name='debug' type='Debug' layout='(${message})' /></targets> </nlog>"; string tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString()); Directory.CreateDirectory(tempPath); string mainConfigFilePath = Path.Combine(tempPath, "main.nlog"); WriteConfigFile(mainConfigFilePath, mainConfig1); string included1ConfigFilePath = Path.Combine(tempPath, "included.nlog"); WriteConfigFile(included1ConfigFilePath, included1Config); string included2ConfigFilePath = Path.Combine(tempPath, "included2.nlog"); WriteConfigFile(included2ConfigFilePath, included2Config1); try { SetLogManagerConfiguration(useExplicitFileLoading, mainConfigFilePath); var logger = LogManager.GetLogger("A"); logger.Debug("aaa"); AssertDebugLastMessage("debug", "aaa"); ChangeAndReloadConfigFile(mainConfigFilePath, mainConfig2); logger.Debug("bbb"); // Assert that mainConfig2 is loaded (which refers to included2.nlog). 
AssertDebugLastMessage("debug", "[bbb]"); ChangeAndReloadConfigFile(included2ConfigFilePath, included2Config2); logger.Debug("ccc"); // Assert that included2Config2 is loaded. AssertDebugLastMessage("debug", "(ccc)"); } finally { if (Directory.Exists(tempPath)) Directory.Delete(tempPath, true); } } [Theory] [InlineData(true)] [InlineData(false)] public void TestMainConfigReloadIncludedConfigNoReload(bool useExplicitFileLoading) { #if NETSTANDARD if (IsTravis()) { Console.WriteLine("[SKIP] ReloadTests.TestMainConfigReload because we are running in Travis"); return; } #endif string mainConfig1 = @"<nlog autoReload='true'> <include file='included.nlog' /> <rules><logger name='*' minlevel='Debug' writeTo='debug' /></rules> </nlog>"; string mainConfig2 = @"<nlog autoReload='true'> <include file='included2.nlog' /> <rules><logger name='*' minlevel='Debug' writeTo='debug' /></rules> </nlog>"; string included1Config = @"<nlog> <targets><target name='debug' type='Debug' layout='${message}' /></targets> </nlog>"; string included2Config1 = @"<nlog autoReload='false'> <targets><target name='debug' type='Debug' layout='[${message}]' /></targets> </nlog>"; string included2Config2 = @"<nlog autoReload='false'> <targets><target name='debug' type='Debug' layout='(${message})' /></targets> </nlog>"; string tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString()); Directory.CreateDirectory(tempPath); string mainConfigFilePath = Path.Combine(tempPath, "main.nlog"); WriteConfigFile(mainConfigFilePath, mainConfig1); string included1ConfigFilePath = Path.Combine(tempPath, "included.nlog"); WriteConfigFile(included1ConfigFilePath, included1Config); string included2ConfigFilePath = Path.Combine(tempPath, "included2.nlog"); WriteConfigFile(included2ConfigFilePath, included2Config1); try { SetLogManagerConfiguration(useExplicitFileLoading, mainConfigFilePath); var logger = LogManager.GetLogger("A"); logger.Debug("aaa"); AssertDebugLastMessage("debug", "aaa"); 
ChangeAndReloadConfigFile(mainConfigFilePath, mainConfig2); logger.Debug("bbb"); // Assert that mainConfig2 is loaded (which refers to included2.nlog). AssertDebugLastMessage("debug", "[bbb]"); ChangeAndReloadConfigFile(included2ConfigFilePath, included2Config2, assertDidReload: false); logger.Debug("ccc"); // Assert that included2Config1 is still loaded. AssertDebugLastMessage("debug", "[ccc]"); } finally { if (Directory.Exists(tempPath)) Directory.Delete(tempPath, true); } } [Fact] public void TestKeepVariablesOnReload() { string config = @"<nlog autoReload='true' keepVariablesOnReload='true'> <variable name='var1' value='' /> <variable name='var2' value='keep_value' /> </nlog>"; var configLoader = new LoggingConfigurationWatchableFileLoader(); var logFactory = new LogFactory(configLoader); var configuration = XmlLoggingConfigurationMock.CreateFromXml(logFactory, config); logFactory.Configuration = configuration; logFactory.Configuration.Variables["var1"] = "new_value"; logFactory.Configuration.Variables["var3"] = "new_value3"; configLoader.ReloadConfigOnTimer(configuration); Assert.Equal("new_value", logFactory.Configuration.Variables["var1"].OriginalText); Assert.Equal("keep_value", logFactory.Configuration.Variables["var2"].OriginalText); Assert.Equal("new_value3", logFactory.Configuration.Variables["var3"].OriginalText); logFactory.Configuration = configuration.ReloadNewConfig(); Assert.Equal("new_value", logFactory.Configuration.Variables["var1"].OriginalText); Assert.Equal("keep_value", logFactory.Configuration.Variables["var2"].OriginalText); Assert.Equal("new_value3", logFactory.Configuration.Variables["var3"].OriginalText); } [Fact] public void TestResetVariablesOnReload() { string config = @"<nlog autoReload='true' keepVariablesOnReload='false'> <variable name='var1' value='' /> <variable name='var2' value='keep_value' /> </nlog>"; var configLoader = new LoggingConfigurationWatchableFileLoader(); var logFactory = new LogFactory(configLoader); var 
configuration = XmlLoggingConfigurationMock.CreateFromXml(logFactory, config); logFactory.Configuration = configuration; logFactory.Configuration.Variables["var1"] = "new_value"; logFactory.Configuration.Variables["var3"] = "new_value3"; configLoader.ReloadConfigOnTimer(configuration); Assert.Equal("", logFactory.Configuration.Variables["var1"].OriginalText); Assert.Equal("keep_value", logFactory.Configuration.Variables["var2"].OriginalText); logFactory.Configuration = configuration.ReloadNewConfig(); Assert.Equal("", logFactory.Configuration.Variables["var1"].OriginalText); Assert.Equal("keep_value", logFactory.Configuration.Variables["var2"].OriginalText); } [Fact] public void ReloadConfigOnTimer_When_No_Exception_Raises_ConfigurationReloadedEvent() { var called = false; LoggingConfigurationReloadedEventArgs arguments = null; object calledBy = null; var configLoader = new LoggingConfigurationWatchableFileLoader(); var logFactory = new LogFactory(configLoader); var loggingConfiguration = XmlLoggingConfigurationMock.CreateFromXml(logFactory, "<nlog></nlog>"); logFactory.Configuration = loggingConfiguration; logFactory.ConfigurationReloaded += (sender, args) => { called = true; calledBy = sender; arguments = args; }; configLoader.ReloadConfigOnTimer(loggingConfiguration); Assert.True(called); Assert.Same(calledBy, logFactory); Assert.True(arguments.Succeeded); } [Fact] public void TestReloadingInvalidConfiguration() { var validXmlConfig = @"<nlog> <targets><target name='debug' type='Debug' layout='${message}' /></targets> <rules> <logger name='*' minlevel='Debug' writeTo='debug' /> </rules> </nlog>"; var invalidXmlConfig = @"<nlog autoReload='true' internalLogLevel='debug' internalLogLevel='error'> <targets><target name='debug' type='Debug' layout='${message}' /></targets> </nlog>"; string tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString()); Directory.CreateDirectory(tempPath); try { using (new NoThrowNLogExceptions()) { var nlogConfigFile = 
Path.Combine(tempPath, "NLog.config"); LogFactory logFactory = new LogFactory(); logFactory.SetCandidateConfigFilePaths(new[] { nlogConfigFile }); var config = logFactory.Configuration; Assert.Null(config); WriteConfigFile(nlogConfigFile, invalidXmlConfig); config = logFactory.Configuration; Assert.NotNull(config); Assert.Empty(config.AllTargets); // Failed to load Assert.Single(config.FileNamesToWatch); // But file-watcher is active WriteConfigFile(nlogConfigFile, validXmlConfig); config = logFactory.Configuration.Reload(); Assert.Single(config.AllTargets); } } finally { if (Directory.Exists(tempPath)) { Directory.Delete(tempPath, true); } } } [Fact] public void TestThrowExceptionWhenInvalidXml() { var invalidXmlConfig = @"<nlog throwExceptions='true' internalLogLevel='debug' internalLogLevel='error'> </nlog>"; string tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString()); Directory.CreateDirectory(tempPath); try { using (new NoThrowNLogExceptions()) { var nlogConfigFile = Path.Combine(tempPath, "NLog.config"); WriteConfigFile(nlogConfigFile, invalidXmlConfig); LogFactory logFactory = new LogFactory(); logFactory.SetCandidateConfigFilePaths(new[] { nlogConfigFile }); Assert.Throws<NLogConfigurationException>(() => logFactory.GetLogger("Hello")); } } finally { if (Directory.Exists(tempPath)) { Directory.Delete(tempPath, true); } } } private static void WriteConfigFile(string configFilePath, string config) { using (StreamWriter writer = File.CreateText(configFilePath)) writer.Write(config); } private static void ChangeAndReloadConfigFile(string configFilePath, string config, bool assertDidReload = true) { using (var reloadWaiter = new ConfigurationReloadWaiter()) { WriteConfigFile(configFilePath, config); reloadWaiter.WaitForReload(); if (assertDidReload) Assert.True(reloadWaiter.DidReload, $"Config '{configFilePath}' did not reload."); } } private class ConfigurationReloadWaiter : IDisposable { private ManualResetEvent counterEvent = new 
ManualResetEvent(false); public ConfigurationReloadWaiter() { LogManager.ConfigurationReloaded += SignalCounterEvent(counterEvent); } public bool DidReload => counterEvent.WaitOne(0); public void Dispose() { LogManager.ConfigurationReloaded -= SignalCounterEvent(counterEvent); } public void WaitForReload() { counterEvent.WaitOne(3000); } private static EventHandler<LoggingConfigurationReloadedEventArgs> SignalCounterEvent(ManualResetEvent counterEvent) { return (sender, e) => { counterEvent.Set(); }; } } } /// <summary> /// Xml config with reload without file-reads for performance /// </summary> public class XmlLoggingConfigurationMock : XmlLoggingConfiguration { private string _configXml; private LogFactory _logFactory; private XmlLoggingConfigurationMock(LogFactory logFactory, string configXml) : base(XmlReader.Create(new StringReader(configXml)), null, logFactory) { _logFactory = logFactory; _configXml = configXml; } public override LoggingConfiguration Reload() { return new XmlLoggingConfigurationMock(_logFactory, _configXml); } public static XmlLoggingConfigurationMock CreateFromXml(LogFactory logFactory, string configXml) { return new XmlLoggingConfigurationMock(logFactory, configXml); } } } #endif
using System;
using System.Net;
using System.Collections;
using System.Diagnostics;
using ExitGames.Client.Photon;
using UnityEngine;
using Debug = UnityEngine.Debug;
using SupportClassPun = ExitGames.Client.Photon.SupportClass;

#if UNITY_EDITOR || (!UNITY_ANDROID && !UNITY_IPHONE && !UNITY_PS3 && !UNITY_WINRT)
using System.Net.Sockets;

/// <summary>Uses C# Socket class from System.Net.Sockets (as Unity usually does).</summary>
/// <remarks>Incompatible with Windows 8 Store/Phone API.</remarks>
public class PingMonoEditor : PhotonPing
{
    // UDP socket used for a single ping exchange; null while idle or after an error.
    private Socket sock;

    /// <summary>
    /// Sends a "Photon Ping" to a server.
    /// </summary>
    /// <param name="ip">Address in IPv4 or IPv6 format. An address containing a '.' will be interpreted as IPv4.</param>
    /// <returns>True if the Photon Ping could be sent.</returns>
    public override bool StartPing(string ip)
    {
        base.Init();
        try
        {
            if (ip.Contains("."))
            {
                this.sock = new Socket(AddressFamily.InterNetwork, SocketType.Dgram, ProtocolType.Udp);
            }
            else
            {
                this.sock = new Socket(AddressFamily.InterNetworkV6, SocketType.Dgram, ProtocolType.Udp);
            }

            sock.ReceiveTimeout = 5000;
            sock.Connect(ip, 5055);

            // Mark the outgoing packet with the current PingId, send it, then
            // "invalidate" the local buffer so Done() only matches a real reply.
            PingBytes[PingBytes.Length - 1] = PingId;
            sock.Send(PingBytes);
            PingBytes[PingBytes.Length - 1] = (byte)(PingId - 1);
        }
        catch (Exception e)
        {
            sock = null;
            Console.WriteLine(e);
            return false;
        }

        // Fixed: previously this method always returned false, contradicting the
        // documented contract ("True if the Photon Ping could be sent").
        return true;
    }

    public override bool Done()
    {
        if (this.GotResult || sock == null)
        {
            return true;
        }

        if (sock.Available <= 0)
        {
            // No reply yet; caller should poll again.
            return false;
        }

        int read = sock.Receive(PingBytes, SocketFlags.None);

        bool replyMatch = PingBytes[PingBytes.Length - 1] == PingId && read == PingLength;
        if (!replyMatch)
        {
            Debug.Log("ReplyMatch is false! ");
        }

        this.Successful = read == PingBytes.Length && PingBytes[PingBytes.Length - 1] == PingId;
        this.GotResult = true;
        return true;
    }

    public override void Dispose()
    {
        // Best-effort close: the socket may already be closed or null.
        try
        {
            sock.Close();
        }
        catch
        {
        }

        sock = null;
    }
}
#endif


#if UNITY_WEBGL
// WebGL cannot open raw sockets, so ping via an HTTP request instead.
public class PingHttp : PhotonPing
{
    private WWW webRequest;

    public override bool StartPing(string address)
    {
        // Random query parameter defeats browser caching.
        address = "https://" + address + "/photon/m/?ping&r=" + UnityEngine.Random.Range(0, 10000);
        Debug.Log("StartPing: " + address);
        this.webRequest = new WWW(address);
        return true;
    }

    public override bool Done()
    {
        if (this.webRequest.isDone)
        {
            Successful = true;
            return true;
        }

        return false;
    }

    public override void Dispose()
    {
        this.webRequest.Dispose();
    }
}
#endif


/// <summary>
/// Pings all available Photon regions to find the one with the lowest round-trip time.
/// </summary>
public class PhotonPingManager
{
    public bool UseNative;
    public static int Attempts = 5;
    public static bool IgnoreInitialAttempt = true;
    // NOTE: field name keeps its original (misspelled) public identifier for compatibility.
    // Enter a value you're sure some server can beat (have a lower rtt).
    public static int MaxMilliseconsPerPing = 800;

    private const string wssProtocolString = "wss://";

    /// <summary>The region with the lowest measured ping, or null if none has a result yet.</summary>
    public Region BestRegion
    {
        get
        {
            Region result = null;
            int bestRtt = Int32.MaxValue;
            foreach (Region region in PhotonNetwork.networkingPeer.AvailableRegions)
            {
                Debug.Log("BestRegion checks region: " + region);
                if (region.Ping != 0 && region.Ping < bestRtt)
                {
                    bestRtt = region.Ping;
                    result = region;
                }
            }

            return result;
        }
    }

    /// <summary>True when no PingSocket coroutine is currently running.</summary>
    public bool Done
    {
        get { return this.PingsRunning == 0; }
    }

    private int PingsRunning;

    /// <summary>
    /// Pings a single region several times and stores the average RTT in region.Ping.
    /// </summary>
    /// <remarks>
    /// Affected by frame-rate of app, as this Coroutine checks the socket for a result once per frame.
    /// </remarks>
    public IEnumerator PingSocket(Region region)
    {
        // Worst-case default until at least one reply arrives.
        region.Ping = Attempts * MaxMilliseconsPerPing;
        this.PingsRunning++;

        PhotonPing ping;
        if (PhotonHandler.PingImplementation == typeof(PingNativeDynamic))
        {
            Debug.Log("Using constructor for new PingNativeDynamic()"); // it seems on Android, the Activator can't find the default Constructor
            ping = new PingNativeDynamic();
        }
        else if (PhotonHandler.PingImplementation == typeof(PingNativeStatic))
        {
            Debug.Log("Using constructor for new PingNativeStatic()"); // it seems on Switch, the Activator can't find the default Constructor
            ping = new PingNativeStatic();
        }
        else if (PhotonHandler.PingImplementation == typeof(PingMono))
        {
            ping = new PingMono(); // using this type explicitly saves it from IL2CPP bytecode stripping
        }
#if UNITY_WEBGL
        else if (PhotonHandler.PingImplementation == typeof(PingHttp))
        {
            ping = new PingHttp();
        }
#endif
        else
        {
            ping = (PhotonPing)Activator.CreateInstance(PhotonHandler.PingImplementation);
        }

        float rttSum = 0.0f;
        int replyCount = 0;

        // All addresses for Photon region servers will contain a :port ending. This needs to be removed first.
        // PhotonPing.StartPing() requires a plain (IP) address without port or protocol-prefix
        // (on all but Windows 8.1 and WebGL platforms).
        string regionAddress = region.HostAndPort;
        int indexOfColon = regionAddress.LastIndexOf(':');
        if (indexOfColon > 1)
        {
            regionAddress = regionAddress.Substring(0, indexOfColon);
        }

        // We also need to remove the protocol or Dns.GetHostAddresses(hostName) will throw an exception.
        // This is for xBox One for example.
        int indexOfProtocol = regionAddress.IndexOf(PhotonPingManager.wssProtocolString);
        if (indexOfProtocol > -1)
        {
            regionAddress = regionAddress.Substring(indexOfProtocol + PhotonPingManager.wssProtocolString.Length);
        }

        regionAddress = ResolveHost(regionAddress);

        for (int i = 0; i < Attempts; i++)
        {
            bool overtime = false;
            Stopwatch sw = new Stopwatch();
            sw.Start();

            try
            {
                ping.StartPing(regionAddress);
            }
            catch (Exception e)
            {
                Debug.Log("catched: " + e);
                // Fixed: the original also decremented PingsRunning here, and then again
                // after the loop below — a double decrement that corrupted the counter
                // behind the Done property. The single decrement after the loop suffices.
                break;
            }

            while (!ping.Done())
            {
                if (sw.ElapsedMilliseconds >= MaxMilliseconsPerPing)
                {
                    overtime = true;
                    break;
                }
                yield return 0; // keep this loop tight, to avoid adding local lag to rtt.
            }

            int rtt = (int)sw.ElapsedMilliseconds;

            if (IgnoreInitialAttempt && i == 0)
            {
                // do nothing: the first attempt often pays one-time connection cost.
            }
            else if (ping.Successful && !overtime)
            {
                rttSum += rtt;
                replyCount++;
                region.Ping = (int)((rttSum) / replyCount);
            }

            yield return new WaitForSeconds(0.1f);
        }

        ping.Dispose();
        this.PingsRunning--;
        yield return null;
    }

#if (UNITY_WINRT && !UNITY_EDITOR) || UNITY_WEBGL
    public static string ResolveHost(string hostName)
    {
#if UNITY_WEBGL
        if (hostName.StartsWith("wss://"))
        {
            hostName = hostName.Substring(6);
        }
        if (hostName.StartsWith("ws://"))
        {
            hostName = hostName.Substring(5);
        }
#endif
        return hostName;
    }
#else
    /// <summary>
    /// Attempts to resolve a hostname into an IP string or returns empty string if that fails.
    /// </summary>
    /// <remarks>
    /// To be compatible with most platforms, the address family is checked via the string form
    /// of the address (a ':' indicates IPv6), avoiding a dependency on System.Net.Sockets,
    /// which is not available on some platforms (Metro).
    /// </remarks>
    /// <param name="hostName">Hostname to resolve.</param>
    /// <returns>IP string or empty string if resolution fails</returns>
    public static string ResolveHost(string hostName)
    {
        string ipv4Address = string.Empty;

        try
        {
            IPAddress[] address = Dns.GetHostAddresses(hostName);

            if (address.Length == 1)
            {
                return address[0].ToString();
            }

            // If we got more addresses, return the first IPv6 one (contains ':'),
            // otherwise remember the first usable entry as the IPv4 fallback.
            for (int index = 0; index < address.Length; index++)
            {
                IPAddress ipAddress = address[index];
                if (ipAddress != null)
                {
                    if (ipAddress.ToString().Contains(":"))
                    {
                        return ipAddress.ToString();
                    }
                    if (string.IsNullOrEmpty(ipv4Address))
                    {
                        // Fixed: the original assigned address.ToString() — the ToString of the
                        // ARRAY ("System.Net.IPAddress[]") — instead of the element's address.
                        ipv4Address = ipAddress.ToString();
                    }
                }
            }
        }
        catch (System.Exception e)
        {
            Debug.Log("Exception caught! " + e.Source + " Message: " + e.Message);
        }

        return ipv4Address;
    }
#endif
}
#region License
// Copyright (c) K2 Workflow (SourceCode Technology Holdings Inc.). All rights reserved.
// Licensed under the MIT License. See LICENSE file in the project root for full license information.
#endregion

using System;
using System.Collections.Generic;
using System.IO;
using Newtonsoft.Json;
using Xunit;

namespace SourceCode.Clay.Json.Units
{
    /// <summary>
    /// Unit tests for the JsonReader extension helpers
    /// (ReadObject / SkipObject / ReadArray / EnumerateArray / SkipCountArray / As*).
    /// </summary>
    [System.Diagnostics.CodeAnalysis.ExcludeFromCodeCoverage]
    public static class JsonReaderExtensionTests
    {
        [Trait("Type", "Unit")]
        [Theory]
        [InlineData("{}")]
        [InlineData(" {\t\n \t} \r\n")]
        public static void When_read_empty_object(string json)
        {
            // Reading an empty object should never invoke the property callback.
            using (var sr = new StringReader(json))
            using (var reader = new JsonTextReader(sr))
            {
                int callbackHits = 0;
                reader.ReadObject(n =>
                {
                    callbackHits++;
                    return false;
                });
                Assert.Equal(0, callbackHits);
            }

            // Skipping should land on EndObject and report zero properties.
            using (var sr = new StringReader(json))
            using (var reader = new JsonTextReader(sr))
            {
                int skipped = reader.SkipObject();
                Assert.True(reader.TokenType == JsonToken.EndObject);
                Assert.Equal(0, skipped);
            }
        }

        [Trait("Type", "Unit")]
        [Theory]
        [InlineData("[]")]
        [InlineData(" [\t\n \t] \r\n")]
        public static void When_read_empty_array(string json)
        {
            // Read: materializes to an empty list.
            using (var sr = new StringReader(json))
            using (var reader = new JsonTextReader(sr))
            {
                IReadOnlyList<int> items = reader.ReadArray(() => (int)reader.Value);
                Assert.Empty(items);
            }

            // Enumerate: lazy variant also yields nothing.
            using (var sr = new StringReader(json))
            using (var reader = new JsonTextReader(sr))
            {
                IEnumerable<int> items = reader.EnumerateArray(() => (int)reader.Value);
                Assert.Empty(items);
            }

            // Process: the per-element action is never called.
            using (var sr = new StringReader(json))
            using (var reader = new JsonTextReader(sr))
            {
                int elementHits = 0;
                reader.ReadArray(() => elementHits++);
                Assert.Equal(0, elementHits);
            }

            // Skip: ends on EndArray with a count of zero.
            using (var sr = new StringReader(json))
            using (var reader = new JsonTextReader(sr))
            {
                int skipped = reader.SkipCountArray();
                Assert.True(reader.TokenType == JsonToken.EndArray);
                Assert.Equal(0, skipped);
            }
        }

        [Trait("Type", "Unit")]
        [Theory]
        [InlineData("{ \"a\": null }")]
        [InlineData("{ \t\n \"a\": \t\n null \t\n\t } \r\n")]
        public static void When_read_null_object(string json)
        {
            // Read: the "a" property is visited and its value is null.
            using (var sr = new StringReader(json))
            using (var reader = new JsonTextReader(sr))
            {
                string value = null;
                reader.ReadObject(n =>
                {
                    switch (n)
                    {
                        case "a":
                            value = (string)reader.Value;
                            return true;
                    }

                    return false;
                });
                Assert.Null(value);
            }

            // Skip: one property counted, positioned on EndObject.
            using (var sr = new StringReader(json))
            using (var reader = new JsonTextReader(sr))
            {
                int skipped = reader.SkipObject();
                Assert.True(reader.TokenType == JsonToken.EndObject);
                Assert.Equal(1, skipped);
            }
        }

        [Trait("Type", "Unit")]
        [Theory]
        [InlineData("[ null ]")]
        [InlineData("[ \t\n null \t\n\t ] \r\n")]
        public static void When_read_null_array(string json)
        {
            // Read: yields a single null element.
            using (var sr = new StringReader(json))
            using (var reader = new JsonTextReader(sr))
            {
                IReadOnlyList<string> items = reader.ReadArray(() => (string)reader.Value);
                Assert.Equal(new string[] { null }, items);
            }

            // Enumerate: lazy variant yields the same single null.
            using (var sr = new StringReader(json))
            using (var reader = new JsonTextReader(sr))
            {
                IEnumerable<string> items = reader.EnumerateArray(() => (string)reader.Value);
                Assert.Equal(new string[] { null }, items);
            }

            // Process: the action observes the single null element.
            using (var sr = new StringReader(json))
            using (var reader = new JsonTextReader(sr))
            {
                var collected = new List<string>();
                reader.ReadArray(() => collected.Add((string)reader.Value));
                Assert.Equal(new string[] { null }, collected);
            }

            // Skip: one element counted, positioned on EndArray.
            using (var sr = new StringReader(json))
            using (var reader = new JsonTextReader(sr))
            {
                int skipped = reader.SkipCountArray();
                Assert.True(reader.TokenType == JsonToken.EndArray);
                Assert.Equal(1, skipped);
            }
        }

        private static readonly Guid s_guid = new Guid("82a7f48d-3b50-4b1e-b82e-3ada8210c358");
        private static readonly Uri s_uri = new Uri("http://www.microsoft.com", UriKind.Absolute);

        // A JSON object covering strings, nulls, bools, comments, enums, guid/uri,
        // nested object/array, every integer width, and a trailing comma.
        private static readonly string s_jsonObject = @"
        {
            ""name"": ""joe"",
            ""last"": null,
            ""middle"": """",
            // comment
            ""alive"": true,
            ""age"": 99,
            ""type1"": ""tINyINt"",
            ""type2"": """",
            ""type3"": null,
            ""guid"": """ + s_guid.ToString() + @""",
            ""uri"": """ + s_uri.ToString() + @""",
            ""object"": { ""foo"": 123 },
            ""array"": [ 123, ""abc"", null, { ""foo"": 123, ""bar"": [ false, ""a"", 123, null ] } ],
            ""byte"": " + byte.MaxValue.ToString() + @",
            ""sbyte"": " + sbyte.MinValue.ToString() + @",
            ""ushort"": " + ushort.MaxValue.ToString() + @",
            ""short"": " + short.MinValue.ToString() + @",
            ""uint"": " + uint.MaxValue.ToString() + @",
            ""int"": " + int.MinValue.ToString() + @",
            ""long"": " + long.MinValue.ToString() + @",
        }";

        // A small heterogeneous array including a comment line.
        private const string JsonArray = @"
        [
            ""joe"",
            null,
            """",
            // comment
            true,
            99
        ]";

        [Trait("Type", "Unit")]
        [Fact]
        public static void When_read_simple_object()
        {
            // Read every property of s_jsonObject through the typed As* helpers.
            using (var sr = new StringReader(s_jsonObject))
            using (var reader = new JsonTextReader(sr))
            {
                string name = null;
                string last = "smith";
                string middle = "a";
                bool alive = false;
                int age = -1;
                var type = new System.Data.SqlDbType?[3];
                Guid? guid = null;
                Uri uri = null;
                byte u8 = 0;
                sbyte s8 = 0;
                ushort u16 = 0;
                short s16 = 0;
                uint u32 = 0;
                int s32 = 0;
                long s64 = 0;

                reader.ReadObject(n =>
                {
                    switch (n)
                    {
                        case "name": name = (string)reader.Value; return true;
                        case "last": last = (string)reader.Value; return true;
                        case "middle": middle = (string)reader.Value; return true;

                        // Both the non-nullable and nullable accessors are exercised.
                        case "alive": alive = reader.AsBool(); alive = reader.AsBoolNullable().Value; return true;
                        case "age": age = reader.AsInt32(); return true;

                        case "type1": type[0] = reader.AsEnum<System.Data.SqlDbType>(true); return true;
                        case "type2": type[1] = reader.AsEnumNullable<System.Data.SqlDbType>(true); return true;
                        case "type3": type[2] = reader.AsEnumNullable<System.Data.SqlDbType>(true); return true;

                        // Nested structures are skipped, not materialized.
                        case "object": reader.SkipObject(); return true;
                        case "array": reader.SkipCountArray(); return true;

                        case "guid": guid = reader.AsGuid(); guid = reader.AsGuidNullable(); return true;
                        case "uri": uri = reader.AsUri(UriKind.Absolute); uri = reader.AsUriNullable(UriKind.Absolute); return true;

                        case "byte": u8 = reader.AsByte(); u8 = reader.AsByteNullable().Value; return true;
                        case "sbyte": s8 = reader.AsSByte(); s8 = reader.AsSByteNullable().Value; return true;
                        case "ushort": u16 = reader.AsUInt16(); u16 = reader.AsUInt16Nullable().Value; return true;
                        case "short": s16 = reader.AsInt16(); s16 = reader.AsInt16Nullable().Value; return true;
                        case "uint": u32 = reader.AsUInt32(); u32 = reader.AsUInt32Nullable().Value; return true;
                        case "int": s32 = reader.AsInt32(); s32 = reader.AsInt32Nullable().Value; return true;
                        case "long": s64 = reader.AsInt64(); s64 = reader.AsInt64Nullable().Value; return true;
                    }

                    return false;
                });

                string text = $"{name} {age}";
                Assert.Equal("joe 99", text);

                Assert.Equal("joe", name);
                Assert.Null(last);
                Assert.Equal(string.Empty, middle);
                Assert.True(alive);
                Assert.Equal(99, age);
                Assert.Equal(System.Data.SqlDbType.TinyInt, type[0]);
                Assert.Null(type[1]);
                Assert.Null(type[2]);
                Assert.Equal(s_guid, guid.Value);
                Assert.Equal(s_uri.ToString(), uri?.ToString(), StringComparer.Ordinal);
                Assert.Equal(byte.MaxValue, u8);
                Assert.Equal(sbyte.MinValue, s8);
                Assert.Equal(ushort.MaxValue, u16);
                Assert.Equal(short.MinValue, s16);
                Assert.Equal(uint.MaxValue, u32);
                Assert.Equal(int.MinValue, s32);
                Assert.Equal(long.MinValue, s64);
            }

            // Skip: all 19 top-level properties counted.
            using (var sr = new StringReader(s_jsonObject))
            using (var reader = new JsonTextReader(sr))
            {
                int skipped = reader.SkipObject();
                Assert.True(reader.TokenType == JsonToken.EndObject);
                Assert.Equal(19, skipped);
            }
        }

        [Trait("Type", "Unit")]
        [Fact]
        public static void When_read_simple_array()
        {
            // Read: raw token values come back in order.
            using (var sr = new StringReader(JsonArray))
            using (var reader = new JsonTextReader(sr))
            {
                IReadOnlyList<object> items = reader.ReadArray(() => reader.Value);
                Assert.Collection(items,
                    n => Assert.Equal("joe", n),
                    n => Assert.Null(n),
                    n => Assert.Equal(string.Empty, n),
                    n => Assert.True((bool)n),
                    n => Assert.Equal(99L, n));
            }

            // Enumerate: lazy variant yields the same sequence.
            using (var sr = new StringReader(JsonArray))
            using (var reader = new JsonTextReader(sr))
            {
                IEnumerable<object> items = reader.EnumerateArray(() => reader.Value);
                Assert.Collection(items,
                    n => Assert.Equal("joe", n),
                    n => Assert.Null(n),
                    n => Assert.Equal(string.Empty, n),
                    n => Assert.True((bool)n),
                    n => Assert.Equal(99L, n));
            }

            // Process: the action observes every element.
            using (var sr = new StringReader(JsonArray))
            using (var reader = new JsonTextReader(sr))
            {
                var collected = new List<object>();
                reader.ReadArray(() => collected.Add(reader.Value));
                Assert.Collection(collected,
                    n => Assert.Equal("joe", n),
                    n => Assert.Null(n),
                    n => Assert.Equal(string.Empty, n),
                    n => Assert.True((bool)n),
                    n => Assert.Equal(99L, n));
            }

            // Skip: five elements counted, positioned on EndArray.
            using (var sr = new StringReader(JsonArray))
            using (var reader = new JsonTextReader(sr))
            {
                int skipped = reader.SkipCountArray();
                Assert.True(reader.TokenType == JsonToken.EndArray);
                Assert.Equal(5, skipped);
            }
        }

        [Trait("Type", "Unit")]
        [Fact]
        public static void When_read_simple_object_negative()
        {
            // A callback that does not consume every property must surface
            // a JsonReaderException from ReadObject.
            using (var sr = new StringReader(s_jsonObject))
            using (var reader = new JsonTextReader(sr))
            {
                string name = null;
                string last = "smith";
                string middle = "a";
                bool alive = false;

                Assert.Throws<JsonReaderException>
                (
                    () => reader.ReadObject(n =>
                    {
                        switch (n)
                        {
                            case "name": name = (string)reader.Value; return true;
                            case "last": last = (string)reader.Value; return true;
                            case "middle": middle = (string)reader.Value; return true;
                            case "alive": alive = reader.AsBool(); return true;

                                // Neglect to process all other properties
                        }

                        return false;
                    })
                );
            }
        }
    }
}
using PlayFab.PfEditor.EditorModels; using System.Collections.Generic; using System.Text; using UnityEditor; using UnityEngine; namespace PlayFab.PfEditor { [InitializeOnLoad] public class PlayFabEditorSettings : UnityEditor.Editor { #region panel variables public enum SubMenuStates { StandardSettings, TitleSettings, ApiSettings, Packages } public enum WebRequestType { UnityWww, // High compatability Unity api calls HttpWebRequest // High performance multi-threaded api calls } private static float LABEL_WIDTH = 160; private static readonly StringBuilder Sb = new StringBuilder(); private static SubMenuComponent _menu = null; private static readonly Dictionary<string, StudioDisplaySet> StudioFoldOutStates = new Dictionary<string, StudioDisplaySet>(); private static Vector2 _titleScrollPos = Vector2.zero; private static Vector2 _packagesScrollPos = Vector2.zero; #endregion #region draw calls private static void DrawApiSubPanel() { using (new UnityVertical(PlayFabEditorHelper.uiStyle.GetStyle("gpStyleGray1"))) { var curDefines = PlayerSettings.GetScriptingDefineSymbolsForGroup(EditorUserBuildSettings.selectedBuildTargetGroup); var changedFlags = false; List<string> allFlags = new List<string>(PlayFabEditorHelper.FLAG_LABELS.Keys); var extraDefines = new List<string>(curDefines.Split(' ', ';')); extraDefines.Sort(); foreach (var eachFlag in extraDefines) if (!allFlags.Contains(eachFlag)) allFlags.Add(eachFlag); foreach (var eachDefine in allFlags) { if (string.IsNullOrEmpty(eachDefine)) continue; string flagLabel; if (!PlayFabEditorHelper.FLAG_LABELS.TryGetValue(eachDefine, out flagLabel)) flagLabel = eachDefine; bool flagInverted; PlayFabEditorHelper.FLAG_INVERSION.TryGetValue(eachDefine, out flagInverted); DisplayDefineToggle(flagLabel + ": ", flagInverted, eachDefine, ref curDefines, ref changedFlags); } if (changedFlags) { PlayerSettings.SetScriptingDefineSymbolsForGroup(EditorUserBuildSettings.selectedBuildTargetGroup, curDefines); Debug.Log("Updating Defines: " + 
curDefines); AssetDatabase.Refresh(); } } } private static void DisplayDefineToggle(string label, bool invertDisplay, string displayedDefine, ref string curDefines, ref bool changedFlag) { bool flagSet, flagGet = curDefines.Contains(displayedDefine); using (var fwl = new FixedWidthLabel(label)) { GUILayout.Space(LABEL_WIDTH - fwl.fieldWidth); flagSet = EditorGUILayout.Toggle(invertDisplay ? !flagGet : flagGet, PlayFabEditorHelper.uiStyle.GetStyle("Toggle"), GUILayout.MinHeight(25)); if (invertDisplay) flagSet = !flagSet; } changedFlag |= flagSet != flagGet; Sb.Length = 0; if (flagSet && !flagGet) { Sb.Append(curDefines); if (Sb.Length > 0) Sb.Append(";"); Sb.Append(displayedDefine); curDefines = Sb.ToString(); } else if (!flagSet && flagGet) { Sb.Append(curDefines); Sb.Replace(displayedDefine, "").Replace(";;", ";"); if (Sb.Length > 0 && Sb[0] == ';') Sb.Remove(0, 1); if (Sb.Length > 0 && Sb[Sb.Length - 1] == ';') Sb.Remove(Sb.Length - 1, 1); curDefines = Sb.ToString(); } } public static void DrawSettingsPanel() { if (!PlayFabEditorDataService.IsDataLoaded) return; if (_menu != null) { _menu.DrawMenu(); switch ((SubMenuStates)PlayFabEditorDataService.EditorView.currentSubMenu) { case SubMenuStates.StandardSettings: DrawStandardSettingsSubPanel(); break; case SubMenuStates.ApiSettings: DrawApiSubPanel(); break; case SubMenuStates.TitleSettings: DrawTitleSettingsSubPanel(); break; case SubMenuStates.Packages: DrawPackagesSubPanel(); break; } } else { RegisterMenu(); } } private static void DrawTitleSettingsSubPanel() { float labelWidth = 100; if (PlayFabEditorDataService.AccountDetails.studios != null && PlayFabEditorDataService.AccountDetails.studios.Count != StudioFoldOutStates.Count + 1) { StudioFoldOutStates.Clear(); foreach (var studio in PlayFabEditorDataService.AccountDetails.studios) { if (string.IsNullOrEmpty(studio.Id)) continue; if (!StudioFoldOutStates.ContainsKey(studio.Id)) StudioFoldOutStates.Add(studio.Id, new StudioDisplaySet { Studio = studio }); 
foreach (var title in studio.Titles) if (!StudioFoldOutStates[studio.Id].titleFoldOutStates.ContainsKey(title.Id)) StudioFoldOutStates[studio.Id].titleFoldOutStates.Add(title.Id, new TitleDisplaySet { Title = title }); } } _titleScrollPos = GUILayout.BeginScrollView(_titleScrollPos, PlayFabEditorHelper.uiStyle.GetStyle("gpStyleGray1")); using (new UnityHorizontal(PlayFabEditorHelper.uiStyle.GetStyle("gpStyleClear"))) { EditorGUILayout.LabelField("STUDIOS:", PlayFabEditorHelper.uiStyle.GetStyle("labelStyle"), GUILayout.Width(labelWidth)); GUILayout.FlexibleSpace(); if (GUILayout.Button("REFRESH", PlayFabEditorHelper.uiStyle.GetStyle("Button"))) PlayFabEditorDataService.RefreshStudiosList(); } foreach (var studio in StudioFoldOutStates) { var style = new GUIStyle(EditorStyles.foldout); if (studio.Value.isCollapsed) style.fontStyle = FontStyle.Normal; studio.Value.isCollapsed = EditorGUI.Foldout(EditorGUILayout.GetControlRect(), studio.Value.isCollapsed, string.Format("{0} ({1})", studio.Value.Studio.Name, studio.Value.Studio.Titles.Length), true, PlayFabEditorHelper.uiStyle.GetStyle("foldOut_std")); if (studio.Value.isCollapsed) continue; EditorGUI.indentLevel = 2; using (new UnityHorizontal(PlayFabEditorHelper.uiStyle.GetStyle("gpStyleClear"))) { EditorGUILayout.LabelField("TITLES:", PlayFabEditorHelper.uiStyle.GetStyle("labelStyle"), GUILayout.Width(labelWidth)); } GUILayout.Space(5); // draw title foldouts foreach (var title in studio.Value.titleFoldOutStates) { title.Value.isCollapsed = EditorGUI.Foldout(EditorGUILayout.GetControlRect(), title.Value.isCollapsed, string.Format("{0} [{1}]", title.Value.Title.Name, title.Value.Title.Id), true, PlayFabEditorHelper.uiStyle.GetStyle("foldOut_std")); if (title.Value.isCollapsed) continue; EditorGUI.indentLevel = 3; using (new UnityHorizontal(PlayFabEditorHelper.uiStyle.GetStyle("gpStyleClear"))) { EditorGUILayout.LabelField("SECRET KEY:", PlayFabEditorHelper.uiStyle.GetStyle("labelStyle"), GUILayout.Width(labelWidth)); 
EditorGUILayout.TextField("" + title.Value.Title.SecretKey); } using (new UnityHorizontal(PlayFabEditorHelper.uiStyle.GetStyle("gpStyleClear"))) { EditorGUILayout.LabelField("URL:", PlayFabEditorHelper.uiStyle.GetStyle("labelStyle"), GUILayout.Width(labelWidth)); GUILayout.FlexibleSpace(); if (GUILayout.Button("VIEW IN GAME MANAGER", PlayFabEditorHelper.uiStyle.GetStyle("textButton"))) Application.OpenURL(title.Value.Title.GameManagerUrl); GUILayout.FlexibleSpace(); } EditorGUI.indentLevel = 2; } EditorGUI.indentLevel = 0; } GUILayout.EndScrollView(); } private static Studio GetStudioForTitleId(string titleId) { if (PlayFabEditorDataService.AccountDetails.studios == null) return Studio.OVERRIDE; foreach (var eachStudio in PlayFabEditorDataService.AccountDetails.studios) if (eachStudio.Titles != null) foreach (var eachTitle in eachStudio.Titles) if (eachTitle.Id == titleId) return eachStudio; return Studio.OVERRIDE; } private static void DrawStandardSettingsSubPanel() { float labelWidth = 160; using (new UnityVertical(PlayFabEditorHelper.uiStyle.GetStyle("gpStyleGray1"), GUILayout.ExpandWidth(true))) { var studio = GetStudioForTitleId(PlayFabEditorDataService.SharedSettings.TitleId); if (string.IsNullOrEmpty(studio.Id)) using (new UnityHorizontal(PlayFabEditorHelper.uiStyle.GetStyle("gpStyleClear"))) GUILayout.Label("You are using a TitleId to which you are not a memeber. 
A title administrator can approve access for your account.", PlayFabEditorHelper.uiStyle.GetStyle("orTxt")); PlayFabGuiFieldHelper.SuperFancyDropdown(labelWidth, "STUDIO: ", studio, PlayFabEditorDataService.AccountDetails.studios, eachStudio => eachStudio.Name, OnStudioChange, PlayFabEditorHelper.uiStyle.GetStyle("gpStyleClear")); studio = GetStudioForTitleId(PlayFabEditorDataService.SharedSettings.TitleId); // This might have changed above, so refresh it if (string.IsNullOrEmpty(studio.Id)) { // Override studio lets you set your own titleId using (new UnityHorizontal(PlayFabEditorHelper.uiStyle.GetStyle("gpStyleClear"))) { EditorGUILayout.LabelField("TITLE ID: ", PlayFabEditorHelper.uiStyle.GetStyle("labelStyle"), GUILayout.Width(labelWidth)); var newTitleId = EditorGUILayout.TextField(PlayFabEditorDataService.SharedSettings.TitleId, PlayFabEditorHelper.uiStyle.GetStyle("TextField"), GUILayout.MinHeight(25)); if (newTitleId != PlayFabEditorDataService.SharedSettings.TitleId) OnTitleIdChange(newTitleId); } } else { PlayFabGuiFieldHelper.SuperFancyDropdown(labelWidth, "TITLE ID: ", studio.GetTitle(PlayFabEditorDataService.SharedSettings.TitleId), studio.Titles, GetTitleDisplayString, OnTitleChange, PlayFabEditorHelper.uiStyle.GetStyle("gpStyleClear")); } DrawPfSharedSettingsOptions(labelWidth); } } private static string GetTitleDisplayString(Title title) { return string.Format("[{0}] {1}", title.Id, title.Name); } private static void DrawPfSharedSettingsOptions(float labelWidth) { #if ENABLE_PLAYFABADMIN_API || ENABLE_PLAYFABSERVER_API // Set the title secret key, if we're using the dropdown var studio = GetStudioForTitleId(PlayFabEditorDataService.SharedSettings.TitleId); var correctKey = studio.GetTitleSecretKey(PlayFabEditorDataService.SharedSettings.TitleId); var setKey = !string.IsNullOrEmpty(studio.Id) && !string.IsNullOrEmpty(correctKey); if (setKey) PlayFabEditorDataService.SharedSettings.DeveloperSecretKey = correctKey; using (new 
UnityHorizontal(PlayFabEditorHelper.uiStyle.GetStyle("gpStyleClear"))) { EditorGUILayout.LabelField("DEVELOPER SECRET KEY: ", PlayFabEditorHelper.uiStyle.GetStyle("labelStyle"), GUILayout.Width(labelWidth)); using (new UnityGuiToggler(!setKey)) PlayFabEditorDataService.SharedSettings.DeveloperSecretKey = EditorGUILayout.TextField(PlayFabEditorDataService.SharedSettings.DeveloperSecretKey, PlayFabEditorHelper.uiStyle.GetStyle("TextField"), GUILayout.MinHeight(25)); } #endif using (new UnityHorizontal(PlayFabEditorHelper.uiStyle.GetStyle("gpStyleClear"))) { EditorGUILayout.LabelField("REQUEST TYPE: ", PlayFabEditorHelper.uiStyle.GetStyle("labelStyle"), GUILayout.MaxWidth(labelWidth)); PlayFabEditorDataService.SharedSettings.WebRequestType = (WebRequestType)EditorGUILayout.EnumPopup(PlayFabEditorDataService.SharedSettings.WebRequestType, PlayFabEditorHelper.uiStyle.GetStyle("TextField"), GUILayout.Height(25)); } if (PlayFabEditorDataService.SharedSettings.WebRequestType == WebRequestType.HttpWebRequest) { using (var fwl = new FixedWidthLabel(new GUIContent("REQUEST TIMEOUT: "), PlayFabEditorHelper.uiStyle.GetStyle("labelStyle"))) { GUILayout.Space(labelWidth - fwl.fieldWidth); PlayFabEditorDataService.SharedSettings.TimeOut = EditorGUILayout.IntField(PlayFabEditorDataService.SharedSettings.TimeOut, PlayFabEditorHelper.uiStyle.GetStyle("TextField"), GUILayout.MinHeight(25)); } using (var fwl = new FixedWidthLabel(new GUIContent("KEEP ALIVE: "), PlayFabEditorHelper.uiStyle.GetStyle("labelStyle"))) { GUILayout.Space(labelWidth - fwl.fieldWidth); PlayFabEditorDataService.SharedSettings.KeepAlive = EditorGUILayout.Toggle(PlayFabEditorDataService.SharedSettings.KeepAlive, PlayFabEditorHelper.uiStyle.GetStyle("Toggle"), GUILayout.MinHeight(25)); } } using (new UnityHorizontal(PlayFabEditorHelper.uiStyle.GetStyle("gpStyleClear"))) { EditorGUILayout.LabelField("COMPRESS API DATA: ", PlayFabEditorHelper.uiStyle.GetStyle("labelStyle"), GUILayout.MaxWidth(labelWidth)); 
PlayFabEditorDataService.SharedSettings.CompressApiData = EditorGUILayout.Toggle(PlayFabEditorDataService.SharedSettings.CompressApiData, PlayFabEditorHelper.uiStyle.GetStyle("Toggle"), GUILayout.MinHeight(25)); } } private static void DrawPackagesSubPanel() { using (new UnityHorizontal(PlayFabEditorHelper.uiStyle.GetStyle("gpStyleGray1"))) { GUILayout.Label("Packages are additional PlayFab features that can be installed. Enabling a package will install the AsssetPackage; disabling will remove the package.", PlayFabEditorHelper.uiStyle.GetStyle("genTxt")); } if (PlayFabEditorSDKTools.IsInstalled && PlayFabEditorSDKTools.isSdkSupported) { float labelWidth = 245; _packagesScrollPos = GUILayout.BeginScrollView(_packagesScrollPos, PlayFabEditorHelper.uiStyle.GetStyle("gpStyleGray1")); using (var fwl = new FixedWidthLabel("Push Notification Plugin (Android): ")) { GUILayout.Space(labelWidth - fwl.fieldWidth); PlayFabEditorPackageManager.AndroidPushPlugin = EditorGUILayout.Toggle(PlayFabEditorPackageManager.AndroidPushPlugin, PlayFabEditorHelper.uiStyle.GetStyle("Toggle")); } GUILayout.Space(5); using (new UnityHorizontal(PlayFabEditorHelper.uiStyle.GetStyle("gpStyleClear"))) { if (GUILayout.Button("VIEW GUIDE", PlayFabEditorHelper.uiStyle.GetStyle("Button"))) { Application.OpenURL("https://github.com/PlayFab/UnitySDK/tree/master/PluginsSource/UnityAndroidPluginSource#playfab-push-notification-plugin"); } } GUILayout.EndScrollView(); } } #endregion #region menu and helper methods private static void RegisterMenu() { if (_menu != null) return; _menu = CreateInstance<SubMenuComponent>(); _menu.RegisterMenuItem("PROJECT", OnStandardSetttingsClicked); _menu.RegisterMenuItem("STUDIOS", OnTitleSettingsClicked); _menu.RegisterMenuItem("API", OnApiSettingsClicked); _menu.RegisterMenuItem("PACKAGES", OnPackagesClicked); } private static void OnPackagesClicked() { PlayFabEditor.RaiseStateUpdate(PlayFabEditor.EdExStates.OnSubmenuItemClicked, SubMenuStates.Packages.ToString(), "" + 
(int)SubMenuStates.Packages); } private static void OnApiSettingsClicked() { PlayFabEditor.RaiseStateUpdate(PlayFabEditor.EdExStates.OnSubmenuItemClicked, SubMenuStates.ApiSettings.ToString(), "" + (int)SubMenuStates.ApiSettings); } private static void OnStandardSetttingsClicked() { PlayFabEditor.RaiseStateUpdate(PlayFabEditor.EdExStates.OnSubmenuItemClicked, SubMenuStates.StandardSettings.ToString(), "" + (int)SubMenuStates.StandardSettings); } private static void OnTitleSettingsClicked() { PlayFabEditor.RaiseStateUpdate(PlayFabEditor.EdExStates.OnSubmenuItemClicked, SubMenuStates.TitleSettings.ToString(), "" + (int)SubMenuStates.TitleSettings); } private static void OnStudioChange(Studio newStudio) { var newTitleId = newStudio.Titles == null ? "" : newStudio.Titles[0].Id; OnTitleIdChange(newTitleId); } private static void OnTitleChange(Title newTitle) { OnTitleIdChange(newTitle.Id); } private static void OnTitleIdChange(string newTitleId) { var studio = GetStudioForTitleId(newTitleId); PlayFabEditorDataService.EnvDetails.selectedStudio = studio.Name; PlayFabEditorDataService.SharedSettings.TitleId = newTitleId; #if ENABLE_PLAYFABADMIN_API || ENABLE_PLAYFABSERVER_API PlayFabEditorDataService.SharedSettings.DeveloperSecretKey = studio.GetTitleSecretKey(newTitleId); #endif PlayFabEditorDataService.EnvDetails.titleData.Clear(); if (PlayFabEditorDataMenu.tdViewer != null) PlayFabEditorDataMenu.tdViewer.items.Clear(); PlayFabEditorDataService.SaveEnvDetails(); PlayFabEditor.RaiseStateUpdate(PlayFabEditor.EdExStates.OnSuccess); } #endregion } }
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Data.Common;
using System.Diagnostics;
using System.Text;
using System.Threading;
using System.Threading.Tasks;

namespace System.Data.SqlClient
{
    /// <summary>
    /// A forward-only TextReader over a single character column of a SqlDataReader.
    /// Bytes are pulled sequentially from the reader and decoded with a stateful Decoder;
    /// bytes that do not form a complete character are carried over to the next read.
    /// Only one read operation (sync or async) may be in flight at a time (_currentTask).
    /// </summary>
    sealed internal class SqlSequentialTextReader : System.IO.TextReader
    {
        private SqlDataReader _reader;                          // The SqlDataReader that we are reading data from
        private int _columnIndex;                               // The index of our column in the table
        private Encoding _encoding;                             // Encoding for this character stream
        private Decoder _decoder;                               // Decoder based on the encoding (NOTE: Decoders are stateful as they are designed to process streams of data)
        private byte[] _leftOverBytes;                          // Bytes leftover from the last Read() operation - this can be null if there were no bytes leftover (Possible optimization: re-use the same array?)
        private int _peekedChar;                                // The last character that we peeked at (or -1 if we haven't peeked at anything)
        private Task _currentTask;                              // The current async task; non-null while an async read is pending
        private CancellationTokenSource _disposalTokenSource;   // Used to indicate that a cancellation is requested due to disposal

        /// <summary>
        /// Creates a sequential text reader over the given column of <paramref name="reader"/>.
        /// Argument validity is asserted (Debug-only), not thrown — callers are internal.
        /// </summary>
        internal SqlSequentialTextReader(SqlDataReader reader, int columnIndex, Encoding encoding)
        {
            Debug.Assert(reader != null, "Null reader when creating sequential textreader");
            Debug.Assert(columnIndex >= 0, "Invalid column index when creating sequential textreader");
            Debug.Assert(encoding != null, "Null encoding when creating sequential textreader");

            _reader = reader;
            _columnIndex = columnIndex;
            _encoding = encoding;
            _decoder = encoding.GetDecoder();
            _leftOverBytes = null;
            _peekedChar = -1;
            _currentTask = null;
            _disposalTokenSource = new CancellationTokenSource();
        }

        /// <summary>Index of the column this reader wraps.</summary>
        internal int ColumnIndex
        {
            get { return _columnIndex; }
        }

        /// <summary>
        /// Returns the next character without consuming it, or -1 at end of data.
        /// Implemented by reading one char and caching it in _peekedChar.
        /// </summary>
        public override int Peek()
        {
            if (_currentTask != null)
            {
                throw ADP.AsyncOperationPending();
            }
            if (IsClosed)
            {
                throw ADP.ObjectDisposed(this);
            }

            if (!HasPeekedChar)
            {
                _peekedChar = Read();
            }

            Debug.Assert(_peekedChar == -1 || ((_peekedChar >= char.MinValue) && (_peekedChar <= char.MaxValue)), string.Format("Bad peeked character: {0}", _peekedChar));
            return _peekedChar;
        }

        /// <summary>
        /// Reads and consumes a single character, or returns -1 at end of data.
        /// </summary>
        public override int Read()
        {
            if (_currentTask != null)
            {
                throw ADP.AsyncOperationPending();
            }
            if (IsClosed)
            {
                throw ADP.ObjectDisposed(this);
            }

            int readChar = -1;

            // If there is already a peeked char, then return it
            if (HasPeekedChar)
            {
                readChar = _peekedChar;
                _peekedChar = -1;
            }
            // If there is data available try to read a char
            else
            {
                char[] tempBuffer = new char[1];
                int charsRead = InternalRead(tempBuffer, 0, 1);
                if (charsRead == 1)
                {
                    readChar = tempBuffer[0];
                }
            }

            Debug.Assert(readChar == -1 || ((readChar >= char.MinValue) && (readChar <= char.MaxValue)), string.Format("Bad read character: {0}", readChar));
            return readChar;
        }

        /// <summary>
        /// Reads up to <paramref name="count"/> characters into <paramref name="buffer"/> starting
        /// at <paramref name="index"/>; returns the number of characters actually read.
        /// A previously peeked character, if any, is delivered first.
        /// </summary>
        public override int Read(char[] buffer, int index, int count)
        {
            ValidateReadParameters(buffer, index, count);

            if (IsClosed)
            {
                throw ADP.ObjectDisposed(this);
            }
            if (_currentTask != null)
            {
                throw ADP.AsyncOperationPending();
            }

            int charsRead = 0;
            int charsNeeded = count;

            // Load in peeked char
            if ((charsNeeded > 0) && (HasPeekedChar))
            {
                Debug.Assert((_peekedChar >= char.MinValue) && (_peekedChar <= char.MaxValue), string.Format("Bad peeked character: {0}", _peekedChar));
                buffer[index + charsRead] = (char)_peekedChar;
                charsRead++;
                charsNeeded--;
                _peekedChar = -1;
            }

            // If we need more data and there is data available, read
            charsRead += InternalRead(buffer, index + charsRead, charsNeeded);

            return charsRead;
        }

        /// <summary>
        /// Async counterpart of Read(char[], int, int).  Attempts a synchronous fast path via
        /// GetBytesAsync returning null; otherwise wires a continuation that finishes the
        /// TaskCompletionSource. Exactly one async operation may be pending (guarded by a CAS
        /// on _currentTask). Errors surface on the returned task; SqlExceptions raised by the
        /// continuation are wrapped as IO errors.
        /// </summary>
        public override Task<int> ReadAsync(char[] buffer, int index, int count)
        {
            ValidateReadParameters(buffer, index, count);
            TaskCompletionSource<int> completion = new TaskCompletionSource<int>();
            if (IsClosed)
            {
                completion.SetException(ADP.ExceptionWithStackTrace(ADP.ObjectDisposed(this)));
            }
            else
            {
                try
                {
                    // Claim the pending-operation slot; if it was already taken, fail with "async pending".
                    Task original = Interlocked.CompareExchange<Task>(ref _currentTask, completion.Task, null);
                    if (original != null)
                    {
                        completion.SetException(ADP.ExceptionWithStackTrace(ADP.AsyncOperationPending()));
                    }
                    else
                    {
                        bool completedSynchronously = true;
                        int charsRead = 0;
                        int adjustedIndex = index;
                        int charsNeeded = count;

                        // Load in peeked char
                        if ((HasPeekedChar) && (charsNeeded > 0))
                        {
                            // Take a copy of _peekedChar in case it is cleared during close
                            int peekedChar = _peekedChar;
                            if (peekedChar >= char.MinValue)
                            {
                                Debug.Assert((_peekedChar >= char.MinValue) && (_peekedChar <= char.MaxValue), string.Format("Bad peeked character: {0}", _peekedChar));
                                buffer[adjustedIndex] = (char)peekedChar;
                                adjustedIndex++;
                                charsRead++;
                                charsNeeded--;
                                _peekedChar = -1;
                            }
                        }

                        int byteBufferUsed;
                        byte[] byteBuffer = PrepareByteBuffer(charsNeeded, out byteBufferUsed);

                        // Permit a 0 byte read in order to advance the reader to the correct column
                        if ((byteBufferUsed < byteBuffer.Length) || (byteBuffer.Length == 0))
                        {
                            int bytesRead;
                            var reader = _reader;
                            if (reader != null)
                            {
                                // GetBytesAsync returns null when it completed synchronously (bytesRead is then valid).
                                Task<int> getBytesTask = reader.GetBytesAsync(_columnIndex, byteBuffer, byteBufferUsed, byteBuffer.Length - byteBufferUsed, Timeout.Infinite, _disposalTokenSource.Token, out bytesRead);
                                if (getBytesTask == null)
                                {
                                    byteBufferUsed += bytesRead;
                                }
                                else
                                {
                                    // We need more data - setup the callback, and mark this as not completed sync
                                    completedSynchronously = false;
                                    getBytesTask.ContinueWith((t) =>
                                    {
                                        _currentTask = null;
                                        // If we completed but the textreader is closed, then report cancellation
                                        if ((t.Status == TaskStatus.RanToCompletion) && (!IsClosed))
                                        {
                                            try
                                            {
                                                int bytesReadFromStream = t.Result;
                                                byteBufferUsed += bytesReadFromStream;
                                                if (byteBufferUsed > 0)
                                                {
                                                    charsRead += DecodeBytesToChars(byteBuffer, byteBufferUsed, buffer, adjustedIndex, charsNeeded);
                                                }
                                                completion.SetResult(charsRead);
                                            }
                                            catch (Exception ex)
                                            {
                                                completion.SetException(ex);
                                            }
                                        }
                                        else if (IsClosed)
                                        {
                                            completion.SetException(ADP.ExceptionWithStackTrace(ADP.ObjectDisposed(this)));
                                        }
                                        else if (t.Status == TaskStatus.Faulted)
                                        {
                                            if (t.Exception.InnerException is SqlException)
                                            {
                                                // ReadAsync can't throw a SqlException, so wrap it in an IOException
                                                completion.SetException(ADP.ExceptionWithStackTrace(ADP.ErrorReadingFromStream(t.Exception.InnerException)));
                                            }
                                            else
                                            {
                                                completion.SetException(t.Exception.InnerException);
                                            }
                                        }
                                        else
                                        {
                                            completion.SetCanceled();
                                        }
                                    }, TaskScheduler.Default);
                                }

                                if ((completedSynchronously) && (byteBufferUsed > 0))
                                {
                                    // No more data needed, decode what we have
                                    charsRead += DecodeBytesToChars(byteBuffer, byteBufferUsed, buffer, adjustedIndex, charsNeeded);
                                }
                            }
                            else
                            {
                                // Reader is null; close must have happened in the middle of this read
                                completion.SetException(ADP.ExceptionWithStackTrace(ADP.ObjectDisposed(this)));
                            }
                        }

                        if (completedSynchronously)
                        {
                            _currentTask = null;
                            if (IsClosed)
                            {
                                completion.SetCanceled();
                            }
                            else
                            {
                                completion.SetResult(charsRead);
                            }
                        }
                    }
                }
                catch (Exception ex)
                {
                    // In case of any errors, ensure that the completion is completed and the task is set back to null if we switched it
                    completion.TrySetException(ex);
                    Interlocked.CompareExchange(ref _currentTask, null, completion.Task);
                    throw;
                }
            }

            return completion.Task;
        }

        /// <summary>
        /// Marks the reader closed on dispose; the underlying stream is not drained here.
        /// </summary>
        protected override void Dispose(bool disposing)
        {
            if (disposing)
            {
                // Set the textreader as closed
                SetClosed();
            }

            base.Dispose(disposing);
        }

        /// <summary>
        /// Forces the TextReader to act as if it was closed.
        /// This does not actually close the stream, read off the rest of the data or dispose this.
        /// Signals disposal cancellation, then blocks until any pending async read observes it.
        /// </summary>
        internal void SetClosed()
        {
            _disposalTokenSource.Cancel();
            _reader = null;
            _peekedChar = -1;

            // Wait for pending task
            var currentTask = _currentTask;
            if (currentTask != null)
            {
                ((IAsyncResult)currentTask).AsyncWaitHandle.WaitOne();
            }
        }

        /// <summary>
        /// Performs the actual reading and converting.
        /// NOTE: This assumes that buffer, index and count are all valid, we're not closed (!IsClosed) and that there is data left (IsDataLeft())
        /// </summary>
        /// <param name="buffer">Destination character buffer</param>
        /// <param name="index">Offset in <paramref name="buffer"/> to start writing at</param>
        /// <param name="count">Maximum number of characters to read</param>
        /// <returns>Number of characters actually read (0 if no data was available)</returns>
        private int InternalRead(char[] buffer, int index, int count)
        {
            Debug.Assert(buffer != null, "Null output buffer");
            Debug.Assert((index >= 0) && (count >= 0) && (index + count <= buffer.Length), string.Format("Bad count: {0} or index: {1}", count, index));
            Debug.Assert(!IsClosed, "Can't read while textreader is closed");

            try
            {
                // Fill a byte buffer (pre-seeded with any leftover bytes), then decode into chars.
                int byteBufferUsed;
                byte[] byteBuffer = PrepareByteBuffer(count, out byteBufferUsed);
                byteBufferUsed += _reader.GetBytesInternalSequential(_columnIndex, byteBuffer, byteBufferUsed, byteBuffer.Length - byteBufferUsed);

                if (byteBufferUsed > 0)
                {
                    return DecodeBytesToChars(byteBuffer, byteBufferUsed, buffer, index, count);
                }
                else
                {
                    // Nothing to read, or nothing read
                    return 0;
                }
            }
            catch (SqlException ex)
            {
                // Read can't throw a SqlException - so wrap it in an IOException
                throw ADP.ErrorReadingFromStream(ex);
            }
        }

        /// <summary>
        /// Creates a byte array large enough to store all bytes for the characters in the current encoding, then fills it with any leftover bytes
        /// </summary>
        /// <param name="numberOfChars">Number of characters that are to be read</param>
        /// <param name="byteBufferUsed">Number of bytes pre-filled by the leftover bytes</param>
        /// <returns>A byte array of the correct size, pre-filled with leftover bytes</returns>
        private byte[] PrepareByteBuffer(int numberOfChars, out int byteBufferUsed)
        {
            Debug.Assert(numberOfChars >= 0, "Can't prepare a byte buffer for negative characters");

            byte[] byteBuffer;

            if (numberOfChars == 0)
            {
                byteBuffer = new byte[0];
                byteBufferUsed = 0;
            }
            else
            {
                // Worst-case byte count for the requested characters under the current encoding.
                int byteBufferSize = _encoding.GetMaxByteCount(numberOfChars);

                if (_leftOverBytes != null)
                {
                    // If we have more leftover bytes than we need for this conversion, then just re-use the leftover buffer
                    if (_leftOverBytes.Length > byteBufferSize)
                    {
                        byteBuffer = _leftOverBytes;
                        byteBufferUsed = byteBuffer.Length;
                    }
                    else
                    {
                        // Otherwise, copy over the leftover buffer
                        byteBuffer = new byte[byteBufferSize];
                        Buffer.BlockCopy(_leftOverBytes, 0, byteBuffer, 0, _leftOverBytes.Length);
                        byteBufferUsed = _leftOverBytes.Length;
                    }
                }
                else
                {
                    byteBuffer = new byte[byteBufferSize];
                    byteBufferUsed = 0;
                }
            }

            return byteBuffer;
        }

        /// <summary>
        /// Decodes the given bytes into characters, and stores the leftover bytes for later use
        /// </summary>
        /// <param name="inBuffer">Buffer of bytes to decode</param>
        /// <param name="inBufferCount">Number of bytes to decode from the inBuffer</param>
        /// <param name="outBuffer">Buffer to write the characters to</param>
        /// <param name="outBufferOffset">Offset to start writing to outBuffer at</param>
        /// <param name="outBufferCount">Maximum number of characters to decode</param>
        /// <returns>The actual number of characters decoded</returns>
        private int DecodeBytesToChars(byte[] inBuffer, int inBufferCount, char[] outBuffer, int outBufferOffset, int outBufferCount)
        {
            Debug.Assert(inBuffer != null, "Null input buffer");
            Debug.Assert((inBufferCount > 0) && (inBufferCount <= inBuffer.Length), string.Format("Bad inBufferCount: {0}", inBufferCount));
            Debug.Assert(outBuffer != null, "Null output buffer");
            Debug.Assert((outBufferOffset >= 0) && (outBufferCount > 0) && (outBufferOffset + outBufferCount <= outBuffer.Length), string.Format("Bad outBufferCount: {0} or outBufferOffset: {1}", outBufferCount, outBufferOffset));

            int charsRead;
            int bytesUsed;
            bool completed;
            // flush: false — the decoder keeps partial-character state between calls.
            _decoder.Convert(inBuffer, 0, inBufferCount, outBuffer, outBufferOffset, outBufferCount, false, out bytesUsed, out charsRead, out completed);

            // completed may be false and there is no spare bytes if the Decoder has stored bytes to use later
            if ((!completed) && (bytesUsed < inBufferCount))
            {
                _leftOverBytes = new byte[inBufferCount - bytesUsed];
                Buffer.BlockCopy(inBuffer, bytesUsed, _leftOverBytes, 0, _leftOverBytes.Length);
            }
            else
            {
                // If Convert() sets completed to true, then it must have used all of the bytes we gave it
                Debug.Assert(bytesUsed >= inBufferCount, "Converted completed, but not all bytes were used");
                _leftOverBytes = null;
            }

            Debug.Assert(((_reader == null) || (_reader.ColumnDataBytesRemaining() > 0) || (!completed) || (_leftOverBytes == null)), "Stream has run out of data and the decoder finished, but there are leftover bytes");
            Debug.Assert(charsRead > 0, "Converted no chars. Bad encoding?");

            return charsRead;
        }

        /// <summary>
        /// True if this TextReader is supposed to be closed
        /// </summary>
        private bool IsClosed
        {
            get { return (_reader == null); }
        }

        /// <summary>
        /// True if there is a peeked character available
        /// </summary>
        private bool HasPeekedChar
        {
            get { return (_peekedChar >= char.MinValue); }
        }

        /// <summary>
        /// Checks that the parameters passed into a Read() method are valid
        /// </summary>
        /// <param name="buffer">Destination buffer; must be non-null</param>
        /// <param name="index">Offset into the buffer; must be non-negative</param>
        /// <param name="count">Number of characters requested; must be non-negative and fit in the buffer</param>
        internal static void ValidateReadParameters(char[] buffer, int index, int count)
        {
            if (buffer == null)
            {
                throw ADP.ArgumentNull(ADP.ParameterBuffer);
            }
            if (index < 0)
            {
                throw ADP.ArgumentOutOfRange(ADP.ParameterIndex);
            }
            if (count < 0)
            {
                throw ADP.ArgumentOutOfRange(ADP.ParameterCount);
            }
            try
            {
                if (checked(index + count) > buffer.Length)
                {
                    throw ExceptionBuilder.InvalidOffsetLength();
                }
            }
            catch (OverflowException)
            {
                // If we've overflowed when adding index and count, then they never would have fit into buffer anyway
                throw ExceptionBuilder.InvalidOffsetLength();
            }
        }
    }

    /// <summary>
    /// UTF-16LE encoding specialized for SQL Server character data: no BOM is ever present,
    /// so byte/char counts are exactly 2:1 and decoding is a straight block copy.
    /// </summary>
    sealed internal class SqlUnicodeEncoding : UnicodeEncoding
    {
        private static SqlUnicodeEncoding s_singletonEncoding = new SqlUnicodeEncoding();

        private SqlUnicodeEncoding() : base(bigEndian: false, byteOrderMark: false, throwOnInvalidBytes: false)
        { }

        public override Decoder GetDecoder()
        {
            return new SqlUnicodeDecoder();
        }

        public override int GetMaxByteCount(int charCount)
        {
            // SQL Server never sends a BOM, so we can assume that its 2 bytes per char
            return charCount * 2;
        }

        /// <summary>Shared singleton instance of this encoding.</summary>
        public static Encoding SqlUnicodeEncodingInstance
        {
            get { return s_singletonEncoding; }
        }

        /// <summary>
        /// Decoder assuming exactly 2 bytes per character (little-endian, no BOM).
        /// NOTE: whole characters only — a trailing odd byte is reported as unused, not buffered.
        /// </summary>
        sealed private class SqlUnicodeDecoder : Decoder
        {
            public override int GetCharCount(byte[] bytes, int index, int count)
            {
                // SQL Server never sends a BOM, so we can assume that its 2 bytes per char
                return count / 2;
            }

            public override int GetChars(byte[] bytes, int byteIndex, int byteCount, char[] chars, int charIndex)
            {
                // This method is required - simply call Convert()
                int bytesUsed;
                int charsUsed;
                bool completed;
                Convert(bytes, byteIndex, byteCount, chars, charIndex, chars.Length - charIndex, true, out bytesUsed, out charsUsed, out completed);
                return charsUsed;
            }

            public override void Convert(byte[] bytes, int byteIndex, int byteCount, char[] chars, int charIndex, int charCount, bool flush, out int bytesUsed, out int charsUsed, out bool completed)
            {
                // Assume 2 bytes per char and no BOM
                charsUsed = Math.Min(charCount, byteCount / 2);
                bytesUsed = charsUsed * 2;
                completed = (bytesUsed == byteCount);

                // BlockCopy uses offsets\length measured in bytes, not the actual array index
                Buffer.BlockCopy(bytes, byteIndex, chars, charIndex * 2, bytesUsed);
            }
        }
    }
}
/*
 * REST API Documentation for the MOTI Hired Equipment Tracking System (HETS) Application
 *
 * The Hired Equipment Program is for owners/operators who have a dump truck, bulldozer, backhoe or other piece of equipment they want to hire out to the transportation ministry for day labour and emergency projects. The Hired Equipment Program distributes available work to local equipment owners. The program is based on seniority and is designed to deliver work to registered users fairly and efficiently through the development of local area call-out lists.
 *
 * OpenAPI spec version: v1
 *
 */

using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Threading.Tasks;
using Xunit;
using HETSAPI;
using HETSAPI.Models;
using System.Reflection;

namespace HETSAPI.Test
{
    /// <summary>
    /// Generated placeholder tests for the DumpTruck model.
    /// Each property test is a stub (asserts true) awaiting a real implementation.
    /// </summary>
    public class DumpTruckModelTests
    {
        // Shared model instance exercised by the instance-type test below.
        private readonly DumpTruck _instance;

        /// <summary>Creates a fresh DumpTruck for each test (xUnit runs the ctor per test).</summary>
        public DumpTruckModelTests() => _instance = new DumpTruck();

        /// <summary>Verifies that a DumpTruck instance can be constructed.</summary>
        [Fact]
        public void DumpTruckInstanceTest() => Assert.IsType<DumpTruck>(_instance);

        /// <summary>Placeholder test for the 'Id' property.</summary>
        [Fact]
        public void IdTest() => Assert.True(true);

        /// <summary>Placeholder test for the 'IsSingleAxle' property.</summary>
        [Fact]
        public void IsSingleAxleTest() => Assert.True(true);

        /// <summary>Placeholder test for the 'IsTandemAxle' property.</summary>
        [Fact]
        public void IsTandemAxleTest() => Assert.True(true);

        /// <summary>Placeholder test for the 'IsTridem' property.</summary>
        [Fact]
        public void IsTridemTest() => Assert.True(true);

        /// <summary>Placeholder test for the 'HasPUP' property.</summary>
        [Fact]
        public void HasPUPTest() => Assert.True(true);

        /// <summary>Placeholder test for the 'HasBellyDump' property.</summary>
        [Fact]
        public void HasBellyDumpTest() => Assert.True(true);

        /// <summary>Placeholder test for the 'HasRockBox' property.</summary>
        [Fact]
        public void HasRockBoxTest() => Assert.True(true);

        /// <summary>Placeholder test for the 'HasHiliftGate' property.</summary>
        [Fact]
        public void HasHiliftGateTest() => Assert.True(true);

        /// <summary>Placeholder test for the 'IsWaterTruck' property.</summary>
        [Fact]
        public void IsWaterTruckTest() => Assert.True(true);

        /// <summary>Placeholder test for the 'HasSealcoatHitch' property.</summary>
        [Fact]
        public void HasSealcoatHitchTest() => Assert.True(true);

        /// <summary>Placeholder test for the 'RearAxleSpacing' property.</summary>
        [Fact]
        public void RearAxleSpacingTest() => Assert.True(true);

        /// <summary>Placeholder test for the 'FrontTireSize' property.</summary>
        [Fact]
        public void FrontTireSizeTest() => Assert.True(true);

        /// <summary>Placeholder test for the 'FrontTireUOM' property.</summary>
        [Fact]
        public void FrontTireUOMTest() => Assert.True(true);

        /// <summary>Placeholder test for the 'FrontAxleCapacity' property.</summary>
        [Fact]
        public void FrontAxleCapacityTest() => Assert.True(true);

        /// <summary>Placeholder test for the 'RearAxleCapacity' property.</summary>
        [Fact]
        public void RearAxleCapacityTest() => Assert.True(true);

        /// <summary>Placeholder test for the 'LegalLoad' property.</summary>
        [Fact]
        public void LegalLoadTest() => Assert.True(true);

        /// <summary>Placeholder test for the 'LegalCapacity' property.</summary>
        [Fact]
        public void LegalCapacityTest() => Assert.True(true);

        /// <summary>Placeholder test for the 'LegalPUPTareWeight' property.</summary>
        [Fact]
        public void LegalPUPTareWeightTest() => Assert.True(true);

        /// <summary>Placeholder test for the 'LicencedGVW' property.</summary>
        [Fact]
        public void LicencedGVWTest() => Assert.True(true);

        /// <summary>Placeholder test for the 'LicencedGVWUOM' property.</summary>
        [Fact]
        public void LicencedGVWUOMTest() => Assert.True(true);

        /// <summary>Placeholder test for the 'LicencedTareWeight' property.</summary>
        [Fact]
        public void LicencedTareWeightTest() => Assert.True(true);

        /// <summary>Placeholder test for the 'LicencedPUPTareWeight' property.</summary>
        [Fact]
        public void LicencedPUPTareWeightTest() => Assert.True(true);

        /// <summary>Placeholder test for the 'LicencedLoad' property.</summary>
        [Fact]
        public void LicencedLoadTest() => Assert.True(true);

        /// <summary>Placeholder test for the 'LicencedCapacity' property.</summary>
        [Fact]
        public void LicencedCapacityTest() => Assert.True(true);

        /// <summary>Placeholder test for the 'BoxLength' property.</summary>
        [Fact]
        public void BoxLengthTest() => Assert.True(true);

        /// <summary>Placeholder test for the 'BoxWidth' property.</summary>
        [Fact]
        public void BoxWidthTest() => Assert.True(true);

        /// <summary>Placeholder test for the 'BoxHeight' property.</summary>
        [Fact]
        public void BoxHeightTest() => Assert.True(true);

        /// <summary>Placeholder test for the 'BoxCapacity' property.</summary>
        [Fact]
        public void BoxCapacityTest() => Assert.True(true);

        /// <summary>Placeholder test for the 'TrailerBoxLength' property.</summary>
        [Fact]
        public void TrailerBoxLengthTest() => Assert.True(true);

        /// <summary>Placeholder test for the 'TrailerBoxWidth' property.</summary>
        [Fact]
        public void TrailerBoxWidthTest() => Assert.True(true);

        /// <summary>Placeholder test for the 'TrailerBoxHeight' property.</summary>
        [Fact]
        public void TrailerBoxHeightTest() => Assert.True(true);

        /// <summary>Placeholder test for the 'TrailerBoxCapacity' property.</summary>
        [Fact]
        public void TrailerBoxCapacityTest() => Assert.True(true);
    }
}
using Chemistry;
using Proteomics.Fragmentation;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Text.RegularExpressions;
using EngineLayer.GlycoSearch;
using System.IO;

namespace EngineLayer
{
    /// <summary>
    /// One peptide-spectrum match (PSM) read back from a single line of a tab-separated
    /// results file. The constructor parses the line using a header-name -> column-index
    /// map; columns absent from the file (index &lt; 0 in the map) leave the corresponding
    /// property null. Covers general PSM columns plus crosslink- and glyco-specific columns.
    /// </summary>
    public class PsmFromTsv
    {
        // Parses residue ranges written as "<start> to <end>" (see StartAndEndResiduesInProtein).
        private static readonly Regex PositionParser = new Regex(@"(\d+)\s+to\s+(\d+)");
        // Parses sequence-variant notation such as "A123T": group 1 = position, group 2 = replacement residues.
        private static readonly Regex VariantParser = new Regex(@"[a-zA-Z]+(\d+)([a-zA-Z]+)");
        // Parses fragment-ion annotations such as "b12": group 1 = ion type, group 2 = fragment number.
        private static readonly Regex IonParser = new Regex(@"([a-zA-Z]+)(\d+)");

        // ---- Required columns (always present; parsing throws if missing) ----
        public string FullSequence { get; }
        public int Ms2ScanNumber { get; }
        public string FileNameWithoutExtension { get; }
        public int PrecursorScanNum { get; }
        public int PrecursorCharge { get; }
        public double PrecursorMz { get; }
        public double PrecursorMass { get; }
        public double Score { get; }
        public string ProteinAccession { get; }
        public List<MatchedFragmentIon> MatchedIons { get; }
        public Dictionary<int, List<MatchedFragmentIon>> ChildScanMatchedIons { get; } // this is only used in crosslink for now, but in the future will be used for other experiment types
        public double QValue { get; }
        public double PEP { get; }
        public double PEP_QValue { get; }

        // ---- Optional general-PSM columns (null when the column is absent) ----
        // NOTE(review): many numeric-looking columns below are kept as raw strings
        // (e.g. PeptideMonoMass, MassDiffDa) — presumably because they can hold
        // multiple "|"-delimited values for ambiguous PSMs; confirm against the writer.
        public double? TotalIonCurrent { get; }
        public double? DeltaScore { get; }
        public string Notch { get; }
        public string BaseSeq { get; }
        public string EssentialSeq { get; }
        public string AmbiguityLevel { get; }
        public string MissedCleavage { get; }
        public string PeptideMonoMass { get; }
        public string MassDiffDa { get; }
        public string MassDiffPpm { get; }
        public string ProteinName { get; }
        public string GeneName { get; }
        public string OrganismName { get; }
        public string IntersectingSequenceVariations { get; }
        public string IdentifiedSequenceVariations { get; }
        public string SpliceSites { get; }
        public string PeptideDescription { get; }
        public string StartAndEndResiduesInProtein { get; }
        public string PreviousAminoAcid { get; }
        public string NextAminoAcid { get; }
        public string DecoyContamTarget { get; }
        public double? QValueNotch { get; }
        // Matched ions whose fragment span crosses an identified sequence variant; computed, not read from file.
        public List<MatchedFragmentIon> VariantCrossingIons { get; }

        //For crosslink
        public string CrossType { get; }
        public string LinkResidues { get; }
        public int? ProteinLinkSite { get; }
        public int? Rank { get; }
        public string BetaPeptideProteinAccession { get; }
        public int? BetaPeptideProteinLinkSite { get; }
        public string BetaPeptideBaseSequence { get; }
        public string BetaPeptideFullSequence { get; }
        public string BetaPeptideTheoreticalMass { get; }
        public double? BetaPeptideScore { get; }
        public int? BetaPeptideRank { get; }
        public List<MatchedFragmentIon> BetaPeptideMatchedIons { get; }
        public Dictionary<int, List<MatchedFragmentIon>> BetaPeptideChildScanMatchedIons { get; }
        public double? XLTotalScore { get; }
        public string ParentIons { get; }
        public double? RetentionTime { get; }

        //For Glyco
        public string GlycanStructure { get; set; }
        public double? GlycanMass { get; set; }
        public string GlycanComposition { get; set; }
        public LocalizationLevel? GlycanLocalizationLevel { get; set; }
        public string LocalizedGlycan { get; set; }

        /// <summary>
        /// Parses one TSV line into a PSM.
        /// </summary>
        /// <param name="line">The raw tab-separated line.</param>
        /// <param name="split">Delimiter characters used to split the line (typically tab).</param>
        /// <param name="parsedHeader">Map from header name to column index; -1 means the column is not present.</param>
        public PsmFromTsv(string line, char[] split, Dictionary<string, int> parsedHeader)
        {
            var spl = line.Split(split);

            //Required properties
            FileNameWithoutExtension = spl[parsedHeader[PsmTsvHeader.FileName]].Trim();

            // remove file format, e.g., .raw, .mzML, .mgf
            // this is more robust but slower than Path.GetFileNameWithoutExtension
            // NOTE(review): Replace removes the extension text anywhere in the name,
            // not just at the end — a file named "my.raw.data.mzML" would lose ".raw"
            // too; confirm this is acceptable for the accepted-format list.
            if (FileNameWithoutExtension.Contains('.'))
            {
                foreach (var knownSpectraFileExtension in GlobalVariables.AcceptedSpectraFormats)
                {
                    FileNameWithoutExtension = Path.GetFileName(FileNameWithoutExtension.Replace(knownSpectraFileExtension, string.Empty, StringComparison.InvariantCultureIgnoreCase));
                }
            }

            Ms2ScanNumber = int.Parse(spl[parsedHeader[PsmTsvHeader.Ms2ScanNumber]]);

            // this will probably not be known in an .mgf data file
            if (int.TryParse(spl[parsedHeader[PsmTsvHeader.PrecursorScanNum]].Trim(), out int result))
            {
                PrecursorScanNum = result;
            }
            else
            {
                PrecursorScanNum = 0;
            }

            // charge is written as a double in the file, hence parse-then-cast
            PrecursorCharge = (int)double.Parse(spl[parsedHeader[PsmTsvHeader.PrecursorCharge]].Trim(), CultureInfo.InvariantCulture);
            PrecursorMz = double.Parse(spl[parsedHeader[PsmTsvHeader.PrecursorMz]].Trim(), CultureInfo.InvariantCulture);
            PrecursorMass = double.Parse(spl[parsedHeader[PsmTsvHeader.PrecursorMass]].Trim(), CultureInfo.InvariantCulture);
            BaseSeq = RemoveParentheses(spl[parsedHeader[PsmTsvHeader.BaseSequence]].Trim());
            FullSequence = spl[parsedHeader[PsmTsvHeader.FullSequence]];
            PeptideMonoMass = spl[parsedHeader[PsmTsvHeader.PeptideMonoMass]].Trim();
            Score = double.Parse(spl[parsedHeader[PsmTsvHeader.Score]].Trim(), CultureInfo.InvariantCulture);
            DecoyContamTarget = spl[parsedHeader[PsmTsvHeader.DecoyContaminantTarget]].Trim();
            QValue = double.Parse(spl[parsedHeader[PsmTsvHeader.QValue]].Trim(), CultureInfo.InvariantCulture);

            // A "{"-prefixed matched-ion column means per-child-scan ion lists; in that
            // case the first child scan's ions are taken as this PSM's matched ions.
            MatchedIons = (spl[parsedHeader[PsmTsvHeader.MatchedIonMzRatios]].StartsWith("{")) ?
                ReadChildScanMatchedIons(spl[parsedHeader[PsmTsvHeader.MatchedIonMzRatios]].Trim(), spl[parsedHeader[PsmTsvHeader.MatchedIonIntensities]].Trim(), BaseSeq).First().Value :
                ReadFragmentIonsFromString(spl[parsedHeader[PsmTsvHeader.MatchedIonMzRatios]].Trim(), spl[parsedHeader[PsmTsvHeader.MatchedIonIntensities]].Trim(), BaseSeq);

            AmbiguityLevel = (parsedHeader[PsmTsvHeader.AmbiguityLevel] < 0) ? null : spl[parsedHeader[PsmTsvHeader.AmbiguityLevel]].Trim();

            //For general psms
            TotalIonCurrent = (parsedHeader[PsmTsvHeader.TotalIonCurrent] < 0) ? null : (double?)double.Parse(spl[parsedHeader[PsmTsvHeader.TotalIonCurrent]].Trim(), CultureInfo.InvariantCulture);
            DeltaScore = (parsedHeader[PsmTsvHeader.DeltaScore] < 0) ? null : (double?)double.Parse(spl[parsedHeader[PsmTsvHeader.DeltaScore]].Trim(), CultureInfo.InvariantCulture);
            Notch = (parsedHeader[PsmTsvHeader.Notch] < 0) ? null : spl[parsedHeader[PsmTsvHeader.Notch]].Trim();
            EssentialSeq = (parsedHeader[PsmTsvHeader.EssentialSequence] < 0) ? null : spl[parsedHeader[PsmTsvHeader.EssentialSequence]].Trim();
            MissedCleavage = (parsedHeader[PsmTsvHeader.MissedCleavages] < 0) ? null : spl[parsedHeader[PsmTsvHeader.MissedCleavages]].Trim();
            MassDiffDa = (parsedHeader[PsmTsvHeader.MassDiffDa] < 0) ? null : spl[parsedHeader[PsmTsvHeader.MassDiffDa]].Trim();
            MassDiffPpm = (parsedHeader[PsmTsvHeader.MassDiffPpm] < 0) ? null : spl[parsedHeader[PsmTsvHeader.MassDiffPpm]].Trim();
            ProteinAccession = (parsedHeader[PsmTsvHeader.ProteinAccession] < 0) ? null : spl[parsedHeader[PsmTsvHeader.ProteinAccession]].Trim();
            ProteinName = (parsedHeader[PsmTsvHeader.ProteinName] < 0) ? null : spl[parsedHeader[PsmTsvHeader.ProteinName]].Trim();
            GeneName = (parsedHeader[PsmTsvHeader.GeneName] < 0) ? null : spl[parsedHeader[PsmTsvHeader.GeneName]].Trim();
            OrganismName = (parsedHeader[PsmTsvHeader.OrganismName] < 0) ? null : spl[parsedHeader[PsmTsvHeader.OrganismName]].Trim();
            IntersectingSequenceVariations = (parsedHeader[PsmTsvHeader.IntersectingSequenceVariations] < 0) ? null : spl[parsedHeader[PsmTsvHeader.IntersectingSequenceVariations]].Trim();
            IdentifiedSequenceVariations = (parsedHeader[PsmTsvHeader.IdentifiedSequenceVariations] < 0) ? null : spl[parsedHeader[PsmTsvHeader.IdentifiedSequenceVariations]].Trim();
            SpliceSites = (parsedHeader[PsmTsvHeader.SpliceSites] < 0) ? null : spl[parsedHeader[PsmTsvHeader.SpliceSites]].Trim();
            // "PeptideDesicription" is the (misspelled) constant name in PsmTsvHeader; do not "fix" it here.
            PeptideDescription = (parsedHeader[PsmTsvHeader.PeptideDesicription] < 0) ? null : spl[parsedHeader[PsmTsvHeader.PeptideDesicription]].Trim();
            StartAndEndResiduesInProtein = (parsedHeader[PsmTsvHeader.StartAndEndResiduesInProtein] < 0) ? null : spl[parsedHeader[PsmTsvHeader.StartAndEndResiduesInProtein]].Trim();
            PreviousAminoAcid = (parsedHeader[PsmTsvHeader.PreviousAminoAcid] < 0) ? null : spl[parsedHeader[PsmTsvHeader.PreviousAminoAcid]].Trim();
            NextAminoAcid = (parsedHeader[PsmTsvHeader.NextAminoAcid] < 0) ? null : spl[parsedHeader[PsmTsvHeader.NextAminoAcid]].Trim();
            QValueNotch = (parsedHeader[PsmTsvHeader.QValueNotch] < 0) ? null : (double?)double.Parse(spl[parsedHeader[PsmTsvHeader.QValueNotch]].Trim(), CultureInfo.InvariantCulture);
            RetentionTime = (parsedHeader[PsmTsvHeader.Ms2ScanRetentionTime] < 0) ? null : (double?)double.Parse(spl[parsedHeader[PsmTsvHeader.Ms2ScanRetentionTime]].Trim(), CultureInfo.InvariantCulture);
            PEP = double.Parse(spl[parsedHeader[PsmTsvHeader.PEP]].Trim(), CultureInfo.InvariantCulture);
            PEP_QValue = double.Parse(spl[parsedHeader[PsmTsvHeader.PEP_QValue]].Trim(), CultureInfo.InvariantCulture);
            // must run after MatchedIons / StartAndEndResiduesInProtein / IdentifiedSequenceVariations are set
            VariantCrossingIons = findVariantCrossingIons();

            //For crosslinks
            CrossType = (parsedHeader[PsmTsvHeader.CrossTypeLabel] < 0) ? null : spl[parsedHeader[PsmTsvHeader.CrossTypeLabel]].Trim();
            LinkResidues = (parsedHeader[PsmTsvHeader.LinkResiduesLabel] < 0) ? null : spl[parsedHeader[PsmTsvHeader.LinkResiduesLabel]].Trim();
            // column may exist but be empty for non-crosslink rows, hence the nested null check
            ProteinLinkSite = (parsedHeader[PsmTsvHeader.ProteinLinkSiteLabel] < 0) ? null : (spl[parsedHeader[PsmTsvHeader.ProteinLinkSiteLabel]] == "" ? null : (int?)int.Parse(spl[parsedHeader[PsmTsvHeader.ProteinLinkSiteLabel]].Trim()));
            Rank = (parsedHeader[PsmTsvHeader.RankLabel] < 0) ? null : (int?)int.Parse(spl[parsedHeader[PsmTsvHeader.RankLabel]].Trim());
            BetaPeptideProteinAccession = (parsedHeader[PsmTsvHeader.BetaPeptideProteinAccessionLabel] < 0) ? null : spl[parsedHeader[PsmTsvHeader.BetaPeptideProteinAccessionLabel]].Trim();
            BetaPeptideProteinLinkSite = (parsedHeader[PsmTsvHeader.BetaPeptideProteinLinkSiteLabel] < 0) ? null : (spl[parsedHeader[PsmTsvHeader.BetaPeptideProteinLinkSiteLabel]] == "" ? null : (int?)int.Parse(spl[parsedHeader[PsmTsvHeader.BetaPeptideProteinLinkSiteLabel]].Trim()));
            BetaPeptideBaseSequence = (parsedHeader[PsmTsvHeader.BetaPeptideBaseSequenceLabel] < 0) ? null : spl[parsedHeader[PsmTsvHeader.BetaPeptideBaseSequenceLabel]].Trim();
            BetaPeptideFullSequence = (parsedHeader[PsmTsvHeader.BetaPeptideFullSequenceLabel] < 0) ? null : spl[parsedHeader[PsmTsvHeader.BetaPeptideFullSequenceLabel]].Trim();
            BetaPeptideTheoreticalMass = (parsedHeader[PsmTsvHeader.BetaPeptideTheoreticalMassLabel] < 0) ? null : spl[parsedHeader[PsmTsvHeader.BetaPeptideTheoreticalMassLabel]].Trim();
            BetaPeptideScore = (parsedHeader[PsmTsvHeader.BetaPeptideScoreLabel] < 0) ? null : (double?)double.Parse(spl[parsedHeader[PsmTsvHeader.BetaPeptideScoreLabel]].Trim(), CultureInfo.InvariantCulture);
            BetaPeptideRank = (parsedHeader[PsmTsvHeader.BetaPeptideRankLabel] < 0) ? null : (int?)int.Parse(spl[parsedHeader[PsmTsvHeader.BetaPeptideRankLabel]].Trim());
            // same "{"-prefix convention as MatchedIons above
            BetaPeptideMatchedIons = (parsedHeader[PsmTsvHeader.BetaPeptideMatchedIonsLabel] < 0) ? null :
                ((spl[parsedHeader[PsmTsvHeader.BetaPeptideMatchedIonsLabel]].StartsWith("{")) ?
                ReadChildScanMatchedIons(spl[parsedHeader[PsmTsvHeader.BetaPeptideMatchedIonsLabel]].Trim(), spl[parsedHeader[PsmTsvHeader.BetaPeptideMatchedIonIntensitiesLabel]].Trim(), BetaPeptideBaseSequence).First().Value :
                ReadFragmentIonsFromString(spl[parsedHeader[PsmTsvHeader.BetaPeptideMatchedIonsLabel]].Trim(), spl[parsedHeader[PsmTsvHeader.BetaPeptideMatchedIonIntensitiesLabel]].Trim(), BetaPeptideBaseSequence));
            XLTotalScore = (parsedHeader[PsmTsvHeader.XLTotalScoreLabel] < 0) ? null : (double?)double.Parse(spl[parsedHeader[PsmTsvHeader.XLTotalScoreLabel]].Trim(), CultureInfo.InvariantCulture);
            ParentIons = (parsedHeader[PsmTsvHeader.ParentIonsLabel] < 0) ? null : spl[parsedHeader[PsmTsvHeader.ParentIonsLabel]].Trim();

            // child scan matched ions (only for crosslinks for now, but in the future this will change)
            ChildScanMatchedIons = (!spl[parsedHeader[PsmTsvHeader.MatchedIonMzRatios]].StartsWith("{")) ? null :
                ReadChildScanMatchedIons(spl[parsedHeader[PsmTsvHeader.MatchedIonMzRatios]].Trim(), spl[parsedHeader[PsmTsvHeader.MatchedIonIntensities]].Trim(), BaseSeq);
            // the parent scan's own entry is not a "child" — drop it
            if (ChildScanMatchedIons != null && ChildScanMatchedIons.ContainsKey(Ms2ScanNumber))
            {
                ChildScanMatchedIons.Remove(Ms2ScanNumber);
            }

            // beta peptide child scan matched ions (for crosslinks)
            BetaPeptideChildScanMatchedIons = (parsedHeader[PsmTsvHeader.BetaPeptideMatchedIonsLabel] < 0) ? null :
                ((!spl[parsedHeader[PsmTsvHeader.BetaPeptideMatchedIonsLabel]].StartsWith("{")) ? null :
                ReadChildScanMatchedIons(spl[parsedHeader[PsmTsvHeader.BetaPeptideMatchedIonsLabel]].Trim(), spl[parsedHeader[PsmTsvHeader.BetaPeptideMatchedIonIntensitiesLabel]].Trim(), BetaPeptideBaseSequence));
            if (BetaPeptideChildScanMatchedIons != null && BetaPeptideChildScanMatchedIons.ContainsKey(Ms2ScanNumber))
            {
                BetaPeptideChildScanMatchedIons.Remove(Ms2ScanNumber);
            }

            //For Glyco
            GlycanMass = (parsedHeader[PsmTsvHeader_Glyco.GlycanMass] < 0) ? null : (double?)double.Parse(spl[parsedHeader[PsmTsvHeader_Glyco.GlycanMass]], CultureInfo.InvariantCulture);
            GlycanComposition = (parsedHeader[PsmTsvHeader_Glyco.GlycanComposition] < 0) ? null : spl[parsedHeader[PsmTsvHeader_Glyco.GlycanComposition]];
            GlycanStructure = (parsedHeader[PsmTsvHeader_Glyco.GlycanStructure] < 0) ? null : spl[parsedHeader[PsmTsvHeader_Glyco.GlycanStructure]];
            var localizationLevel = (parsedHeader[PsmTsvHeader_Glyco.GlycanLocalizationLevel] < 0) ? null : spl[parsedHeader[PsmTsvHeader_Glyco.GlycanLocalizationLevel]];
            if (localizationLevel != null)
            {
                GlycanLocalizationLevel = (LocalizationLevel)Enum.Parse(typeof(LocalizationLevel), localizationLevel);
            }
            LocalizedGlycan = (parsedHeader[PsmTsvHeader_Glyco.LocalizedGlycan] < 0) ? null : spl[parsedHeader[PsmTsvHeader_Glyco.LocalizedGlycan]];
        }

        //Used to remove Silac labels for proper annotation
        /// <summary>
        /// Strips every parenthesized region (and the parentheses themselves) from a
        /// base sequence, e.g. "PEPT(label)IDE" -> "PEPTIDE". Returns the input
        /// unchanged when it contains no "(".
        /// NOTE(review): nesting is not tracked — a ")" always exits; assumed labels never nest.
        /// </summary>
        public static string RemoveParentheses(string baseSequence)
        {
            if (baseSequence.Contains("("))
            {
                string updatedBaseSequence = "";
                bool withinParentheses = false;
                foreach (char c in baseSequence)
                {
                    if (c == ')') //leaving the parentheses
                    {
                        withinParentheses = false;
                    }
                    else if (c == '(') //entering the parentheses
                    {
                        withinParentheses = true;
                    }
                    else if (!withinParentheses) //if outside the parentheses, preserve this amino acid
                    {
                        updatedBaseSequence += c;
                    }
                    //else do nothing
                }
                return updatedBaseSequence;
            }
            return baseSequence;
        }

        /// <summary>
        /// Parses a serialized matched-ion m/z list (with its parallel intensity list)
        /// back into MatchedFragmentIon objects. Each ion is written as
        /// "annotation+charge:mz"; internal fragments look like "bIy[3-7]" and neutral
        /// losses like "b3-18.01". When the intensity list length does not match the
        /// m/z list, intensities default to 1.0.
        /// </summary>
        private static List<MatchedFragmentIon> ReadFragmentIonsFromString(string matchedMzString, string matchedIntensityString, string peptideBaseSequence)
        {
            List<MatchedFragmentIon> matchedIons = new List<MatchedFragmentIon>();
            if (matchedMzString.Length > 2) //check if there's an ion
            {
                List<string> peakMzs = CleanMatchedIonString(matchedMzString);
                List<string> peakIntensities = CleanMatchedIonString(matchedIntensityString);
                for (int index = 0; index < peakMzs.Count; index++)
                {
                    string peak = peakMzs[index];
                    string[] split = peak.Split(new char[] { '+', ':' }); //TODO: needs update for negative charges that doesn't break internal fragment ions or neutral losses
                    double intensity = peakMzs.Count == peakIntensities.Count ? //TODO: needs update for negative charges that doesn't break internal fragment ions or neutral losses
                        double.Parse(peakIntensities[index].Split(new char[] { '+', ':', ']' })[2], CultureInfo.InvariantCulture) :
                        1.0;
                    int fragmentNumber = 0;
                    int secondaryFragmentNumber = 0;
                    ProductType productType;
                    ProductType? secondaryProductType = null;
                    FragmentationTerminus terminus = FragmentationTerminus.None; //default for internal fragments
                    int aminoAcidPosition;
                    double neutralLoss = 0;

                    //get theoretical fragment
                    string ionTypeAndNumber = split[0];

                    //if an internal fragment
                    if (ionTypeAndNumber.Contains("["))
                    {
                        // e.g. "bIy[3-7]": primary type "b", secondary type "y", residues 3..7
                        string[] internalSplit = split[0].Split('[');
                        string[] productSplit = internalSplit[0].Split("I");
                        string[] positionSplit = internalSplit[1].Replace("]", "").Split('-');
                        productType = (ProductType)Enum.Parse(typeof(ProductType), productSplit[0]);
                        secondaryProductType = (ProductType)Enum.Parse(typeof(ProductType), productSplit[1]);
                        fragmentNumber = int.Parse(positionSplit[0]);
                        secondaryFragmentNumber = int.Parse(positionSplit[1]);
                        aminoAcidPosition = secondaryFragmentNumber - fragmentNumber;
                    }
                    else //terminal fragment
                    {
                        Match result = IonParser.Match(ionTypeAndNumber);
                        productType = (ProductType)Enum.Parse(typeof(ProductType), result.Groups[1].Value);
                        fragmentNumber = int.Parse(result.Groups[2].Value);

                        // check for neutral loss
                        if (ionTypeAndNumber.Contains("("))
                        {
                            // e.g. "b3(-18.01)": the value after '-' is the neutral-loss mass
                            string temp = ionTypeAndNumber.Replace("(", "");
                            temp = temp.Replace(")", "");
                            var split2 = temp.Split('-');
                            neutralLoss = double.Parse(split2[1], CultureInfo.InvariantCulture);
                        }

                        //get terminus
                        if (TerminusSpecificProductTypes.ProductTypeToFragmentationTerminus.ContainsKey(productType))
                        {
                            terminus = TerminusSpecificProductTypes.ProductTypeToFragmentationTerminus[productType];
                        }

                        //get amino acid position
                        // C-terminal ions count from the peptide's C terminus, so convert to an N-terminal index
                        aminoAcidPosition = terminus == FragmentationTerminus.C ?
                            peptideBaseSequence.Length - fragmentNumber :
                            fragmentNumber;
                    }

                    //get charge and mz
                    int z = int.Parse(split[1]);
                    double mz = double.Parse(split[2], CultureInfo.InvariantCulture);

                    Product p = new Product(productType, terminus, mz.ToMass(z), fragmentNumber, aminoAcidPosition, neutralLoss, secondaryProductType, secondaryFragmentNumber);
                    matchedIons.Add(new MatchedFragmentIon(ref p, mz, intensity, z));
                }
            }
            return matchedIons;
        }

        /// <summary>
        /// Normalizes a serialized ion list: strips the outer brackets, merges multiple
        /// ion series into a single comma-delimited list, and drops quoted/empty entries.
        /// </summary>
        private static List<string> CleanMatchedIonString(string input)
        {
            List<string> ionProperty = input.Substring(1, input.Length - 2) //remove the brackets on the ends
                .Replace("];[", ", ") //replace delimiter between ion series with the delimiter used between ions
                //.Replace("[","")
                //.Replace("]","")
                .Split(", ") //split by delimiter between ions
                .ToList();
            ionProperty.RemoveAll(p => p.Contains("\"") || p.Equals(""));
            return ionProperty;
        }

        /// <summary>
        /// Parses per-child-scan matched-ion strings of the form "{scanNumber@ions}{...}"
        /// into a scan-number -> matched-ions map.
        /// NOTE(review): the same full intensity string is passed for every child scan —
        /// presumably intentional (length mismatch makes intensities default to 1.0); verify.
        /// </summary>
        private static Dictionary<int, List<MatchedFragmentIon>> ReadChildScanMatchedIons(string childScanMatchedMzString, string childScanMatchedIntensitiesString, string peptideBaseSequence)
        {
            var childScanMatchedIons = new Dictionary<int, List<MatchedFragmentIon>>();

            foreach (var childScan in childScanMatchedMzString.Split(new char[] { '}' }).Where(p => !string.IsNullOrWhiteSpace(p)))
            {
                var split1 = childScan.Split(new char[] { '@' });
                int scanNumber = int.Parse(split1[0].Trim(new char[] { '{' }));
                string matchedIonsString = split1[1];
                var childMatchedIons = ReadFragmentIonsFromString(matchedIonsString, childScanMatchedIntensitiesString, peptideBaseSequence);
                childScanMatchedIons.Add(scanNumber, childMatchedIons);
            }

            return childScanMatchedIons;
        }

        // finds the ions that contain variant residues using the position in IdentifiedSequenceVariations. When the variation spans
        // multiple residues, if any part is contained in an ion, the ion is marked as variant crossing.
        private List<MatchedFragmentIon> findVariantCrossingIons()
        {
            List<MatchedFragmentIon> variantCrossingIons = new List<MatchedFragmentIon>();

            if (StartAndEndResiduesInProtein != null && IdentifiedSequenceVariations != null)
            {
                Match positionMatch = PositionParser.Match(StartAndEndResiduesInProtein);
                Match variantMatch = VariantParser.Match(IdentifiedSequenceVariations);
                if (positionMatch.Success && variantMatch.Success)
                {
                    // N-terminal ion series contain residues counted from the peptide start...
                    List<ProductType> abcProductTypes = new List<ProductType>() { ProductType.a, ProductType.aDegree, ProductType.aStar,
                                                                ProductType.b, ProductType.bDegree, ProductType.bStar, ProductType.c };
                    // ...C-terminal series contain residues counted from the peptide end.
                    List<ProductType> xyzProductTypes = new List<ProductType>() { ProductType.x, ProductType.y, ProductType.yDegree,
                                                                ProductType.yStar, ProductType.zDot, ProductType.zPlusOne};

                    int peptideStart = int.Parse(positionMatch.Groups[1].Value);
                    int peptideEnd = int.Parse(positionMatch.Groups[2].Value);
                    int variantResidueStart = int.Parse(variantMatch.Groups[1].Value);
                    int variantResidueEnd = variantResidueStart + variantMatch.Groups[2].Value.Length - 1;

                    foreach (MatchedFragmentIon ion in MatchedIons)
                    {
                        Match ionMatch = IonParser.Match(ion.Annotation);
                        if (ionMatch.Success &&
                            (variantResidueEnd >= peptideStart && variantResidueStart <= peptideEnd) &&     // variant is within peptide
                            ((abcProductTypes.Contains(ion.NeutralTheoreticalProduct.ProductType) &&        // type a, b, or c
                              peptideStart + int.Parse(ionMatch.Groups[2].Value) > variantResidueStart) ||  // crosses variant
                             (xyzProductTypes.Contains(ion.NeutralTheoreticalProduct.ProductType) &&        // type x, y, or z
                              peptideEnd - int.Parse(ionMatch.Groups[2].Value) < variantResidueEnd)))       // crosses variant
                        {
                            variantCrossingIons.Add(ion);
                        }
                    }
                }
            }
            return variantCrossingIons;
        }

        /// <summary>
        /// Parses a localized-glycan string of the form "[pos,glycanId,probability][...]"
        /// into (position, glycan id, probability) tuples. Returns an empty list for null input.
        /// </summary>
        public static List<Tuple<int, string, double>> ReadLocalizedGlycan(string localizedGlycan)
        {
            List<Tuple<int, string, double>> tuples = new List<Tuple<int, string, double>>();
            if (localizedGlycan == null)
            {
                return tuples;
            }
            var lgs = localizedGlycan.Split(new string[] { "[", "]" }, StringSplitOptions.RemoveEmptyEntries);
            foreach (var lg in lgs)
            {
                var g = lg.Split(',', StringSplitOptions.RemoveEmptyEntries);

                Tuple<int, string, double> tuple = new Tuple<int, string, double>(int.Parse(g[0], CultureInfo.InvariantCulture), g[1], double.Parse(g[2], CultureInfo.InvariantCulture));

                tuples.Add(tuple);
            }

            return tuples;
        }
    }
}
#region Copyright
//=======================================================================================
// Microsoft Azure Customer Advisory Team
//
// This sample is supplemental to the technical guidance published on my personal
// blog at http://blogs.msdn.com/b/paolos/.
//
// Author: Paolo Salvatori
//=======================================================================================
// Copyright (c) Microsoft Corporation. All rights reserved.
//
// LICENSED UNDER THE APACHE LICENSE, VERSION 2.0 (THE "LICENSE"); YOU MAY NOT USE THESE
// FILES EXCEPT IN COMPLIANCE WITH THE LICENSE. YOU MAY OBTAIN A COPY OF THE LICENSE AT
// http://www.apache.org/licenses/LICENSE-2.0
// UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING, SOFTWARE DISTRIBUTED UNDER THE
// LICENSE IS DISTRIBUTED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, EITHER EXPRESS OR IMPLIED. SEE THE LICENSE FOR THE SPECIFIC LANGUAGE GOVERNING
// PERMISSIONS AND LIMITATIONS UNDER THE LICENSE.
//=======================================================================================
#endregion

#region Using Directives
using System;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Drawing.Design;
using System.Linq;
using System.Reflection;
using System.Resources;
using ServiceBusExplorer.Helpers;
using ServiceBusExplorer.Utilities.Helpers;
#endregion

namespace ServiceBusExplorer.UIHelpers
{
    /// <summary>
    /// A CustomTypeDescriptor that lets properties be added, removed and re-sorted at
    /// runtime (typically for display in a PropertyGrid). Property metadata is cached
    /// in a mutable list of CustomPropertyDescriptor, and category ordering is forced
    /// by appending invisible tab characters to category names (TabAppendCount).
    /// </summary>
    public class DynamicCustomTypeDescriptor : CustomTypeDescriptor
    {
        #region Private Fields
        // Cached descriptors; populated lazily by GetProperties().
        private readonly PropertyDescriptorList propertyDescriptorList = new PropertyDescriptorList();
        // The object instance being described.
        private readonly object instance;
        // Cache of ResourceManager per resource attribute, so each is built only once.
        private readonly Hashtable hashRM = new Hashtable();
        private CustomSortOrder propertySortOrder = CustomSortOrder.AscendingById;
        private CustomSortOrder categorySortOrder = CustomSortOrder.AscendingById;
        private ISite site = null;
        #endregion

        // NOTE(review): this region is mislabeled — it contains the constructor, not properties.
        #region Public Properties
        public DynamicCustomTypeDescriptor(ICustomTypeDescriptor ctd, object instance) : base(ctd)
        {
            this.instance = instance;
            // eagerly populate the descriptor cache
            GetProperties();
        }
        #endregion

        #region Public Properties
        // Sort order applied to properties on every GetProperties() call.
        public CustomSortOrder PropertySortOrder
        {
            get
            {
                return propertySortOrder;
            }
            set
            {
                propertySortOrder = value;
            }
        }

        // Sort order applied to categories (implemented via tab-prefix counts).
        public CustomSortOrder CategorySortOrder
        {
            get
            {
                return categorySortOrder;
            }
            set
            {
                categorySortOrder = value;
            }
        }
        #endregion

        #region Public Methods
        /// <summary>
        /// Returns the cached descriptor with the given name, or null if absent.
        /// NOTE(review): String.Compare(..., true) is culture-sensitive; OrdinalIgnoreCase
        /// would be safer for identifier comparison — confirm no caller relies on culture rules.
        /// </summary>
        public CustomPropertyDescriptor GetProperty(string propertyName)
        {
            var cpd = propertyDescriptorList.FirstOrDefault(a => String.Compare(a.Name, propertyName, true) == 0);
            return cpd;
        }

        /// <summary>
        /// Creates a new runtime property and inserts it at <paramref name="index"/>
        /// (or appends when index is -1), then refreshes the TypeDescriptor cache.
        /// </summary>
        public CustomPropertyDescriptor CreateProperty(string name, Type type, object value, int index, params Attribute[] attributes)
        {
            var cpd = new CustomPropertyDescriptor(instance, name, type, value, attributes);

            if (index == -1)
            {
                propertyDescriptorList.Add(cpd);
            }
            else
            {
                propertyDescriptorList.Insert(index, cpd);
            }
            TypeDescriptor.Refresh(instance);
            return cpd;
        }

        /// <summary>
        /// Removes the named property from the cache (case-insensitive lookup) and
        /// refreshes the TypeDescriptor cache. Returns false when no match was found.
        /// </summary>
        public bool RemoveProperty(string propertyName)
        {
            var cpd = propertyDescriptorList.FirstOrDefault(a => String.Compare(a.Name, propertyName, true) == 0);
            var bReturn = propertyDescriptorList.Remove(cpd);
            TypeDescriptor.Refresh(instance);
            return bReturn;
        }

        /// <summary>
        /// Discards the cached descriptors and rebuilds them from the underlying type descriptor.
        /// </summary>
        public void ResetProperties()
        {
            propertyDescriptorList.Clear();
            GetProperties();
        }

        /// <summary>
        /// Returns the cached descriptors that carry all of the given attributes,
        /// sorted/pre-processed per the configured sort orders.
        /// </summary>
        public override PropertyDescriptorCollection GetProperties(Attribute[] attributes)
        {
            var pdl = propertyDescriptorList.FindAll(pd => pd.Attributes.Contains(attributes));
            PreProcess(pdl);
            var pdcReturn = new PropertyDescriptorCollection(pdl.ToArray());
            return pdcReturn;
        }

        /// <summary>
        /// Lazily wraps the base type's descriptors in CustomPropertyDescriptor on first
        /// call, then returns the full (pre-processed) cached set. Sealed because it is
        /// invoked from the constructor.
        /// NOTE(review): PreProcess sorts the copy `pdl`, but the returned collection is
        /// built from propertyDescriptorList — the sort only takes effect via the
        /// TabAppendCount/resource side effects, not element order; confirm intended.
        /// </summary>
        public override sealed PropertyDescriptorCollection GetProperties()
        {
            if (propertyDescriptorList.Count == 0)
            {
                var pdc = base.GetProperties(); // this gives us a readonly collection, no good
                foreach (PropertyDescriptor pd in pdc)
                {
                    if (!(pd is CustomPropertyDescriptor))
                    {
                        var cpd = new CustomPropertyDescriptor(GetPropertyOwner(pd), pd);
                        propertyDescriptorList.Add(cpd);
                    }
                }
            }
            var pdl = propertyDescriptorList.FindAll(pd => pd != null);
            PreProcess(pdl);
            var pdcReturn = new PropertyDescriptorCollection(propertyDescriptorList.ToArray());
            return pdcReturn;
        }
        #endregion

        #region Private Methods
        /// <summary>
        /// Sorts the given descriptor list per PropertySortOrder, then updates category
        /// tab counts and resource managers across the whole cache.
        /// </summary>
        private void PreProcess(List<CustomPropertyDescriptor> pdl)
        {
            if (propertySortOrder != CustomSortOrder.None && pdl.Count > 0)
            {
                var propSorter = new PropertySorter();
                propSorter.SortOrder = propertySortOrder;
                pdl.Sort(propSorter);
            }
            UpdateCategoryTabAppendCount();
            UpdateResourceManager();
        }

        /// <summary>
        /// Assigns each descriptor a ResourceManager for localized display strings:
        /// a property-level PropertyResourceAttribute wins; otherwise the class-level
        /// ClassResourceAttribute is used; otherwise the manager is cleared.
        /// Managers are cached per attribute in hashRM; load failures are logged to the
        /// console and skipped (best effort by design).
        /// </summary>
        private void UpdateResourceManager()
        {
            foreach (var cpd in propertyDescriptorList)
            {
                IResourceAttribute attr = (PropertyResourceAttribute)cpd.AllAttributes.FirstOrDefault(a => a is PropertyResourceAttribute);
                if (attr == null)
                {
                    // fall back to the class-level resource attribute
                    var ac = GetAttributes();
                    var al = new AttributeList(ac);
                    attr = (ClassResourceAttribute)al.FirstOrDefault(a => a is ClassResourceAttribute);
                }
                if (attr == null)
                {
                    cpd.ResourceManager = null;
                    continue;
                }
                cpd.KeyPrefix = attr.KeyPrefix;
                var rm = hashRM[attr] as ResourceManager;
                if (rm != null)
                {
                    // already built for this attribute — reuse
                    cpd.ResourceManager = rm;
                    continue;
                }
                try
                {
                    if (String.IsNullOrWhiteSpace(attr.AssemblyFullName) == false)
                    {
                        // NOTE(review): Assembly.ReflectionOnlyLoad is obsolete/unsupported on
                        // .NET Core+; fine on .NET Framework, where this tool runs.
                        rm = new ResourceManager(attr.BaseName, Assembly.ReflectionOnlyLoad(attr.AssemblyFullName));
                    }
                    else
                    {
                        rm = new ResourceManager(attr.BaseName, GetPropertyOwner(cpd).GetType().Assembly);
                    }
                }
                catch (Exception ex)
                {
                    Console.WriteLine(ex.Message);
                    continue;
                }
                hashRM.Add(attr, rm);
                cpd.ResourceManager = rm;
            }
        }

        /// <summary>
        /// Computes TabAppendCount for every descriptor: the PropertyGrid sorts category
        /// names ascending, so prefixing N tab characters forces a custom category order.
        /// Descriptors are sorted on a copy in the *opposite* of the requested order so
        /// that categories meant to appear first receive the most tabs.
        /// </summary>
        private void UpdateCategoryTabAppendCount()
        {
            // get a copy of the list as we do not want to sort around the actual list
            var pdl = propertyDescriptorList.FindAll(pd => pd != null);
            if (pdl.Count == 0)
            {
                return;
            }

            var propSorter = new CategorySorter();
            int nTabCount;

            switch (categorySortOrder)
            {
                case CustomSortOrder.AscendingById:
                    propSorter.SortOrder = CustomSortOrder.DescendingById;
                    pdl.Sort(propSorter);
                    nTabCount = 0;
                    var sortIndex = pdl[0].CategoryId;
                    // bump the tab count each time the category id changes
                    foreach (var cpd in pdl)
                    {
                        if (cpd.CategoryId == sortIndex)
                        {
                            cpd.TabAppendCount = nTabCount;
                        }
                        else
                        {
                            sortIndex = cpd.CategoryId;
                            nTabCount++;
                            cpd.TabAppendCount = nTabCount;
                        }
                    }
                    break;

                case CustomSortOrder.None:
                case CustomSortOrder.AscendingByName:
                    // by default, property grid sorts the category ascendingly
                    foreach (var cpd in propertyDescriptorList)
                    {
                        cpd.TabAppendCount = 0;
                    }
                    break;

                case CustomSortOrder.DescendingById:
                    propSorter.SortOrder = CustomSortOrder.AscendingById;
                    pdl.Sort(propSorter);
                    nTabCount = 0;
                    var nCategorySortIndex = pdl[0].CategoryId;
                    foreach (var cpd in pdl)
                    {
                        if (nCategorySortIndex == cpd.CategoryId)
                        {
                            cpd.TabAppendCount = nTabCount;
                        }
                        else
                        {
                            nCategorySortIndex = cpd.CategoryId;
                            nTabCount++;
                            cpd.TabAppendCount = nTabCount;
                        }
                    }
                    break;

                case CustomSortOrder.DescendingByName:
                    propSorter.SortOrder = CustomSortOrder.AscendingByName;
                    pdl.Sort(propSorter);
                    nTabCount = 0;
                    pdl[0].TabAppendCount = 0;
                    var sCat = pdl[0].Category;
                    // NOTE(review): culture-sensitive String.Compare again — see GetProperty.
                    foreach (var cpd in pdl)
                    {
                        cpd.TabAppendCount = 0;
                        if (String.Compare(sCat, cpd.Category) == 0)
                        {
                            cpd.TabAppendCount = nTabCount;
                        }
                        else
                        {
                            sCat = cpd.Category;
                            nTabCount++;
                            cpd.TabAppendCount = nTabCount;
                        }
                    }
                    break;
            }
        }

        /// <summary>
        /// Lazily creates (once) and returns a site exposing IPropertyValueUIService,
        /// wired to GenericPropertyValueUIHandler for per-property UI items.
        /// </summary>
        public ISite GetSite()
        {
            if (site == null)
            {
                var newSite = new SimpleSite();
                IPropertyValueUIService service = new PropertyValueUIService();
                service.AddPropertyValueUIHandler(new PropertyValueUIHandler(GenericPropertyValueUIHandler));
                newSite.AddService(service);
                site = newSite;
            }
            return site;
        }

        // Copies a CustomPropertyDescriptor's StateItems into the PropertyGrid's UI item list.
        private void GenericPropertyValueUIHandler(ITypeDescriptorContext context, PropertyDescriptor propDesc, ArrayList itemList)
        {
            var cpd = propDesc as CustomPropertyDescriptor;
            if (cpd != null && cpd.StateItems != null)
            {
                itemList.AddRange(cpd.StateItems as ICollection);
            }
        }
        #endregion
    }
}
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. // This program uses code hyperlinks available as part of the HyperAddin Visual Studio plug-in. // It is available from http://www.codeplex.com/hyperAddin #if PLATFORM_WINDOWS #define FEATURE_MANAGED_ETW #if !ES_BUILD_STANDALONE #define FEATURE_ACTIVITYSAMPLING #endif #endif // PLATFORM_WINDOWS #if ES_BUILD_STANDALONE #define FEATURE_MANAGED_ETW_CHANNELS // #define FEATURE_ADVANCED_MANAGED_ETW_CHANNELS #endif #if ES_BUILD_STANDALONE using Environment = Microsoft.Diagnostics.Tracing.Internal.Environment; using EventDescriptor = Microsoft.Diagnostics.Tracing.EventDescriptor; #endif using System; using System.Runtime.InteropServices; using System.Security; using System.Collections.ObjectModel; #if !ES_BUILD_AGAINST_DOTNET_V35 using Contract = System.Diagnostics.Contracts.Contract; using System.Collections.Generic; using System.Text; #else using Contract = Microsoft.Diagnostics.Contracts.Internal.Contract; using System.Collections.Generic; using System.Text; #endif #if ES_BUILD_STANDALONE namespace Microsoft.Diagnostics.Tracing #else namespace System.Diagnostics.Tracing #endif { public partial class EventSource { #if FEATURE_MANAGED_ETW private byte[] providerMetadata; #endif /// <summary> /// Construct an EventSource with a given name for non-contract based events (e.g. those using the Write() API). /// </summary> /// <param name="eventSourceName"> /// The name of the event source. Must not be null. /// </param> public EventSource( string eventSourceName) : this(eventSourceName, EventSourceSettings.EtwSelfDescribingEventFormat) { } /// <summary> /// Construct an EventSource with a given name for non-contract based events (e.g. those using the Write() API). /// </summary> /// <param name="eventSourceName"> /// The name of the event source. Must not be null. 
        /// </param>
        /// <param name="config">
        /// Configuration options for the EventSource as a whole.
        /// </param>
        public EventSource(
            string eventSourceName,
            EventSourceSettings config)
            : this(eventSourceName, config, null)
        { }

        /// <summary>
        /// Construct an EventSource with a given name for non-contract based events (e.g. those using the Write() API).
        ///
        /// Also specify a list of key-value pairs called traits (you must pass an even number of strings).
        /// The first string is the key and the second is the value. These are not interpreted by EventSource
        /// itself but may be interpreted by the listeners. Can be fetched with GetTrait(string).
        /// </summary>
        /// <param name="eventSourceName">
        /// The name of the event source. Must not be null.
        /// </param>
        /// <param name="config">
        /// Configuration options for the EventSource as a whole.
        /// </param>
        /// <param name="traits">A collection of key-value strings (must be an even number).</param>
        public EventSource(
            string eventSourceName,
            EventSourceSettings config,
            params string[] traits)
            : this(
                // Null name is tolerated here only so the chained ctor can run; the
                // body below then throws ArgumentNullException for the null case.
                eventSourceName == null ? new Guid() : GenerateGuidFromName(eventSourceName.ToUpperInvariant()),
                eventSourceName,
                config,
                traits)
        {
            if (eventSourceName == null)
            {
                throw new ArgumentNullException(nameof(eventSourceName));
            }
            Contract.EndContractBlock();
        }

        /// <summary>
        /// Writes an event with no fields and default options.
        /// (Native API: EventWriteTransfer)
        /// </summary>
        /// <param name="eventName">The name of the event. Must not be null.</param>
        public unsafe void Write(string eventName)
        {
            if (eventName == null)
            {
                throw new ArgumentNullException(nameof(eventName));
            }
            Contract.EndContractBlock();

            if (!this.IsEnabled())
            {
                return;
            }

            var options = new EventSourceOptions();
            this.WriteImpl(eventName, ref options, null, null, null, SimpleEventTypes<EmptyStruct>.Instance);
        }

        /// <summary>
        /// Writes an event with no fields.
        /// (Native API: EventWriteTransfer)
        /// </summary>
        /// <param name="eventName">The name of the event. Must not be null.</param>
        /// <param name="options">
        /// Options for the event, such as the level, keywords, and opcode. Unset
        /// options will be set to default values.
        /// </param>
        public unsafe void Write(string eventName, EventSourceOptions options)
        {
            if (eventName == null)
            {
                throw new ArgumentNullException(nameof(eventName));
            }
            Contract.EndContractBlock();

            if (!this.IsEnabled())
            {
                return;
            }

            this.WriteImpl(eventName, ref options, null, null, null, SimpleEventTypes<EmptyStruct>.Instance);
        }

        /// <summary>
        /// Writes an event.
        /// (Native API: EventWriteTransfer)
        /// </summary>
        /// <typeparam name="T">
        /// The type that defines the event and its payload. This must be an
        /// anonymous type or a type with an [EventData] attribute.
        /// </typeparam>
        /// <param name="eventName">
        /// The name for the event. If null, the event name is automatically
        /// determined based on T, either from the Name property of T's EventData
        /// attribute or from typeof(T).Name.
        /// </param>
        /// <param name="data">
        /// The object containing the event payload data. The type T must be
        /// an anonymous type or a type with an [EventData] attribute. The
        /// public instance properties of data will be written recursively to
        /// create the fields of the event.
        /// </param>
        public unsafe void Write<T>(
            string eventName,
            T data)
        {
            if (!this.IsEnabled())
            {
                return;
            }

            var options = new EventSourceOptions();
            this.WriteImpl(eventName, ref options, data, null, null, SimpleEventTypes<T>.Instance);
        }

        /// <summary>
        /// Writes an event.
        /// (Native API: EventWriteTransfer)
        /// </summary>
        /// <typeparam name="T">
        /// The type that defines the event and its payload. This must be an
        /// anonymous type or a type with an [EventData] attribute.
        /// </typeparam>
        /// <param name="eventName">
        /// The name for the event. If null, the event name is automatically
        /// determined based on T, either from the Name property of T's EventData
        /// attribute or from typeof(T).Name.
        /// </param>
        /// <param name="options">
        /// Options for the event, such as the level, keywords, and opcode. Unset
        /// options will be set to default values.
        /// </param>
        /// <param name="data">
        /// The object containing the event payload data. The type T must be
        /// an anonymous type or a type with an [EventData] attribute. The
        /// public instance properties of data will be written recursively to
        /// create the fields of the event.
        /// </param>
        public unsafe void Write<T>(
            string eventName,
            EventSourceOptions options,
            T data)
        {
            if (!this.IsEnabled())
            {
                return;
            }

            this.WriteImpl(eventName, ref options, data, null, null, SimpleEventTypes<T>.Instance);
        }

        /// <summary>
        /// Writes an event.
        /// This overload is for use with extension methods that wish to efficiently
        /// forward the options or data parameter without performing an extra copy.
        /// (Native API: EventWriteTransfer)
        /// </summary>
        /// <typeparam name="T">
        /// The type that defines the event and its payload. This must be an
        /// anonymous type or a type with an [EventData] attribute.
        /// </typeparam>
        /// <param name="eventName">
        /// The name for the event. If null, the event name is automatically
        /// determined based on T, either from the Name property of T's EventData
        /// attribute or from typeof(T).Name.
        /// </param>
        /// <param name="options">
        /// Options for the event, such as the level, keywords, and opcode. Unset
        /// options will be set to default values.
        /// </param>
        /// <param name="data">
        /// The object containing the event payload data. The type T must be
        /// an anonymous type or a type with an [EventData] attribute. The
        /// public instance properties of data will be written recursively to
        /// create the fields of the event.
        /// </param>
        public unsafe void Write<T>(
            string eventName,
            ref EventSourceOptions options,
            ref T data)
        {
            if (!this.IsEnabled())
            {
                return;
            }

            this.WriteImpl(eventName, ref options, data, null, null, SimpleEventTypes<T>.Instance);
        }

        /// <summary>
        /// Writes an event.
        /// This overload is meant for clients that need to manipulate the activityId
        /// and related ActivityId for the event.
        /// </summary>
        /// <typeparam name="T">
        /// The type that defines the event and its payload. This must be an
        /// anonymous type or a type with an [EventData] attribute.
        /// </typeparam>
        /// <param name="eventName">
        /// The name for the event. If null, the event name is automatically
        /// determined based on T, either from the Name property of T's EventData
        /// attribute or from typeof(T).Name.
        /// </param>
        /// <param name="options">
        /// Options for the event, such as the level, keywords, and opcode. Unset
        /// options will be set to default values.
        /// </param>
        /// <param name="activityId">
        /// The GUID of the activity associated with this event.
        /// </param>
        /// <param name="relatedActivityId">
        /// The GUID of another activity that is related to this activity, or Guid.Empty
        /// if there is no related activity. Most commonly, the Start operation of a
        /// new activity specifies a parent activity as its related activity.
        /// </param>
        /// <param name="data">
        /// The object containing the event payload data. The type T must be
        /// an anonymous type or a type with an [EventData] attribute. The
        /// public instance properties of data will be written recursively to
        /// create the fields of the event.
        /// </param>
        public unsafe void Write<T>(
            string eventName,
            ref EventSourceOptions options,
            ref Guid activityId,
            ref Guid relatedActivityId,
            ref T data)
        {
            if (!this.IsEnabled())
            {
                return;
            }

            // Pin both GUIDs so raw pointers can be handed to the native write path;
            // Guid.Empty for the related id is translated to a null pointer.
            fixed (Guid* pActivity = &activityId, pRelated = &relatedActivityId)
            {
                this.WriteImpl(
                    eventName,
                    ref options,
                    data,
                    pActivity,
                    relatedActivityId == Guid.Empty ? null : pRelated,
                    SimpleEventTypes<T>.Instance);
            }
        }

        /// <summary>
        /// Writes an extended event, where the values of the event are the
        /// combined properties of any number of values. This method is
        /// intended for use in advanced logging scenarios that support a
        /// dynamic set of event context providers.
        /// This method does a quick check on whether this event is enabled.
        /// </summary>
        /// <param name="eventName">
        /// The name for the event. If null, the name from eventTypes is used.
        /// (Note that providing the event name via the name parameter is slightly
        /// less efficient than using the name from eventTypes.)
        /// </param>
        /// <param name="options">
        /// Optional overrides for the event, such as the level, keyword, opcode,
        /// activityId, and relatedActivityId. Any settings not specified by options
        /// are obtained from eventTypes.
        /// </param>
        /// <param name="eventTypes">
        /// Information about the event and the types of the values in the event.
        /// Must not be null. Note that the eventTypes object should be created once and
        /// saved. It should not be recreated for each event.
        /// </param>
        /// <param name="activityID">
        /// A pointer to the activity ID GUID to log
        /// </param>
        /// <param name="childActivityID">
        /// A pointer to the child activity ID to log (can be null) </param>
        /// <param name="values">
        /// The values to include in the event. Must not be null. The number and types of
        /// the values must match the number and types of the fields described by the
        /// eventTypes parameter.
        /// </param>
        private unsafe void WriteMultiMerge(
            string eventName,
            ref EventSourceOptions options,
            TraceLoggingEventTypes eventTypes,
            Guid* activityID,
            Guid* childActivityID,
            params object[] values)
        {
            if (!this.IsEnabled())
            {
                return;
            }

            // Compute the effective level/keywords (options override eventTypes defaults)
            // so we can cheaply skip the expensive serialization when disabled.
            byte level = (options.valuesSet & EventSourceOptions.levelSet) != 0
                ? options.level
                : eventTypes.level;
            EventKeywords keywords = (options.valuesSet & EventSourceOptions.keywordsSet) != 0
                ? options.keywords
                : eventTypes.keywords;

            if (this.IsEnabled((EventLevel)level, keywords))
            {
                WriteMultiMergeInner(eventName, ref options, eventTypes, activityID, childActivityID, values);
            }
        }

        /// <summary>
        /// Writes an extended event, where the values of the event are the
        /// combined properties of any number of values. This method is
        /// intended for use in advanced logging scenarios that support a
        /// dynamic set of event context providers.
        /// Attention: This API does not check whether the event is enabled or not.
        /// Please use WriteMultiMerge to avoid spending CPU cycles for events that are
        /// not enabled.
        /// </summary>
        /// <param name="eventName">
        /// The name for the event. If null, the name from eventTypes is used.
        /// (Note that providing the event name via the name parameter is slightly
        /// less efficient than using the name from eventTypes.)
        /// </param>
        /// <param name="options">
        /// Optional overrides for the event, such as the level, keyword, opcode,
        /// activityId, and relatedActivityId. Any settings not specified by options
        /// are obtained from eventTypes.
        /// </param>
        /// <param name="eventTypes">
        /// Information about the event and the types of the values in the event.
        /// Must not be null. Note that the eventTypes object should be created once and
        /// saved. It should not be recreated for each event.
        /// </param>
        /// <param name="activityID">
        /// A pointer to the activity ID GUID to log
        /// </param>
        /// <param name="childActivityID">
        /// A pointer to the child activity ID to log (can be null)
        /// </param>
        /// <param name="values">
        /// The values to include in the event. Must not be null. The number and types of
        /// the values must match the number and types of the fields described by the
        /// eventTypes parameter.
        /// </param>
        private unsafe void WriteMultiMergeInner(
            string eventName,
            ref EventSourceOptions options,
            TraceLoggingEventTypes eventTypes,
            Guid* activityID,
            Guid* childActivityID,
            params object[] values)
        {
#if FEATURE_MANAGED_ETW
            int identity = 0;
            // Each setting explicitly present in 'options' overrides the default from eventTypes.
            byte level = (options.valuesSet & EventSourceOptions.levelSet) != 0
                ? options.level
                : eventTypes.level;
            byte opcode = (options.valuesSet & EventSourceOptions.opcodeSet) != 0
                ? options.opcode
                : eventTypes.opcode;
            EventTags tags = (options.valuesSet & EventSourceOptions.tagsSet) != 0
                ? options.tags
                : eventTypes.Tags;
            EventKeywords keywords = (options.valuesSet & EventSourceOptions.keywordsSet) != 0
                ? options.keywords
                : eventTypes.keywords;

            var nameInfo = eventTypes.GetNameInfo(eventName ?? eventTypes.Name, tags);
            if (nameInfo == null)
            {
                return;
            }
            identity = nameInfo.identity;
            EventDescriptor descriptor = new EventDescriptor(identity, level, opcode, (long)keywords);

            // First three descriptors carry provider/name/type metadata; payload descriptors follow.
            var pinCount = eventTypes.pinCount;
            var scratch = stackalloc byte[eventTypes.scratchSize];
            var descriptors = stackalloc EventData[eventTypes.dataCount + 3];
            var pins = stackalloc GCHandle[pinCount];

            fixed (byte*
                pMetadata0 = this.providerMetadata,
                pMetadata1 = nameInfo.nameMetadata,
                pMetadata2 = eventTypes.typeMetadata)
            {
                descriptors[0].SetMetadata(pMetadata0, this.providerMetadata.Length, 2);
                descriptors[1].SetMetadata(pMetadata1, nameInfo.nameMetadata.Length, 1);
                descriptors[2].SetMetadata(pMetadata2, eventTypes.typeMetadata.Length, 1);
#if (!ES_BUILD_PCL && !ES_BUILD_PN)
                System.Runtime.CompilerServices.RuntimeHelpers.PrepareConstrainedRegions();
#endif
                try
                {
                    DataCollector.ThreadInstance.Enable(
                        scratch,
                        eventTypes.scratchSize,
                        descriptors + 3,
                        eventTypes.dataCount,
                        pins,
                        pinCount);

                    for (int i = 0; i < eventTypes.typeInfos.Length; i++)
                    {
                        var info = eventTypes.typeInfos[i];
                        info.WriteData(TraceLoggingDataCollector.Instance, info.PropertyValueFactory(values[i]));
                    }

                    this.WriteEventRaw(
                        eventName,
                        ref descriptor,
                        activityID,
                        childActivityID,
                        (int)(DataCollector.ThreadInstance.Finish() - descriptors),
                        (IntPtr)descriptors);
                }
                finally
                {
                    // Always release any GCHandles pinned by the data collector.
                    this.WriteCleanup(pins, pinCount);
                }
            }
#endif // FEATURE_MANAGED_ETW
        }

        /// <summary>
        /// Writes an extended event, where the values of the event have already
        /// been serialized in "data".
        /// </summary>
        /// <param name="eventName">
        /// The name for the event. If null, the name from eventTypes is used.
        /// (Note that providing the event name via the name parameter is slightly
        /// less efficient than using the name from eventTypes.)
        /// </param>
        /// <param name="options">
        /// Optional overrides for the event, such as the level, keyword, opcode,
        /// activityId, and relatedActivityId. Any settings not specified by options
        /// are obtained from eventTypes.
        /// </param>
        /// <param name="eventTypes">
        /// Information about the event and the types of the values in the event.
        /// Must not be null. Note that the eventTypes object should be created once and
        /// saved. It should not be recreated for each event.
        /// </param>
        /// <param name="activityID">
        /// A pointer to the activity ID GUID to log
        /// </param>
        /// <param name="childActivityID">
        /// A pointer to the child activity ID to log (can be null)
        /// </param>
        /// <param name="data">
        /// The previously serialized values to include in the event. Must not be null.
        /// The number and types of the values must match the number and types of the
        /// fields described by the eventTypes parameter.
        /// </param>
        internal unsafe void WriteMultiMerge(
            string eventName,
            ref EventSourceOptions options,
            TraceLoggingEventTypes eventTypes,
            Guid* activityID,
            Guid* childActivityID,
            EventData* data)
        {
#if FEATURE_MANAGED_ETW
            if (!this.IsEnabled())
            {
                return;
            }

            fixed (EventSourceOptions* pOptions = &options)
            {
                EventDescriptor descriptor;
                var nameInfo = this.UpdateDescriptor(eventName, eventTypes, ref options, out descriptor);
                if (nameInfo == null)
                {
                    return;
                }

                // We make a descriptor for each EventData, and because we morph strings to counted strings
                // we may have 2 for each arg, so we allocate enough for this.
                var descriptors = stackalloc EventData[eventTypes.dataCount + eventTypes.typeInfos.Length * 2 + 3];

                fixed (byte*
                    pMetadata0 = this.providerMetadata,
                    pMetadata1 = nameInfo.nameMetadata,
                    pMetadata2 = eventTypes.typeMetadata)
                {
                    descriptors[0].SetMetadata(pMetadata0, this.providerMetadata.Length, 2);
                    descriptors[1].SetMetadata(pMetadata1, nameInfo.nameMetadata.Length, 1);
                    descriptors[2].SetMetadata(pMetadata2, eventTypes.typeMetadata.Length, 1);
                    int numDescrs = 3;
                    for (int i = 0; i < eventTypes.typeInfos.Length; i++)
                    {
                        // Until M3, we need to morph strings to a counted representation
                        // When TDH supports null terminated strings, we can remove this.
                        if (eventTypes.typeInfos[i].DataType == typeof(string))
                        {
                            // Write out the size of the string
                            descriptors[numDescrs].m_Ptr = (long)&descriptors[numDescrs + 1].m_Size;
                            descriptors[numDescrs].m_Size = 2;
                            numDescrs++;

                            descriptors[numDescrs].m_Ptr = data[i].m_Ptr;
                            descriptors[numDescrs].m_Size = data[i].m_Size - 2; // Remove the null terminator
                            numDescrs++;
                        }
                        else
                        {
                            descriptors[numDescrs].m_Ptr = data[i].m_Ptr;
                            descriptors[numDescrs].m_Size = data[i].m_Size;

                            // old conventions for bool is 4 bytes, but meta-data assumes 1.
                            if (data[i].m_Size == 4 && eventTypes.typeInfos[i].DataType == typeof(bool))
                                descriptors[numDescrs].m_Size = 1;

                            numDescrs++;
                        }
                    }

                    this.WriteEventRaw(
                        eventName,
                        ref descriptor,
                        activityID,
                        childActivityID,
                        numDescrs,
                        (IntPtr)descriptors);
                }
            }
#endif // FEATURE_MANAGED_ETW
        }

        /// <summary>
        /// Core write path shared by all Write&lt;T&gt; overloads: resolves the event
        /// descriptor, performs automatic Start/Stop activity tracking when no explicit
        /// activity IDs were supplied, serializes 'data', emits to ETW, and dispatches
        /// to any attached EventListeners. Exceptions other than EventSourceException
        /// are funneled through ThrowEventSourceException.
        /// </summary>
        private unsafe void WriteImpl(
            string eventName,
            ref EventSourceOptions options,
            object data,
            Guid* pActivityId,
            Guid* pRelatedActivityId,
            TraceLoggingEventTypes eventTypes)
        {
            try
            {
                fixed (EventSourceOptions* pOptions = &options)
                {
                    EventDescriptor descriptor;
                    options.Opcode = options.IsOpcodeSet ? options.Opcode : GetOpcodeWithDefault(options.Opcode, eventName);
                    var nameInfo = this.UpdateDescriptor(eventName, eventTypes, ref options, out descriptor);
                    if (nameInfo == null)
                    {
                        return;
                    }

#if FEATURE_MANAGED_ETW
                    var pinCount = eventTypes.pinCount;
                    var scratch = stackalloc byte[eventTypes.scratchSize];
                    var descriptors = stackalloc EventData[eventTypes.dataCount + 3];
                    var pins = stackalloc GCHandle[pinCount];

                    fixed (byte*
                        pMetadata0 = this.providerMetadata,
                        pMetadata1 = nameInfo.nameMetadata,
                        pMetadata2 = eventTypes.typeMetadata)
                    {
                        descriptors[0].SetMetadata(pMetadata0, this.providerMetadata.Length, 2);
                        descriptors[1].SetMetadata(pMetadata1, nameInfo.nameMetadata.Length, 1);
                        descriptors[2].SetMetadata(pMetadata2, eventTypes.typeMetadata.Length, 1);
#endif // FEATURE_MANAGED_ETW
#if (!ES_BUILD_PCL && !ES_BUILD_PN)
                    System.Runtime.CompilerServices.RuntimeHelpers.PrepareConstrainedRegions();
#endif
                    EventOpcode opcode = (EventOpcode)descriptor.Opcode;

                    Guid activityId = Guid.Empty;
                    Guid relatedActivityId = Guid.Empty;
                    // Only auto-track activities when the caller supplied no explicit IDs
                    // and did not opt out via EventActivityOptions.Disable.
                    if (pActivityId == null && pRelatedActivityId == null &&
                       ((options.ActivityOptions & EventActivityOptions.Disable) == 0))
                    {
                        if (opcode == EventOpcode.Start)
                        {
                            m_activityTracker.OnStart(m_name, eventName, 0, ref activityId, ref relatedActivityId, options.ActivityOptions);
                        }
                        else if (opcode == EventOpcode.Stop)
                        {
                            m_activityTracker.OnStop(m_name, eventName, 0, ref activityId);
                        }
                        if (activityId != Guid.Empty)
                            pActivityId = &activityId;
                        if (relatedActivityId != Guid.Empty)
                            pRelatedActivityId = &relatedActivityId;
                    }

                    try
                    {
#if FEATURE_MANAGED_ETW
                        DataCollector.ThreadInstance.Enable(
                            scratch,
                            eventTypes.scratchSize,
                            descriptors + 3,
                            eventTypes.dataCount,
                            pins,
                            pinCount);

                        var info = eventTypes.typeInfos[0];
                        info.WriteData(TraceLoggingDataCollector.Instance, info.PropertyValueFactory(data));

                        this.WriteEventRaw(
                            eventName,
                            ref descriptor,
                            pActivityId,
                            pRelatedActivityId,
                            (int)(DataCollector.ThreadInstance.Finish() - descriptors),
                            (IntPtr)descriptors);
#endif // FEATURE_MANAGED_ETW

                        // TODO enable filtering for listeners.
                        if (m_Dispatchers != null)
                        {
                            var eventData = (EventPayload)(eventTypes.typeInfos[0].GetData(data));
                            WriteToAllListeners(eventName, ref descriptor, nameInfo.tags, pActivityId, eventData);
                        }
                    }
                    catch (Exception ex)
                    {
                        if (ex is EventSourceException)
                            throw;
                        else
                            ThrowEventSourceException(eventName, ex);
                    }
#if FEATURE_MANAGED_ETW
                    finally
                    {
                        this.WriteCleanup(pins, pinCount);
                    }
                    }
#endif // FEATURE_MANAGED_ETW
                }
            }
            catch (Exception ex)
            {
                if (ex is EventSourceException)
                    throw;
                else
                    ThrowEventSourceException(eventName, ex);
            }
        }

        /// <summary>
        /// Builds an EventWrittenEventArgs from a self-describing event and dispatches
        /// it to every attached EventListener. Self-described events carry no manifest
        /// event id, so EventId is set to -1.
        /// </summary>
        private unsafe void WriteToAllListeners(string eventName, ref EventDescriptor eventDescriptor, EventTags tags, Guid* pActivityId, EventPayload payload)
        {
            EventWrittenEventArgs eventCallbackArgs = new EventWrittenEventArgs(this);
            eventCallbackArgs.EventName = eventName;
            eventCallbackArgs.m_level = (EventLevel) eventDescriptor.Level;
            eventCallbackArgs.m_keywords = (EventKeywords) eventDescriptor.Keywords;
            eventCallbackArgs.m_opcode = (EventOpcode) eventDescriptor.Opcode;
            eventCallbackArgs.m_tags = tags;

            // Self described events do not have an id attached. We mark it internally with -1.
            eventCallbackArgs.EventId = -1;
            if (pActivityId != null)
                // NOTE(review): pActivityId is the event's *activity* id (see the WriteImpl
                // call site), yet it is assigned to RelatedActivityId here. This looks like
                // it should be eventCallbackArgs.ActivityId — confirm against listeners
                // before changing, as they may have come to depend on the current behavior.
                eventCallbackArgs.RelatedActivityId = *pActivityId;

            if (payload != null)
            {
                eventCallbackArgs.Payload = new ReadOnlyCollection<object>((IList<object>)payload.Values);
                eventCallbackArgs.PayloadNames = new ReadOnlyCollection<string>((IList<string>)payload.Keys);
            }

            DispatchToAllListeners(-1, pActivityId, eventCallbackArgs);
        }

#if (!ES_BUILD_PCL && !ES_BUILD_PN)
        [System.Runtime.ConstrainedExecution.ReliabilityContract(
            System.Runtime.ConstrainedExecution.Consistency.WillNotCorruptState,
            System.Runtime.ConstrainedExecution.Cer.Success)]
#endif
        [NonEvent]
        // Disables the per-thread data collector and frees every GCHandle that was
        // pinned while serializing the payload. Must not fail (runs under a CER).
        private unsafe void WriteCleanup(GCHandle* pPins, int cPins)
        {
            DataCollector.ThreadInstance.Disable();

            for (int i = 0; i != cPins; i++)
            {
                if (IntPtr.Zero != (IntPtr)pPins[i])
                {
                    pPins[i].Free();
                }
            }
        }

        // Builds this.providerMetadata from the provider name plus any "ETW_"-prefixed
        // traits. Each trait is encoded as [size:2][traitNum:1][value bytes] and appended
        // after the name metadata produced by Statics.MetadataForString.
        private void InitializeProviderMetadata()
        {
#if FEATURE_MANAGED_ETW
            if (m_traits != null)
            {
                List<byte> traitMetaData = new List<byte>(100);
                for (int i = 0; i < m_traits.Length - 1; i += 2)
                {
                    if (m_traits[i].StartsWith("ETW_", StringComparison.Ordinal))
                    {
                        string etwTrait = m_traits[i].Substring(4);
                        byte traitNum;
                        if (!byte.TryParse(etwTrait, out traitNum))
                        {
                            if (etwTrait == "GROUP")
                            {
                                traitNum = 1;
                            }
                            else
                            {
                                throw new ArgumentException(Resources.GetResourceString("UnknownEtwTrait", etwTrait), "traits");
                            }
                        }
                        string value = m_traits[i + 1];
                        int lenPos = traitMetaData.Count;
                        traitMetaData.Add(0);                                           // Emit size (to be filled in later)
                        traitMetaData.Add(0);
                        traitMetaData.Add(traitNum);                                    // Emit Trait number
                        var valueLen = AddValueToMetaData(traitMetaData, value) + 3;    // Emit the value bytes +3 accounts for 3 bytes we emitted above.
                        traitMetaData[lenPos] = unchecked((byte)valueLen);              // Fill in size
                        traitMetaData[lenPos + 1] = unchecked((byte)(valueLen >> 8));
                    }
                }
                providerMetadata = Statics.MetadataForString(this.Name, 0, traitMetaData.Count, 0);
                int startPos = providerMetadata.Length - traitMetaData.Count;
                foreach (var b in traitMetaData)
                    providerMetadata[startPos++] = b;
            }
            else
                providerMetadata = Statics.MetadataForString(this.Name, 0, 0, 0);
#endif //FEATURE_MANAGED_ETW
        }

        // Appends a trait value to 'metaData' and returns the number of bytes added.
        // The value's first character selects the encoding: '@' = UTF-8 of the rest,
        // '{' = the 16 bytes of a GUID, '#' = space-separated hex byte pairs, and a
        // leading letter or space = UTF-8 of the whole string; anything else throws.
        private static int AddValueToMetaData(List<byte> metaData, string value)
        {
            if (value.Length == 0)
                return 0;

            int startPos = metaData.Count;
            char firstChar = value[0];

            if (firstChar == '@')
                metaData.AddRange(Encoding.UTF8.GetBytes(value.Substring(1)));
            else if (firstChar == '{')
                metaData.AddRange(new Guid(value).ToByteArray());
            else if (firstChar == '#')
            {
                for (int i = 1; i < value.Length; i++)
                {
                    if (value[i] != ' ')        // Skip spaces between bytes.
                    {
                        if (!(i + 1 < value.Length))
                        {
                            throw new ArgumentException(Resources.GetResourceString("EvenHexDigits"), "traits");
                        }
                        metaData.Add((byte)(HexDigit(value[i]) * 16 + HexDigit(value[i + 1])));
                        i++;
                    }
                }
            }
            else if ('A' <= firstChar || ' ' == firstChar)  // Is it alphabetic or space (excludes digits and most punctuation).
            {
                metaData.AddRange(Encoding.UTF8.GetBytes(value));
            }
            else
            {
                throw new ArgumentException(Resources.GetResourceString("IllegalValue", value), "traits");
            }

            return metaData.Count - startPos;
        }

        /// <summary>
        /// Returns a value 0-15 if 'c' is a hexadecimal digit. If not, it throws an ArgumentException.
/// </summary> private static int HexDigit(char c) { if ('0' <= c && c <= '9') { return (c - '0'); } if ('a' <= c) { c = unchecked((char)(c - ('a' - 'A'))); // Convert to lower case } if ('A' <= c && c <= 'F') { return (c - 'A' + 10); } throw new ArgumentException(Resources.GetResourceString("BadHexDigit", c), "traits"); } private NameInfo UpdateDescriptor( string name, TraceLoggingEventTypes eventInfo, ref EventSourceOptions options, out EventDescriptor descriptor) { NameInfo nameInfo = null; int identity = 0; byte level = (options.valuesSet & EventSourceOptions.levelSet) != 0 ? options.level : eventInfo.level; byte opcode = (options.valuesSet & EventSourceOptions.opcodeSet) != 0 ? options.opcode : eventInfo.opcode; EventTags tags = (options.valuesSet & EventSourceOptions.tagsSet) != 0 ? options.tags : eventInfo.Tags; EventKeywords keywords = (options.valuesSet & EventSourceOptions.keywordsSet) != 0 ? options.keywords : eventInfo.keywords; if (this.IsEnabled((EventLevel)level, keywords)) { nameInfo = eventInfo.GetNameInfo(name ?? eventInfo.Name, tags); identity = nameInfo.identity; } descriptor = new EventDescriptor(identity, level, opcode, (long)keywords); return nameInfo; } } }
//
// Copyright (c) Microsoft Corporation.    All rights reserved.
//

//#define DUMP_CONSTRAINTSYSTEM

// LLVM translation doesn't yet recognize SSA format, and will not produce correct code.
// TODO: Remove this tag when that issue has been resolved.
//#define ALLOW_SSA_FORM

// Enable optimizations that only work for direct ARM translation (non-LLVM legacy path).
// TODO: Remove this tag when comparison is no longer needed.
//#define ENABLE_LOW_LEVEL_OPTIMIZATIONS

// Enable more advanced optimizations which look for non-equivalent comparisons. This appears to be
// overly aggressive when not in SSA form, resulting in incorrect code.
// TODO: Remove this tag when this issue is resolved.
//#define ENABLE_ADVANCED_COMPARES

namespace Microsoft.Zelig.CodeGeneration.IR.CompilationSteps.Handlers
{
    using System;
    using System.Collections.Generic;

    using Microsoft.Zelig.Runtime.TypeSystem;

    public class Optimizations
    {
        //--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//
        //--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//
        //--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//

#if ALLOW_SSA_FORM
        [CompilationSteps.OptimizationHandler(RunOnce = true, RunInExtendedSSAForm = true)]
#else // ALLOW_SSA_FORM
        [CompilationSteps.OptimizationHandler(RunOnce = true)]
#endif // ALLOW_SSA_FORM
        // Phase entry point: repeatedly propagates fixed array lengths and removes
        // redundant checks on the current CFG until neither transformation fires,
        // running dead-code elimination after each successful pass since removed
        // checks can expose further dead code (and vice versa).
        private static void RemoveRedundantChecks( PhaseExecution.NotificationContext nc )
        {
            ControlFlowGraphStateForCodeTransformation cfg = nc.CurrentCFG;

            // Iterate to a fixed point; the single-definition lookup must be rebuilt
            // each round because the CFG changes under us.
            while(true)
            {
                GrowOnlyHashTable< VariableExpression, Operator > defLookup = cfg.DataFlow_SingleDefinitionLookup;

                if(PropagateFixedArrayLength( cfg, defLookup ) ||
                   RemoveRedundantChecks    ( cfg, defLookup )  )
                {
                    Transformations.RemoveDeadCode.Execute( cfg, false );

                    continue;
                }

                break;
            }
        }
//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--// private static bool PropagateFixedArrayLength( ControlFlowGraphStateForCodeTransformation cfg , GrowOnlyHashTable< VariableExpression, Operator > defLookup ) { bool fRunSimplify = false; #if ENABLE_LOW_LEVEL_OPTIMIZATIONS foreach(var op in cfg.FilterOperators< LoadIndirectOperator >()) #else // ENABLE_LOW_LEVEL_OPTIMIZATIONS foreach(var op in cfg.FilterOperators< LoadInstanceFieldOperator >()) #endif // ENABLE_LOW_LEVEL_OPTIMIZATIONS { if(op.HasAnnotation< ArrayLengthAnnotation >()) { FixedLengthArrayAnnotation an = FindFixedLengthArrayAnnotation( defLookup, op.FirstArgument ); if(an != null) { var var = op.FirstResult; var opNew = SingleAssignmentOperator.New( op.DebugInfo, var, cfg.TypeSystem.CreateConstant( (uint)an.ArrayLength ) ); op.SubstituteWithOperator( opNew, Operator.SubstitutionFlags.Default ); defLookup[var] = op; fRunSimplify = true; } } } return fRunSimplify; } private static FixedLengthArrayAnnotation FindFixedLengthArrayAnnotation( GrowOnlyHashTable< VariableExpression, Operator > defLookup , Expression ex ) { return FindFixedLengthArrayAnnotation( defLookup, SetFactory.NewWithReferenceEquality< VariableExpression >(), ex ); } private static FixedLengthArrayAnnotation FindFixedLengthArrayAnnotation( GrowOnlyHashTable< VariableExpression, Operator > defLookup , GrowOnlySet< VariableExpression > history , Expression ex ) { var array = ex as VariableExpression; if(array == null || history.Insert( array )) { // // Detected loop, bail out. 
// return null; } //--// Operator arrayDef; if(defLookup.TryGetValue( array, out arrayDef )) { var an = arrayDef.GetAnnotation< FixedLengthArrayAnnotation >(); if(an != null) { return an; } if(arrayDef is PhiOperator) { foreach(Expression rhs in arrayDef.Arguments) { FixedLengthArrayAnnotation an2 = FindFixedLengthArrayAnnotation( defLookup, history, rhs ); if(an2 == null) { return null; } if(an == null) { an = an2; } else if(an.ArrayLength != an2.ArrayLength) { return null; } } return an; } if(arrayDef is SingleAssignmentOperator || arrayDef is PiOperator ) { return FindFixedLengthArrayAnnotation( defLookup, history, arrayDef.FirstArgument ); } } return null; } //--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--// private static bool RemoveRedundantChecks( ControlFlowGraphStateForCodeTransformation cfg, GrowOnlyHashTable< VariableExpression, Operator > defLookup ) { var useChains = cfg.DataFlow_UseChains; bool fRunSimplify = false; cfg.ResetCacheCheckpoint( ); foreach(var controlOp in cfg.FilterOperators< BinaryConditionalControlOperator >()) { BasicBlock takenBranch = TryGetUnconditionalBranchTarget( controlOp, defLookup, useChains ); if(takenBranch != null) { UnconditionalControlOperator opNewCtrl = UnconditionalControlOperator.New( controlOp.DebugInfo, takenBranch ); controlOp.SubstituteWithOperator( opNewCtrl, Operator.SubstitutionFlags.Default ); fRunSimplify = true; } } // If modified, we need to refresh useChains in case variables were changed. If we don't do this, FilterOperators // below will refresh the operators, and SpanningTreeIndex and other info between variables and our stale useChains // may be mismatched. 
if(fRunSimplify) { cfg.AssertNoCacheRefreshSinceCheckpoint( ); useChains = cfg.DataFlow_UseChains; cfg.ResetCacheCheckpoint( ); } var cscUpperBound = new Transformations.ConstraintSystemCollector( cfg, Transformations.ConstraintSystemCollector.Kind.LessThanOrEqual ); var cscLowerBound = new Transformations.ConstraintSystemCollector( cfg, Transformations.ConstraintSystemCollector.Kind.GreaterThanOrEqual ); foreach(var controlOp in cfg.FilterOperators< CompareConditionalControlOperator >()) { BasicBlock takenBranch = TryGetUnconditionalBranchTarget( cscUpperBound, cscLowerBound, controlOp, defLookup, useChains ); if(takenBranch != null) { UnconditionalControlOperator opNewCtrl = UnconditionalControlOperator.New( controlOp.DebugInfo, takenBranch ); controlOp.SubstituteWithOperator( opNewCtrl, Operator.SubstitutionFlags.Default ); fRunSimplify = true; } } #if ENABLE_LOW_LEVEL_OPTIMIZATIONS foreach(var controlOp in cfg.FilterOperators< ConditionCodeConditionalControlOperator >()) { bool fNotEqual; switch(controlOp.Condition) { case ConditionCodeExpression.Comparison.NotEqual: fNotEqual = true; break; case ConditionCodeExpression.Comparison.Equal: fNotEqual = false; break; default: continue; } var opCmp = ControlFlowGraphState.CheckSingleDefinition( defLookup, controlOp.FirstArgument ) as CompareOperator; if(opCmp != null) { bool fNullOnRight; Expression ex = opCmp.IsBinaryOperationAgainstZeroValue( out fNullOnRight ); if(ex != null) { if(ProveNonZero( defLookup, ex ) == ProveResult.True) { UnconditionalControlOperator opNewCtrl = UnconditionalControlOperator.New( controlOp.DebugInfo, fNotEqual ? 
controlOp.TargetBranchTaken : controlOp.TargetBranchNotTaken ); controlOp.SubstituteWithOperator( opNewCtrl, Operator.SubstitutionFlags.Default ); fRunSimplify = true; } } } } foreach (var controlOp in cfg.FilterOperators< ConditionCodeConditionalControlOperator >()) { var opCmp = ControlFlowGraphState.CheckSingleDefinition( defLookup, controlOp.FirstArgument ) as CompareOperator; if(opCmp != null) { ProveResult result = ProveComparison( cscUpperBound, cscLowerBound, opCmp.FirstArgument, opCmp.SecondArgument, controlOp.Condition ); if(result != ProveResult.Unknown) { BasicBlock takenBranch = (result == ProveResult.AlwaysTrue) ? controlOp.TargetBranchTaken : controlOp.TargetBranchNotTaken; UnconditionalControlOperator opNewCtrl = UnconditionalControlOperator.New( controlOp.DebugInfo, takenBranch ); controlOp.SubstituteWithOperator( opNewCtrl, Operator.SubstitutionFlags.Default ); fRunSimplify = true; } } } #endif // ENABLE_LOW_LEVEL_OPTIMIZATIONS cfg.AssertNoCacheRefreshSinceCheckpoint( ); return fRunSimplify; } bool SomeProperty { get; set; } private static BasicBlock TryGetUnconditionalBranchTarget( BinaryConditionalControlOperator controlOp, GrowOnlyHashTable<VariableExpression, Operator> defLookup, Operator[][] useChains) { // Replace binary branches with unconditional ones when both targets are the same block. 
if (controlOp.TargetBranchTaken == controlOp.TargetBranchNotTaken) { return controlOp.TargetBranchTaken; } switch (ProveNonZero(controlOp.FirstArgument, defLookup, useChains)) { case ProveResult.AlwaysTrue: return controlOp.TargetBranchTaken; case ProveResult.NeverTrue: return controlOp.TargetBranchNotTaken; } return null; } private static BasicBlock TryGetUnconditionalBranchTarget( Transformations.ConstraintSystemCollector cscUpperBound, Transformations.ConstraintSystemCollector cscLowerBound, CompareConditionalControlOperator controlOp, GrowOnlyHashTable<VariableExpression, Operator> defLookup, Operator[][] useChains) { // Replace binary branches with unconditional ones when both targets are the same block. if (controlOp.TargetBranchTaken == controlOp.TargetBranchNotTaken) { return controlOp.TargetBranchTaken; } ProveResult result = ProveResult.Unknown; switch (controlOp.Condition) { case CompareAndSetOperator.ActionCondition.EQ: case CompareAndSetOperator.ActionCondition.NE: bool fNullOnRight; Expression expr = controlOp.IsBinaryOperationAgainstZeroValue(out fNullOnRight); if (expr == null) { break; } result = ProveNonZero(expr, defLookup, useChains); if (result == ProveResult.Unknown) { break; } // Invert the result for (expr == 0) if (controlOp.Condition == CompareAndSetOperator.ActionCondition.EQ) { if (result == ProveResult.AlwaysTrue) { result = ProveResult.NeverTrue; } else { result = ProveResult.AlwaysTrue; } } break; #if ENABLE_ADVANCED_COMPARES case CompareAndSetOperator.ActionCondition.LT: result = ProveLessThanOrEqual(cscUpperBound, cscLowerBound, controlOp.FirstArgument, controlOp.SecondArgument, weight: -1); break; case CompareAndSetOperator.ActionCondition.LE: result = ProveLessThanOrEqual(cscUpperBound, cscLowerBound, controlOp.FirstArgument, controlOp.SecondArgument, weight: 0); break; case CompareAndSetOperator.ActionCondition.GT: result = ProveLessThanOrEqual(cscUpperBound, cscLowerBound, controlOp.SecondArgument, controlOp.FirstArgument, 
weight: -1); break; case CompareAndSetOperator.ActionCondition.GE: result = ProveLessThanOrEqual(cscUpperBound, cscLowerBound, controlOp.SecondArgument, controlOp.FirstArgument, weight: 0); break; #endif // ENABLE_ADVANCED_COMPARES } switch (result) { case ProveResult.AlwaysTrue: return controlOp.TargetBranchTaken; case ProveResult.NeverTrue: return controlOp.TargetBranchNotTaken; default: return null; } } private enum ProveResult { Unknown = 0, AlwaysTrue, NeverTrue, } private static ProveResult ProveNonZero( Expression ex, GrowOnlyHashTable< VariableExpression, Operator > defLookup, Operator[][] useChains ) { return ProveNonZero( ex, defLookup, useChains, SetFactory.NewWithReferenceEquality< VariableExpression >() ); } private static ProveResult ProveNonZero( Expression ex, GrowOnlyHashTable< VariableExpression, Operator > defLookup, Operator[][] useChains, GrowOnlySet< VariableExpression > history ) { var exConst = ex as ConstantExpression; if (exConst != null) { if(exConst.IsEqualToZero()) { return ProveResult.NeverTrue; } return ProveResult.AlwaysTrue; } var exVar = ex as VariableExpression; // If we detected a loop, the value may change between iterations. if(exVar == null || history.Insert( exVar )) { return ProveResult.Unknown; } //--// Operator def; if(defLookup.TryGetValue( exVar, out def )) { // If we ever take the address of the expression, assume it can be modified. foreach (Operator op in useChains[exVar.SpanningTreeIndex]) { if (op is AddressAssignmentOperator) { return ProveResult.Unknown; } } if (def.HasAnnotation< NotNullAnnotation >()) { return ProveResult.AlwaysTrue; } // Phi: If all predecessors are provably the same result, then so is the operator's result. 
if(def is PhiOperator) { bool isFirstResult = true; ProveResult result = ProveResult.Unknown; foreach(Expression rhs in def.Arguments) { ProveResult curResult = ProveNonZero( rhs, defLookup, useChains, history ); if(isFirstResult) { isFirstResult = false; result = curResult; } else if(curResult != result) { // We got two different results from different blocks. return ProveResult.Unknown; } } return result; } // Assignment is transitive, so look up the source value. if(def is SingleAssignmentOperator) { return ProveNonZero( def.FirstArgument, defLookup, useChains, history ); } #if FUTURE // This block isn't strictly correct and assumes low-level knowledge. We should revisit it. if(def is BinaryOperator) { var binOp = (BinaryOperator)def; ConstantExpression exConst; bool fConstOnRight; var varSrc = binOp.IsBinaryOperationAgainstConstant( out exConst, out fConstOnRight ); if(varSrc != null) { // // Only apply the optimization to pointer-like variables. // if(varSrc.CanPointToMemory || exVar.CanPointToMemory) { switch(binOp.Alu) { case BinaryOperator.ALU.ADD: case BinaryOperator.ALU.SUB: // // Adding/removing a constant from a non-null value doesn't change its nullness. // return ProveNonZero( defLookup, history, varSrc ); case BinaryOperator.ALU.OR: // // OR'ing with a non-zero constant ensures the results are not null. 
// if(!exConst.IsEqualToZero()) { return ProveResult.AlwaysTrue; } break; } } } return ProveResult.Unknown; } #endif // FUTURE if(def is PiOperator) { var piOp = (PiOperator)def; ex = def.FirstArgument; if((piOp.LeftExpression == ex && piOp.RightExpression.IsEqualToZero()) || (piOp.RightExpression == ex && piOp.LeftExpression .IsEqualToZero()) ) { switch(piOp.RelationOperator) { case PiOperator.Relation.Equal: return ProveResult.NeverTrue; case PiOperator.Relation.NotEqual: return ProveResult.AlwaysTrue; } } return ProveNonZero( ex, defLookup, useChains, history ); } } return ProveResult.Unknown; } #if ENABLE_LOW_LEVEL_OPTIMIZATIONS private static ProveResult ProveComparison( Transformations.ConstraintSystemCollector cscUpperBound, Transformations.ConstraintSystemCollector cscLowerBound, Expression exLeft, Expression exRight, ConditionCodeExpression.Comparison condition) { switch(condition) { case ConditionCodeExpression.Comparison.UnsignedLowerThan: case ConditionCodeExpression.Comparison.SignedLessThan: return ProveLessThanOrEqual( cscUpperBound, cscLowerBound, exLeft, exRight, -1 ); case ConditionCodeExpression.Comparison.UnsignedLowerThanOrSame: case ConditionCodeExpression.Comparison.SignedLessThanOrEqual: return ProveLessThanOrEqual( cscUpperBound, cscLowerBound, exLeft, exRight, 0 ); case ConditionCodeExpression.Comparison.UnsignedHigherThan: case ConditionCodeExpression.Comparison.SignedGreaterThan: return ProveLessThanOrEqual( cscUpperBound, cscLowerBound, exRight, exLeft, -1 ); case ConditionCodeExpression.Comparison.UnsignedHigherThanOrSame: case ConditionCodeExpression.Comparison.SignedGreaterThanOrEqual: return ProveLessThanOrEqual( cscUpperBound, cscLowerBound, exRight, exLeft, 0 ); } return ProveResult.Unknown; } #endif // ENABLE_LOW_LEVEL_OPTIMIZATIONS private static ProveResult ProveLessThanOrEqual( Transformations.ConstraintSystemCollector cscUpperBound, Transformations.ConstraintSystemCollector cscLowerBound, Expression exLeft, Expression 
exRight, double weight) { if(exLeft is VariableExpression) { if(cscUpperBound.Prove( exLeft, exRight, weight )) { return ProveResult.AlwaysTrue; } } if(exRight is VariableExpression) { if(cscLowerBound.Prove( exRight, exLeft, -weight )) { return ProveResult.AlwaysTrue; } } return ProveResult.Unknown; } //--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--// //--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--// //--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--// #if ENABLE_LOW_LEVEL_OPTIMIZATIONS [CompilationSteps.OptimizationHandler(RunOnce=true, RunInSSAForm=true)] private static void ConvertLongCompareToNormalCompare( PhaseExecution.NotificationContext nc ) { ControlFlowGraphStateForCodeTransformation cfg = nc.CurrentCFG; Operator[][] useChains = cfg.DataFlow_UseChains; foreach(var opLong in cfg.FilterOperators< LongCompareOperator >()) { Debugging.DebugInfo debugInfo = opLong.DebugInfo; VariableExpression lhs = opLong.FirstResult; Expression[] rhs = opLong.Arguments; Operator[] uses = useChains[lhs.SpanningTreeIndex]; bool fOk = false; switch(uses.Length) { case 0: fOk = true; break; case 1: { bool fNullOnRight; Operator use = uses[0]; Expression[] target = opLong.IsCompareToNull( out fNullOnRight ); ConditionCodeExpression.Comparison cond = ConditionCodeExpression.Comparison.NotValid; if(use is ConditionalCompareOperator) { var ccOp = (ConditionalCompareOperator)use; cond = ccOp.Condition; } else if(use is ConditionCodeConditionalControlOperator) { var ccOp = (ConditionCodeConditionalControlOperator)use; cond = ccOp.Condition; } else if(use is SetIfConditionIsTrueOperator) { var setOp = (SetIfConditionIsTrueOperator)use; cond = setOp.Condition; } if(cond != ConditionCodeExpression.Comparison.NotValid) { ConditionCodeExpression.Comparison condOrig = cond; Expression 
rhsLeftLo = rhs[0]; Expression rhsLeftHi = rhs[1]; Expression rhsRightLo = rhs[2]; Expression rhsRightHi = rhs[3]; if(target != null) { if(!fNullOnRight) { rhsLeftLo = rhs[2]; rhsLeftHi = rhs[3]; rhsRightLo = rhs[0]; rhsRightHi = rhs[1]; cond = ConditionCodeExpression.NegateCondition( cond ); } // // We are comparing against zero. // // Can we use a single instruction to perform the comparison? // // For Zero/NonZero, we just OR together the two halves, and set the condition code. // // For Negative/PositiveOrZero, we just need to look at the high part. // // For unsigned: // >= is always true. // < is always false. // <= is like Equal. // > is like NotEqual. // // // For signed: // >= is like PositiveOrZero. // < is like Negative. // <= requires full comparison. // > requires full comparison. // switch(cond) { case ConditionCodeExpression.Comparison.Equal : case ConditionCodeExpression.Comparison.NotEqual: { VariableExpression tmp = cfg.AllocatePseudoRegister( rhsLeftLo.Type ); var op2 = BinaryOperatorWithCarryOut.New( debugInfo, BinaryOperator.ALU.OR, false, false, tmp, lhs, rhsLeftLo, rhsLeftHi ); opLong.SubstituteWithOperator( op2, Operator.SubstitutionFlags.Default ); } fOk = true; break; case ConditionCodeExpression.Comparison.Negative : case ConditionCodeExpression.Comparison.PositiveOrZero : { CompareOperator op2 = CompareOperator.New( debugInfo, lhs, rhsLeftHi, nc.TypeSystem.CreateConstant( rhsLeftHi.Type, 0 ) ); opLong.AddOperatorBefore( op2 ); opLong.Delete(); } fOk = true; break; //--// case ConditionCodeExpression.Comparison.UnsignedLowerThanOrSame: cond = ConditionCodeExpression.Comparison.Equal; goto case ConditionCodeExpression.Comparison.Equal; case ConditionCodeExpression.Comparison.UnsignedHigherThan: cond = ConditionCodeExpression.Comparison.NotEqual; goto case ConditionCodeExpression.Comparison.NotEqual; case ConditionCodeExpression.Comparison.UnsignedHigherThanOrSame: // Always true. 
goto case ConditionCodeExpression.Comparison.PositiveOrZero; case ConditionCodeExpression.Comparison.UnsignedLowerThan: // Always false. goto case ConditionCodeExpression.Comparison.PositiveOrZero; //--// case ConditionCodeExpression.Comparison.SignedGreaterThanOrEqual: cond = ConditionCodeExpression.Comparison.PositiveOrZero; goto case ConditionCodeExpression.Comparison.PositiveOrZero; case ConditionCodeExpression.Comparison.SignedLessThan: cond = ConditionCodeExpression.Comparison.Negative; goto case ConditionCodeExpression.Comparison.Negative; case ConditionCodeExpression.Comparison.SignedGreaterThan: case ConditionCodeExpression.Comparison.SignedLessThanOrEqual: // // We have to use the full 64bit comparison. // break; } } if(fOk == false) { // // We are comparing two variables. // // For Zero/NonZero and all the unsigned comparisons, // first compare the high parts and then compare the low ones, if the high parts are equal. // // For signed: // >= do a subtraction and set the result if SignedGreaterThanOrEqual. // < do a subtraction and set the result if SignedLessThan. // <= is like >= with the arguments swapped. // > is like < with the arguments swapped. 
// switch(cond) { case ConditionCodeExpression.Comparison.Equal : case ConditionCodeExpression.Comparison.NotEqual : case ConditionCodeExpression.Comparison.UnsignedHigherThanOrSame: case ConditionCodeExpression.Comparison.UnsignedLowerThan : case ConditionCodeExpression.Comparison.UnsignedHigherThan : case ConditionCodeExpression.Comparison.UnsignedLowerThanOrSame : { CompareOperator opHigh = CompareOperator.New( debugInfo, lhs, rhsLeftHi, rhsRightHi ); opLong.AddOperatorBefore( opHigh ); ConditionalCompareOperator opLow = ConditionalCompareOperator.New( debugInfo, ConditionCodeExpression.Comparison.Equal, lhs, rhsLeftLo, rhsRightLo, lhs ); opLong.AddOperatorBefore( opLow ); opLong.Delete(); } fOk = true; break; case ConditionCodeExpression.Comparison.SignedGreaterThanOrEqual: { VariableExpression tmpLo = cfg.AllocatePseudoRegister( rhsLeftLo.Type ); VariableExpression tmpHi = cfg.AllocatePseudoRegister( rhsLeftHi.Type ); var opLow = BinaryOperatorWithCarryOut.New( debugInfo, BinaryOperator.ALU.SUB, true, false, tmpLo, lhs, rhsLeftLo, rhsRightLo ); opLong.AddOperatorBefore( opLow ); var opHigh = BinaryOperatorWithCarryInAndOut.New( debugInfo, BinaryOperator.ALU.SUB, true, false, tmpHi, lhs, rhsLeftHi, rhsRightHi, lhs ); opLong.AddOperatorBefore( opHigh ); opLong.Delete(); cond = ConditionCodeExpression.Comparison.SignedGreaterThanOrEqual; } fOk = true; break; case ConditionCodeExpression.Comparison.SignedLessThan: { VariableExpression tmpLo = cfg.AllocatePseudoRegister( rhsLeftLo.Type ); VariableExpression tmpHi = cfg.AllocatePseudoRegister( rhsLeftHi.Type ); var opLow = BinaryOperatorWithCarryOut.New( debugInfo, BinaryOperator.ALU.SUB, true, false, tmpLo, lhs, rhsLeftLo, rhsRightLo ); opLong.AddOperatorBefore( opLow ); var opHigh = BinaryOperatorWithCarryInAndOut.New( debugInfo, BinaryOperator.ALU.SUB, true, false, tmpHi, lhs, rhsLeftHi, rhsRightHi, lhs ); opLong.AddOperatorBefore( opHigh ); opLong.Delete(); cond = 
ConditionCodeExpression.Comparison.SignedLessThan; } fOk = true; break; case ConditionCodeExpression.Comparison.SignedGreaterThan: { Expression tmpLo; Expression tmpHi; tmpLo = rhsLeftLo ; tmpHi = rhsLeftHi ; rhsLeftLo = rhsRightLo; rhsLeftHi = rhsRightHi; rhsRightLo = tmpLo ; rhsRightHi = tmpHi ; goto case ConditionCodeExpression.Comparison.SignedLessThan; } case ConditionCodeExpression.Comparison.SignedLessThanOrEqual: { Expression tmpLo; Expression tmpHi; tmpLo = rhsLeftLo ; tmpHi = rhsLeftHi ; rhsLeftLo = rhsRightLo; rhsLeftHi = rhsRightHi; rhsRightLo = tmpLo ; rhsRightHi = tmpHi ; goto case ConditionCodeExpression.Comparison.SignedGreaterThanOrEqual; } } } if(condOrig != cond) { if(use is ConditionalCompareOperator) { var ccOp = (ConditionalCompareOperator)use; ccOp.Condition = cond; } else if(use is ConditionCodeConditionalControlOperator) { var ccOp = (ConditionCodeConditionalControlOperator)use; ccOp.Condition = cond; } else if(use is SetIfConditionIsTrueOperator) { var setOp = (SetIfConditionIsTrueOperator)use; setOp.Condition = cond; } } } } break; default: throw TypeConsistencyErrorException.Create( "Unexpected form of '{0}' with multiple uses for the condition code", opLong ); } if(fOk == false) { throw TypeConsistencyErrorException.Create( "Unexpected form of '{0}'", opLong ); } } } #endif // ENABLE_LOW_LEVEL_OPTIMIZATIONS #if ALLOW_SSA_FORM [CompilationSteps.OptimizationHandler(RunOnce = true, RunInSSAForm = true)] #else // ALLOW_SSA_FORM [CompilationSteps.OptimizationHandler(RunOnce = true)] #endif // ALLOW_SSA_FORM private static void ReduceComparisons(PhaseExecution.NotificationContext nc) { ControlFlowGraphStateForCodeTransformation cfg = nc.CurrentCFG; Operator[][] defChains = cfg.DataFlow_DefinitionChains; Operator[][] useChains = cfg.DataFlow_UseChains; // Replace split compare + branch operators with CompareConditionalControlOperator. 
foreach (var branchOp in cfg.FilterOperators<BinaryConditionalControlOperator>()) { var comparand = branchOp.FirstArgument as VariableExpression; if (comparand == null) { continue; } var compareOp = ControlFlowGraphState.CheckSingleDefinition(defChains, comparand) as CompareAndSetOperator; if (compareOp == null) { continue; } var conditionalOp = CompareConditionalControlOperator.New( branchOp.DebugInfo, compareOp.Condition, compareOp.Signed, compareOp.FirstArgument, compareOp.SecondArgument, branchOp.TargetBranchNotTaken, branchOp.TargetBranchTaken); branchOp.SubstituteWithOperator(conditionalOp, Operator.SubstitutionFlags.Default); nc.MarkAsModified(); } } //--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--// //--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--// //--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--// #if ALLOW_SSA_FORM [CompilationSteps.OptimizationHandler(RunOnce = true, RunInSSAForm = true)] #else // ALLOW_SSA_FORM [CompilationSteps.OptimizationHandler(RunOnce = true)] #endif // ALLOW_SSA_FORM private static void RemoveRedundantConversions( PhaseExecution.NotificationContext nc ) { ControlFlowGraphStateForCodeTransformation cfg = nc.CurrentCFG; Operator[][] defChains = cfg.DataFlow_DefinitionChains; Operator[][] useChains = cfg.DataFlow_UseChains; cfg.ResetCacheCheckpoint( ); foreach(var opConv in cfg.FilterOperators< ConversionOperator >()) { var lhs = opConv.FirstResult; var rhs = opConv.FirstArgument; if(rhs is VariableExpression) { var def = ControlFlowGraphState.CheckSingleDefinition( defChains, (VariableExpression)rhs ); bool fSubstitute = false; var defLoad = def as LoadIndirectOperator; if(defLoad != null) { TypeRepresentation tdSrc = defLoad.Type; if(tdSrc.SizeOfHoldingVariable == opConv.SignificantSize) { if((opConv is ZeroExtendOperator && 
tdSrc.IsSigned == false) || (opConv is SignExtendOperator && tdSrc.IsSigned == true ) ) { fSubstitute = true; } } } else if(def is BinaryOperator) { var defBinOp = (BinaryOperator)def; switch(defBinOp.Alu) { case BinaryOperator.ALU.AND: defBinOp.EnsureConstantToTheRight(); var ex = defBinOp.SecondArgument as ConstantExpression; ulong val; if(ex != null && ex.GetAsUnsignedInteger( out val ) && val != 0) { int msb = BitVector.GetPositionOfLastBitSet( val ); uint max = opConv.SignificantSize * 8; // // Masking by a constant smaller than the destination size. // if((opConv is ZeroExtendOperator && (msb < max )) || (opConv is SignExtendOperator && (msb < max - 1)) ) { fSubstitute = true; } } break; } } if(fSubstitute) { foreach(Operator use in useChains[lhs.SpanningTreeIndex]) { use.SubstituteUsage( lhs, rhs ); } } } foreach(Operator use in useChains[lhs.SpanningTreeIndex]) { var useStore = use as StoreIndirectOperator; if(useStore != null) { if(opConv.SignificantSize >= useStore.Type.SizeOfHoldingVariable) { use.SubstituteUsage( lhs, rhs ); } } } } cfg.AssertNoCacheRefreshSinceCheckpoint( ); } #if ENABLE_LOW_LEVEL_OPTIMIZATIONS //--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--// //--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--// //--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--//--// [CompilationSteps.OptimizationHandler(RunOnce = false, RunInSSAForm = true)] private static void ConstantMemoryDereference( PhaseExecution.NotificationContext nc ) { ControlFlowGraphStateForCodeTransformation cfg = nc.CurrentCFG; TypeSystemForCodeTransformation ts = nc.TypeSystem; foreach(var opLoad in cfg.FilterOperators< LoadIndirectOperator >()) { var debugInfo = opLoad.DebugInfo; var lhs = opLoad.FirstResult; var rhs = opLoad.FirstArgument as ConstantExpression; if(rhs != null) { var dd = 
rhs.Value as DataManager.DataDescriptor; if(dd != null) { var val2 = dd.GetDataAtOffset( opLoad.AccessPath, 0, opLoad.Offset ); var ddVal = val2 as DataManager.DataDescriptor; if(ddVal != null && ddVal.CanPropagate) { opLoad.SubstituteWithOperator( SingleAssignmentOperator.New( debugInfo, lhs, ts.CreateConstant( lhs.Type, val2 ) ), Operator.SubstitutionFlags.Default ); continue; } } } } } #endif // ENABLE_LOW_LEVEL_OPTIMIZATIONS } }
/* New BSD License ------------------------------------------------------------------------------- Copyright (c) 2006-2012, EntitySpaces, LLC All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the EntitySpaces, LLC nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL EntitySpaces, LLC BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ------------------------------------------------------------------------------- */ using System; using System.Data; using System.Collections.Generic; using System.Runtime.Serialization; using Tiraggo.DynamicQuery; using System.ComponentModel; namespace Tiraggo.Interfaces { /// <summary> /// Used internally by EntitySpaces in support of the DynamicQuery Prefetch logic. 
This is passed to each
    /// tgPrefetchDelegate delegate
    /// </summary>
    public class tgPrefetchParameters
    {
        /// <summary>
        /// The Root query when the Prefetch path is complete.
        /// </summary>
        public tgDynamicQuery Root;

        /// <summary>
        /// In the tgPrefetchDelegate, if this is non-null then this is "you", so don't create your Query;
        /// it was created in the tgPrefetchDelegate just before you were called.
        /// </summary>
        public tgDynamicQuery You;

        /// <summary>
        /// Used by the prefetch logic to generate table aliases, alias = "a" + Alias++.ToString()
        /// </summary>
        public int Alias = 1;

        /// <summary>
        /// Returns the next table alias ("a1", "a2", ...) and advances the counter.
        /// </summary>
        public string NextAlias()
        {
            return "a" + Alias++.ToString();
        }
    }

    /// <summary>
    /// Used internally by EntitySpaces in support of the DynamicQuery Prefetch logic.
    /// </summary>
    public class tgPrefetchMap
    {
        /// <summary>
        /// The Query for the Prefetch; only the root tgPrefetchMap will contain a Query.
        /// </summary>
        public tgDynamicQuery Query;

        /// <summary>
        /// Only the root tgPrefetchMap will contain a DataTable (after the call to Query.Load()).
        /// </summary>
        public DataTable Table;

        /// <summary>
        /// The Prefetch delegate itself. This is used when building the query.
        /// </summary>
        public tgPrefetchDelegate PrefetchDelegate;

        /// <summary>
        /// The Property name of this prefetch Property, ie, "OrdersCollectionByEmployeeID".
        /// </summary>
        public string PropertyName;

        /// <summary>
        /// The name of the column in this entity.
        /// </summary>
        public string MyColumnName;

        /// <summary>
        /// The name of the column in the parent entity.
        /// </summary>
        public string ParentColumnName;

        /// <summary>
        /// The sub-path, if this is a SubPath.
        /// </summary>
        public string Path;

        /// <summary>
        /// True if the foreign key is a composite (multi-part) key.
        /// </summary>
        public bool IsMultiPartKey;
    }

    /// <summary>
    /// A Prefetch delegate map entry used when building a Prefetch path.
    /// </summary>
    /// <param name="data">Used to pass state to each tgPrefetchDelegate delegate as the query is being created</param>
    public delegate void tgPrefetchDelegate(tgPrefetchParameters data);

    /// <summary>
    /// This provides the Dynamic Query mechanism used by your Business object (Employees),
    /// collection (EmployeesCollection), and query caching (EmployeesQuery).
    /// </summary>
    /// <example>
    /// DynamicQuery allows you to (without writing any stored procedures)
    /// query your database on the fly. All selection criteria are passed in
    /// via Parameters (SAParameter, OleDbParameter) in order to prevent
    /// sql injection techniques often attempted by hackers.
/// Additional examples are provided here: /// <code> /// http://www.entityspaces.net/portal/QueryAPISamples/tabid/80/Default.aspx /// </code> /// <code> /// EmployeesCollection emps = new EmployeesCollection; /// /// emps.Query.es.CountAll = true; /// emps.Query.Select /// ( /// emps.Query.LastName, /// emps.Query.FirstName /// ) /// .Where /// ( /// (emps.Query.LastName.Like("%A%") || emps.Query.LastName.Like("%O%")) && /// emps.Query.BirthDate.Between("1940-01-01", "2006-12-31") /// ) /// .GroupBy /// ( /// emps.Query.LastName, /// emps.Query.FirstName /// ) /// .OrderBy /// ( /// emps.Query.LastName.Descending, /// emps.Query.FirstName.Ascending /// ); /// /// emps.Query.Load(); /// </code> /// </example> //[Serializable] public class tgDynamicQuery : tgDynamicQuerySerializable { /// <summary> /// The Constructor /// </summary> public tgDynamicQuery() { } /// <summary> /// The Constructor used when using this query in a "Join" /// </summary> /// <param name="joinAlias">The alias of the associated Table to be used in the "Join"</param> public tgDynamicQuery(string joinAlias) { iData.JoinAlias = joinAlias; } /// <summary> /// Read-only metadata for the entity. /// </summary> /// <remarks> /// The sample below loops through the <see cref="tgColumnMetadataCollection"/> in provided /// by the <see cref="IMetadata"/> interface. There is a lot of useful information here, in fact, /// there is enough information for EntitySpaces to build all of the dynamic sql required during /// operations that use dynamic sql. /// <code> /// public partial class Employees : esEmployees /// { /// public void CustomMethod() /// { /// foreach(tgColumnMetadata col in this.Meta.Columns) /// { /// if(col.IsInPrimaryKey) /// { /// // do something ... 
    ///             }
    ///         }
    ///     }
    /// }
    /// </code>
    /// </remarks>
    /// <seealso cref="tgColumnMetadata"/>
    /// <seealso cref="tgColumnMetadataCollection"/>
    /// <seealso cref="tgProviderSpecificMetadata"/>
    virtual protected IMetadata Meta { get { return null; } }

    /// <summary>
    /// Called whenever the Entity needs a connection. This can be used to override the default connection
    /// per object manually, or automatically by filling in the "Connection Name" on the "Generated Master"
    /// template. The base implementation returns null (no override).
    /// </summary>
    /// <returns>The connection name, or null to use the default connection.</returns>
    override protected string GetConnectionName()
    {
        return null;
    }

    /// <summary>
    /// Recursively pushes provider metadata (DataID, columns, catalog, schema) into
    /// <paramref name="query"/> and every nested sub-query and set-operation query it contains.
    /// This is needed because proxy (client-side) queries cannot work with column and provider
    /// metadata until they are serialized back to the server.
    /// </summary>
    /// <param name="query">The query (or sub-query) to process.</param>
    /// <param name="beenThere">Tracks already-visited queries so each query is processed only once.</param>
    private void AssignProviderMetadata(tgDynamicQuerySerializable query, List<tgDynamicQuerySerializable> beenThere)
    {
        // Guard against processing the same query twice.
        if (beenThere.Contains(query)) return;

        beenThere.Add(query);

        tgDynamicQuery theQuery = query as tgDynamicQuery;
        IDynamicQuerySerializableInternal iQuery = query as IDynamicQuerySerializableInternal;

        if (theQuery != null)
        {
            tgConnection conn = theQuery.tg2.Connection;

            // Only fetch the provider metadata once per query.
            if (iQuery.ProviderMetadata == null)
            {
                tgProviderSpecificMetadata providerMetadata = theQuery.Meta.GetProviderMetadata(conn.ProviderMetadataKey);
                iQuery.DataID = theQuery.Meta.DataID;
                iQuery.Columns = theQuery.Meta.Columns;
                iQuery.ProviderMetadata = providerMetadata;
            }

            iQuery.Catalog = conn.Catalog;
            iQuery.Schema = conn.Schema;
        }

        // This code is for proxies as they are unable to work with column and provider metadata
        // until serialized back to the server
        if (iQuery.SelectAll)
        {
            // Expand a pending "select all" into an explicit per-column Select().
            // NOTE(review): the tgQueryItem is bound to 'this' (the root query) rather than
            // 'query' (the query being processed) — confirm this is intentional for sub-queries.
            foreach (tgColumnMetadata col in (tgColumnMetadataCollection)iQuery.Columns)
            {
                tgQueryItem item = new tgQueryItem(this, col.Name, col.esType);
                query.Select(item);
            }
        }
        else
        {
            // Expand "select all except" into an explicit column list, skipping the exclusions.
            List<tgQueryItem> columns = iQuery.SelectAllExcept;
            if (columns != null)
            {
                foreach (tgColumnMetadata col in (tgColumnMetadataCollection)iQuery.Columns)
                {
                    bool found = false;
                    for (int i = 0; i < columns.Count; i++)
                    {
                        // tgQueryItem is compared by column name via its explicit string conversion.
                        if (col.Name == (string)columns[i])
                        {
                            found = true;
                            break;
                        }
                    }

                    if (found) continue;

                    tgExpression item = new tgQueryItem(this, col.Name, col.esType);
                    query.Select(item);
                }
            }
        }

        // Recurse into nested sub-queries ...
        foreach (tgDynamicQuerySerializable subQuery in iQuery.queries.Values)
        {
            AssignProviderMetadata(subQuery, beenThere);
        }

        // ... and into any set-operation (UNION/INTERSECT/...) queries.
        if (iQuery.InternalSetOperations != null)
        {
            foreach (tgSetOperation setOperation in iQuery.InternalSetOperations)
            {
                AssignProviderMetadata(setOperation.Query, beenThere);
            }
        }
    }

    /// <summary>
    /// Delegate invoked when DynamicQuery.Load() is called, i.e. when code such as
    /// emps.Query.Load() executes.
    /// </summary>
    /// <param name="Query">The DynamicQuery that was just loaded</param>
    /// <param name="table">The DataTable as passed into Query.Load()</param>
    /// <returns>True if at least one record was loaded</returns>
    public delegate bool QueryLoadedDelegate(tgDynamicQuery Query, DataTable table);

    /// <summary>
    /// Optional callback raised at the end of Load(); when assigned, its return value
    /// becomes Load()'s return value. By default nothing is assigned.
    /// </summary>
    [OptionalFieldAttribute]
    public QueryLoadedDelegate OnLoadDelegate;

    /// <summary>
    /// This initializes the tgDataRequest for the query.
    /// </summary>
    /// <param name="request">The request to populate.</param>
    protected void PopulateRequest(tgDataRequest request)
    {
        IMetadata meta = this.Meta;

        tgConnection conn = this.tg2.Connection;
        tgProviderSpecificMetadata providerMetadata = meta.GetProviderMetadata(conn.ProviderMetadataKey);

        IDynamicQuerySerializableInternal iQuery = this as IDynamicQuerySerializableInternal;

        // Sub-queries and set operations need provider metadata pushed down recursively.
        if ((this.queries != null && this.queries.Count > 0) || iQuery.InternalSetOperations != null)
        {
            AssignProviderMetadata(this, new List<tgDynamicQuerySerializable>());
        }

        string catalog = conn.Catalog;
        string schema = conn.Schema;

        // Mirror the metadata onto our own serializable query data ...
        iData.Catalog = catalog;
        iData.Schema = schema;
        iData.DataID = meta.DataID;
        iData.ProviderMetadata = providerMetadata;
        iData.Columns = meta.Columns;

        // ... and onto the outgoing request.
        request.ConnectionString = conn.ConnectionString;
        request.CommandTimeout = conn.CommandTimeout;
        request.QueryType = tgQueryType.DynamicQuery;
        request.DynamicQuery = this;
        request.DataID = meta.DataID;
        request.ProviderMetadata = providerMetadata;
        request.Catalog = catalog;
        request.Schema = schema;
        request.Columns = meta.Columns;

        // Expand a pending "select all" into an explicit column list.
        if (this.m_selectAll)
        {
            _selectAll();
        }

        if (tg.QuerySource == null || tg.QuerySource.Length == 0)
        {
            tg.QuerySource = providerMetadata.Source;
        }
    }

    #region Select Processing

    /// <summary>
    /// Expands SelectAll() into one Select() per generated column, then clears the pending flag.
    /// </summary>
    private void _selectAll()
    {
        if (this.m_selectAll)
        {
            foreach (tgColumnMetadata col in this.Meta.Columns)
            {
                tgQueryItem item = new tgQueryItem(this, col.Name, col.esType);
                this.Select(item);
            }

            this.m_selectAll = false;
        }
    }

    #endregion

    #region Load

    /// <summary>
    /// Re-applies SelectAll()/SelectAllExcept() on a query that was serialized from the
    /// client side, then calls HookupWithNoLock (presumably propagating the NOLOCK hint
    /// — defined elsewhere; confirm).
    /// </summary>
    private void FixupSerializedQueries()
    {
        if (m_selectAll)
        {
            SelectAll();
        }
        else if (m_selectAllExcept != null)
        {
            SelectAllExcept(m_selectAllExcept.ToArray());
        }

        HookupWithNoLock(this);
    }

    /// <summary>
    /// Execute the Query and loads your BusinessEntity.
    /// If you need to be notified that this is being called
    /// override BusinessEntity.Query.OnLoadEvent().
    /// </summary>
    /// <remarks>
    /// The default conjunction is AND.
    /// You can change the default conjunction this way:
    /// <code>
    /// emps.Query.es.DefaultConjunction = tgConjunction.Or;
    /// </code>
    /// </remarks>
    /// <returns>True if at least one record was loaded</returns>
    virtual public bool Load()
    {
        bool loaded = false;

        DataTable table = null;

        // Restore any state lost during client-side serialization before building the request.
        FixupSerializedQueries();

        tgDataRequest request = new tgDataRequest();
        this.PopulateRequest(request);

        tgDataProvider provider = new tgDataProvider();
        tgDataResponse response = provider.esLoadDataTable(request, this.tg2.Connection.ProviderSignature);

        table = response.Table;

        if (prefetchMaps != null)
        {
            foreach (tgPrefetchMap map in prefetchMaps)
            {
                // Give our Prefetch Queries the proper connection strings
                if (!map.Query.tg2.HasConnection)
                {
                    string generatedName = this.GetConnectionName();

                    if (generatedName != null)
                    {
                        // Use the connection name typed into the generated master when they
                        // generated the code
                        map.Query.tg2.Connection.Name = generatedName;
                    }
                    else
                    {
                        // Use the connection from the Collection/Entity at the time they
                        // call Load()
                        map.Query.tg2.Connection.Name = this.connection.Name;
                    }
                }

                // Execute each prefetch query now and stash its result table on the map.
                map.Table = map.Query.LoadDataTable();
            }
        }

        // If assigned, the delegate consumes the DataTable and reports whether rows were loaded;
        // without a delegate this method always returns false.
        if (this.OnLoadDelegate != null)
        {
            loaded = OnLoadDelegate(this, table);
        }

        return loaded;
    }

    /// <summary>
    /// This merely parses and returns the SQL Syntax, no SQL is executed.
    /// </summary>
    /// <remarks>
    /// The default conjunction is AND.
/// You can change the default conjunction this way: /// <code> /// emps.Query.es.DefaultConjunction = tgConjunction.Or; /// </code> /// </remarks> /// <returns>The SQL Syntax, the same as query.es.LastQuery when a query is executed.</returns> virtual public string Parse() { FixupSerializedQueries(); tgDataRequest request = new tgDataRequest(); this.PopulateRequest(request); request.QueryType = tgQueryType.DynamicQueryParseOnly; tgDataProvider provider = new tgDataProvider(); tgDataResponse response = provider.esLoadDataTable(request, this.tg2.Connection.ProviderSignature); return response.LastQuery; } /// <summary> /// Execute the Query and load a DataTable. /// </summary> /// <returns>A DataTable containing the loaded records.</returns> virtual public DataTable LoadDataTable() { DataTable table = null; FixupSerializedQueries(); tgDataRequest request = new tgDataRequest(); this.PopulateRequest(request); tgDataProvider provider = new tgDataProvider(); tgDataResponse response = provider.esLoadDataTable(request, this.tg2.Connection.ProviderSignature); table = response.Table; return table; } /// <summary> /// Execute the query and return a DataReader. You must use the 'using' syntax or Close the reader /// when finished with it. /// </summary> /// <returns>The DataReader</returns> virtual public IDataReader ExecuteReader() { FixupSerializedQueries(); tgDataRequest request = new tgDataRequest(); this.PopulateRequest(request); tgDataProvider provider = new tgDataProvider(); tgDataResponse response = provider.ExecuteReader(request, this.tg2.Connection.ProviderSignature); return response.DataReader; } /// <summary> /// Execute the query and return a single value. 
/// </summary> /// <returns>The value</returns> virtual public object ExecuteScalar() { FixupSerializedQueries(); tgDataRequest request = new tgDataRequest(); this.PopulateRequest(request); tgDataProvider provider = new tgDataProvider(); tgDataResponse response = provider.ExecuteScalar(request, this.tg2.Connection.ProviderSignature); return response.Scalar; } /// <summary> /// Execute the query and return a single value. /// </summary> /// <returns>The value</returns> virtual public T ExecuteScalar<T>() { FixupSerializedQueries(); tgDataRequest request = new tgDataRequest(); this.PopulateRequest(request); tgDataProvider provider = new tgDataProvider(); tgDataResponse response = provider.ExecuteScalar(request, this.tg2.Connection.ProviderSignature); return (T)response.Scalar; } #endregion #region Prefetch Support /// <summary> /// Used when Prefetching data. This is called once per each prefetched set of data /// </summary> /// <remarks> /// The code below loads EmployeeID number 1, and prefetches it's Orders and OrderDetail records. /// We could also load many Employees via a Collection and prefetch their Orders and OrderDetail records. 
    /// <code>
    /// // The Main Query
    /// EmployeesQuery q = new EmployeesQuery("e");
    /// q.Where(q.EmployeeID == 1);
    ///
    /// // The OrdersCollection
    /// OrdersQuery o = q.Prefetch&lt;OrdersQuery&gt;(Employees.Prefetch_OrdersCollectionByEmployeeID);
    /// EmployeesQuery emp1 = o.GetQuery&lt;EmployeesQuery&gt;();
    /// o.Where(emp1.EmployeeID == 1);
    ///
    /// // The OrdersDetailsCollection
    /// OrderDetailsQuery od = q.Prefetch&lt;OrderDetailsQuery&gt;(Employees.Prefetch_OrdersCollectionByEmployeeID, Orders.Prefetch_OrderDetailsCollectionByOrderID);
    /// EmployeesQuery emp2 = od.GetQuery&lt;EmployeesQuery&gt;();
    /// od.Where(emp2.EmployeeID == 1);
    ///
    /// // Load It
    /// Employees emp = new Employees();
    /// emp.Load(q);
    /// </code>
    /// </remarks>
    /// <typeparam name="T">The Type of the tgDynamicQuery returned</typeparam>
    /// <param name="maps">The Path to the data</param>
    /// <returns>The tgDynamicQuery for the Type of query you intend to load</returns>
    public T Prefetch<T>(params tgPrefetchMap[] maps) where T : tgDynamicQuery
    {
        // Delegates to the overload below with provideSelect = true.
        return this.Prefetch<T>(true, maps);
    }

    /// <summary>
    /// This Prefetch allows you to control what columns are brought back.
    /// NOTE(review): when <paramref name="provideSelect"/> is true, Select(query) is called
    /// on the root prefetch query for you; pass false to fill in the Select() clause yourself.
    /// (The original documentation stated the opposite; the description here follows the code.)
    /// </summary>
    /// <typeparam name="T">The Type of the tgDynamicQuery returned</typeparam>
    /// <param name="provideSelect">If true, a default Select() is provided for you.</param>
    /// <param name="maps">The Path to the data</param>
    /// <returns>The tgDynamicQuery for the Type of query you intend to load</returns>
    public T Prefetch<T>(bool provideSelect, params tgPrefetchMap[] maps) where T : tgDynamicQuery
    {
        if (maps != null)
        {
            if (prefetchMaps == null)
            {
                prefetchMaps = new List<tgPrefetchMap>();
            }

            tgPrefetchParameters data = new tgPrefetchParameters();

            // Create the query, we do so in reverse order
            for (int i = maps.Length - 1; i >= 0; i--)
            {
                tgPrefetchDelegate prefetchDelegate = maps[i].PrefetchDelegate;
                prefetchDelegate(data);
            }

            // The path is the next to the last PropertyName
            string path = string.Empty;
            if (maps.Length > 1)
            {
                path = maps[maps.Length - 2].PropertyName;
            }

            // The last map in the chain is the root of this prefetch; remember the query
            // that was just built plus the path used to stitch results together.
            tgPrefetchMap rootMap = maps[maps.Length - 1];
            rootMap.Query = data.Root;
            rootMap.Path = path;
            prefetchMaps.Add(rootMap);

            if (provideSelect)
            {
                rootMap.Query.Select(rootMap.Query);
            }

            return (T)data.Root;
        }

        return null;
    }

    /// <summary>
    /// This Prefetch call is a simpler API for when you are not interested in tweaking the Query
    /// </summary>
    /// <param name="maps">The Path to the data</param>
    public void Prefetch(params tgPrefetchMap[] maps)
    {
        // NOTE(review): this duplicates Prefetch&lt;T&gt;(true, maps) except for the generic cast
        // of the return value; keep the two implementations in sync if either changes.
        if (maps != null)
        {
            if (prefetchMaps == null)
            {
                prefetchMaps = new List<tgPrefetchMap>();
            }

            tgPrefetchParameters data = new tgPrefetchParameters();

            // Create the query, we do so in reverse order
            for (int i = maps.Length - 1; i >= 0; i--)
            {
                tgPrefetchDelegate prefetchDelegate = maps[i].PrefetchDelegate;
                prefetchDelegate(data);
            }

            // The path is the next to the last PropertyName
            string path = string.Empty;
            if (maps.Length > 1)
            {
                path = maps[maps.Length - 2].PropertyName;
            }

            tgPrefetchMap rootMap = maps[maps.Length - 1];
            rootMap.Query = data.Root;
            rootMap.Path = path;
            prefetchMaps.Add(rootMap);

            rootMap.Query.Select(rootMap.Query);
        }
    }

    #endregion

    #region Overloads

    /// <summary>
    /// This method will create a Select statement for all of the columns in the entity except for the ones passed in.
    /// This is very useful when you want to eliminate blobs and other fields for performance.
    /// </summary>
    /// <param name="columns">The columns which you wish to exclude from the Select statement</param>
    /// <returns>This query, to allow chaining.</returns>
    override public tgDynamicQuerySerializable SelectAllExcept(params tgQueryItem[] columns)
    {
        foreach (tgColumnMetadata col in this.Meta.Columns)
        {
            // Skip any column named in the exclusion list (tgQueryItem converts to its column name).
            bool found = false;
            for (int i = 0; i < columns.Length; i++)
            {
                if (col.Name == (string)columns[i])
                {
                    found = true;
                    break;
                }
            }

            if (found) continue;

            tgExpression item = new tgQueryItem(this, col.Name, col.esType);
            this.Select(item);
        }

        return this;
    }

    /// <summary>
    /// This method will select all of the columns that were present when you generated your
    /// classes as opposed to doing a SELECT *
    /// </summary>
    /// <returns>This query, to allow chaining.</returns>
    override public tgDynamicQuerySerializable SelectAll()
    {
        foreach (tgColumnMetadata col in this.Meta.Columns)
        {
            tgQueryItem item = new tgQueryItem(this, col.Name, col.esType);
            this.Select(item);
        }

        return this;
    }

    #endregion

    #region Helper Routine

    /// <summary>
    /// Wraps the supplied comparisons (or pre-built lists of comparisons) in parentheses,
    /// inserting <paramref name="conj"/> between each pair.
    /// </summary>
    /// <param name="conj">The conjunction (AND/OR) placed between items.</param>
    /// <param name="theItems">tgComparison instances or List&lt;tgComparison&gt; instances.</param>
    /// <returns>The flattened comparison list, including the parenthesis markers.</returns>
    private List<tgComparison> ProcessWhereItems(tgConjunction conj, params object[] theItems)
    {
        List<tgComparison> items = new List<tgComparison>();

        items.Add(new tgComparison(tgParenthesis.Open));

        bool first = true;

        tgComparison whereItem;
        int count = theItems.Length;

        for (int i = 0; i < count; i++)
        {
            object o = theItems[i];

            whereItem = o as tgComparison;
            if (whereItem != null)
            {
                if (!first)
                {
                    items.Add(new tgComparison(conj));
                }
                items.Add(whereItem);
                first = false;
            }
            else
            {
                // A pre-built list of comparisons is spliced in whole.
                List<tgComparison> listItem = o as List<tgComparison>;
                if (listItem != null)
                {
                    if (!first)
                    {
                        items.Add(new tgComparison(conj));
                    }
                    items.AddRange(listItem);
                    first = false;
                }
                else
                {
                    // NOTE(review): a bare Exception is thrown here; callers may rely on
                    // this exact type, so it is left unchanged.
                    throw new Exception("Unsupported Type");
                }
            }
        }

        items.Add(new tgComparison(tgParenthesis.Close));

        return items;
    }

    #endregion Helper Routine

    #region es2

    /// <summary>
    /// This is to help hide some details from Intellisense.
    /// </summary>
    public DynamicQueryProps tg2
    {
        get
        {
            // Lazily created; the props object merely wraps 'this'.
            if (this.props == null)
            {
                this.props = new DynamicQueryProps(this);
            }

            return this.props;
        }
    }

    // Backing store for tg2; not serialized.
    [NonSerialized]
    private DynamicQueryProps props;

    /// <summary>
    /// The Dynamic Query properties.
    /// </summary>
    public new class DynamicQueryProps
    {
        /// <summary>
        /// The Dynamic Query properties.
        /// </summary>
        /// <param name="query">The tgDynamicQuery whose properties are exposed.</param>
        public DynamicQueryProps(tgDynamicQuery query)
        {
            this.dynamicQuery = query;
        }

        /// <summary>
        /// Returns true if this Query has been assigned its own Connection.
        /// </summary>
        public bool HasConnection
        {
            get { return this.dynamicQuery.connection != null; }
        }

        /// <summary>
        /// Used only to override the IConnectionNameService API when in use.
        /// </summary>
        /// <param name="connectionName">The Name of the connection</param>
        /// <returns>The newly created tgConnection Connection</returns>
        [EditorBrowsable(EditorBrowsableState.Never)]
        public tgConnection ConnectionServiceOverride(string connectionName)
        {
            // Only creates and names the connection when none exists yet; an existing
            // connection is returned untouched (its name is NOT overwritten).
            if (this.dynamicQuery.connection == null)
            {
                this.dynamicQuery.connection = new tgConnection();
                this.dynamicQuery.connection.Name = connectionName;
            }
            return this.dynamicQuery.connection;
        }

        /// <summary>
        /// tgConnection Connection.
        /// </summary>
        public tgConnection Connection
        {
            get
            {
                // Lazily resolve the connection: first via the registered ConnectionService
                // (if any), otherwise via the generated GetConnectionName() override.
                if (this.dynamicQuery.connection == null)
                {
                    this.dynamicQuery.connection = new tgConnection();

                    if (tgConnection.ConnectionService != null)
                    {
                        this.dynamicQuery.connection.Name = tgConnection.ConnectionService.GetName();
                    }
                    else
                    {
                        string connName = this.dynamicQuery.GetConnectionName();
                        if (connName != null)
                        {
                            this.dynamicQuery.connection.Name = connName;
                        }
                    }
                }

                return this.dynamicQuery.connection;
            }
            set { this.dynamicQuery.connection = value; }
        }

        /// <summary>
        /// Used internally by EntitySpaces, do not use this Property.
        /// </summary>
        public List<tgPrefetchMap> PrefetchMaps
        {
            get { return this.dynamicQuery.prefetchMaps; }
        }

        // The query whose properties are being exposed.
        private tgDynamicQuery dynamicQuery;
    }

    #endregion

    // Convenience view of 'this' through the internal serialization interface.
    private IDynamicQuerySerializableInternal iData
    {
        get { return this as IDynamicQuerySerializableInternal; }
    }

    /// <summary>
    /// Used when deserializing the queries sent from the client side.
    /// </summary>
    [NonSerialized]
    static Dictionary<string, IMetadata> metadataDictionary = new Dictionary<string, IMetadata>();

    /// <summary>
    /// This property is in "tg2" because the client side queries cannot have connections.
    /// </summary>
    [NonSerialized]
    private tgConnection connection;

    /// <summary>
    /// Holds the information for each Prefetch property to be loaded.
    /// </summary>
    [NonSerialized]
    private List<tgPrefetchMap> prefetchMaps;
}
}
/*
 * MindTouch Dream - a distributed REST framework
 * Copyright (C) 2006-2011 MindTouch, Inc.
 * www.mindtouch.com  oss@mindtouch.com
 *
 * For community documentation and downloads visit wiki.developer.mindtouch.com;
 * please review the licensing section.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

using System;
using System.Collections.Generic;
using System.IO;
using System.Text;

using MindTouch.IO;
using MindTouch.Tasking;
using MindTouch.Web;
using MindTouch.Xml;

namespace MindTouch.Dream {
    using Yield = IEnumerator<IYield>;

    /// <summary>
    /// Provides the Dream encapsulations of Http request and response objects.
    /// </summary>
    public class DreamMessage {

        //--- Class Fields ---
        private static log4net.ILog _log = LogUtils.CreateLog();

        //--- Class Methods ---

        /// <summary>
        /// New Message with HTTP status: Ok (200).
        /// </summary>
        /// <returns>New DreamMessage.</returns>
        public static DreamMessage Ok() {
            return new DreamMessage(DreamStatus.Ok, null);
        }

        /// <summary>
        /// New Message with HTTP status: Ok (200).
        /// </summary>
        /// <param name="doc">Message body.</param>
        /// <returns>New DreamMessage.</returns>
        public static DreamMessage Ok(XDoc doc) {
            return new DreamMessage(DreamStatus.Ok, null, doc);
        }

        /// <summary>
        /// New Message with HTTP status: Ok (200).
        /// </summary>
        /// <param name="contentType">Content Mime-Type.</param>
        /// <param name="doc">Message body.</param>
        /// <returns>New DreamMessage.</returns>
        public static DreamMessage Ok(MimeType contentType, XDoc doc) {
            return new DreamMessage(DreamStatus.Ok, null, contentType, doc);
        }

        /// <summary>
        /// New Message with HTTP status: Ok (200).
        /// </summary>
        /// <param name="contentType">Content Mime-Type.</param>
        /// <param name="text">Message body.</param>
        /// <returns>New DreamMessage.</returns>
        public static DreamMessage Ok(MimeType contentType, string text) {
            return new DreamMessage(DreamStatus.Ok, null, contentType, text);
        }

        /// <summary>
        /// Obsolete: Use <see cref="Ok(System.Collections.Generic.KeyValuePair{string,string}[])"/> instead.
        /// </summary>
        [Obsolete("Use DreamMessage.Ok(KeyValuePair<string, string>[] values) instead.")]
        public static DreamMessage Ok(XUri uri) {
            // The uri's query portion becomes the form-urlencoded message body.
            return new DreamMessage(DreamStatus.Ok, null, MimeType.FORM_URLENCODED, uri.Query);
        }

        /// <summary>
        /// New Message with HTTP status: Ok (200). The values are rendered as a form-urlencoded body.
        /// </summary>
        /// <param name="values">Name/value pair body.</param>
        /// <returns>New DreamMessage.</returns>
        public static DreamMessage Ok(KeyValuePair<string, string>[] values) {
            // RenderParams may return null; fall back to an empty body in that case.
            return new DreamMessage(DreamStatus.Ok, null, MimeType.FORM_URLENCODED, XUri.RenderParams(values) ?? string.Empty);
        }

        /// <summary>
        /// New Message with HTTP status: Ok (200).
        /// </summary>
        /// <param name="contentType">Content Mime-Type.</param>
        /// <param name="content">Message body.</param>
        /// <returns>New DreamMessage.</returns>
        public static DreamMessage Ok(MimeType contentType, byte[] content) {
            return new DreamMessage(DreamStatus.Ok, null, contentType, content);
        }

        /// <summary>
        /// New Message with HTTP status: Ok (200).
        /// </summary>
        /// <param name="contentType">Content Mime-Type.</param>
        /// <param name="contentLength">Content length.</param>
        /// <param name="content">Message body.</param>
        /// <returns>New DreamMessage.</returns>
        public static DreamMessage Ok(MimeType contentType, long contentLength, Stream content) {
            return new DreamMessage(DreamStatus.Ok, null, contentType, contentLength, content);
        }

        /// <summary>
        /// New Message with HTTP status: Created (201).
        /// </summary>
        /// <param name="uri">Location of created resource (emitted as the Location header).</param>
        /// <param name="doc">Message body.</param>
        /// <returns>New DreamMessage.</returns>
        public static DreamMessage Created(XUri uri, XDoc doc) {
            DreamMessage result = new DreamMessage(DreamStatus.Created, null, doc);
            result.Headers.Location = uri;
            return result;
        }

        /// <summary>
        /// New Message with HTTP status: Not Modified (304).
        /// </summary>
        /// <returns>New DreamMessage.</returns>
        public static DreamMessage NotModified() {
            return new DreamMessage(DreamStatus.NotModified, null, XDoc.Empty);
        }

        /// <summary>
        /// New Message with HTTP status: Not Found (404).
        /// </summary>
        /// <param name="reason">Reason; included in the standard error document body.</param>
        /// <returns>New DreamMessage.</returns>
        public static DreamMessage NotFound(string reason) {
            _log.DebugFormat("Response: Not Found - {0}{1}", reason, DebugOnly_GetRequestPath());
            return new DreamMessage(DreamStatus.NotFound, null, GetDefaultErrorResponse(DreamStatus.NotFound, "Not Found", reason));
        }

        /// <summary>
        /// New Message with HTTP status: Bad Request (400).
        /// </summary>
        /// <param name="reason">Reason; included in the standard error document body.</param>
        /// <returns>New DreamMessage.</returns>
        public static DreamMessage BadRequest(string reason) {
            _log.DebugFormat("Response: Bad Request - {0}{1}", reason, DebugOnly_GetRequestPath());
            return new DreamMessage(DreamStatus.BadRequest, null, GetDefaultErrorResponse(DreamStatus.BadRequest, "Bad Request", reason));
        }

        /// <summary>
        /// New Message with HTTP status: Not Implemented (501).
        /// </summary>
        /// <param name="reason">Reason; included in the standard error document body.</param>
        /// <returns>New DreamMessage.</returns>
        public static DreamMessage NotImplemented(string reason) {
            _log.DebugFormat("Response: Not Implemented - {0}{1}", reason, DebugOnly_GetRequestPath());
            return new DreamMessage(DreamStatus.NotImplemented, null, GetDefaultErrorResponse(DreamStatus.NotImplemented, "Not Implemented", reason));
        }

        /// <summary>
        /// New Message with HTTP status: Conflict (409).
        /// </summary>
        /// <param name="doc">Message body.</param>
        /// <returns>New DreamMessage.</returns>
        public static DreamMessage Conflict(XDoc doc) {
            _log.DebugMethodCall("Response: Conflict");
            return new DreamMessage(DreamStatus.Conflict, null, doc);
        }

        /// <summary>
        /// New Message with HTTP status: Conflict (409).
        /// </summary>
        /// <param name="reason">Reason; included in the standard error document body.</param>
        /// <returns>New DreamMessage.</returns>
        public static DreamMessage Conflict(string reason) {
            _log.DebugFormat("Response: Conflict - {0}{1}", reason, DebugOnly_GetRequestPath());
            return new DreamMessage(DreamStatus.Conflict, null, GetDefaultErrorResponse(DreamStatus.Conflict, "Conflict", reason));
        }

        /// <summary>
        /// New Message with HTTP status: Found (302)
        /// </summary>
        /// <param name="uri">Redirect target (emitted as the Location header).</param>
        /// <returns>New DreamMessage.</returns>
        public static DreamMessage Redirect(XUri uri) {
            DreamMessage result = new DreamMessage(DreamStatus.Found, null, XDoc.Empty);
            result.Headers.Location = uri;
            return result;
        }

        /// <summary>
        /// New Message with HTTP status: Unauthorized (401)
        /// </summary>
        /// <param name="accessRealm">Access Realm, emitted in the "Basic realm" authentication challenge header.</param>
        /// <param name="reason">Reason; included in the standard error document body.</param>
        /// <returns>New DreamMessage.</returns>
        public static DreamMessage AccessDenied(string accessRealm, string reason) {
            _log.DebugFormat("Response: Unauthorized - {0}{1}", reason, DebugOnly_GetRequestPath());
            DreamMessage result = new DreamMessage(DreamStatus.Unauthorized, null, GetDefaultErrorResponse(DreamStatus.Unauthorized, "Unauthorized", reason));
            result.Headers.Authenticate = string.Format("Basic realm=\"{0}\"", accessRealm);
            return result;
        }

        /// <summary>
        /// New Message with HTTP status: LicenseRequired (402)
        /// </summary>
        /// <param name="reason">Reason; included in the standard error document body.</param>
        /// <returns>New DreamMessage.</returns>
        public static DreamMessage LicenseRequired(string reason) {
            _log.DebugFormat("Response: LicenseRequired - {0}{1}", reason, DebugOnly_GetRequestPath());
            return new DreamMessage(DreamStatus.LicenseRequired, null, GetDefaultErrorResponse(DreamStatus.LicenseRequired, "LicenseRequired", reason));
        }

        /// <summary>
        /// New Message with HTTP status: Forbidden (403)
        /// </summary>
        /// <param name="reason">Reason; included in the standard error document body.</param>
        /// <returns>New DreamMessage.</returns>
        public static DreamMessage Forbidden(string reason) {
            _log.DebugFormat("Response: Forbidden - {0}{1}", reason, DebugOnly_GetRequestPath());
            return new DreamMessage(DreamStatus.Forbidden, null, GetDefaultErrorResponse(DreamStatus.Forbidden, "Forbidden", reason));
        }

        /// <summary>
        /// New Message with HTTP status: Method Not Allowed (405)
        /// </summary>
        /// <param name="allowedMethods">Array of allowed request Verbs (emitted as the Allow header).</param>
        /// <param name="reason">Reason; included in the standard error document body.</param>
        /// <returns>New DreamMessage.</returns>
        public static DreamMessage MethodNotAllowed(string[] allowedMethods, string reason) {
            _log.DebugFormat("Response: MethodNotAllowed - {0}{1}", reason, DebugOnly_GetRequestPath());
            DreamMessage result = new DreamMessage(DreamStatus.MethodNotAllowed, null, GetDefaultErrorResponse(DreamStatus.MethodNotAllowed, "Method Not Allowed", reason));
            result.Headers.Allow = string.Join(",", allowedMethods);
            return result;
        }

        /// <summary>
        /// New Message with HTTP status: Internal Error (500)
        /// </summary>
        /// <returns>New DreamMessage.</returns>
        public static DreamMessage InternalError() {
            _log.DebugMethodCall("Response: Internal Error");
            return new DreamMessage(DreamStatus.InternalError, null, XDoc.Empty);
        }

        /// <summary>
        /// New Message with HTTP status: Internal Error (500)
        /// </summary>
        /// <param name="text">Error message.</param>
        /// <returns>New DreamMessage.</returns>
        public static DreamMessage InternalError(string text) {
            _log.DebugMethodCall("Response: Internal Error", text);
            return new DreamMessage(DreamStatus.InternalError, null, GetDefaultErrorResponse(DreamStatus.InternalError, "Internal Error", text));
        }

        /// <summary>
        /// New Message with HTTP status: Internal Error (500)
        /// </summary>
        /// <param name="e">Exception responsible for internal error.</param>
        /// <returns>New DreamMessage.</returns>
        public static DreamMessage InternalError(Exception e) {
            _log.DebugExceptionMethodCall(e, "Response: Internal Error");

            // A null exception yields an empty body; otherwise the exception is serialized as XML.
            return new DreamMessage(DreamStatus.InternalError, null, MimeType.DREAM_EXCEPTION, (e != null) ? new XException(e) : XDoc.Empty);
        }

        /// <summary>
        /// Create a message from a file, including its contents.
        /// </summary>
        /// <param name="filename">Path to file.</param>
        /// <returns>New DreamMessage.</returns>
        public static DreamMessage FromFile(string filename) {
            return FromFile(filename, false);
        }

        /// <summary>
        /// Create a message from a file.
        /// </summary>
        /// <param name="filename">Path to file.</param>
        /// <param name="omitFileContents">If <see langword="True"/> the contents of the file are omitted.</param>
        /// <returns>New DreamMessage.</returns>
        public static DreamMessage FromFile(string filename, bool omitFileContents) {
            return FromFile(filename, null, null, omitFileContents);
        }

        /// <summary>
        /// Create a message from a file.
        /// </summary>
        /// <param name="filename">Path to file.</param>
        /// <param name="contentType">Mime-Type of message; if null, it is derived from the file extension.</param>
        /// <param name="displayName">File name to emit in the Content-Disposition header.</param>
        /// <param name="omitFileContents">If <see langword="True"/> the contents of the file are omitted.</param>
        /// <returns>New DreamMessage.</returns>
        public static DreamMessage FromFile(string filename, MimeType contentType, string displayName, bool omitFileContents) {
            if(contentType == null) {
                contentType = MimeType.FromFileExtension(filename);
            }
            DreamMessage result;
            if(omitFileContents) {

                // headers only: advertise the file length but attach no content
                result = new DreamMessage(DreamStatus.Ok, null, contentType, new FileInfo(filename).Length, Stream.Null);
            } else {

                // NOTE(review): if the DreamMessage constructor threw, this stream would leak —
                // confirm whether that is acceptable here.
                FileStream stream = File.OpenRead(filename);
                result = new DreamMessage(DreamStatus.Ok, null, contentType, stream.Length, stream);
            }

            // only emit a Content-Disposition header when the display name differs from the actual file name
            if((displayName != null) && !StringUtil.EqualsInvariantIgnoreCase(Path.GetFileName(filename), displayName)) {
                result.Headers.ContentDisposition = new ContentDisposition(true, File.GetLastWriteTimeUtc(filename), null, null, displayName, result.ContentLength);
            }
            return result;
        }

        /// <summary>
        /// Create a new message tied to a Stream for streaming data.
        /// </summary>
        /// <param name="mime">Content Mime-Type.</param>
        /// <param name="message">The message to be created.</param>
        /// <param name="writer">The stream that will supply the streaming data.</param>
        public static void ForStreaming(MimeType mime, out DreamMessage message, out Stream writer) {

            // a pipe connects the caller's writer to the message's content stream;
            // content length is unknown (-1) since data is produced on the fly
            Stream reader;
            StreamUtil.CreatePipe(out writer, out reader);
            message = Ok(mime, -1, reader);
        }

        /// <summary>
        /// Get a status string from a DreamMessage, or null if the message is null.
        /// </summary>
        /// <param name="message">A DreamMessage instance or null.</param>
        /// <returns>The <see cref="Status"/> as an informational string if a non-null message was provided, or null otherwise.</returns>
        public static string GetStatusStringOrNull(DreamMessage message) {
            if(message != null) {
                return string.Format("HTTP Status: {0}({1})", message.Status, (int)message.Status);
            }
            return null;
        }

        // Builds the standard XML error document (<error> with status/title/message and,
        // when a request context exists, the request uri), attaching the error.xslt
        // transform when a Dream context with a host environment is available.
        private static XDoc GetDefaultErrorResponse(DreamStatus status, string title, string message) {
            XDoc result = new XDoc("error");
            DreamContext context = DreamContext.CurrentOrNull;
            if((context != null) && (context.Env.Self != null)) {
                result.WithXslTransform(context.AsPublicUri(context.Env.Self).At("resources", "error.xslt").Path);
            }
            result.Elem("status", (int)status).Elem("title", title).Elem("message", message);
            if(context != null) {
                result.Elem("uri", context.Uri);
            }
            return result;
        }

        // Returns ", path: <request path>" for debug log messages, or null when debug
        // logging is disabled or no request context is available.
        private static string DebugOnly_GetRequestPath() {
            if(!_log.IsDebugEnabled) {
                return null;
            }
            DreamContext context = DreamContext.CurrentOrNull;
            if(context == null) {
                return null;
            }
            return ", path: " + context.Uri.Path;
        }

        //--- Fields ---

        /// <summary>
        /// Http Status of message.
        /// </summary>
        public readonly DreamStatus Status;

        /// <summary>
        /// Message Http header collection.
        /// </summary>
        public readonly DreamHeaders Headers;

        // True when the message was created without any content (status/headers only).
        private readonly bool _noContent;

        // Body storage: the content may be held as a parsed document, a byte array,
        // or a stream (see ToDocument()/ToStream()).
        private XDoc _doc;
        private byte[] _bytes;
        private Stream _stream;

        // True while _stream refers to a live, non-memorized stream that still needs closing.
        private bool _streamOpen;

        // Captured creation stack; used by the DEBUG finalizer to report unclosed streams.
        private System.Diagnostics.StackTrace _stackTrace = DebugUtil.GetStackTrace();

        //--- Constructors ---

        /// <summary>
        /// Create a new message without a body (status and headers only).
        /// </summary>
        /// <param name="status">Http status.</param>
        /// <param name="headers">Header collection.</param>
        public DreamMessage(DreamStatus status, DreamHeaders headers) {
            this.Status = status;
            this.Headers = new DreamHeaders(headers);
            _bytes = new byte[0];
            _noContent = true;
        }

        /// <summary>
        /// Create a new message.
        /// </summary>
        /// <param name="status">Http status.</param>
        /// <param name="headers">Header collection.</param>
        /// <param name="contentType">Content Mime-Type; defaults to XML when null.</param>
        /// <param name="doc">Message body; must not be null.</param>
        public DreamMessage(DreamStatus status, DreamHeaders headers, MimeType contentType, XDoc doc) {
            if(doc == null) {
                throw new ArgumentNullException("doc");
            }
            this.Status = status;
            this.Headers = new DreamHeaders(headers);

            // check if document is empty
            if(doc.IsEmpty) {

                // we store empty XML documents as text content; it causes less confusion for browsers
                this.Headers.ContentType = MimeType.TEXT;
                this.Headers.ContentLength = 0L;
                _doc = doc;
                _bytes = new byte[0];
            } else {
                this.Headers.ContentType = contentType ?? MimeType.XML;

                // store an independent copy so later changes to the caller's document don't affect the message
                _doc = doc.Clone();
            }
        }

        /// <summary>
        /// Create a new message with an XML body.
        /// </summary>
        /// <param name="status">Http status.</param>
        /// <param name="headers">Header collection.</param>
        /// <param name="doc">Message body; must not be null.</param>
        public DreamMessage(DreamStatus status, DreamHeaders headers, XDoc doc) : this(status, headers, MimeType.XML, doc) { }

        /// <summary>
        /// Create a new message whose body is supplied by a stream.
        /// </summary>
        /// <param name="status">Http status.</param>
        /// <param name="headers">Header collection.</param>
        /// <param name="contentType">Content Mime-Type; defaults to the default Mime-Type when null.</param>
        /// <param name="contentLength">Content byte length; pass -1 when unknown (no Content-Length header is set).</param>
        /// <param name="stream">Stream to use as the source for the message's content; null is treated as an empty stream.</param>
        public DreamMessage(DreamStatus status, DreamHeaders headers, MimeType contentType, long contentLength, Stream stream) {
            this.Status = status;
            this.Headers = new DreamHeaders(headers);
            if(contentLength != -1) {
                this.Headers.ContentLength = contentLength;
            }
            this.Headers.ContentType = contentType ?? MimeType.DefaultMimeType;

            // set stream
            _stream = stream ?? Stream.Null;

            // streams already memorized in memory are not tracked as needing to be closed
            _streamOpen = !_stream.IsStreamMemorized();
        }

        /// <summary>
        /// Create a new message.
/// </summary> /// <param name="status">Http status.</param> /// <param name="headers">Header collection.</param> /// <param name="contentType">Content Mime-Type.</param> /// <param name="bytes">Message body.</param> public DreamMessage(DreamStatus status, DreamHeaders headers, MimeType contentType, byte[] bytes) { if(bytes == null) { throw new ArgumentNullException("bytes"); } this.Status = status; this.Headers = new DreamHeaders(headers); this.Headers.ContentLength = bytes.LongLength; this.Headers.ContentType = contentType ?? MimeType.DefaultMimeType; // set bytes _bytes = bytes; } /// <summary> /// Create a new message. /// </summary> /// <param name="status">Http status.</param> /// <param name="headers">Header collection.</param> /// <param name="contentType">Content Mime-Type.</param> /// <param name="text">Message body.</param> public DreamMessage(DreamStatus status, DreamHeaders headers, MimeType contentType, string text) : this(status, headers, contentType, contentType.CharSet.GetBytes(text)) { } #if DEBUG /// <summary> /// Finalizer for DreamMessage to warn and possibly throw an exception if a message with an open stream reaches garbage collection. /// </summary> ~DreamMessage() { if(_streamOpen) { _log.WarnMethodCall("message stream was not closed", _stackTrace); if(_stackTrace != null) { throw new Exception("message stream was not closed: " + _stackTrace); } } } #endif //--- Properties --- /// <summary> /// <see langword="True"/> if the Status indicates a successful response. /// </summary> /// <remarks>Requests are always marked as successful. Only responses use the status to convey information.</remarks> public bool IsSuccessful { get { return (Status >= DreamStatus.Ok) && (Status < DreamStatus.MultipleChoices); } } /// <summary> /// Message Content Mime-Type. /// </summary> public MimeType ContentType { get { return Headers.ContentType ?? MimeType.DefaultMimeType; } } /// <summary> /// Message contains cookies. 
/// </summary> public bool HasCookies { get { return Headers.HasCookies; } } /// <summary> /// Cookies. /// </summary> public List<DreamCookie> Cookies { get { return Headers.Cookies; } } /// <summary> /// Content Disposition Header. /// </summary> public ContentDisposition ContentDisposition { get { return Headers.ContentDisposition; } } /// <summary> /// <see langword="True"/> if the underlying content stream is closed. /// </summary> public bool IsClosed { get { return (_doc == null) && (_stream == null) && (_bytes == null); } } /// <summary> /// Total number of bytes in message. /// </summary> public long ContentLength { get { long? result = Headers.ContentLength; if(result != null) { return result.Value; } if(IsClosed) { return 0; } else if(_bytes != null) { return _bytes.LongLength; } else if(_stream.IsStreamMemorized()) { return _stream.Length; } return -1; } } /// <summary> /// <see langword="True"/> if the message content can be retrieved as an <see cref="XDoc"/> instance. /// </summary> public bool HasDocument { get { if(_doc == null) { MimeType mime = ContentType; return mime.IsXml || mime.Match(MimeType.FORM_URLENCODED); } return true; } } /// <summary> /// Can this message be clone? /// </summary> /// <remarks>In general only false for closed messages and messages with non-memorized streams.</remarks> public bool IsCloneable { get { return !IsClosed && (_stream == null || _stream == Stream.Null || _stream.IsStreamMemorized()); } } //--- Methods --- /// <summary> /// Get the message body as a document. /// </summary> /// <returns>XDoc instance.</returns> public XDoc ToDocument() { MakeDocument(); return _doc; } /// <summary> /// Get the message body as a Stream. /// </summary> /// <returns>Content Stream.</returns> public Stream ToStream() { MakeStream(); return _stream; } /// <summary> /// Convert the message body into a byte array. /// </summary> /// <remarks>This method is potentially thread-blocking. 
Please avoid using it if possible.</remarks> /// <returns>Array of bytes.</returns> #if WARN_ON_SYNC [Obsolete("This method is potentially thread-blocking. Please avoid using it if possible.")] #endif public byte[] ToBytes() { MakeBytes(); return _bytes; } /// <summary> /// Convert the message body to plain text. /// </summary> /// <returns>Content text.</returns> public string ToText() { return ContentType.CharSet.GetString(ToBytes()); } /// <summary> /// Convert the message body to a text reader. /// </summary> /// <returns>New text reader instance.</returns> public TextReader ToTextReader() { return new StreamReader(ToStream(), ContentType.CharSet); } /// <summary> /// Set Caching headers. /// </summary> /// <param name="timestamp">Last modified timestamp.</param> public void SetCacheMustRevalidate(DateTime timestamp) { Headers.CacheControl = "must-revalidate,private"; Headers.Vary = "Accept-Encoding"; Headers.LastModified = timestamp; Headers.ETag = timestamp.ToUniversalTime().ToString("r"); } /// <summary> /// Check if the cache needs ot be re-validated /// </summary> /// <param name="timestamp">Last modified timestamp.</param> /// <returns><see langword="True"/> if the cache needs to be re-validated.</returns> public bool CheckCacheRevalidation(DateTime timestamp) { DateTime rounded = new DateTime(timestamp.Year, timestamp.Month, timestamp.Day, timestamp.Hour, timestamp.Minute, timestamp.Second, timestamp.Kind); // check if an 'If-Modified-Since' header is present DateTime ifModSince = Headers.IfModifiedSince ?? DateTime.MinValue; if(rounded <= ifModSince) { return true; } // check if an 'ETag' header is present string ifNoneMatch = Headers.IfNoneMatch; if(!string.IsNullOrEmpty(ifNoneMatch)) { if(timestamp.ToUniversalTime().ToString("r") == ifNoneMatch) { return true; } } // either there was not validation check or the cached copy is out-of-date return false; } /// <summary> /// Clone the current message. 
/// </summary> /// <returns>A new message instance.</returns> public DreamMessage Clone() { if(!IsCloneable) { throw new InvalidOperationException("The current message cannot be cloned. It is either closed or contains a payload that cannot be duplicated."); } DreamMessage result; if(_noContent) { result = new DreamMessage(Status, Headers); } else if(_doc != null) { result = new DreamMessage(Status, Headers, _doc.Clone()); } else if(_stream == Stream.Null || (_stream != null && _stream.IsStreamMemorized())) { _stream.Position = 0; var copy = new ChunkedMemoryStream((int)_stream.Length); _stream.CopyTo(copy, _stream.Length); _stream.Position = 0; copy.Position = 0; result = new DreamMessage(Status, Headers, ContentType, ContentLength, copy); } else { var bytes = ToBytes(); result = new DreamMessage(Status, Headers, ContentType, bytes); // length may differ for HEAD requests if(bytes.LongLength != ContentLength) { result.Headers.ContentLength = bytes.LongLength; } } if(HasCookies) { result.Cookies.AddRange(Cookies); } return result; } /// <summary> /// Close any underlying stream on the message. /// </summary> public void Close() { if(_stream != null) { _stream.Close(); _streamOpen = false; } _doc = null; _stream = null; _bytes = null; } /// <summary> /// Memorize the content stream. /// </summary> /// <param name="result">The synchronization handle to return.</param> /// <returns>Synchronization handle for memorization completion.</returns> public Result Memorize(Result result) { return Memorize(-1, result); } /// <summary> /// Memorize the content stream. 
/// </summary> /// <param name="result">The synchronization handle to return.</param> /// <param name="max">Maximum number of bytes to memorize.</param> /// <returns>Synchronization handle for memorization completion.</returns> public Result Memorize(long max, Result result) { // check if we need to call Memorize_Helper() if((_stream == null) || _stream.IsStreamMemorized()) { // message already contains a document or byte array or a memory stream // we don't need to memorize those result.Return(); return result; } return Coroutine.Invoke(Memorize_Helper, max, result); } private Yield Memorize_Helper(long max, Result result) { // NOTE (steveb): this method is used to load an external stream into memory; this alleviates the problem of streams not being closed for simple operations if(max < 0) { max = long.MaxValue - 1; } // check if we already know that the stream will not fit long length = ContentLength; if(length > max) { // mark stream as closed _stream.Close(); _stream = null; _streamOpen = false; // throw size exceeded exception result.Throw(new InternalBufferOverflowException("message body exceeded max size")); yield break; } if(length < 0) { length = long.MaxValue; } // NOTE: the content-length and body length may differ (e.g. HEAD verb) // copy contents asynchronously var buffer = new ChunkedMemoryStream(); Result<long> res; // TODO (steveb): use WithCleanup() to dispose of resources in case of failure yield return res = _stream.CopyTo(buffer, Math.Min(length, max + 1), new Result<long>(TimeSpan.MaxValue)).Catch(); // mark stream as closed _stream.Close(); _stream = null; _streamOpen = false; // confirm successful outcome for asynchronous operation res.Confirm(); if(buffer.Length > max) { result.Throw(new InternalBufferOverflowException("message body exceeded max size")); yield break; } buffer.Position = 0; _stream = buffer; result.Return(); } /// <summary> /// Convert the message into a string. 
/// </summary> /// <returns>String.</returns> public override string ToString() { return new XMessage(this).ToString(); } private void MakeDocument() { if(IsClosed) { throw new InvalidOperationException("message has already been closed"); } if(_doc == null) { try { MakeStream(); _doc = XDocFactory.From(_stream, ContentType); if((_doc == null) || _doc.IsEmpty) { throw new InvalidDataException(string.Format("message body with content type '{0}' is not well-formed xml", ContentType)); } } finally { if(_stream != null) { _stream.Close(); _stream = null; _streamOpen = false; } } } } private void MakeStream() { if(IsClosed) { throw new InvalidOperationException("message has already been closed"); } if(_stream == null) { if(_bytes != null) { _stream = new MemoryStream(_bytes, 0, _bytes.Length, true, true); } else { var stream = new ChunkedMemoryStream(); _doc.WriteTo(stream, ContentType.CharSet); stream.Position = 0; _stream = stream; } _streamOpen = false; // NOTE: the content-length and body length may differ (e.g. HEAD verb) // update content-length if it isn't set yet if(Headers.ContentLength == null) { Headers.ContentLength = _stream.Length; } } } private void MakeBytes() { if(IsClosed) { throw new InvalidOperationException("message has already been closed"); } if(_bytes == null) { if(_stream == null) { Encoding encoding = ContentType.CharSet; _bytes = encoding.GetBytes(_doc.ToString(encoding)); } else if(_stream is ChunkedMemoryStream) { _bytes = ((ChunkedMemoryStream)_stream).ToArray(); _stream = null; _streamOpen = false; } else if(_stream is MemoryStream) { _bytes = ((MemoryStream)_stream).ToArray(); _stream = null; _streamOpen = false; } else { // NOTE: the content-length and body length may differ (e.g. 
HEAD verb) try { var buffer = new MemoryStream(); _stream.CopyTo(buffer, ContentLength); _bytes = buffer.ToArray(); } finally { _stream.Close(); _stream = null; _streamOpen = false; } } } } #region --- Obsolete Methods --- /// <summary> /// AsDocument() is obsolete. Use <see cref="ToDocument"/> instead. /// </summary> [Obsolete("AsDocument() is obsolete. Use ToDocument() instead.")] public XDoc AsDocument() { return ToDocument(); } /// <summary> /// AsStream() is obsolete. Use <see cref="ToStream"/> instead. /// </summary> [Obsolete("AsStream() is obsolete. Use ToStream() instead.")] public Stream AsStream() { return ToStream(); } /// <summary> /// AsBytes() is obsolete. Use <see cref="ToBytes"/> instead. /// </summary> [Obsolete("AsBytes() is obsolete. Use ToBytes() instead.")] public byte[] AsBytes() { return ToBytes(); } /// <summary> /// AsText() is obsolete. Use <see cref="ToText"/> instead. /// </summary> [Obsolete("AsText() is obsolete. Use ToText() instead.")] public string AsText() { return ToText(); } /// <summary> /// AsTextReader() is obsolete. Use <see cref="ToTextReader"/> instead. /// </summary> /// <returns></returns> [Obsolete("AsTextReader() is obsolete. Use ToTextReader() instead.")] public TextReader AsTextReader() { return ToTextReader(); } #endregion } }
using System;
using Microsoft.SPOT;
using System.Net;
using System.Threading;
using System.Net.Sockets;
using System.Text;

namespace uPLibrary.Networking.Ddns
{
    /// <summary>
    /// EventArgs class for Ip address updated event
    /// </summary>
    public class IpAddressUpdatedEventArgs : EventArgs
    {
        /// <summary>
        /// IP Address (null if update failed)
        /// </summary>
        public IPAddress IpAddress { get; internal set; }

        /// <summary>
        /// Response from Dynamic Dns service provider
        /// </summary>
        public DdnsResponse Response { get; internal set; }
    }

    // Handler signature for the DdnsClient.IpAddressUpdated event.
    public delegate void IpAddressUpdatedHandler(object sender, IpAddressUpdatedEventArgs e);

    /// <summary>
    /// Abstract base class for all Dynamic Dns Clients
    /// </summary>
    public abstract class DdnsClient : IDdnsClient
    {
        // max dimension for receive buffer
        protected const int RECEIVE_BUFFER_SIZE = 1024;

        // path to getting for update IP address
        protected const string DDNS_UPDATE_IP_PATH = "/nic/update";
        protected const string DDNS_CHECK_IP_PATH = "/";
        protected const string DDNS_CLIENT_USER_AGENT = "uDdns Client/1.0";

        // IP address updated event
        public event IpAddressUpdatedHandler IpAddressUpdated;

        // Ddns configuration information
        protected DdnsConfig ddnsConfig;

        // timer for periodic check/update IP address
        private Timer updaterIpTimer;

        // socket for updating IP address
        protected Socket updateIpSocket;
        // socket for checking IP address
        protected Socket checkIpSocket;

        // buffers for send/receive data by socket
        protected byte[] sendBuffer;
        protected byte[] receiveBuffer;

        // current IP address
        // NOTE(review): this field is never assigned in this class; presumably a derived
        // class sets it when decoding responses — confirm before relying on the event's
        // IpAddress value being current.
        protected IPAddress ipAddress;

        // Ddns service provider endpoint for updating IP address
        protected IPEndPoint updateIpEndPoint;
        // Ddns service provider endpoint for checking IP address
        protected IPEndPoint checkIpEndPoint;

        // commands for checking and updating Ip address
        protected DdnsCheckIpCommand checkIpCmd;
        protected DdnsUpdateIpCommand updateIpCmd;

        // response from Dynamic Dns service provider
        private DdnsResponse response;

        // names of check IP address and update IP address hosts
        protected string checkIpHost;
        protected string updateIpHost;

        /// <summary>
        /// Constructor
        /// </summary>
        /// <param name="ddnsConfig">Ddns configuration information</param>
        public DdnsClient(DdnsConfig ddnsConfig)
        {
            this.ddnsConfig = ddnsConfig;

            // create timer for periodic and automatic check/update but not start it
            this.updaterIpTimer = new Timer(this.CheckUpdateIpAddressCallback, null, Timeout.Infinite, this.ddnsConfig.Period);

            this.receiveBuffer = new byte[RECEIVE_BUFFER_SIZE];
            this.checkIpHost = String.Empty;
            this.updateIpHost = String.Empty;
        }

        /// <summary>
        /// Timer callback for check/update IP address
        /// </summary>
        /// <param name="state">Timer state object (unused)</param>
        protected void CheckUpdateIpAddressCallback(object state)
        {
            try
            {
                // check IP address
                IPAddress ipAddress = this.CheckIpAddress();

                // if IP address is null (no check executed) or
                // the last IP address is null (first updating) or
                // last IP address is different from checked IP address
                if ((ipAddress == null) || (this.ipAddress == null) || (!this.ipAddress.Equals(ipAddress)))
                {
                    // if check IP address has returned a valid IP address
                    if (ipAddress != null)
                        this.updateIpCmd.IpAddress = ipAddress.ToString();

                    // update IP address
                    this.UpdateIpAddress();
                }
            }
            catch
            {
                // best-effort: network failures during a periodic check are deliberately
                // swallowed; the next timer tick will retry
            }
        }

        /// <summary>
        /// Check current client IP address
        /// </summary>
        /// <returns>Current client IP address (null when no check endpoint is configured or decoding fails)</returns>
        protected virtual IPAddress CheckIpAddress()
        {
            IPAddress ipAddress = null;

            // if check IP address end point is set
            if (this.checkIpEndPoint != null)
            {
                // create socket for checking IP address
                using (this.checkIpSocket = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp))
                {
                    this.checkIpSocket.Connect(this.checkIpEndPoint);

                    // send IP address check request
                    this.sendBuffer = Encoding.UTF8.GetBytes(this.checkIpCmd.ToString());
                    this.checkIpSocket.Send(this.sendBuffer);

                    // fresh buffer so stale bytes from a previous exchange cannot leak into the decode
                    this.receiveBuffer = new byte[RECEIVE_BUFFER_SIZE];

                    // read and decode response
                    if (this.checkIpSocket.Receive(this.receiveBuffer) > 0)
                    {
                        ipAddress = this.DecodeCheckIpResponse(new String(Encoding.UTF8.GetChars(this.receiveBuffer)));
                    }
                }
            }
            return ipAddress;
        }

        /// <summary>
        /// Update client Ip address
        /// </summary>
        protected virtual void UpdateIpAddress()
        {
            // create socket for updating IP address
            using (this.updateIpSocket = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp))
            {
                this.updateIpSocket.Connect(this.updateIpEndPoint);

                // prepare buffers for send and receive
                this.sendBuffer = Encoding.UTF8.GetBytes(this.updateIpCmd.ToString());
                this.receiveBuffer = new byte[RECEIVE_BUFFER_SIZE];

                int byteRead = 0;

                // SSL configured
                if (this.ddnsConfig.SSL)
                {
                    // create SSL stream
                    using (Microsoft.SPOT.Net.Security.SslStream sslStream = new Microsoft.SPOT.Net.Security.SslStream(this.updateIpSocket))
                    {
                        // SSL handshake authentication
                        sslStream.AuthenticateAsClient(this.updateIpHost, Microsoft.SPOT.Net.Security.SslProtocols.TLSv1);

                        // send message
                        sslStream.Write(this.sendBuffer, 0, this.sendBuffer.Length);

                        // cycle for reading from socket; stops at end-of-stream (read == 0)
                        // or when the fixed-size receive buffer is full
                        int offset = 0;
                        int read = 0;
                        do
                        {
                            read = sslStream.Read(this.receiveBuffer, offset, RECEIVE_BUFFER_SIZE - offset);
                            offset += read;
                        } while (read != 0);

                        byteRead = offset;
                    }
                }
                else
                {
                    // send message and read response
                    this.updateIpSocket.Send(this.sendBuffer);
                    byteRead = this.updateIpSocket.Receive(this.receiveBuffer);
                }

                if (byteRead > 0)
                {
                    this.response = this.DecodeUpdateIpResponse(new String(Encoding.UTF8.GetChars(this.receiveBuffer)));

                    // raise IP address updated event
                    this.OnIpAddressUpdated(new IpAddressUpdatedEventArgs { IpAddress = this.ipAddress, Response = this.response });
                }
            }
        }

        /// <summary>
        /// Decode update IP address response
        /// </summary>
        /// <param name="updateIpResp">Update IP address response</param>
        /// <returns>Response code from Dynamic Dns service provider</returns>
        protected abstract DdnsResponse DecodeUpdateIpResponse(string updateIpResp);

        /// <summary>
        /// Decode check IP address response
        /// </summary>
        /// <param name="checkedIpResp">Check IP address response</param>
        /// <returns>IP address extracted from Dynamic Dns service provider response</returns>
        protected abstract IPAddress DecodeCheckIpResponse(string checkedIpResp);

        /// <summary>
        /// Raise IP address updated event
        /// </summary>
        /// <param name="e">Event Args object</param>
        private void OnIpAddressUpdated(IpAddressUpdatedEventArgs e)
        {
            if (this.IpAddressUpdated != null)
                this.IpAddressUpdated(this, e);
        }

        #region IDdnsClient...

        public IPAddress IpAddress
        {
            get { return this.ipAddress; }
        }

        public void Start()
        {
            // start updater IP address timer
            this.updaterIpTimer.Change(0, this.ddnsConfig.Period);
        }

        public void Stop()
        {
            // stop updater IP address timer
            this.updaterIpTimer.Change(Timeout.Infinite, Timeout.Infinite);
        }

        #endregion
    }
}
//---------------------------------------------------------------------
// <copyright file="Dump.cs" company="Microsoft">
//      Copyright (c) Microsoft Corporation.  All rights reserved.
// </copyright>
//
// @owner  [....]
// @backupOwner [....]
//---------------------------------------------------------------------

using System;
using System.Collections.Generic;
using System.Data.Common;
using System.Data.Query.PlanCompiler;
using System.Globalization;
using System.Text;
using System.Diagnostics;
using System.IO;
using System.Xml;
using md = System.Data.Metadata.Edm;

//
// This module serves as a dump routine for an IQT
// The output is a weird form of Sql - closer to Quel (and perhaps, C#
// comprehensions)
//
namespace System.Data.Query.InternalTrees
{
    /// <summary>
    /// A dump module for the Iqt
    /// </summary>
    internal class Dump : BasicOpVisitor, IDisposable
    {
        #region private state
        private XmlWriter _writer;
        #endregion

        #region constructors
        private Dump(Stream stream)
            : this(stream, Dump.DefaultEncoding, true)
        {
        }

        // NOTE(review): the 'indent' parameter is ignored; Indent is always forced to true.
        private Dump(Stream stream, Encoding encoding, bool indent)
            : base()
        {
            XmlWriterSettings settings = new XmlWriterSettings();
            settings.CheckCharacters = false;
            settings.Indent = true;
            settings.Encoding = encoding;
            _writer = XmlWriter.Create(stream, settings);
            _writer.WriteStartDocument(true);
        }
        #endregion

        #region "public" surface

        internal static readonly Encoding DefaultEncoding = Encoding.UTF8;

        /// <summary>
        /// Driver method to dump the entire tree
        /// </summary>
        /// <param name="itree"></param>
        /// <returns>the tree rendered as an XML string</returns>
        static internal string ToXml(Command itree)
        {
            return ToXml(itree, itree.Root);
        }

        /// <summary>
        /// Driver method to dump a subtree of a tree
        /// </summary>
        /// <param name="itree"></param>
        /// <param name="subtree"></param>
        /// <returns>the subtree rendered as an XML string</returns>
        static internal string ToXml(Command itree, Node subtree)
        {
            MemoryStream stream = new MemoryStream();
            using (Dump dumper = new Dump(stream))
            {
                // Just in case the node we're provided doesn't dump as XML, we'll always stick
                // an XML wrapper around it -- this happens when we're dumping scalarOps, for
                // example, and it's unfortunate if you can't debug them using a dump...
                using (new AutoXml(dumper, "nodes"))
                {
                    dumper.VisitNode(subtree);
                }
            }
            return DefaultEncoding.GetString(stream.ToArray());
        }

        // Renders a single ColumnMap as an XML string (debugging aid).
        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode", Scope = "member", Target = "System.Data.Query.InternalTrees.Dump.ToXml")]
        static internal string ToXml(ColumnMap columnMap)
        {
            MemoryStream stream = new MemoryStream();
            using (Dump dumper = new Dump(stream))
            {
                // Just in case the node we're provided doesn't dump as XML, we'll always stick
                // an XML wrapper around it -- this happens when we're dumping scalarOps, for
                // example, and it's unfortunate if you can't debug them using a dump...
                using (new AutoXml(dumper, "columnMap"))
                {
                    columnMap.Accept(ColumnMapDumper.Instance, dumper);
                }
            }
            return DefaultEncoding.GetString(stream.ToArray());
        }
        #endregion

        #region Begin/End management
        void IDisposable.Dispose()
        {
            // Technically, calling GC.SuppressFinalize is not required because the class does not
            // have a finalizer, but it does no harm, protects against the case where a finalizer is added
            // in the future, and prevents an FxCop warning.
            GC.SuppressFinalize(this);
            try
            {
                _writer.WriteEndDocument();
                _writer.Flush();
                _writer.Close();
            }
            catch (Exception e)
            {
                if (!EntityUtil.IsCatchableExceptionType(e))
                {
                    throw;
                }
                // eat this exception; we don't care if the dumper is failing...
            }
        }

        // Opens an XML element with the given name and optional attributes.
        internal void Begin(string name, Dictionary<string, object> attrs)
        {
            _writer.WriteStartElement(name);
            if (attrs != null)
            {
                foreach (KeyValuePair<string, object> attr in attrs)
                {
                    _writer.WriteAttributeString(attr.Key, attr.Value.ToString());
                }
            }
        }

        internal void BeginExpression()
        {
            WriteString("(");
        }

        internal void EndExpression()
        {
            WriteString(")");
        }

        // Closes the current XML element.
        // NOTE(review): the 'name' parameter is unused; WriteEndElement closes whatever is open.
        internal void End(string name)
        {
            _writer.WriteEndElement();
        }

        internal void WriteString(string value)
        {
            _writer.WriteString(value);
        }
        #endregion

        #region VisitorMethods

        // Default: wrap the node's op in an XML element and recurse.
        protected override void VisitDefault(Node n)
        {
            using (new AutoXml(this, n.Op))
            {
                base.VisitDefault(n);
            }
        }

        // Scalar ops render as text with comma-separated children.
        protected override void VisitScalarOpDefault(ScalarOp op, Node n)
        {
            using (new AutoString(this, op))
            {
                string separator = string.Empty;
                foreach (Node chi in n.Children)
                {
                    WriteString(separator);
                    VisitNode(chi);
                    separator = ",";
                }
            }
        }

        // Joins render the (optional) condition first, then both inputs.
        protected override void VisitJoinOp(JoinBaseOp op, Node n)
        {
            using (new AutoXml(this, op))
            {
                if (n.Children.Count > 2)
                {
                    using (new AutoXml(this, "condition"))
                    {
                        VisitNode(n.Child2);
                    }
                }
                using (new AutoXml(this, "input"))
                {
                    VisitNode(n.Child0);
                }
                using (new AutoXml(this, "input"))
                {
                    VisitNode(n.Child1);
                }
            }
        }

        // Children come in (when, then)* pairs with an optional trailing else.
        public override void Visit(CaseOp op, Node n)
        {
            using (new AutoXml(this, op))
            {
                int i = 0;
                while (i < n.Children.Count)
                {
                    if ((i + 1) < n.Children.Count)
                    {
                        using (new AutoXml(this, "when"))
                        {
                            VisitNode(n.Children[i++]);
                        }
                        using (new AutoXml(this, "then"))
                        {
                            VisitNode(n.Children[i++]);
                        }
                    }
                    else
                    {
                        using (new AutoXml(this, "else"))
                        {
                            VisitNode(n.Children[i++]);
                        }
                    }
                }
            }
        }

        public override void Visit(CollectOp op, Node n)
        {
            using (new AutoXml(this, op))
            {
                VisitChildren(n);
            }
        }

        // Constants render as "(FullTypeName)value" or "null".
        protected override void VisitConstantOp(ConstantBaseOp op, Node n)
        {
            using (new AutoString(this, op))
            {
                if (null == op.Value)
                {
                    WriteString("null");
                }
                else
                {
                    WriteString("(");
                    WriteString(op.Type.EdmType.FullName);
                    WriteString(")");
                    WriteString(String.Format(CultureInfo.InvariantCulture, "{0}", op.Value));
                }
                VisitChildren(n);
            }
        }

        public override void Visit(DistinctOp op, Node n)
        {
            // render the distinct keys as a comma-separated "Keys" attribute
            Dictionary<string, object> attrs = new Dictionary<string, object>();
            StringBuilder sb = new StringBuilder();
            string separator = string.Empty;
            foreach (Var v in op.Keys)
            {
                sb.Append(separator);
                sb.Append(v.Id);
                separator = ",";
            }
            if (0 != sb.Length)
            {
                attrs.Add("Keys", sb.ToString());
            }
            using (new AutoXml(this, op, attrs))
            {
                VisitChildren(n);
            }
        }

        protected override void VisitGroupByOp(GroupByBaseOp op, Node n)
        {
            // render the grouping keys as a comma-separated "Keys" attribute
            Dictionary<string, object> attrs = new Dictionary<string, object>();
            StringBuilder sb = new StringBuilder();
            string separator = string.Empty;
            foreach (Var v in op.Keys)
            {
                sb.Append(separator);
                sb.Append(v.Id);
                separator = ",";
            }
            if (0 != sb.Length)
            {
                attrs.Add("Keys", sb.ToString());
            }
            using (new AutoXml(this, op, attrs))
            {
                using (new AutoXml(this, "outputs"))
                {
                    foreach (Var v in op.Outputs)
                    {
                        DumpVar(v);
                    }
                }
                VisitChildren(n);
            }
        }

        public override void Visit(IsOfOp op, Node n)
        {
            using (new AutoXml(this, (op.IsOfOnly ? "IsOfOnly" : "IsOf")))
            {
                string separator = string.Empty;
                foreach (Node chi in n.Children)
                {
                    WriteString(separator);
                    VisitNode(chi);
                    separator = ",";
                }
            }
        }

        protected override void VisitNestOp(NestBaseOp op, Node n)
        {
            Dictionary<string, object> attrs = new Dictionary<string, object>();

            // single-stream nests carry a discriminator and key list as extra attributes
            SingleStreamNestOp ssnOp = op as SingleStreamNestOp;
            if (null != ssnOp)
            {
                attrs.Add("Discriminator", (ssnOp.Discriminator == null) ? "<null>" : ssnOp.Discriminator.ToString());
            }
            StringBuilder sb = new StringBuilder();
            string separator;
            if (null != ssnOp)
            {
                sb.Length = 0;
                separator = string.Empty;
                foreach (Var v in ssnOp.Keys)
                {
                    sb.Append(separator);
                    sb.Append(v.Id);
                    separator = ",";
                }
                if (0 != sb.Length)
                {
                    attrs.Add("Keys", sb.ToString());
                }
            }
            using (new AutoXml(this, op, attrs))
            {
                using (new AutoXml(this, "outputs"))
                {
                    foreach (Var v in op.Outputs)
                    {
                        DumpVar(v);
                    }
                }

                // one <collection> element per nested collection, with its column map
                foreach (CollectionInfo ci in op.CollectionInfo)
                {
                    Dictionary<string, object> attrs2 = new Dictionary<string, object>();
                    attrs2.Add("CollectionVar", ci.CollectionVar);
                    if (null != ci.DiscriminatorValue)
                    {
                        attrs2.Add("DiscriminatorValue", ci.DiscriminatorValue);
                    }
                    if (0 != ci.FlattenedElementVars.Count)
                    {
                        attrs2.Add("FlattenedElementVars", FormatVarList(sb, ci.FlattenedElementVars));
                    }
                    if (0 != ci.Keys.Count)
                    {
                        attrs2.Add("Keys", ci.Keys);
                    }
                    if (0 != ci.SortKeys.Count)
                    {
                        attrs2.Add("SortKeys", FormatVarList(sb, ci.SortKeys));
                    }
                    using (new AutoXml(this, "collection", attrs2))
                    {
                        ci.ColumnMap.Accept(ColumnMapDumper.Instance, this);
                    }
                }
                VisitChildren(n);
            }
        }

        // Formats a VarList as comma-separated var ids (reuses the caller's StringBuilder).
        private static string FormatVarList(StringBuilder sb, VarList varList)
        {
            string separator;
            sb.Length = 0;
            separator = string.Empty;
            foreach (Var v in varList)
            {
                sb.Append(separator);
                sb.Append(v.Id);
                separator = ",";
            }
            return sb.ToString();
        }

        // Formats a list of sort keys as comma-separated var ids (reuses the caller's StringBuilder).
        private static string FormatVarList(StringBuilder sb, List<SortKey> varList)
        {
            string separator;
            sb.Length = 0;
            separator = string.Empty;
            foreach (SortKey v in varList)
            {
                sb.Append(separator);
                sb.Append(v.Var.Id);
                separator = ",";
            }
            return sb.ToString();
        }

        // Shared renderer for all New* ops: one <argument> element per child.
        private void VisitNewOp(Op op, Node n)
        {
            using (new AutoXml(this, op))
            {
                foreach (Node chi in n.Children)
                {
                    using (new AutoXml(this, "argument", null))
                    {
                        VisitNode(chi);
                    }
                }
            }
        }

        public override void Visit(NewEntityOp op, Node n)
        {
            VisitNewOp(op, n);
        }

        public override void Visit(NewInstanceOp op, Node n)
        {
            VisitNewOp(op, n);
        }

        public override void Visit(DiscriminatedNewEntityOp op, Node n)
        {
            VisitNewOp(op, n);
        }

        public override void Visit(NewMultisetOp op, Node n)
        {
            VisitNewOp(op, n);
        }

        public override void Visit(NewRecordOp op, Node n)
        {
            VisitNewOp(op, n);
        }

        public override void Visit(PhysicalProjectOp op, Node n)
        {
            using (new AutoXml(this, op))
            {
                using (new AutoXml(this, "outputs"))
                {
                    foreach (Var v in op.Outputs)
                    {
                        DumpVar(v);
                    }
                }
                using (new AutoXml(this, "columnMap"))
                {
                    op.ColumnMap.Accept(ColumnMapDumper.Instance, this);
                }
                using (new AutoXml(this, "input"))
                {
                    VisitChildren(n);
                }
            }
        }

        public override void Visit(ProjectOp op, Node n)
        {
            using (new AutoXml(this, op))
            {
                using (new AutoXml(this, "outputs"))
                {
                    foreach (Var v in op.Outputs)
                    {
                        DumpVar(v);
                    }
                }
                VisitChildren(n);
            }
        }

        // Renders as "<instance>.<propertyName>".
        public override void Visit(PropertyOp op, Node n)
        {
            using (new AutoString(this, op))
            {
                VisitChildren(n);
                WriteString(".");
                WriteString(op.PropertyInfo.Name);
            }
        }

        // Renders as "<instance>.NAVIGATE(relationship,fromEnd,toEnd)".
        public override void Visit(RelPropertyOp op, Node n)
        {
            using (new AutoString(this, op))
            {
                VisitChildren(n);
                WriteString(".NAVIGATE(");
                WriteString(op.PropertyInfo.Relationship.Name);
                WriteString(",");
                WriteString(op.PropertyInfo.FromEnd.Name);
                WriteString(",");
                WriteString(op.PropertyInfo.ToEnd.Name);
                WriteString(")");
            }
        }

        public override void Visit(ScanTableOp op, Node n)
        {
            using (new AutoXml(this, op))
            {
                DumpTable(op.Table);
                VisitChildren(n);
            }
        }

        public override void Visit(ScanViewOp op, Node n)
        {
            using (new AutoXml(this, op))
            {
                DumpTable(op.Table);
                VisitChildren(n);
            }
        }

        protected override void VisitSetOp(SetOp op, Node n)
        {
            Dictionary<string, object> attrs = new Dictionary<string, object>();

            // UnionAll may carry a branch discriminator var
            if (OpType.UnionAll == op.OpType)
            {
                UnionAllOp uallOp = (UnionAllOp)op;
                if (null != uallOp.BranchDiscriminator)
                {
                    attrs.Add("branchDiscriminator", uallOp.BranchDiscriminator);
                }
            }
            using (new AutoXml(this, op, attrs))
            {
                using (new AutoXml(this, "outputs"))
                {
                    foreach (Var v in op.Outputs)
                    {
                        DumpVar(v);
                    }
                }

                // each input gets its var map (output var -> input var) as an attribute
                int i = 0;
                foreach (Node chi in n.Children)
                {
                    Dictionary<string, object> attrs2 = new Dictionary<string, object>();
                    attrs2.Add("VarMap", op.VarMap[i++].ToString());
                    using (new AutoXml(this, "input", attrs2))
                    {
                        VisitNode(chi);
                    }
                }
            }
        }

        public override void Visit(SortOp op, Node n)
        {
            using (new AutoXml(this, op))
            {
                base.Visit(op, n);
            }
        }

        public override void Visit(ConstrainedSortOp op, Node n)
        {
            Dictionary<string, object> attrs = new Dictionary<string, object>();
            attrs.Add("WithTies", op.WithTies);
            using (new AutoXml(this, op, attrs))
            {
                base.Visit(op, n);
            }
        }

        // Shared renderer for sort keys: one <sortKey> element per key.
        protected override void VisitSortOp(SortBaseOp op, Node n)
        {
            using (new AutoXml(this, "keys"))
            {
                foreach (InternalTrees.SortKey sortKey in op.Keys)
                {
                    Dictionary<string, object> attrs = new Dictionary<string, object>();
                    attrs.Add("Var", sortKey.Var);
                    attrs.Add("Ascending", sortKey.AscendingSort);
                    attrs.Add("Collation", sortKey.Collation);
                    using (new AutoXml(this, "sortKey", attrs))
                    {
                    }
                }
            }
            VisitChildren(n);
        }

        public override void Visit(UnnestOp op, Node n)
        {
            Dictionary<string, object> attrs = new Dictionary<string, object>();
            if (null != op.Var)
            {
                attrs.Add("Var", op.Var.Id);
            }
            using (new AutoXml(this, op, attrs))
            {
                DumpTable(op.Table);
                VisitChildren(n);
            }
        }

        public override void Visit(VarDefOp op, Node n)
        {
            Dictionary<string, object> attrs = new Dictionary<string, object>();
            attrs.Add("Var", op.Var.Id);
            using (new AutoXml(this, op, attrs))
            {
                VisitChildren(n);
            }
        }

        public override void Visit(VarRefOp op, Node n)
        {
            using (new AutoString(this, op))
            {
                VisitChildren(n);
                if (null != op.Type)
                {
                    WriteString("Type=");
                    WriteString(TypeHelpers.GetFullName(op.Type));
                    WriteString(", ");
                }
                WriteString("Var=");
                WriteString(op.Var.Id.ToString(CultureInfo.InvariantCulture));
            }
        }
        #endregion

        #region dumper helpers

        // Emits a self-closing element for a var; column vars also get name and type.
        private void DumpVar(Var v)
        {
            Dictionary<string, object> attrs = new Dictionary<string, object>();
            attrs.Add("Var", v.Id);
            ColumnVar cv = v as ColumnVar;
            if (null != cv)
            {
                attrs.Add("Name", cv.ColumnMetadata.Name);
                attrs.Add("Type", TypeHelpers.GetFullName(cv.ColumnMetadata.Type));
            }
            using (new AutoXml(this, v.GetType().Name, attrs))
            {
            }
        }

        private void DumpVars(List<Var> vars)
        {
            foreach (Var v in vars)
            {
                DumpVar(v);
            }
        }

        // Emits a <Table> element with its id, extent (when available) and columns.
        private void DumpTable(Table table)
        {
            Dictionary<string, object> attrs = new Dictionary<string, object>();
            attrs.Add("Table", table.TableId);
            if (null != table.TableMetadata.Extent)
            {
                attrs.Add("Extent", table.TableMetadata.Extent.Name);
            }
            using (new AutoXml(this, "Table", attrs))
            {
                DumpVars(table.Columns);
            }
        }

        #region ColumnMap dumper
        internal class ColumnMapDumper : ColumnMapVisitor<Dump>
        {
            static internal ColumnMapDumper Instance = new ColumnMapDumper();

            /// <summary>
            /// Private constructor
            /// </summary>
            private ColumnMapDumper()
            {
            }

            #region Helpers
            /// <summary>
            /// Common CollectionColumnMap code: foreign keys, keys, then the element map.
            /// </summary>
            private void DumpCollection(CollectionColumnMap columnMap, Dump dumper)
            {
                if (columnMap.ForeignKeys.Length > 0)
                {
                    using (new AutoXml(dumper, "foreignKeys"))
                    {
                        VisitList(columnMap.ForeignKeys, dumper);
                    }
                }
                if (columnMap.Keys.Length > 0)
                {
                    using (new AutoXml(dumper, "keys"))
                    {
                        VisitList(columnMap.Keys, dumper);
                    }
                }
                using (new AutoXml(dumper, "element"))
                {
                    columnMap.Element.Accept(this, dumper);
                }
            }

            /// <summary>
            /// Common code to produce the attributes for the dumper's XML node
            /// </summary>
            /// <param name="columnMap"></param>
            /// <returns>attribute dictionary containing the column map's type</returns>
            private static Dictionary<string, object> GetAttributes(ColumnMap columnMap)
            {
                Dictionary<string, object> attrs = new Dictionary<string, object>();
                attrs.Add("Type", columnMap.Type.ToString());
                return attrs;
            }
            #endregion

            /// <summary>
            /// ComplexTypeColumnMap
            /// </summary>
            /// <param name="columnMap"></param>
            /// <param name="dumper"></param>
            internal override void Visit(ComplexTypeColumnMap columnMap, Dump dumper)
            {
                using (new AutoXml(dumper, "ComplexType", GetAttributes(columnMap)))
                {
                    if (columnMap.NullSentinel != null)
                    {
                        using (new AutoXml(dumper, "nullSentinel"))
                        {
                            columnMap.NullSentinel.Accept(this, dumper);
                        }
                    }
                    VisitList(columnMap.Properties, dumper);
                }
            }

            /// <summary>
            /// DiscriminatedCollectionColumnMap
            /// </summary>
            /// <param name="columnMap"></param>
            /// <param name="dumper"></param>
            internal override void Visit(DiscriminatedCollectionColumnMap columnMap, Dump dumper)
            {
                using (new AutoXml(dumper, "DiscriminatedCollection", GetAttributes(columnMap)))
                {
                    Dictionary<string, object> attrs = new Dictionary<string, object>();
                    attrs.Add("Value", columnMap.DiscriminatorValue);
                    using (new AutoXml(dumper, "discriminator", attrs))
                    {
                        columnMap.Discriminator.Accept(this, dumper);
                    }
                    DumpCollection(columnMap, dumper);
                }
            }

            /// <summary>
            /// EntityColumnMap
            /// </summary>
            /// <param name="columnMap"></param>
            /// <param name="dumper"></param>
            internal override void Visit(EntityColumnMap columnMap, Dump dumper)
            {
                using (new AutoXml(dumper, "Entity", GetAttributes(columnMap)))
                {
                    using (new AutoXml(dumper, "entityIdentity"))
                    {
                        VisitEntityIdentity(columnMap.EntityIdentity, dumper);
                    }
                    VisitList(columnMap.Properties, dumper);
                }
            }

            /// <summary>
            /// PolymorphicColumnMap: type discriminator, one choice per type, then defaults.
            /// </summary>
            /// <param name="columnMap"></param>
            /// <param name="dumper"></param>
            internal override void Visit(SimplePolymorphicColumnMap columnMap, Dump dumper)
            {
                using (new AutoXml(dumper, "SimplePolymorphic", GetAttributes(columnMap)))
                {
                    using (new AutoXml(dumper, "typeDiscriminator"))
                    {
                        columnMap.TypeDiscriminator.Accept(this, dumper);
                    }
                    Dictionary<string, object> attrs = new Dictionary<string, object>();
                    foreach (KeyValuePair<object, TypedColumnMap> tc in columnMap.TypeChoices)
                    {
                        attrs.Clear();
                        attrs.Add("DiscriminatorValue", tc.Key);
                        using (new AutoXml(dumper, "choice", attrs))
                        {
                            tc.Value.Accept(this, dumper);
                        }
                    }
                    using (new AutoXml(dumper, "default"))
                    {
                        VisitList(columnMap.Properties, dumper);
                    }
                }
            }

            /// <summary>
            /// MultipleDiscriminatorPolymorphicColumnMap
            /// </summary>
            internal override void Visit(MultipleDiscriminatorPolymorphicColumnMap columnMap, Dump dumper)
            {
                using (new AutoXml(dumper,
"MultipleDiscriminatorPolymorphic", GetAttributes(columnMap))) { using (new AutoXml(dumper, "typeDiscriminators")) { VisitList(columnMap.TypeDiscriminators, dumper); } Dictionary<string, object> attrs = new Dictionary<string, object>(); foreach (var tc in columnMap.TypeChoices) { attrs.Clear(); attrs.Add("EntityType", tc.Key); using (new AutoXml(dumper, "choice", attrs)) { tc.Value.Accept(this, dumper); } } using (new AutoXml(dumper, "default")) { VisitList(columnMap.Properties, dumper); } } } /// <summary> /// RecordColumnMap /// </summary> /// <param name="columnMap"></param> /// <param name="dumper"></param> /// <returns></returns> internal override void Visit(RecordColumnMap columnMap, Dump dumper) { using (new AutoXml(dumper, "Record", GetAttributes(columnMap))) { if (columnMap.NullSentinel != null) { using (new AutoXml(dumper, "nullSentinel")) { columnMap.NullSentinel.Accept(this, dumper); } } VisitList(columnMap.Properties, dumper); } } /// <summary> /// RefColumnMap /// </summary> /// <param name="columnMap"></param> /// <param name="dumper"></param> /// <returns></returns> internal override void Visit(RefColumnMap columnMap, Dump dumper) { using (new AutoXml(dumper, "Ref", GetAttributes(columnMap))) { using (new AutoXml(dumper, "entityIdentity")) { VisitEntityIdentity(columnMap.EntityIdentity, dumper); } } } /// <summary> /// SimpleCollectionColumnMap /// </summary> /// <param name="columnMap"></param> /// <param name="dumper"></param> /// <returns></returns> internal override void Visit(SimpleCollectionColumnMap columnMap, Dump dumper) { using (new AutoXml(dumper, "SimpleCollection", GetAttributes(columnMap))) { DumpCollection(columnMap, dumper); } } /// <summary> /// SimpleColumnMap /// </summary> /// <param name="columnMap"></param> /// <param name="dumper"></param> /// <returns></returns> internal override void Visit(ScalarColumnMap columnMap, Dump dumper) { Dictionary<string, object> attrs = GetAttributes(columnMap); attrs.Add("CommandId", 
columnMap.CommandId); attrs.Add("ColumnPos", columnMap.ColumnPos); using (new AutoXml(dumper, "AssignedSimple", attrs)) { } } /// <summary> /// SimpleColumnMap /// </summary> /// <param name="columnMap"></param> /// <param name="dumper"></param> /// <returns></returns> internal override void Visit(VarRefColumnMap columnMap, Dump dumper) { Dictionary<string, object> attrs = GetAttributes(columnMap); attrs.Add("Var", ((VarRefColumnMap)columnMap).Var.Id); using (new AutoXml(dumper, "VarRef", attrs)) { } } /// <summary> /// DiscriminatedEntityIdentity /// </summary> /// <param name="entityIdentity"></param> /// <param name="dumper"></param> /// <returns></returns> protected override void VisitEntityIdentity(DiscriminatedEntityIdentity entityIdentity, Dump dumper) { using (new AutoXml(dumper, "DiscriminatedEntityIdentity")) { using (new AutoXml(dumper, "entitySetId")) { entityIdentity.EntitySetColumnMap.Accept(this, dumper); } if (entityIdentity.Keys.Length > 0) { using (new AutoXml(dumper, "keys")) { VisitList(entityIdentity.Keys, dumper); } } } } /// <summary> /// SimpleEntityIdentity /// </summary> /// <param name="entityIdentity"></param> /// <param name="dumper"></param> /// <returns></returns> protected override void VisitEntityIdentity(SimpleEntityIdentity entityIdentity, Dump dumper) { using (new AutoXml(dumper, "SimpleEntityIdentity")) { if (entityIdentity.Keys.Length > 0) { using (new AutoXml(dumper, "keys")) { VisitList(entityIdentity.Keys, dumper); } } } } } #endregion #endregion internal struct AutoString : IDisposable { private Dump _dumper; internal AutoString(Dump dumper, Op op) { _dumper = dumper; _dumper.WriteString(AutoString.ToString(op.OpType)); _dumper.BeginExpression(); } public void Dispose() { try { _dumper.EndExpression(); } catch (Exception e) { if (!EntityUtil.IsCatchableExceptionType(e)) { throw; } // eat this exception; we don't care if the dumper is failing... 
} } internal static string ToString(OpType op) { // perf: Enum.ToString() actually is very perf intensive in time & memory switch (op) { case OpType.Aggregate: return "Aggregate"; case OpType.And: return "And"; case OpType.Case: return "Case"; case OpType.Cast: return "Cast"; case OpType.Collect: return "Collect"; case OpType.Constant: return "Constant"; case OpType.ConstantPredicate: return "ConstantPredicate"; case OpType.CrossApply: return "CrossApply"; case OpType.CrossJoin: return "CrossJoin"; case OpType.Deref: return "Deref"; case OpType.Distinct: return "Distinct"; case OpType.Divide: return "Divide"; case OpType.Element: return "Element"; case OpType.EQ: return "EQ"; case OpType.Except: return "Except"; case OpType.Exists: return "Exists"; case OpType.Filter: return "Filter"; case OpType.FullOuterJoin: return "FullOuterJoin"; case OpType.Function: return "Function"; case OpType.GE: return "GE"; case OpType.GetEntityRef: return "GetEntityRef"; case OpType.GetRefKey: return "GetRefKey"; case OpType.GroupBy: return "GroupBy"; case OpType.GroupByInto: return "GroupByInto"; case OpType.GT: return "GT"; case OpType.InnerJoin: return "InnerJoin"; case OpType.InternalConstant: return "InternalConstant"; case OpType.Intersect: return "Intersect"; case OpType.IsNull: return "IsNull"; case OpType.IsOf: return "IsOf"; case OpType.LE: return "LE"; case OpType.Leaf: return "Leaf"; case OpType.LeftOuterJoin: return "LeftOuterJoin"; case OpType.Like: return "Like"; case OpType.LT: return "LT"; case OpType.Minus: return "Minus"; case OpType.Modulo: return "Modulo"; case OpType.Multiply: return "Multiply"; case OpType.MultiStreamNest: return "MultiStreamNest"; case OpType.Navigate: return "Navigate"; case OpType.NE: return "NE"; case OpType.NewEntity: return "NewEntity"; case OpType.NewInstance: return "NewInstance"; case OpType.DiscriminatedNewEntity: return "DiscriminatedNewEntity"; case OpType.NewMultiset: return "NewMultiset"; case OpType.NewRecord: return "NewRecord"; 
case OpType.Not: return "Not"; case OpType.Null: return "Null"; case OpType.NullSentinel: return "NullSentinel"; case OpType.Or: return "Or"; case OpType.OuterApply: return "OuterApply"; case OpType.PhysicalProject: return "PhysicalProject"; case OpType.Plus: return "Plus"; case OpType.Project: return "Project"; case OpType.Property: return "Property"; case OpType.Ref: return "Ref"; case OpType.RelProperty: return "RelProperty"; case OpType.ScanTable: return "ScanTable"; case OpType.ScanView: return "ScanView"; case OpType.SingleRow: return "SingleRow"; case OpType.SingleRowTable: return "SingleRowTable"; case OpType.SingleStreamNest: return "SingleStreamNest"; case OpType.SoftCast: return "SoftCast"; case OpType.Sort: return "Sort"; case OpType.Treat: return "Treat"; case OpType.UnaryMinus: return "UnaryMinus"; case OpType.UnionAll: return "UnionAll"; case OpType.Unnest: return "Unnest"; case OpType.VarDef: return "VarDef"; case OpType.VarDefList: return "VarDefList"; case OpType.VarRef: return "VarRef"; case OpType.ConstrainedSort: return "ConstrainedSort"; default: Debug.Assert(false, "need to special case enum->string: " + op.ToString()); return op.ToString(); } } } internal struct AutoXml : IDisposable { private string _nodeName; private Dump _dumper; internal AutoXml(Dump dumper, Op op) { _dumper = dumper; _nodeName = AutoString.ToString(op.OpType); Dictionary<string, object> attrs = new Dictionary<string, object>(); if (null != op.Type) { attrs.Add("Type", TypeHelpers.GetFullName(op.Type)); } _dumper.Begin(_nodeName, attrs); } internal AutoXml(Dump dumper, Op op, Dictionary<string, object> attrs) { _dumper = dumper; _nodeName = AutoString.ToString(op.OpType); Dictionary<string, object> attrs2 = new Dictionary<string, object>(); if (null != op.Type) { attrs2.Add("Type", TypeHelpers.GetFullName(op.Type)); } foreach (KeyValuePair<string, object> kv in attrs) { attrs2.Add(kv.Key, kv.Value); } _dumper.Begin(_nodeName, attrs2); } internal AutoXml(Dump dumper, 
string nodeName) : this(dumper, nodeName, null) { } internal AutoXml(Dump dumper, string nodeName, Dictionary<string, object> attrs) { _dumper = dumper; _nodeName = nodeName; _dumper.Begin(_nodeName, attrs); } public void Dispose() { _dumper.End(_nodeName); } } } }
// Copyright (c) 2010-2014 SharpDX - Alexandre Mutel
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;

namespace SharpDX.Multimedia
{
    /// <summary>
    /// Riff chunk enumerator.
    /// </summary>
    public class RiffParser : IEnumerator<RiffChunk>, IEnumerable<RiffChunk>
    {
        private readonly Stream input;
        private readonly long startPosition;
        private readonly BinaryReader reader;
        // NOTE(review): "chunck" is a long-standing typo in this private field;
        // left as-is since renaming is cosmetic. Holds the chain of ancestor
        // LIST/RIFF chunks the parser has descended into.
        private readonly Stack<RiffChunk> chunckStack;
        // Set by Descend(): the next MoveNext() enters the current chunk's data
        // instead of skipping past it.
        private bool descendNext;
        private bool isEndOfRiff;
        private bool isErrorState;
        private RiffChunk current;

        /// <summary>
        /// Initializes a new instance of the <see cref="RiffParser"/> class.
        /// </summary>
        /// <param name="input">The input.</param>
        public RiffParser(Stream input)
        {
            this.input = input;
            this.startPosition = input.Position;
            this.reader = new BinaryReader(input);
            this.chunckStack = new Stack<RiffChunk>();
        }

        /// <summary>
        /// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources.
        /// </summary>
        public void Dispose()
        {
            // Nothing to dispose.
            // NOTE(review): the BinaryReader/input stream are deliberately not
            // closed here — presumably the caller owns the stream; confirm.
        }

        /// <summary>
        /// Advances the enumerator to the next element of the collection.
        /// </summary>
        /// <returns>
        /// true if the enumerator was successfully advanced to the next element; false if the enumerator has passed the end of the collection.
        /// </returns>
        /// <exception cref="T:System.InvalidOperationException">
        /// The collection was modified after the enumerator was created.
        /// </exception>
        public bool MoveNext()
        {
            CheckState();

            if (current != null)
            {
                // By default, set the starting position to the data of the chunk
                long nextOffset = current.DataPosition;
                // If we descend
                if (descendNext)
                {
                    // Next time, proceed chunk sequentially
                    descendNext = false;
                }
                else
                {
                    // Else, go to next chunk
                    nextOffset += current.Size;
                    // Pad DWORD: RIFF chunks are word-aligned, so odd sizes
                    // carry one padding byte.
                    if ((nextOffset & 1) != 0)
                        nextOffset++;
                }
                input.Position = nextOffset;

                // Check that moveNext is not going outside a parent chunk.
                // If yes, pop the last chunk from the stack
                var currentChunkContainer = chunckStack.Peek();
                long endOfOuterChunk = currentChunkContainer.DataPosition + currentChunkContainer.Size;
                if (input.Position >= endOfOuterChunk)
                    chunckStack.Pop();

                // If there are no more chunk in the stack, enumeration is done.
                if (chunckStack.Count == 0)
                {
                    isEndOfRiff = true;
                    return false;
                }
            }

            var fourCC = ((FourCC) reader.ReadUInt32());
            bool isList = (fourCC == "LIST");
            bool isHeader = (fourCC == "RIFF");
            uint chunkSize = 0;

            // The very first chunk of the file must be the RIFF header
            // (we are 4 bytes past startPosition after reading the FourCC).
            if (input.Position == (startPosition+4) && !isHeader)
            {
                isErrorState = true;
                throw new InvalidOperationException("Invalid RIFF file format");
            }

            // Read chunk size
            chunkSize = reader.ReadUInt32();

            // If list or header
            if (isList || isHeader)
            {
                // Check file size
                if (isHeader && chunkSize > (input.Length - 8))
                {
                    isErrorState = true;
                    throw new InvalidOperationException("Invalid RIFF file format");
                }
                // LIST/RIFF payloads begin with a 4-byte form type that we fold
                // into the chunk's FourCC; the remaining size shrinks accordingly.
                chunkSize -= 4;
                fourCC = reader.ReadUInt32();
            }

            // Read RIFF type and create chunk
            current = new RiffChunk(input, fourCC, chunkSize, (uint)input.Position, isList, isHeader);
            return true;
        }

        // Guards every public operation: once the parser has finished or
        // faulted it must not be reused.
        private void CheckState()
        {
            if (isEndOfRiff)
                throw new InvalidOperationException("End of Riff. Cannot MoveNext");

            if (isErrorState)
                throw new InvalidOperationException("The enumerator is in an error state");
        }

        /// <summary>
        /// Gets the current stack of chunks.
        /// </summary>
        public Stack<RiffChunk> ChunkStack { get { return chunckStack; } }

        /// <summary>
        /// Sets the enumerator to its initial position, which is before the first element in the collection.
        /// </summary>
        /// <exception cref="T:System.InvalidOperationException">
        /// The collection was modified after the enumerator was created.
        /// </exception>
        public void Reset()
        {
            CheckState();
            current = null;
            input.Position = startPosition;
        }

        /// <summary>
        /// Ascends to the outer chunk: pops the enclosing chunk and seeks
        /// to the first byte past its data.
        /// </summary>
        public void Ascend()
        {
            CheckState();
            var outerChunk = chunckStack.Pop();
            input.Position = outerChunk.DataPosition + outerChunk.Size;
        }

        /// <summary>
        /// Descends to the current chunk: the next MoveNext() will enumerate
        /// the current chunk's children instead of its siblings.
        /// </summary>
        public void Descend()
        {
            CheckState();
            chunckStack.Push(current);
            descendNext = true;
        }

        /// <summary>
        /// Gets all chunks.
        /// </summary>
        /// <returns></returns>
        public IList<RiffChunk> GetAllChunks()
        {
            var chunks = new List<RiffChunk>();
            foreach (var riffChunk in this)
                chunks.Add(riffChunk);
            return chunks;
        }

        /// <summary>
        /// Gets the element in the collection at the current position of the enumerator.
        /// </summary>
        /// <returns>
        /// The element in the collection at the current position of the enumerator.
        /// </returns>
        public RiffChunk Current
        {
            get
            {
                CheckState();
                return current;
            }
        }

        /// <summary>
        /// Returns an enumerator that iterates through the collection.
        /// </summary>
        /// <returns>
        /// A <see cref="T:System.Collections.Generic.IEnumerator`1"/> that can be used to iterate through the collection.
        /// </returns>
        public IEnumerator<RiffChunk> GetEnumerator()
        {
            // The parser is its own enumerator (single-pass enumeration).
            return this;
        }

        object IEnumerator.Current
        {
            get
            {
                CheckState();
                return Current;
            }
        }

        IEnumerator IEnumerable.GetEnumerator()
        {
            return GetEnumerator();
        }
    }
}
// // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See LICENSE in the project root for license information. // using System; using System.Collections.Generic; using System.Globalization; using System.IO; using System.Linq; using System.Xml; using UnityEditor; using UnityEngine; namespace SpectatorView { /// <summary> /// Class containing various utility methods to build a WSA solution from a Unity project. /// </summary> public static class BuildSLNUtilities { public class CopyDirectoryInfo { public string Source { get; set; } public string Destination { get; set; } public string Filter { get; set; } public bool Recursive { get; set; } public CopyDirectoryInfo() { Source = null; Destination = null; Filter = "*"; Recursive = false; } } public class BuildInfo { public string OutputDirectory { get; set; } public IEnumerable<string> Scenes { get; set; } public IEnumerable<CopyDirectoryInfo> CopyDirectories { get; set; } public Action<BuildInfo> PreBuildAction { get; set; } public Action<BuildInfo, string> PostBuildAction { get; set; } public BuildOptions BuildOptions { get; set; } // EditorUserBuildSettings public BuildTarget BuildTarget { get; set; } public WSASDK? WSASdk { get; set; } public WSAUWPBuildType? WSAUWPBuildType { get; set; } public Boolean? WSAGenerateReferenceProjects { get; set; } public ColorSpace? 
ColorSpace { get; set; } public bool IsCommandLine { get; set; } public string BuildSymbols { get; private set; } public BuildInfo() { BuildSymbols = string.Empty; } public void AppendSymbols(params string[] symbol) { this.AppendSymbols((IEnumerable<string>)symbol); } public void AppendSymbols(IEnumerable<string> symbols) { string[] toAdd = symbols.Except(this.BuildSymbols.Split(';')) .Where(sym => !string.IsNullOrEmpty(sym)).ToArray(); if (!toAdd.Any()) { return; } if (!String.IsNullOrEmpty(this.BuildSymbols)) { this.BuildSymbols += ";"; } this.BuildSymbols += String.Join(";", toAdd); } public bool HasAnySymbols(params string[] symbols) { return this.BuildSymbols.Split(';').Intersect(symbols).Any(); } public bool HasConfigurationSymbol() { return HasAnySymbols( BuildSLNUtilities.BuildSymbolDebug, BuildSLNUtilities.BuildSymbolRelease, BuildSLNUtilities.BuildSymbolMaster); } public static IEnumerable<string> RemoveConfigurationSymbols(string symbolstring) { return symbolstring.Split(';').Except(new[] { BuildSLNUtilities.BuildSymbolDebug, BuildSLNUtilities.BuildSymbolRelease, BuildSLNUtilities.BuildSymbolMaster }); } public bool HasAnySymbols(IEnumerable<string> symbols) { return this.BuildSymbols.Split(';').Intersect(symbols).Any(); } } // Build configurations. Exactly one of these should be defined for any given build. public const string BuildSymbolDebug = "DEBUG"; public const string BuildSymbolRelease = "RELEASE"; public const string BuildSymbolMaster = "MASTER"; /// <summary> /// Event triggered when a build starts. /// </summary> public static event Action<BuildInfo> BuildStarted; /// <summary> /// Event triggered when a build completes. 
/// </summary> public static event Action<BuildInfo, string> BuildCompleted; public static void PerformBuild(BuildInfo buildInfo) { BuildTargetGroup buildTargetGroup = GetGroup(buildInfo.BuildTarget); string oldBuildSymbols = PlayerSettings.GetScriptingDefineSymbolsForGroup(buildTargetGroup); if (!string.IsNullOrEmpty(oldBuildSymbols)) { if (buildInfo.HasConfigurationSymbol()) { buildInfo.AppendSymbols(BuildInfo.RemoveConfigurationSymbols(oldBuildSymbols)); } else { buildInfo.AppendSymbols(oldBuildSymbols.Split(';')); } } if ((buildInfo.BuildOptions & BuildOptions.Development) == BuildOptions.Development) { if (!buildInfo.HasConfigurationSymbol()) { buildInfo.AppendSymbols(BuildSLNUtilities.BuildSymbolDebug); } } if (buildInfo.HasAnySymbols(BuildSLNUtilities.BuildSymbolDebug)) { buildInfo.BuildOptions |= BuildOptions.Development | BuildOptions.AllowDebugging; } if (buildInfo.HasAnySymbols(BuildSLNUtilities.BuildSymbolRelease)) { //Unity automatically adds the DEBUG symbol if the BuildOptions.Development flag is //specified. In order to have debug symbols and the RELEASE symbole we have to //inject the symbol Unity relies on to enable the /debug+ flag of csc.exe which is "DEVELOPMENT_BUILD" buildInfo.AppendSymbols("DEVELOPMENT_BUILD"); } var oldBuildTarget = EditorUserBuildSettings.activeBuildTarget; EditorUserBuildSettings.SwitchActiveBuildTarget(buildInfo.BuildTarget); var oldWSASDK = EditorUserBuildSettings.wsaSDK; if (buildInfo.WSASdk.HasValue) { EditorUserBuildSettings.wsaSDK = buildInfo.WSASdk.Value; } WSAUWPBuildType? 
oldWSAUWPBuildType = null; if (EditorUserBuildSettings.wsaSDK == WSASDK.UWP) { oldWSAUWPBuildType = EditorUserBuildSettings.wsaUWPBuildType; if (buildInfo.WSAUWPBuildType.HasValue) { EditorUserBuildSettings.wsaUWPBuildType = buildInfo.WSAUWPBuildType.Value; } } var oldWSAGenerateReferenceProjects = EditorUserBuildSettings.wsaGenerateReferenceProjects; if (buildInfo.WSAGenerateReferenceProjects.HasValue) { EditorUserBuildSettings.wsaGenerateReferenceProjects = buildInfo.WSAGenerateReferenceProjects.Value; } var oldColorSpace = PlayerSettings.colorSpace; if (buildInfo.ColorSpace.HasValue) { PlayerSettings.colorSpace = buildInfo.ColorSpace.Value; } if (buildInfo.BuildSymbols != null) { PlayerSettings.SetScriptingDefineSymbolsForGroup(buildTargetGroup, buildInfo.BuildSymbols); } string buildError = "Error"; try { // For the WSA player, Unity builds into a target directory. // For other players, the OutputPath parameter indicates the // path to the target executable to build. if (buildInfo.BuildTarget == BuildTarget.WSAPlayer) { Directory.CreateDirectory(buildInfo.OutputDirectory); } OnPreProcessBuild(buildInfo); buildError = BuildPipeline.BuildPlayer( buildInfo.Scenes.ToArray(), buildInfo.OutputDirectory, buildInfo.BuildTarget, buildInfo.BuildOptions); if (buildError.StartsWith("Error")) { throw new Exception(buildError); } } finally { OnPostProcessBuild(buildInfo, buildError); PlayerSettings.colorSpace = oldColorSpace; PlayerSettings.SetScriptingDefineSymbolsForGroup(buildTargetGroup, oldBuildSymbols); EditorUserBuildSettings.wsaSDK = oldWSASDK; if (oldWSAUWPBuildType.HasValue) { EditorUserBuildSettings.wsaUWPBuildType = oldWSAUWPBuildType.Value; } EditorUserBuildSettings.wsaGenerateReferenceProjects = oldWSAGenerateReferenceProjects; EditorUserBuildSettings.SwitchActiveBuildTarget(oldBuildTarget); } } public static void ParseBuildCommandLine(ref BuildInfo buildInfo) { string[] arguments = System.Environment.GetCommandLineArgs(); buildInfo.IsCommandLine = true; for 
(int i = 0; i < arguments.Length; ++i) { // Can't use -buildTarget which is something Unity already takes as an argument for something. if (string.Equals(arguments[i], "-duskBuildTarget", StringComparison.InvariantCultureIgnoreCase)) { buildInfo.BuildTarget = (BuildTarget)Enum.Parse(typeof(BuildTarget), arguments[++i]); } else if (string.Equals(arguments[i], "-wsaSDK", StringComparison.InvariantCultureIgnoreCase)) { string wsaSdkArg = arguments[++i]; buildInfo.WSASdk = (WSASDK)Enum.Parse(typeof(WSASDK), wsaSdkArg); } else if (string.Equals(arguments[i], "-wsaUWPBuildType", StringComparison.InvariantCultureIgnoreCase)) { buildInfo.WSAUWPBuildType = (WSAUWPBuildType)Enum.Parse(typeof(WSAUWPBuildType), arguments[++i]); } else if (string.Equals(arguments[i], "-wsaGenerateReferenceProjects", StringComparison.InvariantCultureIgnoreCase)) { buildInfo.WSAGenerateReferenceProjects = Boolean.Parse(arguments[++i]); } else if (string.Equals(arguments[i], "-buildOutput", StringComparison.InvariantCultureIgnoreCase)) { buildInfo.OutputDirectory = arguments[++i]; } else if (string.Equals(arguments[i], "-buildDesc", StringComparison.InvariantCultureIgnoreCase)) { ParseBuildDescriptionFile(arguments[++i], ref buildInfo); } else if (string.Equals(arguments[i], "-unityBuildSymbols", StringComparison.InvariantCultureIgnoreCase)) { string newBuildSymbols = arguments[++i]; buildInfo.AppendSymbols(newBuildSymbols.Split(new[] { ';' }, StringSplitOptions.RemoveEmptyEntries)); } } } public static void PerformBuild_CommandLine() { BuildInfo buildInfo = new BuildInfo() { // Use scenes from the editor build settings. 
Scenes = EditorBuildSettings.scenes.Where(scene => scene.enabled).Select(scene => scene.path), }; ParseBuildCommandLine(ref buildInfo); PerformBuild(buildInfo); } public static void ParseBuildDescriptionFile(string filename, ref BuildInfo buildInfo) { Debug.Log(string.Format(CultureInfo.InvariantCulture, "Build: Using \"{0}\" as build description", filename)); // Parse the XML file XmlTextReader reader = new XmlTextReader(filename); while (reader.Read()) { switch (reader.NodeType) { case XmlNodeType.Element: if (string.Equals(reader.Name, "SceneList", StringComparison.InvariantCultureIgnoreCase)) { // Set the scenes we want to build buildInfo.Scenes = ReadSceneList(reader); } else if (string.Equals(reader.Name, "CopyList", StringComparison.InvariantCultureIgnoreCase)) { // Set the directories we want to copy buildInfo.CopyDirectories = ReadCopyList(reader); } break; } } } private static BuildTargetGroup GetGroup(BuildTarget buildTarget) { switch (buildTarget) { case BuildTarget.WSAPlayer: return BuildTargetGroup.WSA; case BuildTarget.StandaloneWindows: case BuildTarget.StandaloneWindows64: return BuildTargetGroup.Standalone; default: return BuildTargetGroup.Unknown; } } private static IEnumerable<string> ReadSceneList(XmlTextReader reader) { List<string> result = new List<string>(); while (reader.Read()) { switch (reader.NodeType) { case XmlNodeType.Element: if (string.Equals(reader.Name, "Scene", StringComparison.InvariantCultureIgnoreCase)) { while (reader.MoveToNextAttribute()) { if (string.Equals(reader.Name, "Name", StringComparison.InvariantCultureIgnoreCase)) { result.Add(reader.Value); Debug.Log(string.Format(CultureInfo.InvariantCulture, "Build: Adding scene \"{0}\"", reader.Value)); } } } break; case XmlNodeType.EndElement: if (string.Equals(reader.Name, "SceneList", StringComparison.InvariantCultureIgnoreCase)) return result; break; } } return result; } private static IEnumerable<CopyDirectoryInfo> ReadCopyList(XmlTextReader reader) { 
List<CopyDirectoryInfo> result = new List<CopyDirectoryInfo>(); while (reader.Read()) { switch (reader.NodeType) { case XmlNodeType.Element: if (string.Equals(reader.Name, "Copy", StringComparison.InvariantCultureIgnoreCase)) { string source = null; string dest = null; string filter = null; bool recursive = false; while (reader.MoveToNextAttribute()) { if (string.Equals(reader.Name, "Source", StringComparison.InvariantCultureIgnoreCase)) { source = reader.Value; } else if (string.Equals(reader.Name, "Destination", StringComparison.InvariantCultureIgnoreCase)) { dest = reader.Value; } else if (string.Equals(reader.Name, "Recursive", StringComparison.InvariantCultureIgnoreCase)) { recursive = System.Convert.ToBoolean(reader.Value); } else if (string.Equals(reader.Name, "Filter", StringComparison.InvariantCultureIgnoreCase)) { filter = reader.Value; } } if (source != null) { // Either the file specifies the Destination as well, or else CopyDirectory will use Source for Destination CopyDirectoryInfo info = new CopyDirectoryInfo(); info.Source = source; if (dest != null) { info.Destination = dest; } if (filter != null) { info.Filter = filter; } info.Recursive = recursive; Debug.Log(string.Format(CultureInfo.InvariantCulture, @"Build: Adding {0}copy ""{1}\{2}"" => ""{3}""", info.Recursive ? "Recursive " : "", info.Source, info.Filter, info.Destination ?? info.Source)); result.Add(info); } } break; case XmlNodeType.EndElement: if (string.Equals(reader.Name, "CopyList", StringComparison.InvariantCultureIgnoreCase)) return result; break; } } return result; } public static void CopyDirectory(string sourceDirectoryPath, string destinationDirectoryPath, CopyDirectoryInfo directoryInfo) { sourceDirectoryPath = Path.Combine(sourceDirectoryPath, directoryInfo.Source); destinationDirectoryPath = Path.Combine(destinationDirectoryPath, directoryInfo.Destination ?? 
directoryInfo.Source); Debug.Log(string.Format(CultureInfo.InvariantCulture, @"{0} ""{1}\{2}"" to ""{3}""", directoryInfo.Recursive ? "Recursively copying" : "Copying", sourceDirectoryPath, directoryInfo.Filter, destinationDirectoryPath)); foreach (string sourceFilePath in Directory.GetFiles(sourceDirectoryPath, directoryInfo.Filter, directoryInfo.Recursive ? SearchOption.AllDirectories : SearchOption.TopDirectoryOnly)) { string destinationFilePath = sourceFilePath.Replace(sourceDirectoryPath, destinationDirectoryPath); try { Directory.CreateDirectory(Path.GetDirectoryName(destinationFilePath)); if (File.Exists(destinationFilePath)) { File.SetAttributes(destinationFilePath, FileAttributes.Normal); } File.Copy(sourceFilePath, destinationFilePath, true); File.SetAttributes(destinationFilePath, FileAttributes.Normal); } catch (Exception exception) { Debug.LogError(string.Format(CultureInfo.InvariantCulture, "Failed to copy \"{0}\" to \"{1}\" with \"{2}\"", sourceFilePath, destinationFilePath, exception)); } } } private static void OnPreProcessBuild(BuildInfo buildInfo) { // Raise the global event for listeners if (BuildStarted != null) { BuildStarted(buildInfo); } // Call the pre-build action, if any if (buildInfo.PreBuildAction != null) { buildInfo.PreBuildAction(buildInfo); } } private static void OnPostProcessBuild(BuildInfo buildInfo, string buildError) { if (string.IsNullOrEmpty(buildError)) { if (buildInfo.CopyDirectories != null) { string inputProjectDirectoryPath = GetProjectPath(); string outputProjectDirectoryPath = Path.Combine(GetProjectPath(), buildInfo.OutputDirectory); foreach (var directory in buildInfo.CopyDirectories) { CopyDirectory(inputProjectDirectoryPath, outputProjectDirectoryPath, directory); } } } // Raise the global event for listeners if (BuildCompleted != null) { BuildCompleted(buildInfo, buildError); } // Call the post-build action, if any if (buildInfo.PostBuildAction != null) { buildInfo.PostBuildAction(buildInfo, buildError); } } 
/// <summary>
/// Returns the absolute path of the Unity project root, i.e. the parent
/// directory of the Assets folder reported by Application.dataPath.
/// </summary>
private static string GetProjectPath()
{
    string assetsPath = Path.GetFullPath(Application.dataPath);
    return Path.GetDirectoryName(assetsPath);
}
}
}
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.Linq;
using LibGit2Sharp.Core;
using LibGit2Sharp.Core.Handles;

namespace LibGit2Sharp
{
    /// <summary>
    /// A collection of <see cref="Note"/> exposed in the <see cref="Repository"/>.
    /// </summary>
    [DebuggerDisplay("{DebuggerDisplay,nq}")]
    public class NoteCollection : IEnumerable<Note>
    {
        internal readonly Repository repo;

        // Lazily resolved so the libgit2 call only happens on first use.
        private readonly Lazy<string> defaultNamespace;

        /// <summary>
        /// Needed for mocking purposes.
        /// </summary>
        protected NoteCollection()
        { }

        internal NoteCollection(Repository repo)
        {
            this.repo = repo;
            defaultNamespace = new Lazy<string>(RetrieveDefaultNamespace);
        }

        #region Implementation of IEnumerable

        /// <summary>
        /// Returns an enumerator that iterates through the collection.
        /// </summary>
        /// <returns>An <see cref="IEnumerator{T}"/> object that can be used to iterate through the collection.</returns>
        public virtual IEnumerator<Note> GetEnumerator()
        {
            // Enumerating the collection means enumerating the default namespace.
            IEnumerable<Note> defaultNotes = this[DefaultNamespace];
            return defaultNotes.GetEnumerator();
        }

        /// <summary>
        /// Returns an enumerator that iterates through the collection.
        /// </summary>
        /// <returns>An <see cref="IEnumerator"/> object that can be used to iterate through the collection.</returns>
        IEnumerator IEnumerable.GetEnumerator()
        {
            return GetEnumerator();
        }

        #endregion

        /// <summary>
        /// The default namespace for notes.
        /// </summary>
        public virtual string DefaultNamespace
        {
            get { return defaultNamespace.Value; }
        }

        /// <summary>
        /// The list of canonicalized namespaces related to notes.
        /// </summary>
        public virtual IEnumerable<string> Namespaces
        {
            get
            {
                return NamespaceRefs.Select(canonical => UnCanonicalizeName(canonical));
            }
        }

        /// <summary>
        /// Canonical note references: the default namespace first, followed by every
        /// other "refs/notes/*" reference known to the repository.
        /// </summary>
        internal IEnumerable<string> NamespaceRefs
        {
            get
            {
                string defaultRef = NormalizeToCanonicalName(DefaultNamespace);

                var additionalRefs = repo.Refs
                    .Select(reference => reference.CanonicalName)
                    .Where(canonical => canonical.StartsWith(Reference.NotePrefix, StringComparison.Ordinal)
                        && canonical != defaultRef);

                return Enumerable.Repeat(defaultRef, 1).Concat(additionalRefs);
            }
        }

        /// <summary>
        /// Gets the collection of <see cref="Note"/> associated with the specified <see cref="ObjectId"/>.
        /// </summary>
        public virtual IEnumerable<Note> this[ObjectId id]
        {
            get
            {
                Ensure.ArgumentNotNull(id, "id");

                // Probe every known namespace; skip namespaces with no note for this object.
                return NamespaceRefs
                    .Select(ns => this[ns, id])
                    .Where(note => note != null);
            }
        }

        /// <summary>
        /// Gets the collection of <see cref="Note"/> associated with the specified namespace.
        /// <para>This is similar to the 'get notes list' command.</para>
        /// </summary>
        public virtual IEnumerable<Note> this[string @namespace]
        {
            get
            {
                Ensure.ArgumentNotNull(@namespace, "@namespace");

                string canonical = NormalizeToCanonicalName(@namespace);

                return Proxy.git_note_foreach(repo.Handle,
                                              canonical,
                                              (blobId, annotatedObjId) => this[canonical, annotatedObjId]);
            }
        }

        /// <summary>
        /// Gets the <see cref="Note"/> associated with the specified objectId and the specified namespace.
        /// </summary>
        public virtual Note this[string @namespace, ObjectId id]
        {
            get
            {
                Ensure.ArgumentNotNull(id, "id");
                Ensure.ArgumentNotNull(@namespace, "@namespace");

                string canonical = NormalizeToCanonicalName(@namespace);

                using (NoteHandle handle = Proxy.git_note_read(repo.Handle, canonical, id))
                {
                    if (handle == null)
                    {
                        // No note exists for this object in this namespace.
                        return null;
                    }

                    return Note.BuildFromPtr(handle, UnCanonicalizeName(canonical), id);
                }
            }
        }

        /// <summary>
        /// Queries libgit2 for the default notes reference and strips the notes prefix.
        /// </summary>
        private string RetrieveDefaultNamespace()
        {
            return UnCanonicalizeName(Proxy.git_note_default_ref(repo.Handle));
        }

        /// <summary>
        /// Prepends the notes prefix ("refs/notes/") unless the name already carries it.
        /// </summary>
        internal static string NormalizeToCanonicalName(string name)
        {
            Ensure.ArgumentNotNullOrEmptyString(name, "name");

            return name.LooksLikeNote()
                ? name
                : string.Concat(Reference.NotePrefix, name);
        }

        /// <summary>
        /// Strips the notes prefix ("refs/notes/") when the name carries it.
        /// </summary>
        internal static string UnCanonicalizeName(string name)
        {
            Ensure.ArgumentNotNullOrEmptyString(name, "name");

            return name.LooksLikeNote()
                ? name.Substring(Reference.NotePrefix.Length)
                : name;
        }

        /// <summary>
        /// Creates or updates a <see cref="Note"/> on the specified object, and for the given namespace.
        /// </summary>
        /// <param name="targetId">The target <see cref="ObjectId"/>, for which the note will be created.</param>
        /// <param name="message">The note message.</param>
        /// <param name="author">The author.</param>
        /// <param name="committer">The committer.</param>
        /// <param name="namespace">The namespace on which the note will be created. It can be either a canonical namespace or an abbreviated namespace ('refs/notes/myNamespace' or just 'myNamespace').</param>
        /// <returns>The note which was just saved.</returns>
        public virtual Note Add(ObjectId targetId, string message, Signature author, Signature committer, string @namespace)
        {
            Ensure.ArgumentNotNull(targetId, "targetId");
            Ensure.ArgumentNotNullOrEmptyString(message, "message");
            Ensure.ArgumentNotNull(author, "author");
            Ensure.ArgumentNotNull(committer, "committer");
            Ensure.ArgumentNotNullOrEmptyString(@namespace, "@namespace");

            string canonical = NormalizeToCanonicalName(@namespace);

            // Drop any pre-existing note first so the create below behaves as an upsert.
            Remove(targetId, author, committer, @namespace);

            Proxy.git_note_create(repo.Handle, canonical, author, committer, targetId, message, true);

            return this[canonical, targetId];
        }

        /// <summary>
        /// Deletes the note on the specified object, and for the given namespace.
        /// </summary>
        /// <param name="targetId">The target <see cref="ObjectId"/>, for which the note will be created.</param>
        /// <param name="author">The author.</param>
        /// <param name="committer">The committer.</param>
        /// <param name="namespace">The namespace on which the note will be removed. It can be either a canonical namespace or an abbreviated namespace ('refs/notes/myNamespace' or just 'myNamespace').</param>
        public virtual void Remove(ObjectId targetId, Signature author, Signature committer, string @namespace)
        {
            Ensure.ArgumentNotNull(targetId, "targetId");
            Ensure.ArgumentNotNull(author, "author");
            Ensure.ArgumentNotNull(committer, "committer");
            Ensure.ArgumentNotNullOrEmptyString(@namespace, "@namespace");

            Proxy.git_note_remove(repo.Handle, NormalizeToCanonicalName(@namespace), author, committer, targetId);
        }

        // Shown by the DebuggerDisplay attribute on the class.
        private string DebuggerDisplay
        {
            get
            {
                return string.Format(CultureInfo.InvariantCulture, "Count = {0}", this.Count());
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Threading; using Debug = System.Diagnostics.Debug; using Interlocked = System.Threading.Interlocked; namespace System.Xml.Linq { /// <summary> /// This is a thread-safe hash table which maps string keys to values of type TValue. It is assumed that the string key is embedded in the hashed value /// and can be extracted via a call to ExtractKeyDelegate (in order to save space and allow cleanup of key if value is released due to a WeakReference /// TValue releasing its target). /// </summary> /// <remarks> /// All methods on this class are thread-safe. /// /// When the hash table fills up, it is necessary to resize it and rehash all contents. Because this can be expensive, /// a lock is taken, and one thread is responsible for the resize. Other threads which need to add values must wait /// for the resize to be complete. /// /// Thread-Safety Notes /// =================== /// /// 1. Because performance and scalability are such a concern with the global name table, I have avoided the use of /// BIFALOs (Big Fat Locks). Instead, I use CompareExchange, Interlocked.Increment, memory barriers, atomic state objects, /// etc. to avoid locks. Any changes to code which accesses these variables should be carefully reviewed and tested, /// as it can be *very* tricky. In particular, if you don't understand the CLR memory model or if you don't know /// what a memory barrier is, DON'T attempt to modify this code. A good discussion of these topics can be found at /// <![CDATA[http://discuss.develop.com/archives/wa.exe?A2=ind0203B&L=DOTNET&P=R375]]>. /// /// 2. Because I am not sure if the CLR spec has changed since versions 1.0/1.1, I am assuming the weak memory model that /// is described in the ECMA spec, in which normal writes can be reordered. 
This means I must introduce more memory /// barriers than otherwise would be necessary. /// /// 3. There are several thread-safety concepts and patterns I utilize in this code: /// a. Publishing -- There are a small number of places where state is exposed, or published, to multiple threads. /// These places are marked with the comment "PUBLISH", and are key locations to consider when /// reviewing the code for thread-safety. /// /// b. Immutable objects -- Immutable objects initialize their fields once in their constructor and then never modify /// them again. As long as care is taken to ensure that initial field values are visible to /// other threads before publishing the immutable object itself, immutable objects are /// completely thread-safe. /// /// c. Atomic state objects -- Locks typically are taken when several pieces of state must be updated atomically. In /// other words, there is a window in which state is inconsistent, and that window must /// be protected from view by locking. However, if a new object is created each time state /// changes (or state changes substantially), then during creation the new object is only /// visible to a single thread. Once construction is complete, an assignment (guaranteed /// atomic) can replace the old state object with the new state object, thus publishing a /// consistent view to all threads. /// /// d. Retry -- When several threads contend over shared state which only one is allowed to possess, it is possible /// to avoid locking by repeatedly attempting to acquire the shared state. The CompareExchange method /// is useful for atomically ensuring that only one thread succeeds, and other threads are notified that /// they must retry. /// /// 4. All variables which can be written by multiple threads are marked "SHARED STATE". 
/// </remarks>
internal sealed class XHashtable<TValue>
{
    private XHashtableState _state; // SHARED STATE: Contains all XHashtable state, so it can be atomically swapped when resizes occur

    /// <summary>
    /// Prototype of function which is called to extract a string key value from a hashed value.
    /// Returns null if the hashed value is invalid (e.g. value has been released due to a WeakReference TValue being cleaned up).
    /// </summary>
    public delegate string ExtractKeyDelegate(TValue value);

    /// <summary>
    /// Construct a new XHashtable with the specified starting capacity.
    /// </summary>
    public XHashtable(ExtractKeyDelegate extractKey, int capacity)
    {
        _state = new XHashtableState(extractKey, capacity);
    }

    /// <summary>
    /// Get an existing value from the hash table. Return false if no such value exists.
    /// </summary>
    public bool TryGetValue(string key, int index, int count, out TValue value)
    {
        return _state.TryGetValue(key, index, count, out value);
    }

    /// <summary>
    /// Add a value to the hash table, hashed based on a string key embedded in it. Return the added value (may be a different object than "value").
    /// </summary>
    public TValue Add(TValue value)
    {
        TValue newValue;

        // Loop until value is in hash table
        while (true)
        {
            // Add new value
            // XHashtableState.TryAdd returns false if hash table is not big enough
            if (_state.TryAdd(value, out newValue))
                return newValue;

            // PUBLISH (state)
            // Hash table was not big enough, so resize it.
            // We only want one thread to perform a resize, as it is an expensive operation
            // First thread will perform resize; waiting threads will call Resize(), but should immediately
            // return since there will almost always be space in the hash table resized by the first thread.
            // NOTE(review): lock (this) is generally discouraged (external code could lock the same
            // instance); the type is internal so the risk is contained, but a private gate object
            // would be safer — left unchanged here to preserve behavior exactly.
            lock (this)
            {
                XHashtableState newState = _state.Resize();
                // Use memory barrier to ensure that the resized XHashtableState object is fully constructed before it is assigned
                Thread.MemoryBarrier();
                _state = newState;
            }
        }
    }

    /// <summary>
    /// This class contains all the hash table state. Rather than creating a bucket object, buckets are structs
    /// packed into an array. Buckets with the same truncated hash code are linked into lists, so that collisions
    /// can be disambiguated.
    /// </summary>
    /// <remarks>
    /// Note that the "buckets" and "entries" arrays are never themselves written by multiple threads. Instead, the
    /// *contents* of the array are written by multiple threads. Resizing the hash table does not modify these variables,
    /// or even modify the contents of these variables. Instead, resizing makes an entirely new XHashtableState object
    /// in which all entries are rehashed. This strategy allows reader threads to continue finding values in the "old"
    /// XHashtableState, while writer threads (those that need to add a new value to the table) are blocked waiting for
    /// the resize to complete.
    /// </remarks>
    private sealed class XHashtableState
    {
        private int[] _buckets;                  // Buckets contain indexes into entries array (bucket values are SHARED STATE)
        private Entry[] _entries;                // Entries contain linked lists of buckets (next pointers are SHARED STATE)
        private int _numEntries;                 // SHARED STATE: Current number of entries (including orphaned entries)
        private ExtractKeyDelegate _extractKey;  // Delegate called in order to extract string key embedded in hashed TValue

        private const int EndOfList = 0;         // End of linked list marker
        private const int FullList = -1;         // Indicates entries should not be added to end of linked list

        /// <summary>
        /// Construct a new XHashtableState object with the specified capacity.
        /// </summary>
        public XHashtableState(ExtractKeyDelegate extractKey, int capacity)
        {
            Debug.Assert((capacity & (capacity - 1)) == 0, "capacity must be a power of 2");
            Debug.Assert(extractKey != null, "extractKey may not be null");

            // Initialize hash table data structures, with specified maximum capacity
            _buckets = new int[capacity];
            _entries = new Entry[capacity];

            // Save delegate
            _extractKey = extractKey;
        }

        /// <summary>
        /// If this table is not full, then just return "this". Otherwise, create and return a new table with
        /// additional capacity, and rehash all values in the table.
        /// </summary>
        public XHashtableState Resize()
        {
            // No need to resize if there are open entries
            if (_numEntries < _buckets.Length)
                return this;

            int newSize = 0;

            // Determine capacity of resized hash table by first counting number of valid, non-orphaned entries
            // As this count proceeds, close all linked lists so that no additional entries can be added to them
            for (int bucketIdx = 0; bucketIdx < _buckets.Length; bucketIdx++)
            {
                int entryIdx = _buckets[bucketIdx];

                if (entryIdx == EndOfList)
                {
                    // Replace EndOfList with FullList, so that any threads still attempting to add will be forced to resize
                    entryIdx = Interlocked.CompareExchange(ref _buckets[bucketIdx], FullList, EndOfList);
                }

                // Loop until we've guaranteed that the list has been counted and closed to further adds
                while (entryIdx > EndOfList)
                {
                    // Count each valid entry
                    if (_extractKey(_entries[entryIdx].Value) != null)
                        newSize++;

                    if (_entries[entryIdx].Next == EndOfList)
                    {
                        // Replace EndOfList with FullList, so that any threads still attempting to add will be forced to resize
                        entryIdx = Interlocked.CompareExchange(ref _entries[entryIdx].Next, FullList, EndOfList);
                    }
                    else
                    {
                        // Move to next entry in the list
                        entryIdx = _entries[entryIdx].Next;
                    }
                }
                Debug.Assert(entryIdx == EndOfList, "Resize() should only be called by one thread");
            }

            // Double number of valid entries; if result is less than current capacity, then use current capacity
            if (newSize < _buckets.Length / 2)
            {
                newSize = _buckets.Length;
            }
            else
            {
                newSize = _buckets.Length * 2;

                if (newSize < 0)
                    throw new OverflowException();
            }

            // Create new hash table with additional capacity
            XHashtableState newHashtable = new XHashtableState(_extractKey, newSize);

            // Rehash names (TryAdd will always succeed, since we won't fill the new table)
            // Do not simply walk over entries and add them to table, as that would add orphaned
            // entries. Instead, walk the linked lists and add each name.
            for (int bucketIdx = 0; bucketIdx < _buckets.Length; bucketIdx++)
            {
                int entryIdx = _buckets[bucketIdx];
                TValue newValue;

                while (entryIdx > EndOfList)
                {
                    newHashtable.TryAdd(_entries[entryIdx].Value, out newValue);

                    entryIdx = _entries[entryIdx].Next;
                }
                Debug.Assert(entryIdx == FullList, "Linked list should have been closed when it was counted");
            }

            return newHashtable;
        }

        /// <summary>
        /// Attempt to find "key" in the table. If the key exists, return the associated value in "value" and
        /// return true. Otherwise return false.
        /// </summary>
        public bool TryGetValue(string key, int index, int count, out TValue value)
        {
            int hashCode = ComputeHashCode(key, index, count);
            int entryIndex = 0;

            // If a matching entry is found, return its value
            if (FindEntry(hashCode, key, index, count, ref entryIndex))
            {
                value = _entries[entryIndex].Value;
                return true;
            }

            // No matching entry found, so return false
            value = default(TValue);
            return false;
        }

        /// <summary>
        /// Attempt to add "value" to the table, hashed by an embedded string key. If a value having the same key already exists,
        /// then return the existing value in "newValue". Otherwise, return the newly added value in "newValue".
        ///
        /// If the hash table is full, return false. Otherwise, return true.
        /// </summary>
        public bool TryAdd(TValue value, out TValue newValue)
        {
            int newEntry, entryIndex;
            string key;
            int hashCode;

            // Assume "value" will be added and returned as "newValue"
            newValue = value;

            // Extract the key from the value. If it's null, then value is invalid and does not need to be added to table.
            key = _extractKey(value);
            if (key == null)
                return true;

            // Compute hash code over entire length of key
            hashCode = ComputeHashCode(key, 0, key.Length);

            // Assume value is not yet in the hash table, and prepare to add it (if table is full, return false).
            // Use the entry index returned from Increment, which will never be zero, as zero conflicts with EndOfList.
            // Although this means that the first entry will never be used, it avoids the need to initialize all
            // starting buckets to the EndOfList value.
            newEntry = Interlocked.Increment(ref _numEntries);
            if (newEntry < 0 || newEntry >= _buckets.Length)
                return false;

            _entries[newEntry].Value = value;
            _entries[newEntry].HashCode = hashCode;

            // Ensure that all writes to the entry can't be reordered past this barrier (or other threads might see new entry
            // in list before entry has been initialized!).
            Thread.MemoryBarrier();

            // Loop until a matching entry is found, a new entry is added, or linked list is found to be full
            entryIndex = 0;
            while (!FindEntry(hashCode, key, 0, key.Length, ref entryIndex))
            {
                // PUBLISH (buckets slot)
                // No matching entry found, so add the new entry to the end of the list ("entryIndex" is index of last entry)
                if (entryIndex == 0)
                    entryIndex = Interlocked.CompareExchange(ref _buckets[hashCode & (_buckets.Length - 1)], newEntry, EndOfList);
                else
                    entryIndex = Interlocked.CompareExchange(ref _entries[entryIndex].Next, newEntry, EndOfList);

                // Return true only if the CompareExchange succeeded (happens when replaced value is EndOfList).
                // Return false if the linked list turned out to be full because another thread is currently resizing
                // the hash table. In this case, entries[newEntry] is orphaned (not part of any linked list) and the
                // Add needs to be performed on the new hash table. Otherwise, keep looping, looking for new end of list.
                if (entryIndex <= EndOfList)
                    return entryIndex == EndOfList;
            }

            // Another thread already added the value while this thread was trying to add, so return that instance instead.
            // Note that entries[newEntry] will be orphaned (not part of any linked list) in this case
            newValue = _entries[entryIndex].Value;

            return true;
        }

        /// <summary>
        /// Searches a linked list of entries, beginning at "entryIndex". If "entryIndex" is 0, then search starts at a hash bucket instead.
        /// Each entry in the list is matched against the (hashCode, key, index, count) key. If a matching entry is found, then its
        /// entry index is returned in "entryIndex" and true is returned. If no matching entry is found, then the index of the last entry
        /// in the list (or 0 if list is empty) is returned in "entryIndex" and false is returned.
        /// </summary>
        /// <remarks>
        /// This method has the side effect of removing invalid entries from the list as it is traversed.
        /// </remarks>
        private bool FindEntry(int hashCode, string key, int index, int count, ref int entryIndex)
        {
            int previousIndex = entryIndex;
            int currentIndex;

            // Set initial value of currentIndex to index of the next entry following entryIndex
            if (previousIndex == 0)
                currentIndex = _buckets[hashCode & (_buckets.Length - 1)];
            else
                currentIndex = previousIndex;

            // Loop while not at end of list
            while (currentIndex > EndOfList)
            {
                // Check for matching hash code, then matching key
                if (_entries[currentIndex].HashCode == hashCode)
                {
                    string keyCompare = _extractKey(_entries[currentIndex].Value);

                    // If the key is invalid, then attempt to remove the current entry from the linked list.
                    // This is thread-safe in the case where the Next field points to another entry, since once a Next field points
                    // to another entry, it will never be modified to be EndOfList or FullList.
                    if (keyCompare == null)
                    {
                        if (_entries[currentIndex].Next > EndOfList)
                        {
                            // PUBLISH (buckets slot or entries slot)
                            // Entry is invalid, so modify previous entry to point to its next entry
                            _entries[currentIndex].Value = default(TValue);
                            currentIndex = _entries[currentIndex].Next;

                            if (previousIndex == 0)
                                _buckets[hashCode & (_buckets.Length - 1)] = currentIndex;
                            else
                                _entries[previousIndex].Next = currentIndex;

                            continue;
                        }
                    }
                    else
                    {
                        // Valid key, so compare keys
                        if (count == keyCompare.Length && string.CompareOrdinal(key, index, keyCompare, 0, count) == 0)
                        {
                            // Found match, so return true and matching entry in list
                            entryIndex = currentIndex;
                            return true;
                        }
                    }
                }

                // Move to next entry
                previousIndex = currentIndex;
                currentIndex = _entries[currentIndex].Next;
            }

            // Return false and last entry in list
            entryIndex = previousIndex;
            return false;
        }

        /// <summary>
        /// Compute hash code for a string key (index, count substring of "key"). The algorithm used is the same one used in NameTable.cs in System.Xml.
        /// </summary>
        private static int ComputeHashCode(string key, int index, int count)
        {
            Debug.Assert(key != null, "key should have been checked previously for null");
            return string.GetHashCode(key.AsSpan(index, count));
        }

        /// <summary>
        /// Hash table entry. The "Value" and "HashCode" fields are filled during initialization, and are never changed. The "Next"
        /// field is updated when a new entry is chained to this one, and therefore care must be taken to ensure that updates to
        /// this field are thread-safe.
        /// </summary>
        private struct Entry
        {
            public TValue Value;  // Hashed value
            public int HashCode;  // Hash code of string key (equal to extractKey(Value).GetHashCode())
            public int Next;      // SHARED STATE: Points to next entry in linked list
        }
    }
}
}
/******************************************************************************* * * Copyright (C) 2013-2014 Frozen North Computing * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. * *******************************************************************************/ using System; using System.Drawing; using System.Collections.Generic; #if FN2D_WIN using OpenTK.Graphics.OpenGL; using FN2DBitmap = System.Drawing.Bitmap; #elif FN2D_IOS using MonoTouch.UIKit; using OpenTK.Graphics.ES11; using FN2DBitmap = MonoTouch.UIKit.UIImage; using MatrixMode = OpenTK.Graphics.ES11.All; #elif FN2D_AND using OpenTK.Graphics.ES11; using FN2DBitmap = Android.Graphics.Bitmap; using MatrixMode = OpenTK.Graphics.ES11.All; #endif namespace FrozenNorth.OpenGL.FN2D { /// <summary> /// OpenGL 2D base control. 
/// </summary> public class FN2DControl : IDisposable { // public events public EventHandler Tapped = null; // instance variables protected FN2DCanvas canvas; protected FN2DControl parent = null; protected Rectangle frame; private FN2DControlList controls; protected FN2DRectangle background = new FN2DRectangle(); protected FN2DImage backgroundImage = null; protected float zoom = 1; protected float minZoom = 0.1f; protected float maxZoom = 10; protected Size pan = Size.Empty; protected float rotation = 0; protected bool touchEnabled = true; protected bool touching = false; protected bool enabled = true; protected bool visible = true; protected bool refreshEnabled = true; protected int tag = 0; /// <summary> /// Constructor - Creates a control with a gradient background with rounded corners. /// </summary> /// <param name="canvas">Canvas that the control is on.</param> /// <param name="frame">Position and size of the control.</param> /// <param name="cornerRadius">Radius for rounded corners.</param> /// <param name="topColor">Top color for the gradient background.</param> /// <param name="bottomColor">Bottom color for the gradient background.</param> public FN2DControl(FN2DCanvas canvas, Rectangle frame, int cornerRadius, Color topColor, Color bottomColor) { if (canvas == null) { throw new ArgumentNullException("FN2DControl: The canvas cannot be null."); } controls = new FN2DControlList(this); this.canvas = canvas; background.CornerRadius = cornerRadius; background.TopColor = topColor; background.BottomColor = bottomColor; Frame = frame; } /// <summary> /// Constructor - Creates a control with a solid background with rounded corners. 
/// </summary> /// <param name="canvas">Canvas that the control is on.</param> /// <param name="frame">Position and size of the control.</param> /// <param name="cornerRadius">Radius for rounded corners.</param> /// <param name="backgroundColor">Color of the background.</param> public FN2DControl(FN2DCanvas canvas, Rectangle frame, int cornerRadius, Color backgroundColor) : this(canvas, frame, cornerRadius, backgroundColor, backgroundColor) { } /// <summary> /// Constructor - Creates a control with a gradient background. /// </summary> /// <param name="canvas">Canvas that the control is on.</param> /// <param name="frame">Position and size of the control.</param> /// <param name="topColor">Top color for the gradient background.</param> /// <param name="bottomColor">Bottom color for the gradient background.</param> public FN2DControl(FN2DCanvas canvas, Rectangle frame, Color topColor, Color bottomColor) : this(canvas, frame, 0, topColor, bottomColor) { } /// <summary> /// Constructor - Creates a control with a solid background. /// </summary> /// <param name="canvas">Canvas that the control is on.</param> /// <param name="frame">Position and size of the control.</param> /// <param name="backgroundColor">Color of the background.</param> public FN2DControl(FN2DCanvas canvas, Rectangle frame, Color backgroundColor) : this(canvas, frame, 0, backgroundColor, backgroundColor) { } /// <summary> /// Constructor - Creates a control with no background. /// </summary> /// <param name="canvas">Canvas that the control is on.</param> /// <param name="frame">Position and size of the control.</param> public FN2DControl(FN2DCanvas canvas, Rectangle frame) : this(canvas, frame, Color.Transparent) { } /// <summary> /// Constructor - Creates a control with no background or frame. 
/// </summary> /// <param name="canvas">Canvas that the control is on.</param> public FN2DControl(FN2DCanvas canvas) : this(canvas, Rectangle.Empty, Color.Transparent) { } /// <summary> /// Destructor - Calls Dispose(). /// </summary> ~FN2DControl() { Dispose(false); } /// <summary> /// Releases all resource used by the object. /// </summary> public void Dispose() { Dispose(true); GC.SuppressFinalize(this); } /// <summary> /// Frees unmanaged resources and calls Dispose() on the member objects. /// </summary> protected virtual void Dispose(bool disposing) { // if we got here via Dispose(), call Dispose() on the member objects if (disposing) { if (controls != null) { for (int i = controls.Count - 1; i >= 0; i--) { FN2DControl control = controls[i]; controls.RemoveAt(i); control.Dispose(); } } if (background != null) background.Dispose(); if (backgroundImage != null) backgroundImage.Dispose(); } // clear the object references canvas = null; controls = null; background = null; backgroundImage = null; } /// <summary> /// Initializes the drawing state. 
/// </summary> /// <param name="parentBounds">Bounds of the parent control.</param> public virtual Rectangle DrawBegin(Rectangle parentBounds) { // get the control bounds Rectangle bounds = new Rectangle(parentBounds.X + X, parentBounds.Y + Y, Width, Height); //Console.WriteLine("DrawBegin: " + parentBounds + " " + frame + " " + bounds); // reset the project matrix GL.MatrixMode(MatrixMode.Projection); GL.PushMatrix(); GL.LoadIdentity(); // apply zoom and pan to the bounds SizeF zoomOffset = new SizeF((bounds.Width / zoom - bounds.Width) / 2, (bounds.Height / zoom - bounds.Height) / 2); PointF zoomTL = new PointF(-zoomOffset.Width, -zoomOffset.Height); PointF zoomBR = new PointF(bounds.Width + zoomOffset.Width, bounds.Height + zoomOffset.Height); PointF panTL = new PointF(zoomTL.X - pan.Width, zoomTL.Y - pan.Height); PointF panBR = new PointF(zoomBR.X - pan.Width, zoomBR.Y - pan.Height); // set the orthographic projection for the control GL.Ortho(panTL.X, panBR.X, panBR.Y, panTL.Y, -1, 1); // bound the control to its screen location GL.MatrixMode(MatrixMode.Modelview); GL.PushMatrix(); int y = (canvas != null) ? ((int)canvas.Size.Height - bounds.Y - bounds.Height) : bounds.Y; GL.Viewport(bounds.X, y, bounds.Width, bounds.Height); bounds.Intersect(parentBounds); y = (canvas != null) ? ((int)canvas.Size.Height - bounds.Y - bounds.Height) : bounds.Y; GL.Scissor(bounds.X, y, bounds.Width, bounds.Height); //Console.WriteLine("DrawBegin: " + parentBounds + " " + frame + " " + bounds + " " + y); if (rotation != 0) { GL.Translate(bounds.Width / 2, bounds.Height / 2, 0); GL.Rotate(rotation, 0, 0, 1); GL.Translate(-bounds.Width / 2, -bounds.Height / 2, 0); } return bounds; } /// <summary> /// Restore the state after drawing /// </summary> public virtual void DrawEnd() { GL.MatrixMode(MatrixMode.Projection); GL.PopMatrix(); GL.MatrixMode(MatrixMode.Modelview); GL.PopMatrix(); } /// <summary> /// Draws the background. 
/// </summary>
public virtual void DrawBackground()
{
    // reset the current color before drawing the background shape
    GL.Color4(0, 0, 0, 0);
    background.Draw();
    if (backgroundImage != null)
    {
        backgroundImage.Draw();
    }
}

/// <summary>
/// Draws the control. Base implementation draws nothing; subclasses override.
/// </summary>
public virtual void Draw()
{
}

/// <summary>
/// Draws the sub-controls.
/// </summary>
/// <param name="bounds">Bounds (already clipped) within which the children draw.</param>
public virtual void DrawControls(Rectangle bounds)
{
    // draw the controls in list order (later entries paint on top)
    foreach (FN2DControl control in controls)
    {
        if (control.Visible)
        {
            // initialize the state
            Rectangle controlBounds = control.DrawBegin(bounds);

            // draw the control and all of its sub-controls
            control.DrawBackground();
            control.Draw();
            control.DrawControls(controlBounds);

            // restore the state
            control.DrawEnd();
        }
    }
}

/// <summary>
/// Gets or sets the canvas. Setting propagates the canvas to all sub-controls.
/// </summary>
public virtual FN2DCanvas Canvas
{
    get { return canvas; }
    set
    {
        canvas = value;
        foreach (FN2DControl control in controls)
        {
            control.Canvas = canvas;
        }
    }
}

/// <summary>
/// Gets the list of sub-controls.
/// </summary>
public FN2DControlList Controls
{
    get { return controls; }
}

/// <summary>
/// Gets or sets the corner radius.
/// </summary>
public virtual int CornerRadius
{
    get { return background.CornerRadius; }
    set
    {
        background.CornerRadius = value;
        Refresh();
    }
}

/// <summary>
/// Gets or sets the top background color.
/// </summary>
public virtual Color TopColor
{
    get { return background.TopColor; }
    set
    {
        background.TopColor = value;
        Refresh();
    }
}

/// <summary>
/// Gets or sets the bottom background color.
/// </summary>
public virtual Color BottomColor
{
    get { return background.BottomColor; }
    set
    {
        background.BottomColor = value;
        Refresh();
    }
}

/// <summary>
/// Gets or sets the color of the background.
/// </summary>
public virtual Color BackgroundColor
{
    get { return background.Color; }
    set
    {
        background.Color = value;
        Refresh();
    }
}

/// <summary>
/// Gets or sets the background image. Setting null disposes and clears the image control.
/// </summary>
public virtual FN2DBitmap BackgroundImage
{
    get { return (backgroundImage != null) ? backgroundImage.Image : null; }
    set
    {
        if (value != null)
        {
            if (backgroundImage == null)
            {
                backgroundImage = new FN2DImage(canvas, value);
            }
            else
            {
                backgroundImage.Image = value;
            }
        }
        else if (backgroundImage != null)
        {
            // FIX: previously dereferenced backgroundImage unconditionally here,
            // throwing NullReferenceException when clearing an already-null image
            backgroundImage.Dispose();
            backgroundImage = null;
        }
        Refresh();
    }
}

/// <summary>
/// Gets the background image control.
/// </summary>
public virtual FN2DImage BackgroundImageControl
{
    get { return backgroundImage; }
}

/// <summary>
/// Gets or sets the zoom factor, clamped to [MinZoom, MaxZoom].
/// Changing the zoom rescales the current pan proportionally.
/// </summary>
public virtual float Zoom
{
    get { return zoom; }
    set
    {
        // range check the new zoom
        float newZoom = value;
        if (newZoom < minZoom)
            newZoom = minZoom;
        else if (newZoom > maxZoom)
            newZoom = maxZoom;

        // if the zoom has changed
        if (newZoom != zoom)
        {
            // set the zoom
            float ratio = newZoom / zoom;
            zoom = newZoom;
            //Console.WriteLine("Zoom = " + zoom + " " + minZoom + " " + maxZoom);
            canvas.IsDirty = true;

            // adjust the pan by the relative zoom change
            Pan = new Size((int)(pan.Width * ratio), (int)(pan.Height * ratio));
        }
    }
}

/// <summary>
/// Gets or sets the minimum zoom.
/// </summary>
public virtual float MinZoom
{
    get { return minZoom; }
    set { minZoom = value; }
}

/// <summary>
/// Gets or sets the maximum zoom.
/// </summary>
public virtual float MaxZoom
{
    get { return maxZoom; }
    set { maxZoom = value; }
}

/// <summary>
/// Gets or sets the pan.
/// </summary>
public virtual Size Pan
{
    get { return pan; }
    set
    {
        // range check the new pan: clamp each axis into [-MaxPan, +MaxPan],
        // and force it to 0 when no panning room exists on that axis
        Size newPan = value;
        Size maxPan = MaxPan;
        if (maxPan.Width == 0)
            newPan.Width = 0;
        else if (newPan.Width < -maxPan.Width)
            newPan.Width = -maxPan.Width;
        else if (newPan.Width > maxPan.Width)
            newPan.Width = maxPan.Width;
        if (maxPan.Height == 0)
            newPan.Height = 0;
        else if (newPan.Height < -maxPan.Height)
            newPan.Height = -maxPan.Height;
        else if (newPan.Height > maxPan.Height)
            newPan.Height = maxPan.Height;

        // if the pan has changed, set it
        if (newPan != pan)
        {
            pan = newPan;
            //Console.WriteLine("Pan = " + pan + " " + MaxPan);
            // NOTE(review): canvas is dereferenced unguarded here (unlike DrawBegin,
            // which checks for null) — presumably canvas is always set by this point; confirm
            canvas.IsDirty = true;
        }
    }
}

/// <summary>
/// Gets the maximum pan.
/// </summary>
public virtual Size MaxPan
{
    get
    {
        // panning room per axis is half the size gained by zooming in; never negative
        return new Size((int)Math.Max(Math.Round(-(Width / Zoom - Width) / 2), 0),
            (int)Math.Max(Math.Round(-(Height / Zoom - Height) / 2), 0));
    }
}

/// <summary>
/// Determines whether or not panning is possible.
/// </summary>
public virtual bool CanPan
{
    get { return MaxPan != Size.Empty; }
}

/// <summary>
/// Gets or sets the rotation angle.
/// </summary>
public virtual float Rotation
{
    get { return rotation; }
    set
    {
        if (value != rotation)
        {
            rotation = value;
            Refresh();
        }
    }
}

/// <summary>
/// Gets or sets the frame. Fires Before/After Move and Resize hooks around the change.
/// </summary>
public virtual Rectangle Frame
{
    get { return frame; }
    set
    {
        // determine what has changed
        bool moved = value.X != frame.X || value.Y != frame.Y;
        bool resized = value.Width != frame.Width || value.Height != frame.Height;

        // fire the before events
        if (moved)
        {
            BeforeMove();
        }
        if (resized)
        {
            BeforeResize();
        }

        // change the frame
        frame = value;

        // fire the after events
        if (moved)
        {
            AfterMove();
        }
        if (resized)
        {
            AfterResize();
        }
    }
}

/// <summary>
/// Gets or sets the location.
/// </summary>
public virtual Point Location
{
    get { return frame.Location; }
    set { Frame = new Rectangle(value, frame.Size); }
}

/// <summary>
/// Gets or sets the size.
/// </summary>
public virtual Size Size
{
    get { return frame.Size; }
    set
    {
        Rectangle newFrame = frame;
        newFrame.Size = value;
        Frame = newFrame;
    }
}

/// <summary>
/// Gets or sets the x position.
/// </summary>
public virtual int X
{
    get { return frame.X; }
    set
    {
        Rectangle newFrame = frame;
        newFrame.X = value;
        Frame = newFrame;
    }
}

/// <summary>
/// Gets or sets the y position.
/// </summary>
public virtual int Y
{
    get { return frame.Y; }
    set
    {
        Rectangle newFrame = frame;
        newFrame.Y = value;
        Frame = newFrame;
    }
}

/// <summary>
/// Gets or sets the width.
/// </summary>
public virtual int Width
{
    get { return frame.Width; }
    set
    {
        Rectangle newFrame = frame;
        newFrame.Width = value;
        Frame = newFrame;
    }
}

/// <summary>
/// Gets or sets the height.
/// </summary>
public virtual int Height
{
    get { return frame.Height; }
    set
    {
        Rectangle newFrame = frame;
        newFrame.Height = value;
        Frame = newFrame;
    }
}

/// <summary>
/// Gets the right position (x + width).
/// </summary>
public virtual int Right
{
    get { return frame.Right; }
}

/// <summary>
/// Gets the bottom position (y + height).
/// </summary>
public virtual int Bottom
{
    get { return frame.Bottom; }
}

/// <summary>
/// Gets or sets the center position.
/// </summary>
public virtual Point Center
{
    get
    {
        Point center = frame.Location;
        center.Offset(frame.Width / 2, frame.Height / 2);
        return center;
    }
    set
    {
        Point topLeft = new Point(value.X - frame.Width / 2, value.Y - frame.Height / 2);
        Location = topLeft;
    }
}

/// <summary>
/// Sizes the control to fit all the sub-controls with an optional margin.
/// </summary>
/// <param name="margin">Padding applied on all sides; negative values are treated as 0.</param>
public virtual void SizeToFit(int margin = 0)
{
    // make sure the margin is positive
    if (margin < 0)
    {
        margin = 0;
    }

    // find the top-left and bottom-right locations
    // NOTE(review): both start at the origin, so content entirely at positive
    // coordinates keeps topLeft at (0,0) — presumably intentional (origin preserved); confirm
    Point topLeft = Point.Empty;
    Point bottomRight = Point.Empty;
    foreach (FN2DControl control in controls)
    {
        if (control.X < topLeft.X)
        {
            topLeft.X = control.X;
        }
        if (control.Y < topLeft.Y)
        {
            topLeft.Y = control.Y;
        }
        if (control.Right > bottomRight.X)
        {
            bottomRight.X = control.Right;
        }
        if (control.Bottom > bottomRight.Y)
        {
            bottomRight.Y = control.Bottom;
        }
    }

    // position the controls (shift everything so the extents start at the margin)
    foreach (FN2DControl control in controls)
    {
        control.Location = new Point(control.X - topLeft.X + margin, control.Y - topLeft.Y + margin);
    }

    // set the size
    Size = new Size(bottomRight.X - topLeft.X + margin * 2, bottomRight.Y - topLeft.Y + margin * 2);
}

/// <summary>
/// Gets or sets the tag.
/// </summary>
public virtual int Tag
{
    get { return tag; }
    set { tag = value; }
}

/// <summary>
/// Gets or sets whether or not the control is visible.
/// </summary>
public virtual bool Visible
{
    get { return visible; }
    set
    {
        if (value != visible)
        {
            visible = value;
            // a hidden control must not remain the active touch target
            if (!visible)
            {
                Touching = false;
            }
        }
    }
}

/// <summary>
/// Gets or sets whether or not the control is enabled. Propagates to all sub-controls.
/// </summary>
public virtual bool Enabled
{
    get { return enabled; }
    set
    {
        if (value != enabled)
        {
            enabled = value;
            // a disabled control must not remain the active touch target
            if (!enabled)
            {
                Touching = false;
            }
            foreach (FN2DControl control in controls)
            {
                control.Enabled = enabled;
            }
            Refresh();
        }
    }
}

/// <summary>
/// Gets or sets whether or not refreshing is enabled.
/// Re-enabling triggers an immediate Refresh() to catch up on deferred updates.
/// </summary>
public virtual bool RefreshEnabled
{
    get { return refreshEnabled; }
    set
    {
        if (value != refreshEnabled)
        {
            refreshEnabled = value;
            if (refreshEnabled)
            {
                Refresh();
            }
        }
    }
}

/// <summary>
/// Gets or sets whether or not touch input is enabled.
/// </summary>
public virtual bool TouchEnabled
{
    get { return touchEnabled; }
    set
    {
        if (value != touchEnabled)
        {
            touchEnabled = value;
            // stop tracking a touch on a control that can no longer receive input
            if (!touchEnabled)
            {
                Touching = false;
            }
            Refresh();
        }
    }
}

/// <summary>
/// Gets whether or not touching is active.
/// Setting registers/unregisters this control as the canvas's current touch control.
/// </summary>
public virtual bool Touching
{
    get { return touching; }
    set
    {
        touching = value;
        // NOTE(review): canvas is dereferenced unguarded here; presumably set by this point — confirm
        canvas.TouchControl = touching ? this : null;
    }
}

/// <summary>
/// Perform special actions before being resized.
/// </summary>
public virtual void BeforeResize()
{
}

/// <summary>
/// Perform special actions after being resized.
/// </summary>
public virtual void AfterResize()
{
    Refresh();
}

/// <summary>
/// Perform special actions before being moved.
/// </summary>
public virtual void BeforeMove()
{
}

/// <summary>
/// Perform special actions after being moved.
/// </summary>
public virtual void AfterMove()
{
    canvas.IsDirty = true;
}

/// <summary>
/// Refreshes the OpenGL drawing values after an update of the control values.
/// No-op while RefreshEnabled is false (used to batch many changes into one refresh).
/// </summary>
public virtual void Refresh()
{
    if (refreshEnabled)
    {
        // set the background sizes
        background.Frame = new Rectangle(Point.Empty, Size);
        if (backgroundImage != null)
        {
            backgroundImage.Size = Size;
            backgroundImage.Fill();
        }

        // set the dirty flag
        canvas.IsDirty = true;
    }
}

/// <summary>
/// Gets or sets the control that contains this control.
/// </summary>
public virtual FN2DControl Parent
{
    get { return parent; }
    set { parent = value; }
}

/// <summary>
/// Adds a control to the root control's list of sub-controls.
/// </summary>
/// <param name="control">Control to be added.</param>
public virtual void Add(FN2DControl control)
{
    controls.Add(control);
}

/// <summary>
/// Inserts a control into the root control's list of sub-controls at a specific position.
/// </summary>
/// <param name="index">Position at which to insert the control.</param>
/// <param name="control">Control to be inserted.</param>
public virtual void Insert(int index, FN2DControl control)
{
    controls.Insert(index, control);
}

/// <summary>
/// Removes a control from the root control's list of sub-controls.
/// </summary>
/// <returns>True if the control was found and removed, false otherwise.</returns>
public virtual bool Remove(FN2DControl control)
{
    return controls.Remove(control);
}

/// <summary>
/// Determines if a control is a sub-control of this control.
/// Searches recursively through the whole control tree.
/// </summary>
/// <param name="control">Control to look for in this control.</param>
/// <returns>True if found, false if not</returns>
public virtual bool Contains(FN2DControl control)
{
    foreach (FN2DControl ctrl in controls)
    {
        if (ctrl == control || ctrl.Contains(control))
        {
            return true;
        }
    }
    return false;
}

/// <summary>
/// Gets the topmost sub-control that contains a specific location.
/// </summary>
/// <param name="location">Location to test.</param>
/// <returns>Topmost control if the location is within this control, null if not.</returns>
public virtual FN2DControl HitTest(Point location)
{
    if (Visible && Enabled && TouchEnabled && Frame.Contains(location))
    {
        // translate into this control's coordinate space before recursing
        Point controlLocation = new Point(location.X - Frame.X, location.Y - Frame.Y);
        // iterate backwards: later children draw on top, so they win the hit test
        for (int i = controls.Count - 1; i >= 0; i--)
        {
            FN2DControl hitControl = controls[i].HitTest(controlLocation);
            if (hitControl != null)
            {
                // NOTE(review): Point is a value type passed by value, so these two
                // assignments never reach the caller — looks like dead code or a
                // missing 'ref' on the parameter; confirm intent before removing
                location.X = controlLocation.X;
                location.Y = controlLocation.Y;
                return hitControl;
            }
        }
        return this;
    }
    return null;
}

/// <summary>
/// Called when a touch down event occurs on the control.
/// </summary>
/// <param name="e">Touch event arguments.</param>
public virtual void TouchDown(FN2DTouchEventArgs e)
{
}

/// <summary>
/// Called when a touch move event occurs when this is the current touch control.
/// </summary>
/// <param name="e">Touch event arguments.</param>
public virtual void TouchMove(FN2DTouchEventArgs e)
{
}

/// <summary>
/// Called when a touch up event occurs when this is the current touch control.
/// </summary>
/// <param name="e">Touch event arguments.</param>
public virtual void TouchUp(FN2DTouchEventArgs e)
{
    // fire Tapped only when the touch was tracked by this control and released
    // inside its bounds (a tap, not a drag that ended elsewhere)
    if (Tapped != null && Touching &&
        e.Location.X >= 0 && e.Location.X < Frame.Width &&
        e.Location.Y >= 0 && e.Location.Y < Frame.Height)
    {
        Tapped(this, EventArgs.Empty);
    }
}

/// <summary>
/// Called when a touch cancel event occurs when this is the current touch control.
/// </summary>
/// <param name="e">Touch event arguments.</param>
public virtual void TouchCancel(FN2DTouchEventArgs e)
{
}
}

/// <summary>
/// OpenGL 2D list of controls. Keeps each contained control's Parent in sync with
/// the owning control as items are added and removed.
/// NOTE: the mutators are 'new' (hiding, not overriding), so Parent bookkeeping only
/// happens when calling through an FN2DControlList reference.
/// </summary>
public class FN2DControlList : List<FN2DControl>
{
    // control that owns this list; becomes the Parent of every added control (may be null)
    private FN2DControl owner;

    public FN2DControlList(FN2DControl owner = null)
    {
        this.owner = owner;
    }

    public new void Add(FN2DControl control)
    {
        if (control != null && owner != null)
        {
            control.Parent = owner;
        }
        base.Add(control);
    }

    public new void AddRange(IEnumerable<FN2DControl> collection)
    {
        // materialize once so a lazy sequence isn't enumerated twice
        // (once for Parent assignment, once by the base insertion)
        var items = collection as IList<FN2DControl> ?? new List<FN2DControl>(collection);
        if (owner != null)
        {
            foreach (FN2DControl control in items)
            {
                if (control != null)
                {
                    control.Parent = owner;
                }
            }
        }
        base.AddRange(items);
    }

    public new void Insert(int index, FN2DControl control)
    {
        if (control != null && owner != null)
        {
            control.Parent = owner;
        }
        base.Insert(index, control);
    }

    public new void InsertRange(int index, IEnumerable<FN2DControl> collection)
    {
        // materialize once so a lazy sequence isn't enumerated twice
        var items = collection as IList<FN2DControl> ?? new List<FN2DControl>(collection);
        if (owner != null)
        {
            foreach (FN2DControl control in items)
            {
                if (control != null)
                {
                    control.Parent = owner;
                }
            }
        }
        base.InsertRange(index, items);
    }

    public new bool Remove(FN2DControl control)
    {
        // FIX: clear Parent only when the control was actually in this list
        // (previously Parent was cleared even when removal failed)
        bool removed = base.Remove(control);
        if (removed && control != null)
        {
            control.Parent = null;
        }
        return removed;
    }

    public new void RemoveAt(int index)
    {
        if (index >= 0 && index < Count)
        {
            FN2DControl control = this[index];
            if (control != null)
            {
                control.Parent = null;
            }
        }
        base.RemoveAt(index);
    }

    public new void RemoveRange(int index, int count)
    {
        int n = 0;
        for (int i = index; i < Count && n < count; i++, n++)
        {
            FN2DControl control = this[i];
            if (control != null)
            {
                control.Parent = null;
            }
        }
        base.RemoveRange(index, count);
    }

    public new int RemoveAll(Predicate<FN2DControl> match)
    {
        // FIX: detach only the controls that will actually be removed
        // (the previous implementation cleared Parent on EVERY control in the list)
        foreach (FN2DControl control in this)
        {
            if (control != null && match(control))
            {
                control.Parent = null;
            }
        }
        return base.RemoveAll(match);
    }

    /// <summary>
    /// Moves a control to the end of the list so it draws on top.
    /// </summary>
    public virtual void BringToFront(FN2DControl control)
    {
        int i = IndexOf(control);
        if (i != -1 && i != Count - 1)
        {
            Remove(control);
            Add(control);
        }
    }

    /// <summary>
    /// Moves a control to the start of the list so it draws underneath the others.
    /// </summary>
    public virtual void SendToBack(FN2DControl control)
    {
        int i = IndexOf(control);
        if (i > 0)
        {
            Remove(control);
            Insert(0, control);
        }
    }
}

/// <summary>
/// Event arguments carrying the location of a touch event.
/// </summary>
public class FN2DTouchEventArgs : EventArgs
{
    public Point Location;

    public FN2DTouchEventArgs(int x, int y)
    {
        Location.X = x;
        Location.Y = y;
    }

    public FN2DTouchEventArgs(Point location)
        : this(location.X, location.Y)
    {
    }

    public FN2DTouchEventArgs(PointF location)
        : this((int)location.X, (int)location.Y)
    {
    }

    public new static FN2DTouchEventArgs Empty
    {
        get { return new FN2DTouchEventArgs(Point.Empty); }
    }
}
}
#region Licence...

/*
The MIT License (MIT)
Copyright (c) 2014 Oleg Shilo
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
*/

#endregion

using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using Microsoft.Deployment.WindowsInstaller;

namespace WixSharp
{
/// <summary>
/// Method delegate implementing WiX CustomAction.
/// </summary>
public delegate ActionResult CustomActionMethod(Session session);

/// <summary>
/// Defines WiX Managed CustomAction.
/// <para>
/// Managed CustomAction can be defined either in the Wix# script or in the external assembly or C# file.
/// The only requirement for any C# method to be qualified for being a Managed CustomAction is to
/// have DTF Action signature <c>public static ActionResult MyManagedAction(Session session)</c>, and be
/// marked with <c>[CustomAction]</c> attribute.
/// </para>
/// <para>
/// If Managed CustomAction depends on any assembly, which will not be registered with GAC on the
/// target system such assembly needs to be listed in the <see cref="ManagedAction.RefAssemblies"/>.
/// </para>
/// <remarks>
/// <see cref="ManagedAction"/> often needs to be executed with the elevated privileges. Thus after instantiation it will have
/// <see cref="Action.Impersonate"/> set to <c>false</c> and <see cref="Action.Execute"/> set to <c>Execute.deferred</c> to allow elevating.
/// </remarks>
/// </summary>
/// <example>The following is an example of using <c>MyManagedAction</c> method of the class
/// <c>CustomActions</c> as a Managed CustomAction.
/// <code>
/// class Script
/// {
///     static public void Main(string[] args)
///     {
///         var project =
///             new Project("My Product",
///
///                 new Dir(@"%ProgramFiles%\My Company\My Product",
///
///                     new File(@"AppFiles\MyApp.exe",
///                         new WixSharp.Shortcut("MyApp", @"%ProgramMenu%\My Company\My Product"),
///                         new WixSharp.Shortcut("MyApp", @"%Desktop%")),
///
///                     new File(@"AppFiles\Readme.txt"),
///
///                 new ManagedAction(CustomActions.MyManagedAction),
///
///                 ...
///
///         Compiler.BuildMsi(project);
///     }
/// }
///
/// public class CustomActions
/// {
///     [CustomAction]
///     public static ActionResult MyManagedAction(Session session)
///     {
///         MessageBox.Show("Hello World!", "Managed CA");
///         return ActionResult.Success;
///     }
/// }
/// </code>
/// </example>
public partial class ManagedAction : Action
{
    /// <summary>
    /// Initializes a new instance of the <see cref="ManagedAction"/> class.
    /// </summary>
    public ManagedAction()
    {
        // managed actions default to Return.check so a failed action aborts the install
        Return = Return.check;
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="ManagedAction"/> class with properties/fields initialized with specified parameters.
    /// </summary>
    /// <param name="name">Name of the CustomAction.
/// The name should match the method implementing the custom action functionality.</param>
    public ManagedAction(string name)
        : base()
    {
        SetManagedMethod(name);
        Return = Return.check;
    }

    /// <summary>
    /// Creates a <see cref="ManagedAction"/> bound to the given CustomAction method.
    /// </summary>
    /// <param name="action">The full name of static CustomAction method.</param>
    public ManagedAction(CustomActionMethod action)
        : base()
    {
        SetManagedMethod(action.Method.Name);
        Return = Return.check;
    }

    /// <summary>
    /// Creates a <see cref="ManagedAction"/> with an explicit Id.
    /// </summary>
    /// <param name="id">The explicit <see cref="Id"></see> to be associated with <see cref="ManagedAction"/> instance.</param>
    /// <param name="action">The full name of static CustomAction method.</param>
    public ManagedAction(Id id, CustomActionMethod action)
        : base(id)
    {
        SetManagedMethod(action.Method.Name);
        Return = Return.check;
    }

    /// <summary>
    /// Creates a <see cref="ManagedAction"/> hosted in an external assembly.
    /// </summary>
    /// <param name="action">The full name of static CustomAction method.</param>
    /// <param name="actionAssembly">Path to the assembly containing the CustomAction implementation.
    /// Specify <c>"%this%"</c> if the assembly is in the Wix# script.</param>
    public ManagedAction(CustomActionMethod action, string actionAssembly)
        : base()
    {
        SetManagedMethod(action.Method.Name);
        ActionAssembly = actionAssembly;
        base.Return = Return.check;
    }

    /// <summary>
    /// Creates a <see cref="ManagedAction"/> with an explicit Id, hosted in an external assembly.
    /// </summary>
    /// <param name="id">The explicit <see cref="Id"></see> to be associated with <see cref="ManagedAction"/> instance.</param>
    /// <param name="action">The full name of static CustomAction method.</param>
    /// <param name="actionAssembly">Path to the assembly containing the CustomAction implementation.
    /// Specify <c>"%this%"</c> if the assembly is in the Wix# script.</param>
    public ManagedAction(Id id, CustomActionMethod action, string actionAssembly)
        : base(id)
    {
        SetManagedMethod(action.Method.Name);
        ActionAssembly = actionAssembly;
        base.Return = Return.check;
    }

    /// <summary>
    /// Creates a fully scheduled <see cref="ManagedAction"/>.
    /// </summary>
    /// <param name="action">The full name of static CustomAction method.</param>
    /// <param name="returnType">The return type of the action.</param>
    /// <param name="when"><see cref="T:WixSharp.When"/> the action should be executed with respect to the <paramref name="step"/> parameter.</param>
    /// <param name="step"><see cref="T:WixSharp.Step"/> the action should be executed before/after during the installation.</param>
    /// <param name="condition">The launch condition for the <see cref="ManagedAction"/>.</param>
    public ManagedAction(CustomActionMethod action, Return returnType, When when, Step step, Condition condition)
        : base(returnType, when, step, condition)
    {
        SetManagedMethod(action.Method.Name);
    }

    /// <summary>
    /// Creates a fully scheduled <see cref="ManagedAction"/> hosted in an external assembly.
    /// </summary>
    /// <param name="action">The full name of static CustomAction method.</param>
    /// <param name="actionAssembly">Path to the assembly containing the CustomAction implementation.
    /// Specify <c>"%this%"</c> if the assembly is in the Wix# script.</param>
    /// <param name="returnType">The return type of the action.</param>
    /// <param name="when"><see cref="T:WixSharp.When"/> the action should be executed with respect to the <paramref name="step"/> parameter.</param>
    /// <param name="step"><see cref="T:WixSharp.Step"/> the action should be executed before/after during the installation.</param>
    /// <param name="condition">The launch condition for the <see cref="ManagedAction"/>.</param>
    public ManagedAction(CustomActionMethod action, string actionAssembly, Return returnType, When when, Step step, Condition condition)
        : base(returnType, when, step, condition)
    {
        SetManagedMethod(action.Method.Name);
        ActionAssembly = actionAssembly;
    }

    /// <summary>
    /// Creates a fully scheduled <see cref="ManagedAction"/> with an explicit Id.
    /// </summary>
    /// <param name="id">The explicit <see cref="Id"></see> to be associated with <see cref="ManagedAction"/> instance.</param>
    /// <param name="action">The full name of static CustomAction method.</param>
    /// <param name="returnType">The return type of the action.</param>
    /// <param name="when"><see cref="T:WixSharp.When"/> the action should be executed with respect to the <paramref name="step"/> parameter.</param>
    /// <param name="step"><see cref="T:WixSharp.Step"/> the action should be executed before/after during the installation.</param>
    /// <param name="condition">The launch condition for the <see cref="ManagedAction"/>.</param>
    public ManagedAction(Id id, CustomActionMethod action, Return returnType, When when, Step step, Condition condition)
        : base(id, returnType, when, step, condition)
    {
        SetManagedMethod(action.Method.Name);
    }

    /// <summary>
    /// Creates a fully scheduled <see cref="ManagedAction"/> with an explicit Id, hosted in an external assembly.
    /// </summary>
    /// <param name="id">The explicit <see cref="Id"></see> to be associated with <see cref="ManagedAction"/> instance.</param>
    /// <param name="action">The full name of static CustomAction method.</param>
    /// <param name="actionAssembly">Path to the assembly containing the CustomAction implementation.
    /// Specify <c>"%this%"</c> if the assembly is in the Wix# script.</param>
    /// <param name="returnType">The return type of the action.</param>
    /// <param name="when"><see cref="T:WixSharp.When"/> the action should be executed with respect to the <paramref name="step"/> parameter.</param>
    /// <param name="step"><see cref="T:WixSharp.Step"/> the action should be executed before/after during the installation.</param>
    /// <param name="condition">The launch condition for the <see cref="ManagedAction"/>.</param>
    public ManagedAction(Id id, CustomActionMethod action, string actionAssembly, Return returnType, When when, Step step, Condition condition)
        : base(id, returnType, when, step, condition)
    {
        SetManagedMethod(action.Method.Name);
        ActionAssembly = actionAssembly;
    }

    /// <summary>
    /// Creates a fully scheduled <see cref="ManagedAction"/> in a specific MSI sequence.
    /// </summary>
    /// <param name="action">The full name of static CustomAction method.</param>
    /// <param name="returnType">The return type of the action.</param>
    /// <param name="when"><see cref="T:WixSharp.When"/> the action should be executed with respect to the <paramref name="step"/> parameter.</param>
    /// <param name="step"><see cref="T:WixSharp.Step"/> the action should be executed before/after during the installation.</param>
    /// <param name="condition">The launch condition for the <see cref="ManagedAction"/>.</param>
    /// <param name="sequence">The MSI sequence the action belongs to.</param>
    public ManagedAction(CustomActionMethod action, Return returnType, When when, Step step, Condition condition, Sequence sequence)
        : base(returnType, when, step, condition, sequence)
    {
        SetManagedMethod(action.Method.Name);
    }

    /// <summary>
    /// Creates a fully scheduled <see cref="ManagedAction"/> in a specific MSI sequence, hosted in an external assembly.
    /// </summary>
    /// <param name="action">The full name of static CustomAction method.</param>
    /// <param name="actionAssembly">Path to the assembly containing the CustomAction implementation.
    /// Specify <c>"%this%"</c> if the assembly is in the Wix# script.</param>
    /// <param name="returnType">The return type of the action.</param>
    /// <param name="when"><see cref="T:WixSharp.When"/> the action should be executed with respect to the <paramref name="step"/> parameter.</param>
    /// <param name="step"><see cref="T:WixSharp.Step"/> the action should be executed before/after during the installation.</param>
    /// <param name="condition">The launch condition for the <see cref="ManagedAction"/>.</param>
    /// <param name="sequence">The MSI sequence the action belongs to.</param>
    public ManagedAction(CustomActionMethod action, string actionAssembly, Return returnType, When when, Step step, Condition condition, Sequence sequence)
        : base(returnType, when, step, condition, sequence)
    {
        SetManagedMethod(action.Method.Name);
        ActionAssembly = actionAssembly;
    }

    /// <summary>
    /// Creates a fully scheduled <see cref="ManagedAction"/> in a specific MSI sequence, with an explicit Id.
    /// </summary>
    /// <param name="id">The explicit <see cref="Id"></see> to be associated with <see cref="ManagedAction"/> instance.</param>
    /// <param name="action">The full name of static CustomAction method.</param>
    /// <param name="returnType">The return type of the action.</param>
    /// <param name="when"><see cref="T:WixSharp.When"/> the action should be executed with respect to the <paramref name="step"/> parameter.</param>
    /// <param name="step"><see cref="T:WixSharp.Step"/> the action should be executed before/after during the installation.</param>
    /// <param name="condition">The launch condition for the <see cref="ManagedAction"/>.</param>
    /// <param name="sequence">The MSI sequence the action belongs to.</param>
    public ManagedAction(Id id, CustomActionMethod action, Return returnType, When when, Step step, Condition condition, Sequence sequence)
        : base(id, returnType, when, step, condition, sequence)
    {
        SetManagedMethod(action.Method.Name);
    }

    /// <summary>
    /// Creates a fully scheduled <see cref="ManagedAction"/> in a specific MSI sequence, with an explicit Id, hosted in an external assembly.
    /// </summary>
    /// <param name="id">The explicit <see cref="Id"></see> to be associated with <see cref="ManagedAction"/> instance.</param>
    /// <param name="action">The full name of static CustomAction method.</param>
    /// <param name="actionAssembly">Path to the assembly containing the CustomAction implementation.
    /// Specify <c>"%this%"</c> if the assembly is in the Wix# script.</param>
    /// <param name="returnType">The return type of the action.</param>
    /// <param name="when"><see cref="T:WixSharp.When"/> the action should be executed with respect to the <paramref name="step"/> parameter.</param>
    /// <param name="step"><see cref="T:WixSharp.Step"/> the action should be executed before/after during the installation.</param>
    /// <param name="condition">The launch condition for the <see cref="ManagedAction"/>.</param>
    /// <param name="sequence">The MSI sequence the action belongs to.</param>
    public ManagedAction(Id id, CustomActionMethod action, string actionAssembly, Return returnType, When when, Step step, Condition condition, Sequence sequence)
        : base(id, returnType, when, step, condition, sequence)
    {
        SetManagedMethod(action.Method.Name);
        ActionAssembly = actionAssembly;
    }

    /// <summary>
    /// Records the CustomAction method name and derives the unique WiX action name from it.
    /// </summary>
    /// <param name="methodName">Name of the method implementing the action.</param>
    void SetManagedMethod(string methodName)
    {
        // every action gets a unique, Id-friendly name: Action<N>_<method>
        Name = "Action" + (++count) + "_" + methodName;
        MethodName = methodName;
    }

    /// <summary>
    /// Collection of path strings for dependency assemblies to be included in MSI. <c>RefAssemblies</c> should be used if the Managed CustomAction
    /// depends on any assembly, which will not be registered with GAC on the target system.
    /// </summary>
    public string[] RefAssemblies = new string[0];

    internal int GetRefAssembliesHashCode(IEnumerable<string> defaultAssemblies)
    {
        // normalize to lower-case absolute paths and drop duplicates so the hash is
        // stable across equivalent relative/absolute spellings and declaration order
        var normalized = new List<string>();
        foreach (var assembly in RefAssemblies.Concat(defaultAssemblies))
        {
            var fullPath = System.IO.Path.GetFullPath(assembly).ToLower();
            if (!normalized.Contains(fullPath))
                normalized.Add(fullPath);
        }
        normalized.Sort();
        return normalized.GetItemsHashCode();
    }

    /// <summary>
    /// Path to the assembly containing the CustomAction implementation. Specify <c>"%this%"</c> if the assembly
    /// is in the Wix# script.
    /// <para>Default value is <c>%this%</c>.</para>
    /// </summary>
    public string ActionAssembly = "%this%";

    /// <summary>
    /// Name of the method implementing the Managed CustomAction action functionality.
    /// </summary>
    public string MethodName = "";

    /// <summary>
    /// Comma separated list of properties which the custom action is intended to use.
Set this property if you are implementing the 'deferred' (as well as 'rollback' and 'commit') action. /// <remarks> /// <para>Deferred custom actions cannot access any session property as the session is terminated at the time of the action execution (limitation of MSI). /// The standard way of overcoming this limitation is to create a new custom action for setting the property, set the property name to the name of the deferred action, /// set the property value to the specially formatted map, schedule the execution of the custom action and access the mapped properties only via <see cref="T:Microsoft.Deployment.WindowsInstaller.Session.CustomActionData"/>. /// </para> /// <para> All this can be done in a single hit with Wix# as it fully automates creation of the all mapping infrastructure. /// </para> /// </remarks> /// </summary> /// <example>The following is the example of passing the location of the MyApp.exe file in the deferred managed action. /// <code> /// var project = /// new Project("My Product", /// new Dir(@"%ProgramFiles%\My Company\My Product", /// new File(@"Files\MyApp.exe"), /// new File(@"Files\MyApp.exe.config")), /// new ElevatedManagedAction("ConfigureProduct", Return.check, When.After, Step.InstallFiles, Condition.NOT_Installed) /// { /// UsesProperties = "INSTALLDIR" /// }); /// /// ... /// /// [CustomAction] /// public static ActionResult ConfigureProduct(Session session) /// { /// string configFile = session.Property("INSTALLDIR") + "MyApp.exe.config"; /// ... /// </code> /// </example> /// <remarks> /// <para>Note that you don't even have to specify <c>UsesProperties = "INSTALLDIR"</c> as by default Wix# always maps INSTALLDIR and /// UILevel for all deferred managed actions. 
/// The default mapping is controlled by <see cref="ManagedAction.DefaultUsesProperties"/>.</para>
    /// <para>It is also possible to map the 'deferred' properties in the typical WiX way: </para>
    /// <code>UsesProperties = "CONFIG_FILE=[INSTALLDIR]MyApp.exe.config, APP_FILE=[INSTALLDIR]MyApp.exe"</code>
    /// </remarks>
    public string UsesProperties;

    /// <summary>
    /// The default properties mapped for use with the 'deferred' (as well as 'rollback' and 'commit') custom actions. See <see cref="ManagedAction.UsesProperties"/> for the details.
    /// <para>The default value is "INSTALLDIR,UILevel,ProductCode"</para>
    /// </summary>
    // FIX: the doc previously claimed the default was "INSTALLDIR,UILevel",
    // which did not match the actual value below
    public static string DefaultUsesProperties = "INSTALLDIR,UILevel,ProductCode";

    /// <summary>
    /// Builds the ';'-separated CustomActionData mapping from <see cref="UsesProperties"/> plus
    /// <see cref="DefaultUsesProperties"/>. Entries already containing '=' (e.g. "P=[INSTALLDIR]x")
    /// are kept as-is; bare property names P are expanded to "P=[P]".
    /// <para>NOTE: names listed in both sources are not de-duplicated.</para>
    /// </summary>
    internal string ExpandAllUsedProperties()
    {
        // UsesProperties may be null; the concatenation then yields a leading
        // separator, which RemoveEmptyEntries discards
        var allProps = (UsesProperties + "," + DefaultUsesProperties);
        var mappings = new List<string>();
        foreach (var item in allProps.Split(new[] { ',', ';' }, StringSplitOptions.RemoveEmptyEntries))
        {
            var property = item.Trim();
            if (property.Contains('=')) //e.g. INSTALLDIR=[INSTALLDIR]
                mappings.Add(property);
            else
                mappings.Add(string.Format("{0}=[{0}]", property));
        }
        return string.Join(";", mappings.ToArray());
    }
}
}
#region PDFsharp - A .NET library for processing PDF // // Authors: // Stefan Lange (mailto:Stefan.Lange@pdfsharp.com) // // Copyright (c) 2005-2009 empira Software GmbH, Cologne (Germany) // // http://www.pdfsharp.com // http://sourceforge.net/projects/pdfsharp // // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the "Software"), // to deal in the Software without restriction, including without limitation // the rights to use, copy, modify, merge, publish, distribute, sublicense, // and/or sell copies of the Software, and to permit persons to whom the // Software is furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included // in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL // THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER // DEALINGS IN THE SOFTWARE. #endregion using System; using System.Diagnostics; using PdfSharp.Internal; using PdfSharp.Pdf.IO; using PdfSharp.Pdf.Internal; namespace PdfSharp.Pdf.Filters { /// <summary> /// Applies standard filters to streams. /// </summary> public static class Filtering { /// <summary> /// Gets the filter specified by the case sensitive name. 
/// </summary>
public static Filter GetFilter(string filterName)
{
  // Filter names may appear with or without the leading PDF name slash.
  if (filterName.StartsWith("/"))
    filterName = filterName.Substring(1);

  // Accept both the full filter names and the standard short forms
  // some tools emit (AHx, A85, LZW, Fl). Each case hands back the
  // shared singleton exposed by the corresponding property below.
  switch (filterName)
  {
    case "ASCIIHexDecode":
    case "AHx":
      return ASCIIHexDecode;

    case "ASCII85Decode":
    case "A85":
      return ASCII85Decode;

    case "LZWDecode":
    case "LZW":
      return LzwDecode;

    case "FlateDecode":
    case "Fl":
      return FlateDecode;

    // Recognized by the PDF spec but not implemented here: report and
    // return null so callers can skip the decode step.
    case "RunLengthDecode":
    case "CCITTFaxDecode":
    case "JBIG2Decode":
    case "DCTDecode":
    case "JPXDecode":
    case "Crypt":
      Debug.WriteLine("Filter not implemented: " + filterName);
      return null;
  }
  throw new NotImplementedException("Unknown filter: " + filterName);
}

/// <summary>
/// Gets the filter singleton.
/// </summary>
public static ASCIIHexDecode ASCIIHexDecode
{
  // Lazily created, process-wide singleton; same instance GetFilter returns.
  get { return asciiHexDecode ?? (asciiHexDecode = new ASCIIHexDecode()); }
}
static ASCIIHexDecode asciiHexDecode;

/// <summary>
/// Gets the filter singleton.
/// </summary>
public static ASCII85Decode ASCII85Decode
{
  get { return ascii85Decode ?? (ascii85Decode = new ASCII85Decode()); }
}
static ASCII85Decode ascii85Decode;

/// <summary>
/// Gets the filter singleton.
/// </summary>
public static LzwDecode LzwDecode
{
  get { return lzwDecode ?? (lzwDecode = new LzwDecode()); }
}
static LzwDecode lzwDecode;

/// <summary>
/// Gets the filter singleton.
/// </summary>
public static FlateDecode FlateDecode
{
  get { return flateDecode ?? (flateDecode = new FlateDecode()); }
}
static FlateDecode flateDecode;

/// <summary>
/// Encodes the data with the specified filter; returns null for an
/// unimplemented (but recognized) filter name.
/// </summary>
public static byte[] Encode(byte[] data, string filterName)
{
  Filter filter = GetFilter(filterName);
  return filter == null ? null : filter.Encode(data);
}

/// <summary>
/// Encodes a raw string with the specified filter; returns null for an
/// unimplemented (but recognized) filter name.
/// </summary>
public static byte[] Encode(string rawString, string filterName)
{
  Filter filter = GetFilter(filterName);
  return filter == null ? null : filter.Encode(rawString);
}

/// <summary>
/// Decodes the data with the specified filter, passing the given decode
/// parameters through; returns null for an unimplemented filter name.
/// </summary>
public static byte[] Decode(byte[] data, string filterName, FilterParms parms)
{
  Filter filter = GetFilter(filterName);
  return filter == null ? null : filter.Decode(data, parms);
}

/// <summary>
/// Decodes the data with the specified filter.
/// </summary>
public static byte[] Decode(byte[] data, string filterName)
{
  Filter filter = GetFilter(filterName);
  return filter == null ? null : filter.Decode(data, null);
}

/// <summary>
/// Decodes the data with the specified filter item: a single /Name or an
/// array of names applied in sequence.
/// </summary>
public static byte[] Decode(byte[] data, PdfItem filterItem)
{
  byte[] result = null;

  PdfName name = filterItem as PdfName;
  if (name != null)
  {
    Filter filter = GetFilter(name.ToString());
    if (filter != null)
      result = filter.Decode(data);
  }
  else
  {
    PdfArray array = filterItem as PdfArray;
    if (array != null)
    {
      // Apply each filter of the chain to the output of the previous one.
      // NOTE(review): if any filter in the chain is unimplemented, the
      // recursive call yields null and the remaining steps (and the final
      // result) become null — preserved from the original behavior.
      foreach (PdfItem item in array)
        data = Filtering.Decode(data, item);
      result = data;
    }
  }
  return result;
}

/// <summary>
/// Decodes to a raw string with the specified filter, passing the given
/// decode parameters through; returns null for an unimplemented filter name.
/// </summary>
public static string DecodeToString(byte[] data, string filterName, FilterParms parms)
{
  Filter filter = GetFilter(filterName);
  return filter == null ? null : filter.DecodeToString(data, parms);
}

/// <summary>
/// Decodes to a raw string with the specified filter; returns null for an
/// unimplemented filter name.
/// </summary>
public static string DecodeToString(byte[] data, string filterName)
{
  Filter filter = GetFilter(filterName);
  return filter == null ? null : filter.DecodeToString(data, null);
}
}
}
/* * Copyright 2021 Google LLC All Rights Reserved. * Use of this source code is governed by a BSD-style * license that can be found in the LICENSE file or at * https://developers.google.com/open-source/licenses/bsd */ // <auto-generated> // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/api/endpoint.proto // </auto-generated> #pragma warning disable 1591, 0612, 3021 #region Designer generated code using pb = global::Google.Protobuf; using pbc = global::Google.Protobuf.Collections; using pbr = global::Google.Protobuf.Reflection; using scg = global::System.Collections.Generic; namespace Google.Api { /// <summary>Holder for reflection information generated from google/api/endpoint.proto</summary> public static partial class EndpointReflection { #region Descriptor /// <summary>File descriptor for google/api/endpoint.proto</summary> public static pbr::FileDescriptor Descriptor { get { return descriptor; } } private static pbr::FileDescriptor descriptor; static EndpointReflection() { byte[] descriptorData = global::System.Convert.FromBase64String( string.Concat( "Chlnb29nbGUvYXBpL2VuZHBvaW50LnByb3RvEgpnb29nbGUuYXBpIlEKCEVu", "ZHBvaW50EgwKBG5hbWUYASABKAkSEwoHYWxpYXNlcxgCIAMoCUICGAESDgoG", "dGFyZ2V0GGUgASgJEhIKCmFsbG93X2NvcnMYBSABKAhCbwoOY29tLmdvb2ds", "ZS5hcGlCDUVuZHBvaW50UHJvdG9QAVpFZ29vZ2xlLmdvbGFuZy5vcmcvZ2Vu", "cHJvdG8vZ29vZ2xlYXBpcy9hcGkvc2VydmljZWNvbmZpZztzZXJ2aWNlY29u", "ZmlnogIER0FQSWIGcHJvdG8z")); descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData, new pbr::FileDescriptor[] { }, new pbr::GeneratedClrTypeInfo(null, null, new pbr::GeneratedClrTypeInfo[] { new pbr::GeneratedClrTypeInfo(typeof(global::Google.Api.Endpoint), global::Google.Api.Endpoint.Parser, new[]{ "Name", "Aliases", "Target", "AllowCors" }, null, null, null, null) })); } #endregion } #region Messages /// <summary> /// `Endpoint` describes a network endpoint of a service that serves a set of /// APIs. It is commonly known as a service endpoint. 
A service may expose /// any number of service endpoints, and all service endpoints share the same /// service definition, such as quota limits and monitoring metrics. /// /// Example service configuration: /// /// name: library-example.googleapis.com /// endpoints: /// # Below entry makes 'google.example.library.v1.Library' /// # API be served from endpoint address library-example.googleapis.com. /// # It also allows HTTP OPTIONS calls to be passed to the backend, for /// # it to decide whether the subsequent cross-origin request is /// # allowed to proceed. /// - name: library-example.googleapis.com /// allow_cors: true /// </summary> public sealed partial class Endpoint : pb::IMessage<Endpoint> #if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE , pb::IBufferMessage #endif { private static readonly pb::MessageParser<Endpoint> _parser = new pb::MessageParser<Endpoint>(() => new Endpoint()); private pb::UnknownFieldSet _unknownFields; [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public static pb::MessageParser<Endpoint> Parser { get { return _parser; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public static pbr::MessageDescriptor Descriptor { get { return global::Google.Api.EndpointReflection.Descriptor.MessageTypes[0]; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] pbr::MessageDescriptor pb::IMessage.Descriptor { get { return Descriptor; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public Endpoint() { OnConstruction(); } partial void OnConstruction(); [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public Endpoint(Endpoint other) : this() { name_ = other.name_; aliases_ = 
other.aliases_.Clone(); target_ = other.target_; allowCors_ = other.allowCors_; _unknownFields = pb::UnknownFieldSet.Clone(other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public Endpoint Clone() { return new Endpoint(this); } /// <summary>Field number for the "name" field.</summary> public const int NameFieldNumber = 1; private string name_ = ""; /// <summary> /// The canonical name of this endpoint. /// </summary> [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public string Name { get { return name_; } set { name_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); } } /// <summary>Field number for the "aliases" field.</summary> public const int AliasesFieldNumber = 2; private static readonly pb::FieldCodec<string> _repeated_aliases_codec = pb::FieldCodec.ForString(18); private readonly pbc::RepeatedField<string> aliases_ = new pbc::RepeatedField<string>(); /// <summary> /// Unimplemented. Dot not use. /// /// DEPRECATED: This field is no longer supported. Instead of using aliases, /// please specify multiple [google.api.Endpoint][google.api.Endpoint] for each of the intended /// aliases. /// /// Additional names that this endpoint will be hosted on. /// </summary> [global::System.ObsoleteAttribute] [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public pbc::RepeatedField<string> Aliases { get { return aliases_; } } /// <summary>Field number for the "target" field.</summary> public const int TargetFieldNumber = 101; private string target_ = ""; /// <summary> /// The specification of an Internet routable address of API frontend that will /// handle requests to this [API /// Endpoint](https://cloud.google.com/apis/design/glossary). It should be /// either a valid IPv4 address or a fully-qualified domain name. 
For example, /// "8.8.8.8" or "myservice.appspot.com". /// </summary> [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public string Target { get { return target_; } set { target_ = pb::ProtoPreconditions.CheckNotNull(value, "value"); } } /// <summary>Field number for the "allow_cors" field.</summary> public const int AllowCorsFieldNumber = 5; private bool allowCors_; /// <summary> /// Allowing /// [CORS](https://en.wikipedia.org/wiki/Cross-origin_resource_sharing), aka /// cross-domain traffic, would allow the backends served from this endpoint to /// receive and respond to HTTP OPTIONS requests. The response will be used by /// the browser to determine whether the subsequent cross-origin request is /// allowed to proceed. /// </summary> [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public bool AllowCors { get { return allowCors_; } set { allowCors_ = value; } } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public override bool Equals(object other) { return Equals(other as Endpoint); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public bool Equals(Endpoint other) { if (ReferenceEquals(other, null)) { return false; } if (ReferenceEquals(other, this)) { return true; } if (Name != other.Name) return false; if(!aliases_.Equals(other.aliases_)) return false; if (Target != other.Target) return false; if (AllowCors != other.AllowCors) return false; return Equals(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public override int GetHashCode() { int hash = 1; if (Name.Length != 0) hash ^= Name.GetHashCode(); hash ^= aliases_.GetHashCode(); if (Target.Length != 
0) hash ^= Target.GetHashCode(); if (AllowCors != false) hash ^= AllowCors.GetHashCode(); if (_unknownFields != null) { hash ^= _unknownFields.GetHashCode(); } return hash; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public override string ToString() { return pb::JsonFormatter.ToDiagnosticString(this); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public void WriteTo(pb::CodedOutputStream output) { #if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE output.WriteRawMessage(this); #else if (Name.Length != 0) { output.WriteRawTag(10); output.WriteString(Name); } aliases_.WriteTo(output, _repeated_aliases_codec); if (AllowCors != false) { output.WriteRawTag(40); output.WriteBool(AllowCors); } if (Target.Length != 0) { output.WriteRawTag(170, 6); output.WriteString(Target); } if (_unknownFields != null) { _unknownFields.WriteTo(output); } #endif } #if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] void pb::IBufferMessage.InternalWriteTo(ref pb::WriteContext output) { if (Name.Length != 0) { output.WriteRawTag(10); output.WriteString(Name); } aliases_.WriteTo(ref output, _repeated_aliases_codec); if (AllowCors != false) { output.WriteRawTag(40); output.WriteBool(AllowCors); } if (Target.Length != 0) { output.WriteRawTag(170, 6); output.WriteString(Target); } if (_unknownFields != null) { _unknownFields.WriteTo(ref output); } } #endif [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public int CalculateSize() { int size = 0; if (Name.Length != 0) { size += 1 + pb::CodedOutputStream.ComputeStringSize(Name); } size += aliases_.CalculateSize(_repeated_aliases_codec); if (Target.Length != 0) { size += 2 + 
pb::CodedOutputStream.ComputeStringSize(Target); } if (AllowCors != false) { size += 1 + 1; } if (_unknownFields != null) { size += _unknownFields.CalculateSize(); } return size; } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public void MergeFrom(Endpoint other) { if (other == null) { return; } if (other.Name.Length != 0) { Name = other.Name; } aliases_.Add(other.aliases_); if (other.Target.Length != 0) { Target = other.Target; } if (other.AllowCors != false) { AllowCors = other.AllowCors; } _unknownFields = pb::UnknownFieldSet.MergeFrom(_unknownFields, other._unknownFields); } [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] public void MergeFrom(pb::CodedInputStream input) { #if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE input.ReadRawMessage(this); #else uint tag; while ((tag = input.ReadTag()) != 0) { switch(tag) { default: _unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, input); break; case 10: { Name = input.ReadString(); break; } case 18: { aliases_.AddEntriesFrom(input, _repeated_aliases_codec); break; } case 40: { AllowCors = input.ReadBool(); break; } case 810: { Target = input.ReadString(); break; } } } #endif } #if !GOOGLE_PROTOBUF_REFSTRUCT_COMPATIBILITY_MODE [global::System.Diagnostics.DebuggerNonUserCodeAttribute] [global::System.CodeDom.Compiler.GeneratedCode("protoc", null)] void pb::IBufferMessage.InternalMergeFrom(ref pb::ParseContext input) { uint tag; while ((tag = input.ReadTag()) != 0) { switch(tag) { default: _unknownFields = pb::UnknownFieldSet.MergeFieldFrom(_unknownFields, ref input); break; case 10: { Name = input.ReadString(); break; } case 18: { aliases_.AddEntriesFrom(ref input, _repeated_aliases_codec); break; } case 40: { AllowCors = input.ReadBool(); break; } case 810: { Target = input.ReadString(); break; } } } } #endif } #endregion } #endregion Designer 
generated code