context
stringlengths
2.52k
185k
gt
stringclasses
1 value
/** * (C) Copyright IBM Corp. 2016, 2020. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ /** * IBM OpenAPI SDK Code Generator Version: 99-SNAPSHOT-be3b4618-20201201-123423 */ using System.Collections.Generic; using System.IO; using System.Net.Http; using System.Text; using IBM.Cloud.SDK.Core.Authentication; using IBM.Cloud.SDK.Core.Http; using IBM.Cloud.SDK.Core.Http.Extensions; using IBM.Cloud.SDK.Core.Service; using IBM.Watson.VisualRecognition.v3.Model; using System; namespace IBM.Watson.VisualRecognition.v3 { [System.Obsolete("On 1 December 2021, Visual Recognition will no longer be available. 
" + "For more information, see Visual Recognition Deprecation " + "(https://github.com/watson-developer-cloud/dotnet-standard-sdk/tree/master#visual-recognition-deprecation).")] public partial class VisualRecognitionService : IBMService, IVisualRecognitionService { const string defaultServiceName = "visual_recognition"; private const string defaultServiceUrl = "https://api.us-south.visual-recognition.watson.cloud.ibm.com"; public string Version { get; set; } public VisualRecognitionService(string version) : this(version, defaultServiceName, ConfigBasedAuthenticatorFactory.GetAuthenticator(defaultServiceName)) { } public VisualRecognitionService(string version, IAuthenticator authenticator) : this(version, defaultServiceName, authenticator) {} public VisualRecognitionService(string version, string serviceName) : this(version, serviceName, ConfigBasedAuthenticatorFactory.GetAuthenticator(serviceName)) { } public VisualRecognitionService(IClient httpClient) : base(defaultServiceName, httpClient) { } public VisualRecognitionService(string version, string serviceName, IAuthenticator authenticator) : base(serviceName, authenticator) { if (string.IsNullOrEmpty(version)) { throw new ArgumentNullException("`version` is required"); } Version = version; if (string.IsNullOrEmpty(ServiceUrl)) { SetServiceUrl(defaultServiceUrl); } } /// <summary> /// Classify images. /// /// Classify images with built-in or custom classifiers. /// </summary> /// <param name="imagesFile">An image file (.gif, .jpg, .png, .tif) or .zip file with images. Maximum image size /// is 10 MB. Include no more than 20 images and limit the .zip file to 100 MB. Encode the image and .zip file /// names in UTF-8 if they contain non-ASCII characters. The service assumes UTF-8 encoding if it encounters /// non-ASCII characters. /// /// You can also include an image with the **url** parameter. (optional)</param> /// <param name="imagesFilename">The filename for imagesFile. 
(optional)</param> /// <param name="imagesFileContentType">The content type of imagesFile. (optional)</param> /// <param name="url">The URL of an image (.gif, .jpg, .png, .tif) to analyze. The minimum recommended pixel /// density is 32X32 pixels, but the service tends to perform better with images that are at least 224 x 224 /// pixels. The maximum image size is 10 MB. /// /// You can also include images with the **images_file** parameter. (optional)</param> /// <param name="threshold">The minimum score a class must have to be displayed in the response. Set the /// threshold to `0.0` to return all identified classes. (optional)</param> /// <param name="owners">The categories of classifiers to apply. The **classifier_ids** parameter overrides /// **owners**, so make sure that **classifier_ids** is empty. /// - Use `IBM` to classify against the `default` general classifier. You get the same result if both /// **classifier_ids** and **owners** parameters are empty. /// - Use `me` to classify against all your custom classifiers. However, for better performance use /// **classifier_ids** to specify the specific custom classifiers to apply. /// - Use both `IBM` and `me` to analyze the image against both classifier categories. (optional)</param> /// <param name="classifierIds">Which classifiers to apply. Overrides the **owners** parameter. You can specify /// both custom and built-in classifier IDs. The built-in `default` classifier is used if both /// **classifier_ids** and **owners** parameters are empty. /// /// The following built-in classifier IDs require no training: /// - `default`: Returns classes from thousands of general tags. /// - `food`: Enhances specificity and accuracy for images of food items. /// - `explicit`: Evaluates whether the image might be pornographic. (optional)</param> /// <param name="acceptLanguage">The desired language of parts of the response. See the response for details. 
/// (optional, default to en)</param> /// <returns><see cref="ClassifiedImages" />ClassifiedImages</returns> public DetailedResponse<ClassifiedImages> Classify(System.IO.MemoryStream imagesFile = null, string imagesFilename = null, string imagesFileContentType = null, string url = null, float? threshold = null, List<string> owners = null, List<string> classifierIds = null, string acceptLanguage = null) { if (string.IsNullOrEmpty(Version)) { throw new ArgumentNullException("`Version` is required"); } DetailedResponse<ClassifiedImages> result = null; try { var formData = new MultipartFormDataContent(); if (imagesFile != null) { var imagesFileContent = new ByteArrayContent(imagesFile.ToArray()); System.Net.Http.Headers.MediaTypeHeaderValue contentType; System.Net.Http.Headers.MediaTypeHeaderValue.TryParse(imagesFileContentType, out contentType); imagesFileContent.Headers.ContentType = contentType; formData.Add(imagesFileContent, "images_file", imagesFilename); } if (url != null) { var urlContent = new StringContent(url, Encoding.UTF8, HttpMediaType.TEXT_PLAIN); urlContent.Headers.ContentType = null; formData.Add(urlContent, "url"); } if (threshold != null) { var thresholdContent = new StringContent(threshold.ToString(), Encoding.UTF8, HttpMediaType.TEXT_PLAIN); thresholdContent.Headers.ContentType = null; formData.Add(thresholdContent, "threshold"); } if (owners != null) { var ownersContent = new StringContent(string.Join(", ", owners.ToArray()), Encoding.UTF8, HttpMediaType.TEXT_PLAIN); ownersContent.Headers.ContentType = null; formData.Add(ownersContent, "owners"); } if (classifierIds != null) { var classifierIdsContent = new StringContent(string.Join(", ", classifierIds.ToArray()), Encoding.UTF8, HttpMediaType.TEXT_PLAIN); classifierIdsContent.Headers.ContentType = null; formData.Add(classifierIdsContent, "classifier_ids"); } IClient client = this.Client; SetAuthentication(); var restRequest = client.PostAsync($"{this.Endpoint}/v3/classify"); 
restRequest.WithHeader("Accept", "application/json"); if (!string.IsNullOrEmpty(acceptLanguage)) { restRequest.WithHeader("Accept-Language", acceptLanguage); } if (!string.IsNullOrEmpty(Version)) { restRequest.WithArgument("version", Version); } restRequest.WithBodyContent(formData); restRequest.WithHeaders(Common.GetSdkHeaders("watson_vision_combined", "v3", "Classify")); restRequest.WithHeaders(customRequestHeaders); ClearCustomRequestHeaders(); result = restRequest.As<ClassifiedImages>().Result; if (result == null) { result = new DetailedResponse<ClassifiedImages>(); } } catch (AggregateException ae) { throw ae.Flatten(); } return result; } /// <summary> /// Enum values for Classify. /// </summary> public class ClassifyEnums { /// <summary> /// The desired language of parts of the response. See the response for details. /// </summary> public class AcceptLanguageValue { /// <summary> /// Constant EN for en /// </summary> public const string EN = "en"; /// <summary> /// Constant AR for ar /// </summary> public const string AR = "ar"; /// <summary> /// Constant DE for de /// </summary> public const string DE = "de"; /// <summary> /// Constant ES for es /// </summary> public const string ES = "es"; /// <summary> /// Constant FR for fr /// </summary> public const string FR = "fr"; /// <summary> /// Constant IT for it /// </summary> public const string IT = "it"; /// <summary> /// Constant JA for ja /// </summary> public const string JA = "ja"; /// <summary> /// Constant KO for ko /// </summary> public const string KO = "ko"; /// <summary> /// Constant PT_BR for pt-br /// </summary> public const string PT_BR = "pt-br"; /// <summary> /// Constant ZH_CN for zh-cn /// </summary> public const string ZH_CN = "zh-cn"; /// <summary> /// Constant ZH_TW for zh-tw /// </summary> public const string ZH_TW = "zh-tw"; } } /// <summary> /// Create a classifier. /// /// Train a new multi-faceted classifier on the uploaded image data. 
Create your custom classifier with positive /// or negative example training images. Include at least two sets of examples, either two positive example /// files or one positive and one negative file. You can upload a maximum of 256 MB per call. /// /// **Tips when creating:** /// /// - If you set the **X-Watson-Learning-Opt-Out** header parameter to `true` when you create a classifier, the /// example training images are not stored. Save your training images locally. For more information, see [Data /// collection](#data-collection). /// /// - Encode all names in UTF-8 if they contain non-ASCII characters (.zip and image file names, and classifier /// and class names). The service assumes UTF-8 encoding if it encounters non-ASCII characters. /// </summary> /// <param name="name">The name of the new classifier. Encode special characters in UTF-8.</param> /// <param name="positiveExamples">A dictionary that contains the value for each classname. The value is a .zip /// file of images that depict the visual subject of a class in the new classifier. You can include more than /// one positive example file in a call. /// /// Specify the parameter name by appending `_positive_examples` to the class name. For example, /// `goldenretriever_positive_examples` creates the class **goldenretriever**. The string cannot contain the /// following characters: ``$ * - { } \ | / ' " ` [ ]``. /// /// Include at least 10 images in .jpg or .png format. The minimum recommended image resolution is 32X32 pixels. /// The maximum number of images is 10,000 images or 100 MB per .zip file. /// /// Encode special characters in the file name in UTF-8.</param> /// <param name="negativeExamples">A .zip file of images that do not depict the visual subject of any of the /// classes of the new classifier. Must contain a minimum of 10 images. /// /// Encode special characters in the file name in UTF-8. (optional)</param> /// <param name="negativeExamplesFilename">The filename for negativeExamples. 
(optional)</param> /// <returns><see cref="Classifier" />Classifier</returns> public DetailedResponse<Classifier> CreateClassifier(string name, Dictionary<string, System.IO.MemoryStream> positiveExamples, System.IO.MemoryStream negativeExamples = null, string negativeExamplesFilename = null) { if (string.IsNullOrEmpty(Version)) { throw new ArgumentNullException("`Version` is required"); } if (string.IsNullOrEmpty(name)) { throw new ArgumentNullException("`name` is required for `CreateClassifier`"); } if (positiveExamples == null) { throw new ArgumentNullException("`positiveExamples` is required for `CreateClassifier`"); } if (positiveExamples.Count == 0) { throw new ArgumentException("`positiveExamples` must contain at least one dictionary entry"); } DetailedResponse<Classifier> result = null; try { var formData = new MultipartFormDataContent(); if (name != null) { var nameContent = new StringContent(name, Encoding.UTF8, HttpMediaType.TEXT_PLAIN); nameContent.Headers.ContentType = null; formData.Add(nameContent, "name"); } if (positiveExamples != null && positiveExamples.Count > 0) { foreach (KeyValuePair<string, System.IO.MemoryStream> entry in positiveExamples) { var partName = string.Format("{0}_positive_examples", entry.Key); var partContent = new ByteArrayContent(entry.Value.ToArray()); System.Net.Http.Headers.MediaTypeHeaderValue contentType; System.Net.Http.Headers.MediaTypeHeaderValue.TryParse("application/octet-stream", out contentType); partContent.Headers.ContentType = contentType; formData.Add(partContent, partName, entry.Key + ".zip"); } } if (negativeExamples != null) { var negativeExamplesContent = new ByteArrayContent(negativeExamples.ToArray()); System.Net.Http.Headers.MediaTypeHeaderValue contentType; System.Net.Http.Headers.MediaTypeHeaderValue.TryParse("application/octet-stream", out contentType); negativeExamplesContent.Headers.ContentType = contentType; formData.Add(negativeExamplesContent, "negative_examples", negativeExamplesFilename); } 
IClient client = this.Client; SetAuthentication(); var restRequest = client.PostAsync($"{this.Endpoint}/v3/classifiers"); restRequest.WithHeader("Accept", "application/json"); if (!string.IsNullOrEmpty(Version)) { restRequest.WithArgument("version", Version); } restRequest.WithBodyContent(formData); restRequest.WithHeaders(Common.GetSdkHeaders("watson_vision_combined", "v3", "CreateClassifier")); restRequest.WithHeaders(customRequestHeaders); ClearCustomRequestHeaders(); result = restRequest.As<Classifier>().Result; if (result == null) { result = new DetailedResponse<Classifier>(); } } catch (AggregateException ae) { throw ae.Flatten(); } return result; } /// <summary> /// Retrieve a list of classifiers. /// </summary> /// <param name="verbose">Specify `true` to return details about the classifiers. Omit this parameter to return /// a brief list of classifiers. (optional)</param> /// <returns><see cref="Classifiers" />Classifiers</returns> public DetailedResponse<Classifiers> ListClassifiers(bool? verbose = null) { if (string.IsNullOrEmpty(Version)) { throw new ArgumentNullException("`Version` is required"); } DetailedResponse<Classifiers> result = null; try { IClient client = this.Client; SetAuthentication(); var restRequest = client.GetAsync($"{this.Endpoint}/v3/classifiers"); restRequest.WithHeader("Accept", "application/json"); if (!string.IsNullOrEmpty(Version)) { restRequest.WithArgument("version", Version); } if (verbose != null) { restRequest.WithArgument("verbose", verbose); } restRequest.WithHeaders(Common.GetSdkHeaders("watson_vision_combined", "v3", "ListClassifiers")); restRequest.WithHeaders(customRequestHeaders); ClearCustomRequestHeaders(); result = restRequest.As<Classifiers>().Result; if (result == null) { result = new DetailedResponse<Classifiers>(); } } catch (AggregateException ae) { throw ae.Flatten(); } return result; } /// <summary> /// Retrieve classifier details. /// /// Retrieve information about a custom classifier. 
/// </summary> /// <param name="classifierId">The ID of the classifier.</param> /// <returns><see cref="Classifier" />Classifier</returns> public DetailedResponse<Classifier> GetClassifier(string classifierId) { if (string.IsNullOrEmpty(Version)) { throw new ArgumentNullException("`Version` is required"); } if (string.IsNullOrEmpty(classifierId)) { throw new ArgumentNullException("`classifierId` is required for `GetClassifier`"); } else { classifierId = Uri.EscapeDataString(classifierId); } DetailedResponse<Classifier> result = null; try { IClient client = this.Client; SetAuthentication(); var restRequest = client.GetAsync($"{this.Endpoint}/v3/classifiers/{classifierId}"); restRequest.WithHeader("Accept", "application/json"); if (!string.IsNullOrEmpty(Version)) { restRequest.WithArgument("version", Version); } restRequest.WithHeaders(Common.GetSdkHeaders("watson_vision_combined", "v3", "GetClassifier")); restRequest.WithHeaders(customRequestHeaders); ClearCustomRequestHeaders(); result = restRequest.As<Classifier>().Result; if (result == null) { result = new DetailedResponse<Classifier>(); } } catch (AggregateException ae) { throw ae.Flatten(); } return result; } /// <summary> /// Update a classifier. /// /// Update a custom classifier by adding new positive or negative classes or by adding new images to existing /// classes. You must supply at least one set of positive or negative examples. For details, see [Updating /// custom /// classifiers](https://cloud.ibm.com/docs/visual-recognition?topic=visual-recognition-customizing#updating-custom-classifiers). /// /// Encode all names in UTF-8 if they contain non-ASCII characters (.zip and image file names, and classifier /// and class names). The service assumes UTF-8 encoding if it encounters non-ASCII characters. /// /// **Tips about retraining:** /// /// - You can't update the classifier if the **X-Watson-Learning-Opt-Out** header parameter was set to `true` /// when the classifier was created. 
Training images are not stored in that case. Instead, create another /// classifier. For more information, see [Data collection](#data-collection). /// /// - Don't make retraining calls on a classifier until the status is ready. When you submit retraining requests /// in parallel, the last request overwrites the previous requests. The `retrained` property shows the last time /// the classifier retraining finished. /// </summary> /// <param name="classifierId">The ID of the classifier.</param> /// <param name="positiveExamples">A dictionary that contains the value for each classname. The value is a .zip /// file of images that depict the visual subject of a class in the classifier. The positive examples create or /// update classes in the classifier. You can include more than one positive example file in a call. /// /// Specify the parameter name by appending `_positive_examples` to the class name. For example, /// `goldenretriever_positive_examples` creates the class `goldenretriever`. The string cannot contain the /// following characters: ``$ * - { } \ | / ' " ` [ ]``. /// /// Include at least 10 images in .jpg or .png format. The minimum recommended image resolution is 32X32 pixels. /// The maximum number of images is 10,000 images or 100 MB per .zip file. /// /// Encode special characters in the file name in UTF-8. (optional)</param> /// <param name="negativeExamples">A .zip file of images that do not depict the visual subject of any of the /// classes of the new classifier. Must contain a minimum of 10 images. /// /// Encode special characters in the file name in UTF-8. (optional)</param> /// <param name="negativeExamplesFilename">The filename for negativeExamples. 
(optional)</param> /// <returns><see cref="Classifier" />Classifier</returns> public DetailedResponse<Classifier> UpdateClassifier(string classifierId, Dictionary<string, System.IO.MemoryStream> positiveExamples = null, System.IO.MemoryStream negativeExamples = null, string negativeExamplesFilename = null) { if (string.IsNullOrEmpty(Version)) { throw new ArgumentNullException("`Version` is required"); } if (string.IsNullOrEmpty(classifierId)) { throw new ArgumentNullException("`classifierId` is required for `UpdateClassifier`"); } else { classifierId = Uri.EscapeDataString(classifierId); } DetailedResponse<Classifier> result = null; try { var formData = new MultipartFormDataContent(); if (positiveExamples != null && positiveExamples.Count > 0) { foreach (KeyValuePair<string, System.IO.MemoryStream> entry in positiveExamples) { var partName = string.Format("{0}_positive_examples", entry.Key); var partContent = new ByteArrayContent(entry.Value.ToArray()); System.Net.Http.Headers.MediaTypeHeaderValue contentType; System.Net.Http.Headers.MediaTypeHeaderValue.TryParse("application/octet-stream", out contentType); partContent.Headers.ContentType = contentType; formData.Add(partContent, partName, entry.Key + ".zip"); } } if (negativeExamples != null) { var negativeExamplesContent = new ByteArrayContent(negativeExamples.ToArray()); System.Net.Http.Headers.MediaTypeHeaderValue contentType; System.Net.Http.Headers.MediaTypeHeaderValue.TryParse("application/octet-stream", out contentType); negativeExamplesContent.Headers.ContentType = contentType; formData.Add(negativeExamplesContent, "negative_examples", negativeExamplesFilename); } IClient client = this.Client; SetAuthentication(); var restRequest = client.PostAsync($"{this.Endpoint}/v3/classifiers/{classifierId}"); restRequest.WithHeader("Accept", "application/json"); if (!string.IsNullOrEmpty(Version)) { restRequest.WithArgument("version", Version); } restRequest.WithBodyContent(formData); 
restRequest.WithHeaders(Common.GetSdkHeaders("watson_vision_combined", "v3", "UpdateClassifier")); restRequest.WithHeaders(customRequestHeaders); ClearCustomRequestHeaders(); result = restRequest.As<Classifier>().Result; if (result == null) { result = new DetailedResponse<Classifier>(); } } catch (AggregateException ae) { throw ae.Flatten(); } return result; } /// <summary> /// Delete a classifier. /// </summary> /// <param name="classifierId">The ID of the classifier.</param> /// <returns><see cref="object" />object</returns> public DetailedResponse<object> DeleteClassifier(string classifierId) { if (string.IsNullOrEmpty(Version)) { throw new ArgumentNullException("`Version` is required"); } if (string.IsNullOrEmpty(classifierId)) { throw new ArgumentNullException("`classifierId` is required for `DeleteClassifier`"); } else { classifierId = Uri.EscapeDataString(classifierId); } DetailedResponse<object> result = null; try { IClient client = this.Client; SetAuthentication(); var restRequest = client.DeleteAsync($"{this.Endpoint}/v3/classifiers/{classifierId}"); restRequest.WithHeader("Accept", "application/json"); if (!string.IsNullOrEmpty(Version)) { restRequest.WithArgument("version", Version); } restRequest.WithHeaders(Common.GetSdkHeaders("watson_vision_combined", "v3", "DeleteClassifier")); restRequest.WithHeaders(customRequestHeaders); ClearCustomRequestHeaders(); result = restRequest.As<object>().Result; if (result == null) { result = new DetailedResponse<object>(); } } catch (AggregateException ae) { throw ae.Flatten(); } return result; } /// <summary> /// Retrieve a Core ML model of a classifier. /// /// Download a Core ML model file (.mlmodel) of a custom classifier that returns <tt>"core_ml_enabled": /// true</tt> in the classifier details. 
/// </summary> /// <param name="classifierId">The ID of the classifier.</param> /// <returns><see cref="byte[]" />byte[]</returns> public DetailedResponse<System.IO.MemoryStream> GetCoreMlModel(string classifierId) { if (string.IsNullOrEmpty(Version)) { throw new ArgumentNullException("`Version` is required"); } if (string.IsNullOrEmpty(classifierId)) { throw new ArgumentNullException("`classifierId` is required for `GetCoreMlModel`"); } else { classifierId = Uri.EscapeDataString(classifierId); } DetailedResponse<System.IO.MemoryStream> result = null; try { IClient client = this.Client; SetAuthentication(); var restRequest = client.GetAsync($"{this.Endpoint}/v3/classifiers/{classifierId}/core_ml_model"); restRequest.WithHeader("Accept", "application/octet-stream"); if (!string.IsNullOrEmpty(Version)) { restRequest.WithArgument("version", Version); } restRequest.WithHeaders(Common.GetSdkHeaders("watson_vision_combined", "v3", "GetCoreMlModel")); restRequest.WithHeaders(customRequestHeaders); ClearCustomRequestHeaders(); result = new DetailedResponse<System.IO.MemoryStream>(); result.Result = new System.IO.MemoryStream(restRequest.AsByteArray().Result); } catch (AggregateException ae) { throw ae.Flatten(); } return result; } /// <summary> /// Delete labeled data. /// /// Deletes all data associated with a specified customer ID. The method has no effect if no data is associated /// with the customer ID. /// /// You associate a customer ID with data by passing the `X-Watson-Metadata` header with a request that passes /// data. For more information about personal data and customer IDs, see [Information /// security](https://cloud.ibm.com/docs/visual-recognition?topic=visual-recognition-information-security). 
/// </summary> /// <param name="customerId">The customer ID for which all data is to be deleted.</param> /// <returns><see cref="object" />object</returns> public DetailedResponse<object> DeleteUserData(string customerId) { if (string.IsNullOrEmpty(Version)) { throw new ArgumentNullException("`Version` is required"); } if (string.IsNullOrEmpty(customerId)) { throw new ArgumentNullException("`customerId` is required for `DeleteUserData`"); } DetailedResponse<object> result = null; try { IClient client = this.Client; SetAuthentication(); var restRequest = client.DeleteAsync($"{this.Endpoint}/v3/user_data"); restRequest.WithHeader("Accept", "application/json"); if (!string.IsNullOrEmpty(Version)) { restRequest.WithArgument("version", Version); } if (!string.IsNullOrEmpty(customerId)) { restRequest.WithArgument("customer_id", customerId); } restRequest.WithHeaders(Common.GetSdkHeaders("watson_vision_combined", "v3", "DeleteUserData")); restRequest.WithHeaders(customRequestHeaders); ClearCustomRequestHeaders(); result = restRequest.As<object>().Result; if (result == null) { result = new DetailedResponse<object>(); } } catch (AggregateException ae) { throw ae.Flatten(); } return result; } } }
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ using log4net; using Nini.Config; using System; using System.Collections.Generic; using System.Reflection; using OpenSim.Framework; using OpenSim.Framework.Console; using OpenSim.Server.Base; using OpenSim.Region.Framework.Interfaces; using OpenSim.Region.Framework.Scenes; using OpenSim.Services.Interfaces; using GridRegion = OpenSim.Services.Interfaces.GridRegion; using OpenMetaverse; namespace OpenSim.Region.CoreModules.ServiceConnectorsOut.Grid { public class LocalGridServicesConnector : ISharedRegionModule, IGridService { private static readonly ILog m_log = LogManager.GetLogger( MethodBase.GetCurrentMethod().DeclaringType); private static LocalGridServicesConnector m_MainInstance; private IGridService m_GridService; private Dictionary<UUID, RegionCache> m_LocalCache = new Dictionary<UUID, RegionCache>(); private bool m_Enabled = false; public LocalGridServicesConnector() { } public LocalGridServicesConnector(IConfigSource source) { m_log.Debug("[LOCAL GRID CONNECTOR]: LocalGridServicesConnector instantiated"); m_MainInstance = this; InitialiseService(source); } #region ISharedRegionModule public Type ReplaceableInterface { get { return null; } } public string Name { get { return "LocalGridServicesConnector"; } } public void Initialise(IConfigSource source) { IConfig moduleConfig = source.Configs["Modules"]; if (moduleConfig != null) { string name = moduleConfig.GetString("GridServices", ""); if (name == Name) { InitialiseService(source); m_MainInstance = this; m_Enabled = true; m_log.Info("[LOCAL GRID CONNECTOR]: Local grid connector enabled"); } } } private void InitialiseService(IConfigSource source) { IConfig assetConfig = source.Configs["GridService"]; if (assetConfig == null) { m_log.Error("[LOCAL GRID CONNECTOR]: GridService missing from OpenSim.ini"); return; } string serviceDll = assetConfig.GetString("LocalServiceModule", String.Empty); if (serviceDll == String.Empty) { m_log.Error("[LOCAL GRID CONNECTOR]: No LocalServiceModule named in section 
GridService"); return; } Object[] args = new Object[] { source }; m_GridService = ServerUtils.LoadPlugin<IGridService>(serviceDll, args); if (m_GridService == null) { m_log.Error("[LOCAL GRID CONNECTOR]: Can't load grid service"); return; } } public void PostInitialise() { if (m_MainInstance == this) { MainConsole.Instance.Commands.AddCommand("LocalGridConnector", false, "show neighbours", "show neighbours", "Shows the local regions' neighbours", NeighboursCommand); } } public void Close() { } public void AddRegion(Scene scene) { if (m_Enabled) scene.RegisterModuleInterface<IGridService>(this); if (m_MainInstance == this) { if (m_LocalCache.ContainsKey(scene.RegionInfo.RegionID)) m_log.ErrorFormat("[LOCAL GRID CONNECTOR]: simulator seems to have more than one region with the same UUID. Please correct this!"); else m_LocalCache.Add(scene.RegionInfo.RegionID, new RegionCache(scene)); } } public void RemoveRegion(Scene scene) { if (m_MainInstance == this) { m_LocalCache[scene.RegionInfo.RegionID].Clear(); m_LocalCache.Remove(scene.RegionInfo.RegionID); } } public void RegionLoaded(Scene scene) { } #endregion #region IGridService public string RegisterRegion(UUID scopeID, GridRegion regionInfo) { return m_GridService.RegisterRegion(scopeID, regionInfo); } public bool DeregisterRegion(UUID regionID) { return m_GridService.DeregisterRegion(regionID); } public List<GridRegion> GetNeighbours(UUID scopeID, UUID regionID) { if (m_LocalCache.ContainsKey(regionID)) { List<GridRegion> neighbours = m_LocalCache[regionID].GetNeighbours(); if (neighbours.Count == 0) // try the DB neighbours = m_GridService.GetNeighbours(scopeID, regionID); return neighbours; } else { m_log.WarnFormat("[LOCAL GRID CONNECTOR]: GetNeighbours: Requested region {0} is not on this sim", regionID); return new List<GridRegion>(); } // Don't go to the DB //return m_GridService.GetNeighbours(scopeID, regionID); } public GridRegion GetRegionByUUID(UUID scopeID, UUID regionID) { return 
m_GridService.GetRegionByUUID(scopeID, regionID); } public GridRegion GetRegionByPosition(UUID scopeID, int x, int y) { GridRegion region = null; // First see if it's a neighbour, even if it isn't on this sim. // Neighbour data is cached in memory, so this is fast foreach (RegionCache rcache in m_LocalCache.Values) { region = rcache.GetRegionByPosition(x, y); if (region != null) { return region; } } // Then try on this sim (may be a lookup in DB if this is using MySql). return m_GridService.GetRegionByPosition(scopeID, x, y); } public GridRegion GetRegionByName(UUID scopeID, string regionName) { return m_GridService.GetRegionByName(scopeID, regionName); } public List<GridRegion> GetRegionsByName(UUID scopeID, string name, int maxNumber) { return m_GridService.GetRegionsByName(scopeID, name, maxNumber); } public List<GridRegion> GetRegionRange(UUID scopeID, int xmin, int xmax, int ymin, int ymax) { return m_GridService.GetRegionRange(scopeID, xmin, xmax, ymin, ymax); } #endregion public void NeighboursCommand(string module, string[] cmdparams) { foreach (KeyValuePair<UUID, RegionCache> kvp in m_LocalCache) { m_log.InfoFormat("*** Neighbours of {0} {1} ***", kvp.Key, kvp.Value.RegionName); List<GridRegion> regions = kvp.Value.GetNeighbours(); foreach (GridRegion r in regions) m_log.InfoFormat(" {0} @ {1}={2}", r.RegionName, r.RegionLocX / Constants.RegionSize, r.RegionLocY / Constants.RegionSize); } } } }
using System;
using System.Drawing;
using System.Collections;
using System.ComponentModel;
using System.Windows.Forms;

namespace GeekBangCN.InfoPathAnalyzer.WinForm
{
    /// <summary>
    /// Modal progress dialog driven from a worker thread through the
    /// <c>IProgressCallback</c> interface. Every interface member marshals onto
    /// the UI thread via <see cref="Control.Invoke(Delegate, object[])"/>; the
    /// <c>Begin</c>/<c>SetRange</c> members additionally block on
    /// <see cref="initEvent"/> until <see cref="OnLoad"/> has run, so a worker
    /// that starts before the form is shown cannot race handle creation.
    /// </summary>
    public class ProgressWindow : System.Windows.Forms.Form, IProgressCallback
    {
        private System.Windows.Forms.Label label;
        private System.Windows.Forms.ProgressBar progressBar;

        /// <summary>
        /// Required designer variable.
        /// </summary>
        private System.ComponentModel.Container components = null;

        // Delegate types used to marshal worker-thread calls onto the UI thread.
        public delegate void SetTextInvoker(String text);
        public delegate void IncrementInvoker(int val);
        public delegate void StepToInvoker(int val);
        public delegate void RangeInvoker(int minimum, int maximum);

        // Title-bar text captured when the range is set; the "% complete" suffix
        // is appended to this root in UpdateStatusText.
        private String titleRoot = "";

        // Signaled in OnLoad once the form handle exists; Begin()/SetRange() wait
        // on it before calling Invoke.
        private System.Threading.ManualResetEvent initEvent = new System.Threading.ManualResetEvent(false);

        // Signaled when the user closes the window; the worker observes it via
        // the IsAborting property.
        private System.Threading.ManualResetEvent abortEvent = new System.Threading.ManualResetEvent(false);

        // Set to false once the user has closed the form, so End() does not try
        // to close it a second time.
        private bool requiresClose = true;

        /// <summary>
        /// Creates a new progress window.
        /// </summary>
        public ProgressWindow()
        {
            //
            // Required for Windows Form Designer support
            //
            InitializeComponent();
        }

        #region Implementation of IProgressCallback

        /// <summary>
        /// Call this method from the worker thread to initialize
        /// the progress meter.
        /// </summary>
        /// <param name="minimum">The minimum value in the progress range (e.g. 0)</param>
        /// <param name="maximum">The maximum value in the progress range (e.g. 100)</param>
        public void Begin(int minimum, int maximum)
        {
            initEvent.WaitOne();
            Invoke(new RangeInvoker(DoBegin), new object[] { minimum, maximum });
        }

        /// <summary>
        /// Call this method from the worker thread to initialize
        /// the progress callback, without setting the range.
        /// </summary>
        public void Begin()
        {
            initEvent.WaitOne();
            Invoke(new MethodInvoker(DoBegin));
        }

        /// <summary>
        /// Call this method from the worker thread to reset the range in the progress callback.
        /// </summary>
        /// <param name="minimum">The minimum value in the progress range (e.g. 0)</param>
        /// <param name="maximum">The maximum value in the progress range (e.g. 100)</param>
        /// <remarks>You must have called one of the Begin() methods prior to this call.</remarks>
        public void SetRange(int minimum, int maximum)
        {
            initEvent.WaitOne();
            Invoke(new RangeInvoker(DoSetRange), new object[] { minimum, maximum });
        }

        /// <summary>
        /// Call this method from the worker thread to update the progress text.
        /// </summary>
        /// <param name="text">The progress text to display</param>
        public void SetText(String text)
        {
            Invoke(new SetTextInvoker(DoSetText), new object[] { text });
        }

        /// <summary>
        /// Call this method from the worker thread to increase the progress counter by a specified value.
        /// </summary>
        /// <param name="val">The amount by which to increment the progress indicator</param>
        public void Increment(int val)
        {
            Invoke(new IncrementInvoker(DoIncrement), new object[] { val });
        }

        /// <summary>
        /// Call this method from the worker thread to step the progress meter to a particular value.
        /// </summary>
        /// <param name="val">The absolute progress value to display</param>
        public void StepTo(int val)
        {
            Invoke(new StepToInvoker(DoStepTo), new object[] { val });
        }

        /// <summary>
        /// If this property is true, then you should abort work.
        /// </summary>
        public bool IsAborting
        {
            get
            {
                // WaitOne with a zero timeout is a non-blocking poll of the event.
                return abortEvent.WaitOne(0, false);
            }
        }

        /// <summary>
        /// Call this method from the worker thread to finalize the progress meter.
        /// </summary>
        public void End()
        {
            // Skip the close if the user already closed the window (OnClosing
            // clears requiresClose), otherwise Invoke would target a dead form.
            if (requiresClose)
            {
                Invoke(new MethodInvoker(DoEnd));
            }
        }

        #endregion

        #region Implementation members invoked on the owner thread

        private void DoSetText(String text)
        {
            label.Text = text;
        }

        private void DoIncrement(int val)
        {
            progressBar.Increment(val);
            UpdateStatusText();
        }

        private void DoStepTo(int val)
        {
            progressBar.Value = val;
            UpdateStatusText();
        }

        private void DoBegin(int minimum, int maximum)
        {
            DoBegin();
            DoSetRange(minimum, maximum);
        }

        private void DoBegin()
        {
            // Re-enable the system menu / close box now that work has started.
            ControlBox = true;
        }

        private void DoSetRange(int minimum, int maximum)
        {
            progressBar.Minimum = minimum;
            progressBar.Maximum = maximum;
            progressBar.Value = minimum;
            // Remember the bare title so UpdateStatusText can append a percentage.
            titleRoot = Text;
        }

        private void DoEnd()
        {
            Close();
        }

        #endregion

        #region Overrides

        /// <summary>
        /// Handles the form load, and sets an event to ensure that
        /// initialization is synchronized with the appearance of the form.
        /// </summary>
        /// <param name="e">Standard event arguments</param>
        protected override void OnLoad(System.EventArgs e)
        {
            base.OnLoad(e);
            ControlBox = false;
            initEvent.Set();
        }

        /// <summary>
        /// Disable "Close" button.
        /// </summary>
        protected override CreateParams CreateParams
        {
            get
            {
                CreateParams param = base.CreateParams;
                // 0x200 == CS_NOCLOSE: greys out the title-bar close button.
                param.ClassStyle = param.ClassStyle | 0x200;
                return param;
            }
        }

        /// <summary>
        /// Clean up any resources being used.
        /// </summary>
        /// <param name="disposing">True when called from Dispose(), false from the finalizer.</param>
        protected override void Dispose(bool disposing)
        {
            if (disposing)
            {
                if (components != null)
                {
                    components.Dispose();
                }
            }
            base.Dispose(disposing);
        }

        /// <summary>
        /// Handler for 'Close' clicking: signals the worker to abort instead of
        /// letting End() close the form later.
        /// </summary>
        /// <param name="e">Standard cancel event arguments</param>
        protected override void OnClosing(System.ComponentModel.CancelEventArgs e)
        {
            requiresClose = false;
            AbortWork();
            base.OnClosing(e);
        }

        #endregion

        #region Implementation Utilities

        /// <summary>
        /// Utility function that formats and updates the title bar text.
        /// </summary>
        private void UpdateStatusText()
        {
            // Offset by Minimum so a non-zero minimum still maps to 0-100%, and
            // guard against a zero-width range (the original expression threw
            // DivideByZeroException when Maximum == Minimum).
            int range = progressBar.Maximum - progressBar.Minimum;
            int percent = range <= 0
                ? 100
                : ((progressBar.Value - progressBar.Minimum) * 100) / range;
            Text = titleRoot + String.Format(" - {0}% complete", percent);
        }

        /// <summary>
        /// Utility function to terminate the thread.
        /// </summary>
        private void AbortWork()
        {
            abortEvent.Set();
        }

        #endregion

        #region Windows Form Designer generated code
        /// <summary>
        /// Required method for Designer support - do not modify
        /// the contents of this method with the code editor.
        /// </summary>
        private void InitializeComponent()
        {
            this.progressBar = new System.Windows.Forms.ProgressBar();
            this.label = new System.Windows.Forms.Label();
            this.SuspendLayout();
            //
            // progressBar
            //
            this.progressBar.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left) | System.Windows.Forms.AnchorStyles.Right)));
            this.progressBar.Location = new System.Drawing.Point(8, 34);
            this.progressBar.Name = "progressBar";
            this.progressBar.Size = new System.Drawing.Size(272, 23);
            this.progressBar.TabIndex = 1;
            //
            // label
            //
            this.label.Anchor = ((System.Windows.Forms.AnchorStyles)((((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom) | System.Windows.Forms.AnchorStyles.Left) | System.Windows.Forms.AnchorStyles.Right)));
            this.label.Location = new System.Drawing.Point(8, 8);
            this.label.Name = "label";
            this.label.Size = new System.Drawing.Size(272, 24);
            this.label.TabIndex = 0;
            this.label.Text = "Starting operation...";
            //
            // ProgressWindow
            //
            this.AutoScaleBaseSize = new System.Drawing.Size(5, 13);
            this.ClientSize = new System.Drawing.Size(290, 73);
            this.ControlBox = false;
            this.Controls.Add(this.progressBar);
            this.Controls.Add(this.label);
            this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedDialog;
            this.MaximizeBox = false;
            this.MinimizeBox = false;
            this.Name = "ProgressWindow";
            this.ShowIcon = false;
            this.StartPosition = System.Windows.Forms.FormStartPosition.CenterParent;
            this.Text = "ProgressWindow";
            this.ResumeLayout(false);
        }
        #endregion
    }
}
// ********************************************************************************************************
// Product Name: DotSpatial.Symbology.Forms.dll
// Description:  The Windows Forms user interface layer for the DotSpatial.Symbology library.
// ********************************************************************************************************
// The contents of this file are subject to the MIT License (MIT)
// you may not use this file except in compliance with the License. You may obtain a copy of the License at
// http://dotspatial.codeplex.com/license
//
// Software distributed under the License is distributed on an "AS IS" basis, WITHOUT WARRANTY OF
// ANY KIND, either expressed or implied. See the License for the specific language governing rights and
// limitations under the License.
//
// The Original Code is from MapWindow.dll version 6.0
//
// The Initial Developer of this Original Code is Ted Dunsford. Created 5/14/2009 11:22:04 AM
//
// Contributor(s): (Open source contributors should list themselves and their modifications here).
//
// ********************************************************************************************************

using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Drawing;
using System.Drawing.Drawing2D;
using System.Windows.Forms;
using DotSpatial.Serialization;

namespace DotSpatial.Symbology.Forms
{
    /// <summary>
    /// A control that shows a row (or column) of sample boxes, each drawing the
    /// same symbol at a different size, and lets the user pick one of those sizes.
    /// </summary>
    [DefaultEvent("SelectedSizeChanged"), ToolboxItem(false)]
    public class SymbolSizeChooser : Control, ISupportInitialize
    {
        #region ISupportInitialize Members

        /// <summary>
        /// Prevent redundant updates to the boxes every time a property is changed.
        /// </summary>
        public void BeginInit()
        {
            _isInitializing = true;
        }

        /// <summary>
        /// Enable live updating so that from now on, changes rebuild boxes.
        /// </summary>
        public void EndInit()
        {
            _isInitializing = false;
            RefreshBoxes();
        }

        #endregion

        /// <summary>
        /// Occurs when the selected size has changed.
        /// </summary>
        public event EventHandler SelectedSizeChanged;

        #region Private Variables

        private Color _boxBackColor;
        private Color _boxSelectionColor;
        private Size _boxSize;
        private List<SizeBox> _boxes;
        private bool _isInitializing;
        private Size2D _maxSize;
        private Size2D _minSize;
        private int _numBoxes;
        private Orientation _orientation;
        private int _roundingRadius;
        private Size2D _selectedSize;
        private ISymbol _symbol;

        #endregion

        #region Constructors

        /// <summary>
        /// Creates a new instance of SymbolSizeChooser with a default SimpleSymbol.
        /// </summary>
        public SymbolSizeChooser()
        {
            Configure();
        }

        /// <summary>
        /// Creates a new SymbolSizeChooser, specifying the symbol as part of the constructor.
        /// </summary>
        /// <param name="symbol">The symbol to draw.</param>
        public SymbolSizeChooser(ISymbol symbol)
        {
            Configure();
            _symbol = symbol;
        }

        // Shared constructor body: establishes all defaults, then builds the boxes once.
        private void Configure()
        {
            _boxes = new List<SizeBox>();
            _numBoxes = 4;
            _minSize = new Size2D(4, 4);
            _maxSize = new Size2D(30, 30);
            _selectedSize = _minSize.Copy();
            _boxSize = new Size(36, 36);
            _boxBackColor = SystemColors.Control;
            _boxSelectionColor = SystemColors.Highlight;
            _symbol = new SimpleSymbol();
            _orientation = Orientation.Horizontal;
            _roundingRadius = 6;
            RefreshBoxes();
        }

        #endregion

        #region Methods

        /// <summary>
        /// Forces the box sub-categories to refresh given the new content.
        /// </summary>
        public virtual void RefreshBoxes()
        {
            _boxes = new List<SizeBox>();
            for (int i = 0; i < NumBoxes; i++)
            {
                CreateBox(i);
            }
            Invalidate();
        }

        // Builds the i-th SizeBox: positions it along the orientation axis and
        // assigns it a symbol size interpolated between _minSize and _maxSize.
        private void CreateBox(int i)
        {
            SizeBox sb = new SizeBox();
            int x = 1;
            int y = 1;
            if (_orientation == Orientation.Horizontal)
            {
                x = (_boxSize.Width + 2) * i + 1;
            }
            else
            {
                y = (_boxSize.Height + 2) * i + 1;
            }
            sb.Bounds = new Rectangle(x, y, _boxSize.Width, _boxSize.Height);
            sb.BackColor = _boxBackColor;
            sb.SelectionColor = _boxSelectionColor;
            sb.RoundingRadius = _roundingRadius;
            if (i == 0)
            {
                // First box always shows the minimum size (fallback 4x4 if unset).
                if (_minSize != null)
                {
                    sb.Size = _minSize.Copy();
                }
                else
                {
                    sb.Size = new Size2D(4, 4);
                }
            }
            else if (i == _numBoxes - 1)
            {
                // Last box always shows the maximum size (fallback 32x32 if unset).
                if (_maxSize != null)
                {
                    sb.Size = _maxSize.Copy();
                }
                else
                {
                    sb.Size = new Size2D(32, 32);
                }
            }
            else
            {
                if (_minSize != null && _maxSize != null)
                {
                    // Because of the elses, the number of boxes must be greater than 2
                    // and the current item is neither the min nor the max box.
                    // Linear interpolation between min and max. NOTE(review): dividing
                    // by _numBoxes (rather than _numBoxes - 1) makes the step up to the
                    // final (max) box larger than the other steps — confirm intended.
                    double cw = (_maxSize.Width - _minSize.Width) / (_numBoxes);
                    double ch = (_maxSize.Height - _minSize.Height) / (_numBoxes);
                    sb.Size = new Size2D(_minSize.Width + cw * i, _minSize.Height + ch * i);
                }
                else
                {
                    sb.Size = new Size2D(16, 16);
                }
            }
            _boxes.Add(sb);
        }

        #endregion

        #region Properties

        /// <summary>
        /// Gets or sets the normal background color for the boxes.
        /// </summary>
        [Description("Gets or sets the normal background color for the boxes.")]
        public Color BoxBackColor
        {
            get { return _boxBackColor; }
            set
            {
                _boxBackColor = value;
                if (!_isInitializing) RefreshBoxes();
            }
        }

        /// <summary>
        /// Gets or sets the box selection color.
        /// </summary>
        [Description("Gets or sets the box selection color")]
        public Color BoxSelectionColor
        {
            get { return _boxSelectionColor; }
            set
            {
                _boxSelectionColor = value;
                if (!_isInitializing) RefreshBoxes();
            }
        }

        /// <summary>
        /// Gets or sets the rectangular extent for all the boxes. This is not the size of the symbol.
        /// </summary>
        [Description("Gets or sets the rectangular extent for all the boxes. This is not the size of the symbol.")]
        public Size BoxSize
        {
            get { return _boxSize; }
            set
            {
                _boxSize = value;
                if (!_isInitializing) RefreshBoxes();
            }
        }

        /// <summary>
        /// Gets or sets the maximum symbol size.
        /// </summary>
        [Description("Gets or sets the maximum symbol size."), DesignerSerializationVisibility(DesignerSerializationVisibility.Content)]
        public Size2D MaximumSymbolSize
        {
            get { return _maxSize; }
            set
            {
                _maxSize = value;
                if (!_isInitializing) RefreshBoxes();
            }
        }

        /// <summary>
        /// Gets or sets the minimum symbol size.
        /// </summary>
        [Description("Gets or sets the minimum symbol size"), DesignerSerializationVisibility(DesignerSerializationVisibility.Content)]
        public Size2D MinimumSymbolSize
        {
            get { return _minSize; }
            set
            {
                _minSize = value;
                if (!_isInitializing) RefreshBoxes();
            }
        }

        /// <summary>
        /// Gets or sets whether the boxes are drawn horizontally or vertically.
        /// </summary>
        [Description("Gets or sets whether the boxes are drawn horizontally or vertically.")]
        public Orientation Orientation
        {
            get { return _orientation; }
            set
            {
                _orientation = value;
                if (!_isInitializing) RefreshBoxes();
            }
        }

        /// <summary>
        /// Gets or sets the number of boxes.
        /// </summary>
        [Description("Gets or sets the number of boxes")]
        public int NumBoxes
        {
            get { return _numBoxes; }
            set
            {
                _numBoxes = value;
                if (!_isInitializing) RefreshBoxes();
            }
        }

        /// <summary>
        /// Gets or sets the symbol to use for this control. Setting the symbol
        /// also marks as selected whichever box matches the symbol's size.
        /// </summary>
        [Browsable(false), DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden)]
        public ISymbol Symbol
        {
            get { return _symbol; }
            set
            {
                _symbol = value;
                foreach (SizeBox sb in _boxes)
                {
                    if (_symbol.Size == sb.Size)
                    {
                        sb.IsSelected = true;
                    }
                    else
                    {
                        sb.IsSelected = false;
                    }
                }
                if (!_isInitializing) Invalidate();
            }
        }

        /// <summary>
        /// Gets or sets the rounding radius for the boxes.
        /// </summary>
        [Description("Gets or sets the rounding radius for the boxes")]
        public int RoundingRadius
        {
            get { return _roundingRadius; }
            set
            {
                _roundingRadius = value;
                if (!_isInitializing) RefreshBoxes();
            }
        }

        /// <summary>
        /// Gets or sets the currently selected size.
        /// </summary>
        [Description("Gets or sets the currently selected size."), DesignerSerializationVisibility(DesignerSerializationVisibility.Content)]
        public Size2D SelectedSize
        {
            get { return _selectedSize; }
            set
            {
                _selectedSize = value;
                if (!_isInitializing) Invalidate();
            }
        }

        #endregion

        #region Protected Methods

        /// <summary>
        /// Fires the selected size changed event.
        /// </summary>
        protected virtual void OnSelectedSizeChanged()
        {
            if (SelectedSizeChanged != null) SelectedSizeChanged(this, EventArgs.Empty);
        }

        /// <summary>
        /// Handles the mouse up situation: selects the box under the cursor,
        /// deselects the others, and copies the chosen size onto the symbol.
        /// </summary>
        /// <param name="e">Mouse event arguments containing the click location</param>
        protected override void OnMouseUp(MouseEventArgs e)
        {
            bool changed = false;
            foreach (SizeBox sb in _boxes)
            {
                if (sb.Bounds.Contains(e.Location))
                {
                    if (sb.IsSelected == false)
                    {
                        sb.IsSelected = true;
                        _selectedSize = sb.Size.Copy();
                        // Mutate the symbol's existing Size in place rather than
                        // replacing it, so external references stay valid.
                        _symbol.Size.Height = sb.Size.Height;
                        _symbol.Size.Width = sb.Size.Width;
                        changed = true;
                    }
                }
                else
                {
                    if (sb.IsSelected)
                    {
                        sb.IsSelected = false;
                    }
                }
            }
            Invalidate();
            if (changed) OnSelectedSizeChanged();
            base.OnMouseUp(e);
        }

        /// <summary>
        /// Occurs during drawing but is overridable by subclasses.
        /// </summary>
        /// <param name="g">The graphics surface to draw to</param>
        /// <param name="clip">The invalidated rectangle being redrawn</param>
        protected virtual void OnDraw(Graphics g, Rectangle clip)
        {
            foreach (SizeBox sb in _boxes)
            {
                sb.Draw(g, clip, _symbol);
            }
        }

        /// <summary>
        /// Prevent flicker: background is cleared in OnPaint's offscreen buffer instead.
        /// </summary>
        /// <param name="pevent">Paint event arguments (unused)</param>
        protected override void OnPaintBackground(PaintEventArgs pevent)
        {
            //base.OnPaintBackground(pevent);
        }

        /// <summary>
        /// Occurs as the SymbolSizeChooser control is being drawn. Renders into
        /// an offscreen bitmap and blits the clip area to avoid flicker.
        /// </summary>
        /// <param name="e">Paint event arguments</param>
        protected override void OnPaint(PaintEventArgs e)
        {
            Rectangle clip = e.ClipRectangle;
            if (clip.IsEmpty) clip = ClientRectangle;
            // The bitmap and its Graphics are both disposed here; the original
            // code leaked the Bitmap (a GDI handle) on every paint.
            using (Bitmap bmp = new Bitmap(clip.Width, clip.Height))
            {
                using (Graphics g = Graphics.FromImage(bmp))
                {
                    g.TranslateTransform(-clip.X, -clip.Y);
                    g.Clip = new Region(clip);
                    g.Clear(BackColor);
                    g.SmoothingMode = SmoothingMode.AntiAlias;
                    OnDraw(g, clip);
                }
                e.Graphics.DrawImage(bmp, clip, new Rectangle(0, 0, clip.Width, clip.Height), GraphicsUnit.Pixel);
            }
        }

        #endregion
    }
}
#if UNITY_WEBGL || WEBSOCKET || (UNITY_XBOXONE && UNITY_EDITOR)
// --------------------------------------------------------------------------------------------------------------------
// <copyright file="SocketWebTcp.cs" company="Exit Games GmbH">
//   Copyright (c) Exit Games GmbH.  All rights reserved.
// </copyright>
// <summary>
//   Internal class to encapsulate the network i/o functionality for the realtime library.
// </summary>
// <author>developer@exitgames.com</author>
// --------------------------------------------------------------------------------------------------------------------

namespace ExitGames.Client.Photon
{
    using System;
    using System.Collections;
    using UnityEngine;
    using SupportClassPun = ExitGames.Client.Photon.SupportClass;

    /// <summary>
    /// Yield Instruction to Wait for real seconds. Very important to keep connection working if Time.TimeScale is altered, we still want accurate network events
    /// </summary>
    public sealed class WaitForRealSeconds : CustomYieldInstruction
    {
        // Absolute real-time deadline (Time.realtimeSinceStartup) at which waiting stops.
        private readonly float _endTime;

        // Coroutine keeps waiting while the deadline is still in the future.
        public override bool keepWaiting
        {
            get { return this._endTime > Time.realtimeSinceStartup; }
        }

        public WaitForRealSeconds(float seconds)
        {
            this._endTime = Time.realtimeSinceStartup + seconds;
        }
    }

    /// <summary>
    /// Internal class to encapsulate the network i/o functionality for the realtime libary.
    /// Receiving is driven by a Unity coroutine (ReceiveLoop) hosted on a hidden,
    /// scene-persistent GameObject rather than by a thread.
    /// </summary>
    public class SocketWebTcp : IPhotonSocket, IDisposable
    {
        /// <summary>Defines the binary serialization protocol for all WebSocket connections. Defaults to "GpBinaryV18", a Photon protocol.</summary>
        /// <remarks>This is a temporary workaround, until the serialization protocol becomes available via the PeerBase.</remarks>
        public static string SerializationProtocol = "GpBinaryV18";

        // The underlying websocket wrapper; null when not connected.
        private WebSocket sock;

        // Guards access to this.sock in Disconnect (coroutine vs. caller).
        private readonly object syncer = new object();

        public SocketWebTcp(PeerBase npeer) : base(npeer)
        {
            this.ServerAddress = npeer.ServerAddress;
            if (this.ReportDebugOfLevel(DebugLevel.INFO))
            {
                this.Listener.DebugReturn(DebugLevel.INFO, "new SocketWebTcp() for Unity. Server: " + this.ServerAddress);
            }

            //this.Protocol = ConnectionProtocol.WebSocket;
            // Receiving is push-driven by the coroutine, so the peer must not poll Receive().
            this.PollReceive = false;
        }

        /// <summary>Closes the socket (best effort) and marks this instance Disconnected.</summary>
        public void Dispose()
        {
            this.State = PhotonSocketState.Disconnecting;

            if (this.sock != null)
            {
                try
                {
                    if (this.sock.Connected) this.sock.Close();
                }
                catch (Exception ex)
                {
                    // Best-effort close: failures are only logged, never rethrown.
                    this.EnqueueDebugReturn(DebugLevel.INFO, "Exception in Dispose(): " + ex);
                }
            }

            this.sock = null;
            this.State = PhotonSocketState.Disconnected;
        }

        // Hidden, DontDestroyOnLoad GameObject that hosts the ReceiveLoop coroutine.
        GameObject websocketConnectionObject;

        /// <summary>Opens the websocket and starts the coroutine-based receive loop. Always returns true (connection errors surface later via ReceiveLoop).</summary>
        public override bool Connect()
        {
            //bool baseOk = base.Connect();
            //if (!baseOk)
            //{
            //    return false;
            //}

            this.State = PhotonSocketState.Connecting;

            // Replace any leftover host object from a previous connection.
            if (this.websocketConnectionObject != null)
            {
                UnityEngine.Object.Destroy(this.websocketConnectionObject);
            }

            this.websocketConnectionObject = new GameObject("websocketConnectionObject");
            MonoBehaviour mb = this.websocketConnectionObject.AddComponent<MonoBehaviourExt>();
            this.websocketConnectionObject.hideFlags = HideFlags.HideInHierarchy;
            UnityEngine.Object.DontDestroyOnLoad(this.websocketConnectionObject);

            this.sock = new WebSocket(new Uri(this.ServerAddress), SerializationProtocol); // TODO: The protocol should be set based on current PeerBase value (but that's currently not accessible)
            this.sock.Connect();

            mb.StartCoroutine(this.ReceiveLoop());
            return true;
        }

        /// <summary>Closes the socket, destroys the coroutine host object and marks this instance Disconnected. Always returns true.</summary>
        public override bool Disconnect()
        {
            if (this.ReportDebugOfLevel(DebugLevel.INFO))
            {
                this.Listener.DebugReturn(DebugLevel.INFO, "SocketWebTcp.Disconnect()");
            }

            this.State = PhotonSocketState.Disconnecting;

            lock (this.syncer)
            {
                if (this.sock != null)
                {
                    try
                    {
                        this.sock.Close();
                    }
                    catch (Exception ex)
                    {
                        this.Listener.DebugReturn(DebugLevel.ERROR, "Exception in Disconnect(): " + ex);
                    }

                    this.sock = null;
                }
            }

            if (this.websocketConnectionObject != null)
            {
                UnityEngine.Object.Destroy(this.websocketConnectionObject);
            }

            this.State = PhotonSocketState.Disconnected;
            return true;
        }

        /// <summary>
        /// used by TPeer*
        /// </summary>
        /// <param name="data">Buffer to send; trimmed to <paramref name="length"/> bytes if longer.</param>
        /// <param name="length">Number of valid bytes in <paramref name="data"/>.</param>
        /// <returns>Skipped when not connected, Exception on send failure, otherwise Success.</returns>
        public override PhotonSocketError Send(byte[] data, int length)
        {
            if (this.State != PhotonSocketState.Connected)
            {
                return PhotonSocketError.Skipped;
            }

            try
            {
                // Websocket Send has no length parameter, so copy to an exact-size buffer.
                if (data.Length > length)
                {
                    byte[] trimmedData = new byte[length];
                    Buffer.BlockCopy(data, 0, trimmedData, 0, length);
                    data = trimmedData;
                }

                if (this.ReportDebugOfLevel(DebugLevel.ALL))
                {
                    this.Listener.DebugReturn(DebugLevel.ALL, "Sending: " + SupportClassPun.ByteArrayToString(data));
                }

                if (this.sock != null)
                {
                    this.sock.Send(data);
                }
            }
            catch (Exception e)
            {
                this.Listener.DebugReturn(DebugLevel.ERROR, "Cannot send to: " + this.ServerAddress + ". " + e.Message);

                this.HandleException(StatusCode.Exception);
                return PhotonSocketError.Exception;
            }

            return PhotonSocketError.Success;
        }

        /// <summary>Not used: data is pushed to the peer from ReceiveLoop, never pulled here.</summary>
        public override PhotonSocketError Receive(out byte[] data)
        {
            data = null;
            return PhotonSocketError.NoData;
        }

        internal const int ALL_HEADER_BYTES = 9;
        internal const int TCP_HEADER_BYTES = 7;
        internal const int MSG_HEADER_BYTES = 2;

        /// <summary>
        /// Coroutine: waits for the socket to connect (polling every 0.1s real time),
        /// then reads incoming frames (polling every 0.02s when idle) and forwards them
        /// via HandleReceivedDatagram until an error or a state change ends the loop.
        /// Always finishes by calling Disconnect().
        /// </summary>
        public IEnumerator ReceiveLoop()
        {
            //this.Listener.DebugReturn(DebugLevel.INFO, "ReceiveLoop()");
            if (this.sock != null)
            {
                // Phase 1: wait until connected or an error is reported.
                while (this.sock != null && !this.sock.Connected && this.sock.Error == null)
                {
                    yield return new WaitForRealSeconds(0.1f);
                }

                if (this.sock != null)
                {
                    if (this.sock.Error != null)
                    {
                        this.Listener.DebugReturn(DebugLevel.ERROR, "Exiting receive thread. Server: " + this.ServerAddress + ":" + this.ServerPort + " Error: " + this.sock.Error);
                        this.HandleException(StatusCode.ExceptionOnConnect);
                    }
                    else
                    {
                        // connected
                        if (this.ReportDebugOfLevel(DebugLevel.ALL))
                        {
                            this.Listener.DebugReturn(DebugLevel.ALL, "Receiving by websocket. this.State: " + this.State);
                        }

                        this.State = PhotonSocketState.Connected;

                        // Phase 2: receive until the state leaves Connected or an error occurs.
                        while (this.State == PhotonSocketState.Connected)
                        {
                            if (this.sock != null)
                            {
                                if (this.sock.Error != null)
                                {
                                    this.Listener.DebugReturn(DebugLevel.ERROR, "Exiting receive thread (inside loop). Server: " + this.ServerAddress + ":" + this.ServerPort + " Error: " + this.sock.Error);
                                    this.HandleException(StatusCode.ExceptionOnReceive);
                                    break;
                                }
                                else
                                {
                                    byte[] inBuff = this.sock.Recv();
                                    if (inBuff == null || inBuff.Length == 0)
                                    {
                                        // nothing received. wait a bit, try again
                                        yield return new WaitForRealSeconds(0.02f);
                                        continue;
                                    }

                                    if (this.ReportDebugOfLevel(DebugLevel.ALL))
                                    {
                                        this.Listener.DebugReturn(DebugLevel.ALL, "TCP << " + inBuff.Length + " = " + SupportClassPun.ByteArrayToString(inBuff));
                                    }

                                    if (inBuff.Length > 0)
                                    {
                                        try
                                        {
                                            this.HandleReceivedDatagram(inBuff, inBuff.Length, false);
                                        }
                                        catch (Exception e)
                                        {
                                            // Only report if we were not already shutting down;
                                            // exceptions during disconnect are expected noise.
                                            if (this.State != PhotonSocketState.Disconnecting && this.State != PhotonSocketState.Disconnected)
                                            {
                                                if (this.ReportDebugOfLevel(DebugLevel.ERROR))
                                                {
                                                    this.EnqueueDebugReturn(DebugLevel.ERROR, "Receive issue. State: " + this.State + ". Server: '" + this.ServerAddress + "' Exception: " + e);
                                                }

                                                this.HandleException(StatusCode.ExceptionOnReceive);
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }

            this.Disconnect();
        }

        // Empty MonoBehaviour whose only job is to host the ReceiveLoop coroutine.
        private class MonoBehaviourExt : MonoBehaviour
        {
        }
    }
}
#endif
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Collections.Generic; using System.IO; using System.Linq; using System.Text; using System.Threading; using Xunit; namespace System.Security.Cryptography.X509Certificates.Tests { public static class ChainTests { internal static bool CanModifyStores { get; } = TestEnvironmentConfiguration.CanModifyStores; private static bool TrustsMicrosoftDotComRoot { get { // Verifies that the microsoft.com certs build with only the certificates in the root store using (var microsoftDotCom = new X509Certificate2(TestData.MicrosoftDotComSslCertBytes)) using (var chainHolder = new ChainHolder()) { X509Chain chain = chainHolder.Chain; chain.ChainPolicy.VerificationTime = new DateTime(2015, 10, 15, 12, 01, 01, DateTimeKind.Local); chain.ChainPolicy.RevocationMode = X509RevocationMode.NoCheck; return chain.Build(microsoftDotCom); } } } [Fact] public static void BuildChain() { using (var microsoftDotCom = new X509Certificate2(TestData.MicrosoftDotComSslCertBytes)) using (var microsoftDotComIssuer = new X509Certificate2(TestData.MicrosoftDotComIssuerBytes)) using (var microsoftDotComRoot = new X509Certificate2(TestData.MicrosoftDotComRootBytes)) using (var unrelated = new X509Certificate2(TestData.DssCer)) using (var chainHolder = new ChainHolder()) { X509Chain chain = chainHolder.Chain; chain.ChainPolicy.ExtraStore.Add(unrelated); chain.ChainPolicy.ExtraStore.Add(microsoftDotComRoot); chain.ChainPolicy.ExtraStore.Add(microsoftDotComIssuer); chain.ChainPolicy.VerificationFlags = X509VerificationFlags.AllowUnknownCertificateAuthority; // Halfway between microsoftDotCom's NotBefore and NotAfter // This isn't a boundary condition test. 
chain.ChainPolicy.VerificationTime = new DateTime(2015, 10, 15, 12, 01, 01, DateTimeKind.Local); chain.ChainPolicy.RevocationMode = X509RevocationMode.NoCheck; bool valid = chain.Build(microsoftDotCom); Assert.True(valid, "Chain built validly"); // The chain should have 3 members Assert.Equal(3, chain.ChainElements.Count); // These are the three specific members. Assert.Equal(microsoftDotCom, chain.ChainElements[0].Certificate); Assert.Equal(microsoftDotComIssuer, chain.ChainElements[1].Certificate); Assert.Equal(microsoftDotComRoot, chain.ChainElements[2].Certificate); } } [PlatformSpecific(TestPlatforms.Windows)] [Fact] public static void VerifyChainFromHandle() { using (var microsoftDotCom = new X509Certificate2(TestData.MicrosoftDotComSslCertBytes)) using (var microsoftDotComIssuer = new X509Certificate2(TestData.MicrosoftDotComIssuerBytes)) using (var microsoftDotComRoot = new X509Certificate2(TestData.MicrosoftDotComRootBytes)) using (var unrelated = new X509Certificate2(TestData.DssCer)) using (var chainHolder = new ChainHolder()) { X509Chain chain = chainHolder.Chain; chain.ChainPolicy.ExtraStore.Add(unrelated); chain.ChainPolicy.ExtraStore.Add(microsoftDotComRoot); chain.ChainPolicy.ExtraStore.Add(microsoftDotComIssuer); chain.ChainPolicy.VerificationFlags = X509VerificationFlags.AllowUnknownCertificateAuthority; chain.ChainPolicy.VerificationTime = new DateTime(2015, 10, 15, 12, 01, 01, DateTimeKind.Local); chain.ChainPolicy.RevocationMode = X509RevocationMode.NoCheck; bool valid = chain.Build(microsoftDotCom); Assert.True(valid, "Source chain built validly"); Assert.Equal(3, chain.ChainElements.Count); using (var chainHolder2 = new ChainHolder(chain.ChainContext)) { X509Chain chain2 = chainHolder2.Chain; Assert.NotSame(chain, chain2); Assert.Equal(chain.ChainContext, chain2.ChainContext); Assert.Equal(3, chain2.ChainElements.Count); Assert.NotSame(chain.ChainElements[0], chain2.ChainElements[0]); Assert.NotSame(chain.ChainElements[1], 
chain2.ChainElements[1]); Assert.NotSame(chain.ChainElements[2], chain2.ChainElements[2]); Assert.Equal(microsoftDotCom, chain2.ChainElements[0].Certificate); Assert.Equal(microsoftDotComIssuer, chain2.ChainElements[1].Certificate); Assert.Equal(microsoftDotComRoot, chain2.ChainElements[2].Certificate); // ChainPolicy is not carried over from the Chain(IntPtr) constructor Assert.NotEqual(chain.ChainPolicy.VerificationFlags, chain2.ChainPolicy.VerificationFlags); Assert.NotEqual(chain.ChainPolicy.VerificationTime, chain2.ChainPolicy.VerificationTime); Assert.NotEqual(chain.ChainPolicy.RevocationMode, chain2.ChainPolicy.RevocationMode); Assert.Equal(X509VerificationFlags.NoFlag, chain2.ChainPolicy.VerificationFlags); // Re-set the ChainPolicy properties chain2.ChainPolicy.VerificationFlags = X509VerificationFlags.AllowUnknownCertificateAuthority; chain2.ChainPolicy.VerificationTime = new DateTime(2015, 10, 15, 12, 01, 01, DateTimeKind.Local); chain2.ChainPolicy.RevocationMode = X509RevocationMode.NoCheck; valid = chain2.Build(microsoftDotCom); Assert.True(valid, "Cloned chain built validly"); } } } [PlatformSpecific(TestPlatforms.AnyUnix)] [ConditionalFact(nameof(CanModifyStores))] public static void VerifyChainFromHandle_Unix() { using (var microsoftDotCom = new X509Certificate2(TestData.MicrosoftDotComSslCertBytes)) using (var chainHolder = new ChainHolder()) { X509Chain chain = chainHolder.Chain; chain.ChainPolicy.VerificationFlags = X509VerificationFlags.AllowUnknownCertificateAuthority; chain.ChainPolicy.VerificationTime = new DateTime(2015, 10, 15, 12, 01, 01, DateTimeKind.Local); chain.ChainPolicy.RevocationMode = X509RevocationMode.NoCheck; bool valid = chain.Build(microsoftDotCom); Assert.Equal(IntPtr.Zero, chain.ChainContext); } Assert.Throws<PlatformNotSupportedException>(() => new X509Chain(IntPtr.Zero)); } [PlatformSpecific(TestPlatforms.Windows)] [Fact] public static void TestDispose() { X509Chain chain; using (var microsoftDotCom = new 
X509Certificate2(TestData.MicrosoftDotComSslCertBytes)) using (var chainHolder = new ChainHolder()) { chain = chainHolder.Chain; chain.ChainPolicy.VerificationFlags = X509VerificationFlags.AllowUnknownCertificateAuthority; chain.ChainPolicy.VerificationTime = new DateTime(2015, 10, 15, 12, 01, 01, DateTimeKind.Local); chain.ChainPolicy.RevocationMode = X509RevocationMode.NoCheck; chain.Build(microsoftDotCom); Assert.NotEqual(IntPtr.Zero, chain.ChainContext); } // No exception thrown for accessing ChainContext on disposed chain Assert.Equal(IntPtr.Zero, chain.ChainContext); } [Fact] public static void TestResetMethod() { using (var sampleCert = new X509Certificate2(TestData.DssCer)) using (var chainHolder = new ChainHolder()) { X509Chain chain = chainHolder.Chain; chain.ChainPolicy.ExtraStore.Add(sampleCert); bool valid = chain.Build(sampleCert); Assert.False(valid); chain.ChainPolicy.VerificationFlags = X509VerificationFlags.AllowUnknownCertificateAuthority; chain.ChainPolicy.VerificationTime = new DateTime(2015, 10, 15, 12, 01, 01, DateTimeKind.Local); chain.ChainPolicy.RevocationMode = X509RevocationMode.NoCheck; valid = chain.Build(sampleCert); Assert.True(valid, "Chain built validly"); Assert.Equal(1, chain.ChainElements.Count); chain.Reset(); Assert.Equal(0, chain.ChainElements.Count); // ChainPolicy did not reset (for desktop compat) Assert.Equal(X509VerificationFlags.AllowUnknownCertificateAuthority, chain.ChainPolicy.VerificationFlags); valid = chain.Build(sampleCert); Assert.Equal(1, chain.ChainElements.Count); // This succeeds because ChainPolicy did not reset Assert.True(valid, "Chain built validly after reset"); } } /// <summary> /// Tests that when a certificate chain has a root certification which is not trusted by the trust provider, /// Build returns false and a ChainStatus returns UntrustedRoot /// </summary> [Fact] [OuterLoop] public static void BuildChainExtraStoreUntrustedRoot() { using (var testCert = new 
X509Certificate2(Path.Combine("TestData", "test.pfx"), TestData.ChainPfxPassword)) using (ImportedCollection ic = Cert.Import(Path.Combine("TestData", "test.pfx"), TestData.ChainPfxPassword, X509KeyStorageFlags.DefaultKeySet)) using (var chainHolder = new ChainHolder()) { X509Certificate2Collection collection = ic.Collection; X509Chain chain = chainHolder.Chain; chain.ChainPolicy.ExtraStore.AddRange(collection); chain.ChainPolicy.RevocationMode = X509RevocationMode.NoCheck; chain.ChainPolicy.VerificationTime = new DateTime(2015, 9, 22, 12, 25, 0); bool valid = chain.Build(testCert); Assert.False(valid); Assert.Contains(chain.ChainStatus, s => s.Status == X509ChainStatusFlags.UntrustedRoot); } } public static IEnumerable<object[]> VerifyExpressionData() { // The test will be using the chain for TestData.MicrosoftDotComSslCertBytes // The leaf cert (microsoft.com) is valid from 2014-10-15 00:00:00Z to 2016-10-15 23:59:59Z DateTime[] validTimes = { // The NotBefore value new DateTime(2014, 10, 15, 0, 0, 0, DateTimeKind.Utc), // One second before the NotAfter value new DateTime(2016, 10, 15, 23, 59, 58, DateTimeKind.Utc), }; // The NotAfter value as a boundary condition differs on Windows and OpenSSL. // Windows considers it valid (<= NotAfter). // OpenSSL considers it invalid (< NotAfter), with a comment along the lines of // "it'll be invalid in a millisecond, why bother with the <=" // So that boundary condition is not being tested. DateTime[] invalidTimes = { // One second before the NotBefore time new DateTime(2014, 10, 14, 23, 59, 59, DateTimeKind.Utc), // One second after the NotAfter time new DateTime(2016, 10, 16, 0, 0, 0, DateTimeKind.Utc), }; List<object[]> testCases = new List<object[]>((validTimes.Length + invalidTimes.Length) * 3); // Build (date, result, kind) tuples. The kind is used to help describe the test case. 
// The DateTime format that xunit uses does show a difference in the DateTime itself, but
// having the Kind be a separate parameter just helps.

foreach (DateTime utcTime in validTimes)
{
    DateTime local = utcTime.ToLocalTime();
    DateTime unspecified = new DateTime(local.Ticks);

    testCases.Add(new object[] { utcTime, true, utcTime.Kind });
    testCases.Add(new object[] { local, true, local.Kind });
    testCases.Add(new object[] { unspecified, true, unspecified.Kind });
}

foreach (DateTime utcTime in invalidTimes)
{
    DateTime local = utcTime.ToLocalTime();
    DateTime unspecified = new DateTime(local.Ticks);

    testCases.Add(new object[] { utcTime, false, utcTime.Kind });
    testCases.Add(new object[] { local, false, local.Kind });
    testCases.Add(new object[] { unspecified, false, unspecified.Kind });
}

return testCases;
}

// Verifies that chain building honors VerificationTime regardless of the
// DateTimeKind of the supplied value (Utc, Local, Unspecified).
[Theory]
[MemberData(nameof(VerifyExpressionData))]
public static void VerifyExpiration_LocalTime(DateTime verificationTime, bool shouldBeValid, DateTimeKind kind)
{
    using (var microsoftDotCom = new X509Certificate2(TestData.MicrosoftDotComSslCertBytes))
    using (var microsoftDotComIssuer = new X509Certificate2(TestData.MicrosoftDotComIssuerBytes))
    using (var microsoftDotComRoot = new X509Certificate2(TestData.MicrosoftDotComRootBytes))
    using (var chainHolder = new ChainHolder())
    {
        X509Chain chain = chainHolder.Chain;
        chain.ChainPolicy.ExtraStore.Add(microsoftDotComIssuer);
        chain.ChainPolicy.ExtraStore.Add(microsoftDotComRoot);

        // Ignore anything except NotTimeValid
        chain.ChainPolicy.VerificationFlags = X509VerificationFlags.AllFlags & ~X509VerificationFlags.IgnoreNotTimeValid;
        chain.ChainPolicy.RevocationMode = X509RevocationMode.NoCheck;
        chain.ChainPolicy.VerificationTime = verificationTime;

        bool builtSuccessfully = chain.Build(microsoftDotCom);

        Assert.Equal(shouldBeValid, builtSuccessfully);

        // If we failed to build the chain, ensure that NotTimeValid is one of the reasons.
        if (!shouldBeValid)
        {
            Assert.Contains(chain.ChainStatus, s => s.Status == X509ChainStatusFlags.NotTimeValid);
        }
    }
}

// A chain built against a policy requiring the Code Signing EKU, using a cert
// that carries it, should succeed.
[Fact]
public static void BuildChain_WithApplicationPolicy_Match()
{
    using (var msCer = new X509Certificate2(TestData.MsCertificate))
    using (var chainHolder = new ChainHolder())
    {
        X509Chain chain = chainHolder.Chain;

        // Code Signing
        chain.ChainPolicy.ApplicationPolicy.Add(new Oid("1.3.6.1.5.5.7.3.3"));
        chain.ChainPolicy.VerificationTime = msCer.NotBefore.AddHours(2);
        chain.ChainPolicy.VerificationFlags = X509VerificationFlags.AllowUnknownCertificateAuthority;
        chain.ChainPolicy.RevocationMode = X509RevocationMode.NoCheck;

        bool valid = chain.Build(msCer);
        Assert.True(valid, "Chain built validly");
    }
}

// A chain built against a policy requiring an EKU the cert does not carry
// should fail with NotValidForUsage on the leaf element.
[Fact]
public static void BuildChain_WithApplicationPolicy_NoMatch()
{
    using (var cert = new X509Certificate2(TestData.MsCertificate))
    using (var chainHolder = new ChainHolder())
    {
        X509Chain chain = chainHolder.Chain;

        // Gibberish. (Code Signing + ".1")
        chain.ChainPolicy.ApplicationPolicy.Add(new Oid("1.3.6.1.5.5.7.3.3.1"));
        chain.ChainPolicy.VerificationFlags = X509VerificationFlags.AllowUnknownCertificateAuthority;
        chain.ChainPolicy.RevocationMode = X509RevocationMode.NoCheck;
        chain.ChainPolicy.VerificationTime = cert.NotBefore.AddHours(2);

        bool valid = chain.Build(cert);
        Assert.False(valid, "Chain built validly");

        Assert.InRange(chain.ChainElements.Count, 1, int.MaxValue);

        // The element holds an equal-but-distinct copy of the input cert.
        Assert.NotSame(cert, chain.ChainElements[0].Certificate);
        Assert.Equal(cert, chain.ChainElements[0].Certificate);

        X509ChainStatus[] chainElementStatus = chain.ChainElements[0].ChainElementStatus;
        Assert.InRange(chainElementStatus.Length, 1, int.MaxValue);
        Assert.Contains(chainElementStatus, x => x.Status == X509ChainStatusFlags.NotValidForUsage);
    }
}

// Like the ApplicationPolicy match test, but for CertificatePolicy OIDs.
[Fact]
public static void BuildChain_WithCertificatePolicy_Match()
{
    using (var cert = new X509Certificate2(TestData.CertWithPolicies))
    using (var chainHolder = new ChainHolder())
    {
        X509Chain chain = chainHolder.Chain;

        // Code Signing
        chain.ChainPolicy.CertificatePolicy.Add(new Oid("2.18.19"));
        chain.ChainPolicy.VerificationFlags = X509VerificationFlags.AllowUnknownCertificateAuthority;
        chain.ChainPolicy.VerificationTime = cert.NotBefore.AddHours(2);
        chain.ChainPolicy.RevocationMode = X509RevocationMode.NoCheck;

        bool valid = chain.Build(cert);
        Assert.True(valid, "Chain built validly");
    }
}

// CertificatePolicy mismatch must fail and report NotValidForUsage on the leaf.
[Fact]
public static void BuildChain_WithCertificatePolicy_NoMatch()
{
    using (var cert = new X509Certificate2(TestData.CertWithPolicies))
    using (var chainHolder = new ChainHolder())
    {
        X509Chain chain = chainHolder.Chain;

        chain.ChainPolicy.CertificatePolicy.Add(new Oid("2.999"));
        chain.ChainPolicy.VerificationFlags = X509VerificationFlags.AllowUnknownCertificateAuthority;
        chain.ChainPolicy.RevocationMode = X509RevocationMode.NoCheck;
        chain.ChainPolicy.VerificationTime = cert.NotBefore.AddHours(2);

        bool valid = chain.Build(cert);
        Assert.False(valid, "Chain built validly");

        Assert.InRange(chain.ChainElements.Count, 1, int.MaxValue);

        Assert.NotSame(cert, chain.ChainElements[0].Certificate);
        Assert.Equal(cert, chain.ChainElements[0].Certificate);

        X509ChainStatus[] chainElementStatus = chain.ChainElements[0].ChainElementStatus;
        Assert.InRange(chainElementStatus.Length, 1, int.MaxValue);
        Assert.Contains(chainElementStatus, x => x.Status == X509ChainStatusFlags.NotValidForUsage);
    }
}

[ConditionalFact(nameof(TrustsMicrosoftDotComRoot), nameof(CanModifyStores))]
[OuterLoop(/* Modifies user certificate store */)]
public static void BuildChain_MicrosoftDotCom_WithRootCertInUserAndSystemRootCertStores()
{
    // Verifies that when the same root cert is placed in both a user and machine root certificate store,
    // any certs chain building to that root cert will build correctly
    //
    // We use a copy of the microsoft.com SSL certs and root certs to validate that the chain can build
    // successfully

    bool shouldInstallCertToUserStore = true;
    bool installedCertToUserStore = false;

    using (var microsoftDotCom = new
X509Certificate2(TestData.MicrosoftDotComSslCertBytes))
using (var microsoftDotComRoot = new X509Certificate2(TestData.MicrosoftDotComRootBytes))
{
    // Check that microsoft.com's root certificate IS installed in the machine root store as a sanity step
    using (var machineRootStore = new X509Store(StoreName.Root, StoreLocation.LocalMachine))
    {
        machineRootStore.Open(OpenFlags.ReadOnly);
        bool foundCert = false;

        foreach (var machineCert in machineRootStore.Certificates)
        {
            if (machineCert.Equals(microsoftDotComRoot))
            {
                foundCert = true;
            }

            // Store enumeration hands out fresh cert instances; dispose each one.
            machineCert.Dispose();
        }

        Assert.True(foundCert, string.Format("Did not find expected certificate with thumbprint '{0}' in the machine root store", microsoftDotComRoot.Thumbprint));
    }

    // Concievably at this point there could still be something wrong and we still don't chain build correctly - if that's
    // the case, then there's likely something wrong with the machine. Validating that happy path is out of scope
    // of this particular test.

    // Check that microsoft.com's root certificate is NOT installed on in the user cert store as a sanity step
    // We won't try to install the microsoft.com root cert into the user root store if it's already there
    using (var userRootStore = new X509Store(StoreName.Root, StoreLocation.CurrentUser))
    {
        userRootStore.Open(OpenFlags.ReadOnly);

        foreach (var userCert in userRootStore.Certificates)
        {
            bool foundCert = false;

            if (userCert.Equals(microsoftDotComRoot))
            {
                foundCert = true;
            }

            userCert.Dispose();

            if (foundCert)
            {
                shouldInstallCertToUserStore = false;
            }
        }
    }

    using (var userRootStore = new X509Store(StoreName.Root, StoreLocation.CurrentUser))
    using (var chainHolder = new ChainHolder())
    {
        try
        {
            if (shouldInstallCertToUserStore)
            {
                try
                {
                    userRootStore.Open(OpenFlags.ReadWrite);
                }
                catch (CryptographicException)
                {
                    // The store could not be opened writable; skip the test silently.
                    return;
                }

                userRootStore.Add(microsoftDotComRoot); // throws CryptographicException
                installedCertToUserStore = true;
            }

            X509Chain chainValidator = chainHolder.Chain;
            chainValidator.ChainPolicy.VerificationTime = new DateTime(2015, 10, 15, 12, 01, 01, DateTimeKind.Local);
            chainValidator.ChainPolicy.RevocationMode = X509RevocationMode.NoCheck;

            bool chainBuildResult = chainValidator.Build(microsoftDotCom);

            // Collect the full status list so a failure message shows every reason.
            StringBuilder builder = new StringBuilder();

            foreach (var status in chainValidator.ChainStatus)
            {
                builder.AppendFormat("{0} {1}{2}", status.Status, status.StatusInformation, Environment.NewLine);
            }

            Assert.True(chainBuildResult, string.Format("Certificate chain build failed. ChainStatus is:{0}{1}", Environment.NewLine, builder.ToString()));
        }
        finally
        {
            // Undo our store mutation even if the assertions above threw.
            if (installedCertToUserStore)
            {
                userRootStore.Remove(microsoftDotComRoot);
            }
        }
    }
}
}

[Fact]
[OuterLoop( /* May require using the network, to download CRLs and intermediates */)]
public static void VerifyWithRevocation()
{
    using (var cert = new X509Certificate2(Path.Combine("TestData", "MS.cer")))
    using (var onlineChainHolder = new ChainHolder())
    using (var offlineChainHolder = new ChainHolder())
    {
        X509Chain onlineChain = onlineChainHolder.Chain;
        X509Chain offlineChain = offlineChainHolder.Chain;

        onlineChain.ChainPolicy.VerificationFlags = X509VerificationFlags.AllowUnknownCertificateAuthority;
        onlineChain.ChainPolicy.VerificationTime = cert.NotBefore.AddHours(2);
        onlineChain.ChainPolicy.RevocationMode = X509RevocationMode.Online;
        onlineChain.ChainPolicy.RevocationFlag = X509RevocationFlag.EntireChain;

        // Attempt the online test a couple of times, in case there was just a CRL
        // download failure.
        const int RetryLimit = 3;
        bool valid = false;

        for (int i = 0; i < RetryLimit; i++)
        {
            valid = onlineChain.Build(cert);

            if (valid)
            {
                break;
            }

            // Log what went wrong at each depth before retrying.
            for (int j = 0; j < onlineChain.ChainElements.Count; j++)
            {
                X509ChainElement chainElement = onlineChain.ChainElements[j];

                // Since `NoError` gets mapped as the empty array, just look for non-empty arrays
                if (chainElement.ChainElementStatus.Length > 0)
                {
                    X509ChainStatusFlags allFlags = chainElement.ChainElementStatus.Aggregate(
                        X509ChainStatusFlags.NoError,
                        (cur, status) => cur | status.Status);

                    Console.WriteLine(
                        $"{nameof(VerifyWithRevocation)}: online attempt {i} - errors at depth {j}: {allFlags}");
                }

                chainElement.Certificate.Dispose();
            }

            Thread.Sleep(1000); // For network flakiness
        }

        if (TestEnvironmentConfiguration.RunManualTests)
        {
            Assert.True(valid, $"Online Chain Built Validly within {RetryLimit} tries");
        }
        else if (!valid)
        {
            Console.WriteLine($"SKIP [{nameof(VerifyWithRevocation)}]: Chain failed to build within {RetryLimit} tries.");
        }

        // Since the network was enabled, we should get the whole chain.
        Assert.Equal(3, onlineChain.ChainElements.Count);

        Assert.Equal(0, onlineChain.ChainElements[0].ChainElementStatus.Length);
        Assert.Equal(0, onlineChain.ChainElements[1].ChainElementStatus.Length);

        // The root CA is not expected to be installed on everyone's machines,
        // so allow for it to report UntrustedRoot, but nothing else..
        X509ChainStatus[] rootElementStatus = onlineChain.ChainElements[2].ChainElementStatus;

        if (rootElementStatus.Length != 0)
        {
            Assert.Equal(1, rootElementStatus.Length);
            Assert.Equal(X509ChainStatusFlags.UntrustedRoot, rootElementStatus[0].Status);
        }

        // Now that everything is cached, try again in Offline mode.
        offlineChain.ChainPolicy.VerificationFlags = onlineChain.ChainPolicy.VerificationFlags;
        offlineChain.ChainPolicy.VerificationTime = onlineChain.ChainPolicy.VerificationTime;
        offlineChain.ChainPolicy.RevocationMode = X509RevocationMode.Offline;
        offlineChain.ChainPolicy.RevocationFlag = onlineChain.ChainPolicy.RevocationFlag;

        valid = offlineChain.Build(cert);
        Assert.True(valid, "Offline Chain Built Validly");

        // Everything should look just like the online chain:
        Assert.Equal(onlineChain.ChainElements.Count, offlineChain.ChainElements.Count);

        for (int i = 0; i < offlineChain.ChainElements.Count; i++)
        {
            X509ChainElement onlineElement = onlineChain.ChainElements[i];
            X509ChainElement offlineElement = offlineChain.ChainElements[i];

            Assert.Equal(onlineElement.ChainElementStatus, offlineElement.ChainElementStatus);
            Assert.Equal(onlineElement.Certificate, offlineElement.Certificate);
        }
    }
}

// X509Chain.Create() must return a non-null instance.
[Fact]
public static void Create()
{
    using (var chain = X509Chain.Create())
        Assert.NotNull(chain);
}
}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections.Generic;
using System.Diagnostics;
using System.Dynamic.Utils;
using System.Linq.Expressions;

namespace System.Dynamic
{
    /// <summary>
    /// Represents the dynamic binding and a binding logic of an object participating in the dynamic binding.
    /// </summary>
    public class DynamicMetaObject
    {
        // Unique marker stored in _value when no runtime value was supplied.
        // Using a sentinel avoids needing a separate hasValue field.
        private static readonly object s_noValueSentinel = new object();

        private readonly object _value = s_noValueSentinel;

        /// <summary>
        /// Represents an empty array of type <see cref="DynamicMetaObject"/>. This field is read-only.
        /// </summary>
        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Security", "CA2105:ArrayFieldsShouldNotBeReadOnly")]
        public static readonly DynamicMetaObject[] EmptyMetaObjects = Array.Empty<DynamicMetaObject>();

        /// <summary>
        /// Initializes a new instance of the <see cref="DynamicMetaObject"/> class with no runtime value.
        /// </summary>
        /// <param name="expression">The expression representing this <see cref="DynamicMetaObject"/> during the dynamic binding process.</param>
        /// <param name="restrictions">The set of binding restrictions under which the binding is valid.</param>
        public DynamicMetaObject(Expression expression, BindingRestrictions restrictions)
        {
            ContractUtils.RequiresNotNull(expression, nameof(expression));
            ContractUtils.RequiresNotNull(restrictions, nameof(restrictions));
            Expression = expression;
            Restrictions = restrictions;
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="DynamicMetaObject"/> class with a known runtime value.
        /// </summary>
        /// <param name="expression">The expression representing this <see cref="DynamicMetaObject"/> during the dynamic binding process.</param>
        /// <param name="restrictions">The set of binding restrictions under which the binding is valid.</param>
        /// <param name="value">The runtime value represented by the <see cref="DynamicMetaObject"/>.</param>
        public DynamicMetaObject(Expression expression, BindingRestrictions restrictions, object value)
            : this(expression, restrictions)
        {
            _value = value;
        }

        /// <summary>
        /// The expression representing the <see cref="DynamicMetaObject"/> during the dynamic binding process.
        /// </summary>
        public Expression Expression { get; }

        /// <summary>
        /// The set of binding restrictions under which the binding is valid.
        /// </summary>
        public BindingRestrictions Restrictions { get; }

        /// <summary>
        /// The runtime value represented by this <see cref="DynamicMetaObject"/>, or null when no value is present.
        /// </summary>
        public object Value => HasValue ? _value : null;

        /// <summary>
        /// Gets a value indicating whether the <see cref="DynamicMetaObject"/> has the runtime value.
        /// </summary>
        public bool HasValue => _value != s_noValueSentinel;

        /// <summary>
        /// Gets the <see cref="Type"/> of the runtime value or null if the <see cref="DynamicMetaObject"/> has no value associated with it.
        /// </summary>
        public Type RuntimeType
        {
            get
            {
                if (!HasValue)
                {
                    return null;
                }

                Type compileTimeType = Expression.Type;

                // A value type known at compile time cannot be anything else at runtime.
                if (compileTimeType.IsValueType)
                {
                    return compileTimeType;
                }

                return Value?.GetType();
            }
        }

        /// <summary>
        /// Gets the limit type of the <see cref="DynamicMetaObject"/>.
        /// </summary>
        /// <remarks>The most specific type known for the represented object:
        /// <see cref="RuntimeType"/> when a runtime value is available, otherwise the static type of <see cref="Expression"/>.</remarks>
        public Type LimitType => RuntimeType ?? Expression.Type;

        /// <summary>
        /// Performs the binding of the dynamic conversion operation by deferring to the binder's fallback.
        /// </summary>
        /// <param name="binder">An instance of the <see cref="ConvertBinder"/> that represents the details of the dynamic operation.</param>
        /// <returns>The new <see cref="DynamicMetaObject"/> representing the result of the binding.</returns>
        public virtual DynamicMetaObject BindConvert(ConvertBinder binder)
        {
            ContractUtils.RequiresNotNull(binder, nameof(binder));
            return binder.FallbackConvert(this);
        }

        /// <summary>
        /// Performs the binding of the dynamic get member operation by deferring to the binder's fallback.
        /// </summary>
        /// <param name="binder">An instance of the <see cref="GetMemberBinder"/> that represents the details of the dynamic operation.</param>
        /// <returns>The new <see cref="DynamicMetaObject"/> representing the result of the binding.</returns>
        public virtual DynamicMetaObject BindGetMember(GetMemberBinder binder)
        {
            ContractUtils.RequiresNotNull(binder, nameof(binder));
            return binder.FallbackGetMember(this);
        }

        /// <summary>
        /// Performs the binding of the dynamic set member operation by deferring to the binder's fallback.
        /// </summary>
        /// <param name="binder">An instance of the <see cref="SetMemberBinder"/> that represents the details of the dynamic operation.</param>
        /// <param name="value">The <see cref="DynamicMetaObject"/> representing the value for the set member operation.</param>
        /// <returns>The new <see cref="DynamicMetaObject"/> representing the result of the binding.</returns>
        public virtual DynamicMetaObject BindSetMember(SetMemberBinder binder, DynamicMetaObject value)
        {
            ContractUtils.RequiresNotNull(binder, nameof(binder));
            return binder.FallbackSetMember(this, value);
        }

        /// <summary>
        /// Performs the binding of the dynamic delete member operation by deferring to the binder's fallback.
        /// </summary>
        /// <param name="binder">An instance of the <see cref="DeleteMemberBinder"/> that represents the details of the dynamic operation.</param>
        /// <returns>The new <see cref="DynamicMetaObject"/> representing the result of the binding.</returns>
        public virtual DynamicMetaObject BindDeleteMember(DeleteMemberBinder binder)
        {
            ContractUtils.RequiresNotNull(binder, nameof(binder));
            return binder.FallbackDeleteMember(this);
        }

        /// <summary>
        /// Performs the binding of the dynamic get index operation by deferring to the binder's fallback.
        /// </summary>
        /// <param name="binder">An instance of the <see cref="GetIndexBinder"/> that represents the details of the dynamic operation.</param>
        /// <param name="indexes">An array of <see cref="DynamicMetaObject"/> instances - indexes for the get index operation.</param>
        /// <returns>The new <see cref="DynamicMetaObject"/> representing the result of the binding.</returns>
        public virtual DynamicMetaObject BindGetIndex(GetIndexBinder binder, DynamicMetaObject[] indexes)
        {
            ContractUtils.RequiresNotNull(binder, nameof(binder));
            return binder.FallbackGetIndex(this, indexes);
        }

        /// <summary>
        /// Performs the binding of the dynamic set index operation by deferring to the binder's fallback.
        /// </summary>
        /// <param name="binder">An instance of the <see cref="SetIndexBinder"/> that represents the details of the dynamic operation.</param>
        /// <param name="indexes">An array of <see cref="DynamicMetaObject"/> instances - indexes for the set index operation.</param>
        /// <param name="value">The <see cref="DynamicMetaObject"/> representing the value for the set index operation.</param>
        /// <returns>The new <see cref="DynamicMetaObject"/> representing the result of the binding.</returns>
        public virtual DynamicMetaObject BindSetIndex(SetIndexBinder binder, DynamicMetaObject[] indexes, DynamicMetaObject value)
        {
            ContractUtils.RequiresNotNull(binder, nameof(binder));
            return binder.FallbackSetIndex(this, indexes, value);
        }

        /// <summary>
        /// Performs the binding of the dynamic delete index operation by deferring to the binder's fallback.
        /// </summary>
        /// <param name="binder">An instance of the <see cref="DeleteIndexBinder"/> that represents the details of the dynamic operation.</param>
        /// <param name="indexes">An array of <see cref="DynamicMetaObject"/> instances - indexes for the delete index operation.</param>
        /// <returns>The new <see cref="DynamicMetaObject"/> representing the result of the binding.</returns>
        public virtual DynamicMetaObject BindDeleteIndex(DeleteIndexBinder binder, DynamicMetaObject[] indexes)
        {
            ContractUtils.RequiresNotNull(binder, nameof(binder));
            return binder.FallbackDeleteIndex(this, indexes);
        }

        /// <summary>
        /// Performs the binding of the dynamic invoke member operation by deferring to the binder's fallback.
        /// </summary>
        /// <param name="binder">An instance of the <see cref="InvokeMemberBinder"/> that represents the details of the dynamic operation.</param>
        /// <param name="args">An array of <see cref="DynamicMetaObject"/> instances - arguments to the invoke member operation.</param>
        /// <returns>The new <see cref="DynamicMetaObject"/> representing the result of the binding.</returns>
        public virtual DynamicMetaObject BindInvokeMember(InvokeMemberBinder binder, DynamicMetaObject[] args)
        {
            ContractUtils.RequiresNotNull(binder, nameof(binder));
            return binder.FallbackInvokeMember(this, args);
        }

        /// <summary>
        /// Performs the binding of the dynamic invoke operation by deferring to the binder's fallback.
        /// </summary>
        /// <param name="binder">An instance of the <see cref="InvokeBinder"/> that represents the details of the dynamic operation.</param>
        /// <param name="args">An array of <see cref="DynamicMetaObject"/> instances - arguments to the invoke operation.</param>
        /// <returns>The new <see cref="DynamicMetaObject"/> representing the result of the binding.</returns>
        public virtual DynamicMetaObject BindInvoke(InvokeBinder binder, DynamicMetaObject[] args)
        {
            ContractUtils.RequiresNotNull(binder, nameof(binder));
            return binder.FallbackInvoke(this, args);
        }

        /// <summary>
        /// Performs the binding of the dynamic create instance operation by deferring to the binder's fallback.
        /// </summary>
        /// <param name="binder">An instance of the <see cref="CreateInstanceBinder"/> that represents the details of the dynamic operation.</param>
        /// <param name="args">An array of <see cref="DynamicMetaObject"/> instances - arguments to the create instance operation.</param>
        /// <returns>The new <see cref="DynamicMetaObject"/> representing the result of the binding.</returns>
        public virtual DynamicMetaObject BindCreateInstance(CreateInstanceBinder binder, DynamicMetaObject[] args)
        {
            ContractUtils.RequiresNotNull(binder, nameof(binder));
            return binder.FallbackCreateInstance(this, args);
        }

        /// <summary>
        /// Performs the binding of the dynamic unary operation by deferring to the binder's fallback.
        /// </summary>
        /// <param name="binder">An instance of the <see cref="UnaryOperationBinder"/> that represents the details of the dynamic operation.</param>
        /// <returns>The new <see cref="DynamicMetaObject"/> representing the result of the binding.</returns>
        public virtual DynamicMetaObject BindUnaryOperation(UnaryOperationBinder binder)
        {
            ContractUtils.RequiresNotNull(binder, nameof(binder));
            return binder.FallbackUnaryOperation(this);
        }

        /// <summary>
        /// Performs the binding of the dynamic binary operation by deferring to the binder's fallback.
        /// </summary>
        /// <param name="binder">An instance of the <see cref="BinaryOperationBinder"/> that represents the details of the dynamic operation.</param>
        /// <param name="arg">An instance of the <see cref="DynamicMetaObject"/> representing the right hand side of the binary operation.</param>
        /// <returns>The new <see cref="DynamicMetaObject"/> representing the result of the binding.</returns>
        public virtual DynamicMetaObject BindBinaryOperation(BinaryOperationBinder binder, DynamicMetaObject arg)
        {
            ContractUtils.RequiresNotNull(binder, nameof(binder));
            return binder.FallbackBinaryOperation(this, arg);
        }

        /// <summary>
        /// Returns the enumeration of all dynamic member names. The default implementation exposes no members.
        /// </summary>
        /// <returns>The list of dynamic member names.</returns>
        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Design", "CA1024:UsePropertiesWhereAppropriate")]
        public virtual IEnumerable<string> GetDynamicMemberNames() => Array.Empty<string>();

        /// <summary>
        /// Returns the list of expressions represented by the <see cref="DynamicMetaObject"/> instances.
        /// </summary>
        /// <param name="objects">An array of <see cref="DynamicMetaObject"/> instances to extract expressions from.</param>
        /// <returns>The array of expressions.</returns>
        internal static Expression[] GetExpressions(DynamicMetaObject[] objects)
        {
            ContractUtils.RequiresNotNull(objects, nameof(objects));

            Expression[] expressions = new Expression[objects.Length];
            for (int index = 0; index < objects.Length; index++)
            {
                DynamicMetaObject metaObject = objects[index];
                ContractUtils.RequiresNotNull(metaObject, nameof(objects));

                Expression expression = metaObject.Expression;
                Debug.Assert(expression != null, "Unexpected null expression; ctor should have caught this.");
                expressions[index] = expression;
            }

            return expressions;
        }

        /// <summary>
        /// Creates a meta-object for the specified object.
        /// </summary>
        /// <param name="value">The object to get a meta-object for.</param>
        /// <param name="expression">The expression representing this <see cref="DynamicMetaObject"/> during the dynamic binding process.</param>
        /// <returns>
        /// If the given object implements <see cref="IDynamicMetaObjectProvider"/> and is not a remote object from outside the current AppDomain,
        /// returns the object's specific meta-object returned by <see cref="IDynamicMetaObjectProvider.GetMetaObject"/>. Otherwise a plain new meta-object
        /// with no restrictions is created and returned.
        /// </returns>
        public static DynamicMetaObject Create(object value, Expression expression)
        {
            ContractUtils.RequiresNotNull(expression, nameof(expression));

            if (value is IDynamicMetaObjectProvider ido)
            {
                DynamicMetaObject idoMetaObject = ido.GetMetaObject(expression);

                // The provider must hand back a meta-object that carries a non-null value
                // and wraps the exact expression instance we passed in.
                if (idoMetaObject == null ||
                    !idoMetaObject.HasValue ||
                    idoMetaObject.Value == null ||
                    (object)idoMetaObject.Expression != (object)expression)
                {
                    throw System.Linq.Expressions.Error.InvalidMetaObjectCreated(ido.GetType());
                }

                return idoMetaObject;
            }

            return new DynamicMetaObject(expression, BindingRestrictions.Empty, value);
        }
    }
}
//-----------------------------------------------------------------------
// <copyright file="SessionApi.cs" company="Google LLC">
//
// Copyright 2017 Google LLC. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// </copyright>
//-----------------------------------------------------------------------

namespace GoogleARCoreInternal
{
    using System;
    using System.Collections.Generic;
    using System.Runtime.InteropServices;
    using GoogleARCore;
    using UnityEngine;

#if UNITY_IOS && !UNITY_EDITOR
    using AndroidImport = GoogleARCoreInternal.DllImportNoop;
    using IOSImport = System.Runtime.InteropServices.DllImportAttribute;
#else
    using AndroidImport = System.Runtime.InteropServices.DllImportAttribute;
    using IOSImport = GoogleARCoreInternal.DllImportNoop;
#endif

    /// <summary>
    /// Managed wrapper around the native ArSession C API. All calls route through the
    /// native session handle owned by the supplied <c>NativeSession</c>.
    /// </summary>
    internal class SessionApi
    {
        private NativeSession m_NativeSession;

        public SessionApi(NativeSession nativeSession)
        {
            m_NativeSession = nativeSession;
        }

        /// <summary>Reports "Unity" plus the engine version to the native session.</summary>
        public void ReportEngineType()
        {
            ExternApi.ArSession_reportEngineType(
                m_NativeSession.SessionHandle, "Unity", Application.unityVersion);
        }

        /// <summary>
        /// Fills <paramref name="supportedCameraConfigHandles"/> and
        /// <paramref name="supportedCameraConfigs"/> with the configs matching the filter,
        /// keeping only those whose facing direction equals
        /// <paramref name="cameraFacingDirection"/>. Both output lists are cleared first.
        /// Ownership of the added config handles passes to the caller; skipped handles are
        /// not destroyed here.
        /// </summary>
        public void GetSupportedCameraConfigurationsWithFilter(
            ARCoreCameraConfigFilter cameraConfigFilter, IntPtr cameraConfigListHandle,
            List<IntPtr> supportedCameraConfigHandles, List<CameraConfig> supportedCameraConfigs,
            DeviceCameraDirection cameraFacingDirection)
        {
            // The native filter handle only needs to live for the duration of the query.
            IntPtr cameraConfigFilterHandle =
                m_NativeSession.CameraConfigFilterApi.Create(cameraConfigFilter);
            ExternApi.ArSession_getSupportedCameraConfigsWithFilter(
                m_NativeSession.SessionHandle, cameraConfigFilterHandle, cameraConfigListHandle);
            m_NativeSession.CameraConfigFilterApi.Destroy(cameraConfigFilterHandle);

            supportedCameraConfigHandles.Clear();
            supportedCameraConfigs.Clear();
            int listSize = m_NativeSession.CameraConfigListApi.GetSize(cameraConfigListHandle);

            for (int i = 0; i < listSize; i++)
            {
                IntPtr cameraConfigHandle = m_NativeSession.CameraConfigApi.Create();
                m_NativeSession.CameraConfigListApi.GetItemAt(
                    cameraConfigListHandle, i, cameraConfigHandle);

                // Skip camera config that has a different camera facing direction.
                DeviceCameraDirection configDirection =
                    m_NativeSession.CameraConfigApi.GetFacingDirection(cameraConfigHandle)
                        .ToDeviceCameraDirection();
                if (configDirection != cameraFacingDirection)
                {
                    continue;
                }

                supportedCameraConfigHandles.Add(cameraConfigHandle);
                supportedCameraConfigs.Add(_CreateCameraConfig(cameraConfigHandle));
            }
        }

        /// <summary>Applies the given camera config to the native session.</summary>
        public ApiArStatus SetCameraConfig(IntPtr cameraConfigHandle)
        {
            return ExternApi.ArSession_setCameraConfig(
                m_NativeSession.SessionHandle, cameraConfigHandle);
        }

        /// <summary>
        /// Returns the session's current camera config. Under Instant Preview this is
        /// unsupported and a default <c>CameraConfig</c> is returned instead.
        /// </summary>
        public CameraConfig GetCameraConfig()
        {
            IntPtr cameraConfigHandle = m_NativeSession.CameraConfigApi.Create();
            if (InstantPreviewManager.IsProvidingPlatform)
            {
                // NOTE: the handle created above is not destroyed on this early-return path.
                InstantPreviewManager.LogLimitedSupportMessage("access camera config");
                return new CameraConfig();
            }

            ExternApi.ArSession_getCameraConfig(m_NativeSession.SessionHandle, cameraConfigHandle);
            CameraConfig currentCameraConfig = _CreateCameraConfig(cameraConfigHandle);
            m_NativeSession.CameraConfigApi.Destroy(cameraConfigHandle);
            return currentCameraConfig;
        }

        /// <summary>
        /// Replaces the contents of <paramref name="trackables"/> with every trackable the
        /// session knows about. Handles that fail to map to a managed Trackable are released.
        /// </summary>
        public void GetAllTrackables(List<Trackable> trackables)
        {
            IntPtr listHandle = m_NativeSession.TrackableListApi.Create();
            ExternApi.ArSession_getAllTrackables(
                m_NativeSession.SessionHandle, ApiTrackableType.BaseTrackable, listHandle);

            trackables.Clear();
            int count = m_NativeSession.TrackableListApi.GetCount(listHandle);
            for (int i = 0; i < count; i++)
            {
                IntPtr trackableHandle =
                    m_NativeSession.TrackableListApi.AcquireItem(listHandle, i);

                Trackable trackable = m_NativeSession.TrackableFactory(trackableHandle);
                if (trackable != null)
                {
                    trackables.Add(trackable);
                }
                else
                {
                    // The factory did not take ownership, so release the acquired handle.
                    m_NativeSession.TrackableApi.Release(trackableHandle);
                }
            }

            m_NativeSession.TrackableListApi.Destroy(listHandle);
        }

        /// <summary>
        /// Forwards the screen orientation and size to the native session, translating
        /// Unity's ScreenOrientation into Android Surface rotation constants.
        /// </summary>
        public void SetDisplayGeometry(ScreenOrientation orientation, int width, int height)
        {
            // Android Surface rotation constants (ROTATION_0..ROTATION_270).
            const int androidRotation0 = 0;
            const int androidRotation90 = 1;
            const int androidRotation180 = 2;
            const int androidRotation270 = 3;

            int androidOrientation = 0;
            switch (orientation)
            {
                case ScreenOrientation.LandscapeLeft:
                    androidOrientation = androidRotation90;
                    break;
                case ScreenOrientation.LandscapeRight:
                    androidOrientation = androidRotation270;
                    break;
                case ScreenOrientation.Portrait:
                    androidOrientation = androidRotation0;
                    break;
                case ScreenOrientation.PortraitUpsideDown:
                    androidOrientation = androidRotation180;
                    break;
            }

            ExternApi.ArSession_setDisplayGeometry(
                m_NativeSession.SessionHandle, androidOrientation, width, height);
        }

        /// <summary>
        /// Creates a session-anchored Anchor at the given pose. The temporary native pose
        /// handle is destroyed after the anchor is acquired.
        /// </summary>
        public Anchor CreateAnchor(Pose pose)
        {
            IntPtr poseHandle = m_NativeSession.PoseApi.Create(pose);
            IntPtr anchorHandle = IntPtr.Zero;
            ExternApi.ArSession_acquireNewAnchor(
                m_NativeSession.SessionHandle, poseHandle, ref anchorHandle);
            var anchorResult = Anchor.Factory(m_NativeSession, anchorHandle);
            m_NativeSession.PoseApi.Destroy(poseHandle);
            return anchorResult;
        }

        /// <summary>
        /// Hosts the given platform anchor as a cloud anchor; on success
        /// <paramref name="cloudAnchorHandle"/> receives the new native handle.
        /// </summary>
        public ApiArStatus CreateCloudAnchor(
            IntPtr platformAnchorHandle, out IntPtr cloudAnchorHandle)
        {
            cloudAnchorHandle = IntPtr.Zero;
            var result = ExternApi.ArSession_hostAndAcquireNewCloudAnchor(
                m_NativeSession.SessionHandle, platformAnchorHandle, ref cloudAnchorHandle);
            return result;
        }

        /// <summary>Resolves a previously hosted cloud anchor by its id.</summary>
        public ApiArStatus ResolveCloudAnchor(String cloudAnchorId, out IntPtr cloudAnchorHandle)
        {
            cloudAnchorHandle = IntPtr.Zero;
            return ExternApi.ArSession_resolveAndAcquireNewCloudAnchor(
                m_NativeSession.SessionHandle, cloudAnchorId, ref cloudAnchorHandle);
        }

        /// <summary>Returns whether the native session supports the given depth mode.</summary>
        public bool IsDepthModeSupported(ApiDepthMode depthMode)
        {
            int isSupported = 0;
            ExternApi.ArSession_isDepthModeSupported(
                m_NativeSession.SessionHandle, depthMode, ref isSupported);
            return isSupported != 0;
        }

        // Reads dimensions, FPS range, and depth-sensor usage from a native camera config
        // handle and packages them into a managed CameraConfig. Does not destroy the handle.
        private CameraConfig _CreateCameraConfig(IntPtr cameraConfigHandle)
        {
            int imageWidth = 0;
            int imageHeight = 0;
            int textureWidth = 0;
            int textureHeight = 0;
            int minFps = 0;
            int maxFps = 0;
            CameraConfigDepthSensorUsages depthSensorUsage =
                m_NativeSession.CameraConfigApi.GetDepthSensorUsage(cameraConfigHandle);
            m_NativeSession.CameraConfigApi.GetImageDimensions(
                cameraConfigHandle, out imageWidth, out imageHeight);
            m_NativeSession.CameraConfigApi.GetTextureDimensions(
                cameraConfigHandle, out textureWidth, out textureHeight);
            m_NativeSession.CameraConfigApi.GetFpsRange(
                cameraConfigHandle, out minFps, out maxFps);

            return new CameraConfig(new Vector2(imageWidth, imageHeight),
                new Vector2(textureWidth, textureHeight), minFps, maxFps, depthSensorUsage);
        }

        // P/Invoke declarations. AndroidImport aliases to DllImport on Android builds and to
        // a no-op attribute on iOS (see the #if block at the top of the namespace).
        private struct ExternApi
        {
#pragma warning disable 626
            [AndroidImport(ApiConstants.ARCoreNativeApi)]
            public static extern int ArSession_configure(IntPtr sessionHandle, IntPtr config);

            [AndroidImport(ApiConstants.ARCoreNativeApi)]
            public static extern void ArSession_getSupportedCameraConfigsWithFilter(
                IntPtr sessionHandle, IntPtr cameraConfigFilterHandle,
                IntPtr cameraConfigListHandle);

            [AndroidImport(ApiConstants.ARCoreNativeApi)]
            public static extern ApiArStatus ArSession_setCameraConfig(
                IntPtr sessionHandle, IntPtr cameraConfigHandle);

            [AndroidImport(ApiConstants.ARCoreNativeApi)]
            public static extern void ArSession_getCameraConfig(
                IntPtr sessionHandle, IntPtr cameraConfigHandle);

            [AndroidImport(ApiConstants.ARCoreNativeApi)]
            public static extern void ArSession_getAllTrackables(
                IntPtr sessionHandle, ApiTrackableType filterType, IntPtr trackableList);

            [AndroidImport(ApiConstants.ARCoreNativeApi)]
            public static extern void ArSession_setDisplayGeometry(
                IntPtr sessionHandle, int rotation, int width, int height);

            [AndroidImport(ApiConstants.ARCoreNativeApi)]
            public static extern int ArSession_acquireNewAnchor(
                IntPtr sessionHandle, IntPtr poseHandle, ref IntPtr anchorHandle);

            [AndroidImport(ApiConstants.ARCoreNativeApi)]
            public static extern void ArSession_isDepthModeSupported(
                IntPtr sessionHandle, ApiDepthMode depthMode, ref int isSupported);
#pragma warning restore 626

            [DllImport(ApiConstants.ARCoreNativeApi)]
            public static extern void ArSession_reportEngineType(
                IntPtr sessionHandle, string engineType, string engineVersion);

            [DllImport(ApiConstants.ARCoreNativeApi)]
            public static extern ApiArStatus ArSession_hostAndAcquireNewCloudAnchor(
                IntPtr sessionHandle, IntPtr anchorHandle, ref IntPtr cloudAnchorHandle);

            [DllImport(ApiConstants.ARCoreNativeApi)]
            public static extern ApiArStatus ArSession_resolveAndAcquireNewCloudAnchor(
                IntPtr sessionHandle, String cloudAnchorId, ref IntPtr cloudAnchorHandle);
        }
    }
}
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

using DebuggerGrpc;
using Grpc.Core;
using System;
using System.Collections.Concurrent;
using System.Diagnostics;
using System.IO;
using System.IO.Pipes;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;

namespace DebuggerGrpcClient
{
    /// <summary>
    /// Creates <see cref="PipeCallInvoker"/> instances with the default number of pipe pairs.
    /// </summary>
    public class PipeCallInvokerFactory
    {
        /// <summary>
        /// Number of Grpc "channels" that can be used concurrently. For optimal performance, there
        /// should be one for each thread that makes Grpc calls. Right now, there are 4 or more
        /// different threads that make rpc calls:
        /// - The main thread
        /// - The GC Finalizer thread
        /// - The event manager thread (in particular calls WaitForEvent, blocking for a second)
        /// - Asynchronous tasks during startup (though they don't seem to overlap)
        /// In practice, there are often 2 concurrent rpc calls, rarely 3, pretty much never 4.
        /// </summary>
        const int NUM_GRPC_PIPE_PAIRS = 4;

        public virtual PipeCallInvoker Create()
        {
            return new PipeCallInvoker(NUM_GRPC_PIPE_PAIRS);
        }
    }

    /// <summary>
    /// Call invoker that uses anonymous pipes to send rpc messages. Multiple threads can send
    /// messages concurrently, depending on the number of pipe pairs.
    /// </summary>
    public class PipeCallInvoker : CallInvoker, IDisposable
    {
        // All pipe pairs ever created (fixed set, owned by this instance).
        readonly PipePair[] pipePairs;

        // Pairs not currently in use by an in-flight rpc; items are taken before a call
        // and returned in the call's finally block.
        readonly ConcurrentBag<PipePair> availablePipePairs;

        // Counting semaphore sized to pipePairs.Length; guarantees TryTake below succeeds.
        readonly SemaphoreSlim pipeLock;

        public PipeCallInvoker(int numPipePairs)
        {
            pipePairs = new PipePair[numPipePairs];
            availablePipePairs = new ConcurrentBag<PipePair>();
            for (int n = 0; n < numPipePairs; ++n)
            {
                pipePairs[n] = new PipePair();
                availablePipePairs.Add(pipePairs[n]);
            }

            // Allow pipePairs.Length threads to run concurrently, so that each one can pick its
            // own pipe.
            pipeLock = new SemaphoreSlim(pipePairs.Length, pipePairs.Length);
        }

        ~PipeCallInvoker()
        {
            Dispose(false);
        }

        void Dispose(bool disposing)
        {
            if (disposing)
            {
                pipeLock.Dispose();

                // Disposing the pipes will cause an EndOfStreamException on the server, which it
                // interprets as client shutdown.
                foreach (PipePair pipePair in pipePairs)
                {
                    pipePair.InPipe.Dispose();
                    pipePair.OutPipe.Dispose();
                }
            }
        }

        public void Dispose()
        {
            Dispose(true);
            GC.SuppressFinalize(this);
        }

        /// <summary>
        /// Returns the client handles as strings for all input and output pipes.
        /// </summary>
        public void GetClientPipeHandles(out string[] inPipeClientHandles,
            out string[] outPipeClientHandles)
        {
            inPipeClientHandles =
                pipePairs.Select(pp => pp.InPipe.GetClientHandleAsString()).ToArray();
            outPipeClientHandles =
                pipePairs.Select(pp => pp.OutPipe.GetClientHandleAsString()).ToArray();
        }

        /// <summary>
        /// Disposes the local copies of the client pipe handles. Must be called after starting the
        /// DebuggerGrpcServer process, so that the pipes in this process are closed when the
        /// server shuts down.
        /// </summary>
        public virtual void DisposeLocalCopyOfClientPipeHandles()
        {
            foreach (PipePair pipePair in pipePairs)
            {
                pipePair.InPipe.DisposeLocalCopyOfClientHandle();
                pipePair.OutPipe.DisposeLocalCopyOfClientHandle();
            }
        }

        /// <summary>
        /// Synchronous unary rpc. Wire protocol (must match the server): rpc name length,
        /// rpc name bytes (ASCII), request length, request bytes; then reads response length
        /// and response bytes. A pipe pair is checked out for the duration of the call.
        /// </summary>
        public override TResponse BlockingUnaryCall<TRequest, TResponse>(
            Method<TRequest, TResponse> method, string host, CallOptions options, TRequest request)
        {
            // Wait for an available pipe.
            pipeLock.Wait();
            PipePair pipePair;
            availablePipePairs.TryTake(out pipePair);
            Debug.Assert(pipePair != null);
            try
            {
                // Create binary reader/writer, but be sure to leave the stream open!
                var writer = new BinaryWriter(pipePair.OutPipe, Encoding.Default, true);
                var reader = new BinaryReader(pipePair.InPipe, Encoding.Default, true);

                // Send the RPC name.
                // NOTE(review): the length sent is the char count, while the bytes are ASCII
                // encoded — these only agree for pure-ASCII method names; presumably gRPC
                // method names are always ASCII, but worth confirming.
                writer.Write(method.FullName.Length);
                writer.Write(Encoding.ASCII.GetBytes(method.FullName));

                // Send the request.
                using (var serializationContext = new SimpleSerializationContext())
                {
                    method.RequestMarshaller.ContextualSerializer(request, serializationContext);
                    byte[] requestBytes = serializationContext.GetPayload();
                    writer.Write(requestBytes.Length);
                    writer.Write(requestBytes);
                }

                // Read the response.
                int size = reader.ReadInt32();
                byte[] responseBytes = reader.ReadBytes(size);
                var deserializationContext = new SimpleDeserializationContext(responseBytes);
                return method.ResponseMarshaller.ContextualDeserializer(deserializationContext);
            }
            // Unfortunately, RpcExceptions can't be nested with InnerException.
            catch (EndOfStreamException e)
            {
                throw new RpcException(new Status(StatusCode.Unknown, e.ToString()),
                    "Connection to server lost. Did it shut down?");
            }
            catch (Exception e) when (!(e is RpcException))
            {
                throw new RpcException(new Status(StatusCode.Unknown, e.ToString()),
                    "Unknown failure: " + e);
            }
            finally
            {
                // Return the pipe pair before releasing the semaphore so the next waiter
                // is guaranteed to find one in the bag.
                availablePipePairs.Add(pipePair);
                pipeLock.Release();
            }
        }

        /// <summary>
        /// Async twin of <see cref="BlockingUnaryCall{TRequest,TResponse}"/>; same wire
        /// protocol, using the async pipe reader/writer helpers below.
        /// </summary>
        async Task<TResponse> AsyncCallAsync<TRequest, TResponse>(
            Method<TRequest, TResponse> method, TRequest request)
        {
            await pipeLock.WaitAsync();
            PipePair pipePair;
            availablePipePairs.TryTake(out pipePair);
            Debug.Assert(pipePair != null);
            try
            {
                var writer = new AsyncBinaryWriter(pipePair.OutPipe);
                var reader = new AsyncBinaryReader(pipePair.InPipe);

                // Send the RPC name.
                await writer.WriteAsync(method.FullName.Length);
                await writer.WriteAsync(Encoding.ASCII.GetBytes(method.FullName));

                // Send the request.
                using (var serializationContext = new SimpleSerializationContext())
                {
                    method.RequestMarshaller.ContextualSerializer(request, serializationContext);
                    byte[] requestBytes = serializationContext.GetPayload();
                    await writer.WriteAsync(requestBytes.Length);
                    await writer.WriteAsync(requestBytes);
                }

                // Read the response.
                int size = await reader.ReadInt32Async();
                byte[] responseBytes = await reader.ReadBytesAsync(size);
                var deserializationContext = new SimpleDeserializationContext(responseBytes);
                return method.ResponseMarshaller.ContextualDeserializer(deserializationContext);
            }
            // Unfortunately, RpcExceptions can't be nested with InnerException.
            catch (EndOfStreamException e)
            {
                throw new RpcException(new Status(StatusCode.Unknown, e.ToString()),
                    "Connection to server lost. Did it shut down?");
            }
            catch (Exception e) when (!(e is RpcException))
            {
                throw new RpcException(new Status(StatusCode.Unknown, e.ToString()),
                    "Unknown failure: " + e);
            }
            finally
            {
                availablePipePairs.Add(pipePair);
                pipeLock.Release();
            }
        }

        public override AsyncUnaryCall<TResponse> AsyncUnaryCall<TRequest, TResponse>(
            Method<TRequest, TResponse> method, string host, CallOptions options, TRequest request)
        {
            // Not sure what these are used for, but they never seem to be called.
            Task<Metadata> responseHeadersAsync = Task.FromResult(new Metadata());
            Func<Status> getStatusFunc = () => new Status();
            Func<Metadata> getTrailersFunc = () => new Metadata();
            Action disposeAction = () => { };
            return new AsyncUnaryCall<TResponse>(AsyncCallAsync(method, request),
                responseHeadersAsync, getStatusFunc, getTrailersFunc, disposeAction);
        }

        // Streaming rpcs are not used by this client; only unary calls are supported.
        public override AsyncClientStreamingCall<TRequest, TResponse>
            AsyncClientStreamingCall<TRequest, TResponse>(Method<TRequest, TResponse> method,
                string host, CallOptions options)
        {
            throw new NotImplementedException();
        }

        public override AsyncDuplexStreamingCall<TRequest, TResponse>
            AsyncDuplexStreamingCall<TRequest, TResponse>(Method<TRequest, TResponse> method,
                string host, CallOptions options)
        {
            throw new NotImplementedException();
        }

        public override AsyncServerStreamingCall<TResponse>
            AsyncServerStreamingCall<TRequest, TResponse>(Method<TRequest, TResponse> method,
                string host, CallOptions options, TRequest request)
        {
            throw new NotImplementedException();
        }

        /// <summary>
        /// Input/output pipes for Grpc communication.
        /// </summary>
        class PipePair
        {
            // Confusingly, the grpc server is the pipe client and vice versa.
            public AnonymousPipeServerStream InPipe { get; }
            public AnonymousPipeServerStream OutPipe { get; }

            public PipePair()
            {
                // Inheritable handles so the spawned server process can use the client ends.
                InPipe = new AnonymousPipeServerStream(PipeDirection.In,
                    HandleInheritability.Inheritable);
                OutPipe = new AnonymousPipeServerStream(PipeDirection.Out,
                    HandleInheritability.Inheritable);
            }
        }

        /// <summary>
        /// Asynchronous version of BinaryWriter limited to the functionality needed.
        /// </summary>
        class AsyncBinaryWriter
        {
            readonly AnonymousPipeServerStream pipe;

            public AsyncBinaryWriter(AnonymousPipeServerStream pipe)
            {
                this.pipe = pipe;
            }

            public async Task WriteAsync(int value)
            {
                byte[] bytes = BitConverter.GetBytes(value);

                // The server expects little endian.
                if (!BitConverter.IsLittleEndian)
                {
                    Array.Reverse(bytes);
                }

                await WriteAsync(bytes);
            }

            public async Task WriteAsync(byte[] bytes)
            {
                await pipe.WriteAsync(bytes, 0, bytes.Length);
            }
        }

        /// <summary>
        /// Asynchronous version of BinaryReader limited to the functionality needed.
        /// </summary>
        class AsyncBinaryReader
        {
            readonly AnonymousPipeServerStream pipe;

            public AsyncBinaryReader(AnonymousPipeServerStream pipe)
            {
                this.pipe = pipe;
            }

            public async Task<int> ReadInt32Async()
            {
                byte[] bytes = await ReadBytesAsync(sizeof(int));

                // The server expects little endian.
                if (!BitConverter.IsLittleEndian)
                {
                    Array.Reverse(bytes);
                }

                return BitConverter.ToInt32(bytes, 0);
            }

            public async Task<byte[]> ReadBytesAsync(int size)
            {
                // NOTE(review): ReadAsync may return fewer bytes than requested; this relies
                // on the pipe delivering the full message in one read — confirm against the
                // server's write pattern.
                byte[] bytes = new byte[size];
                await pipe.ReadAsync(bytes, 0, bytes.Length);
                return bytes;
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Collections.Generic;
using System.Linq;
using System.Net.Http.Headers;
using System.Text;

using Xunit;

namespace System.Net.Http.Tests
{
    /// <summary>
    /// Unit tests for <see cref="CacheControlHeaderValue"/>: property round-trips,
    /// serialization (ToString), equality/hash-code contracts, cloning, and the internal
    /// GetCacheControlLength/Parse/TryParse parsing paths.
    /// </summary>
    public class CacheControlHeaderValueTest
    {
        [Fact]
        public void Properties_SetAndGetAllProperties_SetValueReturnedInGetter()
        {
            CacheControlHeaderValue cacheControl = new CacheControlHeaderValue();

            // Bool properties
            cacheControl.NoCache = true;
            Assert.True(cacheControl.NoCache);
            cacheControl.NoStore = true;
            Assert.True(cacheControl.NoStore);
            cacheControl.MaxStale = true;
            Assert.True(cacheControl.MaxStale);
            cacheControl.NoTransform = true;
            Assert.True(cacheControl.NoTransform);
            cacheControl.OnlyIfCached = true;
            Assert.True(cacheControl.OnlyIfCached);
            cacheControl.Public = true;
            Assert.True(cacheControl.Public);
            cacheControl.Private = true;
            Assert.True(cacheControl.Private);
            cacheControl.MustRevalidate = true;
            Assert.True(cacheControl.MustRevalidate);
            cacheControl.ProxyRevalidate = true;
            Assert.True(cacheControl.ProxyRevalidate);

            // TimeSpan properties
            TimeSpan timeSpan = new TimeSpan(1, 2, 3);
            cacheControl.MaxAge = timeSpan;
            Assert.Equal(timeSpan, cacheControl.MaxAge);
            cacheControl.SharedMaxAge = timeSpan;
            Assert.Equal(timeSpan, cacheControl.SharedMaxAge);
            cacheControl.MaxStaleLimit = timeSpan;
            Assert.Equal(timeSpan, cacheControl.MaxStaleLimit);
            cacheControl.MinFresh = timeSpan;
            Assert.Equal(timeSpan, cacheControl.MinFresh);

            // String collection properties: lazily created, reject null and non-token values.
            Assert.NotNull(cacheControl.NoCacheHeaders);
            AssertExtensions.Throws<ArgumentException>("item", () => { cacheControl.NoCacheHeaders.Add(null); });
            Assert.Throws<FormatException>(() => { cacheControl.NoCacheHeaders.Add("invalid token"); });
            cacheControl.NoCacheHeaders.Add("token");
            Assert.Equal(1, cacheControl.NoCacheHeaders.Count);
            Assert.Equal("token", cacheControl.NoCacheHeaders.First());

            Assert.NotNull(cacheControl.PrivateHeaders);
            AssertExtensions.Throws<ArgumentException>("item", () => { cacheControl.PrivateHeaders.Add(null); });
            Assert.Throws<FormatException>(() => { cacheControl.PrivateHeaders.Add("invalid token"); });
            cacheControl.PrivateHeaders.Add("token");
            Assert.Equal(1, cacheControl.PrivateHeaders.Count);
            Assert.Equal("token", cacheControl.PrivateHeaders.First());

            // NameValueHeaderValue collection property
            Assert.NotNull(cacheControl.Extensions);
            Assert.Throws<ArgumentNullException>(() => { cacheControl.Extensions.Add(null); });
            cacheControl.Extensions.Add(new NameValueHeaderValue("name", "value"));
            Assert.Equal(1, cacheControl.Extensions.Count);
            Assert.Equal(new NameValueHeaderValue("name", "value"), cacheControl.Extensions.First());
        }

        [Fact]
        public void ToString_UseRequestDirectiveValues_AllSerializedCorrectly()
        {
            CacheControlHeaderValue cacheControl = new CacheControlHeaderValue();
            Assert.Equal("", cacheControl.ToString());

            // Note that we allow all combinations of all properties even though the RFC specifies rules what value
            // can be used together.
            // Also for property pairs (bool property + collection property) like 'NoCache' and 'NoCacheHeaders' the
            // caller needs to set the bool property in order for the collection to be populated as string.

            // Cache Request Directive sample
            cacheControl.NoStore = true;
            Assert.Equal("no-store", cacheControl.ToString());
            cacheControl.NoCache = true;
            Assert.Equal("no-store, no-cache", cacheControl.ToString());
            cacheControl.MaxAge = new TimeSpan(0, 1, 10);
            Assert.Equal("no-store, no-cache, max-age=70", cacheControl.ToString());
            cacheControl.MaxStale = true;
            Assert.Equal("no-store, no-cache, max-age=70, max-stale", cacheControl.ToString());
            cacheControl.MaxStaleLimit = new TimeSpan(0, 2, 5);
            Assert.Equal("no-store, no-cache, max-age=70, max-stale=125", cacheControl.ToString());
            cacheControl.MinFresh = new TimeSpan(0, 3, 0);
            Assert.Equal("no-store, no-cache, max-age=70, max-stale=125, min-fresh=180", cacheControl.ToString());

            cacheControl = new CacheControlHeaderValue();
            cacheControl.NoTransform = true;
            Assert.Equal("no-transform", cacheControl.ToString());
            cacheControl.OnlyIfCached = true;
            Assert.Equal("no-transform, only-if-cached", cacheControl.ToString());
            cacheControl.Extensions.Add(new NameValueHeaderValue("custom"));
            cacheControl.Extensions.Add(new NameValueHeaderValue("customName", "customValue"));
            Assert.Equal("no-transform, only-if-cached, custom, customName=customValue", cacheControl.ToString());

            cacheControl = new CacheControlHeaderValue();
            cacheControl.Extensions.Add(new NameValueHeaderValue("custom"));
            Assert.Equal("custom", cacheControl.ToString());
        }

        [Fact]
        public void ToString_UseResponseDirectiveValues_AllSerializedCorrectly()
        {
            CacheControlHeaderValue cacheControl = new CacheControlHeaderValue();
            Assert.Equal("", cacheControl.ToString());

            cacheControl.NoCache = true;
            Assert.Equal("no-cache", cacheControl.ToString());
            cacheControl.NoCacheHeaders.Add("token1");
            Assert.Equal("no-cache=\"token1\"", cacheControl.ToString());
            cacheControl.Public = true;
            Assert.Equal("public, no-cache=\"token1\"", cacheControl.ToString());

            cacheControl = new CacheControlHeaderValue();
            cacheControl.Private = true;
            Assert.Equal("private", cacheControl.ToString());
            cacheControl.PrivateHeaders.Add("token2");
            cacheControl.PrivateHeaders.Add("token3");
            Assert.Equal("private=\"token2, token3\"", cacheControl.ToString());
            cacheControl.MustRevalidate = true;
            Assert.Equal("must-revalidate, private=\"token2, token3\"", cacheControl.ToString());
            cacheControl.ProxyRevalidate = true;
            Assert.Equal("must-revalidate, proxy-revalidate, private=\"token2, token3\"", cacheControl.ToString());
        }

        [Fact]
        public void GetHashCode_CompareValuesWithBoolFieldsSet_MatchExpectation()
        {
            // Verify that different bool fields return different hash values.
            CacheControlHeaderValue[] values = new CacheControlHeaderValue[9];

            for (int i = 0; i < values.Length; i++)
            {
                values[i] = new CacheControlHeaderValue();
            }

            values[0].ProxyRevalidate = true;
            values[1].NoCache = true;
            values[2].NoStore = true;
            values[3].MaxStale = true;
            values[4].NoTransform = true;
            values[5].OnlyIfCached = true;
            values[6].Public = true;
            values[7].Private = true;
            values[8].MustRevalidate = true;

            // Only one bool field set. All hash codes should differ
            for (int i = 0; i < values.Length; i++)
            {
                for (int j = 0; j < values.Length; j++)
                {
                    if (i != j)
                    {
                        CompareHashCodes(values[i], values[j], false);
                    }
                }
            }

            // Validate that two instances with the same bool fields set are equal.
            values[0].NoCache = true;
            CompareHashCodes(values[0], values[1], false);
            values[1].ProxyRevalidate = true;
            CompareHashCodes(values[0], values[1], true);
        }

        [Fact]
        public void GetHashCode_CompareValuesWithTimeSpanFieldsSet_MatchExpectation()
        {
            // Verify that different timespan fields return different hash values.
            CacheControlHeaderValue[] values = new CacheControlHeaderValue[4];

            for (int i = 0; i < values.Length; i++)
            {
                values[i] = new CacheControlHeaderValue();
            }

            values[0].MaxAge = new TimeSpan(0, 1, 1);
            values[1].MaxStaleLimit = new TimeSpan(0, 1, 1);
            values[2].MinFresh = new TimeSpan(0, 1, 1);
            values[3].SharedMaxAge = new TimeSpan(0, 1, 1);

            // Only one timespan field set. All hash codes should differ
            for (int i = 0; i < values.Length; i++)
            {
                for (int j = 0; j < values.Length; j++)
                {
                    if (i != j)
                    {
                        CompareHashCodes(values[i], values[j], false);
                    }
                }
            }

            values[0].MaxStaleLimit = new TimeSpan(0, 1, 2);
            CompareHashCodes(values[0], values[1], false);

            values[1].MaxAge = new TimeSpan(0, 1, 1);
            values[1].MaxStaleLimit = new TimeSpan(0, 1, 2);
            CompareHashCodes(values[0], values[1], true);
        }

        [Fact]
        public void GetHashCode_CompareCollectionFieldsSet_MatchExpectation()
        {
            CacheControlHeaderValue cacheControl1 = new CacheControlHeaderValue();
            CacheControlHeaderValue cacheControl2 = new CacheControlHeaderValue();
            CacheControlHeaderValue cacheControl3 = new CacheControlHeaderValue();
            CacheControlHeaderValue cacheControl4 = new CacheControlHeaderValue();
            CacheControlHeaderValue cacheControl5 = new CacheControlHeaderValue();

            cacheControl1.NoCache = true;
            cacheControl1.NoCacheHeaders.Add("token2");

            cacheControl2.NoCache = true;
            cacheControl2.NoCacheHeaders.Add("token1");
            cacheControl2.NoCacheHeaders.Add("token2");

            CompareHashCodes(cacheControl1, cacheControl2, false);

            cacheControl1.NoCacheHeaders.Add("token1");
            CompareHashCodes(cacheControl1, cacheControl2, true);

            // Since NoCache and Private generate different hash codes, even if NoCacheHeaders and PrivateHeaders
            // have the same values, the hash code will be different.
            cacheControl3.Private = true;
            cacheControl3.PrivateHeaders.Add("token2");
            CompareHashCodes(cacheControl1, cacheControl3, false);

            cacheControl4.Extensions.Add(new NameValueHeaderValue("custom"));
            CompareHashCodes(cacheControl1, cacheControl4, false);

            cacheControl5.Extensions.Add(new NameValueHeaderValue("customN", "customV"));
            cacheControl5.Extensions.Add(new NameValueHeaderValue("custom"));
            CompareHashCodes(cacheControl4, cacheControl5, false);

            cacheControl4.Extensions.Add(new NameValueHeaderValue("customN", "customV"));
            CompareHashCodes(cacheControl4, cacheControl5, true);
        }

        [Fact]
        public void Equals_CompareValuesWithBoolFieldsSet_MatchExpectation()
        {
            // Verify that different bool fields return different hash values.
            CacheControlHeaderValue[] values = new CacheControlHeaderValue[9];

            for (int i = 0; i < values.Length; i++)
            {
                values[i] = new CacheControlHeaderValue();
            }

            values[0].ProxyRevalidate = true;
            values[1].NoCache = true;
            values[2].NoStore = true;
            values[3].MaxStale = true;
            values[4].NoTransform = true;
            values[5].OnlyIfCached = true;
            values[6].Public = true;
            values[7].Private = true;
            values[8].MustRevalidate = true;

            // Only one bool field set. All hash codes should differ
            for (int i = 0; i < values.Length; i++)
            {
                for (int j = 0; j < values.Length; j++)
                {
                    if (i != j)
                    {
                        CompareValues(values[i], values[j], false);
                    }
                }
            }

            // Validate that two instances with the same bool fields set are equal.
            values[0].NoCache = true;
            CompareValues(values[0], values[1], false);
            values[1].ProxyRevalidate = true;
            CompareValues(values[0], values[1], true);
        }

        [Fact]
        public void Equals_CompareValuesWithTimeSpanFieldsSet_MatchExpectation()
        {
            // Verify that different timespan fields return different hash values.
            CacheControlHeaderValue[] values = new CacheControlHeaderValue[4];

            for (int i = 0; i < values.Length; i++)
            {
                values[i] = new CacheControlHeaderValue();
            }

            values[0].MaxAge = new TimeSpan(0, 1, 1);
            values[1].MaxStaleLimit = new TimeSpan(0, 1, 1);
            values[2].MinFresh = new TimeSpan(0, 1, 1);
            values[3].SharedMaxAge = new TimeSpan(0, 1, 1);

            // Only one timespan field set. All hash codes should differ
            for (int i = 0; i < values.Length; i++)
            {
                for (int j = 0; j < values.Length; j++)
                {
                    if (i != j)
                    {
                        CompareValues(values[i], values[j], false);
                    }
                }
            }

            values[0].MaxStaleLimit = new TimeSpan(0, 1, 2);
            CompareValues(values[0], values[1], false);

            values[1].MaxAge = new TimeSpan(0, 1, 1);
            values[1].MaxStaleLimit = new TimeSpan(0, 1, 2);
            CompareValues(values[0], values[1], true);

            CacheControlHeaderValue value1 = new CacheControlHeaderValue();
            value1.MaxStale = true;
            CacheControlHeaderValue value2 = new CacheControlHeaderValue();
            value2.MaxStale = true;
            CompareValues(value1, value2, true);

            value2.MaxStaleLimit = new TimeSpan(1, 2, 3);
            CompareValues(value1, value2, false);
        }

        [Fact]
        public void Equals_CompareCollectionFieldsSet_MatchExpectation()
        {
            CacheControlHeaderValue cacheControl1 = new CacheControlHeaderValue();
            CacheControlHeaderValue cacheControl2 = new CacheControlHeaderValue();
            CacheControlHeaderValue cacheControl3 = new CacheControlHeaderValue();
            CacheControlHeaderValue cacheControl4 = new CacheControlHeaderValue();
            CacheControlHeaderValue cacheControl5 = new CacheControlHeaderValue();
            CacheControlHeaderValue cacheControl6 = new CacheControlHeaderValue();

            cacheControl1.NoCache = true;
            cacheControl1.NoCacheHeaders.Add("token2");

            Assert.False(cacheControl1.Equals(null), "Compare with 'null'");

            cacheControl2.NoCache = true;
            cacheControl2.NoCacheHeaders.Add("token1");
            cacheControl2.NoCacheHeaders.Add("token2");

            CompareValues(cacheControl1, cacheControl2, false);

            cacheControl1.NoCacheHeaders.Add("token1");
            CompareValues(cacheControl1, cacheControl2, true);

            // Since NoCache and Private generate different hash codes, even if NoCacheHeaders and PrivateHeaders
            // have the same values, the hash code will be different.
            cacheControl3.Private = true;
            cacheControl3.PrivateHeaders.Add("token2");
            CompareValues(cacheControl1, cacheControl3, false);

            cacheControl4.Private = true;
            cacheControl4.PrivateHeaders.Add("token3");
            CompareValues(cacheControl3, cacheControl4, false);

            cacheControl5.Extensions.Add(new NameValueHeaderValue("custom"));
            CompareValues(cacheControl1, cacheControl5, false);

            cacheControl6.Extensions.Add(new NameValueHeaderValue("customN", "customV"));
            cacheControl6.Extensions.Add(new NameValueHeaderValue("custom"));
            CompareValues(cacheControl5, cacheControl6, false);

            cacheControl5.Extensions.Add(new NameValueHeaderValue("customN", "customV"));
            CompareValues(cacheControl5, cacheControl6, true);
        }

        [Fact]
        public void Clone_Call_CloneFieldsMatchSourceFields()
        {
            // Set every property, clone, and verify value equality of the clone.
            CacheControlHeaderValue source = new CacheControlHeaderValue();
            source.Extensions.Add(new NameValueHeaderValue("custom"));
            source.Extensions.Add(new NameValueHeaderValue("customN", "customV"));
            source.MaxAge = new TimeSpan(1, 1, 1);
            source.MaxStale = true;
            source.MaxStaleLimit = new TimeSpan(1, 1, 2);
            source.MinFresh = new TimeSpan(1, 1, 3);
            source.MustRevalidate = true;
            source.NoCache = true;
            source.NoCacheHeaders.Add("token1");
            source.NoStore = true;
            source.NoTransform = true;
            source.OnlyIfCached = true;
            source.Private = true;
            source.PrivateHeaders.Add("token2");
            source.ProxyRevalidate = true;
            source.Public = true;
            source.SharedMaxAge = new TimeSpan(1, 1, 4);
            CacheControlHeaderValue clone = (CacheControlHeaderValue)((ICloneable)source).Clone();

            Assert.Equal(source, clone);
        }

        [Fact]
        public void GetCacheControlLength_DifferentValidScenariosAndNoExistingCacheControl_AllReturnNonZero()
        {
            CacheControlHeaderValue expected = new CacheControlHeaderValue();
            expected.NoCache = true;
            CheckGetCacheControlLength("X , , no-cache ,,", 1, null, 16, expected);

            expected = new CacheControlHeaderValue();
            expected.NoCache = true;
            expected.NoCacheHeaders.Add("token1");
            expected.NoCacheHeaders.Add("token2");
            CheckGetCacheControlLength("no-cache=\"token1, token2\"", 0, null, 25, expected);

            expected = new CacheControlHeaderValue();
            expected.NoStore = true;
            expected.MaxAge = new TimeSpan(0, 0, 125);
            expected.MaxStale = true;
            CheckGetCacheControlLength("X no-store , max-age = 125, max-stale,", 1, null, 37, expected);

            expected = new CacheControlHeaderValue();
            expected.MinFresh = new TimeSpan(0, 0, 123);
            expected.NoTransform = true;
            expected.OnlyIfCached = true;
            expected.Extensions.Add(new NameValueHeaderValue("custom"));
            CheckGetCacheControlLength("min-fresh=123, no-transform, only-if-cached, custom", 0, null, 51, expected);

            expected = new CacheControlHeaderValue();
            expected.Public = true;
            expected.Private = true;
            expected.PrivateHeaders.Add("token1");
            expected.MustRevalidate = true;
            expected.ProxyRevalidate = true;
            expected.Extensions.Add(new NameValueHeaderValue("c", "d"));
            expected.Extensions.Add(new NameValueHeaderValue("a", "b"));
            CheckGetCacheControlLength(",public, , private=\"token1\", must-revalidate, c=d, proxy-revalidate, a=b", 0,
                null, 72, expected);

            expected = new CacheControlHeaderValue();
            expected.Private = true;
            expected.SharedMaxAge = new TimeSpan(0, 0, 1234567890);
            expected.MaxAge = new TimeSpan(0, 0, 987654321);
            CheckGetCacheControlLength("s-maxage=1234567890, private, max-age = 987654321,", 0, null, 50, expected);
        }

        [Fact]
        public void GetCacheControlLength_DifferentValidScenariosAndExistingCacheControl_AllReturnNonZero()
        {
            CacheControlHeaderValue storeValue = new CacheControlHeaderValue();
            storeValue.NoStore = true;
            CacheControlHeaderValue expected = new CacheControlHeaderValue();
            expected.NoCache = true;
            expected.NoStore = true;
            CheckGetCacheControlLength("X no-cache", 1, storeValue, 9, expected);

            storeValue = new CacheControlHeaderValue();
            storeValue.Private = true;
            storeValue.PrivateHeaders.Add("token1");
            storeValue.NoCache = true;
            // NOTE(review): the next two statements mutate 'expected' and then 'expected' is
            // immediately reassigned, so the Add/Clear pair has no effect on the test — looks
            // like leftover setup; confirm whether it was meant to apply to 'storeValue'.
            expected.NoCacheHeaders.Add("token1");
            expected.NoCacheHeaders.Clear(); // just make sure we have an assigned (empty) collection.
            expected = new CacheControlHeaderValue();
            expected.Private = true;
            expected.PrivateHeaders.Add("token1");
            expected.PrivateHeaders.Add("token2");
            expected.NoCache = true;
            expected.NoCacheHeaders.Add("token1");
            expected.NoCacheHeaders.Add("token2");
            CheckGetCacheControlLength("private=\"token2\", no-cache=\"token1, , token2,\"", 0, storeValue, 46,
                expected);

            storeValue = new CacheControlHeaderValue();
            storeValue.Extensions.Add(new NameValueHeaderValue("x", "y"));
            storeValue.NoTransform = true;
            storeValue.OnlyIfCached = true;
            expected = new CacheControlHeaderValue();
            expected.Public = true;
            expected.Private = true;
            expected.PrivateHeaders.Add("token1");
            expected.MustRevalidate = true;
            expected.ProxyRevalidate = true;
            expected.NoTransform = true;
            expected.OnlyIfCached = true;
            expected.Extensions.Add(new NameValueHeaderValue("a", "\"b\""));
            expected.Extensions.Add(new NameValueHeaderValue("c", "d"));
            expected.Extensions.Add(new NameValueHeaderValue("x", "y")); // from store result
            CheckGetCacheControlLength(",public, , private=\"token1\", must-revalidate, c=d, proxy-revalidate, a=\"b\"",
                0, storeValue, 74, expected);

            storeValue = new CacheControlHeaderValue();
            storeValue.MaxStale = true;
            storeValue.MinFresh = new TimeSpan(1, 2, 3);
            expected = new CacheControlHeaderValue();
            expected.MaxStale = true;
            expected.MaxStaleLimit = new TimeSpan(0, 0, 5);
            expected.MinFresh = new TimeSpan(0, 0, 10); // note that the last header value overwrites existing ones
            CheckGetCacheControlLength(" ,,max-stale=5,,min-fresh = 10,,", 0, storeValue, 33, expected);

            storeValue = new CacheControlHeaderValue();
            storeValue.SharedMaxAge = new TimeSpan(1, 2, 3);
            storeValue.NoTransform = true;
            expected = new CacheControlHeaderValue();
            expected.SharedMaxAge = new TimeSpan(1, 2, 3);
            expected.NoTransform = true;
            // NOTE(review): this final scenario builds 'storeValue' and 'expected' but never
            // calls CheckGetCacheControlLength — apparent missing assertion; the setup is
            // currently dead code. Confirm the intended input string and expected length.
        }

        [Fact]
        public void GetCacheControlLength_DifferentInvalidScenarios_AllReturnZero()
        {
            // Token-only values
            CheckInvalidCacheControlLength("no-store=15", 0);
            CheckInvalidCacheControlLength("no-store=", 0);
            CheckInvalidCacheControlLength("no-transform=a", 0);
            CheckInvalidCacheControlLength("no-transform=", 0);
            CheckInvalidCacheControlLength("only-if-cached=\"x\"", 0);
            CheckInvalidCacheControlLength("only-if-cached=", 0);
            CheckInvalidCacheControlLength("public=\"x\"", 0);
            CheckInvalidCacheControlLength("public=", 0);
            CheckInvalidCacheControlLength("must-revalidate=\"1\"", 0);
            CheckInvalidCacheControlLength("must-revalidate=", 0);
            CheckInvalidCacheControlLength("proxy-revalidate=x", 0);
            CheckInvalidCacheControlLength("proxy-revalidate=", 0);

            // Token with optional field-name list
            CheckInvalidCacheControlLength("no-cache=", 0);
            CheckInvalidCacheControlLength("no-cache=token", 0);
            CheckInvalidCacheControlLength("no-cache=\"token", 0);
            CheckInvalidCacheControlLength("no-cache=\"\"", 0); // at least one token expected as value
            CheckInvalidCacheControlLength("private=", 0);
            CheckInvalidCacheControlLength("private=token", 0);
            CheckInvalidCacheControlLength("private=\"token", 0);
            CheckInvalidCacheControlLength("private=\",\"", 0); // at least one token expected as value
            CheckInvalidCacheControlLength("private=\"=\"", 0);

            // Token with delta-seconds value
            CheckInvalidCacheControlLength("max-age", 0);
            CheckInvalidCacheControlLength("max-age=", 0);
            CheckInvalidCacheControlLength("max-age=a", 0);
            CheckInvalidCacheControlLength("max-age=\"1\"", 0);
            CheckInvalidCacheControlLength("max-age=1.5", 0);
            CheckInvalidCacheControlLength("max-stale=", 0);
            CheckInvalidCacheControlLength("max-stale=a", 0);
            CheckInvalidCacheControlLength("max-stale=\"1\"", 0);
            CheckInvalidCacheControlLength("max-stale=1.5", 0);
            CheckInvalidCacheControlLength("min-fresh", 0);
            CheckInvalidCacheControlLength("min-fresh=", 0);
            CheckInvalidCacheControlLength("min-fresh=a", 0);
            CheckInvalidCacheControlLength("min-fresh=\"1\"", 0);
            CheckInvalidCacheControlLength("min-fresh=1.5", 0);
            CheckInvalidCacheControlLength("s-maxage", 0);
            CheckInvalidCacheControlLength("s-maxage=", 0);
            CheckInvalidCacheControlLength("s-maxage=a", 0);
            CheckInvalidCacheControlLength("s-maxage=\"1\"", 0);
            CheckInvalidCacheControlLength("s-maxage=1.5", 0);

            // Invalid Extension values
            CheckInvalidCacheControlLength("custom=", 0);
            CheckInvalidCacheControlLength("custom value", 0);

            CheckInvalidCacheControlLength(null, 0);
            CheckInvalidCacheControlLength("", 0);
            CheckInvalidCacheControlLength("", 1);
        }

        [Fact]
        public void Parse_SetOfValidValueStrings_ParsedCorrectly()
        {
            // Just verify parser is implemented correctly. Don't try to test syntax parsed by CacheControlHeaderValue.
            CacheControlHeaderValue expected = new CacheControlHeaderValue();
            expected.NoStore = true;
            expected.MinFresh = new TimeSpan(0, 2, 3);
            CheckValidParse(" , no-store, min-fresh=123", expected);

            expected = new CacheControlHeaderValue();
            expected.MaxStale = true;
            expected.NoCache = true;
            expected.NoCacheHeaders.Add("t");
            CheckValidParse("max-stale, no-cache=\"t\", ,,", expected);
        }

        [Fact]
        public void Parse_SetOfInvalidValueStrings_Throws()
        {
            CheckInvalidParse("no-cache,=", 0);
            CheckInvalidParse("max-age=123x", 0);
            CheckInvalidParse("=no-cache", 0);
            CheckInvalidParse("no-cache no-store", 0);
            CheckInvalidParse("invalid =", 0);
            CheckInvalidParse("\u4F1A", 0);
        }

        [Fact]
        public void TryParse_SetOfValidValueStrings_ParsedCorrectly()
        {
            // Just verify parser is implemented correctly. Don't try to test syntax parsed by CacheControlHeaderValue.
            CacheControlHeaderValue expected = new CacheControlHeaderValue();
            expected.NoStore = true;
            expected.MinFresh = new TimeSpan(0, 2, 3);
            CheckValidTryParse(" , no-store, min-fresh=123", expected);

            expected = new CacheControlHeaderValue();
            expected.MaxStale = true;
            expected.NoCache = true;
            expected.NoCacheHeaders.Add("t");
            CheckValidTryParse("max-stale, no-cache=\"t\", ,,", expected);
        }

        [Fact]
        public void TryParse_SetOfInvalidValueStrings_ReturnsFalse()
        {
            CheckInvalidTryParse("no-cache,=", 0);
            CheckInvalidTryParse("max-age=123x", 0);
            CheckInvalidTryParse("=no-cache", 0);
            CheckInvalidTryParse("no-cache no-store", 0);
            CheckInvalidTryParse("invalid =", 0);
            CheckInvalidTryParse("\u4F1A", 0);
        }

        #region Helper methods

        // Asserts hash-code (in)equality in both expected directions.
        private void CompareHashCodes(CacheControlHeaderValue x, CacheControlHeaderValue y, bool areEqual)
        {
            if (areEqual)
            {
                Assert.Equal(x.GetHashCode(), y.GetHashCode());
            }
            else
            {
                Assert.NotEqual(x.GetHashCode(), y.GetHashCode());
            }
        }

        // Asserts Equals symmetry: x.Equals(y) and y.Equals(x) must both match 'areEqual'.
        private void CompareValues(CacheControlHeaderValue x, CacheControlHeaderValue y, bool areEqual)
        {
            Assert.Equal(areEqual, x.Equals(y));
            Assert.Equal(areEqual, y.Equals(x));
        }

        // Drives the internal GetCacheControlLength parser. When 'storeValue' is provided,
        // the parse result is merged into it and the out parameter stays null.
        private static void CheckGetCacheControlLength(string input, int startIndex,
            CacheControlHeaderValue storeValue, int expectedLength, CacheControlHeaderValue expectedResult)
        {
            CacheControlHeaderValue result = null;
            Assert.Equal(expectedLength, CacheControlHeaderValue.GetCacheControlLength(input, startIndex,
                storeValue, out result));

            if (storeValue == null)
            {
                Assert.Equal(expectedResult, result);
            }
            else
            {
                // If we provide a 'storeValue', then that instance will be updated and result will be 'null'
                Assert.Null(result);
                Assert.Equal(expectedResult, storeValue);
            }
        }

        private static void CheckInvalidCacheControlLength(string input, int startIndex)
        {
            CacheControlHeaderValue result = null;
            Assert.Equal(0, CacheControlHeaderValue.GetCacheControlLength(input, startIndex, null, out result));
            Assert.Null(result);
        }

        private void CheckValidParse(string input, CacheControlHeaderValue expectedResult)
        {
            CacheControlHeaderValue result = CacheControlHeaderValue.Parse(input);
            Assert.Equal(expectedResult, result);
        }

        private void CheckInvalidParse(string input, int startIndex)
        {
            Assert.Throws<FormatException>(() => { CacheControlHeaderValue.Parse(input); });
        }

        private void CheckValidTryParse(string input, CacheControlHeaderValue expectedResult)
        {
            CacheControlHeaderValue result = null;
            Assert.True(CacheControlHeaderValue.TryParse(input, out result));
            Assert.Equal(expectedResult, result);
        }

        private void CheckInvalidTryParse(string input, int startIndex)
        {
            CacheControlHeaderValue result = null;
            Assert.False(CacheControlHeaderValue.TryParse(input, out result));
            Assert.Null(result);
        }
        #endregion
    }
}
using System;
using System.Collections;
using System.Collections.Specialized;
using System.IO;
using System.Text;
using System.Web;

namespace Cvv.WebUtility.Mvc
{
    /// <summary>
    /// <c>IHttpResponse</c> implementation backed by a live ASP.NET
    /// <see cref="HttpResponse"/>. Every member forwards directly to the
    /// wrapped response; members are virtual so individual behaviors can be
    /// overridden (e.g. in tests).
    /// </summary>
    public class OnlineHttpResponse : IHttpResponse
    {
        /// <summary>The wrapped ASP.NET response that all members delegate to.</summary>
        private readonly HttpResponse _inner;

        /// <summary>Wraps the supplied live response.</summary>
        public OnlineHttpResponse(HttpResponse response)
        {
            _inner = response;
        }

        // ---- Read-only state ----

        public virtual HttpCookieCollection Cookies
        {
            get { return _inner.Cookies; }
        }

        /// <summary>
        /// Not supported for a live response: <see cref="HttpResponse"/> does
        /// not expose its buffered output as text.
        /// </summary>
        public virtual string Output
        {
            get { throw new NotImplementedException(); }
        }

        public virtual Stream OutputStream
        {
            get { return _inner.OutputStream; }
        }

        public virtual NameValueCollection Headers
        {
            get { return _inner.Headers; }
        }

        public virtual bool IsClientConnected
        {
            get { return _inner.IsClientConnected; }
        }

        public virtual bool IsRequestBeingRedirected
        {
            get { return _inner.IsRequestBeingRedirected; }
        }

        // Intentionally non-virtual, matching the original wrapper contract.
        public HttpCachePolicy Cache
        {
            get { return _inner.Cache; }
        }

        // ---- Read/write state ----

        public virtual int StatusCode
        {
            get { return _inner.StatusCode; }
            set { _inner.StatusCode = value; }
        }

        public virtual string ContentType
        {
            get { return _inner.ContentType; }
            set { _inner.ContentType = value; }
        }

        public virtual bool Buffer
        {
            get { return _inner.Buffer; }
            set { _inner.Buffer = value; }
        }

        public virtual bool BufferOutput
        {
            get { return _inner.BufferOutput; }
            set { _inner.BufferOutput = value; }
        }

        public virtual string CacheControl
        {
            get { return _inner.CacheControl; }
            set { _inner.CacheControl = value; }
        }

        public virtual string Charset
        {
            get { return _inner.Charset; }
            set { _inner.Charset = value; }
        }

        public virtual Encoding ContentEncoding
        {
            get { return _inner.ContentEncoding; }
            set { _inner.ContentEncoding = value; }
        }

        public virtual int Expires
        {
            get { return _inner.Expires; }
            set { _inner.Expires = value; }
        }

        public virtual DateTime ExpiresAbsolute
        {
            get { return _inner.ExpiresAbsolute; }
            set { _inner.ExpiresAbsolute = value; }
        }

        public virtual Stream Filter
        {
            get { return _inner.Filter; }
            set { _inner.Filter = value; }
        }

        public virtual Encoding HeaderEncoding
        {
            get { return _inner.HeaderEncoding; }
            set { _inner.HeaderEncoding = value; }
        }

        public virtual string RedirectLocation
        {
            get { return _inner.RedirectLocation; }
            set { _inner.RedirectLocation = value; }
        }

        public virtual string Status
        {
            get { return _inner.Status; }
            set { _inner.Status = value; }
        }

        public virtual string StatusDescription
        {
            get { return _inner.StatusDescription; }
            set { _inner.StatusDescription = value; }
        }

        public virtual int SubStatusCode
        {
            get { return _inner.SubStatusCode; }
            set { _inner.SubStatusCode = value; }
        }

        public virtual bool SuppressContent
        {
            get { return _inner.SuppressContent; }
            set { _inner.SuppressContent = value; }
        }

        public virtual bool TrySkipIisCustomErrors
        {
            get { return _inner.TrySkipIisCustomErrors; }
            set { _inner.TrySkipIisCustomErrors = value; }
        }

        // ---- Cache/file dependency registration ----

        public virtual void AddCacheItemDependencies(ArrayList cacheKeys)
        {
            _inner.AddCacheItemDependencies(cacheKeys);
        }

        public virtual void AddCacheItemDependencies(string[] cacheKeys)
        {
            _inner.AddCacheItemDependencies(cacheKeys);
        }

        public virtual void AddCacheItemDependency(string cacheKey)
        {
            _inner.AddCacheItemDependency(cacheKey);
        }

        public virtual void AddFileDependencies(ArrayList filenames)
        {
            _inner.AddFileDependencies(filenames);
        }

        public virtual void AddFileDependencies(string[] filenames)
        {
            _inner.AddFileDependencies(filenames);
        }

        public virtual void AddFileDependency(string filename)
        {
            _inner.AddFileDependency(filename);
        }

        // ---- Headers and cookies ----

        public virtual void AddHeader(string name, string value)
        {
            _inner.AddHeader(name, value);
        }

        public virtual void AppendHeader(string header, string value)
        {
            _inner.AppendHeader(header, value);
        }

        public virtual void AppendCookie(HttpCookie cookie)
        {
            _inner.AppendCookie(cookie);
        }

        public virtual void SetCookie(HttpCookie cookie)
        {
            _inner.SetCookie(cookie);
        }

        public virtual void ClearHeaders()
        {
            _inner.ClearHeaders();
        }

        // ---- Misc response control ----

        public virtual void AppendToLog(string param)
        {
            _inner.AppendToLog(param);
        }

        public virtual string ApplyAppPathModifier(string virtualPath)
        {
            return _inner.ApplyAppPathModifier(virtualPath);
        }

        public virtual void Clear()
        {
            _inner.Clear();
        }

        public virtual void ClearContent()
        {
            _inner.ClearContent();
        }

        public virtual void Close()
        {
            _inner.Close();
        }

        public virtual void DisableKernelCache()
        {
            _inner.DisableKernelCache();
        }

        public virtual void Flush()
        {
            _inner.Flush();
        }

        public virtual void Pics(string value)
        {
            _inner.Pics(value);
        }

        public virtual void Redirect(string url)
        {
            _inner.Redirect(url);
        }

        public virtual void Redirect(string url, bool endResponse)
        {
            _inner.Redirect(url, endResponse);
        }

        public virtual void End()
        {
            _inner.End();
        }

        // ---- Content output ----

        public virtual void TransmitFile(string filename)
        {
            _inner.TransmitFile(filename);
        }

        public virtual void TransmitFile(string filename, long offset, long length)
        {
            _inner.TransmitFile(filename, offset, length);
        }

        public virtual void Write(string s)
        {
            _inner.Write(s);
        }

        public virtual void Write(char ch)
        {
            _inner.Write(ch);
        }

        public virtual void Write(object obj)
        {
            _inner.Write(obj);
        }

        public virtual void Write(char[] buffer, int index, int count)
        {
            _inner.Write(buffer, index, count);
        }

        public virtual void WriteFile(string filename)
        {
            _inner.WriteFile(filename);
        }

        public virtual void WriteFile(string filename, bool readIntoMemory)
        {
            _inner.WriteFile(filename, readIntoMemory);
        }

        public virtual void WriteFile(IntPtr fileHandle, long offset, long size)
        {
            _inner.WriteFile(fileHandle, offset, size);
        }

        public virtual void WriteFile(string filename, long offset, long size)
        {
            _inner.WriteFile(filename, offset, size);
        }

        public virtual void BinaryWrite(byte[] bytes)
        {
            _inner.BinaryWrite(bytes);
        }

        // ---- Caching conveniences ----

        /// <summary>Marks the response as non-cacheable.</summary>
        public virtual void DisableCaching()
        {
            _inner.Cache.SetCacheability(HttpCacheability.NoCache);
        }

        /// <summary>
        /// Marks the response publicly cacheable and sets the given ETag,
        /// wrapped in double quotes as the header syntax requires.
        /// </summary>
        public virtual void SetETag(string eTag)
        {
            _inner.Cache.SetCacheability(HttpCacheability.Public);
            _inner.Cache.SetETag('"' + eTag + '"');
        }

        /// <summary>Sets the Last-Modified header via the cache policy.</summary>
        public virtual void SetLastModified(DateTime timeStamp)
        {
            _inner.Cache.SetLastModified(timeStamp);
        }
    }
}
/*
 *
 * (c) Copyright Ascensio System Limited 2010-2021
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Drawing;
using System.Drawing.Drawing2D;
using System.IO;
using System.Security.Cryptography;
using System.Text;
using System.Threading;
using System.Web;

using ASC.Data.Storage;
using ASC.Web.Studio.Utility;
using ASC.Web.UserControls.Bookmarking.Common;
using ASC.Web.UserControls.Bookmarking.Common.Presentation;
using ASC.Web.UserControls.Bookmarking.Common.Util;

namespace ASC.Web.UserControls.Bookmarking.Util
{
    // Captures website screenshots with an external IECapt.exe process, scales
    // them down to thumbnail size, and stores them in the bookmarking data store
    // under a SHA-256-derived file name.
    public class WebSiteThumbnailHelper : IThumbnailHelper
    {
        // URLs currently being captured; guarded by lock(ProcessedUrls) so the
        // same URL is not screenshotted by two worker threads at once.
        private readonly List<Uri> ProcessedUrls = new List<Uri>();

        // NOTE(review): 'context' is unused in this overload — the store is
        // resolved from the current tenant instead. Confirm before removing.
        private IDataStore getStore(HttpContext context)
        {
            return StorageFactory.GetStorage(TenantProvider.CurrentTenantID.ToString(), BookmarkingRequestConstants.BookmarkingStorageManagerID);
        }

        // Resolves the bookmarking store for an explicit tenant id ('context'
        // is unused here as well).
        private IDataStore getStore(HttpContext context, int tenant)
        {
            return StorageFactory.GetStorage(tenant.ToString(), BookmarkingRequestConstants.BookmarkingStorageManagerID);
        }

        // Convenience overload: capture asynchronously for the current request
        // and tenant, skipping URLs that already have a thumbnail.
        public void MakeThumbnail(string url)
        {
            MakeThumbnail(url, true, true, HttpContext.Current, TenantProvider.CurrentTenantID);
        }

        // Queues a thumbnail capture on the thread pool. When 'notOverride' is
        // set, exits early if a thumbnail for the URL already exists.
        // NOTE(review): 'async' is effectively unused — the synchronous join is
        // commented out below, so capture is always fire-and-forget.
        public void MakeThumbnail(string url, bool async, bool notOverride, HttpContext context, int tenantID)
        {
            try
            {
                if (string.IsNullOrEmpty(url)) return;
                if (notOverride)
                {
                    var fileName = GetFileNameByUrl(HttpUtility.UrlEncode(url), BookmarkingSettings.ThumbSmallSize);
                    if (getStore(context, tenantID).IsFile(string.Empty, fileName))
                    {
                        return;
                    }
                }
                // Pack the arguments for the WaitCallback (url, context, tenant).
                List<object> p = new List<object>();
                p.Add(url);
                p.Add(context);
                p.Add(tenantID);
                ThreadPool.QueueUserWorkItem(MakeThumbnailCallback, p);
                //if (!async) thread.Join();
            }
            catch
            {
                // Best-effort: thumbnail generation must never break the caller.
            }
        }

        // Thread-pool worker: runs IECapt.exe against the URL, crops/scales the
        // resulting PNG to thumbnail size and uploads it to the data store.
        private void MakeThumbnailCallback(object p)
        {
            #region Sanity Check
            // Defensively unpack the List<object> built by MakeThumbnail;
            // malformed payloads fall back to empty/current/0 defaults.
            var url = string.Empty;
            var context = HttpContext.Current;
            int tenant = 0;
            try
            {
                if (p is List<Object>)
                {
                    var s = p as List<object>;
                    if (s.Count == 3)
                    {
                        if (s[0] is string)
                        {
                            url = s[0] as string;
                        }
                        if (s[1] is HttpContext)
                        {
                            context = s[1] as HttpContext;
                        }
                        if (s[2] is int)
                        {
                            tenant = (int)s[2];
                        }
                    }
                }
            }
            catch
            {
            }
            #endregion

            var outFileName = string.Empty;
            Process ps = null;
            int psid = -1; // remembered separately so the process can still be killed after 'ps' is disposed
            Uri uri = null;
            try
            {
                //Check true url
                if (!string.IsNullOrEmpty(url) && context != null && Uri.TryCreate(url, UriKind.Absolute, out uri))
                {
                    lock (ProcessedUrls)
                    {
                        // Another worker is already capturing this URL — bail out.
                        if (ProcessedUrls.Contains(uri)) return;
                        ProcessedUrls.Add(uri);
                    }
                    // Map the path of the bundled IECapt.exe screenshot tool.
                    var appDataDir = context.Server.MapPath(VirtualPathUtility.ToAbsolute("~/Products/Community/Modules/Bookmarking/App_Data"));
                    var screenShoterName = Path.Combine(appDataDir, "IECapt.exe");
                    if (File.Exists(screenShoterName))
                    {
                        // Unique scratch file under App_Data/screens for the raw capture.
                        outFileName = Path.Combine(appDataDir, Path.Combine("screens", Guid.NewGuid() + ".png"));
                        if (!Directory.Exists(Path.GetDirectoryName(outFileName)))
                        {
                            Directory.CreateDirectory(Path.GetDirectoryName(outFileName));
                        }
                        var userAgent = context.Request.UserAgent; // NOTE(review): computed but never passed through by BuildArguments
                        var arguments = BuildArguments(uri, outFileName, userAgent);
                        //Launch process
                        using (ps = new Process())
                        {
                            ps.StartInfo = new ProcessStartInfo(screenShoterName, arguments);
                            ps.StartInfo.CreateNoWindow = true;
                            ps.Start();
                            psid = ps.Id;
                            if (ps.WaitForExit(15000))//Wait 15 sec and close
                            {
                                //Ta da. File created
                                if (File.Exists(outFileName))
                                {
                                    // Scale the full browser-size capture down to the thumbnail
                                    // rectangle and upload it. Warning: loads the whole image
                                    // into memory.
                                    using (Image image = Image.FromFile(outFileName))
                                    {
                                        using (Image clipImage = new Bitmap(BookmarkingSettings.ThumbSmallSize.Width, BookmarkingSettings.ThumbSmallSize.Height))
                                        {
                                            using (var graphics = Graphics.FromImage(clipImage))
                                            {
                                                graphics.CompositingQuality = CompositingQuality.HighQuality;
                                                graphics.InterpolationMode = InterpolationMode.HighQualityBicubic; //Bicubic is better for minimizing image.
                                                graphics.SmoothingMode = SmoothingMode.HighQuality;
                                                graphics.DrawImage(image,
                                                                   Rectangle.FromLTRB(0, 0, BookmarkingSettings.ThumbSmallSize.Width, BookmarkingSettings.ThumbSmallSize.Height),
                                                                   Rectangle.FromLTRB(0, 0, BookmarkingSettings.BrowserSize.Width, BookmarkingSettings.BrowserSize.Height),
                                                                   GraphicsUnit.Pixel);
                                                using (var ms = new MemoryStream())
                                                {
                                                    clipImage.Save(ms, BookmarkingSettings.CaptureImageFormat);
                                                    ms.Position = 0;
                                                    IDataStore store = getStore(context, tenant);
                                                    var fileName = GetFileNameByUrl(HttpUtility.UrlEncode(url), BookmarkingSettings.ThumbSmallSize);
                                                    store.Save(string.Empty, fileName, ms);
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                            else
                            {
                                //Process hasn't exited
                                //finally will kill it
                            }
                        }
                    }
                }
            }
            catch (Exception)
            {
                // Swallow everything — a failed capture simply leaves no thumbnail.
            }
            finally
            {
                // First attempt: kill via the Process object (throws if it has
                // already exited or been disposed — swallowed on purpose).
                if (ps != null)
                {
                    try
                    {
                        ps.Kill();
                    }
                    catch
                    {
                        //Don't throw
                    }
                }
                // Second attempt: re-acquire the process by the remembered id
                // in case the first handle was already disposed.
                if (psid != -1)
                {
                    try
                    {
                        var proc = Process.GetProcessById(psid);
                        if (proc != null)
                        {
                            proc.Kill();
                        }
                    }
                    catch
                    {
                        //Don't throw
                    }
                }
                // Remove the scratch capture file.
                if (!string.IsNullOrEmpty(outFileName) && File.Exists(outFileName))
                {
                    File.Delete(outFileName);
                }
                // Release the in-progress marker for this URL.
                lock (ProcessedUrls)
                {
                    if (uri != null && ProcessedUrls.Contains(uri))
                    {
                        ProcessedUrls.Remove(uri);
                    }
                }
            }
        }

        // Builds the IECapt.exe command line: 1 s render delay, 10 s max wait,
        // minimum width equal to the configured browser size.
        // NOTE(review): 'userAgent' is accepted but not included in the format
        // string — confirm whether a --user-agent switch was intended.
        private static string BuildArguments(Uri uri, string outFileName, string userAgent)
        {
            return string.Format("--url=\"{0}\" --out=\"{1}\" --delay={2} --max-wait={4} --min-width={3} --silent ", uri, outFileName, 1000, BookmarkingSettings.BrowserSize.Width, 10000);
        }

        // Returns the stored thumbnail URI for the given page URL, or an empty
        // string when no thumbnail exists yet.
        public string GetThumbnailUrl(string Url, BookmarkingThumbnailSize size)
        {
            var fileName = GetFileNameByUrl(Url, size);
            return getStore(HttpContext.Current).IsFile(string.Empty, fileName)
                       ? getStore(HttpContext.Current).GetUri(string.Empty, fileName).ToString()
                       : string.Empty;
        }

        // Same lookup as GetThumbnailUrl; kept as a separate entry point for
        // the update flow of the IThumbnailHelper contract.
        public string GetThumbnailUrlForUpdate(string Url, BookmarkingThumbnailSize size)
        {
            return GetThumbnailUrl(Url, size);
        }

        #region DeleteThumbnail

        // Deletes the small-size thumbnail for the URL; a missing file is not
        // an error.
        public void DeleteThumbnail(string Url)
        {
            try
            {
                var fileName = GetFileNameByUrl(Url, BookmarkingSettings.ThumbSmallSize);
                getStore(HttpContext.Current, TenantProvider.CurrentTenantID).Delete(fileName);
            }
            catch (FileNotFoundException)
            {
            }
        }

        #endregion

        // Hex-encoded SHA-256 of the UTF-16 bytes of 'text'; used only to
        // derive stable storage file names (not for security).
        private static string GetSHA256(string text)
        {
            var ue = new UnicodeEncoding();
            var message = ue.GetBytes(text);
            var hashString = new SHA256Managed();
            var hashValue = hashString.ComputeHash(message);
            var hex = new StringBuilder();
            foreach (byte x in hashValue)
                hex.AppendFormat("{0:x2}", x);
            return hex.ToString();
        }

        // Storage file name: "<sha256(url)><size>.<image format extension>".
        private static string GetFileNameByUrl(string url, BookmarkingThumbnailSize size)
        {
            string sizeString = size == null ? string.Empty : size.ToString();
            return string.Format("{0}{1}.{2}", GetSHA256(url), sizeString, BookmarkingSettings.CaptureImageFormat.ToString());
        }
    }
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System;
using System.Collections.Immutable;
using System.Linq;
using Microsoft.CodeAnalysis.CodeGen;
using Microsoft.CodeAnalysis.CSharp.Symbols;
using Microsoft.CodeAnalysis.CSharp.Test.Utilities;
using Microsoft.CodeAnalysis.CSharp.UnitTests;
using Microsoft.CodeAnalysis.ExpressionEvaluator;
using Microsoft.CodeAnalysis.ExpressionEvaluator.UnitTests;
using Microsoft.CodeAnalysis.Test.Utilities;
using Roslyn.Test.Utilities;
using Xunit;

namespace Microsoft.CodeAnalysis.CSharp.ExpressionEvaluator.UnitTests
{
    // Expression-evaluator tests that check whether "this" is available while
    // stopped inside iterator/async state-machine MoveNext methods and lambda
    // bodies, and verify the IL the EE synthesizes to read the hoisted
    // "<>4__this" field (or ldarg.0 when no display class is involved).
    public class HoistedThisTests : ExpressionCompilerTestBase
    {
        // ---- Instance methods where "this" is NOT captured ----

        [WorkItem(1067379, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/1067379")]
        [Fact]
        public void InstanceIterator_NoCapturing()
        {
            var source = @" class C { System.Collections.IEnumerable F() { yield break; } } ";
            var expectedIL = @" { // Code size 7 (0x7) .maxstack 1 .locals init (int V_0) IL_0000: ldarg.0 IL_0001: ldfld ""C C.<F>d__0.<>4__this"" IL_0006: ret }";
            VerifyHasThis(source, "C.<F>d__0.MoveNext", "C", expectedIL);
        }

        [WorkItem(1067379, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/1067379")]
        [Fact]
        public void InstanceAsync_NoCapturing()
        {
            var source = @" using System; using System.Threading.Tasks; class C { async Task F() { await Console.Out.WriteLineAsync('a'); } } ";
            var expectedIL = @" { // Code size 7 (0x7) .maxstack 1 .locals init (int V_0, System.Runtime.CompilerServices.TaskAwaiter V_1, C.<F>d__0 V_2, System.Exception V_3) IL_0000: ldarg.0 IL_0001: ldfld ""C C.<F>d__0.<>4__this"" IL_0006: ret }";
            VerifyHasThis(source, "C.<F>d__0.MoveNext", "C", expectedIL);
        }

        [WorkItem(1067379, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/1067379")]
        [Fact]
        public void InstanceLambda_NoCapturing()
        {
            var source = @" class C { void M() { System.Action a = () => 1.Equals(2); a(); } } ";
            // This test documents the fact that, as in dev12, "this"
            // is unavailable while stepping through the lambda. It
            // would be preferable if it were.
            VerifyNoThis(source, "C.<>c.<M>b__0_0");
        }

        [Fact]
        public void InstanceLambda_NoCapturingExceptThis()
        {
            var source = @" class C { void M() { System.Action a = () => this.ToString(); a(); } } ";
            var expectedIL = @" { // Code size 2 (0x2) .maxstack 1 IL_0000: ldarg.0 IL_0001: ret }";
            VerifyHasThis(source, "C.<M>b__0_0", "C", expectedIL, thisCanBeElided: false);
        }

        // ---- Instance methods where "this" IS captured ----

        [Fact]
        public void InstanceIterator_CapturedThis()
        {
            var source = @" class C { System.Collections.IEnumerable F() { yield return this; } } ";
            var expectedIL = @" { // Code size 7 (0x7) .maxstack 1 .locals init (int V_0) IL_0000: ldarg.0 IL_0001: ldfld ""C C.<F>d__0.<>4__this"" IL_0006: ret }";
            VerifyHasThis(source, "C.<F>d__0.MoveNext", "C", expectedIL, thisCanBeElided: false);
        }

        [Fact]
        public void InstanceAsync_CapturedThis()
        {
            var source = @" using System; using System.Threading.Tasks; class C { async Task F() { await Console.Out.WriteLineAsync(this.ToString()); } } ";
            var expectedIL = @" { // Code size 7 (0x7) .maxstack 1 .locals init (int V_0, System.Runtime.CompilerServices.TaskAwaiter V_1, C.<F>d__0 V_2, System.Exception V_3) IL_0000: ldarg.0 IL_0001: ldfld ""C C.<F>d__0.<>4__this"" IL_0006: ret }";
            VerifyHasThis(source, "C.<F>d__0.MoveNext", "C", expectedIL, thisCanBeElided: false);
        }

        [Fact]
        public void InstanceLambda_CapturedThis_DisplayClass()
        {
            var source = @" class C { int x; void M(int y) { System.Action a = () => x.Equals(y); a(); } } ";
            var expectedIL = @" { // Code size 7 (0x7) .maxstack 1 IL_0000: ldarg.0 IL_0001: ldfld ""C C.<>c__DisplayClass1_0.<>4__this"" IL_0006: ret }";
            VerifyHasThis(source, "C.<>c__DisplayClass1_0.<M>b__0", "C", expectedIL, thisCanBeElided: false);
        }

        [WorkItem(1067379, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/1067379")]
        [Fact]
        public void InstanceLambda_CapturedThis_NoDisplayClass()
        {
            var source = @" class C { int x; void M(int y) { System.Action a = () => x.Equals(1); a(); } } ";
            var expectedIL = @" { // Code size 2 (0x2) .maxstack 1 IL_0000: ldarg.0 IL_0001: ret }";
            VerifyHasThis(source, "C.<M>b__1_0", "C", expectedIL, thisCanBeElided: false);
        }

        // ---- Generic containing types/methods ----

        [Fact]
        public void InstanceIterator_Generic()
        {
            var source = @" class C<T> { System.Collections.IEnumerable F<U>() { yield return this; } } ";
            var expectedIL = @" { // Code size 7 (0x7) .maxstack 1 .locals init (int V_0) IL_0000: ldarg.0 IL_0001: ldfld ""C<T> C<T>.<F>d__0<U>.<>4__this"" IL_0006: ret }";
            VerifyHasThis(source, "C.<F>d__0.MoveNext", "C<T>", expectedIL, thisCanBeElided: false);
        }

        [Fact]
        public void InstanceAsync_Generic()
        {
            var source = @" using System; using System.Threading.Tasks; class C<T> { async Task F<U>() { await Console.Out.WriteLineAsync(this.ToString()); } } ";
            var expectedIL = @" { // Code size 7 (0x7) .maxstack 1 .locals init (int V_0, System.Runtime.CompilerServices.TaskAwaiter V_1, C<T>.<F>d__0<U> V_2, System.Exception V_3) IL_0000: ldarg.0 IL_0001: ldfld ""C<T> C<T>.<F>d__0<U>.<>4__this"" IL_0006: ret }";
            VerifyHasThis(source, "C.<F>d__0.MoveNext", "C<T>", expectedIL, thisCanBeElided: false);
        }

        [Fact]
        public void InstanceLambda_Generic()
        {
            var source = @" class C<T> { int x; void M<U>(int y) { System.Action a = () => x.Equals(y); a(); } } ";
            var expectedIL = @" { // Code size 7 (0x7) .maxstack 1 IL_0000: ldarg.0 IL_0001: ldfld ""C<T> C<T>.<>c__DisplayClass1_0<U>.<>4__this"" IL_0006: ret }";
            VerifyHasThis(source, "C.<>c__DisplayClass1_0.<M>b__0", "C<T>", expectedIL, thisCanBeElided: false);
        }

        // ---- Explicit interface implementations (mangled metadata names) ----

        [Fact]
        public void InstanceIterator_ExplicitInterfaceImplementation()
        {
            var source = @" interface I { System.Collections.IEnumerable F(); } class C : I { System.Collections.IEnumerable I.F() { yield return this; } } ";
            var expectedIL = @" { // Code size 7 (0x7) .maxstack 1 .locals init (int V_0) IL_0000: ldarg.0 IL_0001: ldfld ""C C.<I-F>d__0.<>4__this"" IL_0006: ret }";
            VerifyHasThis(source, "C.<I-F>d__0.MoveNext", "C", expectedIL, thisCanBeElided: false);
        }

        [Fact]
        public void InstanceAsync_ExplicitInterfaceImplementation()
        {
            var source = @" using System; using System.Threading.Tasks; interface I { Task F(); } class C : I { async Task I.F() { await Console.Out.WriteLineAsync(this.ToString()); } } ";
            var expectedIL = @" { // Code size 7 (0x7) .maxstack 1 .locals init (int V_0, System.Runtime.CompilerServices.TaskAwaiter V_1, C.<I-F>d__0 V_2, System.Exception V_3) IL_0000: ldarg.0 IL_0001: ldfld ""C C.<I-F>d__0.<>4__this"" IL_0006: ret }";
            VerifyHasThis(source, "C.<I-F>d__0.MoveNext", "C", expectedIL, thisCanBeElided: false);
        }

        [Fact]
        public void InstanceLambda_ExplicitInterfaceImplementation()
        {
            var source = @" interface I { void M(int y); } class C : I { int x; void I.M(int y) { System.Action a = () => x.Equals(y); a(); } } ";
            var expectedIL = @" { // Code size 7 (0x7) .maxstack 1 IL_0000: ldarg.0 IL_0001: ldfld ""C C.<>c__DisplayClass1_0.<>4__this"" IL_0006: ret }";
            VerifyHasThis(source, "C.<>c__DisplayClass1_0.<I.M>b__0", "C", expectedIL, thisCanBeElided: false);
        }

        [Fact]
        public void InstanceIterator_ExplicitGenericInterfaceImplementation()
        {
            var source = @" interface I<T> { System.Collections.IEnumerable F(); } class C : I<int> { System.Collections.IEnumerable I<int>.F() { yield return this; } } ";
            var expectedIL = @" { // Code size 7 (0x7) .maxstack 1 .locals init (int V_0) IL_0000: ldarg.0 IL_0001: ldfld ""C C.<I<System-Int32>-F>d__0.<>4__this"" IL_0006: ret }";
            VerifyHasThis(source, "C.<I<System-Int32>-F>d__0.MoveNext", "C", expectedIL, thisCanBeElided: false);
        }

        [Fact]
        public void InstanceAsync_ExplicitGenericInterfaceImplementation()
        {
            var source = @" using System; using System.Threading.Tasks; interface I<T> { Task F(); } class C : I<int> { async Task I<int>.F() { await Console.Out.WriteLineAsync(this.ToString()); } } ";
            var expectedIL = @" { // Code size 7 (0x7) .maxstack 1 .locals init (int V_0, System.Runtime.CompilerServices.TaskAwaiter V_1, C.<I<System-Int32>-F>d__0 V_2, System.Exception V_3) IL_0000: ldarg.0 IL_0001: ldfld ""C C.<I<System-Int32>-F>d__0.<>4__this"" IL_0006: ret }";
            VerifyHasThis(source, "C.<I<System-Int32>-F>d__0.MoveNext", "C", expectedIL, thisCanBeElided: false);
        }

        [Fact]
        public void InstanceLambda_ExplicitGenericInterfaceImplementation()
        {
            var source = @" interface I<T> { void M(int y); } class C : I<int> { int x; void I<int>.M(int y) { System.Action a = () => x.Equals(y); a(); } } ";
            var expectedIL = @" { // Code size 7 (0x7) .maxstack 1 IL_0000: ldarg.0 IL_0001: ldfld ""C C.<>c__DisplayClass1_0.<>4__this"" IL_0006: ret }";
            VerifyHasThis(source, "C.<>c__DisplayClass1_0.<I<System.Int32>.M>b__0", "C", expectedIL, thisCanBeElided: false);
        }

        // Metadata produced by an older compiler used the unmangled
        // "<I<System.Int32>.F>d__0" name; the EE must still find "this".
        [WorkItem(1066489, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/1066489")]
        [Fact]
        public void InstanceIterator_ExplicitInterfaceImplementation_OldName()
        {
            var ilSource = @" .class interface public abstract auto ansi I`1<T> { .method public hidebysig newslot abstract virtual instance class [mscorlib]System.Collections.IEnumerable F() cil managed { } // end of method I`1::F } // end of class I`1 .class public auto ansi beforefieldinit C extends [mscorlib]System.Object implements class I`1<int32> { .class auto ansi sealed nested private beforefieldinit '<I<System.Int32>'.'F>d__0' extends [mscorlib]System.Object implements class [mscorlib]System.Collections.Generic.IEnumerable`1<object>, [mscorlib]System.Collections.IEnumerable, class [mscorlib]System.Collections.Generic.IEnumerator`1<object>, [mscorlib]System.Collections.IEnumerator, [mscorlib]System.IDisposable { .field private object '<>2__current' .field private int32 '<>1__state' .field private int32 '<>l__initialThreadId' .field public class C '<>4__this' .method private hidebysig newslot virtual final instance class [mscorlib]System.Collections.Generic.IEnumerator`1<object> 'System.Collections.Generic.IEnumerable<System.Object>.GetEnumerator'() cil managed { ldnull throw } .method private hidebysig newslot virtual final instance class [mscorlib]System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() cil managed { ldnull throw } .method private hidebysig newslot virtual final instance bool MoveNext() cil managed { ldnull throw } .method private hidebysig newslot specialname virtual final instance object 'System.Collections.Generic.IEnumerator<System.Object>.get_Current'() cil managed { ldnull throw } .method private hidebysig newslot virtual final instance void System.Collections.IEnumerator.Reset() cil managed { ldnull throw } .method private hidebysig newslot virtual final instance void System.IDisposable.Dispose() cil managed { ldnull throw } .method private hidebysig newslot specialname virtual final instance object System.Collections.IEnumerator.get_Current() cil managed { ldnull throw } .method public hidebysig specialname rtspecialname instance void .ctor(int32 '<>1__state') cil managed { ldarg.0 call instance void [mscorlib]System.Object::.ctor() ret } .property instance object 'System.Collections.Generic.IEnumerator<System.Object>.Current'() { .get instance object C/'<I<System.Int32>'.'F>d__0'::'System.Collections.Generic.IEnumerator<System.Object>.get_Current'() } .property instance object System.Collections.IEnumerator.Current() { .get instance object C/'<I<System.Int32>'.'F>d__0'::System.Collections.IEnumerator.get_Current() } } // end of class '<I<System.Int32>'.'F>d__0' .method private hidebysig newslot virtual final instance class [mscorlib]System.Collections.IEnumerable 'I<System.Int32>.F'() cil managed { ldnull throw } .method public hidebysig specialname rtspecialname instance void .ctor() cil managed { ldarg.0 call instance void [mscorlib]System.Object::.ctor() ret } } // end of class C ";
            var module = ExpressionCompilerTestHelpers.GetModuleInstanceForIL(ilSource);
            var runtime = CreateRuntimeInstance(module, new[] { MscorlibRef });
            var context = CreateMethodContext(runtime, "C.<I<System.Int32>.F>d__0.MoveNext");
            VerifyHasThis(context, "C", @" { // Code size 7 (0x7) .maxstack 1 IL_0000: ldarg.0 IL_0001: ldfld ""C C.<I<System.Int32>.F>d__0.<>4__this"" IL_0006: ret }");
        }

        // ---- Static and extension members: "this" must never be offered ----

        [Fact]
        public void StaticIterator()
        {
            var source = @" class C { static System.Collections.IEnumerable F() { yield break; } } ";
            VerifyNoThis(source, "C.<F>d__0.MoveNext");
        }

        [Fact]
        public void StaticAsync()
        {
            var source = @" using System; using System.Threading.Tasks; class C<T> { static async Task F<U>() { await Console.Out.WriteLineAsync('a'); } } ";
            VerifyNoThis(source, "C.<F>d__0.MoveNext");
        }

        [Fact]
        public void StaticLambda()
        {
            var source = @" using System; class C<T> { static void F<U>(int x) { Action a = () => x.ToString(); a(); } } ";
            VerifyNoThis(source, "C.<>c__DisplayClass0_0.<F>b__0");
        }

        [Fact]
        public void ExtensionIterator()
        {
            var source = @" static class C { static System.Collections.IEnumerable F(this int x) { yield return x; } } ";
            VerifyNoThis(source, "C.<F>d__0.MoveNext");
        }

        [Fact]
        public void ExtensionAsync()
        {
            var source = @" using System; using System.Threading.Tasks; static class C { static async Task F(this int x) { await Console.Out.WriteLineAsync(x.ToString()); } } ";
            VerifyNoThis(source, "C.<F>d__0.MoveNext");
        }

        [Fact]
        public void ExtensionLambda()
        {
            var source = @" using System; static class C { static void F(this int x) { Action a = () => x.ToString(); a(); } } ";
            VerifyNoThis(source, "C.<>c__DisplayClass0_0.<F>b__0");
        }

        // Dev12-era metadata: a static lambda helper emitted directly on the
        // containing type (no display class); still no "this".
        [WorkItem(1072296, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/1072296")]
        [Fact]
        public void OldStyleNonCapturingLambda()
        {
            var ilSource = @" .class public auto ansi beforefieldinit C extends [mscorlib]System.Object { .method public hidebysig instance void M() cil managed { ldnull throw } .method public hidebysig specialname rtspecialname instance void .ctor() cil managed { ldarg.0 call instance void [mscorlib]System.Object::.ctor() ret } .method private hidebysig static int32 '<M>b__0'() cil managed { ldnull throw } } // end of class C ";
            var module = ExpressionCompilerTestHelpers.GetModuleInstanceForIL(ilSource);
            var runtime = CreateRuntimeInstance(module, new[] { MscorlibRef });
            var context = CreateMethodContext(runtime, "C.<M>b__0");
            VerifyNoThis(context);
        }

        // ---- Lambdas in every member kind (ctor, finalizer, accessors, events) ----

        [WorkItem(1067379, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/1067379")]
        [Fact]
        public void LambdaLocations_Instance()
        {
            var source = @" using System; class C { int _toBeCaptured; C() { int l = ((Func<int, int>)(x => ((Func<int>)(() => _toBeCaptured + x + 4))() + x))(1); } ~C() { int l = ((Func<int, int>)(x => ((Func<int>)(() => _toBeCaptured + x + 6))() + x))(1); } int P { get { return ((Func<int, int>)(x => ((Func<int>)(() => _toBeCaptured + x + 7))() + x))(1); } set { value = ((Func<int, int>)(x => ((Func<int>)(() => _toBeCaptured + x + 8))() + x))(1); } } int this[int p] { get { return ((Func<int, int>)(x => ((Func<int>)(() => _toBeCaptured + x + 9))() + x))(1); } set { value = ((Func<int, int>)(x => ((Func<int>)(() => _toBeCaptured + x + 10))() + x))(1); } } event Action E { add { int l = ((Func<int, int>)(x => ((Func<int>)(() => _toBeCaptured + x + 11))() + x))(1); } remove { int l = ((Func<int, int>)(x => ((Func<int>)(() => _toBeCaptured + x + 12))() + x))(1); } } } ";
            // {0} is substituted with each display class name below.
            var expectedILTemplate = @" {{ // Code size 7 (0x7) .maxstack 1 IL_0000: ldarg.0 IL_0001: ldfld ""C C.{0}.<>4__this"" IL_0006: ret }}";
            var comp = CreateStandardCompilation(source, options: TestOptions.DebugDll, assemblyName: ExpressionCompilerUtilities.GenerateUniqueName());
            WithRuntimeInstance(comp, runtime =>
            {
                // Re-import the compiled assembly so all synthesized display
                // classes and lambda methods can be enumerated from metadata.
                var dummyComp = CreateStandardCompilation("", new[] { comp.EmitToImageReference() }, options: TestOptions.DebugDll.WithMetadataImportOptions(MetadataImportOptions.All));
                var typeC = dummyComp.GlobalNamespace.GetMember<NamedTypeSymbol>("C");
                var displayClassTypes = typeC.GetMembers().OfType<NamedTypeSymbol>();
                Assert.True(displayClassTypes.Any());
                foreach (var displayClassType in displayClassTypes)
                {
                    var displayClassName = displayClassType.Name;
                    Assert.Equal(GeneratedNameKind.LambdaDisplayClass, GeneratedNames.GetKind(displayClassName));
                    foreach (var displayClassMethod in displayClassType.GetMembers().OfType<MethodSymbol>().Where(m => GeneratedNames.GetKind(m.Name) == GeneratedNameKind.LambdaMethod))
                    {
                        var lambdaMethodName = string.Format("C.{0}.{1}", displayClassName, displayClassMethod.Name);
                        var context = CreateMethodContext(runtime, lambdaMethodName);
                        var expectedIL = string.Format(expectedILTemplate, displayClassName);
                        VerifyHasThis(context, "C", expectedIL);
                    }
                }
            });
        }

        [Fact]
        public void LambdaLocations_Static()
        {
            var source = @" using System; class C { static int f = ((Func<int, int>)(x => ((Func<int>)(() => x + 2))() + x))(1); static C() { int l = ((Func<int, int>)(x => ((Func<int>)(() => x + 4))() + x))(1); } static int P { get { return ((Func<int, int>)(x => ((Func<int>)(() => x + 7))() + x))(1); } set { value = ((Func<int, int>)(x => ((Func<int>)(() => x + 8))() + x))(1); } } static event Action E { add { int l = ((Func<int, int>)(x => ((Func<int>)(() => x + 11))() + x))(1); } remove { int l = ((Func<int, int>)(x => ((Func<int>)(() => x + 12))() + x))(1); } } } ";
            var comp = CreateStandardCompilation(source, options: TestOptions.DebugDll, assemblyName: ExpressionCompilerUtilities.GenerateUniqueName());
            WithRuntimeInstance(comp, runtime =>
            {
                var dummyComp = CreateStandardCompilation("", new[] { comp.EmitToImageReference() }, options: TestOptions.DebugDll.WithMetadataImportOptions(MetadataImportOptions.All));
                var typeC = dummyComp.GlobalNamespace.GetMember<NamedTypeSymbol>("C");
                var displayClassTypes = typeC.GetMembers().OfType<NamedTypeSymbol>();
                Assert.True(displayClassTypes.Any());
                foreach (var displayClassType in displayClassTypes)
                {
                    var displayClassName = displayClassType.Name;
                    Assert.Equal(GeneratedNameKind.LambdaDisplayClass, GeneratedNames.GetKind(displayClassName));
                    foreach (var displayClassMethod in displayClassType.GetMembers().OfType<MethodSymbol>().Where(m => GeneratedNames.GetKind(m.Name) == GeneratedNameKind.LambdaMethod))
                    {
                        var lambdaMethodName = string.Format("C.{0}.{1}", displayClassName, displayClassMethod.Name);
                        var context = CreateMethodContext(runtime, lambdaMethodName);
                        VerifyNoThis(context);
                    }
                }
            });
        }

        // ---- Shared verification helpers ----

        // Compiles 'source' (debug), verifies "this" is available in
        // 'methodName' with the expected type/IL, then re-verifies against a
        // release build, where "this" may legitimately be optimized away
        // unless 'thisCanBeElided' is false.
        private void VerifyHasThis(string source, string methodName, string expectedType, string expectedIL, bool thisCanBeElided = true)
        {
            var sourceCompilation = CreateCompilationWithMscorlib45(source, options: TestOptions.DebugDll, assemblyName: ExpressionCompilerUtilities.GenerateUniqueName());
            WithRuntimeInstance(sourceCompilation, runtime =>
            {
                var context = CreateMethodContext(runtime, methodName);
                VerifyHasThis(context, expectedType, expectedIL);
            });
            // Now recompile and test CompileExpression with optimized code.
            sourceCompilation = sourceCompilation.WithOptions(sourceCompilation.Options.WithOptimizationLevel(OptimizationLevel.Release));
            WithRuntimeInstance(sourceCompilation, runtime =>
            {
                var context = CreateMethodContext(runtime, methodName);
                // In C#, "this" may be optimized away.
                if (thisCanBeElided)
                {
                    VerifyNoThis(context);
                }
                else
                {
                    VerifyHasThis(context, expectedType, expectedIL: null);
                }
                // Verify that binding a trivial expression succeeds.
                string error;
                var testData = new CompilationTestData();
                context.CompileExpression("42", out error, testData);
                Assert.Null(error);
                Assert.Equal(1, testData.Methods.Count);
            });
        }

        // Asserts that CompileGetLocals exposes a "this" local and that
        // evaluating the expression "this" compiles to 'expectedIL' (IL check
        // is skipped when 'expectedIL' is null).
        private static void VerifyHasThis(EvaluationContext context, string expectedType, string expectedIL)
        {
            var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
            string typeName;
            var testData = new CompilationTestData();
            var assembly = context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
            Assert.NotNull(assembly);
            Assert.NotEqual(assembly.Count, 0);
            var methods = testData.GetMethodsByName();
            var localAndMethod = locals.Single(l => l.LocalName == "this");
            if (expectedIL != null)
            {
                VerifyMethodData(methods.Single(m => m.Key.Contains(localAndMethod.MethodName)).Value, expectedType, expectedIL);
            }
            locals.Free();
            string error;
            testData = new CompilationTestData();
            context.CompileExpression("this", out error, testData);
            Assert.Null(error);
            if (expectedIL != null)
            {
                VerifyMethodData(methods.Single(m => m.Key.Contains("<>m0")).Value, expectedType, expectedIL);
            }
        }

        // Verifies a synthesized method's IL, its type parameters, and that
        // its return type displays as 'expectedType'.
        private static void VerifyMethodData(CompilationTestData.MethodData methodData, string expectedType, string expectedIL)
        {
            methodData.VerifyIL(expectedIL);
            var method = (MethodSymbol)methodData.Method;
            VerifyTypeParameters(method);
            Assert.Equal(expectedType, method.ReturnType.ToTestDisplayString());
        }

        // Compiles 'source' and asserts "this" is unavailable in 'methodName'.
        private void VerifyNoThis(string source, string methodName)
        {
            var comp = CreateCompilationWithMscorlib45(source, new[] { SystemCoreRef }, options: TestOptions.DebugDll);
            WithRuntimeInstance(comp, runtime => VerifyNoThis(CreateMethodContext(runtime, methodName)));
        }

        // Asserts that "this"/"base" expressions produce the appropriate
        // compiler errors and that no "this" local is surfaced.
        private static void VerifyNoThis(EvaluationContext context)
        {
            string error;
            var testData = new CompilationTestData();
            context.CompileExpression("this", out error, testData);
            Assert.Contains(error, new[]
            {
                "error CS0026: Keyword 'this' is not valid in a static property, static method, or static field initializer",
                "error CS0027: Keyword 'this' is not available in the current context",
            });
            testData = new CompilationTestData();
            context.CompileExpression("base.ToString()", out error, testData);
            Assert.Contains(error, new[]
            {
                "error CS1511: Keyword 'base' is not available in a static method",
                "error CS1512: Keyword 'base' is not available in the current context",
            });
            var locals = ArrayBuilder<LocalAndMethod>.GetInstance();
            string typeName;
            testData = new CompilationTestData();
            var assembly = context.CompileGetLocals(locals, argumentsOnly: false, typeName: out typeName, testData: testData);
            Assert.NotNull(assembly);
            AssertEx.None(locals, l => l.LocalName.Contains("this"));
            locals.Free();
        }

        // ---- Member access through the hoisted "this" ----

        [WorkItem(1024137, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/1024137")]
        [Fact]
        public void InstanceMembersInIterator()
        {
            var source = @"class C { object x; System.Collections.IEnumerable F() { yield return this.x; } }";
            var compilation0 = CreateStandardCompilation(source, options: TestOptions.DebugDll);
            WithRuntimeInstance(compilation0, runtime =>
            {
                var context = CreateMethodContext(runtime, "C.<F>d__1.MoveNext");
                string error;
                var testData = new CompilationTestData();
                context.CompileExpression("this.x", out error, testData);
                testData.GetMethodData("<>x.<>m0").VerifyIL(@" { // Code size 12 (0xc) .maxstack 1 .locals init (int V_0) IL_0000: ldarg.0 IL_0001: ldfld ""C C.<F>d__1.<>4__this"" IL_0006: ldfld ""object C.x"" IL_000b: ret }");
            });
        }

        [WorkItem(1024137, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/1024137")]
        [Fact]
        public void InstanceMembersInAsync()
        {
            var source = @" using System; using System.Threading.Tasks; class C { object x; async Task F() { await Console.Out.WriteLineAsync(this.ToString()); } }";
            var compilation0 = CreateCompilationWithMscorlib45(source, options: TestOptions.DebugDll);
            WithRuntimeInstance(compilation0, runtime =>
            {
                var context = CreateMethodContext(runtime, "C.<F>d__1.MoveNext");
                string error;
                var testData = new CompilationTestData();
context.CompileExpression("this.x", out error, testData); testData.GetMethodData("<>x.<>m0").VerifyIL(@" { // Code size 12 (0xc) .maxstack 1 .locals init (int V_0, System.Runtime.CompilerServices.TaskAwaiter V_1, C.<F>d__1 V_2, System.Exception V_3) IL_0000: ldarg.0 IL_0001: ldfld ""C C.<F>d__1.<>4__this"" IL_0006: ldfld ""object C.x"" IL_000b: ret }"); }); } [WorkItem(1024137, "http://vstfdevdiv:8080/DevDiv2/DevDiv/_workitems/edit/1024137")] [Fact] public void InstanceMembersInLambda() { var source = @"class C { object x; void F() { System.Action a = () => this.x.ToString(); a(); } }"; var compilation0 = CreateStandardCompilation(source, options: TestOptions.DebugDll); WithRuntimeInstance(compilation0, runtime => { var context = CreateMethodContext(runtime, "C.<F>b__1_0"); string error; var testData = new CompilationTestData(); context.CompileExpression("this.x", out error, testData); testData.GetMethodData("<>x.<>m0").VerifyIL(@" { // Code size 7 (0x7) .maxstack 1 IL_0000: ldarg.0 IL_0001: ldfld ""object C.x"" IL_0006: ret }"); }); } [Fact] public void BaseMembersInIterator() { var source = @" class Base { protected int x; } class Derived : Base { new protected object x; System.Collections.IEnumerable M() { yield return base.x; } }"; var compilation0 = CreateStandardCompilation(source, options: TestOptions.DebugDll); WithRuntimeInstance(compilation0, runtime => { var context = CreateMethodContext(runtime, "Derived.<M>d__1.MoveNext"); string error; var testData = new CompilationTestData(); context.CompileExpression("base.x", out error, testData); testData.GetMethodData("<>x.<>m0").VerifyIL(@" { // Code size 12 (0xc) .maxstack 1 .locals init (int V_0) IL_0000: ldarg.0 IL_0001: ldfld ""Derived Derived.<M>d__1.<>4__this"" IL_0006: ldfld ""int Base.x"" IL_000b: ret }"); }); } [Fact] public void BaseMembersInAsync() { var source = @" using System; using System.Threading.Tasks; class Base { protected int x; } class Derived : Base { new protected object x; async Task M() 
{ await Console.Out.WriteLineAsync(this.ToString()); } }"; var compilation0 = CreateCompilationWithMscorlib45(source, options: TestOptions.DebugDll); WithRuntimeInstance(compilation0, runtime => { var context = CreateMethodContext(runtime, "Derived.<M>d__1.MoveNext"); string error; var testData = new CompilationTestData(); context.CompileExpression("base.x", out error, testData); testData.GetMethodData("<>x.<>m0").VerifyIL(@" { // Code size 12 (0xc) .maxstack 1 .locals init (int V_0, System.Runtime.CompilerServices.TaskAwaiter V_1, Derived.<M>d__1 V_2, System.Exception V_3) IL_0000: ldarg.0 IL_0001: ldfld ""Derived Derived.<M>d__1.<>4__this"" IL_0006: ldfld ""int Base.x"" IL_000b: ret }"); }); } [Fact] public void BaseMembersInLambda() { var source = @" class Base { protected int x; } class Derived : Base { new protected object x; void F() { System.Action a = () => this.x.ToString(); a(); } }"; var compilation0 = CreateStandardCompilation(source, options: TestOptions.DebugDll); WithRuntimeInstance(compilation0, runtime => { var context = CreateMethodContext(runtime, "Derived.<F>b__1_0"); string error; var testData = new CompilationTestData(); context.CompileExpression("this.x", out error, testData); testData.GetMethodData("<>x.<>m0").VerifyIL(@" { // Code size 7 (0x7) .maxstack 1 IL_0000: ldarg.0 IL_0001: ldfld ""object Derived.x"" IL_0006: ret }"); }); } [Fact] public void IteratorOverloading_Parameters1() { var source = @" public class C { public System.Collections.IEnumerable M() { yield return this; } public System.Collections.IEnumerable M(int x) { return null; } }"; CheckIteratorOverloading(source, m => m.ParameterCount == 0); } [Fact] public void IteratorOverloading_Parameters2() // Same as above, but declarations reversed. 
{ var source = @" public class C { public System.Collections.IEnumerable M(int x) { return null; } public System.Collections.IEnumerable M() { yield return this; } }"; // NB: We pick the wrong overload, but it doesn't matter because // the methods have the same characteristics. // Also, we don't require this behavior, we're just documenting it. CheckIteratorOverloading(source, m => m.ParameterCount == 1); } [Fact] public void IteratorOverloading_Staticness() { var source = @" public class C { public static System.Collections.IEnumerable M(int x) { return null; } // NB: We declare the interesting overload last so we know we're not // just picking the first one by mistake. public System.Collections.IEnumerable M() { yield return this; } }"; CheckIteratorOverloading(source, m => !m.IsStatic); } [Fact] public void IteratorOverloading_Abstractness() { var source = @" public abstract class C { public abstract System.Collections.IEnumerable M(int x); // NB: We declare the interesting overload last so we know we're not // just picking the first one by mistake. public System.Collections.IEnumerable M() { yield return this; } }"; CheckIteratorOverloading(source, m => !m.IsAbstract); } [Fact] public void IteratorOverloading_Arity1() { var source = @" public class C { public System.Collections.IEnumerable M<T>(int x) { return null; } // NB: We declare the interesting overload last so we know we're not // just picking the first one by mistake. public System.Collections.IEnumerable M() { yield return this; } }"; CheckIteratorOverloading(source, m => m.Arity == 0); } [Fact] public void IteratorOverloading_Arity2() { var source = @" public class C { public System.Collections.IEnumerable M(int x) { return null; } // NB: We declare the interesting overload last so we know we're not // just picking the first one by mistake. 
public System.Collections.IEnumerable M<T>() { yield return this; } }"; CheckIteratorOverloading(source, m => m.Arity == 1); } [Fact] public void IteratorOverloading_Constraints1() { var source = @" public class C { public System.Collections.IEnumerable M<T>(int x) where T : struct { return null; } // NB: We declare the interesting overload last so we know we're not // just picking the first one by mistake. public System.Collections.IEnumerable M<T>() where T : class { yield return this; } }"; CheckIteratorOverloading(source, m => m.TypeParameters.Single().HasReferenceTypeConstraint); } [Fact] public void IteratorOverloading_Constraints2() { var source = @" using System.Collections.Generic; public class C { public System.Collections.IEnumerable M<T, U>(int x) where T : class where U : IEnumerable<T> { return null; } // NB: We declare the interesting overload last so we know we're not // just picking the first one by mistake. public System.Collections.IEnumerable M<T, U>() where U : class where T : IEnumerable<U> { yield return this; } }"; // NOTE: This isn't the feature we're switching on, but it is a convenient // differentiator. 
CheckIteratorOverloading(source, m => m.ParameterCount == 0); } private static void CheckIteratorOverloading(string source, Func<MethodSymbol, bool> isDesiredOverload) { var comp1 = CreateStandardCompilation(source, options: TestOptions.DebugDll); var ref1 = comp1.EmitToImageReference(); var comp2 = CreateStandardCompilation("", new[] { ref1 }, options: TestOptions.DebugDll); var originalType = comp2.GlobalNamespace.GetMember<NamedTypeSymbol>("C"); var iteratorMethod = originalType.GetMembers("M").OfType<MethodSymbol>().Single(isDesiredOverload); var stateMachineType = originalType.GetMembers().OfType<NamedTypeSymbol>().Single(t => GeneratedNames.GetKind(t.Name) == GeneratedNameKind.StateMachineType); var moveNextMethod = stateMachineType.GetMember<MethodSymbol>("MoveNext"); var guessedIterator = CompilationContext.GetSubstitutedSourceMethod(moveNextMethod, sourceMethodMustBeInstance: true); Assert.Equal(iteratorMethod, guessedIterator.OriginalDefinition); } } }
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Security.Permissions;

namespace System.ComponentModel.Design.Serialization
{
    /// <summary>
    /// A member relationship service is used by a serializer to announce that one
    /// property is related to a property on another object. Consider a code
    /// based serialization scheme where code is of the following form:
    ///
    /// object1.Property1 = object2.Property2
    ///
    /// Upon interpretation of this code, Property1 on object1 will be
    /// set to the return value of object2.Property2. But the relationship
    /// between these two objects is lost. Serialization schemes that
    /// wish to maintain this relationship may install a MemberRelationshipService
    /// into the serialization manager. When an object is deserialized
    /// this service will be notified of these relationships. It is up to the service
    /// to act on these notifications if it wishes. During serialization, the
    /// service is also consulted. If a relationship exists the same
    /// relationship is maintained by the serializer.
    /// </summary>
    public abstract class MemberRelationshipService
    {
        // Backing store for relationships. Entries hold their owner through a
        // WeakReference (see RelationshipEntry), so recording a relationship does
        // not keep the owning object alive. The field is created eagerly and is
        // never null, so no lazy-initialization checks are required.
        private readonly Dictionary<RelationshipEntry, RelationshipEntry> _relationships = new Dictionary<RelationshipEntry, RelationshipEntry>();

        /// <summary>
        /// Returns the current relationship associated with the source, or MemberRelationship.Empty if
        /// there is no relationship. Also sets a relationship between two objects. Empty
        /// can also be passed as the property value, in which case the relationship will
        /// be cleared.
        /// </summary>
        /// <exception cref="ArgumentNullException">Thrown when the source's Owner or Member is null.</exception>
        [SuppressMessage("Microsoft.Design", "CA1043:UseIntegralOrStringArgumentForIndexers")]
        public MemberRelationship this[MemberRelationship source]
        {
            get
            {
                // nameof keeps the reported parameter names ("Owner" / "Member")
                // refactor-safe while producing exactly the same strings as the
                // original literals.
                if (source.Owner == null)
                    throw new ArgumentNullException(nameof(source.Owner));
                if (source.Member == null)
                    throw new ArgumentNullException(nameof(source.Member));

                return GetRelationship(source);
            }
            set
            {
                if (source.Owner == null)
                    throw new ArgumentNullException(nameof(source.Owner));
                if (source.Member == null)
                    throw new ArgumentNullException(nameof(source.Member));

                SetRelationship(source, value);
            }
        }

        /// <summary>
        /// Returns the current relationship associated with the source, or null if
        /// there is no relationship. Also sets a relationship between two objects. Null
        /// can be passed as the property value, in which case the relationship will
        /// be cleared.
        /// </summary>
        /// <exception cref="ArgumentNullException">Thrown when sourceOwner or sourceMember is null.</exception>
        [SuppressMessage("Microsoft.Design", "CA1023:IndexersShouldNotBeMultidimensional")]
        public MemberRelationship this[object sourceOwner, MemberDescriptor sourceMember]
        {
            get
            {
                if (sourceOwner == null)
                    throw new ArgumentNullException(nameof(sourceOwner));
                if (sourceMember == null)
                    throw new ArgumentNullException(nameof(sourceMember));

                return GetRelationship(new MemberRelationship(sourceOwner, sourceMember));
            }
            set
            {
                if (sourceOwner == null)
                    throw new ArgumentNullException(nameof(sourceOwner));
                if (sourceMember == null)
                    throw new ArgumentNullException(nameof(sourceMember));

                SetRelationship(new MemberRelationship(sourceOwner, sourceMember), value);
            }
        }

        /// <summary>
        /// This is the implementation API for returning relationships. The default implementation stores the
        /// relationship in a table. Relationships are stored weakly, so they do not keep an object alive.
        /// </summary>
        protected virtual MemberRelationship GetRelationship(MemberRelationship source)
        {
            RelationshipEntry retVal;

            // A stale entry (one whose weakly-held owner has been collected) is
            // treated the same as a missing entry.
            if (_relationships.TryGetValue(new RelationshipEntry(source), out retVal) && retVal.Owner.IsAlive)
            {
                return new MemberRelationship(retVal.Owner.Target, retVal.Member);
            }

            return MemberRelationship.Empty;
        }

        /// <summary>
        /// This is the implementation API for returning relationships. The default implementation stores the
        /// relationship in a table. Relationships are stored weakly, so they do not keep an object alive. Empty can be
        /// passed in for relationship to remove the relationship.
        /// </summary>
        /// <exception cref="ArgumentException">Thrown when the relationship is not supported by this service.</exception>
        protected virtual void SetRelationship(MemberRelationship source, MemberRelationship relationship)
        {
            if (!relationship.IsEmpty && !SupportsRelationship(source, relationship))
            {
                // Build a descriptive error message; fall back to ToString()
                // when no component name is registered for an owner.
                string sourceName = TypeDescriptor.GetComponentName(source.Owner);
                string relName = TypeDescriptor.GetComponentName(relationship.Owner);
                if (sourceName == null)
                {
                    sourceName = source.Owner.ToString();
                }
                if (relName == null)
                {
                    relName = relationship.Owner.ToString();
                }
                throw new ArgumentException(SR.Format(SR.MemberRelationshipService_RelationshipNotSupported, sourceName, source.Member.Name, relName, relationship.Member.Name));
            }

            _relationships[new RelationshipEntry(source)] = new RelationshipEntry(relationship);
        }

        /// <summary>
        /// Returns true if the provided relationship is supported.
        /// </summary>
        public abstract bool SupportsRelationship(MemberRelationship source, MemberRelationship relationship);

        /// <summary>
        /// Used as storage in our relationship table
        /// </summary>
        private struct RelationshipEntry
        {
            // The owner is held weakly so table entries never extend an object's lifetime.
            internal readonly WeakReference Owner;
            internal readonly MemberDescriptor Member;

            // The hash code is captured eagerly from the live owner because the
            // weak reference may be collected later, and a dictionary key's hash
            // must never change while the key is stored.
            private readonly int _hashCode;

            internal RelationshipEntry(MemberRelationship rel)
            {
                Owner = new WeakReference(rel.Owner);
                Member = rel.Member;
                _hashCode = rel.Owner == null ? 0 : rel.Owner.GetHashCode();
            }

            public override bool Equals(object o)
            {
                if (o is RelationshipEntry)
                {
                    RelationshipEntry e = (RelationshipEntry)o;
                    return this == e;
                }

                return false;
            }

            public static bool operator ==(RelationshipEntry re1, RelationshipEntry re2)
            {
                // Owners compare by reference identity; a collected owner
                // participates in the comparison as null.
                object owner1 = (re1.Owner.IsAlive ? re1.Owner.Target : null);
                object owner2 = (re2.Owner.IsAlive ? re2.Owner.Target : null);
                return owner1 == owner2 && re1.Member.Equals(re2.Member);
            }

            public static bool operator !=(RelationshipEntry re1, RelationshipEntry re2)
            {
                return !(re1 == re2);
            }

            public override int GetHashCode()
            {
                return _hashCode;
            }
        }
    }

    /// <summary>
    /// This class represents a single relationship between an object and a member.
    /// </summary>
    public struct MemberRelationship
    {
        public static readonly MemberRelationship Empty = new MemberRelationship();

        /// <summary>
        /// Creates a new member relationship.
        /// </summary>
        /// <exception cref="ArgumentNullException">Thrown when owner or member is null.</exception>
        public MemberRelationship(object owner, MemberDescriptor member)
        {
            if (owner == null)
                throw new ArgumentNullException(nameof(owner));
            if (member == null)
                throw new ArgumentNullException(nameof(member));

            Owner = owner;
            Member = member;
        }

        /// <summary>
        /// Returns true if this relationship is empty.
        /// </summary>
        public bool IsEmpty => Owner == null;

        /// <summary>
        /// The member in this relationship.
        /// </summary>
        public MemberDescriptor Member { get; }

        /// <summary>
        /// The object owning the member.
        /// </summary>
        public object Owner { get; }

        /// <summary>
        /// Infrastructure support to make this a first class struct
        /// </summary>
        public override bool Equals(object obj)
        {
            if (!(obj is MemberRelationship))
                return false;

            MemberRelationship rel = (MemberRelationship)obj;
            return rel.Owner == Owner && rel.Member == Member;
        }

        /// <summary>
        /// Infrastructure support to make this a first class struct
        /// </summary>
        public override int GetHashCode()
        {
            if (Owner == null)
                return base.GetHashCode();

            return Owner.GetHashCode() ^ Member.GetHashCode();
        }

        /// <summary>
        /// Infrastructure support to make this a first class struct
        /// </summary>
        public static bool operator ==(MemberRelationship left, MemberRelationship right)
        {
            return left.Owner == right.Owner && left.Member == right.Member;
        }

        /// <summary>
        /// Infrastructure support to make this a first class struct
        /// </summary>
        public static bool operator !=(MemberRelationship left, MemberRelationship right)
        {
            return !(left == right);
        }
    }
}
using System; using System.Windows; using System.Windows.Controls; using System.Windows.Input; using System.Windows.Automation; using System.Windows.Automation.Peers; using System.Windows.Automation.Provider; using System.Globalization; using System.ComponentModel; namespace TAlex.WPF.Controls { /// <summary> /// Represents an up-down control that displays numeric values. /// </summary> [DefaultProperty("Value"), DefaultEvent("ValueChanged")] public class NumericUpDown : Control { #region Fields private const decimal DefaultMinimum = 0M; private const decimal DefaultMaximum = 100M; private const decimal DefaultIncrement = 1M; private const int DefaultDecimalPlaces = 0; private const NumberStyles NumberStyle = NumberStyles.AllowDecimalPoint | NumberStyles.AllowLeadingSign; /// <summary> /// Identifies the <see cref="TAlex.WPF.Controls.NumericUpDown.Value"/> dependency property. /// </summary> public static readonly DependencyProperty ValueProperty; /// <summary> /// Identifies the <see cref="TAlex.WPF.Controls.NumericUpDown.Minimum"/> dependency property. /// </summary> public static readonly DependencyProperty MinimumProperty; /// <summary> /// Identifies the <see cref="TAlex.WPF.Controls.NumericUpDown.Maximum"/> dependency property. /// </summary> public static readonly DependencyProperty MaximumProperty; /// <summary> /// Identifies the <see cref="TAlex.WPF.Controls.NumericUpDown.Increment"/> dependency property. /// </summary> public static readonly DependencyProperty IncrementProperty; /// <summary> /// Identifies the <see cref="TAlex.WPF.Controls.NumericUpDown.DecimalPlaces"/> dependency property. /// </summary> public static readonly DependencyProperty DecimalPlacesProperty; /// <summary> /// Identifies the <see cref="TAlex.WPF.Controls.NumericUpDown.InterceptArrowKeys"/> dependency property. 
/// </summary> public static readonly DependencyProperty InterceptArrowKeysProperty; /// <summary> /// Identifies the <see cref="TAlex.WPF.Controls.NumericUpDown.IsReadOnly"/> dependency property. /// </summary> public static readonly DependencyProperty IsReadOnlyProperty; /// <summary> /// Identifies the <see cref="TAlex.WPF.Controls.NumericUpDown.NumberFormatInfo"/> dependency property. /// </summary> public static readonly DependencyProperty NumberFormatInfoProperty; /// <summary> /// Identifies the <see cref="TAlex.WPF.Controls.NumericUpDown.ValueChanged"/> routed event. /// </summary> public static readonly RoutedEvent ValueChangedEvent; /// <summary> /// The command that increases the value assigned to the numeric up-down control. /// </summary> public static RoutedCommand IncreaseCommand; /// <summary> /// The command that decreases the value assigned to the numeric up-down control. /// </summary> public static RoutedCommand DecreaseCommand; private decimal _inputValue; private string _lastInput; private TextBox _textBox; #endregion #region Properties /// <summary> /// Gets or sets the value assigned to the numeric up-down control. /// This is a dependency property. /// </summary> public decimal Value { get { return (decimal)GetValue(ValueProperty); } set { SetValue(ValueProperty, value); } } /// <summary> /// Gets or sets the minimum allowed value for the numeric up-down control. /// This is a dependency property. /// </summary> public decimal Minimum { get { return (decimal)GetValue(MinimumProperty); } set { SetValue(MinimumProperty, value); } } /// <summary> /// Gets or sets the maximum allowed value for the numeric up-down control. /// This is a dependency property. /// </summary> public decimal Maximum { get { return (decimal)GetValue(MaximumProperty); } set { SetValue(MaximumProperty, value); } } /// <summary> /// Gets or sets the value to increment or decrement for the numeric up-down control. /// This is a dependency property. 
/// </summary> public decimal Increment { get { return (decimal)GetValue(IncrementProperty); } set { SetValue(IncrementProperty, value); } } /// <summary> /// Gets or sets the number of decimal places to display in the numeric up-down control. /// This is a dependency property. /// </summary> public int DecimalPlaces { get { return (int)GetValue(DecimalPlacesProperty); } set { SetValue(DecimalPlacesProperty, value); } } /// <summary> /// Gets or sets a value that indicating whether the user can use the UP ARROW and DOWN ARROW keys to select values. /// This is a dependency property. /// </summary> public bool InterceptArrowKeys { get { return (bool)GetValue(InterceptArrowKeysProperty); } set { SetValue(InterceptArrowKeysProperty, value); } } /// <summary> /// Gets or sets a value that indicating whether the text can be changed by the use of the up or down buttons only. /// This is a dependency property. /// </summary> public bool IsReadOnly { get { return (bool)GetValue(IsReadOnlyProperty); } set { SetValue(IsReadOnlyProperty, value); } } /// <summary> /// Gets or sets the NumberFormatInfo value. /// This is a dependency property. /// </summary> public NumberFormatInfo NumberFormatInfo { get { return (NumberFormatInfo)GetValue(NumberFormatInfoProperty); } set { SetValue(NumberFormatInfoProperty, value); } } /// <summary> /// Gets the current text content held by the text box. /// </summary> public string ContentText { get { if (_textBox != null) { return _textBox.Text; } return null; } } #endregion #region Events /// <summary> /// Occurs when the Value property changes. 
/// </summary> public event RoutedPropertyChangedEventHandler<decimal> ValueChanged { add { AddHandler(ValueChangedEvent, value); } remove { RemoveHandler(ValueChangedEvent, value); } } #endregion #region Constructors static NumericUpDown() { DefaultStyleKeyProperty.OverrideMetadata(typeof(NumericUpDown), new FrameworkPropertyMetadata(typeof(NumericUpDown))); InitializeCommands(); ValueProperty = DependencyProperty.Register("Value", typeof(decimal), typeof(NumericUpDown), new FrameworkPropertyMetadata(DefaultMinimum, OnValueChanged, CoerceValue)); MinimumProperty = DependencyProperty.Register("Minimum", typeof(decimal), typeof(NumericUpDown), new FrameworkPropertyMetadata(DefaultMinimum, OnMinimumChanged, CoerceMinimum)); MaximumProperty = DependencyProperty.Register("Maximum", typeof(decimal), typeof(NumericUpDown), new FrameworkPropertyMetadata(DefaultMaximum, OnMaximumChanged, CoerceMaximum)); IncrementProperty = DependencyProperty.Register("Increment", typeof(decimal), typeof(NumericUpDown), new FrameworkPropertyMetadata(DefaultIncrement, OnIncrementChanged, CoerceIncrement), ValidateIncrement); DecimalPlacesProperty = DependencyProperty.Register("DecimalPlaces", typeof(int), typeof(NumericUpDown), new FrameworkPropertyMetadata(DefaultDecimalPlaces, OnDecimalPlacesChanged), ValidateDecimalPlaces); InterceptArrowKeysProperty = DependencyProperty.Register("InterceptArrowKeys", typeof(bool), typeof(NumericUpDown), new FrameworkPropertyMetadata(true)); IsReadOnlyProperty = DependencyProperty.Register("IsReadOnly", typeof(bool), typeof(NumericUpDown), new FrameworkPropertyMetadata(false, OnIsReadOnlyChanged)); NumberFormatInfoProperty = DependencyProperty.Register("NumberFormatInfo", typeof(NumberFormatInfo), typeof(NumericUpDown), new FrameworkPropertyMetadata(NumberFormatInfo.CurrentInfo.Clone(), OnNumberFormatInfoChanged)); ValueChangedEvent = EventManager.RegisterRoutedEvent("ValueChanged", RoutingStrategy.Bubble, 
typeof(RoutedPropertyChangedEventHandler<decimal>), typeof(NumericUpDown)); // Listen to MouseLeftButtonDown event to determine if NumericUpDown should move focus to itself EventManager.RegisterClassHandler(typeof(NumericUpDown), Mouse.MouseDownEvent, new MouseButtonEventHandler(OnMouseLeftButtonDown), true); } /// <summary> /// Initializes a new instance of the <see cref="TAlex.WPF.Controls.NumericUpDown"/> class. /// </summary> public NumericUpDown() : base() { _lastInput = String.Empty; } #endregion #region Methods #region Statics private static void OnValueChanged(DependencyObject obj, DependencyPropertyChangedEventArgs args) { NumericUpDown control = (NumericUpDown)obj; decimal oldValue = (decimal)args.OldValue; decimal newValue = (decimal)args.NewValue; // Fire Automation events NumericUpDownAutomationPeer peer = UIElementAutomationPeer.FromElement(control) as NumericUpDownAutomationPeer; if (peer != null) { peer.RaiseValueChangedEvent(oldValue, newValue); } RoutedPropertyChangedEventArgs<decimal> e = new RoutedPropertyChangedEventArgs<decimal>( oldValue, newValue, ValueChangedEvent); control.OnValueChanged(e); control.UpdateText(); } private static void OnMinimumChanged(DependencyObject element, DependencyPropertyChangedEventArgs args) { element.CoerceValue(MaximumProperty); element.CoerceValue(ValueProperty); } private static void OnMaximumChanged(DependencyObject element, DependencyPropertyChangedEventArgs args) { element.CoerceValue(ValueProperty); } private static void OnIncrementChanged(DependencyObject element, DependencyPropertyChangedEventArgs args) { } private static void OnDecimalPlacesChanged(DependencyObject element, DependencyPropertyChangedEventArgs args) { NumericUpDown control = (NumericUpDown)element; control.CoerceValue(IncrementProperty); control.CoerceValue(MinimumProperty); control.CoerceValue(MaximumProperty); control.CoerceValue(ValueProperty); control.UpdateText(); } private static void OnIsReadOnlyChanged(DependencyObject element, 
DependencyPropertyChangedEventArgs args) { NumericUpDown control = element as NumericUpDown; bool readOnly = (bool)args.NewValue; if (readOnly != control._textBox.IsReadOnly) { control._textBox.IsReadOnly = readOnly; } } private static void OnNumberFormatInfoChanged(DependencyObject element, DependencyPropertyChangedEventArgs args) { NumericUpDown control = element as NumericUpDown; control.UpdateText(); } private static object CoerceValue(DependencyObject element, object value) { decimal newValue = (decimal)value; NumericUpDown control = (NumericUpDown)element; newValue = Math.Max(control.Minimum, Math.Min(control.Maximum, newValue)); newValue = Decimal.Round(newValue, control.DecimalPlaces); return newValue; } private static object CoerceMinimum(DependencyObject element, object value) { decimal newMinimum = (decimal)value; NumericUpDown control = (NumericUpDown)element; return Decimal.Round(newMinimum, control.DecimalPlaces); } private static object CoerceMaximum(DependencyObject element, object value) { NumericUpDown control = (NumericUpDown)element; decimal newMaximum = (decimal)value; return Decimal.Round(Math.Max(newMaximum, control.Minimum), control.DecimalPlaces); } private static object CoerceIncrement(DependencyObject element, object value) { decimal newIncrement = (decimal)value; NumericUpDown control = (NumericUpDown)element; decimal coercedNewIncrement = Decimal.Round(newIncrement, control.DecimalPlaces); if (coercedNewIncrement == Decimal.Zero) { coercedNewIncrement = SmallestForDecimalPlaces(control.DecimalPlaces); } return coercedNewIncrement; } private static bool ValidateIncrement(object value) { decimal change = (decimal)value; return change > 0; } private static bool ValidateDecimalPlaces(object value) { int decimalPlaces = (int)value; return decimalPlaces >= 0; } private static void InitializeCommands() { IncreaseCommand = new RoutedCommand("IncreaseCommand", typeof(NumericUpDown)); 
CommandManager.RegisterClassCommandBinding(typeof(NumericUpDown), new CommandBinding(IncreaseCommand, OnIncreaseCommand)); CommandManager.RegisterClassInputBinding(typeof(NumericUpDown), new InputBinding(IncreaseCommand, new KeyGesture(Key.Up))); DecreaseCommand = new RoutedCommand("DecreaseCommand", typeof(NumericUpDown)); CommandManager.RegisterClassCommandBinding(typeof(NumericUpDown), new CommandBinding(DecreaseCommand, OnDecreaseCommand)); CommandManager.RegisterClassInputBinding(typeof(NumericUpDown), new InputBinding(DecreaseCommand, new KeyGesture(Key.Down))); } private static void OnIncreaseCommand(object sender, ExecutedRoutedEventArgs e) { NumericUpDown control = sender as NumericUpDown; if (control != null) { control.OnIncrease(); } } private static void OnDecreaseCommand(object sender, ExecutedRoutedEventArgs e) { NumericUpDown control = sender as NumericUpDown; if (control != null) { control.OnDecrease(); } } /// <summary> /// This is a class handler for MouseLeftButtonDown event. /// The purpose of this handle is to move input focus to NumericUpDown when user pressed /// mouse left button on any part of slider that is not focusable. /// </summary> /// <param name="sender">The object where the event handler is attached.</param> /// <param name="e">The event data.</param> private static void OnMouseLeftButtonDown(object sender, MouseButtonEventArgs e) { NumericUpDown control = (NumericUpDown)sender; // When someone click on a part in the NumericUpDown and it's not focusable // NumericUpDown needs to take the focus in order to process keyboard correctly if (!control.IsKeyboardFocusWithin) { e.Handled = control.Focus() || e.Handled; } } private static decimal SmallestForDecimalPlaces(int decimalPlaces) { if (decimalPlaces < 0) throw new ArgumentOutOfRangeException("decimalPlaces"); decimal d = 1; for (int i = 0; i < decimalPlaces; i++) { d /= 10; } return d; } #endregion #region Dynamics /// <summary> /// Called when the template's tree is generated. 
/// </summary>
public override void OnApplyTemplate()
{
    base.OnApplyTemplate();

    // Unhook handlers from the TextBox of any previously applied template so
    // the old element no longer feeds events into this control.
    if (_textBox != null)
    {
        _textBox.TextChanged -= new TextChangedEventHandler(OnTextBoxTextChanged);
        _textBox.PreviewKeyDown -= new KeyEventHandler(OnTextBoxPreviewKeyDown);
    }

    _textBox = (TextBox)base.GetTemplateChild("textbox");

    if (_textBox != null)
    {
        _textBox.TextChanged += new TextChangedEventHandler(OnTextBoxTextChanged);
        _textBox.PreviewKeyDown += new KeyEventHandler(OnTextBoxPreviewKeyDown);

        // FIX: this was hard-coded to `false`, which silently discarded an
        // IsReadOnly value set before the template was applied —
        // OnIsReadOnlyChanged only runs on property *changes*, so the freshly
        // created TextBox never received the current state.
        _textBox.IsReadOnly = IsReadOnly;
    }

    UpdateText();
}

/// <summary>
/// Creates an appropriate NumericUpDownAutomationPeer for this control as part of the WPF infrastructure.
/// </summary>
/// <returns>A new NumericUpDownAutomationPeer associated with this control.</returns>
protected override AutomationPeer OnCreateAutomationPeer()
{
    return new NumericUpDownAutomationPeer(this);
}

/// <summary>
/// Reports that the IsKeyboardFocusWithin property changed.
/// </summary>
/// <param name="e">The event data for the IsKeyboardFocusWithinChanged event.</param>
protected override void OnIsKeyboardFocusWithinChanged(DependencyPropertyChangedEventArgs e)
{
    // Gaining focus forwards it into the TextBox; losing focus commits any
    // pending typed input back into the Value property.
    if ((bool)e.NewValue)
    {
        OnGotFocus();
    }
    else
    {
        OnLostFocus();
    }
}

/// <summary>
/// Handles the System.Windows.Input.Mouse.MouseWheel routed event.
/// </summary>
/// <param name="e">The System.Windows.Input.MouseWheelEventArgs that contains the event data.</param>
protected override void OnMouseWheel(MouseWheelEventArgs e)
{
    base.OnMouseWheel(e);

    // Only spin the value while the control (or its TextBox) owns keyboard focus.
    if (IsKeyboardFocusWithin)
    {
        if (e.Delta > 0)
        {
            OnIncrease();
        }
        else
        {
            OnDecrease();
        }
    }
}

/// <summary>
/// Raises the ValueChanged event.
/// </summary>
/// <param name="args">Arguments associated with the ValueChanged event.</param>
protected virtual void OnValueChanged(RoutedPropertyChangedEventArgs<decimal> args)
{
    RaiseEvent(args);
}

/// <summary>
/// IncreaseCommand event handler.  Commits pending typed input, then steps
/// Value up by Increment when the result stays within Maximum.
/// </summary>
protected virtual void OnIncrease()
{
    UpdateValue();
    if (Value + Increment <= Maximum)
    {
        Value += Increment;
    }
}

/// <summary>
/// DecreaseCommand event handler.
/// </summary>
protected virtual void OnDecrease()
{
    // Commit any pending typed input first, then step down if the result
    // would still be within range.
    UpdateValue();
    if (Value - Increment >= Minimum)
    {
        Value -= Increment;
    }
}

// Moves keyboard focus into the template's TextBox and refreshes the display.
private void OnGotFocus()
{
    if (_textBox != null)
    {
        _textBox.Focus();
    }
    UpdateText();
}

// Commits the typed value and re-renders it in canonical form.
private void OnLostFocus()
{
    UpdateValue();
    UpdateText();
}

// Validates each keystroke in the TextBox: accepts parseable decimals
// (rejecting a decimal separator when DecimalPlaces == 0), remembers the
// last valid input, and rolls back anything unparseable.
private void OnTextBoxTextChanged(object sender, TextChangedEventArgs e)
{
    if (!IsReadOnly)
    {
        string text = _textBox.Text;

        // Allow transient states: empty text or a lone negative sign.
        if (String.IsNullOrEmpty(text) || text == NumberFormatInfo.NegativeSign)
        {
            return;
        }

        decimal parsedValue = 0M;
        if (decimal.TryParse(text, NumberStyle, NumberFormatInfo, out parsedValue))
        {
            // Integer-only mode must not accept a decimal separator.
            if ((DecimalPlaces == 0) && (text.Contains(NumberFormatInfo.NumberDecimalSeparator)))
            {
                ReturnPreviousInput();
                return;
            }
            _lastInput = text;
            _inputValue = parsedValue;
            return;
        }
        ReturnPreviousInput();
    }
    else
    {
        // Read-only: keep the cached input in sync with the displayed value.
        _lastInput = _textBox.Text;
        _inputValue = Value;
    }
}

// Keyboard handling inside the TextBox: Up/Down spin (when InterceptArrowKeys),
// Return commits; any other key is left to the TextBox.
private void OnTextBoxPreviewKeyDown(object sender, KeyEventArgs e)
{
    switch (e.Key)
    {
        case Key.Up:
            if (InterceptArrowKeys)
                OnIncrease();
            break;

        case Key.Down:
            if (InterceptArrowKeys)
                OnDecrease();
            break;

        case Key.Return:
            UpdateValue();
            UpdateText();
            break;

        default:
            return;
    }
    e.Handled = true;
}

/// <summary>
/// Displays the current value of the numeric up-down control in the appropriate format.
/// </summary>
internal void UpdateText()
{
    // Format without a group separator so the text round-trips through TryParse.
    NumberFormatInfo formatInfo = (NumberFormatInfo)NumberFormatInfo.Clone();
    formatInfo.NumberGroupSeparator = String.Empty;

    string formattedValue = Value.ToString("F" + DecimalPlaces, formatInfo);

    if (_textBox != null)
    {
        _lastInput = formattedValue;
        _textBox.Text = formattedValue;
    }
}

/// <summary>
/// Commits the last parsed input into the Value property (coerced into range).
/// </summary>
internal void UpdateValue()
{
    if (_inputValue != Value)
    {
        Value = (decimal)CoerceValue(this, _inputValue);
    }
}

// Restores the last valid text after a rejected edit, keeping the caret and
// selection roughly where the user left them.
private void ReturnPreviousInput()
{
    int selectionLenght = _textBox.SelectionLength;
    int selectionStart = _textBox.SelectionStart;

    _textBox.Text = _lastInput;
    _textBox.SelectionStart = (selectionStart == 0) ?
        0 : (selectionStart - 1);
    _textBox.SelectionLength = selectionLenght;
}

#endregion

#endregion
}

/// <summary>
/// Exposes <see cref="TAlex.WPF.Controls.NumericUpDown"/> types to UI Automation.
/// </summary>
public class NumericUpDownAutomationPeer : FrameworkElementAutomationPeer, IRangeValueProvider
{
    #region Properties

    /// <summary>
    /// Gets the value of the control.
    /// </summary>
    double IRangeValueProvider.Value
    {
        get { return (double)GetOwner().Value; }
    }

    /// <summary>
    /// Gets the minimum range value supported by the control.
    /// </summary>
    double IRangeValueProvider.Minimum
    {
        get { return (double)GetOwner().Minimum; }
    }

    /// <summary>
    /// Gets the maximum range value supported by the control.
    /// </summary>
    double IRangeValueProvider.Maximum
    {
        get { return (double)GetOwner().Maximum; }
    }

    /// <summary>
    /// Gets the value that is added to or subtracted from the <see cref="P:System.Windows.Automation.Provider.IRangeValueProvider.Value" />
    /// property when a small change is made, such as with an arrow key.
    /// </summary>
    double IRangeValueProvider.SmallChange
    {
        get { return (double)GetOwner().Increment; }
    }

    /// <summary>
    /// Gets the value that is added to or subtracted from the <see cref="P:System.Windows.Automation.Provider.IRangeValueProvider.Value" />
    /// property when a large change is made, such as with the PAGE DOWN key.
    /// NOTE(review): the control exposes no separate large step, so this maps
    /// to the same Increment as SmallChange.
    /// </summary>
    double IRangeValueProvider.LargeChange
    {
        get { return (double)GetOwner().Increment; }
    }

    /// <summary>
    /// Gets a value that specifies whether the value of a control is read-only.
    /// </summary>
    bool IRangeValueProvider.IsReadOnly
    {
        get { return GetOwner().IsReadOnly; }
    }

    #endregion

    #region Constructors

    /// <summary>
    /// Initializes a new instance of the <see cref="TAlex.WPF.Controls.NumericUpDownAutomationPeer"/> class.
/// </summary>
/// <param name="owner">The <see cref="TAlex.WPF.Controls.NumericUpDown"/> that is associated with this <see cref="TAlex.WPF.Controls.NumericUpDownAutomationPeer"/>.</param>
public NumericUpDownAutomationPeer(NumericUpDown owner)
    : base(owner)
{
}

#endregion

#region Methods

/// <summary>
/// Returns the control pattern for the <see cref="TAlex.WPF.Controls.NumericUpDown"/> that is associated with this <see cref="TAlex.WPF.Controls.NumericUpDownAutomationPeer"/>.
/// </summary>
/// <param name="patternInterface">A value in the enumeration.</param>
/// <returns>This peer for RangeValue requests; otherwise the base implementation's result.</returns>
public override object GetPattern(PatternInterface patternInterface)
{
    // This peer itself implements IRangeValueProvider, so it serves as the
    // RangeValue pattern provider.
    if (patternInterface == PatternInterface.RangeValue)
    {
        return this;
    }
    return base.GetPattern(patternInterface);
}

/// <summary>
/// Returns the name of the <see cref="TAlex.WPF.Controls.NumericUpDown"/> that is associated with this <see cref="TAlex.WPF.Controls.NumericUpDownAutomationPeer"/>.
/// </summary>
/// <returns>A string that contains "NumericUpDown".</returns>
protected override string GetClassNameCore()
{
    return "NumericUpDown";
}

/// <summary>
/// Returns the control type for the <see cref="TAlex.WPF.Controls.NumericUpDown"/> that is associated with this <see cref="TAlex.WPF.Controls.NumericUpDownAutomationPeer"/>.
/// </summary>
/// <returns>Spinner.</returns>
protected override AutomationControlType GetAutomationControlTypeCore()
{
    return AutomationControlType.Spinner;
}

// Notifies UI Automation clients that the RangeValue pattern's Value changed.
internal void RaiseValueChangedEvent(decimal oldValue, decimal newValue)
{
    base.RaisePropertyChangedEvent(RangeValuePatternIdentifiers.ValueProperty, (double)oldValue, (double)newValue);
}

/// <summary>
/// Returns the <see cref="TAlex.WPF.Controls.NumericUpDown"/> that is associated with this <see cref="TAlex.WPF.Controls.NumericUpDownAutomationPeer"/>.
/// </summary>
/// <returns>The <see cref="TAlex.WPF.Controls.NumericUpDown"/> that is associated with this <see cref="TAlex.WPF.Controls.NumericUpDownAutomationPeer"/>.</returns>
private NumericUpDown GetOwner()
{
    return (NumericUpDown)base.Owner;
}

/// <summary>
/// Sets the value of the control.
/// </summary>
/// <param name="value">The value to set.</param>
/// <exception cref="ElementNotEnabledException">The control is disabled.</exception>
/// <exception cref="ArgumentOutOfRangeException">The value is outside [Minimum, Maximum].</exception>
void IRangeValueProvider.SetValue(double value)
{
    // UIA contract: refuse the request when the element is disabled.
    if (!IsEnabled())
    {
        throw new ElementNotEnabledException();
    }

    decimal val = (decimal)value;
    NumericUpDown control = GetOwner();

    // UIA contract: out-of-range values are rejected rather than clamped.
    if (val < control.Minimum || val > control.Maximum)
    {
        throw new ArgumentOutOfRangeException("value");
    }

    control.Value = val;
}

#endregion
}
}
// DeflaterOutputStream.cs // // Copyright (C) 2001 Mike Krueger // // This file was translated from java, it was part of the GNU Classpath // Copyright (C) 2001 Free Software Foundation, Inc. // // This program is free software; you can redistribute it and/or // modify it under the terms of the GNU General Public License // as published by the Free Software Foundation; either version 2 // of the License, or (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with this program; if not, write to the Free Software // Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. // // Linking this library statically or dynamically with other modules is // making a combined work based on this library. Thus, the terms and // conditions of the GNU General Public License cover the whole // combination. // // As a special exception, the copyright holders of this library give you // permission to link this library with independent modules to produce an // executable, regardless of the license terms of these independent // modules, and to copy and distribute the resulting executable under // terms of your choice, provided that you also meet, for each linked // independent module, the terms and conditions of the license of that // module. An independent module is a module which is not derived from // or based on this library. If you modify this library, you may extend // this exception to your version of the library, but you are not // obligated to do so. If you do not wish to do so, delete this // exception statement from your version. 
using System;
using System.IO;

using ICSharpCode.SharpZipLib.Checksums;
using ICSharpCode.SharpZipLib.Zip.Compression;

namespace ICSharpCode.SharpZipLib.Zip.Compression.Streams
{
    /// <summary>
    /// A special stream deflating or compressing the bytes that are
    /// written to it.  It uses a Deflater to perform actual deflating.
    ///
    /// authors of the original java version : Tom Tromey, Jochen Hoenicke
    /// </summary>
    public class DeflaterOutputStream : Stream
    {
        /// <summary>
        /// This buffer is used temporarily to retrieve the bytes from the
        /// deflater and write them to the underlying output stream.
        /// </summary>
        protected byte[] buf;

        /// <summary>
        /// The deflater which is used to deflate the stream.
        /// </summary>
        protected Deflater def;

        /// <summary>
        /// Base stream the deflater depends on.
        /// </summary>
        protected Stream baseOutputStream;

        /// <summary>
        /// Allows client to determine if an entry can be patched after it's added
        /// (patching requires the underlying stream to be seekable).
        /// </summary>
        public bool CanPatchEntries {
            get {
                return baseOutputStream.CanSeek;
            }
        }

        /// <summary>
        /// Gets value indicating stream can be read from.
        /// NOTE(review): this delegates to the base stream even though the
        /// Read/ReadByte overrides below always throw — confirm whether it
        /// should simply return false.
        /// </summary>
        public override bool CanRead {
            get {
                return baseOutputStream.CanRead;
            }
        }

        /// <summary>
        /// Gets a value indicating if seeking is supported for this stream
        /// This property always returns false
        /// </summary>
        public override bool CanSeek {
            get {
                return false;
            }
        }

        /// <summary>
        /// Get value indicating if this stream supports writing
        /// </summary>
        public override bool CanWrite {
            get {
                return baseOutputStream.CanWrite;
            }
        }

        /// <summary>
        /// Get current length of stream (delegates to the base stream).
        /// </summary>
        public override long Length {
            get {
                return baseOutputStream.Length;
            }
        }

        /// <summary>
        /// The current position within the stream.
/// Getting returns the position of the underlying stream; any attempt to set
/// the position throws a NotSupportedException.
/// </summary>
/// <exception cref="NotSupportedException">Any attempt to set position</exception>
public override long Position {
    get {
        return baseOutputStream.Position;
    }
    set {
        // FIX: exception message previously misspelled "Defalter";
        // dead commented-out delegation removed.
        throw new NotSupportedException("DeflaterOutputStream Position not supported");
    }
}

/// <summary>
/// Sets the current position of this stream to the given value. Not supported by this class!
/// </summary>
/// <exception cref="NotSupportedException">Any access</exception>
public override long Seek(long offset, SeekOrigin origin)
{
    throw new NotSupportedException("DeflaterOutputStream Seek not supported");
}

/// <summary>
/// Sets the length of this stream to the given value. Not supported by this class!
/// </summary>
/// <exception cref="NotSupportedException">Any access</exception>
public override void SetLength(long val)
{
    throw new NotSupportedException("DeflaterOutputStream SetLength not supported");
}

/// <summary>
/// Read a byte from stream advancing position by one. Not supported by this class!
/// </summary>
/// <exception cref="NotSupportedException">Any access</exception>
public override int ReadByte()
{
    throw new NotSupportedException("DeflaterOutputStream ReadByte not supported");
}

/// <summary>
/// Read a block of bytes from stream. Not supported by this class!
/// </summary>
/// <exception cref="NotSupportedException">Any access</exception>
public override int Read(byte[] b, int off, int len)
{
    throw new NotSupportedException("DeflaterOutputStream Read not supported");
}

/// <summary>
/// Asynchronous reads are not supported, a NotSupportedException is always thrown
/// </summary>
/// <param name="buffer"></param>
/// <param name="offset"></param>
/// <param name="count"></param>
/// <param name="callback"></param>
/// <param name="state"></param>
///
/// <returns></returns>
/// <exception cref="NotSupportedException">Any access</exception>
public override IAsyncResult BeginRead(byte[] buffer, int offset, int count, AsyncCallback callback, object state)
{
    throw new NotSupportedException("DeflaterOutputStream BeginRead not currently supported");
}

/// <summary>
/// Asynchronous writes aren't supported, a NotSupportedException is always thrown
/// </summary>
/// <param name="buffer"></param>
/// <param name="offset"></param>
/// <param name="count"></param>
/// <param name="callback"></param>
/// <param name="state"></param>
/// <returns></returns>
/// <exception cref="NotSupportedException">Any access</exception>
public override IAsyncResult BeginWrite(byte[] buffer, int offset, int count, AsyncCallback callback, object state)
{
    throw new NotSupportedException("DeflaterOutputStream BeginWrite not currently supported");
}

/// <summary>
/// Deflates everything in the def's input buffers.  This will call
/// <code>def.deflate()</code> until all bytes from the input buffers
/// are processed.
/// </summary>
protected void Deflate()
{
    // Drain the deflater until it asks for more input; each produced chunk
    // is (optionally) encrypted in place and written straight through.
    while (!def.IsNeedingInput)
    {
        int len = def.Deflate(buf, 0, buf.Length);

        if (len <= 0)
        {
            break;
        }

        if (this.Password != null)
        {
            this.EncryptBlock(buf, 0, len);
        }

        baseOutputStream.Write(buf, 0, len);
    }

    // If the loop exited via len <= 0 while input remains, something is wrong.
    if (!def.IsNeedingInput)
    {
        throw new ApplicationException("DeflaterOutputStream can't deflate all input?");
    }
}

/// <summary>
/// Creates a new DeflaterOutputStream with a default Deflater and default buffer size (512 bytes).
/// </summary>
/// <param name="baseOutputStream">
/// the output stream where deflated output should be written.
/// </param>
public DeflaterOutputStream(Stream baseOutputStream) : this(baseOutputStream, new Deflater(), 512)
{
}

/// <summary>
/// Creates a new DeflaterOutputStream with the given Deflater and
/// default buffer size.
/// </summary>
/// <param name="baseOutputStream">
/// the output stream where deflated output should be written.
/// </param>
/// <param name="defl">
/// the underlying deflater.
/// </param>
public DeflaterOutputStream(Stream baseOutputStream, Deflater defl) : this(baseOutputStream, defl, 512)
{
}

/// <summary>
/// Creates a new DeflaterOutputStream with the given Deflater and
/// buffer size.
/// </summary>
/// <param name="baseOutputStream">
/// The output stream where deflated output is written.
/// </param>
/// <param name="deflater">
/// The underlying deflater to use
/// </param>
/// <param name="bufsize">
/// The buffer size to use when deflating
/// </param>
/// <exception cref="ArgumentOutOfRangeException">
/// bufsize is less than or equal to zero.
/// </exception>
/// <exception cref="ArgumentException">
/// baseOutputStream does not support writing
/// </exception>
/// <exception cref="ArgumentNullException">
/// baseOutputStream or deflater instance is null
/// </exception>
public DeflaterOutputStream(Stream baseOutputStream, Deflater deflater, int bufsize)
{
    // FIX: check for null before dereferencing CanWrite — previously a null
    // stream surfaced as a NullReferenceException instead of a clear error.
    if (baseOutputStream == null) {
        throw new ArgumentNullException("baseOutputStream");
    }

    // FIX: ArgumentException's constructor takes (message, paramName); the
    // original call had the two arguments swapped.
    if (baseOutputStream.CanWrite == false) {
        throw new ArgumentException("must support writing", "baseOutputStream");
    }

    if (deflater == null) {
        throw new ArgumentNullException("deflater");
    }

    if (bufsize <= 0) {
        throw new ArgumentOutOfRangeException("bufsize");
    }

    this.baseOutputStream = baseOutputStream;
    buf = new byte[bufsize];
    def = deflater;
}

/// <summary>
/// Flushes the stream by calling flush() on the deflater and then
/// on the underlying stream.  This ensures that all bytes are
/// flushed.
/// </summary>
public override void Flush()
{
    def.Flush();
    Deflate();
    baseOutputStream.Flush();
}

/// <summary>
/// Finishes the stream by calling finish() on the deflater.
/// </summary>
public virtual void Finish()
{
    def.Finish();

    // Drain all remaining compressed output until the deflater reports
    // completion, encrypting each chunk when a password is set.
    while (!def.IsFinished)
    {
        int len = def.Deflate(buf, 0, buf.Length);
        if (len <= 0)
        {
            break;
        }

        if (this.Password != null)
        {
            this.EncryptBlock(buf, 0, len);
        }

        baseOutputStream.Write(buf, 0, len);
    }
    if (!def.IsFinished)
    {
        throw new ApplicationException("Can't deflate all input?");
    }
    baseOutputStream.Flush();
}

/// <summary>
/// Calls finish() and closes the stream.
/// </summary>
public override void Close()
{
    Finish();
    baseOutputStream.Close();
}

/// <summary>
/// Writes a single byte to the compressed output stream.
/// </summary>
/// <param name="bval">
/// The byte value.
/// </param>
public override void WriteByte(byte bval)
{
    byte[] b = new byte[1];
    b[0] = bval;
    Write(b, 0, 1);
}

/// <summary>
/// Writes bytes from an array to the compressed stream.
/// </summary>
/// <param name="buf">
/// The byte array
/// </param>
/// <param name="off">
/// The offset into the byte array where to start.
/// </param>
/// <param name="len">
/// The number of bytes to write.
/// </param>
public override void Write(byte[] buf, int off, int len)
{
    def.SetInput(buf, off, len);
    Deflate();
}

#region Encryption

// TODO Refactor this code. The presence of Zip specific code in this low level class is wrong
string password = null;
uint[] keys = null;

/// <summary>
/// Get/set the password used for encryption. When null no encryption is performed
/// </summary>
public string Password {
    get {
        return password;
    }
    set {
        password = value;
    }
}

/// <summary>
/// Encrypt a single byte
/// </summary>
/// <returns>
/// The encrypted value
/// </returns>
protected byte EncryptByte()
{
    // Keystream byte derived from key 2, per the traditional PKZIP cipher.
    uint temp = ((keys[2] & 0xFFFF) | 2);
    return (byte)((temp * (temp ^ 1)) >> 8);
}

/// <summary>
/// Encrypt a block of data
/// </summary>
/// <param name="buffer">
/// Data to encrypt.
/// NOTE the original contents of the buffer are lost
/// </param>
/// <param name="offset">
/// Offset of first byte in buffer to encrypt
/// </param>
/// <param name="length">
/// Number of bytes in buffer to encrypt
/// </param>
protected void EncryptBlock(byte[] buffer, int offset, int length)
{
    // XOR each byte with the current keystream byte, then advance the key
    // state using the *plaintext* byte (standard PKZIP stream cipher).
    for (int i = offset; i < offset + length; ++i)
    {
        byte oldbyte = buffer[i];
        buffer[i] ^= EncryptByte();
        UpdateKeys(oldbyte);
    }
}

/// <summary>
/// Initializes encryption keys based on given password
/// </summary>
protected void InitializePassword(string password)
{
    // Standard PKZIP initial key values, then fold in each password byte.
    keys = new uint[] {
        0x12345678,
        0x23456789,
        0x34567890
    };
    for (int i = 0; i < password.Length; ++i)
    {
        UpdateKeys((byte)password[i]);
    }
}

/// <summary>
/// Update encryption keys
/// </summary>
protected void UpdateKeys(byte ch)
{
    // Key schedule from the PKZIP specification (appnote.txt).
    keys[0] = Crc32.ComputeCrc32(keys[0], ch);
    keys[1] = keys[1] + (byte)keys[0];
    keys[1] = keys[1] * 134775813 + 1;
    keys[2] = Crc32.ComputeCrc32(keys[2], (byte)(keys[1] >> 24));
}
#endregion
}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

using System;

using NUnit.Framework;

using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
using Document = Lucene.Net.Documents.Document;
using Field = Lucene.Net.Documents.Field;
using Directory = Lucene.Net.Store.Directory;
using MockRAMDirectory = Lucene.Net.Store.MockRAMDirectory;
using RAMDirectory = Lucene.Net.Store.RAMDirectory;
using PhraseQuery = Lucene.Net.Search.PhraseQuery;
using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;

namespace Lucene.Net.Index
{
    // Tests for IndexWriter.AddIndexesNoOptimize: merging auxiliary
    // directories into a main index without forcing a full optimize.
    [TestFixture]
    public class TestAddIndexesNoOptimize:LuceneTestCase
    {
        [Test]
        public virtual void TestSimpleCase()
        {
            // main directory
            Directory dir = new RAMDirectory();
            // two auxiliary directories
            Directory aux = new RAMDirectory();
            Directory aux2 = new RAMDirectory();

            IndexWriter writer = null;

            writer = NewWriter(dir, true);
            // add 100 documents
            AddDocs(writer, 100);
            Assert.AreEqual(100, writer.MaxDoc());
            writer.Close();

            writer = NewWriter(aux, true);
            writer.UseCompoundFile = false; // use one without a compound file
            // add 40 documents in separate files
            AddDocs(writer, 40);
            Assert.AreEqual(40, writer.MaxDoc());
            writer.Close();

            writer = NewWriter(aux2, true);
            // add 50 documents in compound files
            AddDocs2(writer, 50);
            Assert.AreEqual(50, writer.MaxDoc());
            writer.Close();

            // test doc count before segments are merged
            writer = NewWriter(dir, false);
            Assert.AreEqual(100, writer.MaxDoc());
            writer.AddIndexesNoOptimize(new Directory[]{aux, aux2});
            Assert.AreEqual(190, writer.MaxDoc());
            writer.Close();

            // make sure the old index is correct
            VerifyNumDocs(aux, 40);

            // make sure the new index is correct
            VerifyNumDocs(dir, 190);

            // now add another set in.
            Directory aux3 = new RAMDirectory();
            writer = NewWriter(aux3, true);
            // add 40 documents
            AddDocs(writer, 40);
            Assert.AreEqual(40, writer.MaxDoc());
            writer.Close();

            // test doc count before segments are merged/index is optimized
            writer = NewWriter(dir, false);
            Assert.AreEqual(190, writer.MaxDoc());
            writer.AddIndexesNoOptimize(new Directory[]{aux3});
            Assert.AreEqual(230, writer.MaxDoc());
            writer.Close();

            // make sure the new index is correct
            VerifyNumDocs(dir, 230);

            VerifyTermDocs(dir, new Term("content", "aaa"), 180);

            VerifyTermDocs(dir, new Term("content", "bbb"), 50);

            // now optimize it.
            writer = NewWriter(dir, false);
            writer.Optimize();
            writer.Close();

            // make sure the new index is correct
            VerifyNumDocs(dir, 230);

            VerifyTermDocs(dir, new Term("content", "aaa"), 180);

            VerifyTermDocs(dir, new Term("content", "bbb"), 50);

            // now add a single document
            Directory aux4 = new RAMDirectory();
            writer = NewWriter(aux4, true);
            AddDocs2(writer, 1);
            writer.Close();

            writer = NewWriter(dir, false);
            Assert.AreEqual(230, writer.MaxDoc());
            writer.AddIndexesNoOptimize(new Directory[]{aux4});
            Assert.AreEqual(231, writer.MaxDoc());
            writer.Close();

            VerifyNumDocs(dir, 231);

            VerifyTermDocs(dir, new Term("content", "bbb"), 51);
        }

        // Pending deletes issued after AddIndexesNoOptimize must be honored.
        [Test]
        public virtual void TestWithPendingDeletes()
        {
            // main directory
            Directory dir = new RAMDirectory();
            // auxiliary directory
            Directory aux = new RAMDirectory();

            SetUpDirs(dir, aux);
            IndexWriter writer = NewWriter(dir, false);
            writer.AddIndexesNoOptimize(new Directory[]{aux});

            // Adds 10 docs, then replaces them with another 10
            // docs, so 10 pending deletes:
            for (int i = 0; i < 20; i++)
            {
                Document doc = new Document();
                doc.Add(new Field("id", "" + (i % 10), Field.Store.NO, Field.Index.NOT_ANALYZED));
                doc.Add(new Field("content", "bbb " + i, Field.Store.NO, Field.Index.ANALYZED));
                writer.UpdateDocument(new Term("id", "" + (i % 10)), doc);
            }
            // Deletes one of the 10 added docs, leaving 9:
            PhraseQuery q = new PhraseQuery();
            q.Add(new Term("content", "bbb"));
            q.Add(new Term("content", "14"));
            writer.DeleteDocuments(q);

            writer.Optimize();
            writer.Commit();

            VerifyNumDocs(dir, 1039);
            VerifyTermDocs(dir, new Term("content", "aaa"), 1030);
            VerifyTermDocs(dir, new Term("content", "bbb"), 9);

            writer.Close();
            dir.Close();
            aux.Close();
        }

        // Same scenario, but the updates happen before AddIndexesNoOptimize.
        [Test]
        public virtual void TestWithPendingDeletes2()
        {
            // main directory
            Directory dir = new RAMDirectory();
            // auxiliary directory
            Directory aux = new RAMDirectory();

            SetUpDirs(dir, aux);
            IndexWriter writer = NewWriter(dir, false);

            // Adds 10 docs, then replaces them with another 10
            // docs, so 10 pending deletes:
            for (int i = 0; i < 20; i++)
            {
                Document doc = new Document();
                doc.Add(new Field("id", "" + (i % 10), Field.Store.NO, Field.Index.NOT_ANALYZED));
                doc.Add(new Field("content", "bbb " + i, Field.Store.NO, Field.Index.ANALYZED));
                writer.UpdateDocument(new Term("id", "" + (i % 10)), doc);
            }

            writer.AddIndexesNoOptimize(new Directory[]{aux});

            // Deletes one of the 10 added docs, leaving 9:
            PhraseQuery q = new PhraseQuery();
            q.Add(new Term("content", "bbb"));
            q.Add(new Term("content", "14"));
            writer.DeleteDocuments(q);

            writer.Optimize();
            writer.Commit();

            VerifyNumDocs(dir, 1039);
            VerifyTermDocs(dir, new Term("content", "aaa"), 1030);
            VerifyTermDocs(dir, new Term("content", "bbb"), 9);

            writer.Close();
            dir.Close();
            aux.Close();
        }

        // Same scenario again, with the deletes issued before AddIndexesNoOptimize.
        [Test]
        public virtual void TestWithPendingDeletes3()
        {
            // main directory
            Directory dir = new RAMDirectory();
            // auxiliary directory
            Directory aux = new RAMDirectory();

            SetUpDirs(dir, aux);
            IndexWriter writer = NewWriter(dir, false);

            // Adds 10 docs, then replaces them with another 10
            // docs, so 10 pending deletes:
            for (int i = 0; i < 20; i++)
            {
                Document doc = new Document();
                doc.Add(new Field("id", "" + (i % 10), Field.Store.NO, Field.Index.NOT_ANALYZED));
                doc.Add(new Field("content", "bbb " + i, Field.Store.NO, Field.Index.ANALYZED));
                writer.UpdateDocument(new Term("id", "" + (i % 10)), doc);
            }

            // Deletes one of the 10 added docs, leaving 9:
            PhraseQuery q = new PhraseQuery();
            q.Add(new Term("content", "bbb"));
            q.Add(new Term("content", "14"));
            writer.DeleteDocuments(q);

            writer.AddIndexesNoOptimize(new Directory[]{aux});

            writer.Optimize();
            writer.Commit();

            VerifyNumDocs(dir, 1039);
            VerifyTermDocs(dir, new Term("content", "aaa"), 1030);
            VerifyTermDocs(dir, new Term("content", "bbb"), 9);

            writer.Close();
            dir.Close();
            aux.Close();
        }

        // case 0: add self or exceed maxMergeDocs, expect exception
        [Test]
        public virtual void TestAddSelf()
        {
            // main directory
            Directory dir = new RAMDirectory();
            // auxiliary directory
            Directory aux = new RAMDirectory();
            IndexWriter writer = null;

            writer = NewWriter(dir, true);
            // add 100 documents
            AddDocs(writer, 100);
            Assert.AreEqual(100, writer.MaxDoc());
            writer.Close();

            writer = NewWriter(aux, true);
            writer.UseCompoundFile = false; // use one without a compound file
            writer.SetMaxBufferedDocs(1000);
            // add 40 documents in separate files
            AddDocs(writer, 40);
            writer.Close();

            writer = NewWriter(aux, true);
            writer.UseCompoundFile = false; // use one without a compound file
            writer.SetMaxBufferedDocs(1000);
            AddDocs(writer, 100);
            writer.Close();

            writer = NewWriter(dir, false);
            try
            {
                // cannot add self
                writer.AddIndexesNoOptimize(new Directory[]{aux, dir});
                Assert.IsTrue(false);
            }
            catch (System.ArgumentException)
            {
                // expected: the writer refuses to add its own directory
                Assert.AreEqual(100, writer.MaxDoc());
            }
            writer.Close();

            // make sure the index is correct
            VerifyNumDocs(dir, 100);
        }

        // in all the remaining tests, make the doc count of the oldest segment
        // in dir large so that it is never merged in addIndexesNoOptimize()

        // case 1: no tail segments
        [Test]
        public virtual void TestNoTailSegments()
        {
            // main directory
            Directory dir = new RAMDirectory();
            // auxiliary directory
            Directory aux = new RAMDirectory();

            SetUpDirs(dir, aux);

            IndexWriter writer = NewWriter(dir, false);
            writer.SetMaxBufferedDocs(10);
            writer.MergeFactor = 4;
            AddDocs(writer, 10);

            writer.AddIndexesNoOptimize(new Directory[]{aux});
            Assert.AreEqual(1040, writer.MaxDoc());
            Assert.AreEqual(2, writer.GetSegmentCount());
            Assert.AreEqual(1000, writer.GetDocCount(0));
            writer.Close();

            // make sure the index is correct
            VerifyNumDocs(dir, 1040);
        }

        // case 2: tail segments, invariants hold, no copy
        [Test]
        public virtual void TestNoCopySegments()
        {
            // main directory
            Directory dir = new RAMDirectory();
            // auxiliary directory
            Directory aux = new RAMDirectory();

            SetUpDirs(dir, aux);

            IndexWriter writer = NewWriter(dir, false);
            writer.SetMaxBufferedDocs(9);
            writer.MergeFactor = 4;
            AddDocs(writer, 2);

            writer.AddIndexesNoOptimize(new Directory[]{aux});
            Assert.AreEqual(1032, writer.MaxDoc());
            Assert.AreEqual(2, writer.GetSegmentCount());
            Assert.AreEqual(1000, writer.GetDocCount(0));
            writer.Close();

            // make sure the index is correct
            VerifyNumDocs(dir, 1032);
        }

        // case 3: tail segments, invariants hold, copy, invariants hold
        [Test]
        public virtual void TestNoMergeAfterCopy()
        {
            // main directory
            Directory dir = new RAMDirectory();
            // auxiliary directory
            Directory aux = new RAMDirectory();

            SetUpDirs(dir, aux);

            IndexWriter writer = NewWriter(dir, false);
            writer.SetMaxBufferedDocs(10);
            writer.MergeFactor = 4;

            writer.AddIndexesNoOptimize(new Directory[]{aux, new RAMDirectory(aux)});
            Assert.AreEqual(1060, writer.MaxDoc());
            Assert.AreEqual(1000, writer.GetDocCount(0));
            writer.Close();

            // make sure the index is correct
            VerifyNumDocs(dir, 1060);
        }

        // case 4: tail segments, invariants hold, copy, invariants not hold
        [Test]
        public virtual void TestMergeAfterCopy()
        {
            // main directory
            Directory dir = new RAMDirectory();
            // auxiliary directory
            Directory aux = new RAMDirectory();

            SetUpDirs(dir, aux);

            IndexReader reader = IndexReader.Open(aux, false);
            for (int i = 0; i < 20; i++)
            {
                reader.DeleteDocument(i);
            }
            Assert.AreEqual(10, reader.NumDocs());
            reader.Close();

            IndexWriter writer = NewWriter(dir, false);
            writer.SetMaxBufferedDocs(4);
            writer.MergeFactor = 4;

            writer.AddIndexesNoOptimize(new Directory[]{aux, new RAMDirectory(aux)});
            Assert.AreEqual(1020, writer.MaxDoc());
            Assert.AreEqual(1000, writer.GetDocCount(0));
            writer.Close();

            // make sure the index is correct
            VerifyNumDocs(dir, 1020);
        }

        // case 5: tail segments, invariants not hold
        [Test]
        public virtual void TestMoreMerges()
        {
            // main directory
            Directory dir = new RAMDirectory();
            // auxiliary directory
            Directory aux = new RAMDirectory();
            Directory aux2 = new RAMDirectory();

            SetUpDirs(dir, aux);

            IndexWriter writer = NewWriter(aux2, true);
            writer.SetMaxBufferedDocs(100);
            writer.MergeFactor = 10;
            writer.AddIndexesNoOptimize(new Directory[]{aux});
            Assert.AreEqual(30, writer.MaxDoc());
            Assert.AreEqual(3,
                writer.GetSegmentCount());
            writer.Close();

            IndexReader reader = IndexReader.Open(aux, false);
            for (int i = 0; i < 27; i++)
            {
                reader.DeleteDocument(i);
            }
            Assert.AreEqual(3, reader.NumDocs());
            reader.Close();

            reader = IndexReader.Open(aux2, false);
            for (int i = 0; i < 8; i++)
            {
                reader.DeleteDocument(i);
            }
            Assert.AreEqual(22, reader.NumDocs());
            reader.Close();

            writer = NewWriter(dir, false);
            writer.SetMaxBufferedDocs(6);
            writer.MergeFactor = 4;

            writer.AddIndexesNoOptimize(new Directory[]{aux, aux2});
            Assert.AreEqual(1025, writer.MaxDoc());
            Assert.AreEqual(1000, writer.GetDocCount(0));
            writer.Close();

            // make sure the index is correct
            VerifyNumDocs(dir, 1025);
        }

        // Creates a writer over dir with a whitespace analyzer and a
        // LogDocMergePolicy, so the tests can control merging by doc count.
        private IndexWriter NewWriter(Directory dir, bool create)
        {
            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), create, IndexWriter.MaxFieldLength.UNLIMITED);
            writer.SetMergePolicy(new LogDocMergePolicy(writer));
            return writer;
        }

        // Adds numDocs single-field documents containing the term "aaa".
        private void AddDocs(IndexWriter writer, int numDocs)
        {
            for (int i = 0; i < numDocs; i++)
            {
                Document doc = new Document();
                doc.Add(new Field("content", "aaa", Field.Store.NO, Field.Index.ANALYZED));
                writer.AddDocument(doc);
            }
        }

        // Adds numDocs single-field documents containing the term "bbb".
        private void AddDocs2(IndexWriter writer, int numDocs)
        {
            for (int i = 0; i < numDocs; i++)
            {
                Document doc = new Document();
                doc.Add(new Field("content", "bbb", Field.Store.NO, Field.Index.ANALYZED));
                writer.AddDocument(doc);
            }
        }

        // Asserts both MaxDoc and NumDocs of the index in dir equal numDocs.
        private void VerifyNumDocs(Directory dir, int numDocs)
        {
            IndexReader reader = IndexReader.Open(dir, true);
            Assert.AreEqual(numDocs, reader.MaxDoc);
            Assert.AreEqual(numDocs, reader.NumDocs());
            reader.Close();
        }

        // Asserts that exactly numDocs documents in dir contain the given term.
        private void VerifyTermDocs(Directory dir, Term term, int numDocs)
        {
            IndexReader reader = IndexReader.Open(dir, true);
            TermDocs termDocs = reader.TermDocs(term);
            int count = 0;
            while (termDocs.Next())
                count++;
            Assert.AreEqual(numDocs, count);
            reader.Close();
        }

        // Fills dir with one 1000-doc segment and aux with three 10-doc
        // non-compound segments (the close/reopen per iteration forces a
        // new segment each time).
        private void SetUpDirs(Directory dir, Directory aux)
        {
            IndexWriter writer = null;

            writer = NewWriter(dir, true);
            writer.SetMaxBufferedDocs(1000);
            // add 1000 documents in 1 segment
            AddDocs(writer, 1000);
            Assert.AreEqual(1000, writer.MaxDoc());
            Assert.AreEqual(1, writer.GetSegmentCount());
            writer.Close();

            writer = NewWriter(aux, true);
            writer.UseCompoundFile = false; // use one without a compound file
            writer.SetMaxBufferedDocs(100);
            writer.MergeFactor = 10;
            // add 30 documents in 3 segments
            for (int i = 0; i < 3; i++)
            {
                AddDocs(writer, 10);
                writer.Close();
                writer = NewWriter(aux, false);
                writer.UseCompoundFile = false; // use one without a compound file
                writer.SetMaxBufferedDocs(100);
                writer.MergeFactor = 10;
            }
            Assert.AreEqual(30, writer.MaxDoc());
            Assert.AreEqual(3, writer.GetSegmentCount());
            writer.Close();
        }

        // LUCENE-1270
        [Test]
        public virtual void TestHangOnClose()
        {
            Directory dir = new MockRAMDirectory();
            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
            writer.SetMergePolicy(new LogByteSizeMergePolicy(writer));
            writer.SetMaxBufferedDocs(5);
            writer.UseCompoundFile = false;
            writer.MergeFactor = 100;

            Document doc = new Document();
            doc.Add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES, Field.Index.ANALYZED, Field.TermVector.WITH_POSITIONS_OFFSETS));
            for (int i = 0; i < 60; i++)
                writer.AddDocument(doc);
            writer.SetMaxBufferedDocs(200);
            Document doc2 = new Document();
            doc2.Add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES, Field.Index.NO));
            doc2.Add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES, Field.Index.NO));
            doc2.Add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES, Field.Index.NO));
            doc2.Add(new Field("content", "aaa bbb ccc ddd eee fff ggg hhh iii", Field.Store.YES, Field.Index.NO));
            for (int i = 0; i < 10; i++)
                writer.AddDocument(doc2);
            writer.Close();

            Directory dir2 = new MockRAMDirectory();
            writer = new IndexWriter(dir2, new WhitespaceAnalyzer(), true, IndexWriter.MaxFieldLength.LIMITED);
            LogByteSizeMergePolicy lmp = new LogByteSizeMergePolicy(writer);
            lmp.MinMergeMB = 0.0001;
            writer.SetMergePolicy(lmp);
            writer.MergeFactor = 4;
            writer.UseCompoundFile = false;
            writer.SetMergeScheduler(new SerialMergeScheduler());
            writer.AddIndexesNoOptimize(new Directory[]{dir});
            writer.Close();

            dir.Close();
            dir2.Close();
        }

        // LUCENE-1642: make sure CFS of destination indexwriter
        // is respected when copying tail segments
        [Test]
        public virtual void TestTargetCFS()
        {
            Directory dir = new RAMDirectory();
            IndexWriter writer = NewWriter(dir, true);
            writer.UseCompoundFile = false;
            AddDocs(writer, 1);
            writer.Close();

            Directory other = new RAMDirectory();
            writer = NewWriter(other, true);
            writer.UseCompoundFile = true;
            writer.AddIndexesNoOptimize(new Directory[]{dir});
            Assert.IsTrue(writer.NewestSegment().GetUseCompoundFile());
            writer.Close();
        }
    }
}
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using System.Xml;
using System.Xml.Serialization;
using System.Text;
using System.IO;
using System;

// Callback signature invoked when an XML file has finished loading.
public delegate void onLoadXmlFinish(string xmlfile, XmlDocument doc);

// Static configuration/utility holder: build flags, version info, and helpers
// for (de)serializing simple values and loading XML documents per platform.
public class Config
{
#if PUBLISHED
    public static bool published = true;
#else
    public static bool published = false;
#endif
    // Accumulated load-error text (appending is currently disabled; see the
    // commented-out lines in getLocalXmlDoc / getStreamAssetsXmlDoc).
    public static string error_msg = "";
    // Resource base URL; not assigned anywhere in this file.
    public static string res_url;
#if UNITY_ANDROID
    public static string platform = "android";
#elif UNITY_IPHONE
    public static string platform = "ios";
#else
    public static string platform = "standalone";
#endif
    // Use this for initialization
    public static string version = "0.0.0.3";
    // Numeric form of `version`, presumably for ordering comparisons.
    public static float version_value = 0.003f;
    //public static string update_url = "http://192.168.1.105:8080/tiandao";
    public static string update_url = "http://115.29.141.185/coc/tiandao";

    public Config ()
    {
    }

    // Decodes a UTF-8 byte array into a string.
    public static string UTF8ByteArrayToString(byte[] characters)
    {
        UTF8Encoding encoding = new UTF8Encoding();
        string constructedString = encoding.GetString(characters);
        return (constructedString);
    }

    // Encodes a string into a UTF-8 byte array.
    public static byte[] StringToUTF8ByteArray(string pXmlString)
    {
        UTF8Encoding encoding = new UTF8Encoding();
        byte[] byteArray = encoding.GetBytes(pXmlString);
        return byteArray;
    }

    // Serializes an XmlDocument to an indented string via an in-memory stream.
    public static string ConvertXmlToString(XmlDocument xmlDoc)
    {
        MemoryStream stream = new MemoryStream();
        XmlTextWriter writer = new XmlTextWriter(stream, null);
        writer.Formatting = Formatting.Indented;
        xmlDoc.Save(writer);
        StreamReader sr = new StreamReader(stream, System.Text.Encoding.UTF8);
        // Rewind before reading back what Save() just wrote.
        stream.Position = 0;
        string xmlString = sr.ReadToEnd();
        sr.Close();
        stream.Close();
        return xmlString;
    }

    // Converts a supported value to its string form; inverse of stringToObject.
    // Vector components are joined with ','; AnimationCurve keyframes are
    // joined with ',' and each keyframe's fields with '|'
    // (time|value|tangentMode|inTangent|outTangent).
    // Unsupported types fall through to the type name.
    public static string objectToString (object obj)
    {
        System.Type type = obj.GetType();
        if (type == typeof(int))
            return obj.ToString();
        else if (type == typeof(float))
            return obj.ToString();
        else if (type == typeof(bool))
            return obj.ToString();
        else if (type == typeof(string))
            return (string) obj;
        else if (type == typeof(Vector3))
        {
            Vector3 val = (Vector3) obj;
            return val.x + "," + val.y + "," + val.z;
        }
        else if (type == typeof(Vector2))
        {
            Vector2 val = (Vector2) obj;
            return val.x + "," + val.y;
        }
        else if (type == typeof(AnimationCurve))
        {
            AnimationCurve val = (AnimationCurve) obj;
            string str = "";
            foreach (Keyframe frame in val.keys)
            {
                string v = frame.time.ToString() + "|" + frame.value.ToString() + "|" + frame.tangentMode.ToString() + "|" + frame.inTangent.ToString() + "|" + frame.outTangent.ToString();
                if (string.IsNullOrEmpty(str))
                    str = v;
                else
                    str += "," + v;
            }
            return str;
        }
        /* else if (type == typeof(MonoScript)) { return type.Name; }*/
        return type.Name;
    }

    // Parses the string produced by objectToString back into a value of the
    // requested type; returns null for unsupported types.
    // NOTE(review): float.Parse here is culture-sensitive — values written on a
    // comma-decimal locale would fail to round-trip; confirm intended locales.
    public static object stringToObject(System.Type type, string val)
    {
        if (type == typeof(int))
            return int.Parse(val);
        else if (type == typeof(float))
            return float.Parse(val);
        else if (type == typeof(bool))
            return bool.Parse(val);
        else if (type == typeof(string))
            return val;
        else if (type == typeof(Vector3))
        {
            string[] values = val.Split(',');
            return new Vector3(float.Parse(values[0]), float.Parse(values[1]), float.Parse(values[2]));
        }
        else if (type == typeof(Vector2))
        {
            string[] values = val.Split(',');
            return new Vector2(float.Parse(values[0]), float.Parse(values[1]));
        }
        else if (type == typeof(AnimationCurve))
        {
            AnimationCurve cur = new AnimationCurve();
            if (!string.IsNullOrEmpty(val))
            {
                string[] frame_strs = val.Split(',');
                foreach (string frame_str in frame_strs)
                {
                    // Field order: time|value|tangentMode|inTangent|outTangent
                    // (indices 0,1,3,4 feed the ctor; index 2 is tangentMode).
                    string[] strs = frame_str.Split('|');
                    Keyframe frame = new Keyframe(float.Parse(strs[0]), float.Parse(strs[1]), float.Parse(strs[3]), float.Parse(strs[4]));
                    frame.tangentMode = int.Parse(strs[2]);
                    cur.AddKey(frame);
                }
            }
            return cur;
        }
        /* else if (type == typeof(MonoScript)) { return type; } */
        return null;
    }

    // Resolves a type name to a System.Type, falling back to a hard-coded map
    // for common UnityEngine value types (plain Type.GetType cannot resolve
    // types living in other assemblies by name alone).
    public static System.Type findType(string str)
    {
        System.Type type = System.Type.GetType(str);
        if (type != null)
            return type;
        switch (str)
        {
            case "UnityEngine.AnimationCurve":
                return typeof(AnimationCurve);
            case "UnityEngine.Vector3":
                return typeof(Vector3);
            case "UnityEngine.Vector2":
                return typeof(Vector2);
            case "UnityEngine.Vector4":
                return typeof(Vector4);
            case "UnityEngine.Quaternion":
                return typeof(Quaternion);
        }
        return null;
    }

    // Parses a "key||typeName||value##key||typeName||value" string into a
    // parameter dictionary; entries with fewer than 3 fields are skipped.
    // (The method name carries a historical typo — "Stromg" — kept because
    // renaming would break existing callers.)
    public static Dictionary<string, object> fromStromgToParams (string str)
    {
        Dictionary<string, object> script_params = new Dictionary<string, object>();
        string[] all_words = str.Split(new string[] { "##" }, StringSplitOptions.None);
        foreach (string words in all_words)
        {
            string[] all_vars = words.Split(new string[] { "||" }, StringSplitOptions.None);
            if (all_vars.Length >= 3)
            {
                string key = all_vars[0];
                System.Type type = Config.findType(all_vars[1]);
                object val = Config.stringToObject(type, all_vars[2]);
                script_params.Add(key, val);
            }
        }
        return script_params;
    }

    // Builds an XmlDocument from raw UTF-8 bytes.
    public static XmlDocument fromStringToXmlDoc (byte[] bs)
    {
        XmlDocument doc = new XmlDocument();
        string text = UTF8ByteArrayToString(bs);
        doc.LoadXml(text);
        return doc;
    }

    // Loads an XML document. Source depends on build flags: REMOTE builds read
    // from persistentDataPath; otherwise from StreamingAssets in the editor or
    // the platform-specific packaged location on device.
    public static XmlDocument getLocalXmlDoc (string xmlfile)
    {
#if REMOTE
        string url = Application.persistentDataPath + xmlfile;
        byte[] bs = File.ReadAllBytes(url);
        XmlDocument doc = fromStringToXmlDoc(bs);
#else
        XmlDocument doc = new XmlDocument();
        if (Application.platform == RuntimePlatform.WindowsEditor || Application.platform == RuntimePlatform.OSXEditor)
            doc.Load(Application.dataPath + "/StreamingAssets" + xmlfile);
        else
        {
#if UNITY_ANDROID
            // Android assets live inside the APK jar, so they are read via WWW.
            WWW www = new WWW("jar:file://" + Application.dataPath + "!/assets" + xmlfile);
            // NOTE(review): busy-wait blocks the calling thread until the WWW
            // request completes; on the main thread this can stall — confirm.
            while (!www.isDone) {}
            try
            {
                /*
                System.IO.StringReader stringReader = new System.IO.StringReader(www.text);
                stringReader.Read(); // skip BOM
                System.Xml.XmlReader reader = System.Xml.XmlReader.Create(stringReader);
                */
                doc.LoadXml(www.text);
                //doc.LoadXml(stringReader.ReadToEnd());
            }
            catch (Exception ex)
            {
                // Parse errors are deliberately swallowed and an empty document
                // is returned; error reporting is left disabled:
                //error_msg += "LOAD " + xmlfile + " ERROR!" + "\n";
            }
#elif UNITY_IPHONE
            doc.Load(Application.dataPath + "/Raw" + xmlfile);
#endif
        }
#endif
        return doc;
    }

    // Same as the non-REMOTE branch of getLocalXmlDoc: always reads from
    // StreamingAssets / the packaged asset location, never persistentDataPath.
    public static XmlDocument getStreamAssetsXmlDoc (string xmlfile)
    {
        XmlDocument doc = new XmlDocument();
        if (Application.platform == RuntimePlatform.WindowsEditor || Application.platform == RuntimePlatform.OSXEditor)
            doc.Load(Application.dataPath + "/StreamingAssets" + xmlfile);
        else
        {
#if UNITY_ANDROID
            WWW www = new WWW("jar:file://" + Application.dataPath + "!/assets" + xmlfile);
            // NOTE(review): same blocking busy-wait as getLocalXmlDoc.
            while (!www.isDone) {}
            try
            {
                /*
                System.IO.StringReader stringReader = new System.IO.StringReader(www.text);
                stringReader.Read(); // skip BOM
                System.Xml.XmlReader reader = System.Xml.XmlReader.Create(stringReader);
                */
                doc.LoadXml(www.text);
                //doc.LoadXml(stringReader.ReadToEnd());
            }
            catch (Exception ex)
            {
                // Swallowed, as above; empty document returned on failure.
                //error_msg += "LOAD " + xmlfile + " ERROR!" + "\n";
            }
#elif UNITY_IPHONE
            doc.Load(Application.dataPath + "/Raw" + xmlfile);
#endif
        }
        return doc;
    }
}
/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for Additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
==================================================================== */

using System.Collections.Generic;
using NPOI.SS.Util;
using NPOI.SS.Formula.PTG;
using NPOI.SS.Formula;
using System;
using NPOI.SS.UserModel;
using NPOI.OpenXmlFormats.Spreadsheet;
using System.Linq;
using NPOI.SS.UserModel.Helpers;

namespace NPOI.XSSF.UserModel.Helpers
{
    /**
     * Row-shifting support for XSSF sheets: updates named ranges, formulas,
     * conditional formatting and hyperlink anchors after rows move.
     *
     * @author Yegor Kozlov
     */
    public class XSSFRowShifter : RowShifter
    {
        public XSSFRowShifter(XSSFSheet sh)
            : base(sh)
        {
            sheet = sh;
        }

        /**
         * Shift merged regions
         *
         * @param startRow the row to start Shifting
         * @param endRow   the row to end Shifting
         * @param n        the number of rows to shift
         * @return an array of affected cell regions
         */
        [Obsolete("deprecated POI 3.15 beta 2. Use ShiftMergedRegions(int, int, int) instead.")]
        public List<CellRangeAddress> ShiftMerged(int startRow, int endRow, int n)
        {
            return ShiftMergedRegions(startRow, endRow, n);
        }

        /**
         * Update named ranges: re-parses each name's formula, lets the shifter
         * adjust the parsed tokens, and writes back only names that changed.
         */
        public override void UpdateNamedRanges(FormulaShifter shifter)
        {
            IWorkbook wb = sheet.Workbook;
            XSSFEvaluationWorkbook fpb = XSSFEvaluationWorkbook.Create(wb);
            foreach (IName name in wb.GetAllNames())
            {
                String formula = name.RefersToFormula;
                int sheetIndex = name.SheetIndex;
                Ptg[] ptgs = FormulaParser.Parse(formula, fpb, FormulaType.NamedRange, sheetIndex, -1);
                if (shifter.AdjustFormula(ptgs, sheetIndex))
                {
                    String shiftedFmla = FormulaRenderer.ToFormulaString(fpb, ptgs);
                    name.RefersToFormula = shiftedFmla;
                }
            }
        }

        /**
         * Update formulas on the parent sheet and on every other sheet in the
         * workbook (other sheets may reference the shifted rows).
         */
        public override void UpdateFormulas(FormulaShifter shifter)
        {
            //update formulas on the parent sheet
            UpdateSheetFormulas(sheet, shifter);

            //update formulas on other sheets
            IWorkbook wb = sheet.Workbook;
            foreach (XSSFSheet sh in wb)
            {
                if (sheet == sh)
                    continue;
                UpdateSheetFormulas(sh, shifter);
            }
        }

        // Applies the shifting policy to every row of the given sheet.
        private void UpdateSheetFormulas(ISheet sh, FormulaShifter Shifter)
        {
            foreach (IRow r in sh)
            {
                XSSFRow row = (XSSFRow)r;
                UpdateRowFormulas(row, Shifter);
            }
        }

        /// <summary>
        /// Update the formulas in specified row using the formula shifting policy specified by shifter
        /// </summary>
        /// <param name="row">the row to update the formulas on</param>
        /// <param name="Shifter">the formula shifting policy</param>
        public override void UpdateRowFormulas(IRow row, FormulaShifter Shifter)
        {
            XSSFSheet sheet = (XSSFSheet)row.Sheet;
            foreach (ICell c in row)
            {
                XSSFCell cell = (XSSFCell)c;
                CT_Cell ctCell = cell.GetCTCell();
                if (ctCell.IsSetF())
                {
                    CT_CellFormula f = ctCell.f;
                    String formula = f.Value;
                    if (formula.Length > 0)
                    {
                        String ShiftedFormula = ShiftFormula(row, formula, Shifter);
                        if (ShiftedFormula != null)
                        {
                            f.Value = (ShiftedFormula);
                            // Shared formulas are stored once; keep the master
                            // copy in sync with the shifted cell formula.
                            if (f.t == ST_CellFormulaType.shared)
                            {
                                int si = (int)f.si;
                                CT_CellFormula sf = sheet.GetSharedFormula(si);
                                sf.Value = (ShiftedFormula);
                            }
                        }
                    }

                    if (f.isSetRef())
                    {
                        //Range of cells which the formula applies to.
                        String ref1 = f.@ref;
                        String ShiftedRef = ShiftFormula(row, ref1, Shifter);
                        if (ShiftedRef != null)
                            f.@ref = ShiftedRef;
                    }
                }
            }
        }

        /**
         * Shift a formula using the supplied FormulaShifter
         *
         * @param row     the row of the cell this formula belongs to. Used to get a reference to the parent workbook.
         * @param formula the formula to shift
         * @param Shifter the FormulaShifter object that operates on the Parsed formula tokens
         * @return the Shifted formula if the formula was Changed,
         *         <code>null</code> if the formula wasn't modified
         */
        private static String ShiftFormula(IRow row, String formula, FormulaShifter Shifter)
        {
            ISheet sheet = row.Sheet;
            IWorkbook wb = sheet.Workbook;
            int sheetIndex = wb.GetSheetIndex(sheet);
            XSSFEvaluationWorkbook fpb = XSSFEvaluationWorkbook.Create(wb);
            try
            {
                Ptg[] ptgs = FormulaParser.Parse(formula, fpb, FormulaType.Cell, sheetIndex, -1);
                String ShiftedFmla = null;
                if (Shifter.AdjustFormula(ptgs, sheetIndex))
                {
                    ShiftedFmla = FormulaRenderer.ToFormulaString(fpb, ptgs);
                }
                return ShiftedFmla;
            }
            catch (FormulaParseException fpe)
            {
                // Log, but don't change, rather than breaking
                Console.WriteLine("Error shifting formula on row {0}, {1}", row.RowNum, fpe);
                return formula;
            }
        }

        // Shifts conditional-formatting ranges and rewrites rule formulas.
        // Rules whose every range vanished after the shift are removed.
        public override void UpdateConditionalFormatting(FormulaShifter Shifter)
        {
            XSSFSheet xsheet = (XSSFSheet)sheet;
            XSSFWorkbook wb = xsheet.Workbook as XSSFWorkbook;
            int sheetIndex = wb.GetSheetIndex(sheet);

            XSSFEvaluationWorkbook fpb = XSSFEvaluationWorkbook.Create(wb);
            CT_Worksheet ctWorksheet = xsheet.GetCTWorksheet();
            List<CT_ConditionalFormatting> conditionalFormattingArray = ctWorksheet.conditionalFormatting;
            // iterate backwards due to possible calls to ctWorksheet.removeConditionalFormatting(j)
            for (int j = conditionalFormattingArray.Count - 1; j >= 0; j--)
            {
                CT_ConditionalFormatting cf = conditionalFormattingArray[j];

                // sqref is a space-separated list of cell ranges.
                List<CellRangeAddress> cellRanges = new List<CellRangeAddress>();
                String[] regions = cf.sqref.ToString().Split(new char[] { ' ' });
                for (int i = 0; i < regions.Length; i++)
                {
                    cellRanges.Add(CellRangeAddress.ValueOf(regions[i]));
                }

                bool Changed = false;
                List<CellRangeAddress> temp = new List<CellRangeAddress>();
                for (int i = 0; i < cellRanges.Count; i++)
                {
                    CellRangeAddress craOld = cellRanges[i];
                    CellRangeAddress craNew = ShiftRange(Shifter, craOld, sheetIndex);
                    if (craNew == null)
                    {
                        // Range was deleted by the shift; drop it.
                        Changed = true;
                        continue;
                    }
                    temp.Add(craNew);
                    if (craNew != craOld)
                    {
                        Changed = true;
                    }
                }

                if (Changed)
                {
                    int nRanges = temp.Count;
                    if (nRanges == 0)
                    {
                        // No ranges left: remove the whole formatting entry.
                        conditionalFormattingArray.RemoveAt(j);
                        continue;
                    }
                    string refs = string.Empty;
                    foreach (CellRangeAddress a in temp)
                    {
                        if (refs.Length == 0)
                            refs = a.FormatAsString();
                        else
                            refs += " " + a.FormatAsString();
                    }
                    cf.sqref = refs;
                }

                // Rewrite each rule formula under the shifting policy.
                foreach (CT_CfRule cfRule in cf.cfRule)
                {
                    List<String> formulas = cfRule.formula;
                    for (int i = 0; i < formulas.Count; i++)
                    {
                        String formula = formulas[i];
                        Ptg[] ptgs = FormulaParser.Parse(formula, fpb, FormulaType.Cell, sheetIndex, -1);
                        if (Shifter.AdjustFormula(ptgs, sheetIndex))
                        {
                            String ShiftedFmla = FormulaRenderer.ToFormulaString(fpb, ptgs);
                            formulas[i] = ShiftedFmla;
                        }
                    }
                }
            }
        }

        /**
         * Shift the Hyperlink anchors (not the hyperlink text, even if the hyperlink
         * is of type LINK_DOCUMENT and refers to a cell that was shifted). Hyperlinks
         * do not track the content they point to.
         *
         * @param shifter
         */
        public override void UpdateHyperlinks(FormulaShifter shifter)
        {
            XSSFSheet xsheet = (XSSFSheet)sheet;
            int sheetIndex = xsheet.GetWorkbook().GetSheetIndex(sheet);
            List<IHyperlink> hyperlinkList = sheet.GetHyperlinkList();
            foreach (IHyperlink hyperlink1 in hyperlinkList)
            {
                XSSFHyperlink hyperlink = hyperlink1 as XSSFHyperlink;
                String cellRef = hyperlink.CellRef;
                CellRangeAddress cra = CellRangeAddress.ValueOf(cellRef);
                CellRangeAddress shiftedRange = ShiftRange(shifter, cra, sheetIndex);
                if (shiftedRange != null && shiftedRange != cra)
                {
                    // shiftedRange should not be null. If shiftedRange is null, that means
                    // that a hyperlink wasn't deleted at the beginning of shiftRows when
                    // identifying rows that should be removed because they will be overwritten
                    hyperlink.SetCellReference(shiftedRange.FormatAsString());
                }
            }
        }

        // Applies the shifter to a single cell range; returns the input range
        // unchanged if no adjustment was needed, null if the range was deleted.
        private static CellRangeAddress ShiftRange(FormulaShifter Shifter, CellRangeAddress cra, int currentExternSheetIx)
        {
            // FormulaShifter works well in terms of Ptgs - so convert CellRangeAddress to AreaPtg (and back) here
            AreaPtg aptg = new AreaPtg(cra.FirstRow, cra.LastRow, cra.FirstColumn, cra.LastColumn, false, false, false, false);
            Ptg[] ptgs = { aptg, };

            if (!Shifter.AdjustFormula(ptgs, currentExternSheetIx))
            {
                return cra;
            }
            Ptg ptg0 = ptgs[0];
            if (ptg0 is AreaPtg)
            {
                AreaPtg bptg = (AreaPtg)ptg0;
                return new CellRangeAddress(bptg.FirstRow, bptg.LastRow, bptg.FirstColumn, bptg.LastColumn);
            }
            if (ptg0 is AreaErrPtg)
            {
                // Shift deleted the entire area.
                return null;
            }
            throw new InvalidOperationException("Unexpected Shifted ptg class (" + ptg0.GetType().Name + ")");
        }
    }
}
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Threading;
using JavaScriptEngineSwitcher.Core;
using JSPool;
using React.Exceptions;

namespace React
{
	/// <summary>
	/// Handles creation of JavaScript engines. All methods are thread-safe.
	/// </summary>
	public class JavaScriptEngineFactory : IDisposable, IJavaScriptEngineFactory
	{
		/// <summary>
		/// React configuration for the current site
		/// </summary>
		protected readonly IReactSiteConfiguration _config;
		/// <summary>
		/// Cache used for storing the pre-compiled scripts
		/// </summary>
		protected readonly ICache _cache;
		/// <summary>
		/// File system wrapper
		/// </summary>
		protected readonly IFileSystem _fileSystem;
		/// <summary>
		/// Function used to create new JavaScript engine instances.
		/// </summary>
		protected readonly Func<IJsEngine> _factory;
		/// <summary>
		/// The JavaScript Engine Switcher instance used by ReactJS.NET
		/// </summary>
		protected readonly IJsEngineSwitcher _jsEngineSwitcher;
		/// <summary>
		/// Contains all current JavaScript engine instances. One per thread, keyed on thread ID.
		/// </summary>
		protected readonly ConcurrentDictionary<int, IJsEngine> _engines = new ConcurrentDictionary<int, IJsEngine>();
		/// <summary>
		/// Pool of JavaScript engines to use
		/// </summary>
		protected IJsPool _pool;
		/// <summary>
		/// Whether this class has been disposed.
		/// </summary>
		protected bool _disposed;
		/// <summary>
		/// The exception that was thrown during the most recent recycle of the pool.
		/// </summary>
		protected Exception _scriptLoadException;

		/// <summary>
		/// Initializes a new instance of the <see cref="JavaScriptEngineFactory"/> class.
		/// </summary>
		public JavaScriptEngineFactory(
			IJsEngineSwitcher jsEngineSwitcher,
			IReactSiteConfiguration config,
			ICache cache,
			IFileSystem fileSystem
		)
		{
			_jsEngineSwitcher = jsEngineSwitcher;
			_config = config;
			_cache = cache;
			_fileSystem = fileSystem;
#pragma warning disable 618
			_factory = GetFactory(_jsEngineSwitcher);
#pragma warning restore 618
			if (_config.ReuseJavaScriptEngines)
			{
				_pool = CreatePool();
			}
		}

		/// <summary>
		/// Creates a new JavaScript engine pool.
		/// </summary>
		protected virtual IJsPool CreatePool()
		{
			// Watch all user scripts (and, if configured, the CRA asset manifest)
			// so the pool recycles when any of them change on disk.
			var allFiles = _config.Scripts
				.Concat(_config.ScriptsWithoutTransform)
				.Concat(_config.ReactAppBuildPath != null ? new[] { $"{_config.ReactAppBuildPath}/asset-manifest.json" } : Enumerable.Empty<string>())
				.Select(_fileSystem.MapPath);

			var poolConfig = new JsPoolConfig
			{
				EngineFactory = _factory,
				Initializer = InitialiseEngine,
				WatchPath = _fileSystem.MapPath("~/"),
				WatchFiles = allFiles
			};
			if (_config.MaxEngines != null)
			{
				poolConfig.MaxEngines = _config.MaxEngines.Value;
			}
			if (_config.StartEngines != null)
			{
				poolConfig.StartEngines = _config.StartEngines.Value;
			}
			if (_config.MaxUsagesPerEngine != null)
			{
				poolConfig.MaxUsagesPerEngine = _config.MaxUsagesPerEngine.Value;
			}

			var pool = new JsPool(poolConfig);
			// Reset the recycle exception on recycle. If there *are* errors loading the scripts
			// during recycle, the errors will be caught in the initializer.
			pool.Recycled += (sender, args) => _scriptLoadException = null;
			return pool;
		}

		/// <summary>
		/// Loads standard React and Babel scripts into the engine.
		/// </summary>
		protected virtual void InitialiseEngine(IJsEngine engine)
		{
#if NET40
			var thisAssembly = typeof(ReactEnvironment).Assembly;
#else
			var thisAssembly = typeof(ReactEnvironment).GetTypeInfo().Assembly;
#endif
			LoadResource(engine, "React.Core.Resources.shims.js", thisAssembly);
			if (_config.LoadReact)
			{
				LoadResource(
					engine,
					_config.UseDebugReact
						? "React.Core.Resources.react.generated.js"
						: "React.Core.Resources.react.generated.min.js",
					thisAssembly
				);
			}

			LoadUserScripts(engine);
			if (!_config.LoadReact && _scriptLoadException == null)
			{
				// We expect the user to have loaded their own version of React in the scripts that
				// were loaded above, let's ensure that's the case.
				EnsureReactLoaded(engine);
			}
		}

		/// <summary>
		/// Loads code from embedded JavaScript resource into the engine.
		/// </summary>
		/// <param name="engine">Engine to load a code from embedded JavaScript resource</param>
		/// <param name="resourceName">The case-sensitive resource name</param>
		/// <param name="assembly">The assembly, which contains the embedded resource</param>
		private void LoadResource(IJsEngine engine, string resourceName, Assembly assembly)
		{
			if (_config.AllowJavaScriptPrecompilation
				&& engine.TryExecuteResourceWithPrecompilation(_cache, resourceName, assembly))
			{
				// Do nothing.
			}
			else
			{
				engine.ExecuteResource(resourceName, assembly);
			}
		}

		/// <summary>
		/// Loads any user-provided scripts. Only scripts that don't need JSX transformation can
		/// run immediately here. JSX files are loaded in ReactEnvironment.
		/// </summary>
		/// <param name="engine">Engine to load scripts into</param>
		private void LoadUserScripts(IJsEngine engine)
		{
			try
			{
				IEnumerable<string> manifestFiles = Enumerable.Empty<string>();

				if (_config.ReactAppBuildPath != null)
				{
					var manifest = ReactAppAssetManifest.LoadManifest(_config, _fileSystem, _cache, useCacheRead: false);
					manifestFiles = (manifest?.Entrypoints?.Where(x => x != null && x.EndsWith(".js"))) ?? Enumerable.Empty<string>();
				}

				foreach (var file in _config.ScriptsWithoutTransform.Concat(manifestFiles))
				{
					try
					{
						if (_config.AllowJavaScriptPrecompilation
							&& engine.TryExecuteFileWithPrecompilation(_cache, _fileSystem, file))
						{
							// Do nothing.
						}
						else
						{
							engine.ExecuteFile(_fileSystem, file);
						}
					}
					catch (JsException ex)
					{
						// We can't simply rethrow the exception here, as it's possible this is running
						// on a background thread (ie. as a response to a file changing). If we did
						// throw the exception here, it would terminate the entire process. Instead,
						// save the exception, and then just rethrow it later when getting the engine.
						_scriptLoadException = new ReactScriptLoadException(string.Format(
							"Error while loading \"{0}\": {1}",
							file,
							ex.Message
						), ex);
					}
				}
			}
			catch (IOException ex)
			{
				// Files could be in the process of being rebuilt by JS build tooling
				// FIX: removed a stray duplicate semicolon that was here.
				_scriptLoadException = new ReactScriptLoadException(ex.Message, ex);
			}
		}

		/// <summary>
		/// Ensures that React has been correctly loaded into the specified engine.
		/// </summary>
		/// <param name="engine">Engine to check</param>
		private void EnsureReactLoaded(IJsEngine engine)
		{
			var globalsString = engine.CallFunction<string>("ReactNET_initReact");
			string[] globals = globalsString.Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries);

			if (globals.Length != 0)
			{
				// FIX: added the missing spaces between the concatenated message
				// fragments (previously rendered ").Please" and "'LoadReact'configuration").
				_scriptLoadException = new ReactNotInitialisedException(
					$"React has not been loaded correctly: missing ({string.Join(", ", globals)}). " +
					"Please expose your version of React as global variables named " +
					"'React', 'ReactDOM', and 'ReactDOMServer', or enable the 'LoadReact' " +
					"configuration option to use the built-in version of React."
				);
			}
		}

		/// <summary>
		/// Gets the JavaScript engine for the current thread. It is recommended to use
		/// <see cref="GetEngine"/> instead, which will pool/reuse engines.
		/// </summary>
		/// <returns>The JavaScript engine</returns>
		public virtual IJsEngine GetEngineForCurrentThread()
		{
			EnsureValidState();
			return _engines.GetOrAdd(Thread.CurrentThread.ManagedThreadId, id =>
			{
				var engine = _factory();
				InitialiseEngine(engine);
				EnsureValidState();
				return engine;
			});
		}

		/// <summary>
		/// Disposes the JavaScript engine for the current thread.
		/// </summary>
		public virtual void DisposeEngineForCurrentThread()
		{
			IJsEngine engine;
			if (_engines.TryRemove(Thread.CurrentThread.ManagedThreadId, out engine))
			{
				if (engine != null)
				{
					engine.Dispose();
				}
			}
		}

		/// <summary>
		/// Gets a JavaScript engine from the pool.
		/// </summary>
		/// <returns>The JavaScript engine</returns>
		public virtual PooledJsEngine GetEngine()
		{
			EnsureValidState();
			return _pool.GetEngine();
		}

		/// <summary>
		/// Gets a factory for the most appropriate JavaScript engine for the current environment.
		/// The first functioning JavaScript engine with the lowest priority will be used.
		/// </summary>
		/// <returns>Function to create JavaScript engine</returns>
		private static Func<IJsEngine> GetFactory(IJsEngineSwitcher jsEngineSwitcher)
		{
			string defaultEngineName = jsEngineSwitcher.DefaultEngineName;
			if (!string.IsNullOrWhiteSpace(defaultEngineName))
			{
				var engineFactory = jsEngineSwitcher.EngineFactories.Get(defaultEngineName);
				if (engineFactory != null)
				{
					return engineFactory.CreateEngine;
				}
				else
				{
					throw new ReactEngineNotFoundException(
						"Could not find a factory that creates an instance of the JavaScript " +
						"engine with name `" + defaultEngineName + "`.");
				}
			}

			if (jsEngineSwitcher.EngineFactories.Count == 0)
			{
				throw new ReactException("No JS engines were registered. Visit https://reactjs.net/docs for more information.");
			}

			var exceptionMessages = new List<string>();

			foreach (var engineFactory in jsEngineSwitcher.EngineFactories.GetRegisteredFactories())
			{
				IJsEngine engine = null;
				try
				{
					engine = engineFactory.CreateEngine();
					if (EngineIsUsable(engine))
					{
						// Success! Use this one.
						return engineFactory.CreateEngine;
					}
				}
				catch (JsEngineLoadException ex)
				{
					Trace.WriteLine(string.Format("Error initialising {0}: {1}", engineFactory, ex.Message));
					exceptionMessages.Add(ex.Message);
				}
				catch (Exception ex)
				{
					Trace.WriteLine(string.Format("Error initialising {0}: {1}", engineFactory, ex));
					exceptionMessages.Add(ex.ToString());
				}
				finally
				{
					if (engine != null)
					{
						engine.Dispose();
					}
				}
			}

			throw new ReactEngineNotFoundException("There was an error initializing the registered JS engines. " + string.Join(Environment.NewLine, exceptionMessages));
		}

		/// <summary>
		/// Performs a sanity check to ensure the specified engine type is usable.
		/// </summary>
		/// <param name="engine">Engine to test</param>
		/// <returns></returns>
		private static bool EngineIsUsable(IJsEngine engine)
		{
			// Perform a sanity test to ensure this engine is usable
			return engine.Evaluate<int>("1 + 1") == 2;
		}

		/// <summary>
		/// Clean up all engines
		/// </summary>
		public virtual void Dispose()
		{
			_disposed = true;
			foreach (var engine in _engines)
			{
				if (engine.Value != null)
				{
					engine.Value.Dispose();
				}
			}
			if (_pool != null)
			{
				_pool.Dispose();
				_pool = null;
			}
		}

		/// <summary>
		/// Ensures that this object has not been disposed, and that no error was thrown while
		/// loading the scripts.
		/// </summary>
		public void EnsureValidState()
		{
			if (_disposed)
			{
				throw new ObjectDisposedException(GetType().Name);
			}
			if (_scriptLoadException != null)
			{
				// This means an exception occurred while loading the script (eg. syntax error in the file)
				throw _scriptLoadException;
			}
		}
	}
}
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Reflection;

namespace FuelEconomyWSC.Areas.HelpPage
{
    /// <summary>
    /// Creates an object of a given type and populates it with sample data.
    /// The type needs to be public, have a public default constructor and settable public properties/fields.
    /// Currently supported shapes:
    /// Simple types: <see cref="int"/>, <see cref="string"/>, <see cref="Enum"/>, <see cref="DateTime"/>, <see cref="Uri"/>, etc.
    /// Complex types: POCO types.
    /// Nullables: <see cref="Nullable{T}"/>.
    /// Arrays: arrays of simple types or complex types.
    /// Key value pairs: <see cref="KeyValuePair{TKey,TValue}"/>
    /// Tuples: <see cref="Tuple{T1}"/>, <see cref="Tuple{T1,T2}"/>, etc.
    /// Dictionaries: <see cref="IDictionary{TKey,TValue}"/> or anything deriving from <see cref="IDictionary{TKey,TValue}"/>.
    /// Collections: <see cref="IList{T}"/>, <see cref="IEnumerable{T}"/>, <see cref="ICollection{T}"/>, <see cref="IList"/>, <see cref="IEnumerable"/>, <see cref="ICollection"/> or anything deriving from <see cref="ICollection{T}"/> or <see cref="IList"/>.
    /// Queryables: <see cref="IQueryable"/>, <see cref="IQueryable{T}"/>.
    /// </summary>
    public class ObjectGenerator
    {
        // Number of entries placed into generated arrays, collections and dictionaries.
        private const int DefaultCollectionSize = 3;
        private readonly SimpleTypeObjectGenerator _simpleGenerator = new SimpleTypeObjectGenerator();

        /// <summary>
        /// Generates a sample object for the given type, or null when one cannot be produced.
        /// </summary>
        /// <param name="type">The type.</param>
        /// <returns>An object of the given type.</returns>
        public object GenerateObject(Type type)
        {
            return GenerateObject(type, new Dictionary<Type, object>());
        }

        // createdObjectReferences tracks complex objects already built so that
        // circular object graphs terminate instead of recursing forever.
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Here we just want to return null if anything goes wrong.")]
        private object GenerateObject(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            try
            {
                // Dispatch order matters: simple types first, then arrays and generic
                // types, then the non-generic collection interfaces, then plain POCOs.
                if (SimpleTypeObjectGenerator.CanGenerateObject(type))
                {
                    return _simpleGenerator.GenerateObject(type);
                }
                if (type.IsArray)
                {
                    return GenerateArray(type, DefaultCollectionSize, createdObjectReferences);
                }
                if (type.IsGenericType)
                {
                    return GenerateGenericType(type, DefaultCollectionSize, createdObjectReferences);
                }
                if (type == typeof(IDictionary))
                {
                    return GenerateDictionary(typeof(Hashtable), DefaultCollectionSize, createdObjectReferences);
                }
                if (typeof(IDictionary).IsAssignableFrom(type))
                {
                    return GenerateDictionary(type, DefaultCollectionSize, createdObjectReferences);
                }
                if (type == typeof(IList) || type == typeof(IEnumerable) || type == typeof(ICollection))
                {
                    return GenerateCollection(typeof(ArrayList), DefaultCollectionSize, createdObjectReferences);
                }
                if (typeof(IList).IsAssignableFrom(type))
                {
                    return GenerateCollection(type, DefaultCollectionSize, createdObjectReferences);
                }
                if (type == typeof(IQueryable))
                {
                    return GenerateQueryable(type, DefaultCollectionSize, createdObjectReferences);
                }
                if (type.IsEnum)
                {
                    return GenerateEnum(type);
                }
                if (type.IsPublic || type.IsNestedPublic)
                {
                    return GenerateComplexObject(type, createdObjectReferences);
                }
            }
            catch
            {
                // Sample generation is best-effort; fall through to null on any failure.
            }
            return null;
        }

        // Handles Nullable<T>, KeyValuePair<,>, Tuple<...>, closed generic
        // collection/dictionary types, and finally generic POCOs.
        private static object GenerateGenericType(Type type, int collectionSize, Dictionary<Type, object> createdObjectReferences)
        {
            Type definition = type.GetGenericTypeDefinition();
            if (definition == typeof(Nullable<>))
            {
                return GenerateNullable(type, createdObjectReferences);
            }
            if (definition == typeof(KeyValuePair<,>))
            {
                return GenerateKeyValuePair(type, createdObjectReferences);
            }
            if (IsTuple(definition))
            {
                return GenerateTuple(type, createdObjectReferences);
            }

            Type[] typeArgs = type.GetGenericArguments();
            switch (typeArgs.Length)
            {
                case 1:
                    if (definition == typeof(IList<>) || definition == typeof(IEnumerable<>) || definition == typeof(ICollection<>))
                    {
                        return GenerateCollection(typeof(List<>).MakeGenericType(typeArgs), collectionSize, createdObjectReferences);
                    }
                    if (definition == typeof(IQueryable<>))
                    {
                        return GenerateQueryable(type, collectionSize, createdObjectReferences);
                    }
                    if (typeof(ICollection<>).MakeGenericType(typeArgs[0]).IsAssignableFrom(type))
                    {
                        return GenerateCollection(type, collectionSize, createdObjectReferences);
                    }
                    break;
                case 2:
                    if (definition == typeof(IDictionary<,>))
                    {
                        return GenerateDictionary(typeof(Dictionary<,>).MakeGenericType(typeArgs), collectionSize, createdObjectReferences);
                    }
                    if (typeof(IDictionary<,>).MakeGenericType(typeArgs[0], typeArgs[1]).IsAssignableFrom(type))
                    {
                        return GenerateDictionary(type, collectionSize, createdObjectReferences);
                    }
                    break;
            }

            return (type.IsPublic || type.IsNestedPublic)
                ? GenerateComplexObject(type, createdObjectReferences)
                : null;
        }

        // Builds a Tuple<...> by generating each element; null when every element is null.
        private static object GenerateTuple(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            Type[] elementTypes = type.GetGenericArguments();
            object[] elements = new object[elementTypes.Length];
            ObjectGenerator generator = new ObjectGenerator();
            bool anyElementCreated = false;
            for (int i = 0; i < elementTypes.Length; i++)
            {
                elements[i] = generator.GenerateObject(elementTypes[i], createdObjectReferences);
                anyElementCreated |= elements[i] != null;
            }
            return anyElementCreated ? Activator.CreateInstance(type, elements) : null;
        }

        // True for any of the eight Tuple<> generic definitions.
        private static bool IsTuple(Type genericTypeDefinition)
        {
            Type[] tupleDefinitions =
            {
                typeof(Tuple<>), typeof(Tuple<,>), typeof(Tuple<,,>), typeof(Tuple<,,,>),
                typeof(Tuple<,,,,>), typeof(Tuple<,,,,,>), typeof(Tuple<,,,,,,>), typeof(Tuple<,,,,,,,>)
            };
            return tupleDefinitions.Contains(genericTypeDefinition);
        }

        // Builds a KeyValuePair<K,V>; null when neither key nor value could be generated.
        private static object GenerateKeyValuePair(Type keyValuePairType, Dictionary<Type, object> createdObjectReferences)
        {
            Type[] typeArgs = keyValuePairType.GetGenericArguments();
            ObjectGenerator generator = new ObjectGenerator();
            object key = generator.GenerateObject(typeArgs[0], createdObjectReferences);
            object value = generator.GenerateObject(typeArgs[1], createdObjectReferences);
            if (key == null && value == null)
            {
                // Failed to create both the key and the value.
                return null;
            }
            return Activator.CreateInstance(keyValuePairType, key, value);
        }

        // Fills an array of the element type; null when every element came back null.
        private static object GenerateArray(Type arrayType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            Type elementType = arrayType.GetElementType();
            Array array = Array.CreateInstance(elementType, size);
            ObjectGenerator generator = new ObjectGenerator();
            bool anyElementCreated = false;
            for (int i = 0; i < size; i++)
            {
                object element = generator.GenerateObject(elementType, createdObjectReferences);
                array.SetValue(element, i);
                anyElementCreated |= element != null;
            }
            return anyElementCreated ? (object)array : null;
        }

        // Populates a dictionary through its Add/Contains(Key) methods found by reflection.
        private static object GenerateDictionary(Type dictionaryType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            Type keyType = typeof(object);
            Type valueType = typeof(object);
            if (dictionaryType.IsGenericType)
            {
                Type[] typeArgs = dictionaryType.GetGenericArguments();
                keyType = typeArgs[0];
                valueType = typeArgs[1];
            }
            object dictionary = Activator.CreateInstance(dictionaryType);
            MethodInfo add = dictionaryType.GetMethod("Add") ?? dictionaryType.GetMethod("TryAdd");
            MethodInfo contains = dictionaryType.GetMethod("Contains") ?? dictionaryType.GetMethod("ContainsKey");
            ObjectGenerator generator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object key = generator.GenerateObject(keyType, createdObjectReferences);
                if (key == null)
                {
                    // Cannot generate a valid key, so the dictionary cannot be filled.
                    return null;
                }
                bool alreadyPresent = (bool)contains.Invoke(dictionary, new object[] { key });
                if (!alreadyPresent)
                {
                    object value = generator.GenerateObject(valueType, createdObjectReferences);
                    add.Invoke(dictionary, new object[] { key, value });
                }
            }
            return dictionary;
        }

        // Returns the first declared enum member, or null for an empty enum.
        private static object GenerateEnum(Type enumType)
        {
            Array values = Enum.GetValues(enumType);
            return values.Length > 0 ? values.GetValue(0) : null;
        }

        // Produces an IQueryable by generating a backing list/array and wrapping it.
        private static object GenerateQueryable(Type queryableType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            bool isGeneric = queryableType.IsGenericType;
            object source = isGeneric
                ? GenerateCollection(typeof(List<>).MakeGenericType(queryableType.GetGenericArguments()), size, createdObjectReferences)
                : GenerateArray(typeof(object[]), size, createdObjectReferences);
            if (source == null)
            {
                return null;
            }
            if (isGeneric)
            {
                // Bind AsQueryable<T> through its IEnumerable<T> overload.
                Type enumerableType = typeof(IEnumerable<>).MakeGenericType(queryableType.GetGenericArguments());
                MethodInfo asQueryable = typeof(Queryable).GetMethod("AsQueryable", new[] { enumerableType });
                return asQueryable.Invoke(null, new[] { source });
            }
            return Queryable.AsQueryable((IEnumerable)source);
        }

        // Fills any collection exposing an Add method; null when all elements were null.
        private static object GenerateCollection(Type collectionType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            Type elementType = collectionType.IsGenericType
                ? collectionType.GetGenericArguments()[0]
                : typeof(object);
            object collection = Activator.CreateInstance(collectionType);
            MethodInfo add = collectionType.GetMethod("Add");
            ObjectGenerator generator = new ObjectGenerator();
            bool anyElementCreated = false;
            for (int i = 0; i < size; i++)
            {
                object element = generator.GenerateObject(elementType, createdObjectReferences);
                add.Invoke(collection, new object[] { element });
                anyElementCreated |= element != null;
            }
            return anyElementCreated ? collection : null;
        }

        // A Nullable<T> sample is simply a sample of the underlying T.
        private static object GenerateNullable(Type nullableType, Dictionary<Type, object> createdObjectReferences)
        {
            Type underlyingType = nullableType.GetGenericArguments()[0];
            return new ObjectGenerator().GenerateObject(underlyingType, createdObjectReferences);
        }

        // Instantiates a POCO via its default constructor and fills its public
        // settable members; createdObjectReferences breaks reference cycles.
        private static object GenerateComplexObject(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            object instance;
            if (createdObjectReferences.TryGetValue(type, out instance))
            {
                // The object has been created already; reuse it so circular references terminate.
                return instance;
            }
            if (type.IsValueType)
            {
                instance = Activator.CreateInstance(type);
            }
            else
            {
                ConstructorInfo defaultCtor = type.GetConstructor(Type.EmptyTypes);
                if (defaultCtor == null)
                {
                    // Cannot instantiate the type because it doesn't have a default constructor.
                    return null;
                }
                instance = defaultCtor.Invoke(new object[0]);
            }
            createdObjectReferences.Add(type, instance);
            SetPublicProperties(type, instance, createdObjectReferences);
            SetPublicFields(type, instance, createdObjectReferences);
            return instance;
        }

        // Assigns sample values to every writable public instance property.
        private static void SetPublicProperties(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
        {
            ObjectGenerator generator = new ObjectGenerator();
            foreach (PropertyInfo property in type.GetProperties(BindingFlags.Public | BindingFlags.Instance))
            {
                if (property.CanWrite)
                {
                    object propertyValue = generator.GenerateObject(property.PropertyType, createdObjectReferences);
                    property.SetValue(obj, propertyValue, null);
                }
            }
        }

        // Assigns sample values to every public instance field.
        private static void SetPublicFields(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
        {
            ObjectGenerator generator = new ObjectGenerator();
            foreach (FieldInfo field in type.GetFields(BindingFlags.Public | BindingFlags.Instance))
            {
                object fieldValue = generator.GenerateObject(field.FieldType, createdObjectReferences);
                field.SetValue(obj, fieldValue);
            }
        }

        // Factories for the primitive/system types. Each instance hands out an
        // increasing index so that repeated samples are distinguishable.
        private class SimpleTypeObjectGenerator
        {
            private long _index = 0;
            private static readonly Dictionary<Type, Func<long, object>> DefaultGenerators = InitializeGenerators();

            [SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity", Justification = "These are simple type factories and cannot be split up.")]
            private static Dictionary<Type, Func<long, object>> InitializeGenerators()
            {
                return new Dictionary<Type, Func<long, object>>
                {
                    { typeof(bool), index => true },
                    { typeof(byte), index => (byte)64 },
                    { typeof(char), index => (char)65 },
                    { typeof(DateTime), index => DateTime.Now },
                    { typeof(DateTimeOffset), index => new DateTimeOffset(DateTime.Now) },
                    { typeof(DBNull), index => DBNull.Value },
                    { typeof(decimal), index => (decimal)index },
                    { typeof(double), index => (double)(index + 0.1) },
                    { typeof(Guid), index => Guid.NewGuid() },
                    { typeof(short), index => (short)(index % short.MaxValue) },
                    { typeof(int), index => (int)(index % int.MaxValue) },
                    { typeof(long), index => (long)index },
                    { typeof(object), index => new object() },
                    { typeof(sbyte), index => (sbyte)64 },
                    { typeof(float), index => (float)(index + 0.1) },
                    { typeof(string), index => string.Format(CultureInfo.CurrentCulture, "sample string {0}", index) },
                    { typeof(TimeSpan), index => TimeSpan.FromTicks(1234567) },
                    { typeof(ushort), index => (ushort)(index % ushort.MaxValue) },
                    { typeof(uint), index => (uint)(index % uint.MaxValue) },
                    { typeof(ulong), index => (ulong)index },
                    { typeof(Uri), index => new Uri(string.Format(CultureInfo.CurrentCulture, "http://webapihelppage{0}.com", index)) },
                };
            }

            public static bool CanGenerateObject(Type type)
            {
                return DefaultGenerators.ContainsKey(type);
            }

            public object GenerateObject(Type type)
            {
                return DefaultGenerators[type](++_index);
            }
        }
    }
}
/******************************************************************** The Multiverse Platform is made available under the MIT License. Copyright (c) 2012 The Multiverse Foundation Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*********************************************************************/ using System; using System.Text; using System.Xml; using Axiom.Graphics; using Axiom.MathLib; namespace Axiom.SceneManagers.Multiverse { public delegate void TerrainSplatModificationStateChangedHandler(ITerrainMaterialConfig config, bool state); public delegate void TerrainSplatChangedHandler(ITerrainMaterialConfig config, MosaicTile tile, int worldXMeters, int worldZMeters, int sizeXMeters, int sizeZMeters); public class AlphaSplatTerrainConfig : ITerrainMaterialConfig { public static readonly int MAX_LAYER_TEXTURES = 8; public static readonly int NUM_ALPHA_MAPS = MAX_LAYER_TEXTURES / 4; // 4 channels per map private const string PRE_ALPHA_NAME = "Alpha"; private const string POST_ALPHA_NAME = "MosaicName"; private const string PRE_LAYER_NAME = "Layer"; private const string POST_LAYER_NAME = "TextureName"; public event TerrainSplatModificationStateChangedHandler TerrainSplatModificationStateChanged; public event TerrainSplatChangedHandler TerrainSplatChanged; private bool m_wasModifiedBeforeSuspend; private bool m_wasChangedDuringSuspend; protected bool useParams = true; protected float textureTileSize = 5; private readonly string[] layerTextureNames = new string[MAX_LAYER_TEXTURES]; private readonly string[] alphaMapMosaicNames = new string[NUM_ALPHA_MAPS]; private readonly TextureMosaic[] alphaMapMosaics = new TextureMosaic[NUM_ALPHA_MAPS]; private string detailTextureName = "terrain_detail.dds"; public AutoSplatRules AutoSplatRules { get; set;} private bool m_changeNotificationEnabled = true; private bool m_locallyModified; public AlphaSplatTerrainConfig() { InitTextureNames(); } public AlphaSplatTerrainConfig(string title, AutoSplatRules autoSplatRules, MosaicDescription srcdesc) { InitTextureNames(); // Override texture names from rules, if applicable for (int i = 0; i < autoSplatRules.layerTextureNames.Length; i++) { if (!string.IsNullOrEmpty(autoSplatRules.layerTextureNames[i])) { 
layerTextureNames[i] = autoSplatRules.layerTextureNames[i]; } } // Create alpha map texture mosaics for (int i = 0; i < NUM_ALPHA_MAPS; i++) { string alphaMapMosaicName = title + "AlphaMap" + i; MosaicDescription desc = new MosaicDescription(alphaMapMosaicName, srcdesc); alphaMapMosaicNames[i] = alphaMapMosaicName; alphaMapMosaics[i] = new TextureMosaic(alphaMapMosaicName, 0, desc); alphaMapMosaics[i].MosaicModificationStateChanged += Mosaic_OnMosaicModificationStateChanged; alphaMapMosaics[i].MosaicChanged += Mosaic_OnMosaicChanged; } AutoSplatRules = autoSplatRules; InitializeAutoSplatRules(srcdesc); } // DEPRECATED: WorldManager uses this overload, but it is not sufficient for // constructing reasonalbe AutoSplatRules. To construct the rules we need // the global min/max height values, and these values do not necessarily // appear in the mosaic descriptions for the alpha mosaics. The mosaic // description we really want is the one for the height-f public AlphaSplatTerrainConfig(XmlReader r) { InitTextureNames(); FromXml(r); InitializeAutoSplatRules(alphaMapMosaics == null ? 
null : alphaMapMosaics[0].MosaicDesc); } public AlphaSplatTerrainConfig( XmlReader r, MosaicDescription heightfieldMosaic ) { InitTextureNames(); FromXml( r ); InitializeAutoSplatRules( heightfieldMosaic ); } // XXXMLM - probably belongs in a AlphaSplatTerrainGenerator class, consider refactor public void Save(bool force) { foreach (TextureMosaic tm in alphaMapMosaics) { tm.Save(force); } } private void InitializeAutoSplatRules(MosaicDescription desc) { if (AutoSplatRules != null) { return; } long minHeightMM; long maxHeightMM; if (desc == null) { // Set arbitrary min/max heights minHeightMM = -10000 * (long) TerrainManager.oneMeter; maxHeightMM = 10000*(long) TerrainManager.oneMeter; } else { minHeightMM = (long) (desc.GlobalMinHeightMeters * TerrainManager.oneMeter); maxHeightMM = (long) (desc.GlobalMaxHeightMeters * TerrainManager.oneMeter); } AutoSplatRules = new AutoSplatRules(minHeightMM, maxHeightMM, new AutoSplatConfig()); } private void InitTextureNames() { for (int i = 0; i < layerTextureNames.Length; i++) { layerTextureNames[i] = ""; } } public string GetLayerTextureAttributeName(int index) { return PRE_LAYER_NAME + (index + 1) + POST_LAYER_NAME; } public string GetAlphaMapAttributeName(int index) { return PRE_ALPHA_NAME + index + POST_ALPHA_NAME; } public void FromXml(XmlReader r) { for (int i = 0; i < r.AttributeCount; i++) { r.MoveToAttribute(i); if (r.Name.StartsWith(PRE_ALPHA_NAME) && r.Name.EndsWith(POST_ALPHA_NAME)) { string indexStr = r.Name.Substring(PRE_ALPHA_NAME.Length, r.Name.Length - PRE_ALPHA_NAME.Length - POST_ALPHA_NAME.Length); int index = int.Parse(indexStr); SetAlphaMapName(index, r.Value); } else if (r.Name.StartsWith(PRE_LAYER_NAME) && r.Name.EndsWith(POST_LAYER_NAME)) { string indexStr = r.Name.Substring(PRE_LAYER_NAME.Length, r.Name.Length - PRE_LAYER_NAME.Length - POST_LAYER_NAME.Length); int index = int.Parse(indexStr) - 1; SetLayerTextureName(index, r.Value); } else { switch (r.Name) { case "Type": break; case "UseParams": 
useParams = (r.Value == "True"); break; case "TextureTileSize": textureTileSize = float.Parse(r.Value); break; case "DetailTextureName": detailTextureName = r.Value; break; } } } r.MoveToElement(); Modified = false; } private void Mosaic_OnMosaicModificationStateChanged(Mosaic mosaic, bool state) { FireTerrainSplatModificationStateChanged(); } private void Mosaic_OnMosaicChanged(Mosaic mosaic, MosaicTile tile, int worldXMeters, int worldZMeters, int sizeXMeters, int sizeZMeters) { FireTerrainSplatChanged(tile, worldXMeters, worldZMeters, sizeXMeters, sizeZMeters); } private void FireTerrainSplatModificationStateChanged() { if (m_changeNotificationEnabled && TerrainSplatModificationStateChanged != null) { TerrainSplatModificationStateChanged(this, Modified); } } private void FireTerrainSplatChanged() { FireTerrainSplatChanged(null, 0, 0, 0, 0); } private void FireTerrainSplatChanged(int worldXMeters, int worldZMeters, int sizeXMeters, int sizeZMeters) { FireTerrainSplatChanged(null, worldXMeters, worldZMeters, sizeXMeters, sizeZMeters); } private void FireTerrainSplatChanged(MosaicTile tile, int worldXMeters, int worldZMeters, int sizeXMeters, int sizeZMeters) { if (m_changeNotificationEnabled && TerrainSplatChanged != null) { TextureMosaicTile textile = tile as TextureMosaicTile; if (textile == null) { // Refresh all textures within the mosaic // that have dirty texture images RefreshTextures(); } else { // Only refresh the given tile textile.RefreshTexture(); } TerrainSplatChanged(this, tile, worldXMeters, worldZMeters, sizeXMeters, sizeZMeters); } } public bool Modified { get { if (m_locallyModified) { return true; } foreach (TextureMosaic alphaMosaic in alphaMapMosaics) { if (alphaMosaic != null && alphaMosaic.Modified) { return true; } } return false; } private set { if (m_locallyModified == value) { FireTerrainSplatChanged(); return; } bool oldAggregateState = Modified; m_locallyModified = value; if (oldAggregateState != m_locallyModified) { 
FireTerrainSplatModificationStateChanged(); } FireTerrainSplatChanged(); } } public bool UseParams { get { return useParams; } set { if (useParams != value) { useParams = value; Modified = true; } } } public float TextureTileSize { get { return textureTileSize; } set { if (textureTileSize != value) { textureTileSize = value; Modified = true; } } } public string GetAlphaMapName(int index) { return alphaMapMosaicNames[index]; } public void SetAlphaMapName(int index, string alphaMapName) { if (string.IsNullOrEmpty(alphaMapName)) { alphaMapName = null; } if (alphaMapMosaicNames[index] == alphaMapName) { return; } if (alphaMapMosaics[index] != null) { alphaMapMosaics[index].MosaicModificationStateChanged -= Mosaic_OnMosaicModificationStateChanged; alphaMapMosaics[index].MosaicChanged -= Mosaic_OnMosaicChanged; } alphaMapMosaicNames[index] = alphaMapName; alphaMapMosaics[index] = alphaMapName == null ? null : new TextureMosaic(alphaMapName, 0); if (alphaMapMosaics[index] != null) { alphaMapMosaics[index].MosaicModificationStateChanged += Mosaic_OnMosaicModificationStateChanged; alphaMapMosaics[index].MosaicChanged += Mosaic_OnMosaicChanged; } Modified = true; } public TextureMosaic GetAlphaMap(int index) { return alphaMapMosaics[index]; } public string GetLayerTextureName(int index) { return layerTextureNames[index]; } public void SetLayerTextureName(int index, string layerTextureName) { if (string.IsNullOrEmpty(layerTextureName)) { layerTextureName = ""; } if (layerTextureNames[index] == layerTextureName) { return; } layerTextureNames[index] = layerTextureName; Modified = true; } public string DetailTextureName { get { return detailTextureName; } set { detailTextureName = value; Modified = true; } } #region ITerrainMaterialConfig Members public ITerrainMaterial NewTerrainMaterial(int pageX, int pageZ) { return new AlphaSplatTerrainMaterial(this, pageX, pageZ); } public void UpdateMaterial(Material material) { if (useParams) { // Note: If we change the number of alpha 
map mosaics, we may need to update the GPU // shader to support Pass pass = material.GetTechnique(0).GetPass(0); GpuProgramParameters vertexParams = pass.VertexProgramParameters; vertexParams.SetNamedConstant("textureTileSize", new Vector3(textureTileSize, 0, 0)); // set splatting textures int offset = alphaMapMosaicNames.Length; for (int i = 0; i < layerTextureNames.Length; i++) { pass.GetTextureUnitState(offset + i).SetTextureName(layerTextureNames[i]); } pass.GetTextureUnitState(10).SetTextureName(detailTextureName); } } #endregion /// <summary> /// Get the texture map for a point specified by world coordinates /// A 8-byte array is returned as the map. The map can have 8 possible /// mappings associated with it with one byte per map. Ultimately, the /// first 4 bytes correspond with Alpha Map 0 and the latter 4 bytes /// correspond with Alpha Map 1. /// </summary> /// <param name="worldXMeters"></param> /// <param name="worldZMeters"></param> /// <returns></returns> public byte[] GetWorldTextureMap(int worldXMeters, int worldZMeters) { byte[] textureMap = new byte[MAX_LAYER_TEXTURES]; for (int i=0; i < alphaMapMosaics.Length; i++) { TextureMosaic alphaMapMosaic = alphaMapMosaics[i]; byte[] mosaicAlphaMap = alphaMapMosaic.GetWorldTextureMap(worldXMeters, worldZMeters); Array.Copy(mosaicAlphaMap, 0, textureMap, i * 4, 4); } return textureMap; } /// <summary> /// Set the texture map for a point specified by world coordinates. /// The 8-byte array is map. The map can have 8 possible /// mappings associated with it with one byte per map. Ultimately, the /// first 4 bytes correspond with Alpha Map 0 and the latter 4 bytes /// correspond with Alpha Map 1. 
/// </summary>
        /// <param name="worldXMeters">World-space X coordinate, in meters.</param>
        /// <param name="worldZMeters">World-space Z coordinate, in meters.</param>
        /// <param name="textureMap">8-byte splat map; bytes 0-3 feed alpha map 0, bytes 4-7 feed alpha map 1.</param>
        /// <returns></returns>
        public void SetWorldTextureMap(int worldXMeters, int worldZMeters, byte[] textureMap)
        {
            // Hand each alpha-map mosaic its own 4-channel slice of the 8-byte map.
            for (int i = 0; i < alphaMapMosaics.Length; i++)
            {
                byte[] mosaicAlphaMap = new byte[4];
                Array.Copy(textureMap, i * 4, mosaicAlphaMap, 0, 4);
                TextureMosaic alphaMapMosaic = alphaMapMosaics[i];
                alphaMapMosaic.SetWorldTextureMap(worldXMeters, worldZMeters, mosaicAlphaMap);
            }
        }

        /// <summary>
        /// Get the texture map for a point specified by world coordinates.
        /// The 8-float array is the map. The map can have 8 possible
        /// mappings associated with it with one float per map. The floats
        /// are normalized between the values of 0 and 1 inclusive.
        /// Ultimately, the first 4 floats correspond with Alpha Map 0 and
        /// the latter 4 floats correspond with Alpha Map 1.
        ///
        /// Each normalized float is derived from a byte, so the range
        /// of precision is fairly narrow (1/255th or around 0.0039). So,
        /// calling set followed by get may show a slight variance due to
        /// the precision.
        /// </summary>
        /// <param name="worldXMeters">World-space X coordinate, in meters.</param>
        /// <param name="worldZMeters">World-space Z coordinate, in meters.</param>
        /// <returns>8 floats in [0, 1], one per splat layer.</returns>
        public float[] GetWorldTextureMapNormalized(int worldXMeters, int worldZMeters)
        {
            byte[] textureMap = GetWorldTextureMap(worldXMeters, worldZMeters);
            float[] textureMapNormalized = new float[textureMap.Length];
            for (int i = 0; i < textureMapNormalized.Length; i++)
            {
                textureMapNormalized[i] = textureMap[i] / 255f;
            }
            return textureMapNormalized;
        }

        /// <summary>
        /// Set the texture map for a point specified by world coordinates.
        /// The 8-float array is the map. The map can have 8 possible
        /// mappings associated with it with one float per map. The floats
        /// should be normalized between the values of 0 and 1 inclusive.
        /// Ultimately, the first 4 floats correspond with Alpha Map 0 and
        /// the latter 4 floats correspond with Alpha Map 1.
        ///
        /// Each normalized float gets converted into a byte, so the range
        /// of precision is fairly narrow (1/255th or around 0.0039). So,
        /// calling set followed by get may show a slight variance due to
        /// the precision.
        /// </summary>
        /// <param name="worldXMeters">World-space X coordinate, in meters.</param>
        /// <param name="worldZMeters">World-space Z coordinate, in meters.</param>
        /// <param name="textureMapNormalized">8 floats in [0, 1], one per splat layer.</param>
        /// <returns></returns>
        public void SetWorldTextureMapNormalized(int worldXMeters, int worldZMeters, float[] textureMapNormalized)
        {
            byte[] textureMap = new byte[textureMapNormalized.Length];
            for (int i = 0; i < textureMap.Length; i++)
            {
                // BUG FIX: this previously scaled textureMap[i] (the freshly
                // allocated output array, always 0) instead of the caller's
                // normalized input, which zeroed out every write.
                textureMap[i] = (byte) Math.Round(255f * textureMapNormalized[i]);
            }
            SetWorldTextureMap(worldXMeters, worldZMeters, textureMap);
        }

        /// <summary>
        /// Suspends modification/change event firing; remembers the current
        /// Modified state so Resume can decide what to re-fire.
        /// </summary>
        public void SuspendChangeNotifications()
        {
            m_wasModifiedBeforeSuspend = Modified;
            m_wasChangedDuringSuspend = false;
            m_changeNotificationEnabled = false;
        }

        /// <summary>
        /// Re-enables change notifications, restoring the pre-suspend modified
        /// flag and firing any events implied by state changes in between.
        /// </summary>
        public void ResumeChangeNotifications()
        {
            m_changeNotificationEnabled = true;
            bool shouldFire = Modified != m_wasModifiedBeforeSuspend;
            m_locallyModified = m_wasModifiedBeforeSuspend;
            if (shouldFire)
            {
                FireTerrainSplatModificationStateChanged();
            }
            if (m_wasChangedDuringSuspend)
            {
                FireTerrainSplatChanged();
            }
        }

        /// <summary>
        /// Refreshes the textures of every alpha-map mosaic.
        /// </summary>
        public void RefreshTextures()
        {
            foreach (TextureMosaic mosaic in alphaMapMosaics)
            {
                mosaic.RefreshTextures();
            }
        }

        /// <summary>
        /// Samples the normalized splat maps over a rectangular grid of world
        /// positions, stepping by metersPerSample in each axis.
        /// </summary>
        public float[,][] GetNormalizedSamples(int xWorldLocationMeters, int zWorldLocationMeters, int sizeXSamples, int sizeZSamples, int metersPerSample)
        {
            float[,][] normalizedSamples = new float[sizeXSamples, sizeZSamples][];
            int worldZMeters = zWorldLocationMeters;
            for (int sampleZ = 0; sampleZ < sizeZSamples; sampleZ++)
            {
                int worldXMeters = xWorldLocationMeters;
                for (int sampleX = 0; sampleX < sizeXSamples; sampleX++)
                {
                    byte[] textureMapping = GetWorldTextureMap(worldXMeters, worldZMeters);
                    float[] normalizedMapping = ConvertMappingByteToNormalized(textureMapping);
                    normalizedSamples[sampleX, sampleZ] = normalizedMapping;
                    worldXMeters += metersPerSample;
                }
                worldZMeters +=
metersPerSample; } return normalizedSamples; } public void SetNormalizedSamples(int xWorldLocationMeters, int zWorldLocationMeters, int metersPerSample, float[,][] normalizedSamples) { int sizeXSamples = normalizedSamples.GetLength(0); int sizeZSamples = normalizedSamples.GetLength(1); // We use shorts instead of bytes because we need signed values float[,][] diffArray = new float[sizeXSamples, sizeZSamples][]; bool wasModified = Modified; try { m_changeNotificationEnabled = false; // Calculate the heights and set them { int worldZMeters = zWorldLocationMeters; for (int sampleZ = 0; sampleZ < sizeZSamples; sampleZ++) { int worldXMeters = xWorldLocationMeters; for (int sampleX = 0; sampleX < sizeXSamples; sampleX++) { float[] mappingNormalized = normalizedSamples[sampleX, sampleZ]; //todo: figure out whether we really want to do these checks here, //todo: as they may be a performance limiter // Make sure the normalized maps are within the proper bounds float total = 0; for (int i = 0; i < mappingNormalized.Length; i++) { if (mappingNormalized[i] < 0) { mappingNormalized[i] = 0f; } else if (mappingNormalized[i] > 1) { mappingNormalized[i] = 1f; } total += mappingNormalized[i]; } // Prevent oversaturation of textures if (total > 1f) { float reductionFactor = 1/total; for (int i=0; i < MAX_LAYER_TEXTURES; i++) { mappingNormalized[i] *= reductionFactor; } } float[] originalMappingNormalized = GetWorldTextureMapNormalized(worldXMeters, worldZMeters); float[] mappingDiff = CalculateMappingDiff(mappingNormalized, originalMappingNormalized); diffArray[sampleX, sampleZ] = mappingDiff; worldXMeters += metersPerSample; } worldZMeters += metersPerSample; } // DumpDiffMap("Diffs", diffArray); // DumpMapSaturation("Before apply", GetNormalizedSamples(xWorldLocationMeters, zWorldLocationMeters, sizeXSamples, sizeZSamples, metersPerSample)); AdjustWorldSamples(xWorldLocationMeters, zWorldLocationMeters, metersPerSample, diffArray); // DumpMapSaturation("After apply", 
GetNormalizedSamples(xWorldLocationMeters, zWorldLocationMeters, sizeXSamples, sizeZSamples, metersPerSample)); // float[,][] postSet = GetNormalizedSamples(xWorldLocationMeters, zWorldLocationMeters, sizeXSamples, sizeZSamples, 1); // CompareMaps("Set vs. Actual", normalizedSamples, postSet); } } finally { m_changeNotificationEnabled = true; if (Modified != wasModified) { FireTerrainSplatModificationStateChanged(); } FireTerrainSplatChanged(xWorldLocationMeters, zWorldLocationMeters, sizeXSamples * metersPerSample, sizeZSamples * metersPerSample); } } private static void CompareMaps(string title, float[,][] map1, float[,][] map2) { StringBuilder builder = new StringBuilder(); builder.AppendLine(title); for (int x = 0; x < map1.GetLength(0); x++) { for (int z = 0; z < map1.GetLength(1); z++) { CompareMappingInternal(builder, map1[x, z], map2[x, z]); } builder.AppendLine(); } Console.WriteLine(builder); } private static void CompareMapping(string title, float[] map1, float[] map2) { StringBuilder builder = new StringBuilder(); builder.AppendLine(title); CompareMappingInternal(builder, map1, map2); Console.WriteLine(builder); } private static void CompareMappingInternal(StringBuilder builder, float[] map1, float[] map2) { for (int i = 0; i < map1.Length; i++) { if (map1[i] == map2[i]) { builder.Append("="); } else if (Math.Abs(map1[i] - map2[i]) < 0.01) { builder.Append("~"); // Close enough } else if (map1[i] < map2[i]) { builder.Append("<"); } else { builder.Append(">"); } } builder.Append(" "); } private float[] CalculateMappingDiff(float[] newMapping, float[] oldMapping) { float[] diff = new float[oldMapping.Length]; for (int i=0; i < diff.Length; i++) { diff[i] = newMapping[i] - oldMapping[i]; } return diff; } private float[] ConvertMappingByteToNormalized(byte[] byteMapping) { int length = byteMapping.Length; float[] normalizedMapping = new float[length]; for (int i=0; i < length; i++) { normalizedMapping[i] = byteMapping[i]/255f; } return normalizedMapping; } 
// Converts per-map normalized channel weights (0..1) to bytes (0..255),
// clamping out-of-range inputs. Tracks the largest output channel so it can
// absorb any rounding shortfall and keep the channel sum ("saturation") at 255.
private byte[][] ConvertUpdateNormalizedToByte(float[][] normalizedMapping)
{
    byte[][] byteMapping = new byte[normalizedMapping.Length][];
    byte biggestValue = 0;
    int biggestValueOuterIndex = 0;
    int biggestValueInnerIndex = 0;
    int saturation = 0;
    for (int outer=0; outer < normalizedMapping.Length; outer++)
    {
        byteMapping[outer] = new byte[normalizedMapping[outer].Length];
        for (int inner = 0; inner < normalizedMapping[outer].Length; inner++)
        {
            if (normalizedMapping[outer][inner] > 1)
            {
                byteMapping[outer][inner] = 255;
            }
            else if (normalizedMapping[outer][inner] < 0)
            {
                byteMapping[outer][inner] = 0;
            }
            else
            {
                byteMapping[outer][inner] = (byte) (normalizedMapping[outer][inner]*255f);
            }
            // Remember where the biggest channel lives so the round-off
            // correction below can be applied to it.
            if (byteMapping[outer][inner] > biggestValue)
            {
                biggestValue = byteMapping[outer][inner];
                biggestValueOuterIndex = outer;
                biggestValueInnerIndex = inner;
            }
            saturation += byteMapping[outer][inner];
        }
    }
    // If the saturation isn't perfect, nudge the biggest value
    // up until it is.  This helps account for round off error
    // during the conversion.  Values that are off by just 1 have
    // noticeable saturation difference.
    if (saturation != 0 && saturation < 255)
    {
        byteMapping[biggestValueOuterIndex][biggestValueInnerIndex] += (byte) (255 - saturation);
    }
    return byteMapping;
}

// Applies per-sample texture-weight diffs to the alpha map mosaics starting
// at the given world location.  Fast path: the diff grid lines up exactly
// with mosaic samples.  Slow path: each diff is distributed bilinearly over
// the (up to four) samples it overlaps, then re-averaged by total weight.
private void AdjustWorldSamples(int worldXMeters, int worldZMeters, int metersPerSample, float[,][] textureMapDiffs)
{
    int sampleX;
    int sampleZ;
    float sampleXfrac;
    float sampleZfrac;
    alphaMapMosaics[0].WorldToSampleCoords(worldXMeters, worldZMeters, out sampleX, out sampleZ, out sampleXfrac, out sampleZfrac);
    int alphaMapMps = alphaMapMosaics[0].MosaicDesc.MetersPerSample;
    if ((sampleXfrac == 0) && (sampleZfrac == 0) && (metersPerSample == alphaMapMps))
    {
        // Fast path: one diff cell maps directly onto one mosaic sample.
        for (int z = 0; z < textureMapDiffs.GetLength(1); z++)
        {
            for (int x = 0; x < textureMapDiffs.GetLength(0); x++)
            {
                byte[][] existing = new byte[NUM_ALPHA_MAPS][];
                float[][] updateNormalized = new float[NUM_ALPHA_MAPS][];
                for (int i = 0; i < NUM_ALPHA_MAPS; i++)
                {
                    existing[i] = alphaMapMosaics[i].GetSampleTextureMap(sampleX + x, sampleZ + z);
                    // Each alpha map carries 4 channels; offset i*4 selects
                    // this map's slice of the flat diff array.
                    updateNormalized[i] = ApplyDiffs(existing[i], textureMapDiffs[x, z], i * 4);
                }
                // Make sure the resulting change is normalized
                NormalizeUpdate(updateNormalized, existing);
                byte[][] updateBytes = ConvertUpdateNormalizedToByte(updateNormalized);
                for (int i = 0; i < NUM_ALPHA_MAPS; i++)
                {
                    alphaMapMosaics[i].SetSampleTextureMap(sampleX + x, sampleZ + z, updateBytes[i]);
                }
            }
        }
    }
    else
    {
        // Slow path: the diff grid does not line up with the mosaic grid.
        int upperLeftSampleX = sampleX;
        int upperLeftSampleZ = sampleZ;
        alphaMapMosaics[0].WorldToSampleCoords(
            worldXMeters + textureMapDiffs.GetLength(0) * metersPerSample,
            worldZMeters + textureMapDiffs.GetLength(1) * metersPerSample,
            out sampleX, out sampleZ, out sampleXfrac, out sampleZfrac);
        int lowerRightSampleX = (sampleXfrac == 0) ? sampleX : sampleX + 1;
        int lowerRightSampleZ = (sampleZfrac == 0) ?
            sampleZ : sampleZ + 1;
        float[,][] diffArray = new float[lowerRightSampleX - upperLeftSampleX + 1, lowerRightSampleZ - upperLeftSampleZ + 1][];
        float[,] appliedWeight = new float[lowerRightSampleX - upperLeftSampleX + 1, lowerRightSampleZ - upperLeftSampleZ + 1];
        for (int z = 0; z < diffArray.GetLength(1); z++)
        {
            for (int x = 0; x < diffArray.GetLength(0); x++)
            {
                diffArray[x, z] = new float[MAX_LAYER_TEXTURES];
            }
        }
        int currentWorldZMeters = worldZMeters;
        for (int z = 0; z < textureMapDiffs.GetLength(1); z++)
        {
            int currentWorldXMeters = worldXMeters;
            for (int x = 0; x < textureMapDiffs.GetLength(0); x++)
            {
                alphaMapMosaics[0].WorldToSampleCoords(
                    currentWorldXMeters, currentWorldZMeters,
                    out sampleX, out sampleZ, out sampleXfrac, out sampleZfrac);
                int xpos = sampleX - upperLeftSampleX;
                int zpos = sampleZ - upperLeftSampleZ;
                // Accumulate this diff into the four surrounding samples,
                // weighted by bilinear coverage.
                UpdateWeightedDiffs(diffArray[xpos, zpos], CalculateWeightedDiffs(textureMapDiffs[x, z], (1f - sampleXfrac) * (1f - sampleZfrac)));
                UpdateWeightedDiffs(diffArray[xpos + 1, zpos], CalculateWeightedDiffs(textureMapDiffs[x, z], (sampleXfrac) * (1f - sampleZfrac)));
                UpdateWeightedDiffs(diffArray[xpos, zpos + 1], CalculateWeightedDiffs(textureMapDiffs[x, z], (1f - sampleXfrac) * (sampleZfrac)));
                UpdateWeightedDiffs(diffArray[xpos + 1, zpos + 1], CalculateWeightedDiffs(textureMapDiffs[x, z], (sampleXfrac) * (sampleZfrac)));
                // Track total weight applied per sample so it can be
                // averaged back out below.
                appliedWeight[xpos, zpos] += (1f - sampleXfrac) * (1f - sampleZfrac);
                appliedWeight[xpos + 1, zpos] += sampleXfrac * (1f - sampleZfrac);
                appliedWeight[xpos, zpos + 1] += (1f - sampleXfrac) * sampleZfrac;
                appliedWeight[xpos + 1, zpos + 1] += sampleXfrac * sampleZfrac;
                currentWorldXMeters += metersPerSample;
            }
            currentWorldZMeters += metersPerSample;
        }
        // Average the accumulated diffs by the total weight that landed on
        // each sample; samples untouched by any diff get zero.
        float[,][] averagedDiffArray = new float[lowerRightSampleX - upperLeftSampleX + 1, lowerRightSampleZ - upperLeftSampleZ + 1][];
        for (int z = 0; z < diffArray.GetLength(1); z++)
        {
            for (int x = 0; x < diffArray.GetLength(0); x++)
            {
                averagedDiffArray[x,z] = new float[MAX_LAYER_TEXTURES];
                for
(int i = 0; i < averagedDiffArray[x,z].Length; i++)
                {
                    if (appliedWeight[x, z] == 0)
                    {
                        averagedDiffArray[x, z][i] = 0;
                    }
                    else
                    {
                        averagedDiffArray[x, z][i] = diffArray[x, z][i]/appliedWeight[x, z];
                    }
                }
            }
        }
        // Apply the averaged diffs to every touched sample.
        for (int z = 0; z < averagedDiffArray.GetLength(1); z++)
        {
            for (int x = 0; x < averagedDiffArray.GetLength(0); x++)
            {
                byte[][] existing = new byte[NUM_ALPHA_MAPS][];
                float[][] updateNormalized = new float[NUM_ALPHA_MAPS][];
                for (int i = 0; i < NUM_ALPHA_MAPS; i++)
                {
                    existing[i] = alphaMapMosaics[i].GetSampleTextureMap(upperLeftSampleX + x, upperLeftSampleZ + z);
                    updateNormalized[i] = ApplyDiffs(existing[i], averagedDiffArray[x, z], i * 4);
                }
                // Make sure the resulting change is normalized
                NormalizeUpdate(updateNormalized, existing);
                byte[][] updateBytes = ConvertUpdateNormalizedToByte(updateNormalized);
                for (int i = 0; i < NUM_ALPHA_MAPS; i++)
                {
                    alphaMapMosaics[i].SetSampleTextureMap(upperLeftSampleX + x, upperLeftSampleZ + z, updateBytes[i]);
                }
            }
        }
    }
}

// Applies one alpha map's 4-channel slice of the flat diff array to a
// byte-encoded sample, returning the normalized (0..1) result with each
// channel clamped to the valid range.
private float[] ApplyDiffs(byte[] source, float[] diffs, int diffItemOffset)
{
    float[] update = new float[4];
    for (int i = 0; i < 4; i++)
    {
        float diff = diffs[diffItemOffset + i];
        float normalizedSource = source[i]/255f;
        float value = normalizedSource + diff;
        if (value < 0)
        {
            value = 0;
        }
        if (value > 1)
        {
            value = 1;
        }
        update[i] = value;
    }
    return update;
}

// Returns a copy of diffs scaled by the given bilinear weight.
private float[] CalculateWeightedDiffs(float[] diffs, float weight)
{
    float[] weightedDiffs = new float[MAX_LAYER_TEXTURES];
    for (int i = 0; i < MAX_LAYER_TEXTURES; i++)
    {
        weightedDiffs[i] = diffs[i] * weight;
    }
    return weightedDiffs;
}

// Accumulates diffs2 into diffs1 element-wise.
private void UpdateWeightedDiffs(float[] diffs1, float[] diffs2)
{
    for (int i = 0; i < diffs1.Length; i++)
    {
        diffs1[i] += diffs2[i];
    }
}

// Rescales updateNormalized so the sum of all channels (the "saturation")
// is exactly 1.  If the update sums to 0 the sample is treated as
// uninitialized and is replaced by the existing byte state, converted to
// normalized form, so nothing changes.
private void NormalizeUpdate(float[][] updateNormalized, byte[][] existingBytes)
{
    float saturation = 0;
    for (int i = 0; i < updateNormalized.Length; i++)
    {
        for (int j = 0; j < updateNormalized[i].Length; j++)
        {
            float value = updateNormalized[i][j];
            saturation += value;
        }
    }
    // If the saturation is 1, then no need to adjust.
    if (saturation == 1)
    {
        return;
    }
    // If the saturation is 0, then we're probably in
    // an uninitialized state.  Don't perform the update,
    // but make sure the existing state doesn't change
    if (saturation == 0)
    {
        for (int i = 0; i < updateNormalized.Length; i++)
        {
            for (int j = 0; j < updateNormalized[i].Length; j++)
            {
                // BUG FIX: was "existingBytes[i][j] * 255", which inflated
                // each byte (0..255) to as much as 65025 in an array that
                // holds NORMALIZED 0..1 values (see ApplyDiffs and
                // ConvertMappingByteToNormalized, which both use /255f).
                // ConvertUpdateNormalizedToByte would then clamp every
                // nonzero channel to 255, destroying the existing state
                // this branch is explicitly trying to preserve.
                updateNormalized[i][j] = existingBytes[i][j] / 255f;
            }
        }
        return;
    }
    // Normalize the update
    for (int i = 0; i < updateNormalized.Length; i++)
    {
        for (int j = 0; j < updateNormalized[i].Length; j++)
        {
            updateNormalized[i][j] = updateNormalized[i][j] / saturation;
        }
    }
}

// Diagnostic: dumps a grid of short-valued diff maps to the console.
public static void DumpDiffMap(string title, short[,][] map)
{
    StringBuilder builder = new StringBuilder();
    builder.AppendLine(title);
    for (int x = 0; x < map.GetLength(0); x++)
    {
        for (int z = 0; z < map.GetLength(1); z++)
        {
            AppendMap(builder, map[x, z]);
            builder.Append(" ");
        }
        builder.AppendLine();
    }
    Console.WriteLine(builder);
}

// Appends one sample's channels as "[v v v ...]".
private static void AppendMap(StringBuilder builder, short[] map)
{
    builder.Append("[");
    for (int i = 0; i < map.Length; i++)
    {
        builder.Append(map[i].ToString("####"));
        builder.Append(" ");
    }
    builder.Append("] ");
}

// Diagnostic: prints the per-sample channel sums; collapses to a single
// line when every sample sums to exactly 1.
public static void DumpMapSaturation(string title, float[,][] map)
{
    StringBuilder builder = new StringBuilder();
    builder.AppendLine(title);
    bool perfectSaturation = true;
    for (int x = 0; x < map.GetLength(0); x++)
    {
        for (int z = 0; z < map.GetLength(1); z++)
        {
            float saturation = 0;
            for (int i = 0; i < map[x, z].Length; i++)
            {
                saturation += map[x, z][i];
            }
            builder.Append(saturation.ToString("F3"));
            builder.Append(" ");
            if (saturation != 1)
            {
                perfectSaturation = false;
            }
        }
        builder.AppendLine();
    }
    if (perfectSaturation)
    {
        Console.WriteLine(title + ": Perfect saturation");
    }
    else
    {
        Console.WriteLine(builder);
    }
}
}
}
/*
 * CCControlSwitch.h
 *
 * Copyright 2012 Yannick Loriot. All rights reserved.
 * http://yannickloriot.com
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 *
 */
using System.Diagnostics;
using Microsoft.Xna.Framework.Graphics;

namespace Cocos2D
{
    // Raised when the switch flips between on and off.
    public delegate void CCSwitchValueChangedDelegate(object sender, bool bState);

    /** @class CCControlSwitch Switch control for Cocos2D. */
    public class CCControlSwitch : CCControl
    {
        // True once the thumb has been dragged during the current touch.
        protected bool _moved;
        /** A Boolean value that determines the off/on state of the switch. */
        protected bool _on;
        // X offset between the initial touch and the slider position;
        // keeps the thumb from jumping under the finger.
        protected float _initialTouchXPosition;
        // Composite sprite that renders mask/on/off/thumb and labels.
        protected CCControlSwitchSprite _switchSprite;

        public event CCSwitchValueChangedDelegate OnValueChanged;

        // Disabling the control dims the switch sprite.
        public override bool Enabled
        {
            get { return base.Enabled; }
            set
            {
                _enabled = value;
                if (_switchSprite != null)
                {
                    _switchSprite.Opacity = (byte) (value ?
255 : 128);
                }
            }
        }

        /** Initializes a switch with a mask sprite, on/off sprites for on/off states and a thumb sprite. */
        protected virtual bool InitWithMaskSprite(CCSprite maskSprite, CCSprite onSprite, CCSprite offSprite, CCSprite thumbSprite)
        {
            return InitWithMaskSprite(maskSprite, onSprite, offSprite, thumbSprite, null, null);
        }

        /** Creates a switch with a mask sprite, on/off sprites for on/off states and a thumb sprite. */
        public CCControlSwitch(CCSprite maskSprite, CCSprite onSprite, CCSprite offSprite, CCSprite thumbSprite)
        {
            InitWithMaskSprite(maskSprite, onSprite, offSprite, thumbSprite, null, null);
        }

        /** Initializes a switch with a mask sprite, on/off sprites for on/off states, a thumb sprite and an on/off labels. */
        protected virtual bool InitWithMaskSprite(CCSprite maskSprite, CCSprite onSprite, CCSprite offSprite, CCSprite thumbSprite, CCLabelTTF onLabel, CCLabelTTF offLabel)
        {
            if (base.Init())
            {
                Debug.Assert(maskSprite != null, "Mask must not be nil.");
                Debug.Assert(onSprite != null, "onSprite must not be nil.");
                Debug.Assert(offSprite != null, "offSprite must not be nil.");
                Debug.Assert(thumbSprite != null, "thumbSprite must not be nil.");
                TouchEnabled = true;
                _on = true;
                _switchSprite = new CCControlSwitchSprite();
                _switchSprite.InitWithMaskSprite(maskSprite, onSprite, offSprite, thumbSprite, onLabel, offLabel);
                _switchSprite.Position = new CCPoint(_switchSprite.ContentSize.Width / 2, _switchSprite.ContentSize.Height / 2);
                AddChild(_switchSprite);
                IgnoreAnchorPointForPosition = false;
                AnchorPoint = new CCPoint(0.5f, 0.5f);
                ContentSize = _switchSprite.ContentSize;
                return true;
            }
            return false;
        }

        /** Creates a switch with a mask sprite, on/off sprites for on/off states, a thumb sprite and an on/off labels. */
        public CCControlSwitch(CCSprite maskSprite, CCSprite onSprite, CCSprite offSprite, CCSprite thumbSprite, CCLabelTTF onLabel, CCLabelTTF offLabel)
        {
            InitWithMaskSprite(maskSprite, onSprite, offSprite, thumbSprite, onLabel, offLabel);
        }

        /**
         * Set the state of the switch to On or Off, optionally animating the transition.
         *
         * @param isOn YES if the switch should be turned to the On position; NO if it
         * should be turned to the Off position. If the switch is already in the
         * designated position, nothing happens.
         * @param animated YES to animate the "flipping" of the switch; otherwise NO.
         */
        public void SetOn(bool isOn)
        {
            SetOn(isOn, false);
        }

        public void SetOn(bool isOn, bool animated)
        {
            bool bNotify = false;
            if (_on != isOn)
            {
                _on = isOn;
                bNotify = true;
            }
            // Tween the thumb toward the position for the new state.
            // NOTE(review): the tween runs whether or not "animated" is set —
            // the parameter is currently unused; confirm against upstream.
            _switchSprite.RunAction(
                new CCActionTween (
                    0.2f,
                    "sliderXPosition",
                    _switchSprite.SliderXPosition,
                    (_on) ? _switchSprite.OnPosition : _switchSprite.OffPosition
                    )
                );
            if (bNotify)
            {
                SendActionsForControlEvents(CCControlEvent.ValueChanged);
                if (OnValueChanged != null)
                {
                    OnValueChanged(this, _on);
                }
            }
        }

        public bool IsOn()
        {
            return _on;
        }

        public bool HasMoved()
        {
            return _moved;
        }

        public CCPoint LocationFromTouch(CCTouch touch)
        {
            CCPoint touchLocation = touch.Location;             // Get the touch position
            touchLocation = ConvertToNodeSpace(touchLocation);  // Convert to the node space of this class
            return touchLocation;
        }

        //events
        public override bool TouchBegan(CCTouch pTouch)
        {
            if (!IsTouchInside(pTouch) || !Enabled)
            {
                return false;
            }
            _moved = false;
            CCPoint location = LocationFromTouch(pTouch);
            _initialTouchXPosition = location.X - _switchSprite.SliderXPosition;
            // Darken the thumb while pressed.
            _switchSprite.ThumbSprite.Color = new CCColor3B(166, 166, 166);
            _switchSprite.NeedsLayout();
            return true;
        }

        public override void TouchMoved(CCTouch pTouch)
        {
            CCPoint location = LocationFromTouch(pTouch);
            location = new CCPoint(location.X - _initialTouchXPosition, 0);
            _moved = true;
            _switchSprite.SliderXPosition = location.X;
        }

        public override void TouchEnded(CCTouch pTouch)
        {
            CCPoint location = LocationFromTouch(pTouch);
            _switchSprite.ThumbSprite.Color = new CCColor3B(255, 255, 255);
            if (HasMoved())
            {
                // A drag settles to whichever half the thumb ended in.
                SetOn(!(location.X < _switchSprite.ContentSize.Width / 2), true);
            }
            else
            {
                // A tap just toggles.
                SetOn(!_on, true);
            }
        }

        public override void TouchCancelled(CCTouch pTouch)
        {
            CCPoint location =
LocationFromTouch(pTouch);
            _switchSprite.ThumbSprite.Color = new CCColor3B(255, 255, 255);
            if (HasMoved())
            {
                SetOn(!(location.X < _switchSprite.ContentSize.Width / 2), true);
            }
            else
            {
                SetOn(!_on, true);
            }
        }

        /** Sprite which represents the view. */
    }

    public class CCControlSwitchSprite : CCSprite, ICCActionTweenDelegate
    {
        private CCSprite _thumbSprite;
        private float _offPosition;
        private float _onPosition;
        // Current thumb offset; clamped to [_offPosition, _onPosition].
        private float _sliderXPosition;
        private CCSprite _maskSprite;
        private CCTexture2D _maskTexture;
        private CCLabelTTF _offLabel;
        private CCSprite _offSprite;
        private CCLabelTTF _onLabel;
        private CCSprite _onSprite;

        public CCControlSwitchSprite()
        {
            _sliderXPosition = 0.0f;
            _onPosition = 0.0f;
            _offPosition = 0.0f;
            _maskTexture = null;
            TextureLocation = 0;
            MaskLocation = 0;
            _onSprite = null;
            _offSprite = null;
            _thumbSprite = null;
            _onLabel = null;
            _offLabel = null;
        }

        public float OnPosition { get { return _onPosition; } set { _onPosition = value; } }

        public float OffPosition { get { return _offPosition; } set { _offPosition = value; } }

        public CCTexture2D MaskTexture { get { return _maskTexture; } set { _maskTexture = value; } }

        // Shader uniform locations from the GLES original; unused in the XNA port.
        public uint TextureLocation { get; set; }

        public uint MaskLocation { get; set; }

        public CCSprite OnSprite { get { return _onSprite; } set { _onSprite = value; } }

        public CCSprite OffSprite { get { return _offSprite; } set { _offSprite = value; } }

        public CCSprite ThumbSprite { get { return _thumbSprite; } set { _thumbSprite = value; } }

        public CCLabelTTF OnLabel { get { return _onLabel; } set { _onLabel = value; } }

        public CCLabelTTF OffLabel { get { return _offLabel; } set { _offLabel = value; } }

        // Setting the slider position clamps it to the valid range and
        // re-lays-out the composed sprite.
        public float SliderXPosition
        {
            get { return _sliderXPosition; }
            set
            {
                if (value <= _offPosition)
                {
                    // Off
                    value = _offPosition;
                }
                else if (value >= _onPosition)
                {
                    // On
                    value = _onPosition;
                }
                _sliderXPosition = value;
                NeedsLayout();
            }
        }

        public float OnSideWidth
        {
            get { return _onSprite.ContentSize.Width; }
        }

        public float OffSideWidth
        {
            get
            {
                return
// BUG FIX: was _offSprite.ContentSize.Height — a copy/paste slip. The
// property is named OffSideWidth and its twin OnSideWidth returns
// ContentSize.Width; upstream cocos2d-x offSideWidth() also returns width.
_offSprite.ContentSize.Width; } }

        #region CCActionTweenDelegate Members

        // Tween callback: drives SliderXPosition during the on/off animation.
        public virtual void UpdateTweenAction(float value, string key)
        {
            //CCLog.Log("key = {0}, value = {1}", key, value);
            SliderXPosition = value;
        }

        #endregion

        // Composes the switch from its piece sprites; called by the control's init.
        public bool InitWithMaskSprite(CCSprite maskSprite, CCSprite onSprite, CCSprite offSprite, CCSprite thumbSprite, CCLabelTTF onLabel, CCLabelTTF offLabel)
        {
            if (base.InitWithTexture(maskSprite.Texture))
            {
                // Sets the default values
                _onPosition = 0;
                _offPosition = -onSprite.ContentSize.Width + thumbSprite.ContentSize.Width / 2;
                _sliderXPosition = _onPosition;
                OnSprite = onSprite;
                OffSprite = offSprite;
                ThumbSprite = thumbSprite;
                OnLabel = onLabel;
                OffLabel = offLabel;
                AddChild(_thumbSprite);
                // Set up the mask with the Mask shader
                MaskTexture = maskSprite.Texture;
                // Original GLES shader setup kept for reference; the XNA port
                // composes via a render texture in NeedsLayout() instead.
                /* CCGLProgram* pProgram = new CCGLProgram(); pProgram->initWithVertexShaderByteArray(ccPositionTextureColor_vert, ccExSwitchMask_frag); setShaderProgram(pProgram); pProgram->release(); CHECK_GL_ERROR_DEBUG(); getShaderProgram()->addAttribute(kCCAttributeNamePosition, kCCVertexAttrib_Position); getShaderProgram()->addAttribute(kCCAttributeNameColor, kCCVertexAttrib_Color); getShaderProgram()->addAttribute(kCCAttributeNameTexCoord, kCCVertexAttrib_TexCoords); CHECK_GL_ERROR_DEBUG(); getShaderProgram()->link(); CHECK_GL_ERROR_DEBUG(); getShaderProgram()->updateUniforms(); CHECK_GL_ERROR_DEBUG(); m_uTextureLocation = glGetUniformLocation( getShaderProgram()->getProgram(), "u_texture"); m_uMaskLocation = glGetUniformLocation( getShaderProgram()->getProgram(), "u_mask"); CHECK_GL_ERROR_DEBUG(); */
                ContentSize = _maskTexture.ContentSize;
                NeedsLayout();
                return true;
            }
            return false;
        }

        public override void Draw()
        {
            CCDrawManager.BlendFunc(CCBlendFunc.AlphaBlend);
            CCDrawManager.BindTexture(Texture);
            CCDrawManager.DrawQuad(ref m_sQuad);
            // /*
            // CC_NODE_DRAW_SETUP();
            // ccGLEnableVertexAttribs(kCCVertexAttribFlag_PosColorTex);
            // ccGLBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
            //
// getShaderProgram()->setUniformForModelViewProjectionMatrix();
            // glActiveTexture(GL_TEXTURE0);
            // glBindTexture( GL_TEXTURE_2D, getTexture()->getName());
            // glUniform1i(m_uTextureLocation, 0);
            // glActiveTexture(GL_TEXTURE1);
            // glBindTexture( GL_TEXTURE_2D, m_pMaskTexture->getName() );
            // glUniform1i(m_uMaskLocation, 1);
            //#define kQuadSize sizeof(m_sQuad.bl)
            // long offset = (long)&m_sQuad;
            // // vertex
            // int diff = offsetof( ccV3F_C4B_T2F, vertices);
            // glVertexAttribPointer(kCCVertexAttrib_Position, 3, GL_FLOAT, GL_FALSE, kQuadSize, (void*) (offset + diff));
            // // texCoods
            // diff = offsetof( ccV3F_C4B_T2F, texCoords);
            // glVertexAttribPointer(kCCVertexAttrib_TexCoords, 2, GL_FLOAT, GL_FALSE, kQuadSize, (void*)(offset + diff));
            // // color
            // diff = offsetof( ccV3F_C4B_T2F, colors);
            // glVertexAttribPointer(kCCVertexAttrib_Color, 4, GL_UNSIGNED_BYTE, GL_TRUE, kQuadSize, (void*)(offset + diff));
            // glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
            // glActiveTexture(GL_TEXTURE0);
            // */
        }

        // Re-positions the piece sprites for the current slider position and
        // re-composes them (masked) into this sprite's texture via a
        // render-to-texture pass.
        public void NeedsLayout()
        {
            _onSprite.Position = new CCPoint(_onSprite.ContentSize.Width / 2 + _sliderXPosition,
                                             _onSprite.ContentSize.Height / 2);
            _offSprite.Position = new CCPoint(_onSprite.ContentSize.Width + _offSprite.ContentSize.Width / 2 + _sliderXPosition,
                                              _offSprite.ContentSize.Height / 2);
            _thumbSprite.Position = new CCPoint(_onSprite.ContentSize.Width + _sliderXPosition,
                                                _maskTexture.ContentSize.Height / 2);
            if (_onLabel != null)
            {
                _onLabel.Position = new CCPoint(_onSprite.Position.X - _thumbSprite.ContentSize.Width / 6,
                                                _onSprite.ContentSize.Height / 2);
            }
            if (_offLabel != null)
            {
                _offLabel.Position = new CCPoint(_offSprite.Position.X + _thumbSprite.ContentSize.Width / 6,
                                                 _offSprite.ContentSize.Height / 2);
            }
            // Render on/off sprites and labels into an offscreen target, then
            // multiply by the mask's alpha (GL_ZERO, GL_SRC_ALPHA).
            CCRenderTexture rt = new CCRenderTexture((int) _maskTexture.ContentSize.Width,
                                                     (int) _maskTexture.ContentSize.Height,
                                                     SurfaceFormat.Color, DepthFormat.None, RenderTargetUsage.DiscardContents);
            rt.BeginWithClear(0, 0, 0, 0);
            _onSprite.Visit();
            _offSprite.Visit();
            if (_onLabel != null)
            {
                _onLabel.Visit();
            }
            if (_offLabel != null)
            {
                _offLabel.Visit();
            }
            if (_maskSprite == null)
            {
                _maskSprite = new CCSprite(_maskTexture);
                _maskSprite.AnchorPoint = new CCPoint(0, 0);
                _maskSprite.BlendFunc = new CCBlendFunc(CCOGLES.GL_ZERO, CCOGLES.GL_SRC_ALPHA);
            }
            else
            {
                _maskSprite.Texture = _maskTexture;
            }
            _maskSprite.Visit();
            rt.End();
            Texture = rt.Sprite.Texture;
            //IsFlipY = true;
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Reflection;
using System.Diagnostics;
using System.Globalization;
using System.Collections.Generic;
using System.Runtime.Serialization;
using System.Reflection.Runtime.General;
using System.Reflection.Runtime.TypeInfos;
using System.Reflection.Runtime.ParameterInfos;

using Internal.Reflection.Core.Execution;
using Internal.Reflection.Tracing;

namespace System.Reflection.Runtime.MethodInfos
{
    //
    // The runtime's implementation of ConstructorInfo.
    //
    [Serializable]
    internal abstract partial class RuntimeConstructorInfo : ConstructorInfo, ISerializable
    {
        public abstract override MethodAttributes Attributes { get; }

        public abstract override CallingConventions CallingConvention { get; }

        // A constructor has no generic parameters of its own; it is "open"
        // exactly when its declaring type is.
        public sealed override bool ContainsGenericParameters
        {
            get { return DeclaringType.ContainsGenericParameters; }
        }

        public abstract override IEnumerable<CustomAttributeData> CustomAttributes { get; }

        public abstract override Type DeclaringType { get; }

        public sealed override Type[] GetGenericArguments()
        {
            // Constructors cannot be generic. Desktop compat dictates that We throw NotSupported rather than returning a 0-length array.
            throw new NotSupportedException();
        }

        public sealed override MethodBody GetMethodBody()
        {
            throw new PlatformNotSupportedException();
        }

        // ISerializable support: serializes via MemberInfoSerializationHolder.
        public void GetObjectData(SerializationInfo info, StreamingContext context)
        {
            if (info == null)
                throw new ArgumentNullException(nameof(info));
            MemberInfoSerializationHolder.GetSerializationInfo(info, this);
        }

        public sealed override ParameterInfo[] GetParameters()
        {
#if ENABLE_REFLECTION_TRACE
            if (ReflectionTrace.Enabled)
                ReflectionTrace.MethodBase_GetParameters(this);
#endif
            RuntimeParameterInfo[] parameters = RuntimeParameters;
            if (parameters.Length == 0)
                return Array.Empty<ParameterInfo>();
            // Copy so callers cannot mutate the cached array.
            ParameterInfo[] result = new ParameterInfo[parameters.Length];
            for (int i = 0; i < result.Length; i++)
                result[i] = parameters[i];
            return result;
        }

        // Returns the cached array directly; callers must not mutate it.
        public sealed override ParameterInfo[] GetParametersNoCopy()
        {
            return RuntimeParameters;
        }

        public abstract override object Invoke(BindingFlags invokeAttr, Binder binder, object[] parameters, CultureInfo culture);

        [DebuggerGuidedStepThrough]
        public sealed override object Invoke(object obj, BindingFlags invokeAttr, Binder binder, object[] parameters, CultureInfo culture)
        {
#if ENABLE_REFLECTION_TRACE
            if (ReflectionTrace.Enabled)
                ReflectionTrace.MethodBase_Invoke(this, obj, parameters);
#endif
            if (parameters == null)
                parameters = Array.Empty<Object>();
            MethodInvoker methodInvoker;
            try
            {
                methodInvoker = this.MethodInvoker;
            }
            catch (Exception)
            {
                //
                // Project N compat note: On the desktop, ConstructorInfo.Invoke(Object[]) specifically forbids invoking static constructors (and
                // for us, that check is embedded inside the MethodInvoker property call.) Howver, MethodBase.Invoke(Object, Object[]) allows it. This was
                // probably an oversight on the desktop. We choose not to support this loophole on Project N for the following reasons:
                //
                //  1. The Project N toolchain aggressively replaces static constructors with static initialization data whenever possible.
                //     So the static constructor may no longer exist.
                //
                //  2. Invoking the static constructor through Reflection is not very useful as it invokes the static constructor whether or not
                //     it was already run. Since static constructors are specifically one-shot deals, this will almost certainly mess up the
                //     type's internal assumptions.
                //
                if (this.IsStatic)
                    throw new PlatformNotSupportedException(SR.Acc_NotClassInit);
                throw;
            }
            object result = methodInvoker.Invoke(obj, parameters, binder, invokeAttr, culture);
            System.Diagnostics.DebugAnnotations.PreviousCallContainsDebuggerStepInCode();
            return result;
        }

        public abstract override MethodBase MetadataDefinitionMethod { get; }

        public abstract override int MetadataToken { get; }

        public sealed override Module Module
        {
            get { return DeclaringType.Module; }
        }

        // Constructors are never generic (see GetGenericArguments above).
        public sealed override bool IsConstructedGenericMethod
        {
            get { return false; }
        }

        public sealed override bool IsGenericMethod
        {
            get { return false; }
        }

        public sealed override bool IsGenericMethodDefinition
        {
            get { return false; }
        }

        public abstract override MethodImplAttributes MethodImplementationFlags { get; }

        public abstract override String Name { get; }

        public abstract override bool Equals(Object obj);

        public abstract override int GetHashCode();

        public sealed override Type ReflectedType
        {
            get
            {
                // Constructors are always looked up as if BindingFlags.DeclaredOnly were specified. Thus, the ReflectedType will always be the DeclaringType.
                return DeclaringType;
            }
        }

        public abstract override String ToString();

        public abstract override RuntimeMethodHandle MethodHandle { get; }

        // Lazily created, cached invoker; benign race — worst case the
        // invoker is computed more than once.
        protected MethodInvoker MethodInvoker
        {
            get
            {
                if (_lazyMethodInvoker == null)
                {
                    _lazyMethodInvoker = UncachedMethodInvoker;
                }
                return _lazyMethodInvoker;
            }
        }

        internal IntPtr LdFtnResult => MethodInvoker.LdFtnResult;

        protected abstract RuntimeParameterInfo[] RuntimeParameters { get; }

        protected abstract MethodInvoker UncachedMethodInvoker { get; }

        private volatile MethodInvoker _lazyMethodInvoker = null;
    }
}
// NOTE(review): SubSonic-generated wrapper — regenerate rather than hand-edit.
using System;
using System.Text;
using System.Data;
using System.Data.SqlClient;
using System.Data.Common;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Configuration;
using System.Xml;
using System.Xml.Serialization;
using SubSonic;
using SubSonic.Utilities;

namespace DalSic{
    /// <summary>
    /// Strongly-typed collection for the VwLabScreeningHojaTrabajo class.
    /// </summary>
    [Serializable]
    public partial class VwLabScreeningHojaTrabajoCollection : ReadOnlyList<VwLabScreeningHojaTrabajo, VwLabScreeningHojaTrabajoCollection>
    {
        public VwLabScreeningHojaTrabajoCollection() {}
    }

    /// <summary>
    /// This is Read-only wrapper class for the vw_LAB_ScreeningHojaTrabajo view.
    /// </summary>
    [Serializable]
    public partial class VwLabScreeningHojaTrabajo : ReadOnlyRecord<VwLabScreeningHojaTrabajo>, IReadOnlyRecord
    {
        #region Default Settings
        protected static void SetSQLProps()
        {
            GetTableSchema();
        }
        #endregion

        #region Schema Accessor
        public static TableSchema.Table Schema
        {
            get
            {
                if (BaseSchema == null)
                {
                    SetSQLProps();
                }
                return BaseSchema;
            }
        }

        // Builds the view's schema once and registers it with the provider.
        private static void GetTableSchema()
        {
            if(!IsSchemaInitialized)
            {
                //Schema declaration
                TableSchema.Table schema = new TableSchema.Table("vw_LAB_ScreeningHojaTrabajo", TableType.View, DataService.GetInstance("sicProvider"));
                schema.Columns = new TableSchema.TableColumnCollection();
                schema.SchemaName = @"dbo";
                //columns
                TableSchema.TableColumn colvarNumero = new TableSchema.TableColumn(schema);
                colvarNumero.ColumnName = "numero";
                colvarNumero.DataType = DbType.Int32;
                colvarNumero.MaxLength = 0;
                colvarNumero.AutoIncrement = false;
                colvarNumero.IsNullable = false;
                colvarNumero.IsPrimaryKey = false;
                colvarNumero.IsForeignKey = false;
                colvarNumero.IsReadOnly = false;
                schema.Columns.Add(colvarNumero);

                TableSchema.TableColumn colvarIditem = new TableSchema.TableColumn(schema);
                colvarIditem.ColumnName = "iditem";
                colvarIditem.DataType = DbType.Int32;
                colvarIditem.MaxLength = 0;
                colvarIditem.AutoIncrement = false;
                colvarIditem.IsNullable = false;
                colvarIditem.IsPrimaryKey = false;
                colvarIditem.IsForeignKey = false;
                colvarIditem.IsReadOnly = false;
                schema.Columns.Add(colvarIditem);

                TableSchema.TableColumn colvarItem = new TableSchema.TableColumn(schema);
                colvarItem.ColumnName = "item";
                colvarItem.DataType = DbType.String;
                colvarItem.MaxLength = 50;
                colvarItem.AutoIncrement = false;
                colvarItem.IsNullable = false;
                colvarItem.IsPrimaryKey = false;
                colvarItem.IsForeignKey = false;
                colvarItem.IsReadOnly = false;
                schema.Columns.Add(colvarItem);

                TableSchema.TableColumn colvarResultado = new TableSchema.TableColumn(schema);
                colvarResultado.ColumnName = "resultado";
                colvarResultado.DataType = DbType.String;
                colvarResultado.MaxLength = 500;
                colvarResultado.AutoIncrement = false;
                colvarResultado.IsNullable = false;
                colvarResultado.IsPrimaryKey = false;
                colvarResultado.IsForeignKey = false;
                colvarResultado.IsReadOnly = false;
                schema.Columns.Add(colvarResultado);

                BaseSchema = schema;
                //add this schema to the provider
                //so we can query it later
                DataService.Providers["sicProvider"].AddSchema("vw_LAB_ScreeningHojaTrabajo",schema);
            }
        }
        #endregion

        #region Query Accessor
        public static Query CreateQuery()
        {
            return new Query(Schema);
        }
        #endregion

        #region .ctors
        public VwLabScreeningHojaTrabajo()
        {
            SetSQLProps();
            SetDefaults();
            MarkNew();
        }

        public VwLabScreeningHojaTrabajo(bool useDatabaseDefaults)
        {
            SetSQLProps();
            if(useDatabaseDefaults)
            {
                ForceDefaults();
            }
            MarkNew();
        }

        public VwLabScreeningHojaTrabajo(object keyID)
        {
            SetSQLProps();
            LoadByKey(keyID);
        }

        public VwLabScreeningHojaTrabajo(string columnName, object columnValue)
        {
            SetSQLProps();
            LoadByParam(columnName,columnValue);
        }
        #endregion

        #region Props
        [XmlAttribute("Numero")]
        [Bindable(true)]
        public int Numero
        {
            get { return GetColumnValue<int>("numero"); }
            set { SetColumnValue("numero", value); }
        }

        [XmlAttribute("Iditem")]
        [Bindable(true)]
        public int Iditem
        {
            get { return GetColumnValue<int>("iditem"); }
            set { SetColumnValue("iditem", value); }
        }

        [XmlAttribute("Item")]
        [Bindable(true)]
        public string Item
        {
            get { return GetColumnValue<string>("item"); }
            set { SetColumnValue("item", value); }
        }

        [XmlAttribute("Resultado")]
        [Bindable(true)]
        public string Resultado
        {
            get { return GetColumnValue<string>("resultado"); }
            set { SetColumnValue("resultado", value); }
        }
        #endregion

        #region Columns Struct
        // Column-name constants for building queries without magic strings.
        public struct Columns
        {
            public static string Numero = @"numero";
            public static string Iditem = @"iditem";
            public static string Item = @"item";
            public static string Resultado = @"resultado";
        }
        #endregion

        #region IAbstractRecord Members
        public new CT GetColumnValue<CT>(string columnName)
        {
            return base.GetColumnValue<CT>(columnName);
        }
        public object GetColumnValue(string columnName)
        {
            return base.GetColumnValue<object>(columnName);
        }
        #endregion
    }
}
using System;
using System.Globalization;
using Orleans.Core;
using Orleans.Serialization;

namespace Orleans.Runtime
{
    /// <summary>
    /// Identity of a grain, client, or system target, backed by an interned <see cref="UniqueKey"/>.
    /// Instances are canonicalized through an intern cache, so equal keys normally yield the same
    /// <see cref="GrainId"/> instance.
    /// </summary>
    [Serializable]
    internal class GrainId : UniqueIdentifier, IEquatable<GrainId>, IGrainIdentity
    {
        private static readonly object lockable = new object();
        private const int INTERN_CACHE_INITIAL_SIZE = InternerConstants.SIZE_LARGE;
        private static readonly TimeSpan internCacheCleanupInterval = InternerConstants.DefaultCacheCleanupFreq;
        // volatile: FindOrCreateGrainId reads this field outside the lock (double-checked
        // lazy initialization), so the write must be safely published to other threads.
        private static volatile Interner<UniqueKey, GrainId> grainIdInternCache;

        /// <summary>The category (Grain, Client, SystemTarget, ...) encoded in the key.</summary>
        public UniqueKey.Category Category { get { return Key.IdCategory; } }

        public bool IsSystemTarget { get { return Key.IsSystemTargetKey; } }

        public bool IsGrain { get { return Category == UniqueKey.Category.Grain || Category == UniqueKey.Category.KeyExtGrain; } }

        public bool IsClient { get { return Category == UniqueKey.Category.Client || Category == UniqueKey.Category.GeoClient; } }

        // Private: all instances must be obtained via FindOrCreateGrainId so they are interned.
        private GrainId(UniqueKey key)
            : base(key)
        {
        }

        /// <summary>Creates a new random grain id of category Grain.</summary>
        public static GrainId NewId()
        {
            return FindOrCreateGrainId(UniqueKey.NewKey(Guid.NewGuid(), UniqueKey.Category.Grain));
        }

        /// <summary>Creates a new random client id; GeoClient when a cluster id is supplied.</summary>
        public static GrainId NewClientId(string clusterId = null)
        {
            return NewClientId(Guid.NewGuid(), clusterId);
        }

        internal static GrainId NewClientId(Guid id, string clusterId = null)
        {
            return FindOrCreateGrainId(UniqueKey.NewKey(id,
                clusterId == null ? UniqueKey.Category.Client : UniqueKey.Category.GeoClient, 0, clusterId));
        }

        internal static GrainId GetGrainId(UniqueKey key)
        {
            return FindOrCreateGrainId(key);
        }

        internal static GrainId GetSystemGrainId(Guid guid)
        {
            return FindOrCreateGrainId(UniqueKey.NewKey(guid, UniqueKey.Category.SystemGrain));
        }

        // For testing only.
        internal static GrainId GetGrainIdForTesting(Guid guid)
        {
            return FindOrCreateGrainId(UniqueKey.NewKey(guid, UniqueKey.Category.None));
        }

        internal static GrainId NewSystemTargetGrainIdByTypeCode(int typeData)
        {
            return FindOrCreateGrainId(UniqueKey.NewSystemTargetKey(Guid.NewGuid(), typeData));
        }

        internal static GrainId GetSystemTargetGrainId(short systemGrainId)
        {
            return FindOrCreateGrainId(UniqueKey.NewSystemTargetKey(systemGrainId));
        }

        /// <summary>Grain id from a long primary key; KeyExtGrain when a key extension is supplied.</summary>
        internal static GrainId GetGrainId(long typeCode, long primaryKey, string keyExt = null)
        {
            return FindOrCreateGrainId(UniqueKey.NewKey(primaryKey,
                keyExt == null ? UniqueKey.Category.Grain : UniqueKey.Category.KeyExtGrain, typeCode, keyExt));
        }

        /// <summary>Grain id from a Guid primary key; KeyExtGrain when a key extension is supplied.</summary>
        internal static GrainId GetGrainId(long typeCode, Guid primaryKey, string keyExt = null)
        {
            return FindOrCreateGrainId(UniqueKey.NewKey(primaryKey,
                keyExt == null ? UniqueKey.Category.Grain : UniqueKey.Category.KeyExtGrain, typeCode, keyExt));
        }

        /// <summary>Grain id whose primary key is a string, stored as the key extension (numeric key is 0).</summary>
        internal static GrainId GetGrainId(long typeCode, string primaryKey)
        {
            return FindOrCreateGrainId(UniqueKey.NewKey(0L, UniqueKey.Category.KeyExtGrain, typeCode, primaryKey));
        }

        public Guid PrimaryKey
        {
            get { return GetPrimaryKey(); }
        }

        public long PrimaryKeyLong
        {
            get { return GetPrimaryKeyLong(); }
        }

        public string PrimaryKeyString
        {
            get { return GetPrimaryKeyString(); }
        }

        public string IdentityString
        {
            get { return ToDetailedString(); }
        }

        public bool IsLongKey
        {
            get { return Key.IsLongKey; }
        }

        public long GetPrimaryKeyLong(out string keyExt)
        {
            return Key.PrimaryKeyToLong(out keyExt);
        }

        internal long GetPrimaryKeyLong()
        {
            return Key.PrimaryKeyToLong();
        }

        public Guid GetPrimaryKey(out string keyExt)
        {
            return Key.PrimaryKeyToGuid(out keyExt);
        }

        internal Guid GetPrimaryKey()
        {
            return Key.PrimaryKeyToGuid();
        }

        internal string GetPrimaryKeyString()
        {
            string key;
            // The Guid portion is irrelevant here; we only need the key-extension string.
            GetPrimaryKey(out key);
            return key;
        }

        internal int GetTypeCode()
        {
            return Key.BaseTypeCode;
        }

        private static GrainId FindOrCreateGrainId(UniqueKey key)
        {
            // Note: Lazy initialization is done here (rather than in a static initializer) to avoid a
            // weird cyclic dependency / static initialization ordering problem involving the
            // GrainId, Constants & Interner classes.
            if (grainIdInternCache != null) return grainIdInternCache.FindOrCreate(key, () => new GrainId(key));

            lock (lockable)
            {
                if (grainIdInternCache == null)
                {
                    grainIdInternCache = new Interner<UniqueKey, GrainId>(INTERN_CACHE_INITIAL_SIZE, internCacheCleanupInterval);
                }
            }
            return grainIdInternCache.FindOrCreate(key, () => new GrainId(key));
        }

        #region IEquatable<GrainId> Members

        public bool Equals(GrainId other)
        {
            return other != null && Key.Equals(other.Key);
        }

        #endregion

        public override bool Equals(UniqueIdentifier obj)
        {
            var o = obj as GrainId;
            return o != null && Key.Equals(o.Key);
        }

        public override bool Equals(object obj)
        {
            var o = obj as GrainId;
            return o != null && Key.Equals(o.Key);
        }

        // Keep compiler happy -- it does not like classes to have Equals(...) without GetHashCode() methods
        public override int GetHashCode()
        {
            return Key.GetHashCode();
        }

        /// <summary>
        /// Get a uniformly distributed hash code value for this grain, based on Jenkins Hash function.
        /// NOTE: Hash code value may be positive or NEGATIVE.
        /// </summary>
        /// <returns>Hash code for this GrainId</returns>
        public uint GetUniformHashCode()
        {
            return Key.GetUniformHashCode();
        }

        public override string ToString()
        {
            return ToStringImpl(false);
        }

        // same as ToString, just full primary key and type code
        internal string ToDetailedString()
        {
            return ToStringImpl(true);
        }

        // same as ToString, just full primary key and type code
        private string ToStringImpl(bool detailed)
        {
            // Well-known system grains render as their registered name.
            string name = string.Empty;
            if (Constants.TryGetSystemGrainName(this, out name))
            {
                return name;
            }

            var keyString = Key.ToString();
            // this should grab the least-significant half of n1, suffixing it with the key extension.
            string idString = keyString;
            if (!detailed)
            {
                if (keyString.Length >= 48)
                    idString = keyString.Substring(24, 8) + keyString.Substring(48);
                else
                    idString = keyString.Substring(24, 8);
            }

            string fullString = null;
            switch (Category)
            {
                case UniqueKey.Category.Grain:
                case UniqueKey.Category.KeyExtGrain:
                    var typeString = GetTypeCode().ToString("X");
                    if (!detailed) typeString = typeString.Tail(8);
                    fullString = String.Format("*grn/{0}/{1}", typeString, idString);
                    break;
                case UniqueKey.Category.Client:
                    fullString = "*cli/" + idString;
                    break;
                case UniqueKey.Category.GeoClient:
                    fullString = string.Format("*gcl/{0}/{1}", Key.KeyExt, idString);
                    break;
                case UniqueKey.Category.SystemTarget:
                    string explicitName = Constants.SystemTargetName(this);
                    if (GetTypeCode() != 0)
                    {
                        var typeStr = GetTypeCode().ToString("X");
                        return String.Format("{0}/{1}/{2}", explicitName, typeStr, idString);
                    }
                    fullString = explicitName;
                    break;
                default:
                    fullString = "???/" + idString;
                    break;
            }
            return detailed ? String.Format("{0}-0x{1, 8:X8}", fullString, GetUniformHashCode()) : fullString;
        }

        // Verbose diagnostic form showing category, type code, primary key (decimal and hex) and hash.
        internal string ToFullString()
        {
            string kx;
            string pks =
                Key.IsLongKey ?
                    GetPrimaryKeyLong(out kx).ToString(CultureInfo.InvariantCulture) :
                    GetPrimaryKey(out kx).ToString();
            string pksHex =
                Key.IsLongKey ?
                    GetPrimaryKeyLong(out kx).ToString("X") :
                    GetPrimaryKey(out kx).ToString("X");
            return
                String.Format(
                    "[GrainId: {0}, IdCategory: {1}, BaseTypeCode: {2} (x{3}), PrimaryKey: {4} (x{5}), UniformHashCode: {6} (0x{7, 8:X8}){8}]",
                    ToDetailedString(),                // 0
                    Category,                          // 1
                    GetTypeCode(),                     // 2
                    GetTypeCode().ToString("X"),       // 3
                    pks,                               // 4
                    pksHex,                            // 5
                    GetUniformHashCode(),              // 6
                    GetUniformHashCode(),              // 7
                    Key.HasKeyExt ? String.Format(", KeyExtension: {0}", kx) : ""); // 8
        }

        internal string ToStringWithHashCode()
        {
            return String.Format("{0}-0x{1, 8:X8}", this.ToString(), this.GetUniformHashCode());
        }

        /// <summary>
        /// Return this GrainId in a standard string form, suitable for later use with the <c>FromParsableString</c> method.
        /// </summary>
        /// <returns>GrainId in a standard string format.</returns>
        internal string ToParsableString()
        {
            // NOTE: This function must be the "inverse" of FromParsableString, and data must round-trip reliably.
            return Key.ToHexString();
        }

        /// <summary>
        /// Create a new GrainId object by parsing string in a standard form returned from <c>ToParsableString</c> method.
        /// </summary>
        /// <param name="grainId">String containing the GrainId info to be parsed.</param>
        /// <returns>New GrainId object created from the input data.</returns>
        internal static GrainId FromParsableString(string grainId)
        {
            // NOTE: This function must be the "inverse" of ToParsableString, and data must round-trip reliably.
            var key = UniqueKey.Parse(grainId);
            return FindOrCreateGrainId(key);
        }

        // Binary round-trip via the Orleans token stream serializer; inverse of FromByteArray.
        internal byte[] ToByteArray()
        {
            var writer = new BinaryTokenStreamWriter();
            writer.Write(this);
            return writer.ToByteArray();
        }

        internal static GrainId FromByteArray(byte[] byteArray)
        {
            var reader = new BinaryTokenStreamReader(byteArray);
            return reader.ReadGrainId();
        }
    }
}
using System;
using System.Threading.Tasks;
using Orleans.Hosting;
using Orleans.Runtime;
using Orleans.Streams;
using Orleans.TestingHost;
using TestExtensions;
using Xunit;

namespace UnitTests.StreamingTests
{
    /// <summary>
    /// Storage-fault-injection tests for the pub/sub rendezvous grain: after a simulated
    /// storage write/clear failure, the grain must surface an <see cref="OrleansException"/>
    /// to the caller and then recover and keep functioning on the next call.
    /// </summary>
    public class PubSubRendezvousGrainTests : OrleansTestingBase, IClassFixture<PubSubRendezvousGrainTests.Fixture>
    {
        private readonly Fixture fixture;

        public class Fixture : BaseTestClusterFixture
        {
            protected override void ConfigureTestCluster(TestClusterBuilder builder)
            {
                builder.AddSiloBuilderConfigurator<SiloHostConfigurator>();
            }

            public class SiloHostConfigurator : ISiloConfigurator
            {
                public void Configure(ISiloBuilder hostBuilder)
                {
                    // Fault-injectable storage provider backing the pub/sub state.
                    hostBuilder.AddFaultInjectionMemoryStorage("PubSubStore");
                }
            }
        }

        public PubSubRendezvousGrainTests(Fixture fixture)
        {
            this.fixture = fixture;
        }

        [Fact, TestCategory("BVT"), TestCategory("Streaming"), TestCategory("PubSub")]
        public async Task RegisterConsumerFaultTest()
        {
            this.fixture.Logger.Info("************************ RegisterConsumerFaultTest *********************************");
            var streamId = new InternalStreamId("ProviderName", StreamId.Create("StreamNamespace", Guid.NewGuid()));
            var pubSubGrain = this.fixture.GrainFactory.GetGrain<IPubSubRendezvousGrain>(streamId.ToString());
            var faultGrain = this.fixture.GrainFactory.GetGrain<IStorageFaultGrain>(typeof(PubSubRendezvousGrain).FullName);

            // clean call, to make sure everything is happy and pubsub has state.
            await pubSubGrain.RegisterConsumer(GuidId.GetGuidId(Guid.NewGuid()), streamId, null, null);
            int consumers = await pubSubGrain.ConsumerCount(streamId);
            Assert.Equal(1, consumers);

            // inject fault
            await faultGrain.AddFaultOnWrite(pubSubGrain as GrainReference, new ApplicationException("Write"));

            // expect exception when registering a new consumer
            await Assert.ThrowsAsync<OrleansException>(
                () => pubSubGrain.RegisterConsumer(GuidId.GetGuidId(Guid.NewGuid()), streamId, null, null));

            // pubsub grain should recover and still function
            await pubSubGrain.RegisterConsumer(GuidId.GetGuidId(Guid.NewGuid()), streamId, null, null);
            consumers = await pubSubGrain.ConsumerCount(streamId);
            Assert.Equal(2, consumers);
        }

        [Fact, TestCategory("BVT"), TestCategory("Streaming"), TestCategory("PubSub")]
        public async Task UnregisterConsumerFaultTest()
        {
            this.fixture.Logger.Info("************************ UnregisterConsumerFaultTest *********************************");
            var streamId = new InternalStreamId("ProviderName", StreamId.Create("StreamNamespace", Guid.NewGuid()));
            var pubSubGrain = this.fixture.GrainFactory.GetGrain<IPubSubRendezvousGrain>(streamId.ToString());
            var faultGrain = this.fixture.GrainFactory.GetGrain<IStorageFaultGrain>(typeof(PubSubRendezvousGrain).FullName);

            // Add two consumers so when we remove the first it does a storage write, not a storage clear.
            GuidId subscriptionId1 = GuidId.GetGuidId(Guid.NewGuid());
            GuidId subscriptionId2 = GuidId.GetGuidId(Guid.NewGuid());
            await pubSubGrain.RegisterConsumer(subscriptionId1, streamId, null, null);
            await pubSubGrain.RegisterConsumer(subscriptionId2, streamId, null, null);
            int consumers = await pubSubGrain.ConsumerCount(streamId);
            Assert.Equal(2, consumers);

            // inject fault
            await faultGrain.AddFaultOnWrite(pubSubGrain as GrainReference, new ApplicationException("Write"));

            // expect exception when unregistering a consumer
            await Assert.ThrowsAsync<OrleansException>(
                () => pubSubGrain.UnregisterConsumer(subscriptionId1, streamId));

            // pubsub grain should recover and still function
            await pubSubGrain.UnregisterConsumer(subscriptionId1, streamId);
            consumers = await pubSubGrain.ConsumerCount(streamId);
            Assert.Equal(1, consumers);

            // inject clear fault, because removing last consumer should trigger a clear storage call.
            await faultGrain.AddFaultOnClear(pubSubGrain as GrainReference, new ApplicationException("Write"));

            // expect exception when unregistering a consumer
            await Assert.ThrowsAsync<OrleansException>(
                () => pubSubGrain.UnregisterConsumer(subscriptionId2, streamId));

            // pubsub grain should recover and still function
            await pubSubGrain.UnregisterConsumer(subscriptionId2, streamId);
            consumers = await pubSubGrain.ConsumerCount(streamId);
            Assert.Equal(0, consumers);
        }

        /// <summary>
        /// This test fails because the producer must be grain reference which is not implied by the IStreamProducerExtension in the producer management calls.
        /// TODO: Fix rendezvous implementation.
        /// </summary>
        /// <returns></returns>
        [Fact(Skip = "This test fails because the producer must be grain reference which is not implied by the IStreamProducerExtension"), TestCategory("BVT"), TestCategory("Streaming"), TestCategory("PubSub")]
        public async Task RegisterProducerFaultTest()
        {
            this.fixture.Logger.Info("************************ RegisterProducerFaultTest *********************************");
            var streamId = new InternalStreamId("ProviderName", StreamId.Create("StreamNamespace", Guid.NewGuid()));
            var pubSubGrain = this.fixture.GrainFactory.GetGrain<IPubSubRendezvousGrain>(streamId.ToString());
            var faultGrain = this.fixture.GrainFactory.GetGrain<IStorageFaultGrain>(typeof(PubSubRendezvousGrain).FullName);

            // clean call, to make sure everything is happy and pubsub has state.
            await pubSubGrain.RegisterProducer(streamId, null);
            int producers = await pubSubGrain.ProducerCount(streamId);
            Assert.Equal(1, producers);

            // inject fault
            await faultGrain.AddFaultOnWrite(pubSubGrain as GrainReference, new ApplicationException("Write"));

            // expect exception when registering a new producer
            await Assert.ThrowsAsync<OrleansException>(
                () => pubSubGrain.RegisterProducer(streamId, null));

            // pubsub grain should recover and still function
            await pubSubGrain.RegisterProducer(streamId, null);
            producers = await pubSubGrain.ProducerCount(streamId);
            Assert.Equal(2, producers);
        }

        /// <summary>
        /// This test fails because the producer must be grain reference which is not implied by the IStreamProducerExtension in the producer management calls.
        /// TODO: Fix rendezvous implementation.
        /// </summary>
        [Fact(Skip = "This test fails because the producer must be grain reference which is not implied by the IStreamProducerExtension"), TestCategory("BVT"), TestCategory("Streaming"), TestCategory("PubSub")]
        public async Task UnregisterProducerFaultTest()
        {
            this.fixture.Logger.Info("************************ UnregisterProducerFaultTest *********************************");
            var streamId = new InternalStreamId("ProviderName", StreamId.Create("StreamNamespace", Guid.NewGuid()));
            var pubSubGrain = this.fixture.GrainFactory.GetGrain<IPubSubRendezvousGrain>(streamId.ToString());
            var faultGrain = this.fixture.GrainFactory.GetGrain<IStorageFaultGrain>(typeof(PubSubRendezvousGrain).FullName);

            IStreamProducerExtension firstProducer = new DummyStreamProducerExtension();
            IStreamProducerExtension secondProducer = new DummyStreamProducerExtension();

            // Add two producers so when we remove the first it does a storage write, not a storage clear.
            await pubSubGrain.RegisterProducer(streamId, firstProducer);
            await pubSubGrain.RegisterProducer(streamId, secondProducer);
            int producers = await pubSubGrain.ProducerCount(streamId);
            Assert.Equal(2, producers);

            // inject fault
            await faultGrain.AddFaultOnWrite(pubSubGrain as GrainReference, new ApplicationException("Write"));

            // expect exception when unregistering a producer
            await Assert.ThrowsAsync<OrleansException>(
                () => pubSubGrain.UnregisterProducer(streamId, firstProducer));

            // pubsub grain should recover and still function
            await pubSubGrain.UnregisterProducer(streamId, firstProducer);
            producers = await pubSubGrain.ProducerCount(streamId);
            Assert.Equal(1, producers);

            // inject clear fault, because removing last producers should trigger a clear storage call.
            await faultGrain.AddFaultOnClear(pubSubGrain as GrainReference, new ApplicationException("Write"));

            // expect exception when unregistering a producer
            await Assert.ThrowsAsync<OrleansException>(
                () => pubSubGrain.UnregisterProducer(streamId, secondProducer));

            // pubsub grain should recover and still function
            await pubSubGrain.UnregisterProducer(streamId, secondProducer);
            // BUGFIX: was ConsumerCount (copy-paste from the consumer test); this test tracks producers.
            producers = await pubSubGrain.ProducerCount(streamId);
            Assert.Equal(0, producers);
        }

        /// <summary>
        /// Minimal stand-in producer extension: no-op subscriber management with
        /// identity based on a per-instance Guid so two instances are distinct.
        /// </summary>
        [Serializable]
        [Orleans.GenerateSerializer]
        private class DummyStreamProducerExtension : IStreamProducerExtension
        {
            [Orleans.Id(0)]
            private readonly Guid id;

            public DummyStreamProducerExtension()
            {
                id = Guid.NewGuid();
            }

            public Task AddSubscriber(GuidId subscriptionId, InternalStreamId streamId, IStreamConsumerExtension streamConsumer, string filterData)
            {
                return Task.CompletedTask;
            }

            public Task RemoveSubscriber(GuidId subscriptionId, InternalStreamId streamId)
            {
                return Task.CompletedTask;
            }

            public override bool Equals(object obj)
            {
                if (ReferenceEquals(null, obj)) return false;
                if (ReferenceEquals(this, obj)) return true;
                if (obj.GetType() != GetType()) return false;
                return Equals((DummyStreamProducerExtension)obj);
            }

            public override int GetHashCode()
            {
                return id.GetHashCode();
            }

            private bool Equals(DummyStreamProducerExtension other)
            {
                return id.Equals(other.id);
            }
        }
    }
}
// Python Tools for Visual Studio
// Copyright(c) Microsoft Corporation
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the License); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS
// OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY
// IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
//
// See the Apache Version 2.0 License for specific language governing
// permissions and limitations under the License.

using System;
using System.Collections.Generic;
using System.ComponentModel.Composition;
using System.Windows.Media;
using Microsoft.PythonTools.Parsing;
using Microsoft.PythonTools.Project;
using Microsoft.VisualStudio.Language.StandardClassification;
using Microsoft.VisualStudio.Shell;
using Microsoft.VisualStudio.Text;
using Microsoft.VisualStudio.Text.Classification;
using Microsoft.VisualStudio.Utilities;

namespace Microsoft.PythonTools {
    /// <summary>
    /// Implements classification of text by using a ScriptEngine which supports the
    /// TokenCategorizer service.
    ///
    /// Languages should subclass this type and override the Engine property. They
    /// should then export the provider using MEF indicating the content type
    /// which it is applicable to.
    /// </summary>
    [Export(typeof(IClassifierProvider)), ContentType(PythonCoreConstants.ContentType)]
    internal class PythonClassifierProvider : IClassifierProvider {
        // Lazily populated by FillCategoryMap on the first GetClassifier call;
        // maps tokenizer categories to editor classification types.
        private Dictionary<TokenCategory, IClassificationType> _categoryMap;
        // Cached classification types, captured while building the category map.
        private IClassificationType _comment;
        private IClassificationType _stringLiteral;
        private IClassificationType _keyword;
        private IClassificationType _operator;
        private IClassificationType _groupingClassification;
        private IClassificationType _dotClassification;
        private IClassificationType _commaClassification;
        // Content type this provider classifies (the Python content type).
        private readonly IContentType _type;
        internal readonly IServiceProvider _serviceProvider;

        [ImportingConstructor]
        public PythonClassifierProvider(IContentTypeRegistryService contentTypeRegistryService, [Import(typeof(SVsServiceProvider))]IServiceProvider serviceProvider) {
            _type = contentTypeRegistryService.GetContentType(PythonCoreConstants.ContentType);
            _serviceProvider = serviceProvider;
        }

        /// <summary>
        /// Import the classification registry to be used for getting a reference
        /// to the custom classification type later.
        /// </summary>
        [Import]
        public IClassificationTypeRegistryService _classificationRegistry = null; // Set via MEF

        #region Python Classification Type Definitions

        // MEF-exported classification type definitions; the fields themselves are
        // never assigned in code -- MEF reads the attributes to register the types.

        [Export]
        [Name(PythonPredefinedClassificationTypeNames.Grouping)]
        [BaseDefinition(PythonPredefinedClassificationTypeNames.Operator)]
        internal static ClassificationTypeDefinition GroupingClassificationDefinition = null; // Set via MEF

        [Export]
        [Name(PythonPredefinedClassificationTypeNames.Dot)]
        [BaseDefinition(PythonPredefinedClassificationTypeNames.Operator)]
        internal static ClassificationTypeDefinition DotClassificationDefinition = null; // Set via MEF

        [Export]
        [Name(PythonPredefinedClassificationTypeNames.Comma)]
        [BaseDefinition(PythonPredefinedClassificationTypeNames.Operator)]
        internal static ClassificationTypeDefinition CommaClassificationDefinition = null; // Set via MEF

        [Export]
        [Name(PythonPredefinedClassificationTypeNames.Operator)]
        [BaseDefinition(PredefinedClassificationTypeNames.Operator)]
        internal static ClassificationTypeDefinition OperatorClassificationDefinition = null; // Set via MEF

        [Export]
        [Name(PythonPredefinedClassificationTypeNames.Builtin)]
        [BaseDefinition(PredefinedClassificationTypeNames.Identifier)]
        internal static ClassificationTypeDefinition BuiltinClassificationDefinition = null; // Set via MEF

        #endregion

        #region IDlrClassifierProvider

        /// <summary>
        /// Returns the (per-buffer, cached) classifier for <paramref name="buffer"/>,
        /// creating one on first request when the buffer has the Python content type.
        /// NOTE(review): when the buffer already has no cached classifier AND its content
        /// type does not match, this returns null -- callers appear to tolerate that.
        /// </summary>
        public IClassifier GetClassifier(ITextBuffer buffer) {
            if (_categoryMap == null) {
                _categoryMap = FillCategoryMap(_classificationRegistry);
            }

            PythonClassifier res;
            if (!buffer.Properties.TryGetProperty<PythonClassifier>(typeof(PythonClassifier), out res) &&
                buffer.ContentType.IsOfType(ContentType.TypeName)) {
                res = new PythonClassifier(this, buffer);
                buffer.Properties.AddProperty(typeof(PythonClassifier), res);
            }
            return res;
        }

        public virtual IContentType ContentType {
            get { return _type; }
        }

        public IClassificationType Comment {
            get { return _comment; }
        }

        public IClassificationType StringLiteral {
            get { return _stringLiteral; }
        }

        public IClassificationType Keyword {
            get { return _keyword; }
        }

        public IClassificationType Operator {
            get { return _operator; }
        }

        public IClassificationType GroupingClassification {
            get { return _groupingClassification; }
        }

        public IClassificationType DotClassification {
            get { return _dotClassification; }
        }

        public IClassificationType CommaClassification {
            get { return _commaClassification; }
        }

        #endregion

        internal Dictionary<TokenCategory, IClassificationType> CategoryMap {
            get { return _categoryMap; }
        }

        // Builds the TokenCategory -> IClassificationType map and caches the
        // frequently used classification types in fields as a side effect.
        private Dictionary<TokenCategory, IClassificationType> FillCategoryMap(IClassificationTypeRegistryService registry) {
            var categoryMap = new Dictionary<TokenCategory, IClassificationType>();

            categoryMap[TokenCategory.DocComment] = _comment = registry.GetClassificationType(PredefinedClassificationTypeNames.Comment);
            categoryMap[TokenCategory.LineComment] = registry.GetClassificationType(PredefinedClassificationTypeNames.Comment);
            categoryMap[TokenCategory.Comment] = registry.GetClassificationType(PredefinedClassificationTypeNames.Comment);
            categoryMap[TokenCategory.NumericLiteral] = registry.GetClassificationType(PredefinedClassificationTypeNames.Number);
            categoryMap[TokenCategory.CharacterLiteral] = registry.GetClassificationType(PredefinedClassificationTypeNames.Character);
            categoryMap[TokenCategory.StringLiteral] = _stringLiteral = registry.GetClassificationType(PredefinedClassificationTypeNames.String);
            categoryMap[TokenCategory.Keyword] = _keyword = registry.GetClassificationType(PredefinedClassificationTypeNames.Keyword);
            categoryMap[TokenCategory.Directive] = registry.GetClassificationType(PredefinedClassificationTypeNames.Keyword);
            categoryMap[TokenCategory.Identifier] = registry.GetClassificationType(PredefinedClassificationTypeNames.Identifier);
            // Operator, delimiter and grouping all share the Python-specific Operator type.
            categoryMap[TokenCategory.Operator] = _operator = registry.GetClassificationType(PythonPredefinedClassificationTypeNames.Operator);
            categoryMap[TokenCategory.Delimiter] = registry.GetClassificationType(PythonPredefinedClassificationTypeNames.Operator);
            categoryMap[TokenCategory.Grouping] = registry.GetClassificationType(PythonPredefinedClassificationTypeNames.Operator);
            categoryMap[TokenCategory.WhiteSpace] = registry.GetClassificationType(PredefinedClassificationTypeNames.WhiteSpace);
            categoryMap[TokenCategory.RegularExpressionLiteral] = registry.GetClassificationType(PredefinedClassificationTypeNames.Literal);
            categoryMap[TokenCategory.BuiltinIdentifier] = registry.GetClassificationType(PythonPredefinedClassificationTypeNames.Builtin);
            // These types are not keyed by a TokenCategory; cache them for direct use.
            _groupingClassification = registry.GetClassificationType(PythonPredefinedClassificationTypeNames.Grouping);
            _commaClassification = registry.GetClassificationType(PythonPredefinedClassificationTypeNames.Comma);
            _dotClassification = registry.GetClassificationType(PythonPredefinedClassificationTypeNames.Dot);

            return categoryMap;
        }
    }

    #region Editor Format Definitions

    // Default visual formats for the Python-specific classification types above.
    // All operator-derived types default to black text; Builtin matches Keyword blue.

    [Export(typeof(EditorFormatDefinition))]
    [ClassificationType(ClassificationTypeNames = PythonPredefinedClassificationTypeNames.Operator)]
    [Name(PythonPredefinedClassificationTypeNames.Operator)]
    [UserVisible(true)]
    [Order(After = LanguagePriority.NaturalLanguage, Before = LanguagePriority.FormalLanguage)]
    internal sealed class OperatorFormat : ClassificationFormatDefinition {
        public OperatorFormat() {
            DisplayName = Strings.OperatorClassificationType; // Matches "Operator"
            ForegroundColor = Colors.Black;
        }
    }

    [Export(typeof(EditorFormatDefinition))]
    [ClassificationType(ClassificationTypeNames = PythonPredefinedClassificationTypeNames.Grouping)]
    [Name(PythonPredefinedClassificationTypeNames.Grouping)]
    [UserVisible(true)]
    [Order(After = LanguagePriority.NaturalLanguage, Before = LanguagePriority.FormalLanguage)]
    internal sealed class GroupingFormat : ClassificationFormatDefinition {
        public GroupingFormat() {
            DisplayName = Strings.GroupingClassificationType; // Matches "Operator"
            ForegroundColor = Colors.Black;
        }
    }

    [Export(typeof(EditorFormatDefinition))]
    [ClassificationType(ClassificationTypeNames = PythonPredefinedClassificationTypeNames.Comma)]
    [Name(PythonPredefinedClassificationTypeNames.Comma)]
    [UserVisible(true)]
    [Order(After = LanguagePriority.NaturalLanguage, Before = LanguagePriority.FormalLanguage)]
    internal sealed class CommaFormat : ClassificationFormatDefinition {
        public CommaFormat() {
            DisplayName = Strings.CommaClassificationType; // Matches "Operator"
            ForegroundColor = Colors.Black;
        }
    }

    [Export(typeof(EditorFormatDefinition))]
    [ClassificationType(ClassificationTypeNames = PythonPredefinedClassificationTypeNames.Dot)]
    [Name(PythonPredefinedClassificationTypeNames.Dot)]
    [UserVisible(true)]
    [Order(After = LanguagePriority.NaturalLanguage, Before = LanguagePriority.FormalLanguage)]
    internal sealed class DotFormat : ClassificationFormatDefinition {
        public DotFormat() {
            DisplayName = Strings.DotClassificationType; // Matches "Operator"
            ForegroundColor = Colors.Black;
        }
    }

    [Export(typeof(EditorFormatDefinition))]
    [ClassificationType(ClassificationTypeNames = PythonPredefinedClassificationTypeNames.Builtin)]
    [Name(PythonPredefinedClassificationTypeNames.Builtin)]
    [UserVisible(true)]
    [Order(After = LanguagePriority.NaturalLanguage, Before = LanguagePriority.FormalLanguage)]
    internal sealed class BuiltinFormat : ClassificationFormatDefinition {
        public BuiltinFormat() {
            DisplayName = Strings.BuiltinClassificationType; // Matches "Keyword"
            ForegroundColor = Colors.Blue;
        }
    }

    #endregion
}
// // Copyright (c) 2004-2011 Jaroslaw Kowalski <jaak@jkowalski.net> // // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions // are met: // // * Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // // * Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // // * Neither the name of Jaroslaw Kowalski nor the names of its // contributors may be used to endorse or promote products derived from this // software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE // LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF // SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF // THE POSSIBILITY OF SUCH DAMAGE. // using JetBrains.Annotations; namespace NLog { using System; using System.Collections.Generic; using System.Diagnostics; using System.Linq; using System.Reflection; using System.Threading; using JetBrains.Annotations; using NLog.Common; using NLog.Config; using NLog.Filters; using NLog.Internal; using NLog.Targets; /// <summary> /// Implementation of logging engine. 
/// </summary>
internal static class LoggerImpl
{
    // Number of leading stack frames to skip when capturing the call stack (none).
    private const int StackTraceSkipMethods = 0;

    // Assemblies whose frames are always classified as "non-user" code when
    // locating the original logging call site (NLog itself, mscorlib, System).
    private static readonly Assembly nlogAssembly = typeof(LoggerImpl).Assembly;
    private static readonly Assembly mscorlibAssembly = typeof(string).Assembly;
    private static readonly Assembly systemAssembly = typeof(Debug).Assembly;

    /// <summary>
    /// Writes <paramref name="logEvent"/> to every target in the chain headed by
    /// <paramref name="targets"/>, capturing a stack trace first if any target needs one.
    /// </summary>
    [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA2204:Literals should be spelled correctly", Justification = "Using 'NLog' in message.")]
    internal static void Write([NotNull] Type loggerType, TargetWithFilterChain targets, LogEventInfo logEvent, LogFactory factory)
    {
        if (targets == null)
        {
            // No targets configured for this logger/level: nothing to do.
            return;
        }

        StackTraceUsage stu = targets.GetStackTraceUsage();

        if (stu != StackTraceUsage.None && !logEvent.HasStackTrace)
        {
            // Capture the call stack only when a target actually requires it
            // and the event does not already carry one.
            StackTrace stackTrace;
#if !SILVERLIGHT
            stackTrace = new StackTrace(StackTraceSkipMethods, stu == StackTraceUsage.WithSource);
#else
            stackTrace = new StackTrace();
#endif
            int firstUserFrame = FindCallingMethodOnStackTrace(stackTrace, loggerType);
            logEvent.SetStackTrace(stackTrace, firstUserFrame);
        }

        // Remember the calling thread so that target exceptions are only rethrown
        // synchronously; completions arriving on other threads must not throw here.
        int originalThreadId = Thread.CurrentThread.ManagedThreadId;
        AsyncContinuation exceptionHandler = ex =>
        {
            if (ex != null)
            {
                if (factory.ThrowExceptions && Thread.CurrentThread.ManagedThreadId == originalThreadId)
                {
                    throw new NLogRuntimeException("Exception occurred in NLog", ex);
                }
            }
        };

        // Walk the target chain; WriteToTargetWithFilterChain returns false for
        // "final" filter results, which stops propagation to later targets.
        for (var t = targets; t != null; t = t.NextInChain)
        {
            if (!WriteToTargetWithFilterChain(t, logEvent, exceptionHandler))
            {
                break;
            }
        }
    }

    /// <summary>
    /// Finds first user stack frame in a stack trace
    /// </summary>
    /// <param name="stackTrace">The stack trace of the logging method invocation</param>
    /// <param name="loggerType">Type of the logger or logger wrapper</param>
    /// <returns>Index of the first user stack frame or 0 if all stack frames are non-user</returns>
    /// <seealso cref="IsNonUserStackFrame"/>
    private static int FindCallingMethodOnStackTrace([NotNull] StackTrace stackTrace, [NotNull] Type loggerType)
    {
        int? firstUserFrame = null;
        for (int i = 0; i < stackTrace.FrameCount; ++i)
        {
            StackFrame frame = stackTrace.GetFrame(i);
            MethodBase mb = frame.GetMethod();
            if (IsNonUserStackFrame(mb, loggerType))
                // Candidate: the frame immediately after the latest non-user frame.
                firstUserFrame = i + 1;
            else if (firstUserFrame != null)
                // First user frame that follows a non-user frame wins.
                return firstUserFrame.Value;
        }

        // All frames were user code (or all were non-user): fall back to the top.
        return 0;
    }

    /// <summary>
    /// Defines whether a stack frame belongs to non-user code
    /// </summary>
    /// <param name="method">Method of the stack frame</param>
    /// <param name="loggerType">Type of the logger or logger wrapper</param>
    /// <returns><see langword="true"/>, if the method is from non-user code and should be skipped</returns>
    /// <remarks>
    /// The method is classified as non-user if its declaring assembly is from hidden assemblies list
    /// or its declaring type is <paramref name="loggerType"/> or one of its subtypes.
    /// </remarks>
    private static bool IsNonUserStackFrame([NotNull] MethodBase method, [NotNull] Type loggerType)
    {
        var declaringType = method.DeclaringType;

        // get assembly by declaring type or by module for global methods
        var assembly = declaringType != null ? declaringType.Assembly : method.Module.Assembly;

        // skip stack frame if the method declaring type assembly is from hidden assemblies list
        if (SkipAssembly(assembly))
            return true;

        // or if that type is the loggerType or one of its subtypes
        return declaringType != null && loggerType.IsAssignableFrom(declaringType);
    }

    /// <summary>
    /// Returns true when frames from <paramref name="assembly"/> should never be
    /// reported as the user call site (NLog, mscorlib, System, or registered hidden assemblies).
    /// </summary>
    private static bool SkipAssembly(Assembly assembly)
    {
        if (assembly == nlogAssembly)
        {
            return true;
        }

        if (assembly == mscorlibAssembly)
        {
            return true;
        }

        if (assembly == systemAssembly)
        {
            return true;
        }

        if (LogManager.IsHiddenAssembly(assembly))
        {
            return true;
        }

        return false;
    }

    /// <summary>
    /// Applies the target's filter chain and, unless the event is filtered out,
    /// writes it asynchronously. Returns false when a "final" filter result
    /// should stop propagation to subsequent targets.
    /// </summary>
    private static bool WriteToTargetWithFilterChain(TargetWithFilterChain targetListHead, LogEventInfo logEvent, AsyncContinuation onException)
    {
        Target target = targetListHead.Target;
        FilterResult result = GetFilterResult(targetListHead.FilterChain, logEvent);

        if ((result == FilterResult.Ignore) || (result == FilterResult.IgnoreFinal))
        {
            if (InternalLogger.IsDebugEnabled)
            {
                InternalLogger.Debug("{0}.{1} Rejecting message because of a filter.", logEvent.LoggerName, logEvent.Level);
            }

            if (result == FilterResult.IgnoreFinal)
            {
                // Ignored AND final: stop walking the chain.
                return false;
            }

            // Ignored for this target only: keep walking.
            return true;
        }

        target.WriteAsyncLogEvent(logEvent.WithContinuation(onException));
        if (result == FilterResult.LogFinal)
        {
            // Logged AND final: stop walking the chain.
            return false;
        }

        return true;
    }

    /// <summary>
    /// Gets the filter result.
    /// </summary>
    /// <param name="filterChain">The filter chain.</param>
    /// <param name="logEvent">The log event.</param>
    /// <returns>The result of the filter.</returns>
    private static FilterResult GetFilterResult(IList<Filter> filterChain, LogEventInfo logEvent)
    {
        FilterResult result = FilterResult.Neutral;

        try
        {
            //Memory profiling pointed out that using a foreach-loop was allocating
            //an Enumerator. Switching to a for-loop avoids the memory allocation.
            for (int i = 0; i < filterChain.Count; i++)
            {
                Filter f = filterChain[i];
                result = f.GetFilterResult(logEvent);
                if (result != FilterResult.Neutral)
                {
                    // First non-neutral verdict decides.
                    break;
                }
            }

            return result;
        }
        catch (Exception exception)
        {
            if (exception.MustBeRethrown())
            {
                throw;
            }

            // A faulty filter must not crash logging; treat the event as filtered out.
            InternalLogger.Warn("Exception during filter evaluation: {0}", exception);
            return FilterResult.Ignore;
        }
    }
}
}
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using MySql.Data.MySqlClient;
using SuperScript.ExternalFile.Storage;

namespace SuperScript.ExternalFile.MySql
{
    /// <summary>
    /// MySQL-backed implementation of <see cref="IDbStoreProvider"/> that persists
    /// <see cref="IStorable"/> instances in a single table named <see cref="StoreName"/>.
    /// NOTE(review): <see cref="StoreName"/> is concatenated directly into SQL text
    /// (identifiers cannot be parameterized); it is configuration rather than user
    /// input, but it must never be populated from untrusted data.
    /// </summary>
    public class MySqlStoreProvider : IDbStoreProvider
    {
        #region Global variables

        // Set by Init()/DeleteStore(); tracks whether the backing table is known to exist.
        private bool _tableExists;

        #endregion


        #region Properties

        /// <summary>
        /// Gets or sets the connection string that will be used to communicate with the underlying database.
        /// </summary>
        public string ConnectionString { get; set; }


        /// <summary>
        /// Gets or sets the name of the database, if this is not already detailed in the connection string.
        /// </summary>
        public string DbName { get; set; }


        /// <summary>
        /// Gets or sets the name of the table inside the database.
        /// </summary>
        public string StoreName { get; set; }

        #endregion


        #region Methods

        /// <summary>
        /// Adds the specified instance of <see cref="IStorable"/> to the store using the specified key. If an
        /// item exists with the specified <see cref="IStorable.Key"/> then it will be updated.
        /// </summary>
        /// <param name="storable">An instance of <see cref="IStorable"/> which contains all pertinent data.</param>
        /// <exception cref="ArgumentException">Thrown if the <see cref="IStorable.Key"/> property is null or whitespace.</exception>
        /// <exception cref="ConfigurablePropertyNotSpecifiedException">The <see cref="ConnectionString"/> has not been populated.</exception>
        /// <exception cref="ConfigurablePropertyNotSpecifiedException">Thrown if <see cref="StoreName"/> is null or whitespace.</exception>
        public void AddOrUpdate(IStorable storable)
        {
            // verify that a valid key has been specified
            if (String.IsNullOrWhiteSpace(storable.Key))
            {
                throw new ArgumentException("The key parameter must be a non-zero-length string.");
            }

            if (ConnectionString == null)
            {
                throw new ConfigurablePropertyNotSpecifiedException("The ConnectionString property must be specified.");
            }

            // verify that we have a StoreName
            if (String.IsNullOrWhiteSpace(StoreName))
            {
                throw new ConfigurablePropertyNotSpecifiedException("The StoreName property must be specified.");
            }

            // execute the INSERT query
            // Upsert: ON DUPLICATE KEY UPDATE relies on `key` being the primary key.
            using (var conn = new MySqlConnection(ConnectionString))
            using (var cmd = conn.CreateCommand())
            {
                cmd.CommandType = CommandType.Text;
                cmd.CommandText = @"INSERT INTO `" + StoreName + @"` ( `key`, `cacheForTimePeriod`, `contents`, `contentType`, `longevity` ) VALUES ( ?key, ?cacheForTimePeriod, ?contents, ?contentType, ?longevity ) ON DUPLICATE KEY UPDATE `cacheForTimePeriod` = ?cacheForTimePeriod, `contents` = ?contents, `contentType` = ?contentType, `longevity` = ?longevity;";
                cmd.Parameters.AddWithValue("?key", storable.Key);
                cmd.Parameters.AddWithValue("?cacheForTimePeriod", storable.CacheForTimePeriod_Serialize);
                cmd.Parameters.AddWithValue("?contents", storable.Contents);
                cmd.Parameters.AddWithValue("?contentType", storable.ContentType);
                cmd.Parameters.AddWithValue("?longevity", (int) storable.Longevity);

                conn.Open();
                if (!String.IsNullOrWhiteSpace(DbName))
                {
                    conn.ChangeDatabase(DbName);
                }

                cmd.ExecuteNonQuery();
            }
        }


        /// <summary>
        /// Contains the instructions for initialising a MySQL-based implementation of <see cref="IStore"/>.
        /// </summary>
        /// <returns><c>True</c> if the store was created, <c>false</c> otherwise.</returns>
        /// <exception cref="MissingDatabaseConfigurationException">The <see cref="ConnectionString"/> has not been populated.</exception>
        /// <exception cref="ConfigurablePropertyNotSpecifiedException">Thrown if <see cref="StoreName"/> is null or whitespace.</exception>
        private bool CreateStore()
        {
            if (ConnectionString == null)
            {
                throw new MissingDatabaseConfigurationException("No matching connection string was found for the specified ConnectionStringName.");
            }

            // verify that we have a StoreName
            if (String.IsNullOrWhiteSpace(StoreName))
            {
                throw new ConfigurablePropertyNotSpecifiedException("The StoreName property must be specified.");
            }

            using (var conn = new MySqlConnection(ConnectionString))
            {
                using (var cmd = conn.CreateCommand())
                {
                    cmd.CommandType = CommandType.Text;
                    // NOTE(review): this drops any pre-existing table before recreating it,
                    // then verifies creation via SHOW TABLES (HasRows == table present).
                    cmd.CommandText = @"DROP TABLE IF EXISTS " + StoreName + @"; CREATE TABLE IF NOT EXISTS " + StoreName + @" ( `key` VARCHAR(250) NOT NULL, `cacheForTimePeriod` VARCHAR(20) NOT NULL DEFAULT '{0:00:00:00}', `contents` TEXT NOT NULL, `contentType` VARCHAR(45) NOT NULL, `longevity` INT NOT NULL DEFAULT 0, `created` DATETIME NOT NULL DEFAULT NOW(), PRIMARY KEY (`key`)); SHOW TABLES LIKE '" + StoreName + "';";

                    conn.Open();
                    if (!String.IsNullOrWhiteSpace(DbName))
                    {
                        conn.ChangeDatabase(DbName);
                    }

                    using (var rdr = cmd.ExecuteReader())
                    {
                        return rdr.HasRows;
                    }
                }
            }
        }


        /// <summary>
        /// Deletes the instance of <see cref="IStorable"/> which has been stored against the specified <see cref="key"/>.
        /// </summary>
        /// <param name="key">The unique identifier that the <see cref="IStorable"/> was stored under.</param>
        /// <exception cref="ArgumentException">Thrown if the <see cref="key"/> property is null or whitespace.</exception>
        /// <exception cref="MissingDatabaseConfigurationException">The <see cref="ConnectionString"/> has not been populated.</exception>
        /// <exception cref="ConfigurablePropertyNotSpecifiedException">Thrown if <see cref="StoreName"/> is null or whitespace.</exception>
        public void Delete(string key)
        {
            // verify that a valid key has been specified
            if (String.IsNullOrWhiteSpace(key))
            {
                throw new ArgumentException("The key parameter must be a non-zero-length string.");
            }

            if (ConnectionString == null)
            {
                throw new MissingDatabaseConfigurationException("No matching connection string was found for the specified ConnectionStringName.");
            }

            // verify that we have a StoreName
            if (String.IsNullOrWhiteSpace(StoreName))
            {
                throw new ConfigurablePropertyNotSpecifiedException("The StoreName property must be specified.");
            }

            // execute the DELETE query
            using (var conn = new MySqlConnection(ConnectionString))
            using (var cmd = conn.CreateCommand())
            {
                cmd.CommandType = CommandType.Text;
                cmd.CommandText = @"DELETE FROM `" + StoreName + @"` WHERE `key` = ?key;";
                cmd.Parameters.AddWithValue("?key", key);

                conn.Open();
                if (!String.IsNullOrWhiteSpace(DbName))
                {
                    conn.ChangeDatabase(DbName);
                }

                cmd.ExecuteNonQuery();
            }
        }


        /// <summary>
        /// Deletes the entire store from the database.
        /// </summary>
        /// <exception cref="MissingDatabaseConfigurationException">The <see cref="ConnectionString"/> has not been populated.</exception>
        /// <exception cref="ConfigurablePropertyNotSpecifiedException">Thrown if <see cref="StoreName"/> is null or whitespace.</exception>
        public void DeleteStore()
        {
            if (ConnectionString == null)
            {
                throw new MissingDatabaseConfigurationException("No matching connection string was found for the specified ConnectionStringName.");
            }

            // verify that we have a StoreName
            if (String.IsNullOrWhiteSpace(StoreName))
            {
                throw new ConfigurablePropertyNotSpecifiedException("The StoreName property must be specified.");
            }

            using (var conn = new MySqlConnection(ConnectionString))
            using (var cmd = conn.CreateCommand())
            {
                cmd.CommandType = CommandType.Text;
                cmd.CommandText = @"DROP TABLE IF EXISTS " + StoreName + ";";

                conn.Open();
                if (!String.IsNullOrWhiteSpace(DbName))
                {
                    conn.ChangeDatabase(DbName);
                }

                cmd.ExecuteNonQuery();
            }

            // The table is gone; a subsequent Init() must recreate it.
            _tableExists = false;
        }


        /// <summary>
        /// <para>Gets the instance of <see cref="IStorable"/> with the specified key.</para>
        /// <para>Returns null if no matching keys were found.</para>
        /// </summary>
        /// <param name="key">The unique identifier that the <see cref="IStorable"/> was stored under.</param>
        /// <exception cref="ArgumentException">Thrown if the <see cref="key"/> property is null or whitespace.</exception>
        /// <exception cref="MissingDatabaseConfigurationException">The <see cref="ConnectionString"/> has not been populated.</exception>
        /// <exception cref="ConfigurablePropertyNotSpecifiedException">Thrown if <see cref="StoreName"/> is null or whitespace.</exception>
        public IStorable Get(string key)
        {
            // verify that a valid key has been specified
            if (String.IsNullOrWhiteSpace(key))
            {
                throw new ArgumentException("The key parameter must be a non-zero-length string.");
            }

            if (ConnectionString == null)
            {
                throw new MissingDatabaseConfigurationException("No matching connection string was found for the specified ConnectionStringName.");
            }

            // verify that we have a StoreName
            if (String.IsNullOrWhiteSpace(StoreName))
            {
                throw new ConfigurablePropertyNotSpecifiedException("The StoreName property must be specified.");
            }

            // execute the SELECT query
            using (var conn = new MySqlConnection(ConnectionString))
            using (var cmd = conn.CreateCommand())
            {
                cmd.CommandType = CommandType.Text;
                cmd.CommandText = @"SELECT `cacheForTimePeriod`, -- 0
`contents`, -- 1
`contentType`, -- 2
`longevity` -- 3
FROM `" + StoreName + @"` WHERE `key` = ?key;";
                cmd.Parameters.AddWithValue("?key", key);

                conn.Open();
                if (!String.IsNullOrWhiteSpace(DbName))
                {
                    conn.ChangeDatabase(DbName);
                }

                using (var rdr = cmd.ExecuteReader())
                {
                    if (!rdr.Read())
                    {
                        // No row for this key.
                        return null;
                    }

                    var storable = new Storable
                                       {
                                           Key = key,
                                           CacheForTimePeriod_Serialize = rdr.GetString(0),
                                           Contents = rdr.GetString(1),
                                           ContentType = rdr.GetString(2)
                                       };

                    // NOTE(review): `longevity` is an INT column read via GetString and
                    // parsed as an enum — presumably relying on the connector's string
                    // conversion; verify against the Longevity enum's numeric values.
                    Longevity lgvty;
                    if (Enum.TryParse(rdr.GetString(3), out lgvty))
                    {
                        storable.Longevity = lgvty;
                    }
                    else
                    {
                        throw new InvalidEnumArgumentException("The value used for Longevity was not valid");
                    }

                    return storable;
                }
            }
        }


        /// <summary>
        /// Returns a snapshot of all <see cref="IStorable"/> instances in the store.
        /// </summary>
        /// <exception cref="MissingDatabaseConfigurationException">The <see cref="ConnectionString"/> has not been populated.</exception>
        /// <exception cref="ConfigurablePropertyNotSpecifiedException">Thrown if <see cref="StoreName"/> is null or whitespace.</exception>
        public IEnumerable<IStorable> GetAll()
        {
            if (ConnectionString == null)
            {
                throw new MissingDatabaseConfigurationException("No matching connection string was found for the specified ConnectionStringName.");
            }

            // verify that we have a StoreName
            if (String.IsNullOrWhiteSpace(StoreName))
            {
                throw new ConfigurablePropertyNotSpecifiedException("The StoreName property must be specified.");
            }

            using (var conn = new MySqlConnection(ConnectionString))
            using (var cmd = conn.CreateCommand())
            {
                cmd.CommandType = CommandType.Text;
                cmd.CommandText = @"SELECT `key`, -- 0
`cacheForTimePeriod`, -- 1
`contents`, -- 2
`contentType`, -- 3
`longevity` -- 4
FROM `" + StoreName + @"`;";

                conn.Open();
                if (!String.IsNullOrWhiteSpace(DbName))
                {
                    conn.ChangeDatabase(DbName);
                }

                using (var rdr = cmd.ExecuteReader())
                {
                    // Materialise into a list so the reader/connection can be disposed
                    // before the caller enumerates the results.
                    var storables = new List<Storable>();

                    while (rdr.Read())
                    {
                        var storable = new Storable
                                           {
                                               Key = rdr.GetString(0),
                                               CacheForTimePeriod_Serialize = rdr.GetString(1),
                                               Contents = rdr.GetString(2),
                                               ContentType = rdr.GetString(3)
                                           };

                        Longevity lgvty;
                        if (Enum.TryParse(rdr.GetString(4), out lgvty))
                        {
                            storable.Longevity = lgvty;
                        }
                        else
                        {
                            throw new InvalidEnumArgumentException("The value used for Longevity was not valid");
                        }

                        storables.Add(storable);
                    }

                    return storables;
                }
            }
        }


        /// <summary>
        /// Checks that the store (a database table) exists. If not, the store will be created.
        /// </summary>
        /// <exception cref="MissingDatabaseConfigurationException">The <see cref="ConnectionString"/> has not been populated.</exception>
        /// <exception cref="ConfigurablePropertyNotSpecifiedException">Thrown if <see cref="StoreName"/> is null or whitespace.</exception>
        public void Init()
        {
            _tableExists = StoreExists();

            if (_tableExists)
            {
                return;
            }

            _tableExists = CreateStore();

            if (!_tableExists)
            {
                throw new UnableToCreateStoreException();
            }
        }


        /// <summary>
        /// Removes instances of <see cref="IStorable"/> which are older than the specified <see cref="TimeSpan"/>.
        /// </summary>
        /// <param name="removeThreshold">Instances of <see cref="IStorable"/> which are older than this will be removed from the store.</param>
        public void Scavenge(TimeSpan removeThreshold)
        {
            if (ConnectionString == null)
            {
                throw new MissingDatabaseConfigurationException("No matching connection string was found for the specified ConnectionStringName.");
            }

            // verify that we have a StoreName
            if (String.IsNullOrWhiteSpace(StoreName))
            {
                throw new ConfigurablePropertyNotSpecifiedException("The StoreName property must be specified.");
            }

            using (var conn = new MySqlConnection(ConnectionString))
            using (var cmd = conn.CreateCommand())
            {
                cmd.CommandType = CommandType.Text;
                cmd.CommandText = @"DELETE FROM `" + StoreName + @"` WHERE `created` <= ?olderThan;";
                // NOTE(review): threshold uses the client's local clock (DateTime.Now)
                // while `created` defaults to the server's NOW(); clock skew between
                // client and server will shift the cutoff — confirm this is acceptable.
                cmd.Parameters.Add("?olderThan", MySqlDbType.DateTime).Value = DateTime.Now.Subtract(removeThreshold);

                conn.Open();
                if (!String.IsNullOrWhiteSpace(DbName))
                {
                    conn.ChangeDatabase(DbName);
                }

                cmd.ExecuteNonQuery();
            }
        }


        /// <summary>
        /// Indicates whether the store currently exists.
        /// </summary>
        /// <returns><c>True</c> if the store was created, <c>false</c> otherwise.</returns>
        /// <exception cref="MissingDatabaseConfigurationException">The <see cref="ConnectionString"/> has not been populated.</exception>
        /// <exception cref="ConfigurablePropertyNotSpecifiedException">Thrown if <see cref="StoreName"/> is null or whitespace.</exception>
        private bool StoreExists()
        {
            if (ConnectionString == null)
            {
                throw new MissingDatabaseConfigurationException("No matching connection string was found for the specified ConnectionStringName.");
            }

            // verify that we have a StoreName
            if (String.IsNullOrWhiteSpace(StoreName))
            {
                throw new ConfigurablePropertyNotSpecifiedException("The StoreName property must be specified.");
            }

            using (var conn = new MySqlConnection(ConnectionString))
            using (var cmd = conn.CreateCommand())
            {
                cmd.CommandType = CommandType.Text;
                cmd.CommandText = "SHOW TABLES LIKE '" + StoreName + "';";

                conn.Open();
                if (!String.IsNullOrWhiteSpace(DbName))
                {
                    conn.ChangeDatabase(DbName);
                }

                using (var rdr = cmd.ExecuteReader())
                {
                    // A returned row means the table exists.
                    return rdr.HasRows;
                }
            }
        }

        #endregion
    }
}
// // Copyright (c) Microsoft Corporation. All rights reserved. // #define CACHEINFO_ENABLE_TRACKING_THREADS //#define CACHEINFO_DEBUG_THREADING_ISSUES namespace Microsoft.Zelig.CodeGeneration.IR { using System; using System.Collections.Generic; using Microsoft.Zelig.Runtime.TypeSystem; public abstract class ControlFlowGraphState { public static CompilationConstraints[] SharedEmptyCompilationConstraintsArray = new CompilationConstraints[0]; public abstract class CachedInfo : IDisposable { public static CachedInfo[] SharedEmptyArray = new CachedInfo[0]; // // State // protected ControlFlowGraphState m_owner; private int m_version; private int m_lockCount; #if CACHEINFO_ENABLE_TRACKING_THREADS private System.Threading.Thread m_lockOwner; #if CACHEINFO_DEBUG_THREADING_ISSUES protected System.Diagnostics.StackTrace m_lockTrace; #endif #endif // // Constructor Methods // protected CachedInfo() { } // // Helper Methods // public void Dispose() { Unlock(); } public void Lock() { #if CACHEINFO_ENABLE_TRACKING_THREADS System.Threading.Thread thisThread = System.Threading.Thread.CurrentThread; System.Threading.Thread activeThread = System.Threading.Interlocked.CompareExchange( ref m_lockOwner, thisThread, null ); CHECKS.ASSERT( activeThread == null || activeThread == thisThread, "Lock on {0} for method '{1}' already claimed", this.GetType().FullName, m_owner.Method.ToShortString() ); #if CACHEINFO_DEBUG_THREADING_ISSUES if(m_lockCount == 0) { m_lockTrace = new System.Diagnostics.StackTrace(); } #endif #endif m_lockCount++; } public void Unlock() { #if CACHEINFO_ENABLE_TRACKING_THREADS CHECKS.ASSERT( System.Threading.Thread.CurrentThread == m_lockOwner, "Lock on {0} not owned by current thread", this.GetType().FullName ); #endif CHECKS.ASSERT( m_lockCount > 0, "Underflow for lock on {0}", this.GetType().FullName ); m_lockCount--; #if CACHEINFO_ENABLE_TRACKING_THREADS if(m_lockCount == 0) { m_lockOwner = null; #if CACHEINFO_DEBUG_THREADING_ISSUES m_lockTrace = null; #endif } 
#endif } public void RefreshIfNeeded() { ThreadLockInfo.Assert( m_owner.Method ); if(m_version != m_owner.m_version) { if(m_lockCount != 0) { throw TypeConsistencyErrorException.Create( "Detected attempt to modify state of Control Flow Graph for {0}", this.GetType().FullName ); } Update(); m_version = m_owner.m_version; m_owner.LatestCachedVersion = m_version; } } protected abstract void Update(); // // Access Methods // internal ControlFlowGraphState Owner { set { m_owner = value; } } } //--// class ThreadLockInfo : IDisposable { internal class ExceptionInfo : IDisposable { // // Constructor Methods // internal ExceptionInfo( MethodRepresentation md ) { s_lockException = md; } // // Helper Methods // public void Dispose() { s_lockException = null; } } // // State // [ThreadStatic] private static MethodRepresentation s_lock; [ThreadStatic] private static MethodRepresentation s_lockException; // // Constructor Methods // internal ThreadLockInfo( MethodRepresentation md ) { Assert( md ); s_lock = md; } internal ThreadLockInfo( MethodRepresentation md , MethodRepresentation md2 ) { Assert( md ); s_lock = md; s_lockException = md2; } // // Helper Methods // public void Dispose() { s_lock = null; s_lockException = null; } internal static void Assert( MethodRepresentation md ) { if(s_lock == null || md == s_lock || md == s_lockException) { return; } throw TypeConsistencyErrorException.Create( "Detected attempt to access state of Control Flow Graph for '{0}' while thread is locked to access only '{1}'", md.ToShortString(), s_lock.ToShortString() ); } } class GroupLockInfo : IDisposable { // // State // private readonly IDisposable[] m_locks; // // Constructor Methods // internal GroupLockInfo( IDisposable[] locks ) { m_locks = locks; } // // Helper Methods // public void Dispose() { // // Release in opposite order. 
// for(int i = m_locks.Length; --i >= 0; ) { m_locks[i].Dispose(); } } } //--// class CachedInfo_FlowInformation : CachedInfo { // // Helper Methods // protected override void Update() { using(new PerformanceCounters.ContextualTiming( m_owner, "FlowInformation" )) { foreach(BasicBlock bb in m_owner.m_basicBlocks) { bb.ResetFlowInformation(); } foreach(BasicBlock bb in m_owner.m_basicBlocks) { bb.UpdateFlowInformation(); } } } } //--// // // State // protected MethodRepresentation m_md; protected int m_version; protected EntryBasicBlock m_entryBasicBlock; protected ExitBasicBlock m_exitBasicBlock; protected BasicBlock[] m_basicBlocks; // This is the set of all the basic blocks in the CFG. protected VariableExpression m_returnValue; protected VariableExpression[] m_arguments; protected VariableExpression[] m_variables; protected int m_variablesCount; protected CachedInfo[] m_cache; // // Constructor Methods // protected ControlFlowGraphState() // Default constructor required by TypeSystemSerializer. 
{ m_cache = CachedInfo.SharedEmptyArray; } protected ControlFlowGraphState( MethodRepresentation md ) { m_md = md; m_version = 1; m_basicBlocks = BasicBlock.SharedEmptyArray; m_variables = VariableExpression.SharedEmptyArray; m_cache = CachedInfo.SharedEmptyArray; } protected ControlFlowGraphState( ControlFlowGraphState source ) : this( source.m_md ) { } //--// // // Helper Methods // protected virtual void CloneVariables( CloningContext context , ControlFlowGraphState source ) { VariableExpression var; var = source.m_returnValue; if(var != null) { m_returnValue = AllocateTemporary( var.Type, var.DebugName ); context.Register( var, m_returnValue ); } //--// VariableExpression[] args = source.m_arguments; int argsNum = args.Length; m_arguments = new VariableExpression[argsNum]; for(int i = 0; i < argsNum; i++) { var = args[i]; m_arguments[i] = new ArgumentVariableExpression( var.Type, var.DebugName, i ); context.Register( var, m_arguments[i] ); } } //--// protected void TrackVariable( VariableExpression var ) { if(m_variablesCount == m_variables.Length) { m_variables = ArrayUtility.EnsureSizeOfNotNullArray( m_variables, m_variablesCount + 16 ); } m_variables[m_variablesCount++] = var; } public LocalVariableExpression AllocateLocal( TypeRepresentation td , VariableExpression.DebugInfo debugInfo ) { LocalVariableExpression newLocal = new LocalVariableExpression( td, debugInfo ); TrackVariable( newLocal ); return newLocal; } public TemporaryVariableExpression AllocateTemporary( TypeRepresentation td , VariableExpression.DebugInfo debugInfo ) { TemporaryVariableExpression newTmp = new TemporaryVariableExpression( td, debugInfo ); TrackVariable( newTmp ); return newTmp; } public ExceptionObjectVariableExpression AllocateExceptionObjectVariable( TypeRepresentation td ) { ExceptionObjectVariableExpression newEx = new ExceptionObjectVariableExpression( td, null ); TrackVariable( newEx ); return newEx; } //--// internal void Register( BasicBlock basicBlock ) { BumpVersion(); 
if(basicBlock is EntryBasicBlock) { CHECKS.ASSERT( m_entryBasicBlock == null, "Entry Basic Block already exists" ); m_entryBasicBlock = (EntryBasicBlock)basicBlock; } else if(basicBlock is ExitBasicBlock) { CHECKS.ASSERT( m_exitBasicBlock == null, "Exit Basic Block already exists" ); m_exitBasicBlock = (ExitBasicBlock)basicBlock; } m_basicBlocks = ArrayUtility.AddUniqueToNotNullArray( m_basicBlocks, basicBlock ); } internal void Deregister( BasicBlock basicBlock ) { BumpVersion(); if(basicBlock == m_entryBasicBlock) { m_entryBasicBlock = null; } else if(basicBlock == m_exitBasicBlock) { m_exitBasicBlock = null; } } internal void BumpVersion() { m_version++; } //--// public static IDisposable LockThreadToMethod( MethodRepresentation md ) { return new ThreadLockInfo( md ); } public static IDisposable AddExceptionToThreadMethodLock( MethodRepresentation md ) { return new ThreadLockInfo.ExceptionInfo( md ); } public IDisposable GroupLock( params IDisposable[] locks ) { return new GroupLockInfo( locks ); } protected T GetCachedInfo< T >() where T : CachedInfo, new() { foreach(CachedInfo ci in m_cache) { if(ci is T) { ci.RefreshIfNeeded(); return (T)ci; } } T newCI = new T(); newCI.Owner = this; m_cache = ArrayUtility.AppendToNotNullArray( m_cache, newCI ); newCI.RefreshIfNeeded(); return newCI; } public void UpdateFlowInformation() { GetCachedInfo< CachedInfo_FlowInformation >(); } public IDisposable LockFlowInformation() { var ci = GetCachedInfo< CachedInfo_FlowInformation >(); ci.Lock(); return ci; } //--// int m_checkpointCachedVersion = -1; protected int LatestCachedVersion { get; set; } public void ResetCacheCheckpoint( ) { m_checkpointCachedVersion = LatestCachedVersion; } public void AssertNoCacheRefreshSinceCheckpoint( ) { CHECKS.ASSERT( m_checkpointCachedVersion == LatestCachedVersion, "Cached info updated since checkpoint. The cache used was stale!" 
); } //--// protected void InnerApplyTransformation( TransformationContextForIR context ) { context.Transform( ref m_md ); context.Transform( ref m_version ); context.Transform( ref m_entryBasicBlock ); context.Transform( ref m_exitBasicBlock ); context.Transform( ref m_basicBlocks ); context.Transform( ref m_returnValue ); context.Transform( ref m_arguments ); context.Transform( ref m_variables ); context.Transform( ref m_variablesCount ); } //--// public void PerformActionOnOperators( Action<Operator> action ) { foreach(BasicBlock bb in m_basicBlocks) { foreach(Operator op in bb.Operators) { action( op ); } } } //--// public BasicBlock FirstBasicBlock { get { if(m_basicBlocks != null && m_basicBlocks.Length > 0) { return m_basicBlocks[0]; } return CreateFirstNormalBasicBlock(); } } public NormalBasicBlock CreateFirstNormalBasicBlock() { // // Important: get 'NormalizedEntryBasicBlock' before allocating the new basic block, or it will be reclaimed!! // var bbPrev = this.NormalizedEntryBasicBlock; var bbNext = this.NormalizedExitBasicBlock; var bb = new NormalBasicBlock( this ); bbPrev.FlowControl = UnconditionalControlOperator.New( null, bb ); bb .FlowControl = UnconditionalControlOperator.New( null, bbNext ); return bb; } public NormalBasicBlock CreateLastNormalBasicBlock() { // // Important: get 'NormalizedExitBasicBlock' before allocating the new basic block, or it will be reclaimed!! // var bbNext = this.NormalizedExitBasicBlock; var bb = new NormalBasicBlock( this ); bb.FlowControl = UnconditionalControlOperator.New( null, bbNext ); return bb; } public void AddReturnOperator() { // // Create proper flow control for exit basic block. 
// ControlOperator op; if(m_returnValue != null) { op = ReturnControlOperator.New( m_returnValue ); } else { op = ReturnControlOperator.New(); } m_exitBasicBlock.AddOperator( op ); } public Operator GenerateVariableInitialization( Debugging.DebugInfo debugInfo , VariableExpression var ) { return GenerateVariableInitialization( debugInfo, var, var.Type, false ); } public abstract Operator GenerateVariableInitialization( Debugging.DebugInfo debugInfo , Expression var , TypeRepresentation td , bool fThroughPointer ); public abstract BasicBlock GetInjectionPoint( BasicBlock.Qualifier qualifier ); //--// public virtual void RenumberVariables() { int numLocal = 0; int numTmp = 0; int numEx = 0; foreach(VariableExpression var in m_variables) { if(var is LocalVariableExpression) { var.Number = numLocal++; } else if(var is TemporaryVariableExpression) { var.Number = numTmp++; } else if(var is ExceptionObjectVariableExpression) { var.Number = numEx++; } } } //--// public static bool SameCompilationConstraints( CompilationConstraints[] ccArray1 , CompilationConstraints[] ccArray2 ) { int len1 = ccArray1.Length; int len2 = ccArray2.Length; if(len1 != len2) { return false; } for(int i = 0; i < len1; i++) { if(ccArray1[i] != ccArray2[i]) { return false; } } return true; } public static CompilationConstraints[] AddCompilationConstraint( CompilationConstraints[] ccArray , CompilationConstraints cc ) { int pos = 0; for(; pos < ccArray.Length; pos++) { CompilationConstraints cc2 = ccArray[pos]; if(cc2 == cc) { return ccArray; } if(cc2 > cc) { break; } } return ArrayUtility.InsertAtPositionOfNotNullArray( ccArray, pos, cc ); } public static CompilationConstraints[] RemoveCompilationConstraint( CompilationConstraints[] ccArray , CompilationConstraints cc ) { for(int pos = 0; pos < ccArray.Length; pos++) { CompilationConstraints cc2 = ccArray[pos]; if(cc2 == cc) { return ArrayUtility.RemoveAtPositionFromNotNullArray( ccArray, pos ); } if(cc2 > cc) { break; } } return ccArray; } public 
static bool HasCompilationConstraint( CompilationConstraints[] ccArray , CompilationConstraints cc ) { for(int pos = 0; pos < ccArray.Length; pos++) { CompilationConstraints cc2 = ccArray[pos]; if(cc2 == cc) { return true; } if(cc2 > cc) { break; } } return false; } public static bool HasAnyCompilationConstraint( CompilationConstraints[] ccTarget , params CompilationConstraints[] ccFilter ) { CompilationConstraints match; return HasAnyCompilationConstraint( ccTarget, out match, ccFilter ); } public static bool HasAnyCompilationConstraint( CompilationConstraints[] ccTarget , out CompilationConstraints match , params CompilationConstraints[] ccFilter ) { foreach(CompilationConstraints cc in ccFilter) { if(HasCompilationConstraint( ccTarget, cc )) { match = cc; return true; } } match = default(CompilationConstraints); return false; } //--// public static Operator CheckSingleUse( Operator[][] useChains , VariableExpression var ) { Operator[] uses = useChains[var.SpanningTreeIndex]; if(uses.Length == 1) { return uses[0]; } return null; } public static Operator CheckSingleDefinition( Operator[][] defChains , VariableExpression var ) { Operator[] defs = defChains[var.SpanningTreeIndex]; if(defs.Length == 1) { return defs[0]; } return null; } public static Operator CheckSingleDefinition( GrowOnlyHashTable< VariableExpression, Operator > defLookup , Expression ex ) { VariableExpression var = ex as VariableExpression; if(var != null) { Operator def; if(defLookup.TryGetValue( var, out def )) { return def; } } return null; } //--// // // Access Methods // public abstract TypeSystemForIR TypeSystemForIR { get; } public CompilationConstraints[] CompilationConstraintsArray { get { CompilationConstraints[] res = SharedEmptyCompilationConstraintsArray; MethodRepresentation.BuildTimeAttributes bta = m_md.BuildTimeFlags; if ((bta & MethodRepresentation.BuildTimeAttributes.CanAllocate ) != 0) res = AddCompilationConstraint( res, CompilationConstraints.Allocations_ON ); else if((bta & 
MethodRepresentation.BuildTimeAttributes.CannotAllocate ) != 0) res = AddCompilationConstraint( res, CompilationConstraints.Allocations_OFF ); if ((bta & MethodRepresentation.BuildTimeAttributes.StackAvailable ) != 0) res = AddCompilationConstraint( res, CompilationConstraints.StackAccess_ON ); else if((bta & MethodRepresentation.BuildTimeAttributes.StackNotAvailable ) != 0) res = AddCompilationConstraint( res, CompilationConstraints.StackAccess_OFF ); if ((bta & MethodRepresentation.BuildTimeAttributes.EnableBoundsChecks ) != 0) res = AddCompilationConstraint( res, CompilationConstraints.BoundsChecks_ON ); else if((bta & MethodRepresentation.BuildTimeAttributes.DisableBoundsChecks ) != 0) res = AddCompilationConstraint( res, CompilationConstraints.BoundsChecks_OFF ); else if((bta & MethodRepresentation.BuildTimeAttributes.DisableDeepBoundsChecks) != 0) res = AddCompilationConstraint( res, CompilationConstraints.BoundsChecks_OFF_DEEP ); if ((bta & MethodRepresentation.BuildTimeAttributes.EnableNullChecks ) != 0) res = AddCompilationConstraint( res, CompilationConstraints.NullChecks_ON ); else if((bta & MethodRepresentation.BuildTimeAttributes.DisableNullChecks ) != 0) res = AddCompilationConstraint( res, CompilationConstraints.NullChecks_OFF ); else if((bta & MethodRepresentation.BuildTimeAttributes.DisableDeepNullChecks ) != 0) res = AddCompilationConstraint( res, CompilationConstraints.NullChecks_OFF_DEEP ); return res; } } public MethodRepresentation Method { get { return m_md; } } public EntryBasicBlock EntryBasicBlock { get { return m_entryBasicBlock; } } public ExitBasicBlock ExitBasicBlock { get { return m_exitBasicBlock; } } public BasicBlock NormalizedEntryBasicBlock { get { return this.GetInjectionPoint( BasicBlock.Qualifier.EntryInjectionStart ); } } public BasicBlock NormalizedExitBasicBlock { get { return this.GetInjectionPoint( BasicBlock.Qualifier.EpilogueStart ); } } public VariableExpression ReturnValue { get { return m_returnValue; } } public 
VariableExpression[] Arguments { get { return m_arguments; } } public int Version { get { return m_version; } } //--// // // Debug Methods // public override string ToString() { return string.Format( "FlowGraph({0})", m_md.ToShortString() ); } public void Dump( IIntermediateRepresentationDumper dumper ) { dumper.DumpGraph( this ); } public abstract string ToPrettyString( Operator op ); } }
// // Copyright (c) 2004-2016 Jaroslaw Kowalski <jaak@jkowalski.net>, Kim Christensen, Julian Verdurmen // // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions // are met: // // * Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // // * Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // // * Neither the name of Jaroslaw Kowalski nor the names of its // contributors may be used to endorse or promote products derived from this // software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE // LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF // SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF // THE POSSIBILITY OF SUCH DAMAGE. 
// #if !SILVERLIGHT && !__IOS__ && !__ANDROID__ namespace NLog.Targets { using System; using System.Collections.Generic; using System.ComponentModel; using System.Configuration; using System.Data; using System.Data.Common; using System.Globalization; using System.Reflection; using System.Text; using System.Transactions; using NLog.Common; using NLog.Config; using NLog.Internal; using NLog.Layouts; using ConfigurationManager = System.Configuration.ConfigurationManager; /// <summary> /// Writes log messages to the database using an ADO.NET provider. /// </summary> /// <seealso href="https://github.com/nlog/nlog/wiki/Database-target">Documentation on NLog Wiki</seealso> /// <example> /// <para> /// The configuration is dependent on the database type, because /// there are differnet methods of specifying connection string, SQL /// command and command parameters. /// </para> /// <para>MS SQL Server using System.Data.SqlClient:</para> /// <code lang="XML" source="examples/targets/Configuration File/Database/MSSQL/NLog.config" height="450" /> /// <para>Oracle using System.Data.OracleClient:</para> /// <code lang="XML" source="examples/targets/Configuration File/Database/Oracle.Native/NLog.config" height="350" /> /// <para>Oracle using System.Data.OleDBClient:</para> /// <code lang="XML" source="examples/targets/Configuration File/Database/Oracle.OleDB/NLog.config" height="350" /> /// <para>To set up the log target programmatically use code like this (an equivalent of MSSQL configuration):</para> /// <code lang="C#" source="examples/targets/Configuration API/Database/MSSQL/Example.cs" height="630" /> /// </example> [Target("Database")] public sealed class DatabaseTarget : Target, IInstallable { private static Assembly systemDataAssembly = typeof(IDbConnection).Assembly; private IDbConnection activeConnection = null; private string activeConnectionString; /// <summary> /// Initializes a new instance of the <see cref="DatabaseTarget" /> class. 
/// </summary> public DatabaseTarget() { this.Parameters = new List<DatabaseParameterInfo>(); this.InstallDdlCommands = new List<DatabaseCommandInfo>(); this.UninstallDdlCommands = new List<DatabaseCommandInfo>(); this.DBProvider = "sqlserver"; this.DBHost = "."; this.ConnectionStringsSettings = ConfigurationManager.ConnectionStrings; this.CommandType = CommandType.Text; } /// <summary> /// Initializes a new instance of the <see cref="DatabaseTarget" /> class. /// </summary> /// <param name="name">Name of the target.</param> public DatabaseTarget(string name) : this() { this.Name = name; } /// <summary> /// Gets or sets the name of the database provider. /// </summary> /// <remarks> /// <para> /// The parameter name should be a provider invariant name as registered in machine.config or app.config. Common values are: /// </para> /// <ul> /// <li><c>System.Data.SqlClient</c> - <see href="http://msdn.microsoft.com/en-us/library/system.data.sqlclient.aspx">SQL Sever Client</see></li> /// <li><c>System.Data.SqlServerCe.3.5</c> - <see href="http://www.microsoft.com/sqlserver/2005/en/us/compact.aspx">SQL Sever Compact 3.5</see></li> /// <li><c>System.Data.OracleClient</c> - <see href="http://msdn.microsoft.com/en-us/library/system.data.oracleclient.aspx">Oracle Client from Microsoft</see> (deprecated in .NET Framework 4)</li> /// <li><c>Oracle.DataAccess.Client</c> - <see href="http://www.oracle.com/technology/tech/windows/odpnet/index.html">ODP.NET provider from Oracle</see></li> /// <li><c>System.Data.SQLite</c> - <see href="http://sqlite.phxsoftware.com/">System.Data.SQLite driver for SQLite</see></li> /// <li><c>Npgsql</c> - <see href="http://npgsql.projects.postgresql.org/">Npgsql driver for PostgreSQL</see></li> /// <li><c>MySql.Data.MySqlClient</c> - <see href="http://www.mysql.com/downloads/connector/net/">MySQL Connector/Net</see></li> /// </ul> /// <para>(Note that provider invariant names are not supported on .NET Compact Framework).</para> /// <para> /// 
Alternatively the parameter value can be be a fully qualified name of the provider /// connection type (class implementing <see cref="IDbConnection" />) or one of the following tokens: /// </para> /// <ul> /// <li><c>sqlserver</c>, <c>mssql</c>, <c>microsoft</c> or <c>msde</c> - SQL Server Data Provider</li> /// <li><c>oledb</c> - OLEDB Data Provider</li> /// <li><c>odbc</c> - ODBC Data Provider</li> /// </ul> /// </remarks> /// <docgen category='Connection Options' order='10' /> [RequiredParameter] [DefaultValue("sqlserver")] public string DBProvider { get; set; } /// <summary> /// Gets or sets the name of the connection string (as specified in <see href="http://msdn.microsoft.com/en-us/library/bf7sd233.aspx">&lt;connectionStrings&gt; configuration section</see>. /// </summary> /// <docgen category='Connection Options' order='10' /> public string ConnectionStringName { get; set; } /// <summary> /// Gets or sets the connection string. When provided, it overrides the values /// specified in DBHost, DBUserName, DBPassword, DBDatabase. /// </summary> /// <docgen category='Connection Options' order='10' /> public Layout ConnectionString { get; set; } /// <summary> /// Gets or sets the connection string using for installation and uninstallation. If not provided, regular ConnectionString is being used. /// </summary> /// <docgen category='Installation Options' order='10' /> public Layout InstallConnectionString { get; set; } /// <summary> /// Gets the installation DDL commands. /// </summary> /// <docgen category='Installation Options' order='10' /> [ArrayParameter(typeof(DatabaseCommandInfo), "install-command")] public IList<DatabaseCommandInfo> InstallDdlCommands { get; private set; } /// <summary> /// Gets the uninstallation DDL commands. 
/// </summary> /// <docgen category='Installation Options' order='10' /> [ArrayParameter(typeof(DatabaseCommandInfo), "uninstall-command")] public IList<DatabaseCommandInfo> UninstallDdlCommands { get; private set; } /// <summary> /// Gets or sets a value indicating whether to keep the /// database connection open between the log events. /// </summary> /// <docgen category='Connection Options' order='10' /> [DefaultValue(false)] public bool KeepConnection { get; set; } /// <summary> /// Obsolete - value will be ignored! The logging code always runs outside of transaction. /// /// Gets or sets a value indicating whether to use database transactions. /// Some data providers require this. /// </summary> /// <docgen category='Connection Options' order='10' /> /// <remarks> /// This option was removed in NLog 4.0 because the logging code always runs outside of transaction. /// This ensures that the log gets written to the database if you rollback the main transaction because of an error and want to log the error. /// </remarks> [Obsolete("Obsolete - value will be ignored - logging code always runs outside of transaction. Will be removed in NLog 6.")] public bool? UseTransactions { get; set; } /// <summary> /// Gets or sets the database host name. If the ConnectionString is not provided /// this value will be used to construct the "Server=" part of the /// connection string. /// </summary> /// <docgen category='Connection Options' order='10' /> public Layout DBHost { get; set; } /// <summary> /// Gets or sets the database user name. If the ConnectionString is not provided /// this value will be used to construct the "User ID=" part of the /// connection string. /// </summary> /// <docgen category='Connection Options' order='10' /> public Layout DBUserName { get; set; } /// <summary> /// Gets or sets the database password. If the ConnectionString is not provided /// this value will be used to construct the "Password=" part of the /// connection string. 
/// </summary> /// <docgen category='Connection Options' order='10' /> public Layout DBPassword { get; set; } /// <summary> /// Gets or sets the database name. If the ConnectionString is not provided /// this value will be used to construct the "Database=" part of the /// connection string. /// </summary> /// <docgen category='Connection Options' order='10' /> public Layout DBDatabase { get; set; } /// <summary> /// Gets or sets the text of the SQL command to be run on each log level. /// </summary> /// <remarks> /// Typically this is a SQL INSERT statement or a stored procedure call. /// It should use the database-specific parameters (marked as <c>@parameter</c> /// for SQL server or <c>:parameter</c> for Oracle, other data providers /// have their own notation) and not the layout renderers, /// because the latter is prone to SQL injection attacks. /// The layout renderers should be specified as &lt;parameter /&gt; elements instead. /// </remarks> /// <docgen category='SQL Statement' order='10' /> [RequiredParameter] public Layout CommandText { get; set; } /// <summary> /// Gets or sets the type of the SQL command to be run on each log level. /// </summary> /// <remarks> /// This specifies how the command text is interpreted, as "Text" (default) or as "StoredProcedure". /// When using the value StoredProcedure, the commandText-property would /// normally be the name of the stored procedure. TableDirect method is not supported in this context. /// </remarks> /// <docgen category='SQL Statement' order='11' /> [DefaultValue(CommandType.Text)] public CommandType CommandType { get; set; } /// <summary> /// Gets the collection of parameters. Each parameter contains a mapping /// between NLog layout and a database named or positional parameter. 
/// </summary> /// <docgen category='SQL Statement' order='12' /> [ArrayParameter(typeof(DatabaseParameterInfo), "parameter")] public IList<DatabaseParameterInfo> Parameters { get; private set; } internal DbProviderFactory ProviderFactory { get; set; } // this is so we can mock the connection string without creating sub-processes internal ConnectionStringSettingsCollection ConnectionStringsSettings { get; set; } internal Type ConnectionType { get; set; } /// <summary> /// Performs installation which requires administrative permissions. /// </summary> /// <param name="installationContext">The installation context.</param> public void Install(InstallationContext installationContext) { this.RunInstallCommands(installationContext, this.InstallDdlCommands); } /// <summary> /// Performs uninstallation which requires administrative permissions. /// </summary> /// <param name="installationContext">The installation context.</param> public void Uninstall(InstallationContext installationContext) { this.RunInstallCommands(installationContext, this.UninstallDdlCommands); } /// <summary> /// Determines whether the item is installed. /// </summary> /// <param name="installationContext">The installation context.</param> /// <returns> /// Value indicating whether the item is installed or null if it is not possible to determine. /// </returns> public bool? IsInstalled(InstallationContext installationContext) { return null; } internal IDbConnection OpenConnection(string connectionString) { IDbConnection connection; if (this.ProviderFactory != null) { connection = this.ProviderFactory.CreateConnection(); } else { connection = (IDbConnection)Activator.CreateInstance(this.ConnectionType); } connection.ConnectionString = connectionString; connection.Open(); return connection; } /// <summary> /// Initializes the target. Can be used by inheriting classes /// to initialize logging. 
/// </summary> [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Naming", "CA2204:Literals should be spelled correctly", MessageId = "connectionStrings", Justification = "Name of the config file section.")] protected override void InitializeTarget() { base.InitializeTarget(); #pragma warning disable 618 if (UseTransactions.HasValue) #pragma warning restore 618 { InternalLogger.Warn("UseTransactions is obsolete and will not be used - will be removed in NLog 6"); } bool foundProvider = false; if (!string.IsNullOrEmpty(this.ConnectionStringName)) { // read connection string and provider factory from the configuration file var cs = this.ConnectionStringsSettings[this.ConnectionStringName]; if (cs == null) { throw new NLogConfigurationException("Connection string '" + this.ConnectionStringName + "' is not declared in <connectionStrings /> section."); } this.ConnectionString = SimpleLayout.Escape(cs.ConnectionString); if (!string.IsNullOrEmpty(cs.ProviderName)) { this.ProviderFactory = DbProviderFactories.GetFactory(cs.ProviderName); foundProvider = true; } } if (!foundProvider) { foreach (DataRow row in DbProviderFactories.GetFactoryClasses().Rows) { var invariantname = (string)row["InvariantName"]; if (invariantname == this.DBProvider) { this.ProviderFactory = DbProviderFactories.GetFactory(this.DBProvider); foundProvider = true; break; } } } if (!foundProvider) { switch (this.DBProvider.ToUpper(CultureInfo.InvariantCulture)) { case "SQLSERVER": case "MSSQL": case "MICROSOFT": case "MSDE": this.ConnectionType = systemDataAssembly.GetType("System.Data.SqlClient.SqlConnection", true); break; case "OLEDB": this.ConnectionType = systemDataAssembly.GetType("System.Data.OleDb.OleDbConnection", true); break; case "ODBC": this.ConnectionType = systemDataAssembly.GetType("System.Data.Odbc.OdbcConnection", true); break; default: this.ConnectionType = Type.GetType(this.DBProvider, true); break; } } } /// <summary> /// Closes the target and releases any unmanaged 
resources. /// </summary> protected override void CloseTarget() { base.CloseTarget(); InternalLogger.Trace("DatabaseTarget: close connection because of CloseTarget"); this.CloseConnection(); } /// <summary> /// Writes the specified logging event to the database. It creates /// a new database command, prepares parameters for it by calculating /// layouts and executes the command. /// </summary> /// <param name="logEvent">The logging event.</param> protected override void Write(LogEventInfo logEvent) { try { this.WriteEventToDatabase(logEvent); } catch (Exception exception) { InternalLogger.Error(exception, "Error when writing to database."); if (exception.MustBeRethrownImmediately()) { throw; } InternalLogger.Trace("DatabaseTarget: close connection because of error"); this.CloseConnection(); throw; } finally { if (!this.KeepConnection) { InternalLogger.Trace("DatabaseTarget: close connection (KeepConnection = false)."); this.CloseConnection(); } } } /// <summary> /// Writes an array of logging events to the log target. By default it iterates on all /// events and passes them to "Write" method. Inheriting classes can use this method to /// optimize batch writes. 
/// </summary> /// <param name="logEvents">Logging events to be written out.</param> protected override void Write(ArraySegment<AsyncLogEventInfo> logEvents) { var buckets = SortHelpers.BucketSort(logEvents, l => l.LogEvent!=null, c => this.BuildConnectionString(c.LogEvent)); try { foreach (var kvp in buckets) { foreach (AsyncLogEventInfo ev in kvp.Value) { try { this.WriteEventToDatabase(ev.LogEvent); ev.Continuation(null); } catch (Exception exception) { // in case of exception, close the connection and report it InternalLogger.Error(exception, "Error when writing to database."); if (exception.MustBeRethrownImmediately()) { throw; } InternalLogger.Trace("DatabaseTarget: close connection because of exception"); this.CloseConnection(); ev.Continuation(exception); if (exception.MustBeRethrown()) { throw; } } } } } finally { if (!this.KeepConnection) { InternalLogger.Trace("DatabaseTarget: close connection because of KeepConnection=false"); this.CloseConnection(); } } } [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Security", "CA2100:Review SQL queries for security vulnerabilities", Justification = "It's up to the user to ensure proper quoting.")] private void WriteEventToDatabase(LogEventInfo logEvent) { //Always suppress transaction so that the caller does not rollback loggin if they are rolling back their transaction. 
using (TransactionScope transactionScope = new TransactionScope(TransactionScopeOption.Suppress)) { this.EnsureConnectionOpen(this.BuildConnectionString(logEvent)); IDbCommand command = this.activeConnection.CreateCommand(); command.CommandText = this.CommandText.Render(logEvent); command.CommandType = this.CommandType; InternalLogger.Trace("Executing {0}: {1}", command.CommandType, command.CommandText); foreach (DatabaseParameterInfo par in this.Parameters) { IDbDataParameter p = command.CreateParameter(); p.Direction = ParameterDirection.Input; if (par.Name != null) { p.ParameterName = par.Name; } if (par.Size != 0) { p.Size = par.Size; } if (par.Precision != 0) { p.Precision = par.Precision; } if (par.Scale != 0) { p.Scale = par.Scale; } string stringValue = par.Layout.Render(logEvent); p.Value = stringValue; command.Parameters.Add(p); InternalLogger.Trace(" Parameter: '{0}' = '{1}' ({2})", p.ParameterName, p.Value, p.DbType); } int result = command.ExecuteNonQuery(); InternalLogger.Trace("Finished execution, result = {0}", result); //not really needed as there is no transaction at all. 
transactionScope.Complete(); } } private string BuildConnectionString(LogEventInfo logEvent) { if (this.ConnectionString != null) { return this.ConnectionString.Render(logEvent); } var sb = new StringBuilder(); sb.Append("Server="); sb.Append(this.DBHost.Render(logEvent)); sb.Append(";"); if (this.DBUserName == null) { sb.Append("Trusted_Connection=SSPI;"); } else { sb.Append("User id="); sb.Append(this.DBUserName.Render(logEvent)); sb.Append(";Password="); sb.Append(this.DBPassword.Render(logEvent)); sb.Append(";"); } if (this.DBDatabase != null) { sb.Append("Database="); sb.Append(this.DBDatabase.Render(logEvent)); } return sb.ToString(); } private void EnsureConnectionOpen(string connectionString) { if (this.activeConnection != null) { if (this.activeConnectionString != connectionString) { InternalLogger.Trace("DatabaseTarget: close connection because of opening new."); this.CloseConnection(); } } if (this.activeConnection != null) { return; } InternalLogger.Trace("DatabaseTarget: open connection."); this.activeConnection = this.OpenConnection(connectionString); this.activeConnectionString = connectionString; } private void CloseConnection() { if (this.activeConnection != null) { this.activeConnection.Close(); this.activeConnection.Dispose(); this.activeConnection = null; this.activeConnectionString = null; } } [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Security", "CA2100:Review SQL queries for security vulnerabilities", Justification = "It's up to the user to ensure proper quoting.")] private void RunInstallCommands(InstallationContext installationContext, IEnumerable<DatabaseCommandInfo> commands) { // create log event that will be used to render all layouts LogEventInfo logEvent = installationContext.CreateLogEvent(); try { foreach (var commandInfo in commands) { string cs; if (commandInfo.ConnectionString != null) { // if there is connection string specified on the command info, use it cs = commandInfo.ConnectionString.Render(logEvent); } 
else if (this.InstallConnectionString != null) { // next, try InstallConnectionString cs = this.InstallConnectionString.Render(logEvent); } else { // if it's not defined, fall back to regular connection string cs = this.BuildConnectionString(logEvent); } this.EnsureConnectionOpen(cs); var command = this.activeConnection.CreateCommand(); command.CommandType = commandInfo.CommandType; command.CommandText = commandInfo.Text.Render(logEvent); try { installationContext.Trace("Executing {0} '{1}'", command.CommandType, command.CommandText); command.ExecuteNonQuery(); } catch (Exception exception) { if (exception.MustBeRethrownImmediately()) { throw; } if (commandInfo.IgnoreFailures || installationContext.IgnoreFailures) { installationContext.Warning(exception.Message); } else { installationContext.Error(exception.Message); throw; } } } } finally { InternalLogger.Trace("DatabaseTarget: close connection after install."); this.CloseConnection(); } } } } #endif
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System;
using System.Collections.Generic;
using System.Diagnostics;
using Microsoft.CodeAnalysis.CSharp.Syntax;

namespace Microsoft.CodeAnalysis.CSharp.EditAndContinue
{
    /// <summary>
    /// Compares two C# syntax trees at the declaration ("top") level for Edit and Continue.
    /// Only the member-declaration kinds enumerated in <see cref="Label"/> participate in
    /// matching; every other node kind is classified as <see cref="Label.Ignored"/> and
    /// treated as a leaf, so statement-level content inside bodies is never traversed here.
    /// </summary>
    internal sealed class TopSyntaxComparer : SyntaxComparer
    {
        // The comparer is stateless, so a single shared instance is sufficient.
        internal static readonly TopSyntaxComparer Instance = new TopSyntaxComparer();

        private TopSyntaxComparer()
        {
        }

        #region Tree Traversal

        // Reports the parent of a node; returns false at the tree root (parent is null).
        protected internal override bool TryGetParent(SyntaxNode node, out SyntaxNode parent)
        {
            var parentNode = node.Parent;
            parent = parentNode;
            return parentNode != null;
        }

        // Returns the labeled children of a node, or null for leaves.
        protected internal override IEnumerable<SyntaxNode> GetChildren(SyntaxNode node)
        {
            Debug.Assert(GetLabel(node) != IgnoredNode);
            return HasChildren(node) ? EnumerateChildren(node) : null;
        }

        // Lazily yields the immediate child *nodes* (tokens are skipped) that carry a label.
        private IEnumerable<SyntaxNode> EnumerateChildren(SyntaxNode node)
        {
            foreach (var child in node.ChildNodesAndTokens())
            {
                var childNode = child.AsNode();
                if (childNode != null && GetLabel(childNode) != IgnoredNode)
                {
                    yield return childNode;
                }
            }
        }

        // Yields all labeled descendant nodes; descent stops at leaves (see HasChildren)
        // and never enters trivia.
        protected internal override IEnumerable<SyntaxNode> GetDescendants(SyntaxNode node)
        {
            foreach (var descendant in node.DescendantNodesAndTokens(descendIntoChildren: HasChildren, descendIntoTrivia: false))
            {
                var descendantNode = descendant.AsNode();
                if (descendantNode != null && GetLabel(descendantNode) != IgnoredNode)
                {
                    yield return descendantNode;
                }
            }
        }

        private static bool HasChildren(SyntaxNode node)
        {
            // Leaves are labeled statements that don't have a labeled child.
            // We also return true for non-labeled statements.
            Label label = Classify(node.Kind(), out var isLeaf, ignoreVariableDeclarations: false);

            // ignored should always be reported as leaves
            Debug.Assert(label != Label.Ignored || isLeaf);

            return !isLeaf;
        }

        #endregion

        #region Labels

        // Assumptions:
        // - Each listed label corresponds to one or more syntax kinds.
        // - Nodes with same labels might produce Update edits, nodes with different labels don't.
        // - If IsTiedToParent(label) is true for a label then all its possible parent labels must precede the label.
        //   (i.e. both MethodDeclaration and TypeDeclaration must precede TypeParameter label).
        // - All descendants of a node whose kind is listed here will be ignored regardless of their labels
        internal enum Label
        {
            CompilationUnit,
            NamespaceDeclaration,
            ExternAliasDirective,             // tied to parent
            UsingDirective,                   // tied to parent

            TypeDeclaration,
            EnumDeclaration,
            DelegateDeclaration,

            FieldDeclaration,                 // tied to parent
            FieldVariableDeclaration,         // tied to parent
            FieldVariableDeclarator,          // tied to parent

            MethodDeclaration,                // tied to parent
            OperatorDeclaration,              // tied to parent
            ConversionOperatorDeclaration,    // tied to parent
            ConstructorDeclaration,           // tied to parent
            DestructorDeclaration,            // tied to parent
            PropertyDeclaration,              // tied to parent
            IndexerDeclaration,               // tied to parent
            EventDeclaration,                 // tied to parent
            EnumMemberDeclaration,            // tied to parent

            AccessorList,                     // tied to parent
            AccessorDeclaration,              // tied to parent
            TypeParameterList,                // tied to parent
            TypeParameterConstraintClause,    // tied to parent
            TypeParameter,                    // tied to parent
            ParameterList,                    // tied to parent
            BracketedParameterList,           // tied to parent
            Parameter,                        // tied to parent
            AttributeList,                    // tied to parent
            Attribute,                        // tied to parent

            // helpers:
            Count,
            Ignored = IgnoredNode
        }

        /// <summary>
        /// Return 1 if it is desirable to report two edits (delete and insert) rather than a move edit
        /// when the node changes its parent.
        /// </summary>
        private static int TiedToAncestor(Label label)
        {
            switch (label)
            {
                case Label.ExternAliasDirective:
                case Label.UsingDirective:
                case Label.FieldDeclaration:
                case Label.FieldVariableDeclaration:
                case Label.FieldVariableDeclarator:
                case Label.MethodDeclaration:
                case Label.OperatorDeclaration:
                case Label.ConversionOperatorDeclaration:
                case Label.ConstructorDeclaration:
                case Label.DestructorDeclaration:
                case Label.PropertyDeclaration:
                case Label.IndexerDeclaration:
                case Label.EventDeclaration:
                case Label.EnumMemberDeclaration:
                case Label.AccessorDeclaration:
                case Label.AccessorList:
                case Label.TypeParameterList:
                case Label.TypeParameter:
                case Label.TypeParameterConstraintClause:
                case Label.ParameterList:
                case Label.BracketedParameterList:
                case Label.Parameter:
                case Label.AttributeList:
                case Label.Attribute:
                    return 1;

                default:
                    return 0;
            }
        }

        // Maps a syntax kind to its label and reports whether the node is a traversal leaf.
        // When ignoreVariableDeclarations is true, (field) variable declarations/declarators
        // are classified as Ignored (used when comparing method bodies; see ValuesEqual).
        // internal for testing
        internal static Label Classify(SyntaxKind kind, out bool isLeaf, bool ignoreVariableDeclarations)
        {
            switch (kind)
            {
                case SyntaxKind.CompilationUnit:
                    isLeaf = false;
                    return Label.CompilationUnit;

                case SyntaxKind.GlobalStatement:
                    // TODO:
                    isLeaf = true;
                    return Label.Ignored;

                case SyntaxKind.ExternAliasDirective:
                    isLeaf = true;
                    return Label.ExternAliasDirective;

                case SyntaxKind.UsingDirective:
                    isLeaf = true;
                    return Label.UsingDirective;

                case SyntaxKind.NamespaceDeclaration:
                    isLeaf = false;
                    return Label.NamespaceDeclaration;

                case SyntaxKind.ClassDeclaration:
                case SyntaxKind.StructDeclaration:
                case SyntaxKind.InterfaceDeclaration:
                    isLeaf = false;
                    return Label.TypeDeclaration;

                case SyntaxKind.EnumDeclaration:
                    isLeaf = false;
                    return Label.EnumDeclaration;

                case SyntaxKind.DelegateDeclaration:
                    isLeaf = false;
                    return Label.DelegateDeclaration;

                case SyntaxKind.FieldDeclaration:
                case SyntaxKind.EventFieldDeclaration:
                    isLeaf = false;
                    return Label.FieldDeclaration;

                case SyntaxKind.VariableDeclaration:
                    isLeaf = ignoreVariableDeclarations;
                    return ignoreVariableDeclarations ? Label.Ignored : Label.FieldVariableDeclaration;

                case SyntaxKind.VariableDeclarator:
                    isLeaf = true;
                    return ignoreVariableDeclarations ? Label.Ignored : Label.FieldVariableDeclarator;

                case SyntaxKind.MethodDeclaration:
                    isLeaf = false;
                    return Label.MethodDeclaration;

                case SyntaxKind.ConversionOperatorDeclaration:
                    isLeaf = false;
                    return Label.ConversionOperatorDeclaration;

                case SyntaxKind.OperatorDeclaration:
                    isLeaf = false;
                    return Label.OperatorDeclaration;

                case SyntaxKind.ConstructorDeclaration:
                    isLeaf = false;
                    return Label.ConstructorDeclaration;

                case SyntaxKind.DestructorDeclaration:
                    isLeaf = true;
                    return Label.DestructorDeclaration;

                case SyntaxKind.PropertyDeclaration:
                    isLeaf = false;
                    return Label.PropertyDeclaration;

                case SyntaxKind.IndexerDeclaration:
                    isLeaf = false;
                    return Label.IndexerDeclaration;

                case SyntaxKind.EventDeclaration:
                    isLeaf = false;
                    return Label.EventDeclaration;

                case SyntaxKind.EnumMemberDeclaration:
                    isLeaf = false; // attribute may be applied
                    return Label.EnumMemberDeclaration;

                case SyntaxKind.AccessorList:
                    isLeaf = false;
                    return Label.AccessorList;

                case SyntaxKind.GetAccessorDeclaration:
                case SyntaxKind.SetAccessorDeclaration:
                case SyntaxKind.AddAccessorDeclaration:
                case SyntaxKind.RemoveAccessorDeclaration:
                    isLeaf = true;
                    return Label.AccessorDeclaration;

                case SyntaxKind.TypeParameterList:
                    isLeaf = false;
                    return Label.TypeParameterList;

                case SyntaxKind.TypeParameterConstraintClause:
                    isLeaf = false;
                    return Label.TypeParameterConstraintClause;

                case SyntaxKind.TypeParameter:
                    isLeaf = false; // children: attributes
                    return Label.TypeParameter;

                case SyntaxKind.ParameterList:
                    isLeaf = false;
                    return Label.ParameterList;

                case SyntaxKind.BracketedParameterList:
                    isLeaf = false;
                    return Label.BracketedParameterList;

                case SyntaxKind.Parameter:
                    // We ignore anonymous methods and lambdas,
                    // we only care about parameters of member declarations.
                    isLeaf = false; // children: attributes
                    return Label.Parameter;

                case SyntaxKind.AttributeList:
                    isLeaf = false;
                    return Label.AttributeList;

                case SyntaxKind.Attribute:
                    isLeaf = true;
                    return Label.Attribute;

                default:
                    isLeaf = true;
                    return Label.Ignored;
            }
        }

        protected internal override int GetLabel(SyntaxNode node)
        {
            return (int)GetLabel(node.Kind());
        }

        // Convenience overload; the leaf flag is discarded.
        internal static Label GetLabel(SyntaxKind kind)
        {
            return Classify(kind, out var isLeaf, ignoreVariableDeclarations: false);
        }

        // internal for testing
        internal static bool HasLabel(SyntaxKind kind, bool ignoreVariableDeclarations)
        {
            return Classify(kind, out var isLeaf, ignoreVariableDeclarations) != Label.Ignored;
        }

        protected internal override int LabelCount
        {
            get { return (int)Label.Count; }
        }

        protected internal override int TiedToAncestor(int label)
        {
            return TiedToAncestor((Label)label);
        }

        #endregion

        #region Comparisons

        // Structural equivalence of two declarations with the same label.
        public override bool ValuesEqual(SyntaxNode left, SyntaxNode right)
        {
            Func<SyntaxKind, bool> ignoreChildFunction;
            switch (left.Kind())
            {
                // all syntax kinds with a method body child:
                case SyntaxKind.MethodDeclaration:
                case SyntaxKind.ConversionOperatorDeclaration:
                case SyntaxKind.OperatorDeclaration:
                case SyntaxKind.ConstructorDeclaration:
                case SyntaxKind.DestructorDeclaration:
                case SyntaxKind.GetAccessorDeclaration:
                case SyntaxKind.SetAccessorDeclaration:
                case SyntaxKind.AddAccessorDeclaration:
                case SyntaxKind.RemoveAccessorDeclaration:
                    // When comparing method bodies we need to NOT ignore VariableDeclaration and VariableDeclarator children,
                    // but when comparing field definitions we should ignore VariableDeclarations children.
                    ignoreChildFunction = childKind => HasLabel(childKind, ignoreVariableDeclarations: true);
                    break;

                default:
                    if (HasChildren(left))
                    {
                        ignoreChildFunction = childKind => HasLabel(childKind, ignoreVariableDeclarations: false);
                    }
                    else
                    {
                        ignoreChildFunction = null;
                    }

                    break;
            }

            return SyntaxFactory.AreEquivalent(left, right, ignoreChildFunction);
        }

        // Distance between two declarations is the distance between their names; nodes whose
        // kind has no name (see TryGetName) get no weighted distance.
        protected override bool TryComputeWeightedDistance(SyntaxNode leftNode, SyntaxNode rightNode, out double distance)
        {
            SyntaxNodeOrToken? leftName = TryGetName(leftNode);
            SyntaxNodeOrToken? rightName = TryGetName(rightNode);
            Debug.Assert(rightName.HasValue == leftName.HasValue);

            if (leftName.HasValue)
            {
                distance = ComputeDistance(leftName.Value, rightName.Value);
                return true;
            }
            else
            {
                distance = 0;
                return false;
            }
        }

        // Returns the identifying name node/token for a declaration, or null for kinds that
        // have no usable name (field lists, indexers, accessors, parameter lists, ...).
        private static SyntaxNodeOrToken? TryGetName(SyntaxNode node)
        {
            switch (node.Kind())
            {
                case SyntaxKind.ExternAliasDirective:
                    return ((ExternAliasDirectiveSyntax)node).Identifier;

                case SyntaxKind.UsingDirective:
                    return ((UsingDirectiveSyntax)node).Name;

                case SyntaxKind.NamespaceDeclaration:
                    return ((NamespaceDeclarationSyntax)node).Name;

                case SyntaxKind.ClassDeclaration:
                case SyntaxKind.StructDeclaration:
                case SyntaxKind.InterfaceDeclaration:
                    return ((TypeDeclarationSyntax)node).Identifier;

                case SyntaxKind.EnumDeclaration:
                    return ((EnumDeclarationSyntax)node).Identifier;

                case SyntaxKind.DelegateDeclaration:
                    return ((DelegateDeclarationSyntax)node).Identifier;

                case SyntaxKind.FieldDeclaration:
                case SyntaxKind.EventFieldDeclaration:
                case SyntaxKind.VariableDeclaration:
                    return null;

                case SyntaxKind.VariableDeclarator:
                    return ((VariableDeclaratorSyntax)node).Identifier;

                case SyntaxKind.MethodDeclaration:
                    return ((MethodDeclarationSyntax)node).Identifier;

                case SyntaxKind.ConversionOperatorDeclaration:
                    return ((ConversionOperatorDeclarationSyntax)node).Type;

                case SyntaxKind.OperatorDeclaration:
                    return ((OperatorDeclarationSyntax)node).OperatorToken;

                case SyntaxKind.ConstructorDeclaration:
                    return ((ConstructorDeclarationSyntax)node).Identifier;

                case SyntaxKind.DestructorDeclaration:
                    return ((DestructorDeclarationSyntax)node).Identifier;

                case SyntaxKind.PropertyDeclaration:
                    return ((PropertyDeclarationSyntax)node).Identifier;

                case SyntaxKind.IndexerDeclaration:
                    return null;

                case SyntaxKind.EventDeclaration:
                    return ((EventDeclarationSyntax)node).Identifier;

                case SyntaxKind.EnumMemberDeclaration:
                    return ((EnumMemberDeclarationSyntax)node).Identifier;

                case SyntaxKind.GetAccessorDeclaration:
                case SyntaxKind.SetAccessorDeclaration:
                    return null;

                case SyntaxKind.TypeParameterConstraintClause:
                    return ((TypeParameterConstraintClauseSyntax)node).Name.Identifier;

                case SyntaxKind.TypeParameter:
                    return ((TypeParameterSyntax)node).Identifier;

                case SyntaxKind.TypeParameterList:
                case SyntaxKind.ParameterList:
                case SyntaxKind.BracketedParameterList:
                    return null;

                case SyntaxKind.Parameter:
                    return ((ParameterSyntax)node).Identifier;

                case SyntaxKind.AttributeList:
                    return ((AttributeListSyntax)node).Target;

                case SyntaxKind.Attribute:
                    return ((AttributeSyntax)node).Name;

                default:
                    return null;
            }
        }

        #endregion
    }
}
// $ANTLR 3.1.2 BuildOptions\\ProfileTreeGrammar.g3 2009-09-30 13:18:19
//
// NOTE(review): ANTLR-generated profiling tree parser — regenerate from the grammar rather
// than hand-editing where possible. The only hand-applied fix below replaces the Java
// keyword 'super' (wrongly emitted into this snapshot) with the C# keyword 'base', without
// which the file does not compile.

// The variable 'variable' is assigned but its value is never used.
#pragma warning disable 219
// Unreachable code detected.
#pragma warning disable 162

//import java.util.Map;
//import java.util.HashMap;
using BigInteger = java.math.BigInteger;
using Console = System.Console;

using System.Collections.Generic;
using Antlr.Runtime;
using Antlr.Runtime.Tree;
using RewriteRuleITokenStream = Antlr.Runtime.Tree.RewriteRuleTokenStream;
using Stack = System.Collections.Generic.Stack<object>;
using List = System.Collections.IList;
using ArrayList = System.Collections.Generic.List<object>;

using Antlr.Runtime.Debug;
using IOException = System.IO.IOException;

[System.CodeDom.Compiler.GeneratedCode("ANTLR", "3.1.2")]
[System.CLSCompliant(false)]
public partial class ProfileTreeGrammar : DebugTreeParser
{
    internal static readonly string[] tokenNames = new string[] {
        "<invalid>", "<EOR>", "<DOWN>", "<UP>", "CALL", "FUNC", "ID", "INT", "NEWLINE", "WS", "'-'", "'%'", "'('", "')'", "'*'", "'/'", "'+'", "'='"
    };
    public const int EOF = -1;
    public const int T__10 = 10;
    public const int T__11 = 11;
    public const int T__12 = 12;
    public const int T__13 = 13;
    public const int T__14 = 14;
    public const int T__15 = 15;
    public const int T__16 = 16;
    public const int T__17 = 17;
    public const int CALL = 4;
    public const int FUNC = 5;
    public const int ID = 6;
    public const int INT = 7;
    public const int NEWLINE = 8;
    public const int WS = 9;

    // delegates
    // delegators

    public static readonly string[] ruleNames = new string[] {
        "invalidRule", "call", "expr", "prog", "stat"
    };

    // Current rule nesting depth; profiling starts at 0 and terminates when it returns to 0.
    int ruleLevel = 0;
    public virtual int RuleLevel { get { return ruleLevel; } }
    public virtual void IncRuleLevel() { ruleLevel++; }
    public virtual void DecRuleLevel() { ruleLevel--; }

    public ProfileTreeGrammar( ITreeNodeStream input )
        : this( input, new Profiler(null), new RecognizerSharedState() )
    {
    }
    public ProfileTreeGrammar( ITreeNodeStream input, IDebugEventListener dbg, RecognizerSharedState state )
        : base( input, dbg, state )
    {
        Profiler p = (Profiler)dbg;
        p.setParser(this);
    }
    public ProfileTreeGrammar( ITreeNodeStream input, IDebugEventListener dbg )
        : base( input, dbg, new RecognizerSharedState() )
    {
        Profiler p = (Profiler)dbg;
        p.setParser(this);
    }

    public virtual bool AlreadyParsedRule( IIntStream input, int ruleIndex )
    {
        ((Profiler)dbg).ExamineRuleMemoization(input, ruleIndex, ProfileTreeGrammar.ruleNames[ruleIndex]);
        // FIX: was 'super.AlreadyParsedRule(...)' — 'super' is not a C# keyword; the
        // base-class implementation must be invoked via 'base'.
        return base.AlreadyParsedRule(input, ruleIndex);
    }

    public virtual void Memoize( IIntStream input, int ruleIndex, int ruleStartIndex )
    {
        ((Profiler)dbg).Memoize(input, ruleIndex, ruleStartIndex, ProfileTreeGrammar.ruleNames[ruleIndex]);
        // FIX: was 'super.Memoize(...)' — see AlreadyParsedRule above.
        base.Memoize(input, ruleIndex, ruleStartIndex);
    }

    protected virtual bool EvalPredicate( bool result, string predicate )
    {
        dbg.SemanticPredicate( result, predicate );
        return result;
    }

    public override string[] TokenNames { get { return ProfileTreeGrammar.tokenNames; } }
    public override string GrammarFileName { get { return "BuildOptions\\ProfileTreeGrammar.g3"; } }

    #region Rules

    // $ANTLR start "prog"
    // BuildOptions\\ProfileTreeGrammar.g3:53:0: prog : ( stat )* ;
    private void prog(  )
    {
        try
        {
            dbg.EnterRule( GrammarFileName, "prog" );
            if ( RuleLevel == 0 )
            {
                dbg.Commence();
            }
            IncRuleLevel();
            dbg.Location( 53, -1 );

            try
            {
                // BuildOptions\\ProfileTreeGrammar.g3:53:9: ( ( stat )* )
                dbg.EnterAlt( 1 );

                // BuildOptions\\ProfileTreeGrammar.g3:53:9: ( stat )*
                {
                    dbg.Location( 53, 8 );
                    // BuildOptions\\ProfileTreeGrammar.g3:53:9: ( stat )*
                    try
                    {
                        dbg.EnterSubRule( 1 );

                        for ( ; ; )
                        {
                            int alt1 = 2;
                            try
                            {
                                dbg.EnterDecision( 1 );

                                int LA1_0 = input.LA(1);

                                if ( ((LA1_0 >= CALL && LA1_0 <= INT) || (LA1_0 >= 10 && LA1_0 <= 11) || (LA1_0 >= 14 && LA1_0 <= 17)) )
                                {
                                    alt1 = 1;
                                }

                            }
                            finally
                            {
                                dbg.ExitDecision( 1 );
                            }

                            switch ( alt1 )
                            {
                            case 1:
                                dbg.EnterAlt( 1 );

                                // BuildOptions\\ProfileTreeGrammar.g3:53:0: stat
                                {
                                    dbg.Location( 53, 8 );
                                    PushFollow(Follow._stat_in_prog48);
                                    stat();

                                    state._fsp--;

                                }
                                break;

                            default:
                                goto loop1;
                            }
                        }

                        loop1:
                            ;

                    }
                    finally
                    {
                        dbg.ExitSubRule( 1 );
                    }

                }

            }
            catch ( RecognitionException re )
            {
                ReportError(re);
                Recover(input,re);
            }
            finally
            {
            }
            dbg.Location(54, 4);

        }
        finally
        {
            dbg.ExitRule( GrammarFileName, "prog" );
            DecRuleLevel();
            if ( RuleLevel == 0 )
            {
                dbg.Terminate();
            }
        }

        return ;
    }
    // $ANTLR end "prog"

    // $ANTLR start "stat"
    // BuildOptions\\ProfileTreeGrammar.g3:56:0: stat : ( expr | ^( '=' ID expr ) | ^( FUNC ( . )+ ) );
    private void stat(  )
    {
        CommonTree ID2 = null;
        BigInteger expr1 = default(BigInteger);
        BigInteger expr3 = default(BigInteger);

        try
        {
            dbg.EnterRule( GrammarFileName, "stat" );
            if ( RuleLevel == 0 )
            {
                dbg.Commence();
            }
            IncRuleLevel();
            dbg.Location( 56, -1 );

            try
            {
                // BuildOptions\\ProfileTreeGrammar.g3:56:9: ( expr | ^( '=' ID expr ) | ^( FUNC ( . )+ ) )
                int alt3 = 3;
                try
                {
                    dbg.EnterDecision( 3 );

                    switch ( input.LA(1) )
                    {
                    case CALL:
                    case ID:
                    case INT:
                    case 10:
                    case 11:
                    case 14:
                    case 15:
                    case 16:
                        {
                        alt3 = 1;
                        }
                        break;
                    case 17:
                        {
                        alt3 = 2;
                        }
                        break;
                    case FUNC:
                        {
                        alt3 = 3;
                        }
                        break;
                    default:
                        {
                            NoViableAltException nvae = new NoViableAltException("", 3, 0, input);

                            dbg.RecognitionException( nvae );
                            throw nvae;
                        }
                    }

                }
                finally
                {
                    dbg.ExitDecision( 3 );
                }

                switch ( alt3 )
                {
                case 1:
                    dbg.EnterAlt( 1 );

                    // BuildOptions\\ProfileTreeGrammar.g3:56:9: expr
                    {
                        dbg.Location( 56, 8 );
                        PushFollow(Follow._expr_in_stat63);
                        expr1 = expr();

                        state._fsp--;

                        dbg.Location( 56, 35 );
                        string result = expr1.ToString();
                        Console.Out.WriteLine(expr1 + " (about " + result[0] + "*10^" + (result.Length - 1) + ")");

                    }
                    break;
                case 2:
                    dbg.EnterAlt( 2 );

                    // BuildOptions\\ProfileTreeGrammar.g3:59:9: ^( '=' ID expr )
                    {
                        dbg.Location( 59, 8 );
                        dbg.Location( 59, 10 );
                        Match(input,17,Follow._17_in_stat98);

                        Match(input, TokenTypes.Down, null);
                        dbg.Location( 59, 14 );
                        ID2 = (CommonTree)Match(input,ID,Follow._ID_in_stat100);
                        dbg.Location( 59, 17 );
                        PushFollow(Follow._expr_in_stat102);
                        expr3 = expr();

                        state._fsp--;


                        Match(input, TokenTypes.Up, null);
                        dbg.Location( 59, 35 );
                        globalMemory[(ID2 != null ? ID2.Text : null)] = expr3;

                    }
                    break;
                case 3:
                    dbg.EnterAlt( 3 );

                    // BuildOptions\\ProfileTreeGrammar.g3:60:9: ^( FUNC ( . )+ )
                    {
                        dbg.Location( 60, 8 );
                        dbg.Location( 60, 10 );
                        Match(input,FUNC,Follow._FUNC_in_stat128);

                        Match(input, TokenTypes.Down, null);
                        dbg.Location( 60, 15 );
                        // BuildOptions\\ProfileTreeGrammar.g3:60:16: ( . )+
                        int cnt2 = 0;
                        try
                        {
                            dbg.EnterSubRule( 2 );

                            for ( ; ; )
                            {
                                int alt2 = 2;
                                try
                                {
                                    dbg.EnterDecision( 2 );

                                    int LA2_0 = input.LA(1);

                                    if ( ((LA2_0 >= CALL && LA2_0 <= 17)) )
                                    {
                                        alt2 = 1;
                                    }
                                    else if ( (LA2_0 == UP) )
                                    {
                                        alt2 = 2;
                                    }


                                }
                                finally
                                {
                                    dbg.ExitDecision( 2 );
                                }

                                switch ( alt2 )
                                {
                                case 1:
                                    dbg.EnterAlt( 1 );

                                    // BuildOptions\\ProfileTreeGrammar.g3:60:0: .
                                    {
                                        dbg.Location( 60, 15 );
                                        MatchAny(input);

                                    }
                                    break;

                                default:
                                    if ( cnt2 >= 1 )
                                        goto loop2;

                                    EarlyExitException eee2 = new EarlyExitException( 2, input );
                                    dbg.RecognitionException( eee2 );

                                    throw eee2;
                                }
                                cnt2++;
                            }
                            loop2:
                                ;

                        }
                        finally
                        {
                            dbg.ExitSubRule( 2 );
                        }


                        Match(input, TokenTypes.Up, null);

                    }
                    break;

                }
            }
            catch ( RecognitionException re )
            {
                ReportError(re);
                Recover(input,re);
            }
            finally
            {
            }
            dbg.Location(61, 4);

        }
        finally
        {
            dbg.ExitRule( GrammarFileName, "stat" );
            DecRuleLevel();
            if ( RuleLevel == 0 )
            {
                dbg.Terminate();
            }
        }

        return ;
    }
    // $ANTLR end "stat"

    // $ANTLR start "expr"
    // BuildOptions\\ProfileTreeGrammar.g3:63:0: expr returns [BigInteger value] : ( ^( '+' a= expr b= expr ) | ^( '-' a= expr b= expr ) | ^( '*' a= expr b= expr ) | ^( '/' a= expr b= expr ) | ^( '%' a= expr b= expr ) | ID | INT | call );
    private BigInteger expr(  )
    {
        BigInteger value = default(BigInteger);

        CommonTree ID4 = null;
        CommonTree INT5 = null;
        BigInteger a = default(BigInteger);
        BigInteger b = default(BigInteger);
        BigInteger call6 = default(BigInteger);

        try
        {
            dbg.EnterRule( GrammarFileName, "expr" );
            if ( RuleLevel == 0 )
            {
                dbg.Commence();
            }
            IncRuleLevel();
            dbg.Location( 63, -1 );

            try
            {
                // BuildOptions\\ProfileTreeGrammar.g3:64:9: ( ^( '+' a= expr b= expr ) | ^( '-' a= expr b= expr ) | ^( '*' a= expr b= expr ) | ^( '/' a= expr b= expr ) | ^( '%' a= expr b= expr ) | ID | INT | call )
                int alt4 = 8;
                try
                {
                    dbg.EnterDecision( 4 );

                    switch ( input.LA(1) )
                    {
                    case 16:
                        {
                        alt4 = 1;
                        }
                        break;
                    case 10:
                        {
                        alt4 = 2;
                        }
                        break;
                    case 14:
                        {
                        alt4 = 3;
                        }
                        break;
                    case 15:
                        {
                        alt4 = 4;
                        }
                        break;
                    case 11:
                        {
                        alt4 = 5;
                        }
                        break;
                    case ID:
                        {
                        alt4 = 6;
                        }
                        break;
                    case INT:
                        {
                        alt4 = 7;
                        }
                        break;
                    case CALL:
                        {
                        alt4 = 8;
                        }
                        break;
                    default:
                        {
                            NoViableAltException nvae = new NoViableAltException("", 4, 0, input);

                            dbg.RecognitionException( nvae );
                            throw nvae;
                        }
                    }

                }
                finally
                {
                    dbg.ExitDecision( 4 );
                }

                switch ( alt4 )
                {
                case 1:
                    dbg.EnterAlt( 1 );

                    // BuildOptions\\ProfileTreeGrammar.g3:64:9: ^( '+' a= expr b= expr )
                    {
                        dbg.Location( 64, 8 );
                        dbg.Location( 64, 10 );
                        Match(input,16,Follow._16_in_expr172);

                        Match(input, TokenTypes.Down, null);
                        dbg.Location( 64, 15 );
                        PushFollow(Follow._expr_in_expr176);
                        a = expr();

                        state._fsp--;

                        dbg.Location( 64, 22 );
                        PushFollow(Follow._expr_in_expr180);
                        b = expr();

                        state._fsp--;


                        Match(input, TokenTypes.Up, null);
                        dbg.Location( 64, 35 );
                        value = a.add(b);

                    }
                    break;
                case 2:
                    dbg.EnterAlt( 2 );

                    // BuildOptions\\ProfileTreeGrammar.g3:65:9: ^( '-' a= expr b= expr )
                    {
                        dbg.Location( 65, 8 );
                        dbg.Location( 65, 10 );
                        Match(input,10,Follow._10_in_expr200);

                        Match(input, TokenTypes.Down, null);
                        dbg.Location( 65, 15 );
                        PushFollow(Follow._expr_in_expr204);
                        a = expr();

                        state._fsp--;

                        dbg.Location( 65, 22 );
                        PushFollow(Follow._expr_in_expr208);
                        b = expr();

                        state._fsp--;


                        Match(input, TokenTypes.Up, null);
                        dbg.Location( 65, 35 );
                        value = a.subtract(b);

                    }
                    break;
                case 3:
                    dbg.EnterAlt( 3 );

                    // BuildOptions\\ProfileTreeGrammar.g3:66:9: ^( '*' a= expr b= expr )
                    {
                        dbg.Location( 66, 8 );
                        dbg.Location( 66, 10 );
                        Match(input,14,Follow._14_in_expr228);

                        Match(input, TokenTypes.Down, null);
                        dbg.Location( 66, 15 );
                        PushFollow(Follow._expr_in_expr232);
                        a = expr();

                        state._fsp--;

                        dbg.Location( 66, 22 );
                        PushFollow(Follow._expr_in_expr236);
                        b = expr();

                        state._fsp--;


                        Match(input, TokenTypes.Up, null);
                        dbg.Location( 66, 35 );
                        value = a.multiply(b);

                    }
                    break;
                case 4:
                    dbg.EnterAlt( 4 );

                    // BuildOptions\\ProfileTreeGrammar.g3:67:9: ^( '/' a= expr b= expr )
                    {
                        dbg.Location( 67, 8 );
                        dbg.Location( 67, 10 );
                        Match(input,15,Follow._15_in_expr256);

                        Match(input, TokenTypes.Down, null);
                        dbg.Location( 67, 15 );
                        PushFollow(Follow._expr_in_expr260);
                        a = expr();

                        state._fsp--;

                        dbg.Location( 67, 22 );
                        PushFollow(Follow._expr_in_expr264);
                        b = expr();

                        state._fsp--;


                        Match(input, TokenTypes.Up, null);
                        dbg.Location( 67, 35 );
                        value = a.divide(b);

                    }
                    break;
                case 5:
                    dbg.EnterAlt( 5 );

                    // BuildOptions\\ProfileTreeGrammar.g3:68:9: ^( '%' a= expr b= expr )
                    {
                        dbg.Location( 68, 8 );
                        dbg.Location( 68, 10 );
                        Match(input,11,Follow._11_in_expr284);

                        Match(input, TokenTypes.Down, null);
                        dbg.Location( 68, 15 );
                        PushFollow(Follow._expr_in_expr288);
                        a = expr();

                        state._fsp--;

                        dbg.Location( 68, 22 );
                        PushFollow(Follow._expr_in_expr292);
                        b = expr();

                        state._fsp--;


                        Match(input, TokenTypes.Up, null);
                        dbg.Location( 68, 35 );
                        value = a.remainder(b);

                    }
                    break;
                case 6:
                    dbg.EnterAlt( 6 );

                    // BuildOptions\\ProfileTreeGrammar.g3:69:9: ID
                    {
                        dbg.Location( 69, 8 );
                        ID4 = (CommonTree)Match(input,ID,Follow._ID_in_expr311);
                        dbg.Location( 69, 35 );
                        value = getValue((ID4 != null ? ID4.Text : null));

                    }
                    break;
                case 7:
                    dbg.EnterAlt( 7 );

                    // BuildOptions\\ProfileTreeGrammar.g3:70:9: INT
                    {
                        dbg.Location( 70, 8 );
                        INT5 = (CommonTree)Match(input,INT,Follow._INT_in_expr347);
                        dbg.Location( 70, 35 );
                        value = new BigInteger((INT5 != null ? INT5.Text : null));

                    }
                    break;
                case 8:
                    dbg.EnterAlt( 8 );

                    // BuildOptions\\ProfileTreeGrammar.g3:71:9: call
                    {
                        dbg.Location( 71, 8 );
                        PushFollow(Follow._call_in_expr382);
                        call6 = call();

                        state._fsp--;

                        dbg.Location( 71, 35 );
                        value = call6;

                    }
                    break;

                }
            }
            catch ( RecognitionException re )
            {
                ReportError(re);
                Recover(input,re);
            }
            finally
            {
            }
            dbg.Location(72, 4);

        }
        finally
        {
            dbg.ExitRule( GrammarFileName, "expr" );
            DecRuleLevel();
            if ( RuleLevel == 0 )
            {
                dbg.Terminate();
            }
        }

        return value;
    }
    // $ANTLR end "expr"

    // $ANTLR start "call"
    // BuildOptions\\ProfileTreeGrammar.g3:74:0: call returns [BigInteger value] : ^( CALL ID expr ) ;
    private BigInteger call(  )
    {
        BigInteger value = default(BigInteger);

        CommonTree ID8 = null;
        BigInteger expr7 = default(BigInteger);

        try
        {
            dbg.EnterRule( GrammarFileName, "call" );
            if ( RuleLevel == 0 )
            {
                dbg.Commence();
            }
            IncRuleLevel();
            dbg.Location( 74, -1 );

            try
            {
                // BuildOptions\\ProfileTreeGrammar.g3:75:9: ( ^( CALL ID expr ) )
                dbg.EnterAlt( 1 );

                // BuildOptions\\ProfileTreeGrammar.g3:75:9: ^( CALL ID expr )
                {
                    dbg.Location( 75, 8 );
                    dbg.Location( 75, 10 );
                    Match(input,CALL,Follow._CALL_in_call430);

                    Match(input, TokenTypes.Down, null);
                    dbg.Location( 75, 15 );
                    ID8 = (CommonTree)Match(input,ID,Follow._ID_in_call432);
                    dbg.Location( 75, 18 );
                    PushFollow(Follow._expr_in_call434);
                    expr7 = expr();

                    state._fsp--;


                    Match(input, TokenTypes.Up, null);
                    dbg.Location( 75, 35 );
                    BigInteger p = expr7;
                    CommonTree funcRoot = findFunction((ID8 != null ? ID8.Text : null), p);
                    if (funcRoot == null)
                    {
                        Console.Error.WriteLine("No match found for " + (ID8 != null ? ID8.Text : null) + "(" + p + ")");
                    }
                    else
                    {
                        // Here we set up the local evaluator to run over the
                        // function definition with the parameter value.
                        // This re-reads a sub-AST of our input AST!
                        ProfileTreeGrammar e = new ProfileTreeGrammar(funcRoot, functionDefinitions, globalMemory, p);
                        value = e.expr();
                    }

                }

            }
            catch ( RecognitionException re )
            {
                ReportError(re);
                Recover(input,re);
            }
            finally
            {
            }
            dbg.Location(87, 4);

        }
        finally
        {
            dbg.ExitRule( GrammarFileName, "call" );
            DecRuleLevel();
            if ( RuleLevel == 0 )
            {
                dbg.Terminate();
            }
        }

        return value;
    }
    // $ANTLR end "call"
    #endregion Rules

    #region Follow sets
    private static class Follow
    {
        public static readonly BitSet _stat_in_prog48 = new BitSet(new ulong[]{0x3CCF2UL});
        public static readonly BitSet _expr_in_stat63 = new BitSet(new ulong[]{0x2UL});
        public static readonly BitSet _17_in_stat98 = new BitSet(new ulong[]{0x4UL});
        public static readonly BitSet _ID_in_stat100 = new BitSet(new ulong[]{0x1CCD0UL});
        public static readonly BitSet _expr_in_stat102 = new BitSet(new ulong[]{0x8UL});
        public static readonly BitSet _FUNC_in_stat128 = new BitSet(new ulong[]{0x4UL});
        public static readonly BitSet _16_in_expr172 = new BitSet(new ulong[]{0x4UL});
        public static readonly BitSet _expr_in_expr176 = new BitSet(new ulong[]{0x1CCD0UL});
        public static readonly BitSet _expr_in_expr180 = new BitSet(new ulong[]{0x8UL});
        public static readonly BitSet _10_in_expr200 = new BitSet(new ulong[]{0x4UL});
        public static readonly BitSet _expr_in_expr204 = new BitSet(new ulong[]{0x1CCD0UL});
        public static readonly BitSet _expr_in_expr208 = new BitSet(new ulong[]{0x8UL});
        public static readonly BitSet _14_in_expr228 = new BitSet(new ulong[]{0x4UL});
        public static readonly BitSet _expr_in_expr232 = new BitSet(new ulong[]{0x1CCD0UL});
        public static readonly BitSet _expr_in_expr236 = new BitSet(new ulong[]{0x8UL});
        public static readonly BitSet _15_in_expr256 = new BitSet(new ulong[]{0x4UL});
        public static readonly BitSet _expr_in_expr260 = new BitSet(new ulong[]{0x1CCD0UL});
        public static readonly BitSet _expr_in_expr264 = new BitSet(new ulong[]{0x8UL});
        public static readonly BitSet _11_in_expr284 = new BitSet(new ulong[]{0x4UL});
        public static readonly BitSet _expr_in_expr288 = new BitSet(new ulong[]{0x1CCD0UL});
        public static readonly BitSet _expr_in_expr292 = new BitSet(new ulong[]{0x8UL});
        public static readonly BitSet _ID_in_expr311 = new BitSet(new ulong[]{0x2UL});
        public static readonly BitSet _INT_in_expr347 = new BitSet(new ulong[]{0x2UL});
        public static readonly BitSet _call_in_expr382 = new BitSet(new ulong[]{0x2UL});
        public static readonly BitSet _CALL_in_call430 = new BitSet(new ulong[]{0x4UL});
        public static readonly BitSet _ID_in_call432 = new BitSet(new ulong[]{0x1CCD0UL});
        public static readonly BitSet _expr_in_call434 = new BitSet(new ulong[]{0x8UL});
    }
    #endregion Follow sets
}
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Linq;
using Epic.Monads;
using LiveLinq.List;
using MoreCollections;
using static Epic.Monads.Utility;
using System.Text;
using System.Text.RegularExpressions;

namespace Reactive.Io
{
    public static partial class PathUtility
    {
        /// <summary>
        /// Directory separator used when constructing a <c>PathSpec</c> from a path string
        /// that contains no separator characters at all.
        /// FIX: this getter-only auto-property was never assigned in this file and was
        /// therefore always null; it now defaults to the platform separator.
        /// NOTE(review): if another part of this partial class assigns it in a static
        /// constructor, that assignment still takes precedence over this initializer.
        /// </summary>
        public static string DefaultDirectorySeparator { get; } = Path.DirectorySeparatorChar.ToString();

        /// <summary>
        /// Returns a regex that filters file names the same way the specified wildcard
        /// pattern ('*' = any run of characters, '?' = any single character) does.
        /// From here: http://www.java2s.com/Code/CSharp/Regular-Expressions/Checksifnamematchespatternwithandwildcards.htm
        /// Copyright: Julijan Sribar, 2004-2007
        ///
        /// This software is provided 'as-is', without any express or implied
        /// warranty. In no event will the author(s) be held liable for any damages
        /// arising from the use of this software.
        /// Permission is granted to anyone to use this software for any purpose,
        /// including commercial applications, and to alter it and redistribute it
        /// freely, subject to the following restrictions:
        /// 1. The origin of this software must not be misrepresented; you must not
        ///    claim that you wrote the original software. If you use this software
        ///    in a product, an acknowledgment in the product documentation would be
        ///    appreciated but is not required.
        /// 2. Altered source versions must be plainly marked as such, and must not be
        ///    misrepresented as being the original software.
        /// 3. This notice may not be removed or altered from any source distribution.
        /// </summary>
        /// <param name="pattern">
        /// Wildcard pattern to convert.
        /// </param>
        /// <returns>
        /// A case-insensitive <see cref="Regex"/> that matches the same names as
        /// <paramref name="pattern"/>.
        /// </returns>
        public static Regex FileNamePatternToRegex(string pattern)
        {
            // prepare the pattern to the form appropriate for Regex class
            var sb = new StringBuilder(pattern);

            // remove superfluous occurrences of "?*" and "*?" (each is equivalent to "*")
            while (sb.ToString().IndexOf("?*", StringComparison.Ordinal) != -1)
            {
                sb.Replace("?*", "*");
            }

            while (sb.ToString().IndexOf("*?", StringComparison.Ordinal) != -1)
            {
                sb.Replace("*?", "*");
            }

            // collapse runs of asterisks — "**" matches the same names as "*"
            // (looped because StringBuilder.Replace is a single non-overlapping pass)
            while (sb.ToString().IndexOf("**", StringComparison.Ordinal) != -1)
            {
                sb.Replace("**", "*");
            }

            // if only asterisk '*' is left, the mask is ".*"
            if (sb.ToString().Equals("*"))
            {
                pattern = ".*";
            }
            else
            {
                // escape '.' as "\." so it matches a literal dot
                sb.Replace(".", "\\.");
                // replaces all occurrences of '*' with ".*"
                sb.Replace("*", ".*");
                // replaces all occurrences of '?' with '.' (any single character)
                sb.Replace("?", ".");
                // add "\b" word boundaries to the beginning and end of the pattern
                sb.Insert(0, "\\b");
                sb.Append("\\b");
                pattern = sb.ToString();
            }

            Regex regex = new Regex(pattern, RegexOptions.IgnoreCase);
            return regex;
        }

        /// <summary>
        /// Parses <paramref name="path"/> into a <c>PathSpec</c>.
        /// </summary>
        /// <exception cref="ArgumentException">Thrown with the parse error message when
        /// <paramref name="path"/> is not a valid path.</exception>
        public static PathSpec ParsePathSpec(string path, PathFlags flags = PathFlags.UseDefaultsForGivenPath)
        {
            string error = string.Empty;
            PathSpec pathSpec;
            if (!TryParsePathSpec(path, out pathSpec, out error, flags))
                throw new ArgumentException(error);
            return pathSpec;
        }

        /// <summary>
        /// Parses <paramref name="path"/> into a <c>PathSpec</c>, returning Nothing on failure
        /// instead of throwing.
        /// </summary>
        public static IMaybe<PathSpec> TryParsePathSpec(string path, PathFlags flags = PathFlags.UseDefaultsForGivenPath)
        {
            string error = string.Empty;
            PathSpec pathSpec;
            if (!TryParsePathSpec(path, out pathSpec, out error, flags))
                return Nothing<PathSpec>();
            return Something(pathSpec);
        }

        /// <summary>
        /// Try-parse overload that discards the error message.
        /// </summary>
        public static bool TryParsePathSpec(string path, out PathSpec pathSpec, PathFlags flags = PathFlags.UseDefaultsForGivenPath)
        {
            string error = string.Empty;
            return TryParsePathSpec(path, out pathSpec, out error, flags);
        }

        /// <summary>
        /// Core parser. Detects Windows style (backslashes or a drive colon) vs. Unix style
        /// (forward slashes) and splits the path into components; a path containing neither
        /// kind of separator is treated as a single-element relative path.
        /// </summary>
        /// <param name="path">Path string to parse; must not mix '/' and '\'.</param>
        /// <param name="pathSpec">The parsed result; null when the method returns false.</param>
        /// <param name="error">A human-readable failure reason; empty on success.</param>
        /// <param name="flags">Parsing flags; the UseDefaults* values are resolved here.</param>
        /// <returns>true when the path was parsed successfully.</returns>
        public static bool TryParsePathSpec(string path, out PathSpec pathSpec, out string error, PathFlags flags = PathFlags.UseDefaultsForGivenPath)
        {
            if (flags.HasFlag(PathFlags.UseDefaultsFromUtility) && flags.HasFlag(PathFlags.UseDefaultsForGivenPath))
                throw new ArgumentException("Cannot specify both PathFlags.UseDefaultsFromUtility and PathFlags.UseDefaultsForGivenPath");
            if (flags.HasFlag(PathFlags.UseDefaultsFromUtility))
                flags = GetDefaultFlagsForThisEnvironment();
            error = string.Empty;
            pathSpec = null;
            var isWindowsStyle = path.Contains("\\") || path.Contains(":");
            var isUnixStyle = path.Contains("/");
            if (isWindowsStyle && isUnixStyle)
            {
                error = "Cannot mix slashes and backslashes in the same path";
                return false;
            }

            if (isWindowsStyle)
            {
                if (flags.HasFlag(PathFlags.UseDefaultsForGivenPath))
                    flags = PathFlags.None;
                // drop a single trailing backslash (but keep a bare "\")
                if (path.Length > 1 && path.EndsWith("\\"))
                    path = path.Substring(0, path.Length - 1);
                // a ':' is only legal as the second character of a drive specifier ("C:\...")
                var colonIdx = path.LastIndexOf(':');
                if (colonIdx > -1 && (colonIdx != 1 || !char.IsLetter(path[0]) || (path.Length > 2 && path[2] != '\\')))
                {
                    error = "A Windows path may not contain a : character, except as part of the drive specifier.";
                    return false;
                }

                // Split into components; relative paths get an explicit "." root, UNC paths
                // a "\\" root, and rootless "\foo" paths a "\" root.
                List<string> components;
                if (path.IsAbsoluteWindowsPath())
                    components = path.Split('\\').ToList();
                else if (path.StartsWith("."))
                    components = path.Split('\\').ToList();
                else if (path.StartsWith("\\\\"))
                    components = "\\\\".ItemConcat(path.Substring(2).Split('\\')).ToList();
                else if (path.StartsWith("\\"))
                    components = "\\".ItemConcat(path.Substring(1).Split('\\')).ToList();
                else
                    components = ".".ItemConcat(path.Split('\\')).ToList();

                return TryCreatePathSpec(flags, "\\", components, out pathSpec, out error);
            }

            if (isUnixStyle)
            {
                if (flags.HasFlag(PathFlags.UseDefaultsForGivenPath))
                    flags = PathFlags.CaseSensitive;
                // drop a single trailing slash (but keep a bare "/")
                if (path.Length > 1 && path.EndsWith("/"))
                    path = path.Substring(0, path.Length - 1);
                if (path.Contains(":"))
                {
                    error = "A Unix path may not contain a : character.";
                    return false;
                }

                List<string> components;
                if (path.IsAbsoluteUnixPath())
                    components = "/".ItemConcat(path.Substring(1).Split('/')).ToList();
                else if (path.StartsWith("."))
                    components = path.Split('/').ToList();
                else
                    components = ".".ItemConcat(path.Split('/')).ToList();

                return TryCreatePathSpec(flags, "/", components, out pathSpec, out error);
            }

            // If we reach this point, there are no backslashes or slashes in the path, meaning that it's a
            // path with one element.
            if (flags.HasFlag(PathFlags.UseDefaultsForGivenPath))
                flags = GetDefaultFlagsForThisEnvironment();
            if (path == ".." || path == ".")
                pathSpec = new PathSpec(flags, DefaultDirectorySeparator, path);
            else
                pathSpec = new PathSpec(flags, DefaultDirectorySeparator, ".", path);
            return true;
        }

        /// <summary>
        /// Validates a component list and builds the <c>PathSpec</c>. This is the
        /// validate-and-construct sequence that was previously duplicated verbatim in every
        /// branch of <see cref="TryParsePathSpec(string, out PathSpec, out string, PathFlags)"/>.
        /// </summary>
        private static bool TryCreatePathSpec(PathFlags flags, string directorySeparator, List<string> components, out PathSpec pathSpec, out string error)
        {
            pathSpec = null;
            error = string.Empty;
            // "." components are redundant everywhere except as the leading root marker
            components.RemoveWhere((i, str) => i != 0 && str == ".");
            if (components.Any(string.IsNullOrEmpty))
            {
                error = "Must not contain any directories that have empty names";
                return false;
            }
            if (components.IsAncestorOfRoot())
            {
                error = "Must not point to an ancestor of the filesystem root";
                return false;
            }
            pathSpec = new PathSpec(flags, directorySeparator, components);
            return true;
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

namespace Apache.Ignite.Core.Impl.Binary
{
    using System;
    using System.Collections;
    using System.Collections.Generic;
    using System.Diagnostics;
    using System.IO;
    using Apache.Ignite.Core.Binary;
    using Apache.Ignite.Core.Impl.Binary.IO;
    using Apache.Ignite.Core.Impl.Binary.Metadata;
    using Apache.Ignite.Core.Impl.Binary.Structure;

    /// <summary>
    /// Binary writer implementation. Serializes objects into the Ignite binary
    /// format, tracking per-object schemas, handles (for reference cycles) and
    /// binary type metadata collected during the write session.
    /// </summary>
    internal class BinaryWriter : IBinaryWriter, IBinaryRawWriter
    {
        /** Marshaller. */
        private readonly Marshaller _marsh;

        /** Stream. */
        private readonly IBinaryStream _stream;

        /** Builder (used only during build). */
        private BinaryObjectBuilder _builder;

        /** Handles. */
        private BinaryHandleDictionary<object, long> _hnds;

        /** Metadatas collected during this write session. */
        private IDictionary<int, BinaryType> _metas;

        /** Current type ID. */
        private int _curTypeId;

        /** Current name converter. */
        private IBinaryNameMapper _curConverter;

        /** Current mapper. */
        private IBinaryIdMapper _curMapper;

        /** Current object start position. */
        private int _curPos;

        /** Current raw position. Zero means raw mode has not been entered. */
        private int _curRawPos;

        /** Whether we are currently detaching an object. */
        private bool _detaching;

        /** Current type structure tracker. */
        private BinaryStructureTracker _curStruct;

        /** Schema holder. */
        private readonly BinaryObjectSchemaHolder _schema = BinaryObjectSchemaHolder.Current;

        /// <summary>
        /// Gets the marshaller.
        /// </summary>
        internal Marshaller Marshaller
        {
            get { return _marsh; }
        }

        /// <summary>
        /// Write named boolean value.
        /// </summary>
        /// <param name="fieldName">Field name.</param>
        /// <param name="val">Boolean value.</param>
        public void WriteBoolean(string fieldName, bool val)
        {
            WriteFieldId(fieldName, BinaryUtils.TypeBool);

            _stream.WriteByte(BinaryUtils.TypeBool);
            _stream.WriteBool(val);
        }

        /// <summary>
        /// Write boolean value.
        /// </summary>
        /// <param name="val">Boolean value.</param>
        public void WriteBoolean(bool val)
        {
            _stream.WriteBool(val);
        }

        /// <summary>
        /// Write named boolean array.
        /// </summary>
        /// <param name="fieldName">Field name.</param>
        /// <param name="val">Boolean array.</param>
        public void WriteBooleanArray(string fieldName, bool[] val)
        {
            WriteFieldId(fieldName, BinaryUtils.TypeArrayBool);

            if (val == null)
                WriteNullField();
            else
            {
                _stream.WriteByte(BinaryUtils.TypeArrayBool);
                BinaryUtils.WriteBooleanArray(val, _stream);
            }
        }

        /// <summary>
        /// Write boolean array.
        /// </summary>
        /// <param name="val">Boolean array.</param>
        public void WriteBooleanArray(bool[] val)
        {
            if (val == null)
                WriteNullRawField();
            else
            {
                _stream.WriteByte(BinaryUtils.TypeArrayBool);
                BinaryUtils.WriteBooleanArray(val, _stream);
            }
        }

        /// <summary>
        /// Write named byte value.
        /// </summary>
        /// <param name="fieldName">Field name.</param>
        /// <param name="val">Byte value.</param>
        public void WriteByte(string fieldName, byte val)
        {
            // FIX: was BinaryUtils.TypeBool, which registered the field with the
            // wrong type id in the binary type metadata. Must match the value
            // header (TypeByte) written below, as in every other Write* method.
            WriteFieldId(fieldName, BinaryUtils.TypeByte);

            _stream.WriteByte(BinaryUtils.TypeByte);
            _stream.WriteByte(val);
        }

        /// <summary>
        /// Write byte value.
        /// </summary>
        /// <param name="val">Byte value.</param>
        public void WriteByte(byte val)
        {
            _stream.WriteByte(val);
        }

        /// <summary>
        /// Write named byte array.
        /// </summary>
        /// <param name="fieldName">Field name.</param>
        /// <param name="val">Byte array.</param>
        public void WriteByteArray(string fieldName, byte[] val)
        {
            WriteFieldId(fieldName, BinaryUtils.TypeArrayByte);

            if (val == null)
                WriteNullField();
            else
            {
                _stream.WriteByte(BinaryUtils.TypeArrayByte);
                BinaryUtils.WriteByteArray(val, _stream);
            }
        }

        /// <summary>
        /// Write byte array.
        /// </summary>
        /// <param name="val">Byte array.</param>
        public void WriteByteArray(byte[] val)
        {
            if (val == null)
                WriteNullRawField();
            else
            {
                _stream.WriteByte(BinaryUtils.TypeArrayByte);
                BinaryUtils.WriteByteArray(val, _stream);
            }
        }

        /// <summary>
        /// Write named short value.
        /// </summary>
        /// <param name="fieldName">Field name.</param>
        /// <param name="val">Short value.</param>
        public void WriteShort(string fieldName, short val)
        {
            WriteFieldId(fieldName, BinaryUtils.TypeShort);

            _stream.WriteByte(BinaryUtils.TypeShort);
            _stream.WriteShort(val);
        }

        /// <summary>
        /// Write short value.
        /// </summary>
        /// <param name="val">Short value.</param>
        public void WriteShort(short val)
        {
            _stream.WriteShort(val);
        }

        /// <summary>
        /// Write named short array.
        /// </summary>
        /// <param name="fieldName">Field name.</param>
        /// <param name="val">Short array.</param>
        public void WriteShortArray(string fieldName, short[] val)
        {
            WriteFieldId(fieldName, BinaryUtils.TypeArrayShort);

            if (val == null)
                WriteNullField();
            else
            {
                _stream.WriteByte(BinaryUtils.TypeArrayShort);
                BinaryUtils.WriteShortArray(val, _stream);
            }
        }

        /// <summary>
        /// Write short array.
        /// </summary>
        /// <param name="val">Short array.</param>
        public void WriteShortArray(short[] val)
        {
            if (val == null)
                WriteNullRawField();
            else
            {
                _stream.WriteByte(BinaryUtils.TypeArrayShort);
                BinaryUtils.WriteShortArray(val, _stream);
            }
        }

        /// <summary>
        /// Write named char value.
        /// </summary>
        /// <param name="fieldName">Field name.</param>
        /// <param name="val">Char value.</param>
        public void WriteChar(string fieldName, char val)
        {
            WriteFieldId(fieldName, BinaryUtils.TypeChar);

            _stream.WriteByte(BinaryUtils.TypeChar);
            _stream.WriteChar(val);
        }

        /// <summary>
        /// Write char value.
        /// </summary>
        /// <param name="val">Char value.</param>
        public void WriteChar(char val)
        {
            _stream.WriteChar(val);
        }

        /// <summary>
        /// Write named char array.
        /// </summary>
        /// <param name="fieldName">Field name.</param>
        /// <param name="val">Char array.</param>
        public void WriteCharArray(string fieldName, char[] val)
        {
            WriteFieldId(fieldName, BinaryUtils.TypeArrayChar);

            if (val == null)
                WriteNullField();
            else
            {
                _stream.WriteByte(BinaryUtils.TypeArrayChar);
                BinaryUtils.WriteCharArray(val, _stream);
            }
        }

        /// <summary>
        /// Write char array.
        /// </summary>
        /// <param name="val">Char array.</param>
        public void WriteCharArray(char[] val)
        {
            if (val == null)
                WriteNullRawField();
            else
            {
                _stream.WriteByte(BinaryUtils.TypeArrayChar);
                BinaryUtils.WriteCharArray(val, _stream);
            }
        }

        /// <summary>
        /// Write named int value.
        /// </summary>
        /// <param name="fieldName">Field name.</param>
        /// <param name="val">Int value.</param>
        public void WriteInt(string fieldName, int val)
        {
            WriteFieldId(fieldName, BinaryUtils.TypeInt);

            _stream.WriteByte(BinaryUtils.TypeInt);
            _stream.WriteInt(val);
        }

        /// <summary>
        /// Write int value.
        /// </summary>
        /// <param name="val">Int value.</param>
        public void WriteInt(int val)
        {
            _stream.WriteInt(val);
        }

        /// <summary>
        /// Write named int array.
        /// </summary>
        /// <param name="fieldName">Field name.</param>
        /// <param name="val">Int array.</param>
        public void WriteIntArray(string fieldName, int[] val)
        {
            WriteFieldId(fieldName, BinaryUtils.TypeArrayInt);

            if (val == null)
                WriteNullField();
            else
            {
                _stream.WriteByte(BinaryUtils.TypeArrayInt);
                BinaryUtils.WriteIntArray(val, _stream);
            }
        }

        /// <summary>
        /// Write int array.
        /// </summary>
        /// <param name="val">Int array.</param>
        public void WriteIntArray(int[] val)
        {
            if (val == null)
                WriteNullRawField();
            else
            {
                _stream.WriteByte(BinaryUtils.TypeArrayInt);
                BinaryUtils.WriteIntArray(val, _stream);
            }
        }

        /// <summary>
        /// Write named long value.
        /// </summary>
        /// <param name="fieldName">Field name.</param>
        /// <param name="val">Long value.</param>
        public void WriteLong(string fieldName, long val)
        {
            WriteFieldId(fieldName, BinaryUtils.TypeLong);

            _stream.WriteByte(BinaryUtils.TypeLong);
            _stream.WriteLong(val);
        }

        /// <summary>
        /// Write long value.
        /// </summary>
        /// <param name="val">Long value.</param>
        public void WriteLong(long val)
        {
            _stream.WriteLong(val);
        }

        /// <summary>
        /// Write named long array.
        /// </summary>
        /// <param name="fieldName">Field name.</param>
        /// <param name="val">Long array.</param>
        public void WriteLongArray(string fieldName, long[] val)
        {
            WriteFieldId(fieldName, BinaryUtils.TypeArrayLong);

            if (val == null)
                WriteNullField();
            else
            {
                _stream.WriteByte(BinaryUtils.TypeArrayLong);
                BinaryUtils.WriteLongArray(val, _stream);
            }
        }

        /// <summary>
        /// Write long array.
        /// </summary>
        /// <param name="val">Long array.</param>
        public void WriteLongArray(long[] val)
        {
            if (val == null)
                WriteNullRawField();
            else
            {
                _stream.WriteByte(BinaryUtils.TypeArrayLong);
                BinaryUtils.WriteLongArray(val, _stream);
            }
        }

        /// <summary>
        /// Write named float value.
        /// </summary>
        /// <param name="fieldName">Field name.</param>
        /// <param name="val">Float value.</param>
        public void WriteFloat(string fieldName, float val)
        {
            WriteFieldId(fieldName, BinaryUtils.TypeFloat);

            _stream.WriteByte(BinaryUtils.TypeFloat);
            _stream.WriteFloat(val);
        }

        /// <summary>
        /// Write float value.
        /// </summary>
        /// <param name="val">Float value.</param>
        public void WriteFloat(float val)
        {
            _stream.WriteFloat(val);
        }

        /// <summary>
        /// Write named float array.
        /// </summary>
        /// <param name="fieldName">Field name.</param>
        /// <param name="val">Float array.</param>
        public void WriteFloatArray(string fieldName, float[] val)
        {
            WriteFieldId(fieldName, BinaryUtils.TypeArrayFloat);

            if (val == null)
                WriteNullField();
            else
            {
                _stream.WriteByte(BinaryUtils.TypeArrayFloat);
                BinaryUtils.WriteFloatArray(val, _stream);
            }
        }

        /// <summary>
        /// Write float array.
        /// </summary>
        /// <param name="val">Float array.</param>
        public void WriteFloatArray(float[] val)
        {
            if (val == null)
                WriteNullRawField();
            else
            {
                _stream.WriteByte(BinaryUtils.TypeArrayFloat);
                BinaryUtils.WriteFloatArray(val, _stream);
            }
        }

        /// <summary>
        /// Write named double value.
        /// </summary>
        /// <param name="fieldName">Field name.</param>
        /// <param name="val">Double value.</param>
        public void WriteDouble(string fieldName, double val)
        {
            WriteFieldId(fieldName, BinaryUtils.TypeDouble);

            _stream.WriteByte(BinaryUtils.TypeDouble);
            _stream.WriteDouble(val);
        }

        /// <summary>
        /// Write double value.
        /// </summary>
        /// <param name="val">Double value.</param>
        public void WriteDouble(double val)
        {
            _stream.WriteDouble(val);
        }

        /// <summary>
        /// Write named double array.
        /// </summary>
        /// <param name="fieldName">Field name.</param>
        /// <param name="val">Double array.</param>
        public void WriteDoubleArray(string fieldName, double[] val)
        {
            WriteFieldId(fieldName, BinaryUtils.TypeArrayDouble);

            if (val == null)
                WriteNullField();
            else
            {
                _stream.WriteByte(BinaryUtils.TypeArrayDouble);
                BinaryUtils.WriteDoubleArray(val, _stream);
            }
        }

        /// <summary>
        /// Write double array.
        /// </summary>
        /// <param name="val">Double array.</param>
        public void WriteDoubleArray(double[] val)
        {
            if (val == null)
                WriteNullRawField();
            else
            {
                _stream.WriteByte(BinaryUtils.TypeArrayDouble);
                BinaryUtils.WriteDoubleArray(val, _stream);
            }
        }

        /// <summary>
        /// Write named decimal value.
        /// </summary>
        /// <param name="fieldName">Field name.</param>
        /// <param name="val">Decimal value.</param>
        public void WriteDecimal(string fieldName, decimal? val)
        {
            WriteFieldId(fieldName, BinaryUtils.TypeDecimal);

            if (val == null)
                WriteNullField();
            else
            {
                _stream.WriteByte(BinaryUtils.TypeDecimal);
                BinaryUtils.WriteDecimal(val.Value, _stream);
            }
        }

        /// <summary>
        /// Write decimal value.
        /// </summary>
        /// <param name="val">Decimal value.</param>
        public void WriteDecimal(decimal? val)
        {
            if (val == null)
                WriteNullRawField();
            else
            {
                _stream.WriteByte(BinaryUtils.TypeDecimal);
                BinaryUtils.WriteDecimal(val.Value, _stream);
            }
        }

        /// <summary>
        /// Write named decimal array.
        /// </summary>
        /// <param name="fieldName">Field name.</param>
        /// <param name="val">Decimal array.</param>
        public void WriteDecimalArray(string fieldName, decimal?[] val)
        {
            WriteFieldId(fieldName, BinaryUtils.TypeArrayDecimal);

            if (val == null)
                WriteNullField();
            else
            {
                _stream.WriteByte(BinaryUtils.TypeArrayDecimal);
                BinaryUtils.WriteDecimalArray(val, _stream);
            }
        }

        /// <summary>
        /// Write decimal array.
        /// </summary>
        /// <param name="val">Decimal array.</param>
        public void WriteDecimalArray(decimal?[] val)
        {
            if (val == null)
                WriteNullRawField();
            else
            {
                _stream.WriteByte(BinaryUtils.TypeArrayDecimal);
                BinaryUtils.WriteDecimalArray(val, _stream);
            }
        }

        /// <summary>
        /// Write named date value.
        /// </summary>
        /// <param name="fieldName">Field name.</param>
        /// <param name="val">Date value.</param>
        public void WriteTimestamp(string fieldName, DateTime? val)
        {
            WriteFieldId(fieldName, BinaryUtils.TypeTimestamp);

            if (val == null)
                WriteNullField();
            else
            {
                _stream.WriteByte(BinaryUtils.TypeTimestamp);
                BinaryUtils.WriteTimestamp(val.Value, _stream);
            }
        }

        /// <summary>
        /// Write date value.
        /// </summary>
        /// <param name="val">Date value.</param>
        public void WriteTimestamp(DateTime? val)
        {
            if (val == null)
                WriteNullRawField();
            else
            {
                _stream.WriteByte(BinaryUtils.TypeTimestamp);
                BinaryUtils.WriteTimestamp(val.Value, _stream);
            }
        }

        /// <summary>
        /// Write named date array.
        /// </summary>
        /// <param name="fieldName">Field name.</param>
        /// <param name="val">Date array.</param>
        public void WriteTimestampArray(string fieldName, DateTime?[] val)
        {
            // FIX: was BinaryUtils.TypeTimestamp; the field id must be registered
            // with the array type, matching the TypeArrayTimestamp value header
            // written below (and the pattern of all other *Array methods).
            WriteFieldId(fieldName, BinaryUtils.TypeArrayTimestamp);

            if (val == null)
                WriteNullField();
            else
            {
                _stream.WriteByte(BinaryUtils.TypeArrayTimestamp);
                BinaryUtils.WriteTimestampArray(val, _stream);
            }
        }

        /// <summary>
        /// Write date array.
        /// </summary>
        /// <param name="val">Date array.</param>
        public void WriteTimestampArray(DateTime?[] val)
        {
            if (val == null)
                WriteNullRawField();
            else
            {
                _stream.WriteByte(BinaryUtils.TypeArrayTimestamp);
                BinaryUtils.WriteTimestampArray(val, _stream);
            }
        }

        /// <summary>
        /// Write named string value.
        /// </summary>
        /// <param name="fieldName">Field name.</param>
        /// <param name="val">String value.</param>
        public void WriteString(string fieldName, string val)
        {
            WriteFieldId(fieldName, BinaryUtils.TypeString);

            if (val == null)
                WriteNullField();
            else
            {
                _stream.WriteByte(BinaryUtils.TypeString);
                BinaryUtils.WriteString(val, _stream);
            }
        }

        /// <summary>
        /// Write string value.
        /// </summary>
        /// <param name="val">String value.</param>
        public void WriteString(string val)
        {
            if (val == null)
                WriteNullRawField();
            else
            {
                _stream.WriteByte(BinaryUtils.TypeString);
                BinaryUtils.WriteString(val, _stream);
            }
        }

        /// <summary>
        /// Write named string array.
        /// </summary>
        /// <param name="fieldName">Field name.</param>
        /// <param name="val">String array.</param>
        public void WriteStringArray(string fieldName, string[] val)
        {
            WriteFieldId(fieldName, BinaryUtils.TypeArrayString);

            if (val == null)
                WriteNullField();
            else
            {
                _stream.WriteByte(BinaryUtils.TypeArrayString);
                BinaryUtils.WriteStringArray(val, _stream);
            }
        }

        /// <summary>
        /// Write string array.
        /// </summary>
        /// <param name="val">String array.</param>
        public void WriteStringArray(string[] val)
        {
            if (val == null)
                WriteNullRawField();
            else
            {
                _stream.WriteByte(BinaryUtils.TypeArrayString);
                BinaryUtils.WriteStringArray(val, _stream);
            }
        }

        /// <summary>
        /// Write named GUID value.
        /// </summary>
        /// <param name="fieldName">Field name.</param>
        /// <param name="val">GUID value.</param>
        public void WriteGuid(string fieldName, Guid? val)
        {
            WriteFieldId(fieldName, BinaryUtils.TypeGuid);

            if (val == null)
                WriteNullField();
            else
            {
                _stream.WriteByte(BinaryUtils.TypeGuid);
                BinaryUtils.WriteGuid(val.Value, _stream);
            }
        }

        /// <summary>
        /// Write GUID value.
        /// </summary>
        /// <param name="val">GUID value.</param>
        public void WriteGuid(Guid? val)
        {
            if (val == null)
                WriteNullRawField();
            else
            {
                _stream.WriteByte(BinaryUtils.TypeGuid);
                BinaryUtils.WriteGuid(val.Value, _stream);
            }
        }

        /// <summary>
        /// Write named GUID array.
        /// </summary>
        /// <param name="fieldName">Field name.</param>
        /// <param name="val">GUID array.</param>
        public void WriteGuidArray(string fieldName, Guid?[] val)
        {
            WriteFieldId(fieldName, BinaryUtils.TypeArrayGuid);

            if (val == null)
                WriteNullField();
            else
            {
                _stream.WriteByte(BinaryUtils.TypeArrayGuid);
                BinaryUtils.WriteGuidArray(val, _stream);
            }
        }

        /// <summary>
        /// Write GUID array.
        /// </summary>
        /// <param name="val">GUID array.</param>
        public void WriteGuidArray(Guid?[] val)
        {
            if (val == null)
                WriteNullRawField();
            else
            {
                _stream.WriteByte(BinaryUtils.TypeArrayGuid);
                BinaryUtils.WriteGuidArray(val, _stream);
            }
        }

        /// <summary>
        /// Write named enum value.
        /// </summary>
        /// <typeparam name="T"></typeparam>
        /// <param name="fieldName">Field name.</param>
        /// <param name="val">Enum value.</param>
        public void WriteEnum<T>(string fieldName, T val)
        {
            WriteFieldId(fieldName, BinaryUtils.TypeEnum);

            WriteEnum(val);
        }

        /// <summary>
        /// Write enum value.
        /// </summary>
        /// <typeparam name="T"></typeparam>
        /// <param name="val">Enum value.</param>
        public void WriteEnum<T>(T val)
        {
            // ReSharper disable once CompareNonConstrainedGenericWithNull
            if (val == null)
                WriteNullField();
            else
            {
                var desc = _marsh.GetDescriptor(val.GetType());

                if (desc != null)
                {
                    var metaHnd = _marsh.GetBinaryTypeHandler(desc);

                    _stream.WriteByte(BinaryUtils.TypeEnum);

                    BinaryUtils.WriteEnum(this, val);

                    SaveMetadata(desc, metaHnd.OnObjectWriteFinished());
                }
                else
                {
                    // Unregistered enum, write as serializable.
                    Write(new SerializableObjectHolder(val));
                }
            }
        }

        /// <summary>
        /// Write named enum array.
        /// </summary>
        /// <typeparam name="T"></typeparam>
        /// <param name="fieldName">Field name.</param>
        /// <param name="val">Enum array.</param>
        public void WriteEnumArray<T>(string fieldName, T[] val)
        {
            WriteFieldId(fieldName, BinaryUtils.TypeArrayEnum);

            WriteEnumArray(val);
        }

        /// <summary>
        /// Write enum array.
        /// </summary>
        /// <typeparam name="T"></typeparam>
        /// <param name="val">Enum array.</param>
        public void WriteEnumArray<T>(T[] val)
        {
            WriteEnumArrayInternal(val, null);
        }

        /// <summary>
        /// Writes the enum array.
        /// </summary>
        /// <param name="val">The value.</param>
        /// <param name="elementTypeId">The element type id; resolved from the
        /// array's element type when null.</param>
        public void WriteEnumArrayInternal(Array val, int? elementTypeId)
        {
            if (val == null)
                WriteNullField();
            else
            {
                _stream.WriteByte(BinaryUtils.TypeArrayEnum);

                var elTypeId = elementTypeId ??
                    BinaryUtils.GetEnumTypeId(val.GetType().GetElementType(), Marshaller);

                BinaryUtils.WriteArray(val, this, elTypeId);
            }
        }

        /// <summary>
        /// Write named object value.
        /// </summary>
        /// <typeparam name="T"></typeparam>
        /// <param name="fieldName">Field name.</param>
        /// <param name="val">Object value.</param>
        public void WriteObject<T>(string fieldName, T val)
        {
            WriteFieldId(fieldName, BinaryUtils.TypeObject);

            // ReSharper disable once CompareNonConstrainedGenericWithNull
            if (val == null)
                WriteNullField();
            else
                Write(val);
        }

        /// <summary>
        /// Write object value.
        /// </summary>
        /// <typeparam name="T"></typeparam>
        /// <param name="val">Object value.</param>
        public void WriteObject<T>(T val)
        {
            Write(val);
        }

        /// <summary>
        /// Write named object array.
        /// </summary>
        /// <typeparam name="T">Element type.</typeparam>
        /// <param name="fieldName">Field name.</param>
        /// <param name="val">Object array.</param>
        public void WriteArray<T>(string fieldName, T[] val)
        {
            WriteFieldId(fieldName, BinaryUtils.TypeArray);

            if (val == null)
                WriteNullField();
            else
            {
                _stream.WriteByte(BinaryUtils.TypeArray);
                BinaryUtils.WriteArray(val, this);
            }
        }

        /// <summary>
        /// Write object array.
        /// </summary>
        /// <typeparam name="T">Element type.</typeparam>
        /// <param name="val">Object array.</param>
        public void WriteArray<T>(T[] val)
        {
            WriteArrayInternal(val);
        }

        /// <summary>
        /// Write object array.
        /// </summary>
        /// <param name="val">Object array.</param>
        public void WriteArrayInternal(Array val)
        {
            if (val == null)
                WriteNullRawField();
            else
            {
                _stream.WriteByte(BinaryUtils.TypeArray);
                BinaryUtils.WriteArray(val, this);
            }
        }

        /// <summary>
        /// Write named collection.
        /// </summary>
        /// <param name="fieldName">Field name.</param>
        /// <param name="val">Collection.</param>
        public void WriteCollection(string fieldName, ICollection val)
        {
            WriteFieldId(fieldName, BinaryUtils.TypeCollection);

            if (val == null)
                WriteNullField();
            else
                WriteCollection(val);
        }

        /// <summary>
        /// Write collection.
        /// </summary>
        /// <param name="val">Collection.</param>
        public void WriteCollection(ICollection val)
        {
            WriteByte(BinaryUtils.TypeCollection);

            BinaryUtils.WriteCollection(val, this);
        }

        /// <summary>
        /// Write named dictionary.
        /// </summary>
        /// <param name="fieldName">Field name.</param>
        /// <param name="val">Dictionary.</param>
        public void WriteDictionary(string fieldName, IDictionary val)
        {
            WriteFieldId(fieldName, BinaryUtils.TypeDictionary);

            if (val == null)
                WriteNullField();
            else
                WriteDictionary(val);
        }

        /// <summary>
        /// Write dictionary.
        /// </summary>
        /// <param name="val">Dictionary.</param>
        public void WriteDictionary(IDictionary val)
        {
            WriteByte(BinaryUtils.TypeDictionary);

            BinaryUtils.WriteDictionary(val, this);
        }

        /// <summary>
        /// Write NULL field.
        /// </summary>
        private void WriteNullField()
        {
            _stream.WriteByte(BinaryUtils.HdrNull);
        }

        /// <summary>
        /// Write NULL raw field.
        /// </summary>
        private void WriteNullRawField()
        {
            _stream.WriteByte(BinaryUtils.HdrNull);
        }

        /// <summary>
        /// Get raw writer. Switches this writer into raw mode: no named fields
        /// may be written for the current object afterwards.
        /// </summary>
        /// <returns>Raw writer.</returns>
        public IBinaryRawWriter GetRawWriter()
        {
            if (_curRawPos == 0)
                _curRawPos = _stream.Position;

            return this;
        }

        /// <summary>
        /// Set new builder.
        /// </summary>
        /// <param name="builder">Builder.</param>
        /// <returns>Previous builder.</returns>
        internal BinaryObjectBuilder SetBuilder(BinaryObjectBuilder builder)
        {
            BinaryObjectBuilder ret = _builder;

            _builder = builder;

            return ret;
        }

        /// <summary>
        /// Constructor.
        /// </summary>
        /// <param name="marsh">Marshaller.</param>
        /// <param name="stream">Stream.</param>
        internal BinaryWriter(Marshaller marsh, IBinaryStream stream)
        {
            _marsh = marsh;
            _stream = stream;
        }

        /// <summary>
        /// Write object.
        /// </summary>
        /// <param name="obj">Object.</param>
        public void Write<T>(T obj)
        {
            // Handle special case for null.
            // ReSharper disable once CompareNonConstrainedGenericWithNull
            if (obj == null)
            {
                _stream.WriteByte(BinaryUtils.HdrNull);

                return;
            }

            // We use GetType() of a real object instead of typeof(T) to take advantage of
            // automatic Nullable'1 unwrapping.
            Type type = obj.GetType();

            // Handle common case when primitive is written.
            if (type.IsPrimitive)
            {
                WritePrimitive(obj, type);

                return;
            }

            // Handle enums.
            if (type.IsEnum)
            {
                WriteEnum(obj);

                return;
            }

            // Handle special case for builder.
            if (WriteBuilderSpecials(obj))
                return;

            // Suppose that we faced normal object and perform descriptor lookup.
            IBinaryTypeDescriptor desc = _marsh.GetDescriptor(type);

            if (desc != null)
            {
                // Writing normal object.
                var pos = _stream.Position;

                // Dealing with handles.
                if (!(desc.Serializer is IBinarySystemTypeSerializer) && WriteHandle(pos, obj))
                    return;

                // Skip header length as not everything is known now.
                _stream.Seek(BinaryObjectHeader.Size, SeekOrigin.Current);

                // Preserve old frame.
                int oldTypeId = _curTypeId;
                IBinaryNameMapper oldConverter = _curConverter;
                IBinaryIdMapper oldMapper = _curMapper;
                int oldRawPos = _curRawPos;
                var oldPos = _curPos;
                var oldStruct = _curStruct;

                // Push new frame.
                _curTypeId = desc.TypeId;
                _curConverter = desc.NameMapper;
                _curMapper = desc.IdMapper;
                _curRawPos = 0;
                _curPos = pos;
                _curStruct = new BinaryStructureTracker(desc, desc.WriterTypeStructure);

                var schemaIdx = _schema.PushSchema();

                try
                {
                    // Write object fields.
                    desc.Serializer.WriteBinary(obj, this);

                    // Write schema.
                    var schemaOffset = _stream.Position - pos;

                    int schemaId;

                    var flags = desc.UserType
                        ? BinaryObjectHeader.Flag.UserType
                        : BinaryObjectHeader.Flag.None;

                    var hasSchema = _schema.WriteSchema(_stream, schemaIdx, out schemaId, ref flags);

                    if (hasSchema)
                    {
                        flags |= BinaryObjectHeader.Flag.HasSchema;

                        // Raw offset is in the last 4 bytes.
                        if (_curRawPos > 0)
                            _stream.WriteInt(_curRawPos - pos);
                    }
                    else
                        schemaOffset = BinaryObjectHeader.Size;

                    if (_curRawPos > 0)
                        flags |= BinaryObjectHeader.Flag.HasRaw;

                    // Calculate and write header.
                    var len = _stream.Position - pos;

                    var header = new BinaryObjectHeader(desc.TypeId, obj.GetHashCode(), len,
                        schemaId, schemaOffset, flags);

                    BinaryObjectHeader.Write(header, _stream, pos);

                    _stream.Seek(pos + len, SeekOrigin.Begin);  // Seek to the end.
                }
                finally
                {
                    _schema.PopSchema(schemaIdx);
                }

                // Apply structure updates if any.
                _curStruct.UpdateWriterStructure(this);

                // Restore old frame.
                _curTypeId = oldTypeId;
                _curConverter = oldConverter;
                _curMapper = oldMapper;
                _curRawPos = oldRawPos;
                _curPos = oldPos;
                _curStruct = oldStruct;
            }
            else
            {
                // Are we dealing with a well-known type?
                var handler = BinarySystemHandlers.GetWriteHandler(type);

                if (handler == null)
                    // We did our best, object cannot be marshalled.
                    throw new BinaryObjectException("Unsupported object type [type=" + type +
                        ", object=" + obj + ']');

                handler(this, obj);
            }
        }

        /// <summary>
        /// Write primitive type.
        /// </summary>
        /// <param name="val">Object.</param>
        /// <param name="type">Type.</param>
        private unsafe void WritePrimitive<T>(T val, Type type)
        {
            // .Net defines 14 primitive types. We support 12 - excluding IntPtr and UIntPtr.
            // Types check sequence is designed to minimize comparisons for the most frequent types.
            if (type == typeof(int))
            {
                _stream.WriteByte(BinaryUtils.TypeInt);
                _stream.WriteInt((int)(object)val);
            }
            else if (type == typeof(long))
            {
                _stream.WriteByte(BinaryUtils.TypeLong);
                _stream.WriteLong((long)(object)val);
            }
            else if (type == typeof(bool))
            {
                _stream.WriteByte(BinaryUtils.TypeBool);
                _stream.WriteBool((bool)(object)val);
            }
            else if (type == typeof(byte))
            {
                _stream.WriteByte(BinaryUtils.TypeByte);
                _stream.WriteByte((byte)(object)val);
            }
            else if (type == typeof(short))
            {
                _stream.WriteByte(BinaryUtils.TypeShort);
                _stream.WriteShort((short)(object)val);
            }
            else if (type == typeof(char))
            {
                _stream.WriteByte(BinaryUtils.TypeChar);
                _stream.WriteChar((char)(object)val);
            }
            else if (type == typeof(float))
            {
                _stream.WriteByte(BinaryUtils.TypeFloat);
                _stream.WriteFloat((float)(object)val);
            }
            else if (type == typeof(double))
            {
                _stream.WriteByte(BinaryUtils.TypeDouble);
                _stream.WriteDouble((double)(object)val);
            }
            else if (type == typeof(sbyte))
            {
                // Unsigned/signed counterparts are reinterpreted bitwise.
                sbyte val0 = (sbyte)(object)val;

                _stream.WriteByte(BinaryUtils.TypeByte);
                _stream.WriteByte(*(byte*)&val0);
            }
            else if (type == typeof(ushort))
            {
                ushort val0 = (ushort)(object)val;

                _stream.WriteByte(BinaryUtils.TypeShort);
                _stream.WriteShort(*(short*)&val0);
            }
            else if (type == typeof(uint))
            {
                uint val0 = (uint)(object)val;

                _stream.WriteByte(BinaryUtils.TypeInt);
                _stream.WriteInt(*(int*)&val0);
            }
            else if (type == typeof(ulong))
            {
                ulong val0 = (ulong)(object)val;

                _stream.WriteByte(BinaryUtils.TypeLong);
                _stream.WriteLong(*(long*)&val0);
            }
            else
                throw new BinaryObjectException("Unsupported object type [type=" + type.FullName +
                    ", object=" + val + ']');
        }

        /// <summary>
        /// Try writing object as special builder type.
        /// </summary>
        /// <param name="obj">Object.</param>
        /// <returns>True if object was written, false otherwise.</returns>
        private bool WriteBuilderSpecials<T>(T obj)
        {
            if (_builder != null)
            {
                // Special case for binary object during build.
                BinaryObject portObj = obj as BinaryObject;

                if (portObj != null)
                {
                    if (!WriteHandle(_stream.Position, portObj))
                        _builder.ProcessBinary(_stream, portObj);

                    return true;
                }

                // Special case for builder during build.
                BinaryObjectBuilder portBuilder = obj as BinaryObjectBuilder;

                if (portBuilder != null)
                {
                    if (!WriteHandle(_stream.Position, portBuilder))
                        _builder.ProcessBuilder(_stream, portBuilder);

                    return true;
                }
            }

            return false;
        }

        /// <summary>
        /// Add handle to handles map.
        /// </summary>
        /// <param name="pos">Position in stream.</param>
        /// <param name="obj">Object.</param>
        /// <returns><c>true</c> if object was written as handle.</returns>
        private bool WriteHandle(long pos, object obj)
        {
            if (_hnds == null)
            {
                // Cache absolute handle position.
                _hnds = new BinaryHandleDictionary<object, long>(obj, pos);

                return false;
            }

            long hndPos;

            if (!_hnds.TryGetValue(obj, out hndPos))
            {
                // Cache absolute handle position.
                _hnds.Add(obj, pos);

                return false;
            }

            _stream.WriteByte(BinaryUtils.HdrHnd);

            // Handle is written as difference between position before header and handle position.
            _stream.WriteInt((int)(pos - hndPos));

            return true;
        }

        /// <summary>
        /// Perform action with detached semantics: handles recorded inside the
        /// action are isolated from (and afterwards merged into) the outer map.
        /// </summary>
        /// <param name="a">Action to run against this writer.</param>
        internal void WithDetach(Action<BinaryWriter> a)
        {
            if (_detaching)
                a(this);
            else
            {
                _detaching = true;

                BinaryHandleDictionary<object, long> oldHnds = _hnds;
                _hnds = null;

                try
                {
                    a(this);
                }
                finally
                {
                    _detaching = false;

                    if (oldHnds != null)
                    {
                        // Merge newly recorded handles with old ones and restore old on the stack.
                        // Otherwise we can use current handles right away.
                        if (_hnds != null)
                            oldHnds.Merge(_hnds);

                        _hnds = oldHnds;
                    }
                }
            }
        }

        /// <summary>
        /// Stream.
        /// </summary>
        internal IBinaryStream Stream
        {
            get { return _stream; }
        }

        /// <summary>
        /// Gets collected metadatas.
        /// </summary>
        /// <returns>Collected metadatas (if any).</returns>
        internal ICollection<BinaryType> GetBinaryTypes()
        {
            return _metas == null ? null : _metas.Values;
        }

        /// <summary>
        /// Check whether the given object is binarizeble, i.e. it can be serialized with binary marshaller.
        /// </summary>
        /// <param name="obj">Object.</param>
        /// <returns>True if binarizable.</returns>
        internal bool IsBinarizable(object obj)
        {
            if (obj != null)
            {
                Type type = obj.GetType();

                // We assume object as binarizable only in case it has descriptor.
                // Collections, Enums and non-primitive arrays do not have descriptors
                // and this is fine here because we cannot know whether their members are binarizable.
                return _marsh.GetDescriptor(type) != null ||
                    BinarySystemHandlers.GetWriteHandler(type) != null;
            }

            return true;
        }

        /// <summary>
        /// Write field ID.
        /// </summary>
        /// <param name="fieldName">Field name.</param>
        /// <param name="fieldTypeId">Field type ID.</param>
        private void WriteFieldId(string fieldName, byte fieldTypeId)
        {
            if (_curRawPos != 0)
                throw new BinaryObjectException("Cannot write named fields after raw data is written.");

            var fieldId = _curStruct.GetFieldId(fieldName, fieldTypeId);

            _schema.PushField(fieldId, _stream.Position - _curPos);
        }

        /// <summary>
        /// Saves metadata for this session.
        /// </summary>
        /// <param name="desc">The descriptor.</param>
        /// <param name="fields">Fields metadata.</param>
        internal void SaveMetadata(IBinaryTypeDescriptor desc, IDictionary<string, int> fields)
        {
            Debug.Assert(desc != null);

            if (_metas == null)
            {
                _metas = new Dictionary<int, BinaryType>(1)
                {
                    {desc.TypeId, new BinaryType(desc, fields)}
                };
            }
            else
            {
                BinaryType meta;

                if (_metas.TryGetValue(desc.TypeId, out meta))
                {
                    // Merge new fields into the already-collected metadata.
                    if (fields != null)
                    {
                        IDictionary<string, int> existingFields = meta.GetFieldsMap();

                        foreach (KeyValuePair<string, int> field in fields)
                        {
                            if (!existingFields.ContainsKey(field.Key))
                                existingFields[field.Key] = field.Value;
                        }
                    }
                }
                else
                    _metas[desc.TypeId] = new BinaryType(desc, fields);
            }
        }
    }
}
//
// Author:
//   Jb Evain (jbevain@gmail.com)
//
// Copyright (c) 2008 - 2015 Jb Evain
// Copyright (c) 2008 - 2011 Novell, Inc.
//
// Licensed under the MIT/X11 license.
//

using System;
using System.Collections.Generic;

using Mono.Collections.Generic;

using SR = System.Reflection;
using Mono.Cecil.Metadata;

namespace Mono.Cecil {

	// Controls whether a generic type/method is imported as its generic
	// definition or left open.
	enum ImportGenericKind {
		Definition,
		Open,
	}

	// Stack of generic-parameter providers (types and methods) currently being
	// imported, used to resolve generic parameters back to their owner.
	struct ImportGenericContext {

		Collection<IGenericParameterProvider> stack;

		public bool IsEmpty { get { return stack == null; } }

		public ImportGenericContext (IGenericParameterProvider provider)
		{
			stack = null;
			Push (provider);
		}

		public void Push (IGenericParameterProvider provider)
		{
			// Lazily allocate on first push.
			if (stack == null)
				stack = new Collection<IGenericParameterProvider> (1) { provider };
			else
				stack.Add (provider);
		}

		public void Pop ()
		{
			stack.RemoveAt (stack.Count - 1);
		}

		// Resolves a method generic parameter by walking the stack top-down
		// for a method reference with the given name.
		public TypeReference MethodParameter (string method, int position)
		{
			for (int i = stack.Count - 1; i >= 0; i--) {
				var candidate = stack [i] as MethodReference;
				if (candidate == null)
					continue;

				if (method != candidate.Name)
					continue;

				return candidate.GenericParameters [position];
			}

			throw new InvalidOperationException ();
		}

		// Resolves a type generic parameter by walking the stack top-down for
		// a provider whose element type has the given full name.
		public TypeReference TypeParameter (string type, int position)
		{
			for (int i = stack.Count - 1; i >= 0; i--) {
				var candidate = GenericTypeFor (stack [i]);

				if (candidate.FullName != type)
					continue;

				return candidate.GenericParameters [position];
			}

			throw new InvalidOperationException ();
		}

		// Normalizes a provider to the element type owning its generic
		// parameters (declaring type for methods).
		static TypeReference GenericTypeFor (IGenericParameterProvider context)
		{
			var type = context as TypeReference;
			if (type != null)
				return type.GetElementType ();

			var method = context as MethodReference;
			if (method != null)
				return method.DeclaringType.GetElementType ();

			throw new InvalidOperationException ();
		}
	}

	// Imports System.Reflection and Mono.Cecil type/field/method references
	// into a target module, registering assembly references as needed.
	class MetadataImporter {

		readonly ModuleDefinition module;

		public MetadataImporter (ModuleDefinition module)
		{
			this.module = module;
		}

#if !CF
		// CLR primitive types mapped to their ECMA-335 element type codes.
		static readonly Dictionary<Type, ElementType> type_etype_mapping = new Dictionary<Type, ElementType> (18) {
			{ typeof (void), ElementType.Void },
			{ typeof (bool), ElementType.Boolean },
			{ typeof (char), ElementType.Char },
			{ typeof (sbyte), ElementType.I1 },
			{ typeof (byte), ElementType.U1 },
			{ typeof (short), ElementType.I2 },
			{ typeof (ushort), ElementType.U2 },
			{ typeof (int), ElementType.I4 },
			{ typeof (uint), ElementType.U4 },
			{ typeof (long), ElementType.I8 },
			{ typeof (ulong), ElementType.U8 },
			{ typeof (float), ElementType.R4 },
			{ typeof (double), ElementType.R8 },
			{ typeof (string), ElementType.String },
			{ typeof (TypedReference), ElementType.TypedByRef },
			{ typeof (IntPtr), ElementType.I },
			{ typeof (UIntPtr), ElementType.U },
			{ typeof (object), ElementType.Object },
		};

		public TypeReference ImportType (Type type, ImportGenericContext context)
		{
			return ImportType (type, context, ImportGenericKind.Open);
		}

		public TypeReference ImportType (Type type, ImportGenericContext context, ImportGenericKind import_kind)
		{
			// Arrays, pointers, byrefs, generic instances and generic
			// parameters need a type specification, not a plain reference.
			if (IsTypeSpecification (type) || ImportOpenGenericType (type, import_kind))
				return ImportTypeSpecification (type, context);

			var reference = new TypeReference (
				string.Empty,
				type.Name,
				module,
				ImportScope (type.Assembly),
				type.IsValueType);

			reference.etype = ImportElementType (type);

			if (IsNestedType (type))
				reference.DeclaringType = ImportType (type.DeclaringType, context, import_kind);
			else
				reference.Namespace = type.Namespace ?? string.Empty;

			if (type.IsGenericType)
				ImportGenericParameters (reference, type.GetGenericArguments ());

			return reference;
		}

		static bool ImportOpenGenericType (Type type, ImportGenericKind import_kind)
		{
			return type.IsGenericType && type.IsGenericTypeDefinition && import_kind == ImportGenericKind.Open;
		}

		static bool ImportOpenGenericMethod (SR.MethodBase method, ImportGenericKind import_kind)
		{
			return method.IsGenericMethod && method.IsGenericMethodDefinition && import_kind == ImportGenericKind.Open;
		}

		static bool IsNestedType (Type type)
		{
#if !SILVERLIGHT
			return type.IsNested;
#else
			return type.DeclaringType != null;
#endif
		}

		TypeReference ImportTypeSpecification (Type type, ImportGenericContext context)
		{
			if (type.IsByRef)
				return new ByReferenceType (ImportType (type.GetElementType (), context));

			if (type.IsPointer)
				return new PointerType (ImportType (type.GetElementType (), context));

			if (type.IsArray)
				return new ArrayType (ImportType (type.GetElementType (), context), type.GetArrayRank ());

			if (type.IsGenericType)
				return ImportGenericInstance (type, context);

			if (type.IsGenericParameter)
				return ImportGenericParameter (type, context);

			throw new NotSupportedException (type.FullName);
		}

		static TypeReference ImportGenericParameter (Type type, ImportGenericContext context)
		{
			// A generic parameter can only be resolved against an owner
			// already on the context stack.
			if (context.IsEmpty)
				throw new InvalidOperationException ();

			if (type.DeclaringMethod != null)
				return context.MethodParameter (type.DeclaringMethod.Name, type.GenericParameterPosition);

			if (type.DeclaringType != null)
				return context.TypeParameter (NormalizedFullName (type.DeclaringType), type.GenericParameterPosition);

			throw new InvalidOperationException();
		}

		// Cecil separates nested type names with '/' while System.Type uses
		// '+'; rebuild the Cecil-style full name for stack lookups.
		private static string NormalizedFullName (Type type)
		{
			if (IsNestedType (type))
				return NormalizedFullName (type.DeclaringType) + "/" + type.Name;

			return type.FullName;
		}

		TypeReference ImportGenericInstance (Type type, ImportGenericContext context)
		{
			var element_type = ImportType (type.GetGenericTypeDefinition (), context, ImportGenericKind.Definition);
			var instance = new GenericInstanceType (element_type);
			var arguments = type.GetGenericArguments ();
			var instance_arguments = instance.GenericArguments;

			// Push the element type so arguments that are its own generic
			// parameters resolve against the right owner.
			context.Push (element_type);
			try {
				for (int i = 0; i < arguments.Length; i++)
					instance_arguments.Add (ImportType (arguments [i], context));

				return instance;
			} finally {
				context.Pop ();
			}
		}

		static bool IsTypeSpecification (Type type)
		{
			return type.HasElementType
				|| IsGenericInstance (type)
				|| type.IsGenericParameter;
		}

		static bool IsGenericInstance (Type type)
		{
			return type.IsGenericType && !type.IsGenericTypeDefinition;
		}

		static ElementType ImportElementType (Type type)
		{
			ElementType etype;
			if (!type_etype_mapping.TryGetValue (type, out etype))
				return ElementType.None;

			return etype;
		}

		// Imports (or reuses) an assembly reference for the given runtime
		// assembly, adding it to the module's reference list when new.
		AssemblyNameReference ImportScope (SR.Assembly assembly)
		{
			AssemblyNameReference scope;
#if !SILVERLIGHT
			var name = assembly.GetName ();

			if (TryGetAssemblyNameReference (name, out scope))
				return scope;

			scope = new AssemblyNameReference (name.Name, name.Version) {
				Culture = name.CultureInfo.Name,
				PublicKeyToken = name.GetPublicKeyToken (),
				HashAlgorithm = (AssemblyHashAlgorithm) name.HashAlgorithm,
			};

			module.AssemblyReferences.Add (scope);

			return scope;
#else
			var name = AssemblyNameReference.Parse (assembly.FullName);

			if (TryGetAssemblyNameReference (name, out scope))
				return scope;

			module.AssemblyReferences.Add (name);

			return name;
#endif
		}

#if !SILVERLIGHT
		bool TryGetAssemblyNameReference (SR.AssemblyName name, out AssemblyNameReference assembly_reference)
		{
			var references = module.AssemblyReferences;

			for (int i = 0; i < references.Count; i++) {
				var reference = references [i];
				if (name.FullName != reference.FullName) // TODO compare field by field
					continue;

				assembly_reference = reference;
				return true;
			}

			assembly_reference = null;
			return false;
		}
#endif

		public FieldReference ImportField (SR.FieldInfo field, ImportGenericContext context)
		{
			var declaring_type = ImportType (field.DeclaringType, context);

			// For a field on a generic instance, resolve back to the field on
			// the generic type definition so the field type stays open.
			if (IsGenericInstance (field.DeclaringType))
				field = ResolveFieldDefinition (field);

			context.Push (declaring_type);
			try {
				return new FieldReference {
					Name = field.Name,
					DeclaringType = declaring_type,
					FieldType = ImportType (field.FieldType, context),
				};
			} finally {
				context.Pop ();
			}
		}

		static SR.FieldInfo ResolveFieldDefinition (SR.FieldInfo field)
		{
#if !SILVERLIGHT
			return field.Module.ResolveField (field.MetadataToken);
#else
			return field.DeclaringType.GetGenericTypeDefinition ().GetField (field.Name,
				SR.BindingFlags.Public
				| SR.BindingFlags.NonPublic
				| (field.IsStatic ? SR.BindingFlags.Static : SR.BindingFlags.Instance));
#endif
		}

		public MethodReference ImportMethod (SR.MethodBase method, ImportGenericContext context, ImportGenericKind import_kind)
		{
			if (IsMethodSpecification (method) || ImportOpenGenericMethod (method, import_kind))
				return ImportMethodSpecification (method, context);

			var declaring_type = ImportType (method.DeclaringType, context);

			// For a method on a generic instance, resolve back to the method
			// on the generic type definition.
			if (IsGenericInstance (method.DeclaringType))
				method = method.Module.ResolveMethod (method.MetadataToken);

			var reference = new MethodReference {
				Name = method.Name,
				HasThis = HasCallingConvention (method, SR.CallingConventions.HasThis),
				ExplicitThis = HasCallingConvention (method, SR.CallingConventions.ExplicitThis),
				DeclaringType = ImportType (method.DeclaringType, context, ImportGenericKind.Definition),
			};

			// NOTE(review): '&=' looks suspicious — one would expect '|=' to OR
			// the VarArg flag in. This matches upstream Cecil of this vintage;
			// confirm against current Mono.Cecil before changing.
			if (HasCallingConvention (method, SR.CallingConventions.VarArgs))
				reference.CallingConvention &= MethodCallingConvention.VarArg;

			if (method.IsGenericMethod)
				ImportGenericParameters (reference, method.GetGenericArguments ());

			context.Push (reference);
			try {
				var method_info = method as SR.MethodInfo;
				// Constructors (MethodBase that is not MethodInfo) have no
				// return type; use void.
				reference.ReturnType = method_info != null
					? ImportType (method_info.ReturnType, context)
					: ImportType (typeof (void), default (ImportGenericContext));

				var parameters = method.GetParameters ();
				var reference_parameters = reference.Parameters;

				for (int i = 0; i < parameters.Length; i++)
					reference_parameters.Add (
						new ParameterDefinition (ImportType (parameters [i].ParameterType, context)));

				// Restore the (possibly generic-instance) declaring type that
				// was imported before the definition was resolved.
				reference.DeclaringType = declaring_type;

				return reference;
			} finally {
				context.Pop ();
			}
		}

		static void ImportGenericParameters (IGenericParameterProvider provider, Type [] arguments)
		{
			var provider_parameters = provider.GenericParameters;

			for (int i = 0; i < arguments.Length; i++)
				provider_parameters.Add (new GenericParameter (arguments [i].Name, provider));
		}

		static bool IsMethodSpecification (SR.MethodBase method)
		{
			return method.IsGenericMethod && !method.IsGenericMethodDefinition;
		}

		MethodReference ImportMethodSpecification (SR.MethodBase method, ImportGenericContext context)
		{
			var method_info = method as SR.MethodInfo;
			if (method_info == null)
				throw new InvalidOperationException ();

			var element_method = ImportMethod (method_info.GetGenericMethodDefinition (), context, ImportGenericKind.Definition);
			var instance = new GenericInstanceMethod (element_method);
			var arguments = method.GetGenericArguments ();
			var instance_arguments = instance.GenericArguments;

			context.Push (element_method);
			try {
				for (int i = 0; i < arguments.Length; i++)
					instance_arguments.Add (ImportType (arguments [i], context));

				return instance;
			} finally {
				context.Pop ();
			}
		}

		static bool HasCallingConvention (SR.MethodBase method, SR.CallingConventions conventions)
		{
			return (method.CallingConvention & conventions) != 0;
		}
#endif

		public TypeReference ImportType (TypeReference type, ImportGenericContext context)
		{
			if (type.IsTypeSpecification ())
				return ImportTypeSpecification (type, context);

			var reference = new TypeReference (
				type.Namespace,
				type.Name,
				module,
				ImportScope (type.Scope),
				type.IsValueType);

			MetadataSystem.TryProcessPrimitiveTypeReference (reference);

			if (type.IsNested)
				reference.DeclaringType = ImportType (type.DeclaringType, context);

			if (type.HasGenericParameters)
				ImportGenericParameters (reference, type);

			return reference;
		}

		IMetadataScope ImportScope (IMetadataScope scope)
		{
			switch (scope.MetadataScopeType) {
			case MetadataScopeType.AssemblyNameReference:
				return ImportAssemblyName ((AssemblyNameReference) scope);
			case MetadataScopeType.ModuleDefinition:
				// A reference into the target module itself needs no import.
				if (scope == module) return scope;
				return ImportAssemblyName (((ModuleDefinition) scope).Assembly.Name);
			case MetadataScopeType.ModuleReference:
				throw new NotImplementedException ();
			}

			throw new NotSupportedException ();
		}

		AssemblyNameReference ImportAssemblyName (AssemblyNameReference name)
		{
			AssemblyNameReference reference;
			if (TryGetAssemblyNameReference (name, out reference))
				return reference;

			reference = new AssemblyNameReference (name.Name, name.Version) {
				Culture = name.Culture,
				HashAlgorithm = name.HashAlgorithm,
				IsRetargetable = name.IsRetargetable
			};

			// Copy the public key token into a fresh array so the imported
			// reference does not share the source's buffer.
			var pk_token = !name.PublicKeyToken.IsNullOrEmpty ()
				? new byte [name.PublicKeyToken.Length]
				: Empty<byte>.Array;

			if (pk_token.Length > 0)
				Buffer.BlockCopy (name.PublicKeyToken, 0, pk_token, 0, pk_token.Length);

			reference.PublicKeyToken = pk_token;

			module.AssemblyReferences.Add (reference);

			return reference;
		}

		bool TryGetAssemblyNameReference (AssemblyNameReference name_reference, out AssemblyNameReference assembly_reference)
		{
			var references = module.AssemblyReferences;

			for (int i = 0; i < references.Count; i++) {
				var reference = references [i];
				if (name_reference.FullName != reference.FullName) // TODO compare field by field
					continue;

				assembly_reference = reference;
				return true;
			}

			assembly_reference = null;
			return false;
		}

		static void ImportGenericParameters (IGenericParameterProvider imported, IGenericParameterProvider original)
		{
			var parameters = original.GenericParameters;
			var imported_parameters = imported.GenericParameters;

			for (int i = 0; i < parameters.Count; i++)
				imported_parameters.Add (new GenericParameter (parameters [i].Name, imported));
		}

		TypeReference ImportTypeSpecification (TypeReference type, ImportGenericContext context)
		{
			switch (type.etype) {
			case ElementType.SzArray:
				var vector = (ArrayType) type;
				return new ArrayType (ImportType (vector.ElementType, context));
			case ElementType.Ptr:
				var pointer = (PointerType) type;
				return new PointerType (ImportType (pointer.ElementType, context));
			case ElementType.ByRef:
				var byref = (ByReferenceType) type;
				return new ByReferenceType (ImportType (byref.ElementType, context));
			case ElementType.Pinned:
				var pinned = (PinnedType) type;
				return new PinnedType (ImportType (pinned.ElementType, context));
			case ElementType.Sentinel:
				var sentinel = (SentinelType) type;
				return new SentinelType (ImportType (sentinel.ElementType, context));
			case ElementType.CModOpt:
				var modopt = (OptionalModifierType) type;
				return new OptionalModifierType (
					ImportType (modopt.ModifierType, context),
					ImportType (modopt.ElementType, context));
			case ElementType.CModReqD:
				var modreq = (RequiredModifierType) type;
				return new RequiredModifierType (
					ImportType (modreq.ModifierType, context),
					ImportType (modreq.ElementType, context));
			case ElementType.Array:
				var array = (ArrayType) type;
				var imported_array = new ArrayType (ImportType (array.ElementType, context));
				if (array.IsVector)
					return imported_array;

				var dimensions = array.Dimensions;
				var imported_dimensions = imported_array.Dimensions;

				// Drop any pre-populated dimensions before copying the
				// source's exact bounds.
				imported_dimensions.Clear ();

				for (int i = 0; i < dimensions.Count; i++) {
					var dimension = dimensions [i];

					imported_dimensions.Add (new ArrayDimension (dimension.LowerBound, dimension.UpperBound));
				}

				return imported_array;
			case ElementType.GenericInst:
				var instance = (GenericInstanceType) type;
				var element_type = ImportType (instance.ElementType, context);
				var imported_instance = new GenericInstanceType (element_type);

				var arguments = instance.GenericArguments;
				var imported_arguments = imported_instance.GenericArguments;

				for (int i = 0; i < arguments.Count; i++)
					imported_arguments.Add (ImportType (arguments [i], context));

				return imported_instance;
			case ElementType.Var:
				var var_parameter = (GenericParameter) type;
				if (var_parameter.DeclaringType == null)
					throw new InvalidOperationException ();
				return context.TypeParameter (var_parameter.DeclaringType.FullName, var_parameter.Position);
			case ElementType.MVar:
				var mvar_parameter = (GenericParameter) type;
				if (mvar_parameter.DeclaringMethod == null)
					throw new InvalidOperationException ();
				return context.MethodParameter (mvar_parameter.DeclaringMethod.Name, mvar_parameter.Position);
			}

			throw new NotSupportedException (type.etype.ToString ());
		}

		public FieldReference ImportField (FieldReference field, ImportGenericContext context)
		{
			var declaring_type = ImportType (field.DeclaringType, context);

			context.Push (declaring_type);
			try {
				return new FieldReference {
					Name = field.Name,
					DeclaringType = declaring_type,
					FieldType = ImportType (field.FieldType, context),
				};
			} finally {
				context.Pop ();
			}
		}

		public MethodReference ImportMethod (MethodReference method, ImportGenericContext context)
		{
			if (method.IsGenericInstance)
				return ImportMethodSpecification (method, context);

			var declaring_type = ImportType (method.DeclaringType, context);

			var reference = new MethodReference {
				Name = method.Name,
				HasThis = method.HasThis,
				ExplicitThis = method.ExplicitThis,
				DeclaringType = declaring_type,
				CallingConvention = method.CallingConvention,
			};

			if (method.HasGenericParameters)
				ImportGenericParameters (reference, method);

			context.Push (reference);
			try {
				reference.ReturnType = ImportType (method.ReturnType, context);

				if (!method.HasParameters)
					return reference;

				var reference_parameters = reference.Parameters;

				var parameters = method.Parameters;
				for (int i = 0; i < parameters.Count; i++)
					reference_parameters.Add (
						new ParameterDefinition (ImportType (parameters [i].ParameterType, context)));

				return reference;
			} finally {
				context.Pop();
			}
		}

		MethodSpecification ImportMethodSpecification (MethodReference method, ImportGenericContext context)
		{
			if (!method.IsGenericInstance)
				throw new NotSupportedException ();

			var instance = (GenericInstanceMethod) method;
			var element_method = ImportMethod (instance.ElementMethod, context);
			var imported_instance = new GenericInstanceMethod (element_method);

			var arguments = instance.GenericArguments;
			var imported_arguments = imported_instance.GenericArguments;

			for (int i = 0; i < arguments.Count; i++)
				imported_arguments.Add (ImportType (arguments [i], context));

			return imported_instance;
		}
	}
}
/*
 * Copyright 2006-2015 TIBIC SOLUTIONS
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

using System;
using System.Collections.Generic;
using System.Web.UI;
using System.Web.UI.WebControls;
using System.Web.UI.WebControls.WebParts;

using LWAS.Extensible.Interfaces;
using LWAS.Extensible.Interfaces.Configuration;
using LWAS.Extensible.Interfaces.DataBinding;
using LWAS.Extensible.Interfaces.WebParts;

namespace LWAS.WebParts.Templating
{
    /// <summary>
    /// Builds the control tree (message/commanders/selectors/items areas) for a
    /// templated web part and delegates template instantiation/extraction to the
    /// singleton *Template classes.
    /// </summary>
    public class TemplatingProvider : ITemplatingProvider, IPageComponent
    {
        // WebPartManager of the current page; resolved when Page is assigned.
        private WebPartManager manager = null;

        private Page _page;
        public Page Page
        {
            get { return this._page; }
            set
            {
                this._page = value;
                // Re-resolve the page's WebPartManager whenever the page changes.
                this.manager = WebPartManager.GetCurrentWebPartManager(this._page);
            }
        }

        public TemplatingMode Mode { get; set; }

        // Container receiving filter/header/item/totals/footer rows.
        private Control _container;
        public Control InnerContainer
        {
            get { return _container; }
            set { _container = value; }
        }

        // Holder table for selector controls (read-only; created by Init).
        private Control _selectorsHolder;
        public Control SelectorsHolder
        {
            get { return _selectorsHolder; }
        }

        // Holder table for commander controls (read-only; created by Init).
        private Control _commandersHolder;
        public Control CommandersHolder
        {
            get { return _commandersHolder; }
        }

        // Label used to display status/error messages (created by Init).
        private Label _message;
        public Label Message
        {
            get { return _message; }
        }

        /// <summary>
        /// Creates (idempotently — existing controls are looked up by ID first)
        /// the UpdatePanels, wrappers and holder tables this provider writes to.
        /// </summary>
        /// <param name="target">Parent control receiving the generated tree.</param>
        /// <param name="messageStyle">Style applied to the message cell.</param>
        public void Init(Control target, Style messageStyle)
        {
            // Message area: UpdatePanel holding a one-cell table with a Label.
            UpdatePanel messageUpdatePanel = target.FindControl("messageUpdatePanel") as UpdatePanel;
            if (null == messageUpdatePanel)
            {
                messageUpdatePanel = new UpdatePanel();
                messageUpdatePanel.ID = "messageUpdatePanel";
                target.Controls.Add(messageUpdatePanel);

                Table messageTable = new Table();
                TableRow messageRow = new TableRow();
                TableCell messageCell = new TableCell();
                messageCell.ApplyStyle(messageStyle);
                _message = new Label();
                messageCell.Controls.Add(this._message);
                messageRow.Cells.Add(messageCell);
                messageTable.Rows.Add(messageRow);
                messageUpdatePanel.ContentTemplateContainer.Controls.Add(messageTable);
            }

            // Commanders area: UpdatePanel -> responsive wrapper -> holder table.
            UpdatePanel commandersUpdatePanel = target.FindControl("commandersUpdatePanel") as UpdatePanel;
            if (null == commandersUpdatePanel)
            {
                commandersUpdatePanel = new UpdatePanel();
                commandersUpdatePanel.ID = "commandersUpdatePanel";
                target.Controls.Add(commandersUpdatePanel);
            }

            Panel commandersWrapper = commandersUpdatePanel.FindControl("commandersWrapper") as Panel;
            if (null == commandersWrapper)
            {
                commandersWrapper = new Panel();
                commandersWrapper.ID = "commandersWrapper";
                commandersWrapper.CssClass = "table-responsive";
                commandersUpdatePanel.ContentTemplateContainer.Controls.Add(commandersWrapper);
            }

            _commandersHolder = commandersWrapper.FindControl("commandersHolder");
            if (null == _commandersHolder)
            {
                _commandersHolder = new Table();
                _commandersHolder.ID = "commandersHolder";
                ((Table)_commandersHolder).CssClass = "";
                commandersWrapper.Controls.Add(_commandersHolder);
            }

            // Selectors area: plain holder table directly under the target.
            _selectorsHolder = target.FindControl("selectorsHolder");
            if (null == _selectorsHolder)
            {
                _selectorsHolder = new Table();
                _selectorsHolder.ID = "selectorsHolder";
                target.Controls.Add(this._selectorsHolder);
            }

            // Items area: UpdatePanel -> responsive wrapper -> inner table.
            UpdatePanel updatePanel = target.FindControl("containerUpdatePanel") as UpdatePanel;
            if (null == updatePanel)
            {
                updatePanel = new UpdatePanel();
                updatePanel.ID = "containerUpdatePanel";
                target.Controls.Add(updatePanel);
            }

            Panel wrapper = updatePanel.FindControl("containerWrapper") as Panel;
            if (null == wrapper)
            {
                wrapper = new Panel();
                wrapper.ID = "containerWrapper";
                wrapper.CssClass = "table-responsive";
                updatePanel.ContentTemplateContainer.Controls.Add(wrapper);
            }

            Table innerTable = wrapper.FindControl("innerTable") as Table;
            if (null == innerTable)
            {
                innerTable = new Table();
                innerTable.ID = "innerTable";
                innerTable.CssClass = "table table-striped table-hover table-condensed";
                wrapper.Controls.Add(innerTable);
            }

            // Default the items container to the inner table unless a caller
            // already supplied one via InnerContainer.
            if (null == this._container)
            {
                _container = innerTable;
            }
        }

        // Populates the commanders holder from configuration.
        public virtual void CreateCommanders(IConfigurationType config, ITemplatable templatable, Dictionary<string, Control> commanders)
        {
            CommandersTemplate.Instance.Create(_commandersHolder, config, templatable, commanders, this.manager);
        }

        // Populates the selectors holder from configuration.
        public virtual void CreateSelectors(IConfigurationType config, ITemplatable templatable, Dictionary<string, Control> selectors)
        {
            SelectorsTemplate.Instance.Create(_selectorsHolder, config, templatable, selectors, this.manager);
        }

        // Builds the filter row inside the items container.
        public virtual void CreateFilter(IConfigurationType config, ITemplatingItemsCollection filters, IBinder binder, ITemplatable templatable)
        {
            FiltersTemplate.Instance.Create(_container, config, templatable, filters, binder, this.manager);
        }

        // Reads current filter values back out of the items container.
        public virtual void ExtractFilter(IConfigurationType config, ITemplatingItemsCollection filters)
        {
            FiltersTemplate.Instance.Extract(_container, config, filters, this.manager);
        }

        // Builds the header row inside the items container.
        public virtual void CreateHeader(IConfigurationType config, ITemplatable templatable)
        {
            HeaderTemplate.Instance.Create(_container, config, templatable, this.manager);
        }

        // Builds the footer row inside the items container.
        public virtual void CreateFooter(IConfigurationType config, ITemplatable templatable)
        {
            FooterTemplate.Instance.Create(_container, config, templatable, this.manager);
        }

        // Instantiates a grouping row for the given item.
        public virtual void InstantiateGroupIn(IConfigurationType config, IBinder binder, int itemIndex, ITemplatingItem item, ITemplatable templatable)
        {
            GroupingTemplate.Instance.Create(_container, config, templatable, binder, item, itemIndex, this.manager);
        }

        // Instantiates a data row for the given item.
        public virtual void InstantiateIn(IConfigurationType config, IBinder binder, int itemIndex, ITemplatingItem item, ITemplatable templatable)
        {
            ItemTemplate.Instance.Create(_container, config, templatable, binder, item, itemIndex, this.manager);
        }

        // Instantiates a totals row for the given item.
        public virtual void InstantiateTotalsIn(IConfigurationType config, IBinder binder, int itemIndex, ITemplatingItem item, ITemplatable templatable)
        {
            TotalsTemplate.Instance.Create(_container, config, templatable, binder, item, itemIndex, this.manager);
        }

        // Extracts all item rows from the container back into the collection.
        public virtual void ExtractItems(IConfigurationType config, int itemsCount, ITemplatingItemsCollection items)
        {
            ItemTemplate.Instance.Extract(_container, config, null, null, items, null, -1, itemsCount, this.manager);
        }

        // Factory hook so callers get this provider's item implementation.
        public ITemplatingItem NewTemplatingItemInstance()
        {
            return new TemplatingItem();
        }

        // Fills a single item from the container's controls, keyed by prefix.
        public void PopulateItem(IConfigurationType config, ITemplatingItem item, string prefix)
        {
            ItemTemplate.Instance.PopulateItem(_container, config, item, prefix);
        }
    }
}
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: folio/rpc/reservation_guarantee_svc.proto #pragma warning disable 1591 #region Designer generated code using System; using System.Threading; using System.Threading.Tasks; using grpc = global::Grpc.Core; namespace HOLMS.Types.Folio.RPC { public static partial class ReservationGuaranteeSvc { static readonly string __ServiceName = "holms.types.booking.rpc.ReservationGuaranteeSvc"; static readonly grpc::Marshaller<global::HOLMS.Types.Booking.Indicators.ReservationIndicator> __Marshaller_ReservationIndicator = grpc::Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::HOLMS.Types.Booking.Indicators.ReservationIndicator.Parser.ParseFrom); static readonly grpc::Marshaller<global::HOLMS.Types.Folio.RPC.ResGSvcPreTenderStatus> __Marshaller_ResGSvcPreTenderStatus = grpc::Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::HOLMS.Types.Folio.RPC.ResGSvcPreTenderStatus.Parser.ParseFrom); static readonly grpc::Marshaller<global::HOLMS.Types.Folio.RPC.ResGSvcTenderCheckGuaranteeRequest> __Marshaller_ResGSvcTenderCheckGuaranteeRequest = grpc::Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::HOLMS.Types.Folio.RPC.ResGSvcTenderCheckGuaranteeRequest.Parser.ParseFrom); static readonly grpc::Marshaller<global::HOLMS.Types.Folio.RPC.ResGSvcTenderCheckGuaranteeResponse> __Marshaller_ResGSvcTenderCheckGuaranteeResponse = grpc::Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::HOLMS.Types.Folio.RPC.ResGSvcTenderCheckGuaranteeResponse.Parser.ParseFrom); static readonly grpc::Marshaller<global::HOLMS.Types.Folio.RPC.ResGSvcTenderNewNotPresentCardGuaranteeRequest> __Marshaller_ResGSvcTenderNewNotPresentCardGuaranteeRequest = grpc::Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), 
global::HOLMS.Types.Folio.RPC.ResGSvcTenderNewNotPresentCardGuaranteeRequest.Parser.ParseFrom); static readonly grpc::Marshaller<global::HOLMS.Types.Folio.RPC.ResGSvcTenderNotPresentCardGuaranteeResponse> __Marshaller_ResGSvcTenderNotPresentCardGuaranteeResponse = grpc::Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::HOLMS.Types.Folio.RPC.ResGSvcTenderNotPresentCardGuaranteeResponse.Parser.ParseFrom); static readonly grpc::Marshaller<global::HOLMS.Types.Folio.RPC.ResGSvcTenderStoredNotPresentCardGuaranteeRequest> __Marshaller_ResGSvcTenderStoredNotPresentCardGuaranteeRequest = grpc::Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::HOLMS.Types.Folio.RPC.ResGSvcTenderStoredNotPresentCardGuaranteeRequest.Parser.ParseFrom); static readonly grpc::Marshaller<global::HOLMS.Types.Booking.Guarantees.VoidGuaranteeRequest> __Marshaller_VoidGuaranteeRequest = grpc::Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::HOLMS.Types.Booking.Guarantees.VoidGuaranteeRequest.Parser.ParseFrom); static readonly grpc::Marshaller<global::HOLMS.Types.Booking.Guarantees.VoidGuaranteeResponse> __Marshaller_VoidGuaranteeResponse = grpc::Marshallers.Create((arg) => global::Google.Protobuf.MessageExtensions.ToByteArray(arg), global::HOLMS.Types.Booking.Guarantees.VoidGuaranteeResponse.Parser.ParseFrom); static readonly grpc::Method<global::HOLMS.Types.Booking.Indicators.ReservationIndicator, global::HOLMS.Types.Folio.RPC.ResGSvcPreTenderStatus> __Method_GetPreTenderStatus = new grpc::Method<global::HOLMS.Types.Booking.Indicators.ReservationIndicator, global::HOLMS.Types.Folio.RPC.ResGSvcPreTenderStatus>( grpc::MethodType.Unary, __ServiceName, "GetPreTenderStatus", __Marshaller_ReservationIndicator, __Marshaller_ResGSvcPreTenderStatus); static readonly grpc::Method<global::HOLMS.Types.Folio.RPC.ResGSvcTenderCheckGuaranteeRequest, 
global::HOLMS.Types.Folio.RPC.ResGSvcTenderCheckGuaranteeResponse> __Method_TenderCheckGuarantee = new grpc::Method<global::HOLMS.Types.Folio.RPC.ResGSvcTenderCheckGuaranteeRequest, global::HOLMS.Types.Folio.RPC.ResGSvcTenderCheckGuaranteeResponse>( grpc::MethodType.Unary, __ServiceName, "TenderCheckGuarantee", __Marshaller_ResGSvcTenderCheckGuaranteeRequest, __Marshaller_ResGSvcTenderCheckGuaranteeResponse); static readonly grpc::Method<global::HOLMS.Types.Folio.RPC.ResGSvcTenderNewNotPresentCardGuaranteeRequest, global::HOLMS.Types.Folio.RPC.ResGSvcTenderNotPresentCardGuaranteeResponse> __Method_TenderNewNotPresentCardGuarantee = new grpc::Method<global::HOLMS.Types.Folio.RPC.ResGSvcTenderNewNotPresentCardGuaranteeRequest, global::HOLMS.Types.Folio.RPC.ResGSvcTenderNotPresentCardGuaranteeResponse>( grpc::MethodType.Unary, __ServiceName, "TenderNewNotPresentCardGuarantee", __Marshaller_ResGSvcTenderNewNotPresentCardGuaranteeRequest, __Marshaller_ResGSvcTenderNotPresentCardGuaranteeResponse); static readonly grpc::Method<global::HOLMS.Types.Folio.RPC.ResGSvcTenderStoredNotPresentCardGuaranteeRequest, global::HOLMS.Types.Folio.RPC.ResGSvcTenderNotPresentCardGuaranteeResponse> __Method_TenderStoredNotPresentCardGuarantee = new grpc::Method<global::HOLMS.Types.Folio.RPC.ResGSvcTenderStoredNotPresentCardGuaranteeRequest, global::HOLMS.Types.Folio.RPC.ResGSvcTenderNotPresentCardGuaranteeResponse>( grpc::MethodType.Unary, __ServiceName, "TenderStoredNotPresentCardGuarantee", __Marshaller_ResGSvcTenderStoredNotPresentCardGuaranteeRequest, __Marshaller_ResGSvcTenderNotPresentCardGuaranteeResponse); static readonly grpc::Method<global::HOLMS.Types.Booking.Guarantees.VoidGuaranteeRequest, global::HOLMS.Types.Booking.Guarantees.VoidGuaranteeResponse> __Method_VoidGuarantee = new grpc::Method<global::HOLMS.Types.Booking.Guarantees.VoidGuaranteeRequest, global::HOLMS.Types.Booking.Guarantees.VoidGuaranteeResponse>( grpc::MethodType.Unary, __ServiceName, "VoidGuarantee", 
__Marshaller_VoidGuaranteeRequest, __Marshaller_VoidGuaranteeResponse); /// <summary>Service descriptor</summary> public static global::Google.Protobuf.Reflection.ServiceDescriptor Descriptor { get { return global::HOLMS.Types.Folio.RPC.ReservationGuaranteeSvcReflection.Descriptor.Services[0]; } } /// <summary>Base class for server-side implementations of ReservationGuaranteeSvc</summary> public abstract partial class ReservationGuaranteeSvcBase { public virtual global::System.Threading.Tasks.Task<global::HOLMS.Types.Folio.RPC.ResGSvcPreTenderStatus> GetPreTenderStatus(global::HOLMS.Types.Booking.Indicators.ReservationIndicator request, grpc::ServerCallContext context) { throw new grpc::RpcException(new grpc::Status(grpc::StatusCode.Unimplemented, "")); } public virtual global::System.Threading.Tasks.Task<global::HOLMS.Types.Folio.RPC.ResGSvcTenderCheckGuaranteeResponse> TenderCheckGuarantee(global::HOLMS.Types.Folio.RPC.ResGSvcTenderCheckGuaranteeRequest request, grpc::ServerCallContext context) { throw new grpc::RpcException(new grpc::Status(grpc::StatusCode.Unimplemented, "")); } public virtual global::System.Threading.Tasks.Task<global::HOLMS.Types.Folio.RPC.ResGSvcTenderNotPresentCardGuaranteeResponse> TenderNewNotPresentCardGuarantee(global::HOLMS.Types.Folio.RPC.ResGSvcTenderNewNotPresentCardGuaranteeRequest request, grpc::ServerCallContext context) { throw new grpc::RpcException(new grpc::Status(grpc::StatusCode.Unimplemented, "")); } public virtual global::System.Threading.Tasks.Task<global::HOLMS.Types.Folio.RPC.ResGSvcTenderNotPresentCardGuaranteeResponse> TenderStoredNotPresentCardGuarantee(global::HOLMS.Types.Folio.RPC.ResGSvcTenderStoredNotPresentCardGuaranteeRequest request, grpc::ServerCallContext context) { throw new grpc::RpcException(new grpc::Status(grpc::StatusCode.Unimplemented, "")); } public virtual global::System.Threading.Tasks.Task<global::HOLMS.Types.Booking.Guarantees.VoidGuaranteeResponse> 
VoidGuarantee(global::HOLMS.Types.Booking.Guarantees.VoidGuaranteeRequest request, grpc::ServerCallContext context) { throw new grpc::RpcException(new grpc::Status(grpc::StatusCode.Unimplemented, "")); } } /// <summary>Client for ReservationGuaranteeSvc</summary> public partial class ReservationGuaranteeSvcClient : grpc::ClientBase<ReservationGuaranteeSvcClient> { /// <summary>Creates a new client for ReservationGuaranteeSvc</summary> /// <param name="channel">The channel to use to make remote calls.</param> public ReservationGuaranteeSvcClient(grpc::Channel channel) : base(channel) { } /// <summary>Creates a new client for ReservationGuaranteeSvc that uses a custom <c>CallInvoker</c>.</summary> /// <param name="callInvoker">The callInvoker to use to make remote calls.</param> public ReservationGuaranteeSvcClient(grpc::CallInvoker callInvoker) : base(callInvoker) { } /// <summary>Protected parameterless constructor to allow creation of test doubles.</summary> protected ReservationGuaranteeSvcClient() : base() { } /// <summary>Protected constructor to allow creation of configured clients.</summary> /// <param name="configuration">The client configuration.</param> protected ReservationGuaranteeSvcClient(ClientBaseConfiguration configuration) : base(configuration) { } public virtual global::HOLMS.Types.Folio.RPC.ResGSvcPreTenderStatus GetPreTenderStatus(global::HOLMS.Types.Booking.Indicators.ReservationIndicator request, grpc::Metadata headers = null, DateTime? 
deadline = null, CancellationToken cancellationToken = default(CancellationToken)) { return GetPreTenderStatus(request, new grpc::CallOptions(headers, deadline, cancellationToken)); } public virtual global::HOLMS.Types.Folio.RPC.ResGSvcPreTenderStatus GetPreTenderStatus(global::HOLMS.Types.Booking.Indicators.ReservationIndicator request, grpc::CallOptions options) { return CallInvoker.BlockingUnaryCall(__Method_GetPreTenderStatus, null, options, request); } public virtual grpc::AsyncUnaryCall<global::HOLMS.Types.Folio.RPC.ResGSvcPreTenderStatus> GetPreTenderStatusAsync(global::HOLMS.Types.Booking.Indicators.ReservationIndicator request, grpc::Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken)) { return GetPreTenderStatusAsync(request, new grpc::CallOptions(headers, deadline, cancellationToken)); } public virtual grpc::AsyncUnaryCall<global::HOLMS.Types.Folio.RPC.ResGSvcPreTenderStatus> GetPreTenderStatusAsync(global::HOLMS.Types.Booking.Indicators.ReservationIndicator request, grpc::CallOptions options) { return CallInvoker.AsyncUnaryCall(__Method_GetPreTenderStatus, null, options, request); } public virtual global::HOLMS.Types.Folio.RPC.ResGSvcTenderCheckGuaranteeResponse TenderCheckGuarantee(global::HOLMS.Types.Folio.RPC.ResGSvcTenderCheckGuaranteeRequest request, grpc::Metadata headers = null, DateTime? 
deadline = null, CancellationToken cancellationToken = default(CancellationToken)) { return TenderCheckGuarantee(request, new grpc::CallOptions(headers, deadline, cancellationToken)); } public virtual global::HOLMS.Types.Folio.RPC.ResGSvcTenderCheckGuaranteeResponse TenderCheckGuarantee(global::HOLMS.Types.Folio.RPC.ResGSvcTenderCheckGuaranteeRequest request, grpc::CallOptions options) { return CallInvoker.BlockingUnaryCall(__Method_TenderCheckGuarantee, null, options, request); } public virtual grpc::AsyncUnaryCall<global::HOLMS.Types.Folio.RPC.ResGSvcTenderCheckGuaranteeResponse> TenderCheckGuaranteeAsync(global::HOLMS.Types.Folio.RPC.ResGSvcTenderCheckGuaranteeRequest request, grpc::Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken)) { return TenderCheckGuaranteeAsync(request, new grpc::CallOptions(headers, deadline, cancellationToken)); } public virtual grpc::AsyncUnaryCall<global::HOLMS.Types.Folio.RPC.ResGSvcTenderCheckGuaranteeResponse> TenderCheckGuaranteeAsync(global::HOLMS.Types.Folio.RPC.ResGSvcTenderCheckGuaranteeRequest request, grpc::CallOptions options) { return CallInvoker.AsyncUnaryCall(__Method_TenderCheckGuarantee, null, options, request); } public virtual global::HOLMS.Types.Folio.RPC.ResGSvcTenderNotPresentCardGuaranteeResponse TenderNewNotPresentCardGuarantee(global::HOLMS.Types.Folio.RPC.ResGSvcTenderNewNotPresentCardGuaranteeRequest request, grpc::Metadata headers = null, DateTime? 
deadline = null, CancellationToken cancellationToken = default(CancellationToken)) { return TenderNewNotPresentCardGuarantee(request, new grpc::CallOptions(headers, deadline, cancellationToken)); } public virtual global::HOLMS.Types.Folio.RPC.ResGSvcTenderNotPresentCardGuaranteeResponse TenderNewNotPresentCardGuarantee(global::HOLMS.Types.Folio.RPC.ResGSvcTenderNewNotPresentCardGuaranteeRequest request, grpc::CallOptions options) { return CallInvoker.BlockingUnaryCall(__Method_TenderNewNotPresentCardGuarantee, null, options, request); } public virtual grpc::AsyncUnaryCall<global::HOLMS.Types.Folio.RPC.ResGSvcTenderNotPresentCardGuaranteeResponse> TenderNewNotPresentCardGuaranteeAsync(global::HOLMS.Types.Folio.RPC.ResGSvcTenderNewNotPresentCardGuaranteeRequest request, grpc::Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken)) { return TenderNewNotPresentCardGuaranteeAsync(request, new grpc::CallOptions(headers, deadline, cancellationToken)); } public virtual grpc::AsyncUnaryCall<global::HOLMS.Types.Folio.RPC.ResGSvcTenderNotPresentCardGuaranteeResponse> TenderNewNotPresentCardGuaranteeAsync(global::HOLMS.Types.Folio.RPC.ResGSvcTenderNewNotPresentCardGuaranteeRequest request, grpc::CallOptions options) { return CallInvoker.AsyncUnaryCall(__Method_TenderNewNotPresentCardGuarantee, null, options, request); } public virtual global::HOLMS.Types.Folio.RPC.ResGSvcTenderNotPresentCardGuaranteeResponse TenderStoredNotPresentCardGuarantee(global::HOLMS.Types.Folio.RPC.ResGSvcTenderStoredNotPresentCardGuaranteeRequest request, grpc::Metadata headers = null, DateTime? 
deadline = null, CancellationToken cancellationToken = default(CancellationToken)) { return TenderStoredNotPresentCardGuarantee(request, new grpc::CallOptions(headers, deadline, cancellationToken)); } public virtual global::HOLMS.Types.Folio.RPC.ResGSvcTenderNotPresentCardGuaranteeResponse TenderStoredNotPresentCardGuarantee(global::HOLMS.Types.Folio.RPC.ResGSvcTenderStoredNotPresentCardGuaranteeRequest request, grpc::CallOptions options) { return CallInvoker.BlockingUnaryCall(__Method_TenderStoredNotPresentCardGuarantee, null, options, request); } public virtual grpc::AsyncUnaryCall<global::HOLMS.Types.Folio.RPC.ResGSvcTenderNotPresentCardGuaranteeResponse> TenderStoredNotPresentCardGuaranteeAsync(global::HOLMS.Types.Folio.RPC.ResGSvcTenderStoredNotPresentCardGuaranteeRequest request, grpc::Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken)) { return TenderStoredNotPresentCardGuaranteeAsync(request, new grpc::CallOptions(headers, deadline, cancellationToken)); } public virtual grpc::AsyncUnaryCall<global::HOLMS.Types.Folio.RPC.ResGSvcTenderNotPresentCardGuaranteeResponse> TenderStoredNotPresentCardGuaranteeAsync(global::HOLMS.Types.Folio.RPC.ResGSvcTenderStoredNotPresentCardGuaranteeRequest request, grpc::CallOptions options) { return CallInvoker.AsyncUnaryCall(__Method_TenderStoredNotPresentCardGuarantee, null, options, request); } public virtual global::HOLMS.Types.Booking.Guarantees.VoidGuaranteeResponse VoidGuarantee(global::HOLMS.Types.Booking.Guarantees.VoidGuaranteeRequest request, grpc::Metadata headers = null, DateTime? 
deadline = null, CancellationToken cancellationToken = default(CancellationToken)) { return VoidGuarantee(request, new grpc::CallOptions(headers, deadline, cancellationToken)); } public virtual global::HOLMS.Types.Booking.Guarantees.VoidGuaranteeResponse VoidGuarantee(global::HOLMS.Types.Booking.Guarantees.VoidGuaranteeRequest request, grpc::CallOptions options) { return CallInvoker.BlockingUnaryCall(__Method_VoidGuarantee, null, options, request); } public virtual grpc::AsyncUnaryCall<global::HOLMS.Types.Booking.Guarantees.VoidGuaranteeResponse> VoidGuaranteeAsync(global::HOLMS.Types.Booking.Guarantees.VoidGuaranteeRequest request, grpc::Metadata headers = null, DateTime? deadline = null, CancellationToken cancellationToken = default(CancellationToken)) { return VoidGuaranteeAsync(request, new grpc::CallOptions(headers, deadline, cancellationToken)); } public virtual grpc::AsyncUnaryCall<global::HOLMS.Types.Booking.Guarantees.VoidGuaranteeResponse> VoidGuaranteeAsync(global::HOLMS.Types.Booking.Guarantees.VoidGuaranteeRequest request, grpc::CallOptions options) { return CallInvoker.AsyncUnaryCall(__Method_VoidGuarantee, null, options, request); } /// <summary>Creates a new instance of client from given <c>ClientBaseConfiguration</c>.</summary> protected override ReservationGuaranteeSvcClient NewInstance(ClientBaseConfiguration configuration) { return new ReservationGuaranteeSvcClient(configuration); } } /// <summary>Creates service definition that can be registered with a server</summary> /// <param name="serviceImpl">An object implementing the server-side handling logic.</param> public static grpc::ServerServiceDefinition BindService(ReservationGuaranteeSvcBase serviceImpl) { return grpc::ServerServiceDefinition.CreateBuilder() .AddMethod(__Method_GetPreTenderStatus, serviceImpl.GetPreTenderStatus) .AddMethod(__Method_TenderCheckGuarantee, serviceImpl.TenderCheckGuarantee) .AddMethod(__Method_TenderNewNotPresentCardGuarantee, 
serviceImpl.TenderNewNotPresentCardGuarantee) .AddMethod(__Method_TenderStoredNotPresentCardGuarantee, serviceImpl.TenderStoredNotPresentCardGuarantee) .AddMethod(__Method_VoidGuarantee, serviceImpl.VoidGuarantee).Build(); } } } #endregion
using System;
using System.Data;
using System.Configuration;
using System.Web;
using System.Web.Security;
using System.Web.UI;
using System.Web.UI.WebControls;
using System.Web.UI.WebControls.WebParts;
using System.Web.UI.HtmlControls;
using ProdProcessCurrentTableAdapters;

/// <summary>
/// Data-access facade over the ProdProcessCurrent typed table adapters used by
/// the planning pages: incoming orders, component bind states and ordered
/// components. Adapters are created lazily, one per instance.
/// </summary>
public class PlanningManagement
{
    public enum BindOperation { Bind, Unbind };

    private OrderTableAdapter _LocalOrderTableAdapter = null;
    private BindStateTableAdapter _BindState = null;
    private OrderComponentsTableAdapter _OrderComponents = null;

    // Well-known BindState.Causal values.
    public static int BINDSTATE_CAUSAL_UNLOAD = 800;
    public static int BINDSTATE_CAUSAL_LOAD = 200;
    // BindState.Type lifecycle values (initial -> ready to unload -> unloaded).
    public static int BINDSTATE_TYPE_INITIAL = 0;
    public static int BINDSTATE_TYPE_READYTOUNLOAD = 5;
    public static int BINDSTATE_TYPE_UNLOADED = 10;
    // OrderComponents.State values.
    public static int ORDERCOMPONENTS_STATE_INITIAL = 0;
    public static int ORDERCOMPONENTS_STATE_CHECKED = 10;

    /// <summary>Lazily-created adapter for the Order table.</summary>
    protected OrderTableAdapter LocalOrderTableAdapter
    {
        get
        {
            if (_LocalOrderTableAdapter == null)
                _LocalOrderTableAdapter = new OrderTableAdapter();
            return _LocalOrderTableAdapter;
        }
    }

    /// <summary>Lazily-created adapter for the BindState table.</summary>
    protected BindStateTableAdapter BindState
    {
        get
        {
            if (_BindState == null)
                _BindState = new BindStateTableAdapter();
            return _BindState;
        }
    }

    /// <summary>Lazily-created adapter for the OrderComponents table.</summary>
    protected OrderComponentsTableAdapter OrderComponents
    {
        get
        {
            if (_OrderComponents == null)
                _OrderComponents = new OrderComponentsTableAdapter();
            return _OrderComponents;
        }
    }

    // ////////////////////////////////////////////////////////////////////////////////////
    // Methods definition
    // ////////////////////////////////////////////////////////////////////////////////////

    // ------------------------------------------------------------------------------------
    // Selections
    // ------------------------------------------------------------------------------------

    /// <summary>Returns every row of the Order table.</summary>
    public ProdProcessCurrent.OrderDataTable GetIncomingOrders()
    {
        return LocalOrderTableAdapter.GetOrders();
    }

    // ------------------------------------------------------------------------------------
    /// <summary>
    /// Returns the order identified by (SetNo, SerialNo), or null when no such
    /// order exists or the lookup fails.
    /// </summary>
    public ProdProcessCurrent.OrderRow GetIncomingOrders(int SetNo, int SerialNo)
    {
        try
        {
            // Check the row count explicitly instead of indexing and relying on
            // the thrown exception for the ordinary "no match" case.
            ProdProcessCurrent.OrderDataTable orders =
                LocalOrderTableAdapter.GetOrdersByIDs(SetNo, SerialNo);
            if (orders.Count > 0)
                return orders[0];
            return null;
        }
        catch (Exception exp)
        {
            // Best effort: callers treat null as "not found". Keep swallowing
            // unexpected data-access failures as the original code did.
            Console.WriteLine("Exception thrown: " + exp.Message);
            return null;
        }
    }

    // ------------------------------------------------------------------------------------
    /// <summary>Returns the order with the given serial number in set 0, or null.</summary>
    public ProdProcessCurrent.OrderRow GetIncomingOrders(int SerialNo)
    {
        return GetIncomingOrders(0, SerialNo);
    }

    // ------------------------------------------------------------------------------------
    /// <summary>Counts orders more recent than DateDiff (unit defined by the stored query).</summary>
    public int CountRecentOrders(int DateDiff)
    {
        return (Int32)LocalOrderTableAdapter.CountRecentOrders(DateDiff);
    }

    /// <summary>Returns every row of the BindState table.</summary>
    public ProdProcessCurrent.BindStateDataTable GetBindState()
    {
        return BindState.GetBindState();
    }

    /// <summary>Returns the bind states with the given Type.</summary>
    public ProdProcessCurrent.BindStateDataTable GetBindStateByType(int Type)
    {
        return BindState.GetByType(Type);
    }

    /// <summary>
    /// Returns the bind states with the given Type, optionally restricted by a
    /// text filter. A null/blank filter falls back to the unfiltered query.
    /// </summary>
    public ProdProcessCurrent.BindStateDataTable GetBindStateByType(int Type, string Filter)
    {
        if (Filter == null || Filter.Trim().Length == 0)
            return BindState.GetByType(Type);
        return BindState.GetDataByFilter(Filter, Type);
    }

    /// <summary>Returns the bind states for a component at the given Type.</summary>
    public ProdProcessCurrent.BindStateDataTable GetBindStateByComponentID(string ComponentID, int Type)
    {
        if (ComponentID == null)
            ComponentID = "";
        return BindState.GetBindStateBy(ComponentID, Type);
    }

    /// <summary>Returns the bind states of one product of one order.</summary>
    public ProdProcessCurrent.BindStateDataTable GetBindStateBy(
        int OrderSetNo, int OrderSerialNo, string ProductID)
    {
        if (ProductID == null)
            ProductID = "";
        // By default it refers to the MixID 0, that is the whole product
        return BindState.GetDataByOrderProduct(OrderSetNo, OrderSerialNo, ProductID);
    }

    /// <summary>Returns the bind states of one component of one product of one order.</summary>
    public ProdProcessCurrent.BindStateDataTable GetBindStateByIDs(
        int OrderSetNo, int OrderSerialNo, string ProductID, string ComponentID)
    {
        if (ProductID == null)
            ProductID = "";
        if (ComponentID == null)
            ComponentID = "";
        // By default it refers to the MixID 0, that is the whole product
        return BindState.GetBindStateByNoMixID(OrderSetNo, OrderSerialNo, ProductID, ComponentID);
    }

    /// <summary>
    /// Returns the single bind state identified by the full key
    /// (order, product, component, mix), or null when not present.
    /// </summary>
    public ProdProcessCurrent.BindStateRow GetBindStateByIDs(
        int OrderSetNo, int OrderSerialNo, string ProductID, string ComponentID, int MixID)
    {
        if (ProductID == null)
            ProductID = "";
        if (ComponentID == null)
            ComponentID = "";
        ProdProcessCurrent.BindStateDataTable bindStateList =
            BindState.GetBindStateByIDs(OrderSetNo, OrderSerialNo, ProductID, ComponentID, MixID);
        if (bindStateList.Rows.Count == 0)
            return null;
        return bindStateList[0];
    }

    /// <summary>Returns every row of the OrderComponents table.</summary>
    public ProdProcessCurrent.OrderComponentsDataTable GetOrderComponents()
    {
        return OrderComponents.GetAll();
    }

    /// <summary>Counts the order components in the given State (0 when the query yields no value).</summary>
    public int CountOrderComponentsBy(int State)
    {
        int? outValue = OrderComponents.CountByState(State);
        if (outValue.HasValue)
            return outValue.Value;
        return 0;
    }

    /// <summary>Returns the order components for a component id.</summary>
    public ProdProcessCurrent.OrderComponentsDataTable GetOrderComponentsBy(string ComponentID)
    {
        if (ComponentID == null)
            ComponentID = "";
        return OrderComponents.GetDataByComponentID(ComponentID);
    }

    /// <summary>Returns the order-components rows with the given record ID.</summary>
    public ProdProcessCurrent.OrderComponentsDataTable GetOrderComponentsListBy(int ID)
    {
        return OrderComponents.GetDataByID(ID);
    }

    /// <summary>Returns the single order component with the given record ID, or null.</summary>
    public ProdProcessCurrent.OrderComponentsRow GetOrderComponentsBy(int ID)
    {
        ProdProcessCurrent.OrderComponentsDataTable orderComponentsList =
            OrderComponents.GetDataByID(ID);
        if (orderComponentsList.Count > 0)
            return orderComponentsList[0];
        return null;
    }

    // ------------------------------------------------------------------------------------
    // ------------------------------------------------------------------------------------
    /// <summary>
    /// r124
    /// Sum of bound quantities associated to componentID for the given bind-state type.
    /// Returns 0 when the aggregate query yields no value.
    /// </summary>
    /// <param name="ComponentID"></param>
    /// <param name="Type"></param>
    /// <returns></returns>
    public float GetBoundQuantity(string ComponentID, int Type)
    {
        int? typeVal = Type;
        double? boundAtInitialState = (double?)BindState.GetBoundQuantity(ComponentID, typeVal);
        if (boundAtInitialState.HasValue)
            return (float)boundAtInitialState.Value;
        return 0.0f;
    }

    // ------------------------------------------------------------------------------------
    // ------------------------------------------------------------------------------------
    /// <summary>
    /// r124
    /// Sum of quantities of all bind state with type == BINDSTATE_TYPE_INITIAL or BINDSTATE_TYPE_READYTOUNLOAD
    /// </summary>
    /// <param name="ComponentID"></param>
    /// <returns></returns>
    public float GetBoundQuantity(string ComponentID)
    {
        return GetBoundQuantity(ComponentID, BINDSTATE_TYPE_INITIAL)
            + GetBoundQuantity(ComponentID, BINDSTATE_TYPE_READYTOUNLOAD);
    }

    // ------------------------------------------------------------------------------------
    // Updates
    // ------------------------------------------------------------------------------------

    /// <summary>Creates a detached Order row (not yet persisted).</summary>
    public ProdProcessCurrent.OrderRow CreateNewOrderRow()
    {
        ProdProcessCurrent.OrderDataTable orderDataTable = new ProdProcessCurrent.OrderDataTable();
        return orderDataTable.NewOrderRow();
    }

    // ------------------------------------------------------------------------------------
    /// <summary>Inserts the given order row; true when exactly one row was written.</summary>
    public bool AddNewOrderRow(ProdProcessCurrent.OrderRow orderRow)
    {
        // The row belongs to a detached table, so copy its values into a fresh
        // table that the adapter can push to the database.
        ProdProcessCurrent.OrderDataTable orderDataTable = new ProdProcessCurrent.OrderDataTable();
        orderDataTable.Rows.Add(orderRow.ItemArray);
        int affectedRows = LocalOrderTableAdapter.Update(orderDataTable);
        return affectedRows == 1;
    }

    // ------------------------------------------------------------------------------------
    /// <summary>Persists changes to an existing order row; true when exactly one row was written.</summary>
    public bool UpdateOrderRow(ProdProcessCurrent.OrderRow orderRow)
    {
        int affectedRows = LocalOrderTableAdapter.Update(orderRow);
        return affectedRows == 1;
    }

    // ------------------------------------------------------------------------------------
    /// <summary>
    /// Creates a detached BindState row pre-filled with defaults: now as bind
    /// date, the current Membership user, MixID 0 (whole product), unload
    /// causal and initial type.
    /// </summary>
    public ProdProcessCurrent.BindStateRow CreateNewBindStateRow()
    {
        ProdProcessCurrent.BindStateDataTable dataTable = new ProdProcessCurrent.BindStateDataTable();
        ProdProcessCurrent.BindStateRow dataRow = dataTable.NewBindStateRow();
        dataRow.DateBind = DateTime.Now;
        // NOTE(review): assumes an authenticated user; Membership.GetUser()
        // returns null for anonymous requests — confirm callers are authenticated.
        dataRow.UserBind = Membership.GetUser().UserName;
        dataRow.MixID = 0;
        dataRow.Causal = BINDSTATE_CAUSAL_UNLOAD;
        dataRow.Type = BINDSTATE_TYPE_INITIAL;
        return dataRow;
    }

    // ------------------------------------------------------------------------------------
    /// <summary>Inserts the given bind-state row; true when exactly one row was written.</summary>
    public bool AddNewBindStateRow(ProdProcessCurrent.BindStateRow dataRow)
    {
        ProdProcessCurrent.BindStateDataTable dataTable = new ProdProcessCurrent.BindStateDataTable();
        dataTable.Rows.Add(dataRow.ItemArray);
        int affectedRows = BindState.Update(dataTable);
        return affectedRows == 1;
    }

    // ------------------------------------------------------------------------------------
    /// <summary>Persists changes to an existing bind-state row; true when exactly one row was written.</summary>
    public bool UpdateBindStateRow(ProdProcessCurrent.BindStateRow dataRow)
    {
        int affectedRows = BindState.Update(dataRow);
        return affectedRows == 1;
    }

    // ------------------------------------------------------------------------------------
    /// <summary>Deletes the given bind-state row; true when exactly one row was removed.</summary>
    public bool DeleteBindStateRow(ProdProcessCurrent.BindStateRow dataRow)
    {
        dataRow.Delete();
        int affectedRows = BindState.Update(dataRow);
        return affectedRows == 1;
    }

    // ------------------------------------------------------------------------------------
    /// <summary>
    /// Creates a detached OrderComponents row pre-filled with defaults:
    /// now for all dates, the current Membership user, initial state, quantity 0.
    /// </summary>
    public ProdProcessCurrent.OrderComponentsRow CreateNewOrderComponents()
    {
        ProdProcessCurrent.OrderComponentsDataTable dataTable = new ProdProcessCurrent.OrderComponentsDataTable();
        ProdProcessCurrent.OrderComponentsRow dataRow = dataTable.NewOrderComponentsRow();
        dataRow.DateLastModify = DateTime.Now;
        dataRow.UserLastModify = Membership.GetUser().UserName;
        dataRow.DateRegistration = DateTime.Now;
        dataRow.DateSupply = DateTime.Now;
        dataRow.State = ORDERCOMPONENTS_STATE_INITIAL;
        dataRow.Quantity = 0;
        return dataRow;
    }

    // ------------------------------------------------------------------------------------
    /// <summary>Inserts the given order-components row; true when exactly one row was written.</summary>
    public bool AddNewOrderComponentsRow(ProdProcessCurrent.OrderComponentsRow dataRow)
    {
        ProdProcessCurrent.OrderComponentsDataTable dataTable = new ProdProcessCurrent.OrderComponentsDataTable();
        dataTable.Rows.Add(dataRow.ItemArray);
        int affectedRows = OrderComponents.Update(dataTable);
        return affectedRows == 1;
    }

    // ------------------------------------------------------------------------------------
    /// <summary>Persists changes to an existing order-components row; true when exactly one row was written.</summary>
    public bool UpdateOrderComponentsRow(ProdProcessCurrent.OrderComponentsRow dataRow)
    {
        int affectedRows = OrderComponents.Update(dataRow);
        return affectedRows == 1;
    }

    /// <summary>
    /// Loads the order component with the given ID, applies the supplied
    /// values and persists it. Silently does nothing when the ID is unknown.
    /// State is parsed as an integer and will throw on non-numeric input.
    /// </summary>
    public void UpdateOrderComponents(int ID, float Quantity, string Notes,
        string SupplierName, string State, DateTime DateSupply)
    {
        ProdProcessCurrent.OrderComponentsRow singleOrderComponent = GetOrderComponentsBy(ID);
        if (singleOrderComponent == null)
            return;
        singleOrderComponent.Quantity = Quantity;
        singleOrderComponent.Notes = Notes;
        singleOrderComponent.SupplierName = SupplierName;
        singleOrderComponent.State = Int32.Parse(State);
        singleOrderComponent.DateSupply = DateSupply;
        UpdateOrderComponentsRow(singleOrderComponent);
    }

    // ------------------------------------------------------------------------------------
    // Procedures
    // ------------------------------------------------------------------------------------

    /// <summary>Counts the bind states of one order at the given Type (0 when the query yields no value).</summary>
    public int CountBindStateWithSameOrder(int OrderSetNo, int OrderSerialNo, int Type)
    {
        int? returnedValue = BindState.CountSameOrder(OrderSetNo, OrderSerialNo, Type);
        if (returnedValue.HasValue)
            return returnedValue.Value;
        return 0;
    }

    /// <summary>Returns the highest unload serial for the given Type (0 when the query yields no value).</summary>
    public int GetBindStateMaxUnloadSerial(int Type)
    {
        int? returnedValue = BindState.MaxUnloadSerial(Type);
        if (returnedValue.HasValue)
            return returnedValue.Value;
        return 0;
    }

    /// <summary>
    /// Builds the (Label, State) data source bound by the order-views repeater.
    /// Labels are user-facing Italian strings and are kept verbatim.
    /// </summary>
    public static DataTable GetRepeaterViewsDataSource()
    {
        DataTable repeaterViewsData = new DataTable();
        repeaterViewsData.Columns.Add("Label", typeof(string));
        repeaterViewsData.Columns.Add("State", typeof(Int32));
        repeaterViewsData.Rows.Add(new Object[] {
            "Ordini in stato di attesa", SchedulingManagement.ORDERPRODUCTS_INITSTATE });
        repeaterViewsData.Rows.Add(new Object[] {
            "Ordini confermati", SchedulingManagement.ORDERPRODUCTS_CONFIRMED });
        repeaterViewsData.Rows.Add(new Object[] {
            "Ordini sospesi/rifutati", SchedulingManagement.ORDERPRODUCTS_REFUSED });
        //repeaterViewsData.Rows.Add(new Object[] {
        //    "Da magazzino", SchedulingManagement.ORDERPRODUCTS_FROMSTOCK });
        return repeaterViewsData;
    }

    /// <summary>
    /// Deletes every bind state attached to the given order/product and pushes
    /// the deletions to the database in a single adapter update.
    /// </summary>
    public void UnbindAll(int OrderSetNo, int OrderSerialNo, string ProductID)
    {
        ProdProcessCurrent.BindStateDataTable bindStateDataTable =
            GetBindStateBy(OrderSetNo, OrderSerialNo, ProductID);
        foreach (ProdProcessCurrent.BindStateRow bindStateRow in bindStateDataTable)
        {
            bindStateRow.Delete();
        }
        BindState.Update(bindStateDataTable);
    }
}
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
using NDesk.Options;
using Sep.Git.Tfs.Core;
using StructureMap;
using Sep.Git.Tfs.Util;
using Sep.Git.Tfs.Core.TfsInterop;

namespace Sep.Git.Tfs.Commands
{
    /// <summary>
    /// Implements the `git tfs clone` command: initializes a git repository
    /// (via <see cref="Init"/>), fetches history from TFS (via <see cref="Fetch"/>)
    /// and optionally initializes all TFS branches (via <see cref="InitBranch"/>).
    /// </summary>
    [Pluggable("clone")]
    [Description("clone [options] tfs-url-or-instance-name repository-path <git-repository-path>\n  ex : git tfs clone http://myTfsServer:8080/tfs/TfsRepository $/ProjectName/ProjectBranch\n")]
    public class Clone : GitTfsCommand
    {
        private readonly Fetch fetch;
        private readonly Init init;
        private readonly Globals globals;
        private readonly InitBranch initBranch;
        // When true, a failed clone leaves the partial repository on disk so it
        // can be resumed by re-running clone with the same parameters.
        private bool resumable;
        private TextWriter stdout;

        public Clone(Globals globals, Fetch fetch, Init init, InitBranch initBranch, TextWriter stdout)
        {
            this.fetch = fetch;
            this.init = init;
            this.globals = globals;
            this.initBranch = initBranch;
            this.stdout = stdout;
            globals.GcCountdown = globals.GcPeriod;
        }

        /// <summary>Options = init options + fetch options + the clone-only --resumable flag.</summary>
        public OptionSet OptionSet
        {
            get
            {
                return init.OptionSet.Merge(fetch.OptionSet)
                    .Add("resumable", "if an error occurred, try to continue when you restart clone with same parameters",
                        v => resumable = v != null);
            }
        }

        /// <summary>Clones into a directory named after the last TFS path segment.</summary>
        public int Run(string tfsUrl, string tfsRepositoryPath)
        {
            return Run(tfsUrl, tfsRepositoryPath, Path.GetFileName(tfsRepositoryPath));
        }

        /// <summary>
        /// Full clone: create/open the git directory, init the TFS remote,
        /// verify the TFS path, fetch, then (for --branches=all) init branches.
        /// On failure before fetch, the created/used directory is cleaned up
        /// unless --resumable was given. Returns the exit code of the last
        /// init/branch step (0 on success).
        /// </summary>
        public int Run(string tfsUrl, string tfsRepositoryPath, string gitRepositoryPath)
        {
            // Remember the starting directory: we must cd back out before we
            // can delete the repository directory on failure (see catch below).
            var currentDir = Environment.CurrentDirectory;
            var repositoryDirCreated = InitGitDir(gitRepositoryPath);

            // TFS string representations of repository paths do not end in trailing slashes
            if (tfsRepositoryPath != GitTfsConstants.TfsRoot)
                tfsRepositoryPath = (tfsRepositoryPath ?? string.Empty).TrimEnd('/');

            int retVal = 0;
            try
            {
                if (repositoryDirCreated)
                {
                    retVal = init.Run(tfsUrl, tfsRepositoryPath, gitRepositoryPath);
                }
                else
                {
                    // Directory already existed (resumable/debug case): try to
                    // reuse the repository in it, re-initializing only if that fails.
                    try
                    {
                        Environment.CurrentDirectory = gitRepositoryPath;
                        globals.Repository = init.GitHelper.MakeRepository(globals.GitDir);
                    }
                    catch (Exception)
                    {
                        retVal = init.Run(tfsUrl, tfsRepositoryPath, gitRepositoryPath);
                    }
                }
                VerifyTfsPathToClone(tfsRepositoryPath);
            }
            catch
            {
                // Roll back the on-disk state unless the user asked for a
                // resumable clone, then rethrow the original failure.
                if (!resumable)
                {
                    try
                    {
                        // if we appeared to be inside repository dir when exception was thrown - we won't be able to delete it
                        Environment.CurrentDirectory = currentDir;
                        if (repositoryDirCreated)
                            Directory.Delete(gitRepositoryPath, recursive: true);
                        else
                            CleanDirectory(gitRepositoryPath);
                    }
                    catch (IOException e)
                    {
                        // swallow IOException. Smth went wrong before this and we're much more interested in that error
                        string msg = String.Format("warning: Something went wrong while cleaning file after internal error (See below).\n    Can't clean up files because of IOException:\n{0}\n", e.IndentExceptionMessage());
                        Trace.WriteLine(msg);
                    }
                    catch (UnauthorizedAccessException e)
                    {
                        // swallow it also
                        string msg = String.Format("warning: Something went wrong while cleaning file after internal error (See below).\n    Can't clean up files because of UnauthorizedAccessException:\n{0}\n", e.IndentExceptionMessage());
                        Trace.WriteLine(msg);
                    }
                }
                throw;
            }
            // Tracks whether the fetch/branch phase threw, so the finally block
            // below knows not to mask that exception with its own.
            bool errorOccurs = false;
            try
            {
                // Cloning the TFS root can't track branches.
                if (tfsRepositoryPath == GitTfsConstants.TfsRoot)
                    fetch.BranchStrategy = BranchStrategy.None;

                globals.Repository.SetConfig(GitTfsConstants.IgnoreBranches, (fetch.BranchStrategy == BranchStrategy.None).ToString());

                if (retVal == 0)
                {
                    fetch.Run(fetch.BranchStrategy == BranchStrategy.All);
                    globals.Repository.GarbageCollect();
                }

                if (fetch.BranchStrategy == BranchStrategy.All && initBranch != null)
                {
                    initBranch.CloneAllBranches = true;
                    retVal = initBranch.Run();
                }
            }
            catch (GitTfsException)
            {
                errorOccurs = true;
                throw;
            }
            catch (Exception ex)
            {
                errorOccurs = true;
                // Wrap unexpected failures with a hint on how to resume.
                throw new GitTfsException("error: a problem occurred when trying to clone the repository. Try to solve the problem described below.\nIn any case, after, try to continue using command `git tfs "
                    + (fetch.BranchStrategy == BranchStrategy.All ? "branch init --all" : "fetch") + "`\n", ex);
            }
            finally
            {
                try
                {
                    // Check out the fetched work by merging the remote ref
                    // (skipped for bare repositories).
                    if (!init.IsBare)
                        globals.Repository.Merge(globals.Repository.ReadTfsRemote(globals.RemoteId).RemoteRef);
                }
                catch (Exception)
                {
                    //Swallow exception because the previously thrown exception is more important...
                    if (!errorOccurs)
                        throw;
                }
            }
            return retVal;
        }

        /// <summary>
        /// Sanity-checks the TFS path being cloned: it must exist, and when the
        /// server exposes branch information, warns (or fails for
        /// --branches=all) when the path is not a branch root. Server errors
        /// here are reported as warnings and do not abort the clone.
        /// </summary>
        private void VerifyTfsPathToClone(string tfsRepositoryPath)
        {
            if (initBranch == null)
                return;
            try
            {
                var remote = globals.Repository.ReadTfsRemote(GitTfsConstants.DefaultRepositoryId);

                if (!remote.Tfs.IsExistingInTfs(tfsRepositoryPath))
                    throw new GitTfsException("error: the path " + tfsRepositoryPath + " you want to clone doesn't exist!")
                        .WithRecommendation("To discover which branch to clone, you could use the command :\ngit tfs list-remote-branches " + remote.TfsUrl);

                if (!remote.Tfs.CanGetBranchInformation)
                    return;
                var tfsTrunkRepository = remote.Tfs.GetRootTfsBranchForRemotePath(tfsRepositoryPath, false);
                if (tfsTrunkRepository == null)
                {
                    // Path is above every branch root (or there are none):
                    // branch management won't be possible.
                    var tfsRootBranches = remote.Tfs.GetAllTfsRootBranchesOrderedByCreation();
                    if (!tfsRootBranches.Any())
                    {
                        stdout.WriteLine("info: no TFS root found !\n\nPS:perhaps you should convert your trunk folder into a branch in TFS.");
                        return;
                    }
                    var cloneMsg = " => If you want to manage branches with git-tfs, clone one of this branch instead :\n - "
                        + tfsRootBranches.Aggregate((s1, s2) => s1 + "\n - " + s2)
                        + "\n\nPS:if your branch is not listed here, perhaps you should convert the containing folder to a branch in TFS.";

                    if (fetch.BranchStrategy == BranchStrategy.All)
                        throw new GitTfsException("error: cloning the whole repository or too high in the repository path doesn't permit to manage branches!\n" + cloneMsg);
                    stdout.WriteLine("warning: you are going to clone the whole repository or too high in the repository path !\n" + cloneMsg);
                    return;
                }
                var tfsBranchesPath = tfsTrunkRepository.GetAllChildren();
                var tfsPathToClone = tfsRepositoryPath.TrimEnd('/').ToLower();
                var tfsTrunkRepositoryPath = tfsTrunkRepository.Path;
                if (tfsPathToClone != tfsTrunkRepositoryPath.ToLower())
                {
                    // Cloning either a branch (informational) or a plain
                    // subdirectory of a branch (warning: no branch management).
                    if (tfsBranchesPath.Select(e => e.Path.ToLower()).Contains(tfsPathToClone))
                        stdout.WriteLine("info: you are going to clone a branch instead of the trunk ( {0} )\n - so you won't be able to manage branches with git-tfs\n - this is not the recommended way to work with git-tfs", tfsTrunkRepositoryPath);
                    else
                        stdout.WriteLine("warning: you are going to clone a subdirectory of a branch and won't be able to manage branches :(\n - if you want to manage branches, clone the branch instead ( {0} )", tfsTrunkRepositoryPath);
                }
            }
            catch (GitTfsException)
            {
                throw;
            }
            catch (Exception ex)
            {
                stdout.WriteLine("warning: a server error occurs when trying to verify the tfs path cloned:\n   " + ex.Message + "\n   try to continue anyway...");
            }
        }

        /// <summary>
        /// Ensures the target git directory exists and is usable. Returns true
        /// when this call created the directory (the caller uses that to decide
        /// between full-delete and content-clean on rollback). An existing
        /// non-empty directory is an error unless --resumable (or a debugger)
        /// is active.
        /// </summary>
        private bool InitGitDir(string gitRepositoryPath)
        {
            bool repositoryDirCreated = false;
            var di = new DirectoryInfo(gitRepositoryPath);
            if (di.Exists)
            {
                bool isDebuggerAttached = false;
#if DEBUG
                isDebuggerAttached = Debugger.IsAttached;
#endif
                if (!isDebuggerAttached && !resumable)
                {
                    if (di.EnumerateFileSystemInfos().Any())
                        throw new GitTfsException("error: Specified git repository directory is not empty");
                }
            }
            else
            {
                repositoryDirCreated = true;
                di.Create();
            }
            return repositoryDirCreated;
        }

        /// <summary>
        /// Empties a pre-existing target directory (used on rollback when we
        /// did not create the directory ourselves and so must not delete it).
        /// </summary>
        private static void CleanDirectory(string gitRepositoryPath)
        {
            var di = new DirectoryInfo(gitRepositoryPath);
            foreach (var fileSystemInfo in di.EnumerateDirectories())
                fileSystemInfo.Delete(true);
            foreach (var fileSystemInfo in di.EnumerateFiles())
                fileSystemInfo.Delete();
        }
    }
}
#pragma warning disable 1634, 1691 //----------------------------------------------------------------------------- // // <copyright file="PathNode.cs" company="Microsoft"> // Copyright (C) Microsoft Corporation. All rights reserved. // </copyright> // // Description: // PathNode represents a node in a path (a subset of the element tree). // PathNodes can have other PathNodes as children. Each refers to a // single element in the element tree. // Spec: http://team/sites/ag/Specifications/Anchoring%20Namespace%20Spec.doc // // History: // 01/01/2003: magedz: Created - based on architectural discussions and design // by axelk, rruiz, magedz // 07/23/2003: rruiz: Ported to WCP // 08/18/2003: rruiz: Updated as per spec: made the class public. // //----------------------------------------------------------------------------- using System; using System.Diagnostics; using System.Collections; using System.ComponentModel; using System.Windows; using System.Windows.Annotations; using System.Windows.Media; using System.Windows.Markup; using MS.Utility; namespace MS.Internal.Annotations.Anchoring { /// <summary> /// PathNode represents a node in a path (a subset of the element tree). /// PathNodes can have other PathNodes as children. Each refers to a /// single element in the element tree. /// </summary> internal sealed class PathNode { //------------------------------------------------------ // // Constructors // //------------------------------------------------------ #region Constructors /// <summary> /// Creates an instance of PathNode that refers to the specified tree node. 
        /// </summary>
        /// <param name="node">the tree node represented by this instance </param>
        /// <exception cref="ArgumentNullException">node is null</exception>
        internal PathNode(DependencyObject node)
        {
            if (node == null)
                throw new ArgumentNullException("node");

            _node = node;
        }

        #endregion Constructors

        //------------------------------------------------------
        //
        //  Public Methods
        //
        //------------------------------------------------------

        #region Public Methods

        /// <summary>
        ///     Determines if obj is a PathNode and refers to the same tree node
        ///     as this instance.  Equality is delegated to the underlying tree
        ///     node's own Equals.
        /// </summary>
        /// <param name="obj">the Object to test for equality </param>
        /// <returns>true if obj refers to the same tree node as this instance </returns>
        public override bool Equals(Object obj)
        {
            PathNode otherNode = obj as PathNode;
            if (otherNode == null)
                return false;

            return _node.Equals(otherNode.Node);
        }

        /// <summary>
        ///     Generates a hash value for this PathNode based on the tree node
        ///     it refers to.  Falls back to the base hash when no node is set,
        ///     keeping Equals/GetHashCode consistent.
        /// </summary>
        /// <returns>a hash value for this instance based on its tree node</returns>
        public override int GetHashCode()
        {
            if (_node == null)
                return base.GetHashCode();

            return _node.GetHashCode();
        }

        #endregion Public Methods

        //------------------------------------------------------
        //
        //  Public Operators
        //
        //------------------------------------------------------

        //------------------------------------------------------
        //
        //  Public Events
        //
        //------------------------------------------------------

        //------------------------------------------------------
        //
        //  Public Properties
        //
        //------------------------------------------------------

        #region Public Properties

        /// <summary>
        ///     Returns the tree node referred to by this instance of PathNode.
        /// </summary>
        /// <returns>the tree node referred to by this instance</returns>
        public DependencyObject Node
        {
            get
            {
                return _node;
            }
        }

        /// <summary>
        ///     Returns a list of PathNodes that are children of this instance.
        ///     This set of children is a subset of the children of the tree node
        ///     referred to by this PathNode.
        /// </summary>
        /// <returns>list of PathNodes that are children of this instance</returns>
        public IList Children
        {
            get
            {
                return _children;
            }
        }

        #endregion Public Properties

        //------------------------------------------------------
        //
        //  Internal Methods
        //
        //------------------------------------------------------

        #region Internal Methods

        /// <summary>
        ///     Builds a path of PathNodes representing the nodes between all of
        ///     the nodes and the root of the tree.  Branches for the individual
        ///     nodes are built one at a time and merged into the first branch.
        /// </summary>
        /// <returns>the instance referring to the root of the tree; its
        /// children/descendants only include the nodes between the root and
        /// all of the nodes</returns>
        /// <exception cref="ArgumentNullException">nodes is null</exception>
        internal static PathNode BuildPathForElements(ICollection nodes)
        {
            if (nodes == null)
                throw new ArgumentNullException("nodes");

            PathNode firstPathNode = null;
            foreach (DependencyObject node in nodes)
            {
                PathNode branch = BuildPathForElement(node);
                if (firstPathNode == null)
                    firstPathNode = branch;
                else
                    AddBranchToPath(firstPathNode, branch);
            }

            // make all the children readonly so we do not need to
            // lock the PathNode when getting the children
            if (firstPathNode != null)
                firstPathNode.FreezeChildren();

            return firstPathNode;
        }

        #endregion Internal Methods

        //------------------------------------------------------
        //
        //  Internal Operators
        //
        //------------------------------------------------------

        //------------------------------------------------------
        //
        //  Internal Events
        //
        //------------------------------------------------------

        //------------------------------------------------------
        //
        //  Internal Properties
        //
        //------------------------------------------------------

        #region Internal Properties

        /// <summary>
        ///     Property used to point content tree root's to their 'parent'.
        ///     For instance, the root of a PageViewer's content tree would point
        ///     to the DocumentPaginator that is holding on to the tree.
        /// </summary>
        #pragma warning suppress 7009
        internal static readonly DependencyProperty HiddenParentProperty = DependencyProperty.RegisterAttached("HiddenParent", typeof(DependencyObject), typeof(PathNode));

        #endregion Internal Properties

        //------------------------------------------------------
        //
        //  Private Methods
        //
        //------------------------------------------------------

        #region Private Methods

        /// <summary>
        ///     Get the parent of the passed in object.  Lookup order per
        ///     iteration: the HiddenParent attached property first, then the
        ///     visual tree, then the logical tree; non-FE/FCE parents are
        ///     walked through until a FrameworkElement/FrameworkContentElement
        ///     (or null) is reached.
        /// </summary>
        /// <param name="node">the node whose parent is requested</param>
        /// <returns>the parent of this node where parent is defined to be the
        /// first FrameworkElement/FrameworkContentElemnt found walking up the
        /// node's parent chain, with a preference for the visual tree vs the
        /// logical tree</returns>
        internal static DependencyObject GetParent(DependencyObject node)
        {
            Debug.Assert(node != null, "node can not be null");

            DependencyObject current = node;
            DependencyObject parent = null;

            while (true)
            {
                // Try for hidden parent first above all others
                parent = (DependencyObject)current.GetValue(PathNode.HiddenParentProperty);

                if (parent == null)
                {
                    // Try for Visual parent
                    Visual visual = current as Visual;
                    if (visual != null)
                    {
                        // This is a Visual node, get parent
                        parent = VisualTreeHelper.GetParent(visual);
                    }
                }

                if (parent == null)
                {
                    // Try for Model parent
                    parent = LogicalTreeHelper.GetParent(current);
                }

                // Check if located a parent, if so, check if it's the correct type
                if ((parent == null)
                    || FrameworkElement.DType.IsInstanceOfType(parent)
                    || FrameworkContentElement.DType.IsInstanceOfType(parent))
                {
                    break;
                }

                // Parent found but not of correct type, continue
                current = parent;
                parent = null;
            }

            return parent;
        }

        /// <summary>
        ///     Builds a path from an element to the root of its tree.  Every
        ///     element in between the element and the root is added to the
        ///     path.
/// </summary> /// <param name="node">the element to build a path for</param> /// <returns>the PathNode instance referring to the root of the tree; its /// children/descendants only include the nodes between the root and /// node</returns> private static PathNode BuildPathForElement(DependencyObject node) { Debug.Assert(node != null, "node can not be null"); PathNode childNode = null; while (node != null) { PathNode pathNode = new PathNode(node); if (childNode != null) pathNode.AddChild(childNode); childNode = pathNode; // If we find a node that has the service set on it, we should stop // after processing it. For cases without a service like unit tests, // this node won't be found and we'll continue to the root. if (node.ReadLocalValue(AnnotationService.ServiceProperty) != DependencyProperty.UnsetValue) break; node = PathNode.GetParent(node); } return childNode; } /// <summary> /// Adds a branch to an existing path, removing any duplicate /// nodes as necessary. Assumes that both paths are full paths /// up to the root of the same tree. If the paths are not full, /// the method will give incorrect results. If the paths are /// full but belong to different trees (and therefore have /// different roots) the method will throw. /// </summary> /// <param name="path">path to add branch to</param> /// <param name="branch">branch to be added; should be a linear path /// (no more than one child for any node)</param> /// <returns>the path with branch having been added in and duplicate /// nodes pruned</returns> private static PathNode AddBranchToPath(PathNode path, PathNode branch) { Debug.Assert(path != null, "path can not be null"); Debug.Assert(branch != null, "branch can not be null"); // The paths must be in the same tree and therefore have the // same root. 
Debug.Assert(path.Node.Equals(branch.Node), "path.Node is not equal to branch.Node"); PathNode fp = path; PathNode sp = branch; // Continue down while (fp.Node.Equals(sp.Node) && sp._children.Count > 0) { // if the firstpath component equals the second path component // then we try to find the second path child component // inside the first path children bool found = false; PathNode branchNode = (PathNode)sp._children[0]; foreach (PathNode fpn in fp._children) { if (fpn.Equals(branchNode)) { // if we found one we keep moving along both the first path and the second path found = true; sp = branchNode; fp = fpn; break; } } if (found) continue; // if we can not find the second path child in the first // path child, we then just add the second path child // to the set of first path children fp.AddChild(branchNode); break; } return path; } private void AddChild(object child) { _children.Add(child); } /// <summary> /// Once the node has been constructed via BuildPathForElements /// we can not modify any more the childeren. We make the /// children trough the entire PathNode tree readonly /// </summary> private void FreezeChildren() { foreach (PathNode node in _children) { node.FreezeChildren(); } _children = ArrayList.ReadOnly(_children); } #endregion Private Methods //------------------------------------------------------ // // Private Fields // //------------------------------------------------------ #region Private Fields // The element pointed to by this PathNode private DependencyObject _node; // The array of children of this PathNode private ArrayList _children = new ArrayList(1); #endregion Private Fields } }
using NUnit.Framework;
using Parse;
using Parse.Core.Internal;
using System;
using System.Collections.Generic;

namespace ParseTest
{
    /// <summary>
    /// Unit tests for <c>ParseDecoder</c>: primitives, Parse's encoded
    /// dictionary types (Date, Bytes, Pointer, File, GeoPoint, Object,
    /// Relation), and nested containers.
    /// </summary>
    [TestFixture]
    public class DecoderTests
    {
        // Shared assertions for the canonical fixture date 1990-08-30T12:03:59.000Z.
        private static void AssertCanonicalDate(DateTime dateTime)
        {
            Assert.AreEqual(1990, dateTime.Year);
            Assert.AreEqual(8, dateTime.Month);
            Assert.AreEqual(30, dateTime.Day);
            Assert.AreEqual(12, dateTime.Hour);
            Assert.AreEqual(3, dateTime.Minute);
            Assert.AreEqual(59, dateTime.Second);
            Assert.AreEqual(0, dateTime.Millisecond);
        }

        [Test]
        public void TestParseDate()
        {
            var parsed = ParseDecoder.ParseDate("1990-08-30T12:03:59.000Z");
            var dateTime = (DateTime)ParseDecoder.Instance.Decode(parsed);

            AssertCanonicalDate(dateTime);
        }

        [Test]
        public void TestDecodePrimitives()
        {
            // Primitive values must round-trip through Decode unchanged.
            Assert.AreEqual(1, ParseDecoder.Instance.Decode(1));
            Assert.AreEqual(0.3, ParseDecoder.Instance.Decode(0.3));
            Assert.AreEqual("halyosy", ParseDecoder.Instance.Decode("halyosy"));
            Assert.IsNull(ParseDecoder.Instance.Decode(null));
        }

        [Test]
        public void TestDecodeFieldOperation()
        {
            var payload = new Dictionary<string, object>()
            {
                { "__op", "Increment" },
                { "amount", "322" }
            };

            // Decoding ParseFieldOperation is not supported on .NET now. We only need this for LDS.
            Assert.Throws<NotImplementedException>(() => ParseDecoder.Instance.Decode(payload));
        }

        [Test]
        public void TestDecodeDate()
        {
            var payload = new Dictionary<string, object>()
            {
                { "__type", "Date" },
                { "iso", "1990-08-30T12:03:59.000Z" }
            };

            AssertCanonicalDate((DateTime)ParseDecoder.Instance.Decode(payload));
        }

        [Test]
        public void TestDecodeImproperDate()
        {
            // A single trailing zero in the fractional seconds.
            var payload = new Dictionary<string, object>()
            {
                { "__type", "Date" },
                { "iso", "1990-08-30T12:03:59.0Z" }
            };
            AssertCanonicalDate((DateTime)ParseDecoder.Instance.Decode(payload));

            // Test multiple trailing zeroes
            payload = new Dictionary<string, object>()
            {
                { "__type", "Date" },
                { "iso", "1990-08-30T12:03:59.00Z" }
            };
            AssertCanonicalDate((DateTime)ParseDecoder.Instance.Decode(payload));
        }

        [Test]
        public void TestDecodeBytes()
        {
            var payload = new Dictionary<string, object>()
            {
                { "__type", "Bytes" },
                { "base64", "VGhpcyBpcyBhbiBlbmNvZGVkIHN0cmluZw==" }
            };

            var bytes = ParseDecoder.Instance.Decode(payload) as byte[];

            Assert.AreEqual("This is an encoded string", System.Text.Encoding.UTF8.GetString(bytes));
        }

        [Test]
        public void TestDecodePointer()
        {
            var payload = new Dictionary<string, object>()
            {
                { "__type", "Pointer" },
                { "className", "Corgi" },
                { "objectId", "lLaKcolnu" }
            };

            var obj = ParseDecoder.Instance.Decode(payload) as ParseObject;

            // A pointer decodes to a stub object: identity only, no data fetched.
            Assert.IsFalse(obj.IsDataAvailable);
            Assert.AreEqual("Corgi", obj.ClassName);
            Assert.AreEqual("lLaKcolnu", obj.ObjectId);
        }

        [Test]
        public void TestDecodeFile()
        {
            var completePayload = new Dictionary<string, object>()
            {
                { "__type", "File" },
                { "name", "Corgi.png" },
                { "url", "http://corgi.xyz/gogo.png" }
            };
            var file1 = ParseDecoder.Instance.Decode(completePayload) as ParseFile;
            Assert.AreEqual("Corgi.png", file1.Name);
            Assert.AreEqual("http://corgi.xyz/gogo.png", file1.Url.AbsoluteUri);
            Assert.IsFalse(file1.IsDirty);

            // Missing "url" key is an error.
            var incompletePayload = new Dictionary<string, object>()
            {
                { "__type", "File" },
                { "name", "Corgi.png" }
            };
            Assert.Throws<KeyNotFoundException>(() => ParseDecoder.Instance.Decode(incompletePayload));
        }

        [Test]
        public void TestDecodeGeoPoint()
        {
            var completePayload = new Dictionary<string, object>()
            {
                { "__type", "GeoPoint" },
                { "latitude", 0.9 },
                { "longitude", 0.3 }
            };
            var point1 = (ParseGeoPoint)ParseDecoder.Instance.Decode(completePayload);
            Assert.IsNotNull(point1);
            Assert.AreEqual(0.9, point1.Latitude);
            Assert.AreEqual(0.3, point1.Longitude);

            // Missing "longitude" key is an error.
            var incompletePayload = new Dictionary<string, object>()
            {
                { "__type", "GeoPoint" },
                { "latitude", 0.9 }
            };
            Assert.Throws<KeyNotFoundException>(() => ParseDecoder.Instance.Decode(incompletePayload));
        }

        [Test]
        public void TestDecodeObject()
        {
            var payload = new Dictionary<string, object>()
            {
                { "__type", "Object" },
                { "className", "Corgi" },
                { "objectId", "lLaKcolnu" },
                { "createdAt", "2015-06-22T21:23:41.733Z" },
                { "updatedAt", "2015-06-22T22:06:41.733Z" }
            };

            var obj = ParseDecoder.Instance.Decode(payload) as ParseObject;

            // Unlike a Pointer, a full Object payload carries its data.
            Assert.IsTrue(obj.IsDataAvailable);
            Assert.AreEqual("Corgi", obj.ClassName);
            Assert.AreEqual("lLaKcolnu", obj.ObjectId);
            Assert.IsNotNull(obj.CreatedAt);
            Assert.IsNotNull(obj.UpdatedAt);
        }

        [Test]
        public void TestDecodeRelation()
        {
            var payload = new Dictionary<string, object>()
            {
                { "__type", "Relation" },
                { "className", "Corgi" },
                { "objectId", "lLaKcolnu" }
            };

            var relation = ParseDecoder.Instance.Decode(payload) as ParseRelation<ParseObject>;

            Assert.IsNotNull(relation);
            Assert.AreEqual("Corgi", relation.GetTargetClassName());
        }

        [Test]
        public void TestDecodeDictionary()
        {
            var payload = new Dictionary<string, object>()
            {
                { "megurine", "luka" },
                { "hatsune", new ParseObject("Miku") },
                { "decodedGeoPoint", new Dictionary<string, object>()
                    {
                        { "__type", "GeoPoint" },
                        { "latitude", 0.9 },
                        { "longitude", 0.3 }
                    }
                },
                { "listWithSomething", new List<object>()
                    {
                        new Dictionary<string, object>()
                        {
                            { "__type", "GeoPoint" },
                            { "latitude", 0.9 },
                            { "longitude", 0.3 }
                        }
                    }
                }
            };

            var decoded = ParseDecoder.Instance.Decode(payload) as IDictionary<string, object>;

            // Values are decoded recursively, including nested lists.
            Assert.AreEqual("luka", decoded["megurine"]);
            Assert.IsTrue(decoded["hatsune"] is ParseObject);
            Assert.IsTrue(decoded["decodedGeoPoint"] is ParseGeoPoint);
            Assert.IsTrue(decoded["listWithSomething"] is IList<object>);
            var decodedList = decoded["listWithSomething"] as IList<object>;
            Assert.IsTrue(decodedList[0] is ParseGeoPoint);

            // Non-string keys are passed through untouched.
            var randomValue = new Dictionary<object, string>()
            {
                { "ultimate", "elements" },
                { new ParseACL(), "lLaKcolnu" }
            };
            var randomDict = ParseDecoder.Instance.Decode(randomValue) as IDictionary<object, string>;
            Assert.AreEqual("elements", randomDict["ultimate"]);
            Assert.AreEqual(2, randomDict.Keys.Count);
        }

        [Test]
        public void TestDecodeList()
        {
            var payload = new List<object>()
            {
                1,
                new ParseACL(),
                "wiz",
                new Dictionary<string, object>()
                {
                    { "__type", "GeoPoint" },
                    { "latitude", 0.9 },
                    { "longitude", 0.3 }
                },
                new List<object>()
                {
                    new Dictionary<string, object>()
                    {
                        { "__type", "GeoPoint" },
                        { "latitude", 0.9 },
                        { "longitude", 0.3 }
                    }
                }
            };

            var list = ParseDecoder.Instance.Decode(payload) as IList<object>;

            Assert.AreEqual(1, list[0]);
            Assert.IsTrue(list[1] is ParseACL);
            Assert.AreEqual("wiz", list[2]);
            Assert.IsTrue(list[3] is ParseGeoPoint);
            Assert.IsTrue(list[4] is IList<object>);
            var decodedList = list[4] as IList<object>;
            Assert.IsTrue(decodedList[0] is ParseGeoPoint);
        }

        [Test]
        public void TestDecodeArray()
        {
            var source = new int[] { 1, 2, 3, 4 };

            var array = ParseDecoder.Instance.Decode(source) as int[];

            Assert.AreEqual(4, array.Length);
            Assert.AreEqual(1, array[0]);
            Assert.AreEqual(2, array[1]);
        }
    }
}
namespace FakeItEasy.Configuration
{
    using System;
    using System.Collections.Generic;
    using System.Linq;
    using FakeItEasy.Core;

    /// <summary>
    /// Builds up a <see cref="BuildableCallRule"/> via the fluent configuration
    /// interfaces (Throws/Returns/Invokes/etc.) and can assert that matching
    /// calls have happened on the owning <see cref="FakeManager"/>.
    /// </summary>
    internal class RuleBuilder
        : IVoidArgumentValidationConfiguration,
          IRepeatConfiguration,
          IAfterCallSpecifiedConfiguration,
          IAfterCallSpecifiedWithOutAndRefParametersConfiguration,
          ICallCollectionAndCallMatcherAccessor
    {
        // Creates the asserter used by MustHaveHappened from the set of recorded calls.
        private readonly FakeAsserter.Factory asserterFactory;
        // The fake whose rules and recorded calls this builder operates on.
        private readonly FakeManager manager;

        internal RuleBuilder(BuildableCallRule ruleBeingBuilt, FakeManager manager, FakeAsserter.Factory asserterFactory)
        {
            this.RuleBeingBuilt = ruleBeingBuilt;
            this.manager = manager;
            this.asserterFactory = asserterFactory;
        }

        /// <summary>
        /// Represents a delegate that creates a configuration object from
        /// a fake object and the rule to build.
        /// </summary>
        /// <param name="ruleBeingBuilt">The rule that's being built.</param>
        /// <param name="fakeObject">The fake object the rule is for.</param>
        /// <returns>A configuration object.</returns>
        internal delegate RuleBuilder Factory(BuildableCallRule ruleBeingBuilt, FakeManager fakeObject);

        /// <summary>
        /// The rule this builder is configuring.
        /// </summary>
        public BuildableCallRule RuleBeingBuilt { get; private set; }

        /// <summary>
        /// All calls recorded on the fake in the current scope.
        /// </summary>
        public IEnumerable<ICompletedFakeObjectCall> Calls
        {
            get { return this.manager.RecordedCallsInScope; }
        }

        /// <summary>
        /// A matcher that tests calls against the rule being built.
        /// </summary>
        public ICallMatcher Matcher
        {
            get { return new RuleMatcher(this); }
        }

        /// <summary>
        /// Limits how many times the configured rule is applied.
        /// </summary>
        /// <param name="numberOfTimesToRepeat">The number of times; must be positive.</param>
        /// <exception cref="ArgumentOutOfRangeException">numberOfTimesToRepeat is not greater than zero.</exception>
        public void NumberOfTimes(int numberOfTimesToRepeat)
        {
            if (numberOfTimesToRepeat <= 0)
            {
                // Silverlight lacks the (name, value, message) overload.
#if SILVERLIGHT
                throw new ArgumentOutOfRangeException("numberOfTimesToRepeat", "The number of times to repeat is not greater than zero.");
#else
                throw new ArgumentOutOfRangeException("numberOfTimesToRepeat", numberOfTimesToRepeat, "The number of times to repeat is not greater than zero.");
#endif
            }

            this.RuleBeingBuilt.NumberOfTimesToCall = numberOfTimesToRepeat;
        }

        /// <summary>
        /// Configures the rule to throw the exception produced by the factory
        /// when a matching call is made.
        /// </summary>
        public virtual IAfterCallSpecifiedConfiguration Throws(Func<IFakeObjectCall, Exception> exceptionFactory)
        {
            this.RuleBeingBuilt.Applicator = x => { throw exceptionFactory(x); };
            return this;
        }

        /// <summary>
        /// Narrows the rule so it only applies when the call's arguments
        /// satisfy the supplied predicate.
        /// </summary>
        public IVoidConfiguration WhenArgumentsMatch(Func<ArgumentCollection, bool> argumentsPredicate)
        {
            Guard.AgainstNull(argumentsPredicate, "argumentsPredicate");

            this.RuleBeingBuilt.UsePredicateToValidateArguments(argumentsPredicate);
            return this;
        }

        /// <summary>
        /// Configures the rule to do nothing when a matching call is made.
        /// </summary>
        public virtual IAfterCallSpecifiedConfiguration DoesNothing()
        {
            this.RuleBeingBuilt.Applicator = x => { };
            return this;
        }

        /// <summary>
        /// Adds an action to invoke whenever a matching call is made.
        /// </summary>
        public virtual IVoidConfiguration Invokes(Action<IFakeObjectCall> action)
        {
            Guard.AgainstNull(action, "action");

            this.RuleBeingBuilt.Actions.Add(action);
            return this;
        }

        /// <summary>
        /// Configures the rule to call the faked member's base implementation.
        /// </summary>
        public virtual IAfterCallSpecifiedConfiguration CallsBaseMethod()
        {
            // An empty applicator is still required; the base call itself is
            // driven by the CallBaseMethod flag on the rule.
            this.RuleBeingBuilt.Applicator = x => { };
            this.RuleBeingBuilt.CallBaseMethod = true;
            return this;
        }

        /// <summary>
        /// Supplies values for the call's out and ref parameters, produced
        /// lazily from each matching call.
        /// </summary>
        public virtual IAfterCallSpecifiedConfiguration AssignsOutAndRefParametersLazily(Func<IFakeObjectCall, ICollection<object>> valueProducer)
        {
            Guard.AgainstNull(valueProducer, "valueProducer");

            this.RuleBeingBuilt.OutAndRefParametersValueProducer = valueProducer;

            return this;
        }

        /// <summary>
        /// Asserts that calls matching the rule happened the specified number
        /// of times, removing the rule being built from the fake first.
        /// </summary>
        public void MustHaveHappened(Repeated repeatConstraint)
        {
            Guard.AgainstNull(repeatConstraint, "repeatConstraint");

            // The rule was added for configuration purposes only; an
            // assertion must not leave it active on the fake.
            this.manager.RemoveRule(this.RuleBeingBuilt);
            var asserter = this.asserterFactory.Invoke(this.Calls.Cast<IFakeObjectCall>());

            var description = new StringBuilderOutputWriter();
            this.RuleBeingBuilt.WriteDescriptionOfValidCall(description);

            asserter.AssertWasCalled(this.Matcher.Matches, description.Builder.ToString(), repeatConstraint.Matches, repeatConstraint.ToString());
        }

        /// <summary>
        /// Configuration surface for members that return a value; delegates
        /// everything except return-value handling to the parent builder.
        /// </summary>
        public class ReturnValueConfiguration<TMember>
            : IAnyCallConfigurationWithReturnTypeSpecified<TMember>, ICallCollectionAndCallMatcherAccessor
        {
            // The RuleBuilder this configuration wraps.
            public RuleBuilder ParentConfiguration { get; set; }

            public ICallMatcher Matcher
            {
                get { return this.ParentConfiguration.Matcher; }
            }

            public IEnumerable<ICompletedFakeObjectCall> Calls
            {
                get { return this.ParentConfiguration.Calls; }
            }

            public IAfterCallSpecifiedConfiguration Throws(Func<IFakeObjectCall, Exception> exceptionFactory)
            {
                return this.ParentConfiguration.Throws(exceptionFactory);
            }

            /// <summary>
            /// Configures the rule to return the value produced per call.
            /// </summary>
            public IAfterCallSpecifiedWithOutAndRefParametersConfiguration ReturnsLazily(Func<IFakeObjectCall, TMember> valueProducer)
            {
                Guard.AgainstNull(valueProducer, "valueProducer");

                this.ParentConfiguration.RuleBeingBuilt.Applicator = x => x.SetReturnValue(valueProducer(x));
                return this.ParentConfiguration;
            }

            public IReturnValueConfiguration<TMember> Invokes(Action<IFakeObjectCall> action)
            {
                Guard.AgainstNull(action, "action");

                this.ParentConfiguration.RuleBeingBuilt.Actions.Add(action);
                return this;
            }

            public IAfterCallSpecifiedConfiguration CallsBaseMethod()
            {
                return this.ParentConfiguration.CallsBaseMethod();
            }

            public IReturnValueConfiguration<TMember> WhenArgumentsMatch(Func<ArgumentCollection, bool> argumentsPredicate)
            {
                Guard.AgainstNull(argumentsPredicate, "argumentsPredicate");

                this.ParentConfiguration.RuleBeingBuilt.UsePredicateToValidateArguments(argumentsPredicate);
                return this;
            }

            public void MustHaveHappened(Repeated repeatConstraint)
            {
                this.ParentConfiguration.MustHaveHappened(repeatConstraint);
            }

            /// <summary>
            /// Adds an extra predicate (with a printable description) that the
            /// call must satisfy for the rule to apply.
            /// </summary>
            public IAnyCallConfigurationWithReturnTypeSpecified<TMember> Where(Func<IFakeObjectCall, bool> predicate, Action<IOutputWriter> descriptionWriter)
            {
                this.ParentConfiguration.RuleBeingBuilt.ApplyWherePredicate(predicate, descriptionWriter);
                return this;
            }
        }

        // Matches calls against the rule being built, additionally requiring
        // that the call was made on this builder's fake object.
        private class RuleMatcher
            : ICallMatcher
        {
            private readonly RuleBuilder builder;

            public RuleMatcher(RuleBuilder builder)
            {
                this.builder = builder;
            }

            public bool Matches(IFakeObjectCall call)
            {
                Guard.AgainstNull(call, "call");

                return this.builder.RuleBeingBuilt.IsApplicableTo(call) &&
                       ReferenceEquals(this.builder.manager.Object, call.FakedObject);
            }

            public override string ToString()
            {
                return this.builder.RuleBeingBuilt.ToString();
            }
        }
    }
}
//------------------------------------------------------------
// Copyright (c) Microsoft Corporation.  All rights reserved.
//------------------------------------------------------------

namespace System.Activities.Core.Presentation
{
    using System;
    using System.Activities;
    using System.Activities.Presentation;
    using System.Activities.Presentation.Metadata;
    using System.Activities.Presentation.Model;
    using System.Activities.Statements;
    using System.ComponentModel;
    using System.Diagnostics;
    using System.Windows;
    using System.Windows.Controls;
    using System.Windows.Data;
    using System.Windows.Input;
    using System.Windows.Media;
    using System.Windows.Threading;
    using System.Runtime;
    using System.Collections.Generic;
    using System.Activities.Presentation.View;
    using System.Diagnostics.CodeAnalysis;
    using System.Collections.ObjectModel;
    using System.Globalization;
    using System.Activities.Presentation.View.OutlineView;

    // Designer for the Switch<T> activity.  Tracks which case (or the
    // default case) is expanded, keeps that in sync with the designer
    // selection, and supports adding new cases keyed by the switch type.
    partial class SwitchDesigner
    {
        // View-state key used to persist which case was left expanded.
        const string ExpandViewStateKey = "IsExpanded";

        public static readonly DependencyProperty CaseTypeProperty =
            DependencyProperty.Register(
            "CaseType",
            typeof(Type),
            typeof(SwitchDesigner),
            new UIPropertyMetadata(null));

        public static readonly DependencyProperty SelectedCaseProperty =
            DependencyProperty.Register(
            "SelectedCase",
            typeof(ModelItem),
            typeof(SwitchDesigner),
            new UIPropertyMetadata(null));

        public static readonly DependencyProperty ShowDefaultCaseExpandedProperty =
            DependencyProperty.Register(
            "ShowDefaultCaseExpanded",
            typeof(bool),
            typeof(SwitchDesigner),
            new UIPropertyMetadata(false));

        public static readonly DependencyProperty NewKeyProperty =
            DependencyProperty.Register(
            "NewKey",
            typeof(object),
            typeof(SwitchDesigner),
            new UIPropertyMetadata(null));

        static TypeResolvingOptions argumentTypeResolvingOptions;

        // "Add new case" hint label; hidden except while a new key is being typed.
        TextBlock addNewCaseLabel;
        CaseKeyBox caseKeyBox;

        // Whether the Default case view is currently expanded.
        public bool ShowDefaultCaseExpanded
        {
            get { return (bool)this.GetValue(ShowDefaultCaseExpandedProperty); }
            set { this.SetValue(ShowDefaultCaseExpandedProperty, value); }
        }

        // The case ModelItem whose view is currently expanded, if any.
        ModelItem SelectedCase
        {
            get { return (ModelItem)this.GetValue(SelectedCaseProperty); }
            set { this.SetValue(SelectedCaseProperty, value); }
        }

        // The T of the Switch<T> being edited; set in OnModelItemChanged.
        Type CaseType
        {
            get { return (Type)GetValue(CaseTypeProperty); }
            set { SetValue(CaseTypeProperty, value); }
        }

        // The key the user is entering for a new case.
        object NewKey
        {
            get { return GetValue(NewKeyProperty); }
            set { SetValue(NewKeyProperty, value); }
        }

        // Validation callback that rejects a key already used by an existing case.
        public CaseKeyValidationCallbackDelegate CheckDuplicateCaseKey
        {
            get
            {
                return (object obj, out string reason) =>
                {
                    reason = string.Empty;
                    if (ContainsCaseKey(obj))
                    {
                        string key = obj != null ? obj.ToString() : "(null)";
                        reason = string.Format(CultureInfo.CurrentCulture, SR.DuplicateCaseKey, key);
                        return false;
                    }
                    return true;
                };
            }
        }

        static List<Type> defaultTypes;

        // Lazily-built list of types offered by default in the type picker.
        static List<Type> DefaultTypes
        {
            get
            {
                if (defaultTypes == null)
                {
                    defaultTypes = new List<Type>
                    {
                        typeof(bool),
                        typeof(int),
                        typeof(string),
                    };
                }
                return defaultTypes;
            }
        }

        static TypeResolvingOptions ArgumentTypeResolvingOptions
        {
            get
            {
                if (argumentTypeResolvingOptions == null)
                {
                    argumentTypeResolvingOptions = new TypeResolvingOptions(DefaultTypes)
                    {
                        Filter = null,
                    };
                }
                return argumentTypeResolvingOptions;
            }
        }

        [SuppressMessage("Microsoft.Usage", "CA2214:DoNotCallOverridableMethodsInConstructors")]
        public SwitchDesigner()
        {
            InitializeComponent();
            this.Loaded += new RoutedEventHandler(OnLoaded);
            this.Unloaded += new RoutedEventHandler(OnUnloaded);
            this.Resources.Add("ModelItemKeyValuePairType", typeof(ModelItemKeyValuePair<,>));
        }

        protected override void OnModelItemChanged(object newItem)
        {
            base.OnModelItemChanged(newItem);

            // Extract T from Switch<T> so case keys can be typed correctly.
            Type modelItemType = this.ModelItem.ItemType;
            Type[] types = modelItemType.GetGenericArguments();
            Fx.Assert(types.Length == 1, "Switch should have exactly one generic argument");
            this.CaseType = types[0];
        }

        void OnModelItemPropertyChanged(object sender, PropertyChangedEventArgs e)
        {
            // When the Default activity changes, surface it by expanding its view.
            if (e.PropertyName == "Default" && !this.ShowDefaultCaseExpanded)
            {
                ExpandDefaultView();
                this.UpdateSelection(null);
            }
        }

        void OnLoaded(object sender, RoutedEventArgs e)
        {
            this.Context.Items.Subscribe<Selection>(OnSelectionChanged);
            this.ModelItem.PropertyChanged += OnModelItemPropertyChanged;

            // Restore the previously-expanded case (at most one) from view state.
            ViewStateService viewStateService = this.Context.Services.GetService<ViewStateService>();
            foreach (ModelItem modelItem in this.ModelItem.Properties["Cases"].Dictionary.Properties["ItemsCollection"].Collection)
            {
                bool? isExpanded = (bool?)viewStateService.RetrieveViewState(modelItem, ExpandViewStateKey);
                if (isExpanded != null && isExpanded.Value)
                {
                    this.SelectedCase = modelItem;
                    CollapseDefaultView();
                    break;
                }
            }
        }

        void OnUnloaded(object sender, RoutedEventArgs e)
        {
            this.ModelItem.PropertyChanged -= OnModelItemPropertyChanged;
            this.Context.Items.Unsubscribe<Selection>(OnSelectionChanged);
        }

        void OnSelectionChanged(Selection selection)
        {
            // Expand whichever case (or the default) contains the new selection.
            if (this.IsDescendantOfDefault(selection.PrimarySelection))
            {
                this.ExpandDefaultView();
            }
            else
            {
                foreach (ModelItem caseObject in this.ModelItem.Properties["Cases"].Dictionary.Properties["ItemsCollection"].Collection)
                {
                    if (IsDescendantOfCase(caseObject, selection.PrimarySelection))
                    {
                        UpdateSelection(caseObject);
                        break;
                    }
                }
            }
        }

        // Walks descendant's parent chain looking for ancester.
        static bool IsAncestorOf(ModelItem ancester, ModelItem descendant)
        {
            if (ancester == null)
            {
                return false;
            }

            ModelItem itr = descendant;
            while (itr != null)
            {
                if (itr == ancester)
                {
                    return true;
                }
                itr = itr.Parent;
            }
            return false;
        }

        bool IsDescendantOfDefault(ModelItem descendant)
        {
            if (descendant == null)
            {
                return false;
            }
            else
            {
                ModelItem defaultValue = this.ModelItem.Properties["Default"].Value;
                return IsAncestorOf(defaultValue, descendant);
            }
        }

        internal static bool IsDescendantOfCase(ModelItem caseObject, ModelItem descendant)
        {
            Fx.Assert(caseObject != null, "Case object mustn't be null.");
            if (caseObject == descendant)
            {
                return true;
            }
            else
            {
                ModelItem caseValue = caseObject.Properties["Value"].Value;
                return IsAncestorOf(caseValue, descendant);
            }
        }

        void UpdateSelection(ModelItem newSelectedCase)
        {
            ModelItem oldSelectedCase = this.SelectedCase;
            this.SelectedCase = newSelectedCase;

            // Defer the view updates: the case designers' views may not be
            // ready at the moment the selection changes.
            this.Dispatcher.BeginInvoke(DispatcherPriority.Normal, (Action)(() =>
            {
                if (oldSelectedCase != null)
                {
                    CaseDesigner oldSelectedCaseDesigner = (CaseDesigner)oldSelectedCase.View;
                    if (oldSelectedCaseDesigner != null)
                    {
                        oldSelectedCaseDesigner.ExpandState = false;
                        oldSelectedCaseDesigner.PinState = false;
                    }
                }

                if (newSelectedCase != null)
                {
                    CollapseDefaultView();

                    CaseDesigner newSelectedCaseDesigner = (CaseDesigner)newSelectedCase.View;
                    if (newSelectedCaseDesigner != null)
                    {
                        newSelectedCaseDesigner.ExpandState = true;
                        newSelectedCaseDesigner.PinState = true;
                    }
                }
            }));
        }

        internal static void RegisterMetadata(AttributeTableBuilder builder)
        {
            Type type = typeof(Switch<>);
            builder.AddCustomAttributes(type, new DesignerAttribute(typeof(SwitchDesigner)));
            builder.AddCustomAttributes(type, type.GetProperty("Default"), BrowsableAttribute.No);
            builder.AddCustomAttributes(type, new TypeResolvingOptionsAttribute(ArgumentTypeResolvingOptions));

            // Hide Cases node in the treeview and display its child nodes directly.
            builder.AddCustomAttributes(type, type.GetProperty("Cases"), new ShowPropertyInOutlineViewAttribute() { CurrentPropertyVisible = false, ChildNodePrefix = "Case : " });
        }

        void OnDefaultCaseViewMouseDown(object sender, MouseButtonEventArgs e)
        {
            if (e.LeftButton == MouseButtonState.Pressed && e.ClickCount == 2)
            {
                // Double-click promotes this designer to the root of the view.
                SwitchTryCatchDesignerHelper.MakeRootDesigner(this);
                e.Handled = true;
            }
            else if (e.LeftButton == MouseButtonState.Pressed)
            {
                ExpandDefaultView();
                Keyboard.Focus((IInputElement)sender);
            }
            else if (e.RightButton == MouseButtonState.Pressed)
            {
                if (this.IsDefaultCaseViewExpanded())
                {
                    Keyboard.Focus((IInputElement)sender);
                }
                e.Handled = true;
            }
        }

        void OnDefaultCaseViewMouseUp(object sender, MouseButtonEventArgs e)
        {
            // avoid context menu upon right-click when it's collapsed
            if (!IsDefaultCaseViewExpanded() && e.RightButton == MouseButtonState.Released)
            {
                e.Handled = true;
            }
        }

        bool IsDefaultCaseViewExpanded()
        {
            DesignerView designerView = this.Context.Services.GetService<DesignerView>();
            return this.ShowDefaultCaseExpanded || designerView.ShouldExpandAll;
        }

        void OnDefaultCaseViewKeyDown(object sender, KeyEventArgs e)
        {
            // Space/Enter on the collapsed default header expands it.
            if (sender == e.OriginalSource && (e.Key == Key.Space || e.Key == Key.Enter))
            {
                ExpandDefaultView();
                e.Handled = true;
            }
        }

        void ExpandDefaultView()
        {
            // Only one view can be expanded at a time: clear the case selection.
            UpdateSelection(null);
            this.ShowDefaultCaseExpanded = true;
        }

        void CollapseDefaultView()
        {
            this.ShowDefaultCaseExpanded = false;
        }

        void OnAddNewCaseLabelLoaded(object sender, RoutedEventArgs e)
        {
            this.addNewCaseLabel = (TextBlock)sender;
            this.addNewCaseLabel.Visibility = Visibility.Collapsed;
        }

        void OnAddNewCaseLabelUnloaded(object sender, RoutedEventArgs e)
        {
            this.addNewCaseLabel = null;
        }

        void OnNewKeyTextBoxGotFocus(object sender, RoutedEventArgs e)
        {
            this.addNewCaseLabel.Visibility = Visibility.Visible;
        }

        void OnNewKeyCommitted(object sender, RoutedEventArgs e)
        {
            this.addNewCaseLabel.Visibility = Visibility.Collapsed;
            try
            {
                AddNewCase(this.NewKey);
            }
            catch (ArgumentException ex)
            {
                // e.g. the key duplicates an existing case; surface to the user.
                ErrorReporting.ShowErrorMessage(ex.Message);
            }
        }

        void OnNewKeyEditCancelled(object sender, RoutedEventArgs e)
        {
            this.addNewCaseLabel.Visibility = Visibility.Collapsed;
        }

        void OnCaseKeyBoxLoaded(object sender, RoutedEventArgs e)
        {
            this.caseKeyBox = (CaseKeyBox)sender;
        }

        // Creates a ModelItemKeyValuePair<CaseType, Activity> for newKey (with a
        // null activity) and appends it to the switch's Cases collection.
        void AddNewCase(object newKey)
        {
            Type caseType = typeof(ModelItemKeyValuePair<,>).MakeGenericType(new Type[] { this.CaseType, typeof(Activity) });
            object mutableKVPair = Activator.CreateInstance(caseType, new object[] { newKey, null });
            ModelProperty casesProp = this.ModelItem.Properties["Cases"];
            Fx.Assert(casesProp != null, "Property Cases is not available");
            ModelItem cases = casesProp.Value;
            Fx.Assert(cases != null, "Cannot get ModelItem from property Cases");
            ModelProperty itemsCollectionProp = cases.Properties["ItemsCollection"];
            Fx.Assert(itemsCollectionProp != null, "Cannot get property ItemsCollection from Cases");
            ModelItemCollection itemsCollection = itemsCollectionProp.Collection;
            Fx.Assert(itemsCollection != null, "Cannot get ModelItemCollection from property ItemsCollection");
            itemsCollection.Add(mutableKVPair);
            this.caseKeyBox.ResetText();
        }

        // True if any existing case already uses the given key (null-safe).
        bool ContainsCaseKey(object key)
        {
            Type caseType = typeof(ModelItemKeyValuePair<,>).MakeGenericType(new Type[] { this.CaseType, typeof(Activity) });
            ModelProperty casesProp = this.ModelItem.Properties["Cases"];
            ModelItem cases = casesProp.Value;
            ModelProperty itemsCollectionProp = cases.Properties["ItemsCollection"];
            ModelItemCollection itemsCollection = itemsCollectionProp.Collection;

            foreach (ModelItem item in itemsCollection)
            {
                object itemKey = caseType.GetProperty("Key").GetGetMethod().Invoke(item.GetCurrentValue(), null);
                if ((itemKey != null && itemKey.Equals(key)) || (itemKey == key))
                {
                    return true;
                }
            }
            return false;
        }
    }
}
/*
 * Copyright 1999-2012 Alibaba Group.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
using Tup.Cobar4Net.Parser.Ast.Expression;
using Tup.Cobar4Net.Parser.Ast.Expression.Comparison;
using Tup.Cobar4Net.Parser.Ast.Expression.Logical;
using Tup.Cobar4Net.Parser.Ast.Expression.Misc;
using Tup.Cobar4Net.Parser.Ast.Expression.Primary;
using Tup.Cobar4Net.Parser.Ast.Expression.Primary.Function;
using Tup.Cobar4Net.Parser.Ast.Expression.Primary.Function.Cast;
using Tup.Cobar4Net.Parser.Ast.Expression.Primary.Function.Datetime;
using Tup.Cobar4Net.Parser.Ast.Expression.Primary.Function.Groupby;
using Tup.Cobar4Net.Parser.Ast.Expression.Primary.Function.String;
using Tup.Cobar4Net.Parser.Ast.Expression.Primary.Literal;
using Tup.Cobar4Net.Parser.Ast.Expression.String;
using Tup.Cobar4Net.Parser.Ast.Expression.Type;
using Tup.Cobar4Net.Parser.Ast.Fragment;
using Tup.Cobar4Net.Parser.Ast.Fragment.Ddl;
using Tup.Cobar4Net.Parser.Ast.Fragment.Tableref;
using Tup.Cobar4Net.Parser.Ast.Stmt.Dal;
using Tup.Cobar4Net.Parser.Ast.Stmt.Ddl;
using Tup.Cobar4Net.Parser.Ast.Stmt.Dml;
using Tup.Cobar4Net.Parser.Ast.Stmt.Extension;
using Tup.Cobar4Net.Parser.Ast.Stmt.Mts;

namespace Tup.Cobar4Net.Parser.Visitor
{
    /// <summary>
    /// Classic Visitor-pattern contract for the SQL abstract syntax tree:
    /// one overload of <c>Visit</c> per concrete AST node type. Implementors
    /// (evaluators, SQL re-generators, analyzers) are dispatched to by each
    /// node's <c>Accept</c> method.
    /// NOTE(review): several type names carry upstream misspellings
    /// ("Comparision", "ShowCharaterSet") — they are declared elsewhere and
    /// cannot be fixed here without breaking every implementor.
    /// </summary>
    /// <author>
    /// <a href="mailto:shuo.qius@alibaba-inc.com">QIU Shuo</a>
    /// </author>
    public interface ISqlAstVisitor
    {
        // --- Comparison / logical / misc expressions ---------------------
        void Visit(BetweenAndExpression node);

        void Visit(InExpressionList node);

        void Visit(LikeExpression node);

        void Visit(CollateExpression node);

        void Visit(UserExpression node);

        void Visit(UnaryOperatorExpression node);

        void Visit(BinaryOperatorExpression node);

        void Visit(PolyadicOperatorExpression node);

        void Visit(LogicalAndExpression node);

        void Visit(LogicalOrExpression node);

        void Visit(ComparisionIsExpression node);

        void Visit(ComparisionEqualsExpression node);

        void Visit(ComparisionNullSafeEqualsExpression node);

        void Visit(InExpression node);

        // ------------------------------------------------------- built-in
        // and aggregate function call nodes
        void Visit(FunctionExpression node);

        void Visit(Char node);

        void Visit(Convert node);

        void Visit(Trim node);

        void Visit(Cast node);

        void Visit(Avg node);

        void Visit(Max node);

        void Visit(Min node);

        void Visit(Sum node);

        void Visit(Count node);

        void Visit(GroupConcat node);

        void Visit(Extract node);

        void Visit(Timestampdiff node);

        void Visit(Timestampadd node);

        void Visit(GetFormat node);

        // ------------------------------------------------------- literals
        // and primary expressions
        void Visit(IntervalPrimary node);

        void Visit(LiteralBitField node);

        void Visit(LiteralBoolean node);

        void Visit(LiteralHexadecimal node);

        void Visit(LiteralNull node);

        void Visit(LiteralNumber node);

        void Visit(LiteralString node);

        void Visit(CaseWhenOperatorExpression node);

        void Visit(DefaultValue node);

        void Visit(ExistsPrimary node);

        void Visit(PlaceHolder node);

        void Visit(Identifier node);

        void Visit(MatchExpression node);

        void Visit(ParamMarker node);

        void Visit(RowExpression node);

        void Visit(SysVarPrimary node);

        void Visit(UsrDefVarPrimary node);

        // ------------------------------------------------------- table
        // references and query fragments
        void Visit(IndexHint node);

        void Visit(InnerJoin node);

        void Visit(NaturalJoin node);

        void Visit(OuterJoin node);

        void Visit(StraightJoin node);

        void Visit(SubqueryFactor node);

        void Visit(TableReferences node);

        void Visit(TableRefFactor node);

        void Visit(Dual dual);

        void Visit(GroupBy node);

        void Visit(Limit node);

        void Visit(OrderBy node);

        void Visit(ColumnDefinition node);

        void Visit(IndexOption node);

        void Visit(IndexColumnName node);

        void Visit(TableOptions node);

        void Visit(DataType node);

        // ------------------------------------------------------- DAL
        // statements (SHOW ... / SET ...)
        void Visit(ShowAuthors node);

        void Visit(ShowBinaryLog node);

        void Visit(ShowBinLogEvent node);

        void Visit(ShowCharaterSet node);

        void Visit(ShowCollation node);

        void Visit(ShowColumns node);

        void Visit(ShowContributors node);

        void Visit(ShowCreate node);

        void Visit(ShowDatabases node);

        void Visit(ShowEngine node);

        void Visit(ShowEngines node);

        void Visit(ShowErrors node);

        void Visit(ShowEvents node);

        void Visit(ShowFunctionCode node);

        void Visit(ShowFunctionStatus node);

        void Visit(ShowGrants node);

        void Visit(ShowIndex node);

        void Visit(ShowMasterStatus node);

        void Visit(ShowOpenTables node);

        void Visit(ShowPlugins node);

        void Visit(ShowPrivileges node);

        void Visit(ShowProcedureCode node);

        void Visit(ShowProcedureStatus node);

        void Visit(ShowProcesslist node);

        void Visit(ShowProfile node);

        void Visit(ShowProfiles node);

        void Visit(ShowSlaveHosts node);

        void Visit(ShowSlaveStatus node);

        void Visit(ShowStatus node);

        void Visit(ShowTables node);

        void Visit(ShowTableStatus node);

        void Visit(ShowTriggers node);

        void Visit(ShowVariables node);

        void Visit(ShowWarnings node);

        void Visit(DalSetStatement node);

        void Visit(DalSetNamesStatement node);

        void Visit(DalSetCharacterSetStatement node);

        // ------------------------------------------------------- DML,
        // transaction (MTS) and DDL statements
        void Visit(DmlCallStatement node);

        void Visit(DmlDeleteStatement node);

        void Visit(DmlInsertStatement node);

        void Visit(DmlReplaceStatement node);

        void Visit(DmlSelectStatement node);

        void Visit(DmlSelectUnionStatement node);

        void Visit(DmlUpdateStatement node);

        void Visit(MTSSetTransactionStatement node);

        void Visit(MTSSavepointStatement node);

        void Visit(MTSReleaseStatement node);

        void Visit(MTSRollbackStatement node);

        void Visit(DdlTruncateStatement node);

        void Visit(DdlAlterTableStatement node);

        void Visit(DdlCreateIndexStatement node);

        void Visit(DdlCreateTableStatement node);

        void Visit(DdlRenameTableStatement node);

        void Visit(DdlDropIndexStatement node);

        void Visit(DdlDropTableStatement node);

        void Visit(DdlAlterTableStatement.AlterSpecification node);

        void Visit(DescTableStatement node);

        void Visit(ExtDdlCreatePolicy node);

        void Visit(ExtDdlDropPolicy node);
    }
}
// Release.cs
//
// Copyright (c) 2008 Scott Peterson <lunchtimemama@gmail.com>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.

using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Text;
using System.Xml;

namespace MusicBrainz
{
    /// <summary>
    /// A MusicBrainz release (an album/single/EP as published). Data is
    /// lazily fetched: fields left null after construction are filled in by
    /// LoadMissingDataCore, which re-queries the web service with the
    /// appropriate "inc" parameters built in CreateIncCore.
    /// </summary>
    public sealed class Release : MusicBrainzItem
    {
        #region Private

        // URL path fragment for the release resource on the MB web service.
        const string EXTENSION = "release";
        // All of the following are lazily populated; null means "not yet fetched".
        ReleaseType? type;
        ReleaseStatus? status;
        string language;
        string script;
        string asin;
        ReadOnlyCollection<Disc> discs;
        ReadOnlyCollection<Event> events;
        ReadOnlyCollection<Track> tracks;
        // 1-based number of the first track in a partial track list (from the
        // "offset" attribute), or null when no offset was supplied.
        int? track_number;

        #endregion

        #region Constructors

        // Lookup by MBID; data is fetched on demand.
        Release (string id) : base (id)
        {
        }

        // Construction from an XML response already being streamed.
        internal Release (XmlReader reader) : base (reader, null, false)
        {
        }

        #endregion

        #region Protected

        internal override string UrlExtension {
            get { return EXTENSION; }
        }

        // "inc" parameters needed to retrieve the track list with artists.
        static readonly string [] track_params = new string [] { "tracks", "track-level-rels", "artist" };

        // Builds the "inc" query parameters, only requesting what is still missing.
        internal override void CreateIncCore (StringBuilder builder)
        {
            AppendIncParameters (builder, "release-events", "labels");
            if (discs == null) AppendIncParameters (builder, "discs");
            if (tracks == null) {
                AppendIncParameters (builder, track_params);
                // Track-level relations are requested above, so not all rels
                // are loaded until that query completes.
                AllRelsLoaded = false;
            }
            base.CreateIncCore (builder);
        }

        // Re-queries the service for this release and copies over any fields
        // that were not populated by the original (partial) response.
        internal override void LoadMissingDataCore ()
        {
            Release release = new Release (Id);
            type = release.GetReleaseType ();
            status = release.GetReleaseStatus ();
            language = release.GetLanguage ();
            script = release.GetScript ();
            asin = release.GetAsin ();
            events = release.GetEvents ();
            if (discs == null) discs = release.GetDiscs ();
            if (tracks == null) tracks = release.GetTracks ();
            base.LoadMissingDataCore (release);
        }

        internal override void ProcessAttributes (XmlReader reader)
        {
            // The "type" attribute may contain both the release type and the
            // release status as space-separated tokens (e.g. "Album Official").
            // How sure am I about getting the type and status in the "Type Status" format?
            // MB really ought to specify these two things separately.
            string type_string = reader ["type"];
            if (type_string != null) {
                foreach (string token in type_string.Split (' ')) {
                    if (type == null) {
                        type = Utils.StringToEnumOrNull<ReleaseType> (token);
                        if (type != null) continue;
                    }
                    // Token was not a (first) type, so try it as a status.
                    this.status = Utils.StringToEnumOrNull<ReleaseStatus> (token);
                }
            }
        }

        // Streams one child element of the <release> element into the
        // corresponding field. Unrecognized elements fall through to the base.
        internal override void ProcessXmlCore (XmlReader reader)
        {
            switch (reader.Name) {
            case "text-representation":
                language = reader["language"];
                script = reader["script"];
                break;
            case "asin":
                asin = reader.ReadString ();
                break;
            case "disc-list":
                if (reader.ReadToDescendant ("disc")) {
                    List<Disc> discs = new List<Disc> ();
                    do discs.Add (new Disc (reader.ReadSubtree ()));
                    while (reader.ReadToNextSibling ("disc"));
                    this.discs = discs.AsReadOnly ();
                }
                break;
            case "release-event-list":
                if (!AllDataLoaded) {
                    reader.Skip (); // FIXME this is a workaround for Mono bug 334752
                    return;
                }
                if (reader.ReadToDescendant ("event")) {
                    List<Event> events = new List<Event> ();
                    do events.Add (new Event (reader.ReadSubtree ()));
                    while (reader.ReadToNextSibling ("event"));
                    this.events = events.AsReadOnly ();
                }
                break;
            case "track-list":
                // "offset" is 0-based; TrackNumber exposes it 1-based.
                string offset = reader["offset"];
                if (offset != null)
                    track_number = int.Parse (offset) + 1;
                if (reader.ReadToDescendant ("track")) {
                    List<Track> tracks = new List<Track> ();
                    do tracks.Add (new Track (reader.ReadSubtree (), GetArtist (), AllDataLoaded));
                    while (reader.ReadToNextSibling ("track"));
                    this.tracks = tracks.AsReadOnly ();
                }
                break;
            default:
                base.ProcessXmlCore (reader);
                break;
            }
        }

        #endregion

        #region Public

        [Queryable ("reid")]
        public override string Id {
            get { return base.Id; }
        }

        [Queryable ("release")]
        public override string GetTitle ()
        {
            return base.GetTitle ();
        }

        [Queryable ("type")]
        public ReleaseType GetReleaseType ()
        {
            return GetPropertyOrDefault (ref type, ReleaseType.None);
        }

        [Queryable ("status")]
        public ReleaseStatus GetReleaseStatus ()
        {
            return GetPropertyOrDefault (ref status, ReleaseStatus.None);
        }

        // NOTE(review): unlike its siblings this getter carries no [Queryable]
        // attribute — confirm whether "lang" was intentionally omitted.
        public string GetLanguage ()
        {
            return GetPropertyOrNull (ref language);
        }

        [Queryable ("script")]
        public string GetScript ()
        {
            return GetPropertyOrNull (ref script);
        }

        [Queryable ("asin")]
        public string GetAsin ()
        {
            return GetPropertyOrNull (ref asin);
        }

        [QueryableMember("Count", "discids")]
        public ReadOnlyCollection<Disc> GetDiscs ()
        {
            return GetPropertyOrNew (ref discs);
        }

        public ReadOnlyCollection<Event> GetEvents ()
        {
            return GetPropertyOrNew (ref events);
        }

        [QueryableMember ("Count", "tracks")]
        public ReadOnlyCollection<Track> GetTracks ()
        {
            return GetPropertyOrNew (ref tracks);
        }

        // 1-based first track number of a partial list, or -1 when unknown.
        internal int TrackNumber {
            get { return track_number ?? -1; }
        }

        #endregion

        #region Static

        // Lazy lookup by MBID; no network traffic until a getter is called.
        public static Release Get (string id)
        {
            if (id == null) throw new ArgumentNullException ("id");
            return new Release (id);
        }

        public static Query<Release> Query (string title)
        {
            if (title == null) throw new ArgumentNullException ("title");
            ReleaseQueryParameters parameters = new ReleaseQueryParameters ();
            parameters.Title = title;
            return Query (parameters);
        }

        public static Query<Release> Query (string title, string artist)
        {
            if (title == null) throw new ArgumentNullException ("title");
            if (artist == null) throw new ArgumentNullException ("artist");
            ReleaseQueryParameters parameters = new ReleaseQueryParameters ();
            parameters.Title = title;
            parameters.Artist = artist;
            return Query (parameters);
        }

        public static Query<Release> Query (Disc disc)
        {
            if (disc == null) throw new ArgumentNullException ("disc");
            ReleaseQueryParameters parameters = new ReleaseQueryParameters ();
            parameters.DiscId = disc.Id;
            return Query (parameters);
        }

        public static Query<Release> Query (ReleaseQueryParameters parameters)
        {
            if (parameters == null) throw new ArgumentNullException ("parameters");
            return new Query<Release> (EXTENSION, parameters.ToString ());
        }

        // Reads the disc currently in the given CD device and queries by its
        // computed disc id.
        public static Query<Release> QueryFromDevice(string device)
        {
            if (device == null) throw new ArgumentNullException ("device");
            ReleaseQueryParameters parameters = new ReleaseQueryParameters ();
            parameters.DiscId = LocalDisc.GetFromDevice (device).Id;
            return Query (parameters);
        }

        public static Query<Release> QueryLucene (string luceneQuery)
        {
            if (luceneQuery == null) throw new ArgumentNullException ("luceneQuery");
            return new Query<Release> (EXTENSION, CreateLuceneParameter (luceneQuery));
        }

        public static implicit operator string (Release release)
        {
            return release.ToString ();
        }

        #endregion
    }

    #region Ancillary Types

    // Kinds of release recognized by the MusicBrainz service.
    public enum ReleaseType
    {
        None,
        Album,
        Single,
        EP,
        Compilation,
        Soundtrack,
        Spokenword,
        Interview,
        Audiobook,
        Live,
        Remix,
        Other
    }

    // Publication status of a release.
    // NOTE(review): "PsudoRelease" is misspelled but is shipped public API;
    // renaming it would break consumers (and possibly service-string mapping),
    // so it is left as-is.
    public enum ReleaseStatus
    {
        None,
        Official,
        Promotion,
        Bootleg,
        PsudoRelease
    }

    // Physical/digital media formats for release events.
    public enum ReleaseFormat
    {
        None,
        Cartridge,
        Cassette,
        CD,
        DAT,
        Digital,
        DualDisc,
        DVD,
        LaserDisc,
        MiniDisc,
        Other,
        ReelToReel,
        SACD,
        Vinyl
    }

    /// <summary>
    /// Filter parameters for release queries; each non-null property is
    /// appended to the query string by ToStringCore.
    /// </summary>
    public sealed class ReleaseQueryParameters : ItemQueryParameters
    {
        string disc_id;
        public string DiscId {
            get { return disc_id; }
            set { disc_id = value; }
        }

        string date;
        public string Date {
            get { return date; }
            set { date = value; }
        }

        string asin;
        public string Asin {
            get { return asin; }
            set { asin = value; }
        }

        string language;
        public string Language {
            get { return language; }
            set { language = value; }
        }

        string script;
        public string Script {
            get { return script; }
            set { script = value; }
        }

        // Appends the set parameters as URL query arguments. Only Date is
        // percent-encoded here; the others are passed through verbatim.
        internal override void ToStringCore (StringBuilder builder)
        {
            if (disc_id != null) {
                builder.Append ("&discid=");
                builder.Append (disc_id);
            }
            if (date != null) {
                builder.Append ("&date=");
                Utils.PercentEncode (builder, date);
            }
            if (asin != null) {
                builder.Append ("&asin=");
                builder.Append (asin);
            }
            if (language != null) {
                builder.Append ("&lang=");
                builder.Append (language);
            }
            if (script != null) {
                builder.Append ("&script=");
                builder.Append (script);
            }
        }
    }

    #endregion
}
/*
 * Copyright (c) 2006-2008, openmetaverse.org
 * All rights reserved.
 *
 * - Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions are met:
 *
 * - Redistributions of source code must retain the above copyright notice, this
 *   list of conditions and the following disclaimer.
 * - Neither the name of the openmetaverse.org nor the names
 *   of its contributors may be used to endorse or promote products derived from
 *   this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

using System;
using System.Collections.Generic;
using OpenMetaverse.Packets;

namespace OpenMetaverse
{
    /// <summary>
    /// Class for controlling various system settings.
    /// </summary>
    /// <remarks>Some values are readonly because they affect things that
    /// happen when the GridClient object is initialized, so changing them at
    /// runtime won't do any good. Non-readonly values may affect things that
    /// happen at login or dynamically</remarks>
    public class Settings
    {
        #region Login/Networking Settings

        /// <summary>Main grid login server</summary>
        public const string AGNI_LOGIN_SERVER = "https://login.agni.lindenlab.com/cgi-bin/login.cgi";

        /// <summary>Beta grid login server</summary>
        public const string ADITI_LOGIN_SERVER = "https://login.aditi.lindenlab.com/cgi-bin/login.cgi";

        /// <summary>The relative directory where external resources are kept</summary>
        public static string RESOURCE_DIR = "openmetaverse_data";

        /// <summary>Login server to connect to (defaults to the main Agni grid)</summary>
        public string LOGIN_SERVER = AGNI_LOGIN_SERVER;

        /// <summary>IP Address the client will bind to</summary>
        public static System.Net.IPAddress BIND_ADDR = System.Net.IPAddress.Any;

        /// <summary>Use XML-RPC Login or LLSD Login, default is XML-RPC Login</summary>
        public bool USE_LLSD_LOGIN = false;

        #endregion
        #region Inventory

        /// <summary>
        /// InventoryManager requests inventory information on login,
        /// GridClient initializes an Inventory store for main inventory.
        /// </summary>
        public const bool ENABLE_INVENTORY_STORE = true;

        /// <summary>
        /// InventoryManager requests library information on login,
        /// GridClient initializes an Inventory store for the library.
        /// </summary>
        public const bool ENABLE_LIBRARY_STORE = true;

        #endregion
        #region Timeouts and Intervals

        /// <summary>Number of milliseconds before an asset transfer will time
        /// out</summary>
        public int TRANSFER_TIMEOUT = 90 * 1000;

        /// <summary>Number of milliseconds before a teleport attempt will time
        /// out</summary>
        public int TELEPORT_TIMEOUT = 40 * 1000;

        /// <summary>Number of milliseconds before NetworkManager.Logout() will
        /// time out</summary>
        public int LOGOUT_TIMEOUT = 5 * 1000;

        /// <summary>Number of milliseconds before a CAPS call will time out</summary>
        /// <remarks>Setting this too low will cause web requests to time out and
        /// possibly retry repeatedly</remarks>
        public int CAPS_TIMEOUT = 60 * 1000;

        /// <summary>Number of milliseconds for xml-rpc to timeout</summary>
        public int LOGIN_TIMEOUT = 60 * 1000;

        /// <summary>Milliseconds before a packet is assumed lost and resent</summary>
        public int RESEND_TIMEOUT = 4000;

        /// <summary>Milliseconds without receiving a packet before the
        /// connection to a simulator is assumed lost</summary>
        public int SIMULATOR_TIMEOUT = 30 * 1000;

        /// <summary>Milliseconds to wait for a simulator info request through
        /// the grid interface</summary>
        public int MAP_REQUEST_TIMEOUT = 5 * 1000;

        /// <summary>Number of milliseconds between sending pings to each sim</summary>
        public const int PING_INTERVAL = 2200;

        /// <summary>Number of milliseconds between sending camera updates</summary>
        public const int DEFAULT_AGENT_UPDATE_INTERVAL = 500;

        /// <summary>Number of milliseconds between updating the current
        /// positions of moving, non-accelerating and non-colliding objects</summary>
        public const int INTERPOLATION_INTERVAL = 250;

        /// <summary>Millisecond interval between ticks, where all ACKs are
        /// sent out and the age of unACKed packets is checked</summary>
        public const int NETWORK_TICK_INTERVAL = 500;

        #endregion
        #region Sizes

        /// <summary>The initial size of the packet inbox, where packets are
        /// stored before processing</summary>
        public const int PACKET_INBOX_SIZE = 100;

        /// <summary>Maximum size of packet that we want to send over the wire</summary>
        public const int MAX_PACKET_SIZE = 1200;

        /// <summary>The maximum value of a packet sequence number before it
        /// rolls over back to one</summary>
        public const int MAX_SEQUENCE = 0xFFFFFF;

        /// <summary>The maximum size of the sequence number archive, used to
        /// check for resent and/or duplicate packets</summary>
        public const int PACKET_ARCHIVE_SIZE = 200;

        /// <summary>Maximum number of queued ACKs to be sent before SendAcks()
        /// is forced</summary>
        public int MAX_PENDING_ACKS = 10;

        /// <summary>Network stats queue length (seconds)</summary>
        public int STATS_QUEUE_SIZE = 5;

        #endregion
        #region Configuration options (mostly booleans)

        /// <summary>Enable to process packets synchronously, where all of the
        /// callbacks for each packet must return before the next packet is
        /// processed</summary>
        /// <remarks>This is an experimental feature and is not completely
        /// reliable yet. Ideally it would reduce context switches and thread
        /// overhead, but several calls currently block for a long time and
        /// would need to be rewritten as asynchronous code before this is
        /// feasible</remarks>
        public bool SYNC_PACKETCALLBACKS = false;

        /// <summary>Enable/disable storing terrain heightmaps in the
        /// TerrainManager</summary>
        public bool STORE_LAND_PATCHES = false;

        /// <summary>Enable/disable sending periodic camera updates</summary>
        public bool SEND_AGENT_UPDATES = true;

        /// <summary>Enable/disable automatically setting agent appearance at
        /// login and after sim crossing</summary>
        public bool SEND_AGENT_APPEARANCE = true;

        /// <summary>Enable/disable automatically setting the bandwidth throttle
        /// after connecting to each simulator</summary>
        /// <remarks>The default throttle uses the equivalent of the maximum
        /// bandwidth setting in the official client. If you do not set a
        /// throttle your connection will by default be throttled well below
        /// the minimum values and you may experience connection problems</remarks>
        public bool SEND_AGENT_THROTTLE = true;

        /// <summary>Enable/disable the sending of pings to monitor lag and
        /// packet loss</summary>
        public bool SEND_PINGS = true;

        /// <summary>Should we connect to multiple sims? This will allow
        /// viewing in to neighboring simulators and sim crossings
        /// (Experimental)</summary>
        public bool MULTIPLE_SIMS = true;

        /// <summary>If true, all object update packets will be decoded in to
        /// native objects. If false, only updates for our own agent will be
        /// decoded. Registering an event handler will force objects for that
        /// type to always be decoded. If this is disabled the object tracking
        /// will have missing or partial prim and avatar information</summary>
        public bool ALWAYS_DECODE_OBJECTS = true;

        /// <summary>If true, when a cached object check is received from the
        /// server the full object info will automatically be requested</summary>
        public bool ALWAYS_REQUEST_OBJECTS = true;

        /// <summary>Whether to establish connections to HTTP capabilities
        /// servers for simulators</summary>
        public bool ENABLE_CAPS = true;

        /// <summary>Whether to decode sim stats</summary>
        public bool ENABLE_SIMSTATS = true;

        /// <summary>The capabilities servers are currently designed to
        /// periodically return a 502 error which signals for the client to
        /// re-establish a connection. Set this to true to log those 502 errors</summary>
        public bool LOG_ALL_CAPS_ERRORS = false;

        /// <summary>If true, any reference received for a folder or item
        /// the library is not aware of will automatically be fetched</summary>
        public bool FETCH_MISSING_INVENTORY = true;

        /// <summary>If true, and <code>SEND_AGENT_UPDATES</code> is true,
        /// AgentUpdate packets will continuously be sent out to give the bot
        /// smoother movement and autopiloting</summary>
        public bool DISABLE_AGENT_UPDATE_DUPLICATE_CHECK = true;

        /// <summary>If true, currently visible avatars will be stored
        /// in dictionaries inside <code>Simulator.ObjectAvatars</code>.
        /// If false, a new Avatar or Primitive object will be created
        /// each time an object update packet is received</summary>
        public bool AVATAR_TRACKING = true;

        /// <summary>If true, currently visible avatars will be stored
        /// in dictionaries inside <code>Simulator.ObjectPrimitives</code>.
        /// If false, a new Avatar or Primitive object will be created
        /// each time an object update packet is received</summary>
        public bool OBJECT_TRACKING = true;

        /// <summary>If true, position and velocity will periodically be
        /// interpolated (extrapolated, technically) for objects and
        /// avatars that are being tracked by the library. This is
        /// necessary to increase the accuracy of speed and position
        /// estimates for simulated objects</summary>
        public bool USE_INTERPOLATION_TIMER = true;

        /// <summary>
        /// If true, utilization statistics will be tracked. There is a minor penalty
        /// in CPU time for enabling this option.
        /// </summary>
        public bool TRACK_UTILIZATION = false;

        #endregion
        #region Parcel Tracking

        /// <summary>If true, parcel details will be stored in the
        /// <code>Simulator.Parcels</code> dictionary as they are received</summary>
        public bool PARCEL_TRACKING = true;

        /// <summary>
        /// If true, an incoming parcel properties reply will automatically send
        /// a request for the parcel access list
        /// </summary>
        public bool ALWAYS_REQUEST_PARCEL_ACL = true;

        /// <summary>
        /// if true, an incoming parcel properties reply will automatically send
        /// a request for the traffic count.
        /// </summary>
        public bool ALWAYS_REQUEST_PARCEL_DWELL = true;

        #endregion
        #region Asset Cache

        /// <summary>
        /// If true, images, and other assets downloaded from the server
        /// will be cached in a local directory
        /// </summary>
        public bool USE_ASSET_CACHE = true;

        /// <summary>Path to store cached texture data</summary>
        public string ASSET_CACHE_DIR = RESOURCE_DIR + "/cache";

        /// <summary>Maximum size cached files are allowed to take on disk (bytes)</summary>
        public long ASSET_CACHE_MAX_SIZE = 1024 * 1024 * 1024; // 1GB

        #endregion
        #region Misc

        /// <summary>Default color used for viewer particle effects</summary>
        public Color4 DEFAULT_EFFECT_COLOR = new Color4(255, 0, 0, 255);

        /// <summary>Cost of uploading an asset</summary>
        /// <remarks>Read-only since this value is dynamically fetched at login
        /// (populated by <see cref="EconomyDataHandler"/>)</remarks>
        public int UPLOAD_COST { get { return priceUpload; } }

        /// <summary>Maximum number of times to resend a failed packet</summary>
        public int MAX_RESEND_COUNT = 3;

        /// <summary>Throttle outgoing packet rate</summary>
        public bool THROTTLE_OUTGOING_PACKETS = true;

        /// <summary>UUID of a texture used by some viewers to indentify type of client used</summary>
        public UUID CLIENT_IDENTIFICATION_TAG = UUID.Zero;

        #endregion
        #region Texture Pipeline

        /// <summary>The maximum number of concurrent texture downloads allowed</summary>
        /// <remarks>Increasing this number will not necessarily increase texture retrieval times due to
        /// simulator throttles</remarks>
        public int MAX_CONCURRENT_TEXTURE_DOWNLOADS = 4;

        /// <summary>
        /// The Refresh timer inteval is used to set the delay between checks for stalled texture downloads
        /// </summary>
        /// <remarks>This is a static variable which applies to all instances</remarks>
        public static float PIPELINE_REFRESH_INTERVAL = 500.0f;

        /// <summary>
        /// Textures taking longer than this value will be flagged as timed out and removed from the pipeline
        /// </summary>
        public int PIPELINE_REQUEST_TIMEOUT = 45*1000;

        #endregion
        #region Logging Configuration

        /// <summary>
        /// Get or set the minimum log level to output to the console by default
        ///
        /// If the library is not compiled with DEBUG defined and this level is set to DEBUG
        /// You will get no output on the console. This behavior can be overriden by creating
        /// a logger configuration file for log4net
        /// </summary>
        public static Helpers.LogLevel LOG_LEVEL = Helpers.LogLevel.Debug;

        /// <summary>Attach avatar names to log messages</summary>
        public bool LOG_NAMES = true;

        /// <summary>Log packet retransmission info</summary>
        public bool LOG_RESENDS = true;

        #endregion
        #region Private Fields

        // Owning client; used to register for EconomyData packets.
        private GridClient Client;
        // Latest asset-upload price reported by the grid (see EconomyDataHandler).
        private int priceUpload = 0;

        /// <summary>Constructor</summary>
        /// <param name="client">Reference to a GridClient object</param>
        public Settings(GridClient client)
        {
            Client = client;
            // Subscribe so UPLOAD_COST reflects the grid's current economy data.
            Client.Network.RegisterCallback(Packets.PacketType.EconomyData, EconomyDataHandler);
        }

        #endregion
        #region Packet Callbacks

        /// <summary>Process an incoming EconomyData packet and cache the
        /// upload price reported by the simulator</summary>
        /// <param name="sender">The sender</param>
        /// <param name="e">The EventArgs object containing the packet data</param>
        protected void EconomyDataHandler(object sender, PacketReceivedEventArgs e)
        {
            EconomyDataPacket econ = (EconomyDataPacket)e.Packet;
            priceUpload = econ.Info.PriceUpload;
        }

        #endregion
    }
}
/*
 * Copyright (c) Contributors, OpenCurrency Team
 * See CONTRIBUTORS.TXT for a full list of copyright holders.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *     * Redistributions of source code must retain the above copyright
 *       notice, this list of conditions and the following disclaimer.
 *     * Redistributions in binary form must reproduce the above copyright
 *       notice, this list of conditions and the following disclaimer in the
 *       documentation and/or other materials provided with the distribution.
 *     * Neither the name of the OpenSim Project nor the
 *       names of its contributors may be used to endorse or promote products
 *       derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * Major changes.
 * Michael E. Steurer, 2011
 * Institute for Information Systems and Computer Media
 * Graz University of Technology
 */

using System;
using System.Collections.Generic;
using System.Reflection;
using log4net;
using OpenMetaverse;
using OpenSim.Framework;
using OpenSim.Services.Interfaces;
using OpenSim.Region.Framework.Scenes;
using OpenSim.Region.Framework.Interfaces;

namespace OMEconomy.OMBase
{
    /// <summary>
    /// Singleton registry of the scenes this region server hosts, with helper
    /// lookups for agents, prims and region metadata across all of them.
    /// All access to the scene dictionary is serialized on the dictionary
    /// itself; region secrets are serialized on their own dictionary.
    /// </summary>
    public class SceneHandler
    {
        private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);

        // Scenes keyed by region handle. Always locked on m_scene when read
        // or written, because several lookups enumerate it from other threads.
        private Dictionary<ulong, Scene> m_scene = new Dictionary<ulong, Scene>();

        // Guards singleton construction only.
        private static object m_lock = new object();

        // Shared secret per region UUID, used to authenticate with the
        // economy service. Guarded by locking the dictionary itself.
        private Dictionary<UUID, string> m_regionSecrets = new Dictionary<UUID, string>();

        /// <summary>Returns the secret registered for the given region.</summary>
        /// <remarks>NOTE(review): like the original, this throws
        /// KeyNotFoundException when no secret was registered — callers
        /// appear to rely on that, so the behavior is preserved.</remarks>
        public string GetRegionSecret(UUID regionUUID)
        {
            lock (m_regionSecrets)
            {
                return m_regionSecrets[regionUUID];
            }
        }

        /// <summary>Stores (or replaces) the secret for a region.</summary>
        /// <returns>true if an existing secret was overwritten</returns>
        public bool SetRegionSecret(UUID regionUUID, string secret)
        {
            lock (m_regionSecrets)
            {
                bool updated_secret = m_regionSecrets.ContainsKey(regionUUID);
                m_regionSecrets[regionUUID] = secret;
                return updated_secret;
            }
        }

        /// <summary>Distinct origin region IDs of all registered scenes.</summary>
        public List<UUID> GetUniqueRegions()
        {
            List<UUID> uniqueRegions = new List<UUID>();
            lock (m_scene)
            {
                foreach (Scene rs in m_scene.Values)
                {
                    if (!uniqueRegions.Contains(rs.RegionInfo.originRegionID))
                    {
                        uniqueRegions.Add(rs.RegionInfo.originRegionID);
                    }
                }
            }
            return uniqueRegions;
        }

        /// <summary>Base HTTP URL of the scene's external endpoint,
        /// normalized to have an "http://" prefix and a trailing slash.</summary>
        public string GetRegionIP(Scene scene)
        {
            string regionIP = "http://" + scene.RegionInfo.ExternalEndPoint.Address.ToString() +
                ":" + scene.RegionInfo.HttpPort.ToString() + "/";
            regionIP = regionIP.EndsWith("/") ? regionIP : (regionIP + "/");
            regionIP = regionIP.StartsWith("http://") ? regionIP : ("http://" + regionIP);
            return regionIP;
        }

        /// <summary>Unregisters a scene; no-op if it was never registered.</summary>
        public void RemoveScene(Scene scene)
        {
            // Locked: other threads enumerate m_scene under this lock, and
            // removing during their enumeration would throw.
            lock (m_scene)
            {
                m_scene.Remove(scene.RegionInfo.RegionHandle);
            }
        }

        /// <summary>Registers a scene, replacing any previous scene with the
        /// same region handle.</summary>
        public void AddScene(Scene scene)
        {
            lock (m_scene)
            {
                // The indexer is add-or-replace, which is exactly the
                // ContainsKey/Add-or-assign dance the original performed.
                m_scene[scene.RegionInfo.RegionHandle] = scene;
            }
        }

        /// <summary>Finds the scene whose origin region ID matches, or null.</summary>
        public Scene GetSceneByUUID(UUID regionID)
        {
            lock (m_scene)
            {
                foreach (Scene rs in m_scene.Values)
                {
                    if (rs.RegionInfo.originRegionID == regionID)
                    {
                        return rs;
                    }
                }
            }
            return null;
        }

        /// <summary>Locates the root-agent client for the given agent across
        /// all scenes, or null if the agent is not present as a user.</summary>
        public IClientAPI LocateClientObject(UUID agentID)
        {
            lock (m_scene)
            {
                foreach (Scene _scene in m_scene.Values)
                {
                    ScenePresence tPresence = _scene.GetScenePresence(agentID);
                    if (tPresence != null && !tPresence.IsChildAgent &&
                        tPresence.ControllingClient != null &&
                        tPresence.PresenceType == PresenceType.User)
                    {
                        return tPresence.ControllingClient;
                    }
                }
            }
            return null;
        }

        /// <summary>Finds a prim by UUID in any registered scene, or null.</summary>
        public SceneObjectPart FindPrim(UUID primID)
        {
            lock (m_scene)
            {
                foreach (Scene s in m_scene.Values)
                {
                    SceneObjectPart part = s.GetSceneObjectPart(primID);
                    if (part != null)
                    {
                        return part;
                    }
                }
            }
            return null;
        }

        /// <summary>Finds the scene in which the given agent is a root user
        /// presence, or null.</summary>
        public Scene LocateSceneClientIn(UUID agentID)
        {
            lock (m_scene)
            {
                foreach (Scene _scene in m_scene.Values)
                {
                    ScenePresence tPresence = _scene.GetScenePresence(agentID);
                    if (tPresence != null && !tPresence.IsChildAgent &&
                        tPresence.PresenceType == PresenceType.User)
                    {
                        return _scene;
                    }
                }
            }
            return null;
        }

        /// <summary>An arbitrary registered scene (dictionary order), or null
        /// when no scene is registered. Used for scene-agnostic module
        /// lookups.</summary>
        public Scene GetRandomScene()
        {
            lock (m_scene)
            {
                foreach (Scene rs in m_scene.Values)
                    return rs;
            }
            return null;
        }

        /// <summary>Resolves an agent UUID to a display name via the user
        /// management module; empty string when unknown.</summary>
        public string ResolveAgentName(UUID agentID)
        {
            Scene scene = GetRandomScene();
            string agentName = String.Empty;
            IUserManagement userManager = scene.RequestModuleInterface<IUserManagement>();
            if (userManager != null)
            {
                agentName = userManager.GetUserName(agentID);
                // "(hippos)" is the module's placeholder for an unknown user.
                agentName = agentName == "(hippos)" ? String.Empty : agentName;
            }
            return agentName;
        }

        /// <summary>Resolves a group UUID to its name via the groups module;
        /// empty string when the group cannot be resolved.</summary>
        public string ResolveGroupName(UUID groupID)
        {
            Scene scene = GetRandomScene();
            IGroupsModule gm = scene.RequestModuleInterface<IGroupsModule>();
            try
            {
                string @group = gm.GetGroupRecord(groupID).GroupName;
                if (@group != null)
                {
                    m_log.DebugFormat("[OMBASE]: Resolved group {0} to {1}", groupID, @group);
                    return @group;
                }
            }
            catch (Exception)
            {
                // Best-effort lookup: the groups module may be absent or the
                // record missing; log and fall through to the empty result.
                m_log.ErrorFormat("[OMBASE]: Could not resolve group {0}", groupID);
            }
            return String.Empty;
        }

        /// <summary>Formats a prim's absolute position as "&lt;x/y/z&gt;"
        /// with integer-rounded coordinates.</summary>
        public string GetObjectLocation(SceneObjectPart part)
        {
            int x = Convert.ToInt32(part.AbsolutePosition.X);
            int y = Convert.ToInt32(part.AbsolutePosition.Y);
            int z = Convert.ToInt32(part.AbsolutePosition.Z);
            return "<" + x + "/" + y + "/" + z + ">";
        }

        // Private: instances are only obtained through getInstance().
        private SceneHandler()
        {
        }

        private static volatile SceneHandler instance = null;

        /// <summary>Returns the process-wide SceneHandler, creating it on
        /// first use (thread-safe).</summary>
        public static SceneHandler getInstance()
        {
            lock (m_lock)
            {
                if (instance == null)
                {
                    instance = new SceneHandler();
                }
                return instance;
            }
        }
    }
}
// Copyright (c) DotSpatial Team. All rights reserved.
// Licensed under the MIT license. See License.txt file in the project root for full license information.

using System;
using System.ComponentModel;
using System.IO;

namespace DotSpatial.Data
{
    /// <summary>
    /// RasterBounds georeferences a raster of known row/column dimensions using the six
    /// world-file affine coefficients:
    /// X' = [0] + [1] * Column + [2] * Row
    /// Y' = [3] + [4] * Column + [5] * Row
    /// </summary>
    public class RasterBounds : IRasterBounds
    {
        #region Fields

        private readonly int _numColumns;
        private readonly int _numRows;
        private double[] _affine;
        private string _worldFile;

        #endregion

        #region Constructors

        /// <summary>
        /// Initializes a new instance of the <see cref="RasterBounds"/> class.
        /// </summary>
        public RasterBounds()
        {
            _affine = new double[6];
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="RasterBounds"/> class.
        /// This attempts to read the very simple 6 number world file associated with an image.
        /// </summary>
        /// <param name="numRows">The number of rows in this raster</param>
        /// <param name="numColumns">The number of columns in this raster</param>
        /// <param name="worldFileName">A world file to attempt to read</param>
        public RasterBounds(int numRows, int numColumns, string worldFileName)
        {
            _numRows = numRows;
            _numColumns = numColumns;
            _affine = new double[6];
            this.OpenWorldFile(worldFileName);
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="RasterBounds"/> class.
        /// </summary>
        /// <param name="numRows">The number of rows for this raster</param>
        /// <param name="numColumns">The number of columns for this raster</param>
        /// <param name="affineCoefficients">The affine coefficients describing the location of this raster.</param>
        public RasterBounds(int numRows, int numColumns, double[] affineCoefficients)
        {
            _affine = affineCoefficients;
            _numRows = numRows;
            _numColumns = numColumns;
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="RasterBounds"/> class that is
        /// georeferenced to the specified envelope.
        /// </summary>
        /// <param name="numRows">The number of rows</param>
        /// <param name="numColumns">The number of columns</param>
        /// <param name="bounds">The bounding envelope</param>
        public RasterBounds(int numRows, int numColumns, Extent bounds)
        {
            _affine = new double[6];
            _numRows = numRows;
            _numColumns = numColumns;
            Extent = bounds;
        }

        #endregion

        #region Properties

        /// <summary>
        /// Gets or sets the double affine coefficients that control the world-file
        /// positioning of this image. X' and Y' are real world coords.
        /// X' = [0] + [1] * Column + [2] * Row
        /// Y' = [3] + [4] * Column + [5] * Row
        /// </summary>
        [Category("GeoReference")]
        [Description("X' = [0] + [1] * Column + [2] * Row, Y' = [3] + [4] * Column + [5] * Row")]
        public virtual double[] AffineCoefficients
        {
            get
            {
                return _affine;
            }

            set
            {
                _affine = value;
            }
        }

        /// <summary>
        /// Gets or sets the desired height per cell. This will keep the skew the same, but
        /// will adjust both the column based and row based height coefficients in order
        /// to match the specified cell height. This can be thought of as the height
        /// of a bounding box that contains an entire grid cell, no matter if it is skewed.
        /// </summary>
        public double CellHeight
        {
            get
            {
                double[] affine = AffineCoefficients;

                // whatever sign the coefficients are, they only increase the cell height
                return Math.Abs(affine[4]) + Math.Abs(affine[5]);
            }

            set
            {
                double[] affine = AffineCoefficients;
                double currentHeight = CellHeight;
                if (currentHeight == 0)
                {
                    // Degenerate case: no height information to apportion between the two
                    // terms. Assign the whole cell height to the row term with the
                    // north-up (negative) convention, mirroring the Height setter.
                    affine[4] = 0;
                    affine[5] = -value;
                    AffineCoefficients = affine; // use the setter for overriding classes
                    return;
                }

                // FIX: the factors must be fractions of the current cell HEIGHT (the original
                // divided by CellWidth, which is built from affine[1]/affine[2], so setting
                // the height was wrong whenever width != height). They must also be
                // magnitudes, otherwise multiplying by Math.Sign below would square the sign
                // and flip negative coefficients positive.
                double columnFactor = Math.Abs(affine[4]) / currentHeight;
                double rowFactor = Math.Abs(affine[5]) / currentHeight;
                affine[4] = Math.Sign(affine[4]) * value * columnFactor;
                affine[5] = Math.Sign(affine[5]) * value * rowFactor;
                AffineCoefficients = affine; // use the setter for overriding classes
            }
        }

        /// <summary>
        /// Gets or sets the desired width per cell. This will keep the skew the same, but
        /// will adjust both the column based and row based width coefficients in order
        /// to match the specified cell width. This can be thought of as the width
        /// of a bounding box that contains an entire grid cell, no matter if it is skewed.
        /// </summary>
        public double CellWidth
        {
            get
            {
                double[] affine = AffineCoefficients;

                // whatever sign the coefficients are, they only increase the cell width
                return Math.Abs(affine[1]) + Math.Abs(affine[2]);
            }

            set
            {
                double[] affine = AffineCoefficients;
                double currentWidth = CellWidth;
                if (currentWidth == 0)
                {
                    // Degenerate case: assign the whole cell width to the column term,
                    // mirroring the Width setter's convention.
                    affine[1] = value;
                    affine[2] = 0;
                    AffineCoefficients = affine; // use the setter for overriding classes
                    return;
                }

                // FIX: use magnitudes for the factors; the original used signed factors and
                // then multiplied by Math.Sign again, which flipped negative coefficients
                // positive (sign squared). This preserves the sign as the Width setter does.
                double columnFactor = Math.Abs(affine[1]) / currentWidth;
                double rowFactor = Math.Abs(affine[2]) / currentWidth;
                affine[1] = Math.Sign(affine[1]) * value * columnFactor;
                affine[2] = Math.Sign(affine[2]) * value * rowFactor;
                AffineCoefficients = affine; // use the setter for overriding classes
            }
        }

        /// <summary>
        /// Gets or sets the rectangular bounding box for this raster.
        /// Returns null when the raster has no usable scale terms (affine[1] or affine[5] is 0).
        /// </summary>
        public Extent Extent
        {
            get
            {
                double[] affine = AffineCoefficients;
                if (affine[1] == 0 || affine[5] == 0) return null;

                return new Extent(this.Left(), this.Bottom(), this.Right(), this.Top());
            }

            set
            {
                // Preserve the skew, but translate and scale to fit the envelope.
                if (value != null)
                {
                    X = value.X;
                    Y = value.Y;
                    Width = value.Width;
                    Height = value.Height;
                }
            }
        }

        /// <summary>
        /// Gets or sets the height of the entire bounds. This is derived by considering both the
        /// column and row based contributions to the overall height. Changing this will keep
        /// the skew ratio the same, but adjust both portions so that the overall height
        /// will match the specified height.
        /// </summary>
        public double Height
        {
            get
            {
                return (Math.Abs(_affine[4]) * NumColumns) + (Math.Abs(_affine[5]) * NumRows);
            }

            set
            {
                if (Height == 0 && _numRows > 0)
                {
                    // No height information yet: assign everything to the row term
                    // with the conventional negative (north-up) dy.
                    _affine[5] = -(value / _numRows);
                    _affine[4] = 0;
                    return;
                }

                // Split the new height between the column and row terms in the same
                // proportion as the current height, preserving each coefficient's sign.
                double columnFactor = NumColumns * Math.Abs(_affine[4]) / Height;
                double rowFactor = NumRows * Math.Abs(_affine[5]) / Height;
                double newColumnHeight = value * columnFactor;
                double newRowHeight = value * rowFactor;
                _affine[4] = Math.Sign(_affine[4]) * newColumnHeight / NumColumns;
                _affine[5] = Math.Sign(_affine[5]) * newRowHeight / NumRows;
            }
        }

        /// <summary>
        /// Gets the number of columns in the raster.
        /// </summary>
        [Category("General")]
        [Description("Gets the number of columns in the raster.")]
        public virtual int NumColumns => _numColumns;

        /// <summary>
        /// Gets the number of rows in the raster.
        /// </summary>
        [Category("General")]
        [Description("Gets the number of rows in the underlying raster.")]
        public virtual int NumRows => _numRows;

        /// <summary>
        /// Gets or sets the geographic width of this raster. This will include the skew term
        /// in the width estimate, so it will adjust both the width and the skew coefficient,
        /// but preserve the ratio of skew to cell width.
        /// </summary>
        public double Width
        {
            get
            {
                return (NumColumns * Math.Abs(_affine[1])) + (NumRows * Math.Abs(_affine[2]));
            }

            set
            {
                if (Width == 0 && _numColumns > 0)
                {
                    // No width information yet: assign everything to the column term.
                    _affine[1] = value / _numColumns;
                    _affine[2] = 0;
                    return;
                }

                // Split the new width between the column and row terms in the same
                // proportion as the current width, preserving each coefficient's sign.
                double columnFactor = NumColumns * Math.Abs(_affine[1]) / Width;
                double rowFactor = NumRows * Math.Abs(_affine[2]) / Width;
                double newColumnWidth = value * columnFactor;
                double newRowWidth = value * rowFactor;
                _affine[1] = Math.Sign(_affine[1]) * newColumnWidth / NumColumns;
                _affine[2] = Math.Sign(_affine[2]) * newRowWidth / NumRows;
            }
        }

        /// <summary>
        /// Gets or sets the fileName of the wordfile that describes the geographic coordinates of this raster.
        /// If a relative path gets assigned it is changed to the absolute path including the file extension.
        /// </summary>
        [Category("GeoReference")]
        [Description("Returns the Geographic width of the envelope that completely contains this raster.")]
        public string WorldFile
        {
            get
            {
                return _worldFile;
            }

            set
            {
                _worldFile = Path.GetFullPath(value);
            }
        }

        /// <summary>
        /// Gets or sets the horizontal placement of the upper left corner of this bounds. Because
        /// of the skew, this upper left position may not actually be the same as the upper left
        /// corner of the image itself (_affine[0]). Instead, this is the top left corner of
        /// the rectangular extent for this raster.
        /// </summary>
        public double X
        {
            get
            {
                double xMin = double.MaxValue;
                double[] affine = AffineCoefficients; // in case this is an overridden property
                double nr = NumRows;
                double nc = NumColumns;

                // Because these coefficients can be negative, we can't make assumptions about what corner is furthest left.
                if (affine[0] < xMin) xMin = affine[0]; // TopLeft;
                if (affine[0] + (nc * affine[1]) < xMin) xMin = affine[0] + (nc * affine[1]); // TopRight;
                if (affine[0] + (nr * affine[2]) < xMin) xMin = affine[0] + (nr * affine[2]); // BottomLeft;
                if (affine[0] + (nc * affine[1]) + (nr * affine[2]) < xMin) xMin = affine[0] + (nc * affine[1]) + (nr * affine[2]); // BottomRight

                // the coordinate thus far is the center of the cell. The actual left is half a cell further left.
                return xMin - (Math.Abs(affine[1]) / 2) - (Math.Abs(affine[2]) / 2);
            }

            set
            {
                double dx = value - X;
                _affine[0] = _affine[0] + dx; // resetting affine[0] will shift everything else
            }
        }

        /// <summary>
        /// Gets or sets the vertical placement of the upper left corner of this bounds, which is the
        /// same as the top. The top left corner of the actual image may not be in this position
        /// because of skew, but this represents the maximum Y value of the rectangular extents
        /// that contains the image.
        /// </summary>
        public double Y
        {
            get
            {
                double yMax = double.MinValue;
                double[] affine = AffineCoefficients; // in case this is an overridden property
                double nr = NumRows;
                double nc = NumColumns;

                // Because these coefficients can be negative, we can't make assumptions about what corner is highest.
                if (affine[3] > yMax) yMax = affine[3]; // TopLeft;
                if (affine[3] + (nc * affine[4]) > yMax) yMax = affine[3] + (nc * affine[4]); // TopRight;
                if (affine[3] + (nr * affine[5]) > yMax) yMax = affine[3] + (nr * affine[5]); // BottomLeft;
                if (affine[3] + (nc * affine[4]) + (nr * affine[5]) > yMax) yMax = affine[3] + (nc * affine[4]) + (nr * affine[5]); // BottomRight

                // the value thus far is at the center of the cell. Return a value half a cell further
                return yMax + (Math.Abs(affine[4]) / 2) + (Math.Abs(affine[5]) / 2);
            }

            set
            {
                double dy = value - Y;
                _affine[3] += dy; // resets the dY
            }
        }

        #endregion

        #region Methods

        /// <summary>
        /// Returns a duplicate of this object as an object.
        /// </summary>
        /// <returns>A duplicate of this object as an object.</returns>
        public object Clone()
        {
            return MemberwiseClone();
        }

        /// <summary>
        /// Creates a duplicate of this RasterBounds class.
        /// </summary>
        /// <returns>A RasterBounds that has the same properties but does not point to the same internal array.</returns>
        public RasterBounds Copy()
        {
            var result = (RasterBounds)MemberwiseClone();

            // Deep-copy the affine array so the copy cannot mutate this instance's georeference.
            result.AffineCoefficients = new double[6];
            for (int i = 0; i < 6; i++)
            {
                result.AffineCoefficients[i] = _affine[i];
            }

            return result;
        }

        /// <summary>
        /// Attempts to load the data from the given fileName.
        /// </summary>
        /// <param name="fileName">The file name.</param>
        public virtual void Open(string fileName)
        {
            this.OpenWorldFile(fileName);
        }

        /// <summary>
        /// Attempts to save the data to the file listed in WorldFile.
        /// </summary>
        public virtual void Save()
        {
            this.SaveWorldFile();
        }

        /// <summary>
        /// Creates a duplicate of this RasterBounds.
        /// </summary>
        /// <returns>The copy.</returns>
        IRasterBounds IRasterBounds.Copy()
        {
            return Copy();
        }

        #endregion
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

//------------------------------------------------------------------------------

using System.Data.Common;
using System.Diagnostics;
using System.Threading;

namespace System.Data.SqlClient
{
    // Lifecycle states of a server transaction as tracked on the client side.
    internal enum TransactionState
    {
        Pending = 0,
        Active = 1,
        Aborted = 2,
        Committed = 3,
        Unknown = 4,
    }

    // How the transaction was started / what flavor it is.
    internal enum TransactionType
    {
        LocalFromTSQL = 1,
        LocalFromAPI = 2,
        Delegated = 3,
        Distributed = 4,
        Context = 5, // only valid in proc.
    };

    // Client-side bookkeeping for one server transaction: tracks its state, the MARS
    // transaction id, the count of open results, a weak reference to the outer
    // SqlTransaction the user holds, and the owning internal connection. "Zombie-ing"
    // severs those links once the server transaction has completed.
    internal sealed class SqlInternalTransaction
    {
        internal const long NullTransactionId = 0;

        private TransactionState _transactionState;
        private readonly TransactionType _transactionType;
        private long _transactionId; // passed in the MARS headers
        private int _openResultCount; // passed in the MARS headers
        private SqlInternalConnection _innerConnection;
        private bool _disposing; // used to prevent us from throwing exceptions while we're disposing
        private WeakReference _parent; // weak ref to the outer transaction object; needs to be weak to allow GC to occur.

        internal bool RestoreBrokenConnection { get; set; }
        internal bool ConnectionHasBeenRestored { get; set; }

        internal SqlInternalTransaction(SqlInternalConnection innerConnection, TransactionType type, SqlTransaction outerTransaction) : this(innerConnection, type, outerTransaction, NullTransactionId)
        {
        }

        internal SqlInternalTransaction(SqlInternalConnection innerConnection, TransactionType type, SqlTransaction outerTransaction, long transactionId)
        {
            _innerConnection = innerConnection;
            _transactionType = type;

            // Only hold the outer transaction weakly so an abandoned SqlTransaction can be GC'ed.
            if (null != outerTransaction)
            {
                _parent = new WeakReference(outerTransaction);
            }

            _transactionId = transactionId;
            RestoreBrokenConnection = false;
            ConnectionHasBeenRestored = false;
        }

        internal bool HasParentTransaction
        {
            get
            {
                // Return true if we are an API started local transaction, or if we were a TSQL
                // started local transaction and were then wrapped with a parent transaction as
                // a result of a later API begin transaction.
                bool result = ((TransactionType.LocalFromAPI == _transactionType) ||
                               (TransactionType.LocalFromTSQL == _transactionType && _parent != null));
                return result;
            }
        }

        internal bool IsAborted
        {
            get
            {
                return (TransactionState.Aborted == _transactionState);
            }
        }

        internal bool IsActive
        {
            get
            {
                return (TransactionState.Active == _transactionState);
            }
        }

        internal bool IsCommitted
        {
            get
            {
                return (TransactionState.Committed == _transactionState);
            }
        }

        // True once the transaction has reached any terminal state (aborted, committed, or unknown).
        internal bool IsCompleted
        {
            get
            {
                return (TransactionState.Aborted == _transactionState
                     || TransactionState.Committed == _transactionState
                     || TransactionState.Unknown == _transactionState);
            }
        }

        internal bool IsDelegated
        {
            get
            {
                bool result = (TransactionType.Delegated == _transactionType);
                return result;
            }
        }

        internal bool IsDistributed
        {
            get
            {
                bool result = (TransactionType.Distributed == _transactionType);
                return result;
            }
        }

        internal bool IsLocal
        {
            get
            {
                bool result = (TransactionType.LocalFromTSQL == _transactionType
                            || TransactionType.LocalFromAPI == _transactionType
                            );
                return result;
            }
        }

        internal bool IsOrphaned
        {
            get
            {
                // An internal transaction is orphaned when its parent has been
                // reclaimed by GC.
                bool result;
                if (null == _parent)
                {
                    // No parent, so we better be LocalFromTSQL. Should we even return in this case -
                    // since it could be argued this is invalid?
                    Debug.Fail("Why are we calling IsOrphaned with no parent?");
                    Debug.Assert(_transactionType == TransactionType.LocalFromTSQL, "invalid state");
                    result = false;
                }
                else if (null == _parent.Target)
                {
                    // We have an parent, but parent was GC'ed.
                    result = true;
                }
                else
                {
                    // We have an parent, and parent is alive.
                    result = false;
                }

                return result;
            }
        }

        // Zombied means this transaction has been disconnected from its connection/parser.
        internal bool IsZombied
        {
            get
            {
                return (null == _innerConnection);
            }
        }

        internal int OpenResultsCount
        {
            get
            {
                return _openResultCount;
            }
        }

        internal SqlTransaction Parent
        {
            get
            {
                SqlTransaction result = null;
                // Should we protect against this, since this probably is an invalid state?
                Debug.Assert(null != _parent, "Why are we calling Parent with no parent?");
                if (null != _parent)
                {
                    result = (SqlTransaction)_parent.Target;
                }
                return result;
            }
        }

        internal long TransactionId
        {
            get
            {
                return _transactionId;
            }

            set
            {
                // The id may only be assigned once per transaction (set from the MARS headers).
                Debug.Assert(NullTransactionId == _transactionId, "setting transaction cookie while one is active?");
                _transactionId = value;
            }
        }

        internal void Activate()
        {
            _transactionState = TransactionState.Active;
        }

        // Queries the server's @@trancount; if the server says we're no longer in a
        // transaction (or the check itself fails), zombie this object.
        private void CheckTransactionLevelAndZombie()
        {
            try
            {
                if (!IsZombied && GetServerTransactionLevel() == 0)
                {
                    // If not zombied, not closed, and not in transaction, zombie.
                    Zombie();
                }
            }
            catch (Exception e)
            {
                if (!ADP.IsCatchableExceptionType(e))
                {
                    throw;
                }

                Zombie(); // If exception caught when trying to check level, zombie.
            }
        }

        // Called when the owning connection is closing: best-effort rollback, then zombie.
        internal void CloseFromConnection()
        {
            SqlInternalConnection innerConnection = _innerConnection;

            Debug.Assert(innerConnection != null, "How can we be here if the connection is null?");

            bool processFinallyBlock = true;
            try
            {
                innerConnection.ExecuteTransaction(SqlInternalConnection.TransactionRequest.IfRollback, null, IsolationLevel.Unspecified, null, false);
            }
            catch (Exception e)
            {
                processFinallyBlock = ADP.IsCatchableExceptionType(e);
                throw;
            }
            finally
            {
                if (processFinallyBlock)
                {
                    // Always ensure we're zombied; Yukon will send an EnvChange that
                    // will cause the zombie, but only if we actually go to the wire;
                    // Sphinx and Shiloh won't send the env change, so we have to handle
                    // them ourselves.
                    Zombie();
                }
            }
        }

        internal void Commit()
        {
            if (_innerConnection.IsLockedForBulkCopy)
            {
                throw SQL.ConnectionLockedForBcpEvent();
            }

            _innerConnection.ValidateConnectionForExecute(null);

            // If this transaction has been completed, throw exception since it is unusable.
            try
            {
                // COMMIT ignores transaction names, and so there is no reason to pass it anything. COMMIT
                // simply commits the transaction from the most recent BEGIN, nested or otherwise.
                _innerConnection.ExecuteTransaction(SqlInternalConnection.TransactionRequest.Commit, null, IsolationLevel.Unspecified, null, false);

                // NOTE(review): bare scope braces below — presumably a remnant of a removed
                // conditional during porting; harmless, ZombieParent always runs on success.
                {
                    ZombieParent();
                }
            }
            catch (Exception e)
            {
                if (ADP.IsCatchableExceptionType(e))
                {
                    CheckTransactionLevelAndZombie();
                }

                throw;
            }
        }

        // Records the terminal state reported by the server and zombies this object.
        internal void Completed(TransactionState transactionState)
        {
            Debug.Assert(TransactionState.Active < transactionState, "invalid transaction completion state?");
            _transactionState = transactionState;
            Zombie();
        }

        internal int DecrementAndObtainOpenResultCount()
        {
            int openResultCount = Interlocked.Decrement(ref _openResultCount);
            if (openResultCount < 0)
            {
                throw SQL.OpenResultCountExceeded();
            }
            return openResultCount;
        }

        internal void Dispose()
        {
            this.Dispose(true);
            System.GC.SuppressFinalize(this);
        }

        private /*protected override*/ void Dispose(bool disposing)
        {
            if (disposing)
            {
                if (null != _innerConnection)
                {
                    // implicitly rollback if transaction still valid
                    _disposing = true;
                    this.Rollback();
                }
            }
        }

        private int GetServerTransactionLevel()
        {
            // This function is needed for those times when it is impossible to determine the server's
            // transaction level, unless the user's arguments were parsed - which is something we don't want
            // to do. An example when it is impossible to determine the level is after a rollback.

            using (SqlCommand transactionLevelCommand = new SqlCommand("set @out = @@trancount", (SqlConnection)(_innerConnection.Owner)))
            {
                transactionLevelCommand.Transaction = Parent;

                SqlParameter parameter = new SqlParameter("@out", SqlDbType.Int);
                parameter.Direction = ParameterDirection.Output;
                transactionLevelCommand.Parameters.Add(parameter);

                transactionLevelCommand.RunExecuteReader(CommandBehavior.Default, RunBehavior.UntilDone, returnStream: false);

                return (int)parameter.Value;
            }
        }

        internal int IncrementAndObtainOpenResultCount()
        {
            int openResultCount = Interlocked.Increment(ref _openResultCount);

            if (openResultCount < 0)
            {
                throw SQL.OpenResultCountExceeded();
            }
            return openResultCount;
        }

        // Late-binds an outer SqlTransaction (e.g. a TSQL-started transaction later wrapped by the API).
        internal void InitParent(SqlTransaction transaction)
        {
            Debug.Assert(_parent == null, "Why do we have a parent on InitParent?");
            _parent = new WeakReference(transaction);
        }

        internal void Rollback()
        {
            if (_innerConnection.IsLockedForBulkCopy)
            {
                throw SQL.ConnectionLockedForBcpEvent();
            }

            _innerConnection.ValidateConnectionForExecute(null);

            try
            {
                // If no arg is given to ROLLBACK it will rollback to the outermost begin - rolling back
                // all nested transactions as well as the outermost transaction.
                _innerConnection.ExecuteTransaction(SqlInternalConnection.TransactionRequest.IfRollback, null, IsolationLevel.Unspecified, null, false);

                // Since Rollback will rollback to outermost begin, no need to check
                // server transaction level. This transaction has been completed.
                Zombie();
            }
            catch (Exception e)
            {
                if (ADP.IsCatchableExceptionType(e))
                {
                    CheckTransactionLevelAndZombie();

                    // Swallow the failure when rolling back from Dispose: disposal must not throw.
                    if (!_disposing)
                    {
                        throw;
                    }
                }
                else
                {
                    throw;
                }
            }
        }

        internal void Rollback(string transactionName)
        {
            if (_innerConnection.IsLockedForBulkCopy)
            {
                throw SQL.ConnectionLockedForBcpEvent();
            }

            _innerConnection.ValidateConnectionForExecute(null);

            // ROLLBACK takes either a save point name or a transaction name. It will rollback the
            // transaction to either the save point with the save point name or begin with the
            // transaction name. NOTE: for simplicity it is possible to give all save point names
            // the same name, and ROLLBACK will simply rollback to the most recent save point with the
            // save point name.
            if (string.IsNullOrEmpty(transactionName))
                throw SQL.NullEmptyTransactionName();

            try
            {
                _innerConnection.ExecuteTransaction(SqlInternalConnection.TransactionRequest.Rollback, transactionName, IsolationLevel.Unspecified, null, false);
            }
            catch (Exception e)
            {
                if (ADP.IsCatchableExceptionType(e))
                {
                    CheckTransactionLevelAndZombie();
                }
                throw;
            }
        }

        internal void Save(string savePointName)
        {
            _innerConnection.ValidateConnectionForExecute(null);

            // ROLLBACK takes either a save point name or a transaction name. It will rollback the
            // transaction to either the save point with the save point name or begin with the
            // transaction name. So, to rollback a nested transaction you must have a save point.
            // SAVE TRANSACTION MUST HAVE AN ARGUMENT!!! Save Transaction without an arg throws an
            // exception from the server. So, an overload for SaveTransaction without an arg doesn't make
            // sense to have. Save Transaction does not affect the transaction level.
            if (string.IsNullOrEmpty(savePointName))
                throw SQL.NullEmptyTransactionName();

            try
            {
                _innerConnection.ExecuteTransaction(SqlInternalConnection.TransactionRequest.Save, savePointName, IsolationLevel.Unspecified, null, false);
            }
            catch (Exception e)
            {
                if (ADP.IsCatchableExceptionType(e))
                {
                    CheckTransactionLevelAndZombie();
                }

                throw;
            }
        }

        internal void Zombie()
        {
            // Called by several places in the code to ensure that the outer
            // transaction object has been zombied and the parser has broken
            // it's reference to us.

            // NOTE: we'll be called from the TdsParser when it gets appropriate
            // ENVCHANGE events that indicate the transaction has completed, however
            // we cannot rely upon those events occurring in the case of pre-Yukon
            // servers (and when we don't go to the wire because the connection
            // is broken) so we can also be called from the Commit/Rollback/Save
            // methods to handle that case as well.

            // There are two parts to a full zombie:
            // 1) Zombie parent and disconnect outer transaction from internal transaction
            // 2) Disconnect internal transaction from connection and parser
            // Number 1 needs to be done whenever a SqlTransaction object is completed. Number
            // 2 is only done when a transaction is actually completed. Since users can begin
            // transactions both in and outside of the API, and since nested begins are not actual
            // transactions we need to distinguish between #1 and #2.

            ZombieParent();

            SqlInternalConnection innerConnection = _innerConnection;
            _innerConnection = null;

            if (null != innerConnection)
            {
                innerConnection.DisconnectTransaction(this);
            }
        }

        // Part 1 of zombie-ing: zombie the outer SqlTransaction (if still alive) and drop the weak ref.
        private void ZombieParent()
        {
            if (null != _parent)
            {
                SqlTransaction parent = (SqlTransaction)_parent.Target;

                if (null != parent)
                {
                    parent.Zombie();
                }
                _parent = null;
            }
        }
    }
}
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Xml;

namespace Ecosim.SceneData.AnimalPopulationModel
{
	// Population model describing the starting state of an animal population as a
	// set of nests, each with a grid position, capacities, and per-cell counts stored
	// in data maps associated with the animal ("males"/"females"/"food").
	public class AnimalStartPopulationModel : IAnimalPopulationModel
	{
		public const string XML_ELEMENT = "startpopmodel";

		[System.Serializable]
		public class Nests : AnimalPopulationModelDataBase
		{
			// NOTE(review): instance field (not const) that shadows the outer class's
			// XML_ELEMENT constant; the serialized element name for this section is "nests".
			public string XML_ELEMENT = "nests";

			// TODO: Save these data in Data maps so that the progress can be saved correctly/easily
			public class Nest
			{
				public const string XML_ELEMENT = "nest";

				public Scene scene;
				public Nests parent;
				// Grid coordinates of the nest; the counts below are read/written at this cell.
				public int x;
				public int y;

				// Current male count at this nest's cell (backed by the "males" data map).
				public int males {
					get { return malesMap.Get (x, y); }
					set { malesMap.Set (x, y, value); }
				}
				// Current female count at this nest's cell (backed by the "females" data map).
				public int females {
					get { return femalesMap.Get (x, y); }
					set { femalesMap.Set (x, y, value); }
				}
				// Current food amount at this nest's cell (backed by the "food" data map).
				public int currentFood {
					get { return foodMap.Get (x, y); }
					set { foodMap.Set (x, y, value); }
				}

				public int totalCapacity;
				public int malesCapacity;
				public int femalesCapacity;

				private Data _malesMap;
				private Data _femalesMap;
				private Data _foodMap;

				// Lazily fetches the animal's "males" associated data map, creating and
				// registering a new BitMap16 on first access if none exists yet.
				public Data malesMap {
					get {
						if (_malesMap == null) {
							_malesMap = parent.model.animal.GetAssociatedData ("males");
							if (_malesMap == null) {
								_malesMap = new BitMap16 (parent.model.animal.scene);
								parent.model.animal.AddAssociatedData ("males", _malesMap);
							}
						}
						return _malesMap;
					}
				}

				// Lazily fetches the animal's "females" associated data map (see malesMap).
				public Data femalesMap {
					get {
						if (_femalesMap == null) {
							_femalesMap = parent.model.animal.GetAssociatedData ("females");
							if (_femalesMap == null) {
								_femalesMap = new BitMap16 (parent.model.animal.scene);
								parent.model.animal.AddAssociatedData ("females", _femalesMap);
							}
						}
						return _femalesMap;
					}
				}

				// Lazily fetches the animal's "food" associated data map (see malesMap).
				public Data foodMap {
					get {
						if (_foodMap == null) {
							_foodMap = parent.model.animal.GetAssociatedData ("food");
							if (_foodMap == null) {
								_foodMap = new BitMap16 (parent.model.animal.scene);
								parent.model.animal.AddAssociatedData ("food", _foodMap);
							}
						}
						return _foodMap;
					}
				}

				public Nest (Nests parent)
				{
					this.parent = parent;
				}

				// Reads one <nest> element from the reader, parsing its position and
				// capacity attributes, then consumes the reader up to the end element.
				public static Nest Load (Nests parent, XmlTextReader reader, Scene scene)
				{
					Nest nest = new Nest (parent);
					nest.x = int.Parse(reader.GetAttribute ("x"));
					nest.y = int.Parse(reader.GetAttribute ("y"));
					nest.totalCapacity = int.Parse (reader.GetAttribute ("cap"));
					nest.malesCapacity = int.Parse (reader.GetAttribute ("mcap"));
					nest.femalesCapacity = int.Parse (reader.GetAttribute ("fcap"));
					IOUtil.ReadUntilEndElement(reader, XML_ELEMENT);
					return nest;
				}

				// Writes this nest as a <nest> element; the per-cell counts are not written
				// here — they live in the animal's associated data maps.
				public void Save (XmlTextWriter writer, Scene scene)
				{
					writer.WriteStartElement (XML_ELEMENT);
					writer.WriteAttributeString ("x", x.ToString());
					writer.WriteAttributeString ("y", y.ToString());
					writer.WriteAttributeString ("cap", totalCapacity.ToString());
					writer.WriteAttributeString ("mcap", malesCapacity.ToString());
					writer.WriteAttributeString ("fcap", femalesCapacity.ToString());
					writer.WriteEndElement ();
				}

				// No references to fix up for a nest.
				public void UpdateReferences (Scene scene)
				{
				}

				public override string ToString ()
				{
					return string.Format ("[Nest] ({0},{1}) Males:{2}/{3}, Females:{4}/{5}, Food:{6}",
						x.ToString(), y.ToString(), males.ToString(), malesCapacity.ToString(), females.ToString(), femalesCapacity.ToString(), currentFood);
				}

				// Male count at this cell for a historical year; 0 when no map exists for that year.
				public int GetMalesAt (int year)
				{
					Data data = parent.model.animal.GetAssociatedData ("males", year);
					if (data != null) {
						return data.Get (x, y);
					}
					return 0;
				}

				// Female count at this cell for a historical year; 0 when no map exists for that year.
				public int GetFemalesAt (int year)
				{
					Data data = parent.model.animal.GetAssociatedData ("females", year);
					if (data != null) {
						return data.Get (x, y);
					}
					return 0;
				}
			}

			public Nest[] nests = new Nest[0];

			public Nests (IAnimalPopulationModel model) : base (model)
			{
			}

			public void UpdateReferences (Scene scene)
			{
				foreach (Nest n in nests) {
					n.UpdateReferences (scene);
				}
			}

			// Reads the <nests> section: collects every child <nest> element until the
			// matching end element, then replaces the nests array.
			public override void Load (XmlTextReader reader, Scene scene)
			{
				base.Load (reader, scene);

				List<Nest> nests = new List<Nest>();
				if (!reader.IsEmptyElement) {
					while (reader.Read()) {
						string readerName = reader.Name.ToLower ();
						XmlNodeType nType = reader.NodeType;
						if (nType == XmlNodeType.Element) {
							if (readerName == Nest.XML_ELEMENT) {
								Nest nest = Nest.Load (this, reader, scene);
								if (nest != null) {
									nests.Add (nest);
								}
							}
						} else if ((nType == XmlNodeType.EndElement) && (readerName == XML_ELEMENT)) {
							break;
						}
					}
				}
				this.nests = nests.ToArray();
			}

			// Writes the <nests> section including base-class data and every nest.
			public override void Save (XmlTextWriter writer, Scene scene)
			{
				writer.WriteStartElement (XML_ELEMENT);
				base.Save (writer, scene);
				foreach (Nest n in nests) {
					n.Save (writer, scene);
				}
				writer.WriteEndElement ();
			}
		}

		public Nests nests;

		public AnimalStartPopulationModel (AnimalType animal) : base (animal)
		{
			this.nests = new Nests (this);
		}

		// Reads the <startpopmodel> element, dispatching child sections to their models.
		public override void Load (XmlTextReader reader, Scene scene)
		{
			if (!reader.IsEmptyElement) {
				while (reader.Read()) {
					string readerName = reader.Name.ToLower ();
					XmlNodeType nType = reader.NodeType;
					if (nType == XmlNodeType.Element) {
						// Add more AnimalPopulationModelDataBases
						if (readerName == nests.XML_ELEMENT) {
							nests.Load (reader, scene);
						}
					} else if ((nType == XmlNodeType.EndElement) && (readerName == XML_ELEMENT)) {
						break;
					}
				}
			}
		}

		// Writes the <startpopmodel> element wrapping all child sections.
		public override void Save (XmlTextWriter writer, Scene scene)
		{
			writer.WriteStartElement (XML_ELEMENT);
			nests.Save (writer, scene);
			writer.WriteEndElement ();
		}

		public override void UpdateReferences (Scene scene)
		{
			nests.UpdateReferences (scene);
		}

		public override string GetXMLElement ()
		{
			return XML_ELEMENT;
		}

		// Succession phases are delegated to the nests section.
		public override void PrepareSuccession ()
		{
			nests.PrepareSuccession ();
		}

		public override void DoSuccession ()
		{
			nests.DoSuccession ();
		}

		public override void FinalizeSuccession ()
		{
			nests.FinalizeSuccession ();
		}
	}
}
using OpenKh.Engine;
using OpenKh.Game.Field;
using OpenKh.Game.Infrastructure;
using System.Collections.Generic;
using System.Numerics;
using System.Text;
using static OpenKh.Kh2.Ard.Event;

namespace OpenKh.Game.Events
{
    /// <summary>
    /// Plays back a cutscene event: loads actors and animations, then each
    /// frame fires the timed entries (positions, animations, fades, subtitles)
    /// and drives the active camera. Timing is frame-based at 30 FPS.
    /// </summary>
    public class EventPlayer
    {
        private const int FramesPerSecond = 30;
        private const float TimeMul = 1.0f / FramesPerSecond; // frames -> seconds

        private readonly IField _field;
        private readonly IList<IEventEntry> _eventEntries;
        private readonly IList<SetCameraData> _cameras = new List<SetCameraData>();
        // Actor id -> actor name; used by GetAnmPath to build animation paths.
        private readonly Dictionary<int, string> _actors = new Dictionary<int, string>();
        private double _secondsPrev;
        private double _seconds;
        private int _cameraId;
        private int _eventDuration; // event length in frames (from SetEndFrame)

        /// <summary>True once playback has reached the event's end frame.</summary>
        public bool IsEnd { get; private set; }

        public EventPlayer(IField field, IList<IEventEntry> eventEntries)
        {
            _field = field;
            _eventEntries = eventEntries;
        }

        /// <summary>
        /// Scans the entry list once: records the event duration, loads actors
        /// and their motions, collects camera data and applies the initial fade.
        /// </summary>
        public void Initialize()
        {
            IsEnd = false;
            foreach (var entry in _eventEntries)
            {
                switch (entry)
                {
                    case SetEndFrame item:
                        _eventDuration = item.EndFrame;
                        break;
                    case ReadAssets assets:
                        foreach (var assetEntry in assets.Set)
                        {
                            switch (assetEntry)
                            {
                                case ReadActor item:
                                    _actors[item.ActorId] = item.Name;
                                    _field.AddActor(item.ActorId, item.ObjectId);
                                    // Actors start hidden until a motion or
                                    // SeqPlayAnimation reveals them.
                                    _field.SetActorVisibility(item.ActorId, false);
                                    break;
                            }
                        }

                        // Double for-loop to ensure to load actors first, then
                        // animations to prevent crashes.
                        foreach (var assetEntry in assets.Set)
                        {
                            switch (assetEntry)
                            {
                                case ReadMotion item:
                                    _field.SetActorAnimation(
                                        item.ActorId,
                                        GetAnmPath(item.ActorId, item.Name));
                                    _field.SetActorVisibility(item.ActorId, item.UnknownIndex == 0);
                                    break;
                            }
                        }
                        break;
                    case EventStart item:
                        _field.FadeFromBlack(item.FadeIn * TimeMul);
                        break;
                    case SetCameraData item:
                        _cameras.Add(item);
                        break;
                }
            }

            _cameraId = 0;
        }

        /// <summary>
        /// Advances playback by <paramref name="deltaTime"/> seconds. Entries
        /// whose frame falls in (previous frame, current frame] fire exactly
        /// once; the active camera is re-evaluated every call.
        /// </summary>
        public void Update(double deltaTime)
        {
            _seconds += deltaTime;
            var nFrame = (int)(_seconds * FramesPerSecond);
            var nPrevFrame = (int)(_secondsPrev * FramesPerSecond);
            var cameraFrameTime = _seconds;
            if (nFrame >= _eventDuration)
            {
                IsEnd = true;
                return;
            }

            foreach (var entry in _eventEntries)
            {
                switch (entry)
                {
                    case SeqActorPosition item:
                        if (item.Frame > nPrevFrame && item.Frame <= nFrame)
                        {
                            // NOTE: Z is negated here — presumably a
                            // coordinate-system flip between event data and the
                            // field; confirm against IField.SetActorPosition.
                            _field.SetActorPosition(
                                item.ActorId,
                                item.PositionX,
                                item.PositionY,
                                -item.PositionZ,
                                item.RotationY);
                        }
                        break;
                    case SeqPlayAnimation item:
                        if (item.FrameStart > nPrevFrame && item.FrameStart <= nFrame)
                        {
                            _field.SetActorAnimation(
                                item.ActorId,
                                GetAnmPath(item.ActorId, item.Path));
                            _field.SetActorVisibility(item.ActorId, true);
                        }
                        break;
                    case SeqActorLeave item:
                        if (item.Frame > nPrevFrame && item.Frame <= nFrame)
                            _field.SetActorVisibility(item.ActorId, false);
                        break;
                    case SeqCamera item:
                        if (nFrame >= item.FrameStart && nFrame < item.FrameEnd)
                        {
                            _cameraId = item.CameraId;
                            // Seconds elapsed since this camera cut started.
                            // (Fix: was a magic literal 30f duplicating
                            // FramesPerSecond; also removed an unused local.)
                            cameraFrameTime =
                                (_seconds * FramesPerSecond - item.FrameStart) / FramesPerSecond;
                        }
                        break;
                    case SeqFade item:
                        if (item.FrameIndex > nPrevFrame && item.FrameIndex <= nFrame)
                        {
                            switch (item.Type)
                            {
                                case SeqFade.FadeType.FromBlack:
                                case SeqFade.FadeType.FromBlackVariant:
                                    _field.FadeFromBlack(item.Duration * TimeMul);
                                    break;
                                case SeqFade.FadeType.FromWhite:
                                case SeqFade.FadeType.FromWhiteVariant:
                                    _field.FadeFromWhite(item.Duration * TimeMul);
                                    break;
                                case SeqFade.FadeType.ToBlack:
                                case SeqFade.FadeType.ToBlackVariant:
                                    _field.FadeToBlack(item.Duration * TimeMul);
                                    break;
                                case SeqFade.FadeType.ToWhite:
                                case SeqFade.FadeType.ToWhiteVariant:
                                    _field.FadeToWhite(item.Duration * TimeMul);
                                    break;
                            }
                        }
                        break;
                    case SeqSubtitle item:
                        if (nFrame >= item.FrameStart && nPrevFrame < item.FrameStart)
                        {
                            if (item.HideFlag == 0)
                                _field.ShowSubtitle(item.Index, (ushort)item.MessageId);
                            else // HideFlag != 0 (was a redundant `else if`)
                                _field.HideSubtitle(item.Index);
                        }
                        break;
                }
            }

            // Evaluate the currently selected camera's animated channels.
            if (_cameraId < _cameras.Count)
            {
                var curCamera = _cameras[_cameraId];
                _field.SetCamera(
                    new Vector3(
                        (float)GetCameraValue(cameraFrameTime, curCamera.PositionX, null),
                        (float)GetCameraValue(cameraFrameTime, curCamera.PositionY, null),
                        (float)GetCameraValue(cameraFrameTime, curCamera.PositionZ, null)),
                    new Vector3(
                        (float)GetCameraValue(cameraFrameTime, curCamera.LookAtX, null),
                        (float)GetCameraValue(cameraFrameTime, curCamera.LookAtY, null),
                        (float)GetCameraValue(cameraFrameTime, curCamera.LookAtZ, null)),
                    (float)GetCameraValue(cameraFrameTime, curCamera.FieldOfView, null),
                    (float)GetCameraValue(cameraFrameTime, curCamera.Roll, null));
            }

            _secondsPrev = _seconds;
        }

        // Builds the animation path from a raw motion name of the form
        // "...anm_<prefix>/<file>" by inserting the actor's name:
        // "<prefix>/<actorName>/<file>".
        private string GetAnmPath(int actorId, string name)
        {
            var sb = new StringBuilder();
            var split = name.Split("anm_")[1].Split("/");
            sb.Append(split[0]);
            sb.Append("/");
            sb.Append(_actors[actorId]);
            sb.Append("/");
            sb.Append(split[1]);
            return sb.ToString();
        }

        // Samples one camera channel at `time` seconds. Key frame positions are
        // stored as fixed-point units of 1/512 s; the 0x10000000 offsets mirror
        // the original game's encoding — TODO confirm against the KH2 format docs.
        // Before the first key / after the last key the value is extrapolated
        // along the ease tangent (or linearly for Linear interpolation).
        private static double GetCameraValue(
            double time,
            IList<SetCameraData.CameraKeys> keyFrames,
            SetCameraData.CameraKeys prevKey)
        {
            if (keyFrames.Count == 0)
                return 0.0;
            if (keyFrames.Count == 1)
                return keyFrames[0].Value;

            const int First = 0;
            var Last = keyFrames.Count - 1;
            var m = time + 1.0 / 60.0;
            var currentFrameIndex = (int)(m * 512.0);

            if (currentFrameIndex > keyFrames[First].KeyFrame - 0x10000000)
            {
                if (currentFrameIndex < keyFrames[Last].KeyFrame)
                {
                    // Do a binary search through all the key frames
                    var left = First;
                    var right = Last;
                    if (right <= 1)
                        return InterpolateCamera(right - 1, m, keyFrames, prevKey);
                    while (true)
                    {
                        var mid = (left + right) / 2;
                        if (currentFrameIndex >= keyFrames[mid].KeyFrame)
                        {
                            // Exact hit on a key frame: no interpolation needed.
                            if (currentFrameIndex <= keyFrames[mid].KeyFrame)
                                return keyFrames[mid].Value;
                            left = mid;
                        }
                        else
                            right = mid;
                        if (right - left <= 1)
                            return InterpolateCamera(right - 1, m, keyFrames, prevKey);
                    }
                }

                // Past the last key frame: extrapolate along its outgoing tangent.
                double tangent;
                var keyFrameDistance = keyFrames[Last].KeyFrame - keyFrames[Last - 1].KeyFrame;
                if (keyFrames[Last].Interpolation != Kh2.Motion.Interpolation.Linear || keyFrameDistance == 0)
                    tangent = keyFrames[Last].TangentEaseOut;
                else
                    tangent = (keyFrames[Last].Value - keyFrames[Last - 1].Value) / keyFrameDistance;
                return ((currentFrameIndex - (keyFrames[Last].KeyFrame + 0x10000000) + 0x10000000) * tangent) + keyFrames[Last].Value;
            }
            else
            {
                // Before the first key frame: extrapolate along its incoming tangent.
                double tangent;
                var keyFrameDistance = keyFrames[First + 1].KeyFrame - keyFrames[First].KeyFrame;
                if (keyFrames[First].Interpolation != Kh2.Motion.Interpolation.Linear || keyFrameDistance == 0)
                    tangent = keyFrames[First].TangentEaseIn;
                else
                    tangent = (keyFrames[First + 1].Value - keyFrames[First].Value) / keyFrameDistance;
                return -(((keyFrames[First].KeyFrame - currentFrameIndex - 0x10000000) * tangent) - keyFrames[First].Value);
            }
        }

        // Interpolates between keyFrames[keyFrameIndex] and the next key at
        // `time` seconds. N converts fixed-point key positions (1/512 s units)
        // back to seconds. prevKey, when supplied, receives the current key's
        // ease-out tangent as a side effect (callers above always pass null).
        private static double InterpolateCamera(
            int keyFrameIndex, double time,
            IList<SetCameraData.CameraKeys> keyFrames,
            SetCameraData.CameraKeys prevKey)
        {
            const double N = 1.0 / 512.0;
            var curKeyFrame = keyFrames[keyFrameIndex];
            var nextKeyFrame = keyFrames[keyFrameIndex + 1];
            if (prevKey != null)
                prevKey.TangentEaseOut = curKeyFrame.TangentEaseOut;
            var t = time - curKeyFrame.KeyFrame * N;   // seconds past current key
            var tx = (nextKeyFrame.KeyFrame - curKeyFrame.KeyFrame) * N; // segment length
            switch (curKeyFrame.Interpolation)
            {
                case Kh2.Motion.Interpolation.Nearest:
                    return curKeyFrame.Value;
                case Kh2.Motion.Interpolation.Linear:
                    return curKeyFrame.Value + ((nextKeyFrame.Value - curKeyFrame.Value) * t / tx);
                case Kh2.Motion.Interpolation.Hermite:
                case (Kh2.Motion.Interpolation)3:
                case (Kh2.Motion.Interpolation)4:
                    var itx = 1.0 / tx;
                    // Perform a cubic hermite interpolation
                    var p0 = curKeyFrame.Value;
                    var p1 = nextKeyFrame.Value;
                    var m0 = curKeyFrame.TangentEaseOut;
                    var m1 = nextKeyFrame.TangentEaseIn;
                    var t2 = t * t * itx;
                    var t3 = t * t * t * itx * itx;
                    return p0 * (2 * t3 * itx - 3 * t2 * itx + 1) +
                           m0 * (t3 - 2 * t2 + t) +
                           p1 * (-2 * t3 * itx + 3 * t2 * itx) +
                           m1 * (t3 - t2);
                default:
                    return 0;
            }
        }
    }
}
using Microsoft.EntityFrameworkCore.TestUtilities; using Xunit; using Xunit.Abstractions; namespace Microsoft.EntityFrameworkCore.Query { public class IncludeSqlCeTest : IncludeTestBase<IncludeSqlCeFixture> { private bool SupportsOffset => TestEnvironment.GetFlag(nameof(SqlServerCondition.SupportsOffset)) ?? true; public IncludeSqlCeTest(IncludeSqlCeFixture fixture, ITestOutputHelper testOutputHelper) : base(fixture) { fixture.TestSqlLoggerFactory.Clear(); } [Theory(Skip = "SQLCE limitation")] public override void Include_collection_OrderBy_empty_list_contains(bool useString) { base.Include_collection_OrderBy_empty_list_contains(useString); } [Theory(Skip = "SQLCE limitation")] public override void Include_collection_OrderBy_empty_list_does_not_contains(bool useString) { base.Include_collection_OrderBy_empty_list_does_not_contains(useString); } [Theory(Skip = "SQLCE limitation")] public override void Include_with_complex_projection_does_not_change_ordering_of_projection(bool useString) { base.Include_with_complex_projection_does_not_change_ordering_of_projection(useString); } public override void Include_list(bool useString) { base.Include_list(useString); AssertSql( @"SELECT [p].[ProductID], [p].[Discontinued], [p].[ProductName], [p].[SupplierID], [p].[UnitPrice], [p].[UnitsInStock] FROM [Products] AS [p] ORDER BY [p].[ProductID]", // @"SELECT [p.OrderDetails].[OrderID], [p.OrderDetails].[ProductID], [p.OrderDetails].[Discount], [p.OrderDetails].[Quantity], [p.OrderDetails].[UnitPrice], [o.Order].[OrderID], [o.Order].[CustomerID], [o.Order].[EmployeeID], [o.Order].[OrderDate] FROM [Order Details] AS [p.OrderDetails] INNER JOIN [Orders] AS [o.Order] ON [p.OrderDetails].[OrderID] = [o.Order].[OrderID] INNER JOIN ( SELECT [p0].[ProductID] FROM [Products] AS [p0] ) AS [t] ON [p.OrderDetails].[ProductID] = [t].[ProductID] ORDER BY [t].[ProductID]"); } public override void Include_reference(bool useString) { base.Include_reference(useString); AssertSql( @"SELECT 
[o].[OrderID], [o].[CustomerID], [o].[EmployeeID], [o].[OrderDate], [o.Customer].[CustomerID], [o.Customer].[Address], [o.Customer].[City], [o.Customer].[CompanyName], [o.Customer].[ContactName], [o.Customer].[ContactTitle], [o.Customer].[Country], [o.Customer].[Fax], [o.Customer].[Phone], [o.Customer].[PostalCode], [o.Customer].[Region] FROM [Orders] AS [o] LEFT JOIN [Customers] AS [o.Customer] ON [o].[CustomerID] = [o.Customer].[CustomerID]"); } public override void Include_collection(bool useString) { base.Include_collection(useString); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] ORDER BY [c].[CustomerID]", // @"SELECT [c.Orders].[OrderID], [c.Orders].[CustomerID], [c.Orders].[EmployeeID], [c.Orders].[OrderDate] FROM [Orders] AS [c.Orders] INNER JOIN ( SELECT [c0].[CustomerID] FROM [Customers] AS [c0] ) AS [t] ON [c.Orders].[CustomerID] = [t].[CustomerID] ORDER BY [t].[CustomerID]"); } public override void Include_collection_with_last(bool useString) { base.Include_collection_with_last(useString); AssertSql( @"SELECT TOP(1) [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] ORDER BY [c].[CompanyName] DESC, [c].[CustomerID] DESC", // @"SELECT [c.Orders].[OrderID], [c.Orders].[CustomerID], [c.Orders].[EmployeeID], [c.Orders].[OrderDate] FROM [Orders] AS [c.Orders] INNER JOIN ( SELECT TOP(1) [c0].[CustomerID], [c0].[CompanyName] FROM [Customers] AS [c0] ORDER BY [c0].[CompanyName] DESC, [c0].[CustomerID] DESC ) AS [t] ON [c.Orders].[CustomerID] = [t].[CustomerID] ORDER BY [t].[CompanyName] DESC, [t].[CustomerID] DESC"); } public override void Include_collection_with_last_no_orderby(bool useString) { 
base.Include_collection_with_last_no_orderby(useString); AssertSql( @"SELECT TOP(1) [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] ORDER BY [c].[CustomerID] DESC", // @"SELECT [c.Orders].[OrderID], [c.Orders].[CustomerID], [c.Orders].[EmployeeID], [c.Orders].[OrderDate] FROM [Orders] AS [c.Orders] INNER JOIN ( SELECT TOP(1) [c0].[CustomerID] FROM [Customers] AS [c0] ORDER BY [c0].[CustomerID] DESC ) AS [t] ON [c.Orders].[CustomerID] = [t].[CustomerID] ORDER BY [t].[CustomerID] DESC"); } public override void Include_collection_skip_no_order_by(bool useString) { base.Include_collection_skip_no_order_by(useString); if (SupportsOffset) { AssertSql( @"@__p_0='10' SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] ORDER BY [c].[CustomerID] OFFSET @__p_0 ROWS", // @"@__p_0='10' SELECT [c.Orders].[OrderID], [c.Orders].[CustomerID], [c.Orders].[EmployeeID], [c.Orders].[OrderDate] FROM [Orders] AS [c.Orders] INNER JOIN ( SELECT [c0].[CustomerID] FROM [Customers] AS [c0] ORDER BY [c0].[CustomerID] OFFSET @__p_0 ROWS ) AS [t] ON [c.Orders].[CustomerID] = [t].[CustomerID] ORDER BY [t].[CustomerID]"); } } public override void Include_collection_take_no_order_by(bool useString) { base.Include_collection_take_no_order_by(useString); if (SupportsOffset) { AssertSql( @"@__p_0='10' SELECT TOP(@__p_0) [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] ORDER BY [c].[CustomerID]", // @"@__p_0='10' SELECT [c.Orders].[OrderID], [c.Orders].[CustomerID], [c.Orders].[EmployeeID], [c.Orders].[OrderDate] FROM [Orders] AS [c.Orders] INNER JOIN ( SELECT 
TOP(@__p_0) [c0].[CustomerID] FROM [Customers] AS [c0] ORDER BY [c0].[CustomerID] ) AS [t] ON [c.Orders].[CustomerID] = [t].[CustomerID] ORDER BY [t].[CustomerID]"); } } public override void Include_collection_skip_take_no_order_by(bool useString) { base.Include_collection_skip_take_no_order_by(useString); if (SupportsOffset) { AssertSql( @"@__p_0='10' @__p_1='5' SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] ORDER BY [c].[CustomerID] OFFSET @__p_0 ROWS FETCH NEXT @__p_1 ROWS ONLY", // @"@__p_0='10' @__p_1='5' SELECT [c.Orders].[OrderID], [c.Orders].[CustomerID], [c.Orders].[EmployeeID], [c.Orders].[OrderDate] FROM [Orders] AS [c.Orders] INNER JOIN ( SELECT [c0].[CustomerID] FROM [Customers] AS [c0] ORDER BY [c0].[CustomerID] OFFSET @__p_0 ROWS FETCH NEXT @__p_1 ROWS ONLY ) AS [t] ON [c.Orders].[CustomerID] = [t].[CustomerID] ORDER BY [t].[CustomerID]"); } } public override void Include_reference_and_collection(bool useString) { base.Include_reference_and_collection(useString); AssertSql( @"SELECT [o].[OrderID], [o].[CustomerID], [o].[EmployeeID], [o].[OrderDate], [o.Customer].[CustomerID], [o.Customer].[Address], [o.Customer].[City], [o.Customer].[CompanyName], [o.Customer].[ContactName], [o.Customer].[ContactTitle], [o.Customer].[Country], [o.Customer].[Fax], [o.Customer].[Phone], [o.Customer].[PostalCode], [o.Customer].[Region] FROM [Orders] AS [o] LEFT JOIN [Customers] AS [o.Customer] ON [o].[CustomerID] = [o.Customer].[CustomerID] ORDER BY [o].[OrderID]", // @"SELECT [o.OrderDetails].[OrderID], [o.OrderDetails].[ProductID], [o.OrderDetails].[Discount], [o.OrderDetails].[Quantity], [o.OrderDetails].[UnitPrice] FROM [Order Details] AS [o.OrderDetails] INNER JOIN ( SELECT DISTINCT [o0].[OrderID] FROM [Orders] AS [o0] LEFT JOIN [Customers] AS [o.Customer0] ON [o0].[CustomerID] = [o.Customer0].[CustomerID] ) AS 
[t] ON [o.OrderDetails].[OrderID] = [t].[OrderID] ORDER BY [t].[OrderID]"); } public override void Include_references_multi_level(bool useString) { base.Include_references_multi_level(useString); AssertSql( @"SELECT [o].[OrderID], [o].[ProductID], [o].[Discount], [o].[Quantity], [o].[UnitPrice], [o.Order].[OrderID], [o.Order].[CustomerID], [o.Order].[EmployeeID], [o.Order].[OrderDate], [o.Order.Customer].[CustomerID], [o.Order.Customer].[Address], [o.Order.Customer].[City], [o.Order.Customer].[CompanyName], [o.Order.Customer].[ContactName], [o.Order.Customer].[ContactTitle], [o.Order.Customer].[Country], [o.Order.Customer].[Fax], [o.Order.Customer].[Phone], [o.Order.Customer].[PostalCode], [o.Order.Customer].[Region] FROM [Order Details] AS [o] INNER JOIN [Orders] AS [o.Order] ON [o].[OrderID] = [o.Order].[OrderID] LEFT JOIN [Customers] AS [o.Order.Customer] ON [o.Order].[CustomerID] = [o.Order.Customer].[CustomerID]"); } public override void Include_multiple_references_multi_level(bool useString) { base.Include_multiple_references_multi_level(useString); AssertSql( @"SELECT [o].[OrderID], [o].[ProductID], [o].[Discount], [o].[Quantity], [o].[UnitPrice], [o.Product].[ProductID], [o.Product].[Discontinued], [o.Product].[ProductName], [o.Product].[SupplierID], [o.Product].[UnitPrice], [o.Product].[UnitsInStock], [o.Order].[OrderID], [o.Order].[CustomerID], [o.Order].[EmployeeID], [o.Order].[OrderDate], [o.Order.Customer].[CustomerID], [o.Order.Customer].[Address], [o.Order.Customer].[City], [o.Order.Customer].[CompanyName], [o.Order.Customer].[ContactName], [o.Order.Customer].[ContactTitle], [o.Order.Customer].[Country], [o.Order.Customer].[Fax], [o.Order.Customer].[Phone], [o.Order.Customer].[PostalCode], [o.Order.Customer].[Region] FROM [Order Details] AS [o] INNER JOIN [Products] AS [o.Product] ON [o].[ProductID] = [o.Product].[ProductID] INNER JOIN [Orders] AS [o.Order] ON [o].[OrderID] = [o.Order].[OrderID] LEFT JOIN [Customers] AS [o.Order.Customer] ON 
[o.Order].[CustomerID] = [o.Order.Customer].[CustomerID]"); } public override void Include_multiple_references_multi_level_reverse(bool useString) { base.Include_multiple_references_multi_level_reverse(useString); AssertSql( @"SELECT [o].[OrderID], [o].[ProductID], [o].[Discount], [o].[Quantity], [o].[UnitPrice], [o.Order].[OrderID], [o.Order].[CustomerID], [o.Order].[EmployeeID], [o.Order].[OrderDate], [o.Order.Customer].[CustomerID], [o.Order.Customer].[Address], [o.Order.Customer].[City], [o.Order.Customer].[CompanyName], [o.Order.Customer].[ContactName], [o.Order.Customer].[ContactTitle], [o.Order.Customer].[Country], [o.Order.Customer].[Fax], [o.Order.Customer].[Phone], [o.Order.Customer].[PostalCode], [o.Order.Customer].[Region], [o.Product].[ProductID], [o.Product].[Discontinued], [o.Product].[ProductName], [o.Product].[SupplierID], [o.Product].[UnitPrice], [o.Product].[UnitsInStock] FROM [Order Details] AS [o] INNER JOIN [Orders] AS [o.Order] ON [o].[OrderID] = [o.Order].[OrderID] LEFT JOIN [Customers] AS [o.Order.Customer] ON [o.Order].[CustomerID] = [o.Order.Customer].[CustomerID] INNER JOIN [Products] AS [o.Product] ON [o].[ProductID] = [o.Product].[ProductID]"); } public override void Include_references_and_collection_multi_level(bool useString) { base.Include_references_and_collection_multi_level(useString); AssertSql( @"SELECT [o].[OrderID], [o].[ProductID], [o].[Discount], [o].[Quantity], [o].[UnitPrice], [o.Order].[OrderID], [o.Order].[CustomerID], [o.Order].[EmployeeID], [o.Order].[OrderDate], [o.Order.Customer].[CustomerID], [o.Order.Customer].[Address], [o.Order.Customer].[City], [o.Order.Customer].[CompanyName], [o.Order.Customer].[ContactName], [o.Order.Customer].[ContactTitle], [o.Order.Customer].[Country], [o.Order.Customer].[Fax], [o.Order.Customer].[Phone], [o.Order.Customer].[PostalCode], [o.Order.Customer].[Region] FROM [Order Details] AS [o] INNER JOIN [Orders] AS [o.Order] ON [o].[OrderID] = [o.Order].[OrderID] LEFT JOIN [Customers] AS 
[o.Order.Customer] ON [o.Order].[CustomerID] = [o.Order.Customer].[CustomerID] ORDER BY [o.Order.Customer].[CustomerID]", // @"SELECT [o.Order.Customer.Orders].[OrderID], [o.Order.Customer.Orders].[CustomerID], [o.Order.Customer.Orders].[EmployeeID], [o.Order.Customer.Orders].[OrderDate] FROM [Orders] AS [o.Order.Customer.Orders] INNER JOIN ( SELECT DISTINCT [o.Order.Customer0].[CustomerID] FROM [Order Details] AS [o0] INNER JOIN [Orders] AS [o.Order0] ON [o0].[OrderID] = [o.Order0].[OrderID] LEFT JOIN [Customers] AS [o.Order.Customer0] ON [o.Order0].[CustomerID] = [o.Order.Customer0].[CustomerID] ) AS [t] ON [o.Order.Customer.Orders].[CustomerID] = [t].[CustomerID] ORDER BY [t].[CustomerID]"); } public override void Include_multi_level_reference_and_collection_predicate(bool useString) { base.Include_multi_level_reference_and_collection_predicate(useString); AssertSql( @"SELECT TOP(2) [o].[OrderID], [o].[CustomerID], [o].[EmployeeID], [o].[OrderDate], [o.Customer].[CustomerID], [o.Customer].[Address], [o.Customer].[City], [o.Customer].[CompanyName], [o.Customer].[ContactName], [o.Customer].[ContactTitle], [o.Customer].[Country], [o.Customer].[Fax], [o.Customer].[Phone], [o.Customer].[PostalCode], [o.Customer].[Region] FROM [Orders] AS [o] LEFT JOIN [Customers] AS [o.Customer] ON [o].[CustomerID] = [o.Customer].[CustomerID] WHERE [o].[OrderID] = 10248 ORDER BY [o.Customer].[CustomerID]", // @"SELECT [o.Customer.Orders].[OrderID], [o.Customer.Orders].[CustomerID], [o.Customer.Orders].[EmployeeID], [o.Customer.Orders].[OrderDate] FROM [Orders] AS [o.Customer.Orders] INNER JOIN ( SELECT DISTINCT [t].* FROM ( SELECT TOP(1) [o.Customer0].[CustomerID] FROM [Orders] AS [o0] LEFT JOIN [Customers] AS [o.Customer0] ON [o0].[CustomerID] = [o.Customer0].[CustomerID] WHERE [o0].[OrderID] = 10248 ORDER BY [o.Customer0].[CustomerID] ) AS [t] ) AS [t0] ON [o.Customer.Orders].[CustomerID] = [t0].[CustomerID] ORDER BY [t0].[CustomerID]"); } public override void 
Include_multi_level_collection_and_then_include_reference_predicate(bool useString) { base.Include_multi_level_collection_and_then_include_reference_predicate(useString); AssertSql( @"SELECT TOP(2) [o].[OrderID], [o].[CustomerID], [o].[EmployeeID], [o].[OrderDate] FROM [Orders] AS [o] WHERE [o].[OrderID] = 10248 ORDER BY [o].[OrderID]", // @"SELECT [o.OrderDetails].[OrderID], [o.OrderDetails].[ProductID], [o.OrderDetails].[Discount], [o.OrderDetails].[Quantity], [o.OrderDetails].[UnitPrice], [o.Product].[ProductID], [o.Product].[Discontinued], [o.Product].[ProductName], [o.Product].[SupplierID], [o.Product].[UnitPrice], [o.Product].[UnitsInStock] FROM [Order Details] AS [o.OrderDetails] INNER JOIN [Products] AS [o.Product] ON [o.OrderDetails].[ProductID] = [o.Product].[ProductID] INNER JOIN ( SELECT TOP(1) [o0].[OrderID] FROM [Orders] AS [o0] WHERE [o0].[OrderID] = 10248 ORDER BY [o0].[OrderID] ) AS [t] ON [o.OrderDetails].[OrderID] = [t].[OrderID] ORDER BY [t].[OrderID]"); } public override void Include_collection_alias_generation(bool useString) { base.Include_collection_alias_generation(useString); AssertSql( @"SELECT [o].[OrderID], [o].[CustomerID], [o].[EmployeeID], [o].[OrderDate] FROM [Orders] AS [o] ORDER BY [o].[OrderID]", // @"SELECT [o.OrderDetails].[OrderID], [o.OrderDetails].[ProductID], [o.OrderDetails].[Discount], [o.OrderDetails].[Quantity], [o.OrderDetails].[UnitPrice] FROM [Order Details] AS [o.OrderDetails] INNER JOIN ( SELECT [o0].[OrderID] FROM [Orders] AS [o0] ) AS [t] ON [o.OrderDetails].[OrderID] = [t].[OrderID] ORDER BY [t].[OrderID]"); } [Theory(Skip = "SQLCE limitation")] public override void Include_collection_order_by_collection_column(bool useString) { base.Include_collection_order_by_collection_column(useString); AssertSql( @"SELECT TOP(1) [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] 
WHERE [c].[CustomerID] LIKE N'W' + N'%' AND (LEFT([c].[CustomerID], LEN(N'W')) = N'W') ORDER BY ( SELECT TOP(1) [oo].[OrderDate] FROM [Orders] AS [oo] WHERE [c].[CustomerID] = [oo].[CustomerID] ORDER BY [oo].[OrderDate] DESC ) DESC, [c].[CustomerID]", // @"SELECT [c.Orders].[OrderID], [c.Orders].[CustomerID], [c.Orders].[EmployeeID], [c.Orders].[OrderDate] FROM [Orders] AS [c.Orders] INNER JOIN ( SELECT TOP(1) [c0].[CustomerID], ( SELECT TOP(1) [oo1].[OrderDate] FROM [Orders] AS [oo1] WHERE [c0].[CustomerID] = [oo1].[CustomerID] ORDER BY [oo1].[OrderDate] DESC ) AS [c] FROM [Customers] AS [c0] WHERE [c0].[CustomerID] LIKE N'W' + N'%' AND (LEFT([c0].[CustomerID], LEN(N'W')) = N'W') ORDER BY ( SELECT TOP(1) [oo0].[OrderDate] FROM [Orders] AS [oo0] WHERE [c0].[CustomerID] = [oo0].[CustomerID] ORDER BY [oo0].[OrderDate] DESC ) DESC, [c0].[CustomerID] ) AS [t] ON [c.Orders].[CustomerID] = [t].[CustomerID] ORDER BY [t].[c] DESC, [t].[CustomerID]"); } public override void Include_collection_order_by_key(bool useString) { base.Include_collection_order_by_key(useString); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] ORDER BY [c].[CustomerID]", // @"SELECT [c.Orders].[OrderID], [c.Orders].[CustomerID], [c.Orders].[EmployeeID], [c.Orders].[OrderDate] FROM [Orders] AS [c.Orders] INNER JOIN ( SELECT [c0].[CustomerID] FROM [Customers] AS [c0] ) AS [t] ON [c.Orders].[CustomerID] = [t].[CustomerID] ORDER BY [t].[CustomerID]"); } public override void Include_collection_order_by_non_key(bool useString) { base.Include_collection_order_by_non_key(useString); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] ORDER BY [c].[City], [c].[CustomerID]", // 
@"SELECT [c.Orders].[OrderID], [c.Orders].[CustomerID], [c.Orders].[EmployeeID], [c.Orders].[OrderDate] FROM [Orders] AS [c.Orders] INNER JOIN ( SELECT [c0].[CustomerID], [c0].[City] FROM [Customers] AS [c0] ) AS [t] ON [c.Orders].[CustomerID] = [t].[CustomerID] ORDER BY [t].[City], [t].[CustomerID]"); } public override void Include_collection_order_by_non_key_with_take(bool useString) { base.Include_collection_order_by_non_key_with_take(useString); AssertSql( @"@__p_0='10' SELECT TOP(@__p_0) [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] ORDER BY [c].[ContactTitle], [c].[CustomerID]", // @"@__p_0='10' SELECT [c.Orders].[OrderID], [c.Orders].[CustomerID], [c.Orders].[EmployeeID], [c.Orders].[OrderDate] FROM [Orders] AS [c.Orders] INNER JOIN ( SELECT TOP(@__p_0) [c0].[CustomerID], [c0].[ContactTitle] FROM [Customers] AS [c0] ORDER BY [c0].[ContactTitle], [c0].[CustomerID] ) AS [t] ON [c.Orders].[CustomerID] = [t].[CustomerID] ORDER BY [t].[ContactTitle], [t].[CustomerID]"); } public override void Include_collection_order_by_non_key_with_skip(bool useString) { base.Include_collection_order_by_non_key_with_skip(useString); if (SupportsOffset) { AssertSql( @"@__p_0='10' SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] ORDER BY [c].[ContactTitle], [c].[CustomerID] OFFSET @__p_0 ROWS", // @"@__p_0='10' SELECT [c.Orders].[OrderID], [c.Orders].[CustomerID], [c.Orders].[EmployeeID], [c.Orders].[OrderDate] FROM [Orders] AS [c.Orders] INNER JOIN ( SELECT [c0].[CustomerID], [c0].[ContactTitle] FROM [Customers] AS [c0] ORDER BY [c0].[ContactTitle], [c0].[CustomerID] OFFSET @__p_0 ROWS ) AS [t] ON [c.Orders].[CustomerID] = [t].[CustomerID] ORDER BY [t].[ContactTitle], 
[t].[CustomerID]"); } } public override void Include_collection_order_by_non_key_with_first_or_default(bool useString) { base.Include_collection_order_by_non_key_with_first_or_default(useString); AssertSql( @"SELECT TOP(1) [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] ORDER BY [c].[CompanyName] DESC, [c].[CustomerID]", // @"SELECT [c.Orders].[OrderID], [c.Orders].[CustomerID], [c.Orders].[EmployeeID], [c.Orders].[OrderDate] FROM [Orders] AS [c.Orders] INNER JOIN ( SELECT TOP(1) [c0].[CustomerID], [c0].[CompanyName] FROM [Customers] AS [c0] ORDER BY [c0].[CompanyName] DESC, [c0].[CustomerID] ) AS [t] ON [c.Orders].[CustomerID] = [t].[CustomerID] ORDER BY [t].[CompanyName] DESC, [t].[CustomerID]"); } [Theory(Skip = "SQLCE limitation")] public override void Include_collection_order_by_subquery(bool useString) { base.Include_collection_order_by_subquery(useString); AssertSql( @"SELECT TOP(1) [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[CustomerID] = N'ALFKI' ORDER BY ( SELECT TOP(1) [o].[OrderDate] FROM [Orders] AS [o] WHERE [c].[CustomerID] = [o].[CustomerID] ORDER BY [o].[EmployeeID] ), [c].[CustomerID]", // @"SELECT [c.Orders].[OrderID], [c.Orders].[CustomerID], [c.Orders].[EmployeeID], [c.Orders].[OrderDate] FROM [Orders] AS [c.Orders] INNER JOIN ( SELECT TOP(1) [c0].[CustomerID], ( SELECT TOP(1) [o1].[OrderDate] FROM [Orders] AS [o1] WHERE [c0].[CustomerID] = [o1].[CustomerID] ORDER BY [o1].[EmployeeID] ) AS [c] FROM [Customers] AS [c0] WHERE [c0].[CustomerID] = N'ALFKI' ORDER BY ( SELECT TOP(1) [o0].[OrderDate] FROM [Orders] AS [o0] WHERE [c0].[CustomerID] = [o0].[CustomerID] ORDER BY [o0].[EmployeeID] ), [c0].[CustomerID] ) AS [t] ON 
[c.Orders].[CustomerID] = [t].[CustomerID] ORDER BY [t].[c], [t].[CustomerID]"); } public override void Include_collection_as_no_tracking(bool useString) { base.Include_collection_as_no_tracking(useString); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] ORDER BY [c].[CustomerID]", // @"SELECT [c.Orders].[OrderID], [c.Orders].[CustomerID], [c.Orders].[EmployeeID], [c.Orders].[OrderDate] FROM [Orders] AS [c.Orders] INNER JOIN ( SELECT [c0].[CustomerID] FROM [Customers] AS [c0] ) AS [t] ON [c.Orders].[CustomerID] = [t].[CustomerID] ORDER BY [t].[CustomerID]"); } public override void Include_collection_principal_already_tracked(bool useString) { base.Include_collection_principal_already_tracked(useString); AssertSql( @"SELECT TOP(2) [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[CustomerID] = N'ALFKI'", // @"SELECT TOP(2) [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[CustomerID] = N'ALFKI' ORDER BY [c].[CustomerID]", // @"SELECT [c.Orders].[OrderID], [c.Orders].[CustomerID], [c.Orders].[EmployeeID], [c.Orders].[OrderDate] FROM [Orders] AS [c.Orders] INNER JOIN ( SELECT TOP(1) [c0].[CustomerID] FROM [Customers] AS [c0] WHERE [c0].[CustomerID] = N'ALFKI' ORDER BY [c0].[CustomerID] ) AS [t] ON [c.Orders].[CustomerID] = [t].[CustomerID] ORDER BY [t].[CustomerID]"); } public override void Include_collection_principal_already_tracked_as_no_tracking(bool useString) { base.Include_collection_principal_already_tracked_as_no_tracking(useString); AssertSql( @"SELECT TOP(2) [c].[CustomerID], 
[c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[CustomerID] = N'ALFKI'", // @"SELECT TOP(2) [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[CustomerID] = N'ALFKI' ORDER BY [c].[CustomerID]", // @"SELECT [c.Orders].[OrderID], [c.Orders].[CustomerID], [c.Orders].[EmployeeID], [c.Orders].[OrderDate] FROM [Orders] AS [c.Orders] INNER JOIN ( SELECT TOP(1) [c0].[CustomerID] FROM [Customers] AS [c0] WHERE [c0].[CustomerID] = N'ALFKI' ORDER BY [c0].[CustomerID] ) AS [t] ON [c.Orders].[CustomerID] = [t].[CustomerID] ORDER BY [t].[CustomerID]"); } public override void Include_collection_with_filter(bool useString) { base.Include_collection_with_filter(useString); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[CustomerID] = N'ALFKI' ORDER BY [c].[CustomerID]", // @"SELECT [c.Orders].[OrderID], [c.Orders].[CustomerID], [c.Orders].[EmployeeID], [c.Orders].[OrderDate] FROM [Orders] AS [c.Orders] INNER JOIN ( SELECT [c0].[CustomerID] FROM [Customers] AS [c0] WHERE [c0].[CustomerID] = N'ALFKI' ) AS [t] ON [c.Orders].[CustomerID] = [t].[CustomerID] ORDER BY [t].[CustomerID]"); } public override void Include_collection_with_filter_reordered(bool useString) { base.Include_collection_with_filter_reordered(useString); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[CustomerID] = N'ALFKI' ORDER BY [c].[CustomerID]", // @"SELECT 
[c.Orders].[OrderID], [c.Orders].[CustomerID], [c.Orders].[EmployeeID], [c.Orders].[OrderDate] FROM [Orders] AS [c.Orders] INNER JOIN ( SELECT [c0].[CustomerID] FROM [Customers] AS [c0] WHERE [c0].[CustomerID] = N'ALFKI' ) AS [t] ON [c.Orders].[CustomerID] = [t].[CustomerID] ORDER BY [t].[CustomerID]"); } public override void Include_collection_then_include_collection(bool useString) { base.Include_collection_then_include_collection(useString); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] ORDER BY [c].[CustomerID]", // @"SELECT [c.Orders].[OrderID], [c.Orders].[CustomerID], [c.Orders].[EmployeeID], [c.Orders].[OrderDate] FROM [Orders] AS [c.Orders] INNER JOIN ( SELECT [c0].[CustomerID] FROM [Customers] AS [c0] ) AS [t] ON [c.Orders].[CustomerID] = [t].[CustomerID] ORDER BY [t].[CustomerID], [c.Orders].[OrderID]", // @"SELECT [c.Orders.OrderDetails].[OrderID], [c.Orders.OrderDetails].[ProductID], [c.Orders.OrderDetails].[Discount], [c.Orders.OrderDetails].[Quantity], [c.Orders.OrderDetails].[UnitPrice] FROM [Order Details] AS [c.Orders.OrderDetails] INNER JOIN ( SELECT DISTINCT [c.Orders0].[OrderID], [t0].[CustomerID] FROM [Orders] AS [c.Orders0] INNER JOIN ( SELECT [c1].[CustomerID] FROM [Customers] AS [c1] ) AS [t0] ON [c.Orders0].[CustomerID] = [t0].[CustomerID] ) AS [t1] ON [c.Orders.OrderDetails].[OrderID] = [t1].[OrderID] ORDER BY [t1].[CustomerID], [t1].[OrderID]"); } public override void Include_collection_when_projection(bool useString) { base.Include_collection_when_projection(useString); AssertSql( @"SELECT [c].[CustomerID] FROM [Customers] AS [c]"); } public override void Include_collection_on_join_clause_with_filter(bool useString) { base.Include_collection_on_join_clause_with_filter(useString); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], 
[c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] INNER JOIN [Orders] AS [o] ON [c].[CustomerID] = [o].[CustomerID] WHERE [c].[CustomerID] = N'ALFKI' ORDER BY [c].[CustomerID]", // @"SELECT [c.Orders].[OrderID], [c.Orders].[CustomerID], [c.Orders].[EmployeeID], [c.Orders].[OrderDate] FROM [Orders] AS [c.Orders] INNER JOIN ( SELECT DISTINCT [c0].[CustomerID] FROM [Customers] AS [c0] INNER JOIN [Orders] AS [o0] ON [c0].[CustomerID] = [o0].[CustomerID] WHERE [c0].[CustomerID] = N'ALFKI' ) AS [t] ON [c.Orders].[CustomerID] = [t].[CustomerID] ORDER BY [t].[CustomerID]"); } public override void Include_collection_on_additional_from_clause_with_filter(bool useString) { base.Include_collection_on_additional_from_clause_with_filter(useString); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c1] CROSS JOIN [Customers] AS [c] WHERE [c].[CustomerID] = N'ALFKI' ORDER BY [c].[CustomerID]", // @"SELECT [c.Orders].[OrderID], [c.Orders].[CustomerID], [c.Orders].[EmployeeID], [c.Orders].[OrderDate] FROM [Orders] AS [c.Orders] INNER JOIN ( SELECT DISTINCT [c0].[CustomerID] FROM [Customers] AS [c10] CROSS JOIN [Customers] AS [c0] WHERE [c0].[CustomerID] = N'ALFKI' ) AS [t] ON [c.Orders].[CustomerID] = [t].[CustomerID] ORDER BY [t].[CustomerID]"); } public override void Include_collection_on_additional_from_clause(bool useString) { base.Include_collection_on_additional_from_clause(useString); AssertSql( @"@__p_0='5' SELECT [c2].[CustomerID], [c2].[Address], [c2].[City], [c2].[CompanyName], [c2].[ContactName], [c2].[ContactTitle], [c2].[Country], [c2].[Fax], [c2].[Phone], [c2].[PostalCode], [c2].[Region] FROM ( SELECT TOP(@__p_0) [c].* FROM [Customers] AS [c] ORDER BY [c].[CustomerID] ) AS [t] CROSS JOIN [Customers] AS [c2] ORDER BY 
[c2].[CustomerID]", // @"@__p_0='5' SELECT [c2.Orders].[OrderID], [c2.Orders].[CustomerID], [c2.Orders].[EmployeeID], [c2.Orders].[OrderDate] FROM [Orders] AS [c2.Orders] INNER JOIN ( SELECT DISTINCT [c20].[CustomerID] FROM ( SELECT TOP(@__p_0) [c0].* FROM [Customers] AS [c0] ORDER BY [c0].[CustomerID] ) AS [t0] CROSS JOIN [Customers] AS [c20] ) AS [t1] ON [c2.Orders].[CustomerID] = [t1].[CustomerID] ORDER BY [t1].[CustomerID]"); } public override void Include_duplicate_collection(bool useString) { base.Include_duplicate_collection(useString); if (SupportsOffset) { AssertSql( @"@__p_0='2' SELECT [t].[CustomerID], [t].[Address], [t].[City], [t].[CompanyName], [t].[ContactName], [t].[ContactTitle], [t].[Country], [t].[Fax], [t].[Phone], [t].[PostalCode], [t].[Region], [t0].[CustomerID], [t0].[Address], [t0].[City], [t0].[CompanyName], [t0].[ContactName], [t0].[ContactTitle], [t0].[Country], [t0].[Fax], [t0].[Phone], [t0].[PostalCode], [t0].[Region] FROM ( SELECT TOP(@__p_0) [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] ORDER BY [c].[CustomerID] ) AS [t] CROSS JOIN ( SELECT [c0].[CustomerID], [c0].[Address], [c0].[City], [c0].[CompanyName], [c0].[ContactName], [c0].[ContactTitle], [c0].[Country], [c0].[Fax], [c0].[Phone], [c0].[PostalCode], [c0].[Region] FROM [Customers] AS [c0] ORDER BY [c0].[CustomerID] OFFSET 2 ROWS FETCH NEXT 2 ROWS ONLY ) AS [t0] ORDER BY [t].[CustomerID], [t0].[CustomerID]", // @"@__p_0='2' SELECT [c1.Orders].[OrderID], [c1.Orders].[CustomerID], [c1.Orders].[EmployeeID], [c1.Orders].[OrderDate] FROM [Orders] AS [c1.Orders] INNER JOIN ( SELECT DISTINCT [t1].[CustomerID] FROM ( SELECT TOP(@__p_0) [c1].* FROM [Customers] AS [c1] ORDER BY [c1].[CustomerID] ) AS [t1] CROSS JOIN ( SELECT [c2].* FROM [Customers] AS [c2] ORDER BY [c2].[CustomerID] OFFSET 2 ROWS FETCH NEXT 2 ROWS ONLY ) AS [t2] ) AS [t3] 
ON [c1.Orders].[CustomerID] = [t3].[CustomerID] ORDER BY [t3].[CustomerID]", // @"@__p_0='2' SELECT [c2.Orders].[OrderID], [c2.Orders].[CustomerID], [c2.Orders].[EmployeeID], [c2.Orders].[OrderDate] FROM [Orders] AS [c2.Orders] INNER JOIN ( SELECT DISTINCT [t5].[CustomerID], [t4].[CustomerID] AS [CustomerID0] FROM ( SELECT TOP(@__p_0) [c3].* FROM [Customers] AS [c3] ORDER BY [c3].[CustomerID] ) AS [t4] CROSS JOIN ( SELECT [c4].* FROM [Customers] AS [c4] ORDER BY [c4].[CustomerID] OFFSET 2 ROWS FETCH NEXT 2 ROWS ONLY ) AS [t5] ) AS [t6] ON [c2.Orders].[CustomerID] = [t6].[CustomerID] ORDER BY [t6].[CustomerID0], [t6].[CustomerID]"); } } public override void Include_duplicate_collection_result_operator(bool useString) { base.Include_duplicate_collection_result_operator(useString); if (SupportsOffset) { AssertSql( @"@__p_1='1' @__p_0='2' SELECT TOP(@__p_1) [t].[CustomerID], [t].[Address], [t].[City], [t].[CompanyName], [t].[ContactName], [t].[ContactTitle], [t].[Country], [t].[Fax], [t].[Phone], [t].[PostalCode], [t].[Region], [t0].[CustomerID], [t0].[Address], [t0].[City], [t0].[CompanyName], [t0].[ContactName], [t0].[ContactTitle], [t0].[Country], [t0].[Fax], [t0].[Phone], [t0].[PostalCode], [t0].[Region] FROM ( SELECT TOP(@__p_0) [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] ORDER BY [c].[CustomerID] ) AS [t] CROSS JOIN ( SELECT [c0].[CustomerID], [c0].[Address], [c0].[City], [c0].[CompanyName], [c0].[ContactName], [c0].[ContactTitle], [c0].[Country], [c0].[Fax], [c0].[Phone], [c0].[PostalCode], [c0].[Region] FROM [Customers] AS [c0] ORDER BY [c0].[CustomerID] OFFSET 2 ROWS FETCH NEXT 2 ROWS ONLY ) AS [t0] ORDER BY [t].[CustomerID], [t0].[CustomerID]", // @"@__p_1='1' @__p_0='2' SELECT [c1.Orders].[OrderID], [c1.Orders].[CustomerID], [c1.Orders].[EmployeeID], [c1.Orders].[OrderDate] FROM [Orders] AS [c1.Orders] INNER 
JOIN ( SELECT DISTINCT [t3].* FROM ( SELECT TOP(@__p_1) [t1].[CustomerID] FROM ( SELECT TOP(@__p_0) [c1].* FROM [Customers] AS [c1] ORDER BY [c1].[CustomerID] ) AS [t1] CROSS JOIN ( SELECT [c2].* FROM [Customers] AS [c2] ORDER BY [c2].[CustomerID] OFFSET 2 ROWS FETCH NEXT 2 ROWS ONLY ) AS [t2] ORDER BY [t1].[CustomerID] ) AS [t3] ) AS [t4] ON [c1.Orders].[CustomerID] = [t4].[CustomerID] ORDER BY [t4].[CustomerID]", // @"@__p_1='1' @__p_0='2' SELECT [c2.Orders].[OrderID], [c2.Orders].[CustomerID], [c2.Orders].[EmployeeID], [c2.Orders].[OrderDate] FROM [Orders] AS [c2.Orders] INNER JOIN ( SELECT DISTINCT [t7].* FROM ( SELECT TOP(@__p_1) [t6].[CustomerID], [t5].[CustomerID] AS [CustomerID0] FROM ( SELECT TOP(@__p_0) [c3].* FROM [Customers] AS [c3] ORDER BY [c3].[CustomerID] ) AS [t5] CROSS JOIN ( SELECT [c4].* FROM [Customers] AS [c4] ORDER BY [c4].[CustomerID] OFFSET 2 ROWS FETCH NEXT 2 ROWS ONLY ) AS [t6] ORDER BY [t5].[CustomerID], [t6].[CustomerID] ) AS [t7] ) AS [t8] ON [c2.Orders].[CustomerID] = [t8].[CustomerID] ORDER BY [t8].[CustomerID0], [t8].[CustomerID]"); } } public override void Include_collection_on_join_clause_with_order_by_and_filter(bool useString) { base.Include_collection_on_join_clause_with_order_by_and_filter(useString); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] INNER JOIN [Orders] AS [o] ON [c].[CustomerID] = [o].[CustomerID] WHERE [c].[CustomerID] = N'ALFKI' ORDER BY [c].[City], [c].[CustomerID]", // @"SELECT [c.Orders].[OrderID], [c.Orders].[CustomerID], [c.Orders].[EmployeeID], [c.Orders].[OrderDate] FROM [Orders] AS [c.Orders] INNER JOIN ( SELECT DISTINCT [c0].[CustomerID], [c0].[City] FROM [Customers] AS [c0] INNER JOIN [Orders] AS [o0] ON [c0].[CustomerID] = [o0].[CustomerID] WHERE [c0].[CustomerID] = N'ALFKI' ) AS [t] ON [c.Orders].[CustomerID] = [t].[CustomerID] 
ORDER BY [t].[City], [t].[CustomerID]"); } public override void Include_collection_when_groupby(bool useString) { base.Include_collection_when_groupby(useString); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[CustomerID] = N'ALFKI' ORDER BY [c].[City], [c].[CustomerID]", // @"SELECT [c.Orders].[OrderID], [c.Orders].[CustomerID], [c.Orders].[EmployeeID], [c.Orders].[OrderDate] FROM [Orders] AS [c.Orders] INNER JOIN ( SELECT [c0].[CustomerID], [c0].[City] FROM [Customers] AS [c0] WHERE [c0].[CustomerID] = N'ALFKI' ) AS [t] ON [c.Orders].[CustomerID] = [t].[CustomerID] ORDER BY [t].[City], [t].[CustomerID]"); } public override void Include_collection_on_additional_from_clause2(bool useString) { base.Include_collection_on_additional_from_clause2(useString); AssertSql( @"@__p_0='5' SELECT [t].[CustomerID], [t].[Address], [t].[City], [t].[CompanyName], [t].[ContactName], [t].[ContactTitle], [t].[Country], [t].[Fax], [t].[Phone], [t].[PostalCode], [t].[Region] FROM ( SELECT TOP(@__p_0) [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] ORDER BY [c].[CustomerID] ) AS [t] CROSS JOIN [Customers] AS [c2]"); } public override void Include_where_skip_take_projection(bool useString) { base.Include_where_skip_take_projection(useString); if (SupportsOffset) { AssertSql( @"@__p_0='1' @__p_1='2' SELECT [od.Order].[CustomerID] FROM [Order Details] AS [od] INNER JOIN [Orders] AS [od.Order] ON [od].[OrderID] = [od.Order].[OrderID] WHERE [od].[Quantity] = CAST(10 AS smallint) ORDER BY [od].[OrderID], [od].[ProductID] OFFSET @__p_0 ROWS FETCH NEXT @__p_1 ROWS ONLY"); } } public override void Include_duplicate_collection_result_operator2(bool useString) { 
base.Include_duplicate_collection_result_operator2(useString); if (SupportsOffset) { AssertSql( @"@__p_1='1' @__p_0='2' SELECT TOP(@__p_1) [t].[CustomerID], [t].[Address], [t].[City], [t].[CompanyName], [t].[ContactName], [t].[ContactTitle], [t].[Country], [t].[Fax], [t].[Phone], [t].[PostalCode], [t].[Region], [t0].[CustomerID], [t0].[Address], [t0].[City], [t0].[CompanyName], [t0].[ContactName], [t0].[ContactTitle], [t0].[Country], [t0].[Fax], [t0].[Phone], [t0].[PostalCode], [t0].[Region] FROM ( SELECT TOP(@__p_0) [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] ORDER BY [c].[CustomerID] ) AS [t] CROSS JOIN ( SELECT [c0].[CustomerID], [c0].[Address], [c0].[City], [c0].[CompanyName], [c0].[ContactName], [c0].[ContactTitle], [c0].[Country], [c0].[Fax], [c0].[Phone], [c0].[PostalCode], [c0].[Region] FROM [Customers] AS [c0] ORDER BY [c0].[CustomerID] OFFSET 2 ROWS FETCH NEXT 2 ROWS ONLY ) AS [t0] ORDER BY [t].[CustomerID]", // @"@__p_1='1' @__p_0='2' SELECT [c1.Orders].[OrderID], [c1.Orders].[CustomerID], [c1.Orders].[EmployeeID], [c1.Orders].[OrderDate] FROM [Orders] AS [c1.Orders] INNER JOIN ( SELECT DISTINCT [t3].* FROM ( SELECT TOP(@__p_1) [t1].[CustomerID] FROM ( SELECT TOP(@__p_0) [c1].* FROM [Customers] AS [c1] ORDER BY [c1].[CustomerID] ) AS [t1] CROSS JOIN ( SELECT [c2].* FROM [Customers] AS [c2] ORDER BY [c2].[CustomerID] OFFSET 2 ROWS FETCH NEXT 2 ROWS ONLY ) AS [t2] ORDER BY [t1].[CustomerID] ) AS [t3] ) AS [t4] ON [c1.Orders].[CustomerID] = [t4].[CustomerID] ORDER BY [t4].[CustomerID]"); } } public override void Include_multiple_references(bool useString) { base.Include_multiple_references(useString); AssertSql( @"SELECT [o].[OrderID], [o].[ProductID], [o].[Discount], [o].[Quantity], [o].[UnitPrice], [o.Product].[ProductID], [o.Product].[Discontinued], [o.Product].[ProductName], [o.Product].[SupplierID], 
[o.Product].[UnitPrice], [o.Product].[UnitsInStock], [o.Order].[OrderID], [o.Order].[CustomerID], [o.Order].[EmployeeID], [o.Order].[OrderDate] FROM [Order Details] AS [o] INNER JOIN [Products] AS [o.Product] ON [o].[ProductID] = [o.Product].[ProductID] INNER JOIN [Orders] AS [o.Order] ON [o].[OrderID] = [o.Order].[OrderID]"); } public override void Include_reference_alias_generation(bool useString) { base.Include_reference_alias_generation(useString); AssertSql( @"SELECT [o].[OrderID], [o].[ProductID], [o].[Discount], [o].[Quantity], [o].[UnitPrice], [o.Order].[OrderID], [o.Order].[CustomerID], [o.Order].[EmployeeID], [o.Order].[OrderDate] FROM [Order Details] AS [o] INNER JOIN [Orders] AS [o.Order] ON [o].[OrderID] = [o.Order].[OrderID]"); } public override void Include_duplicate_reference(bool useString) { base.Include_duplicate_reference(useString); if (SupportsOffset) { AssertSql( @"@__p_0='2' SELECT [t].[OrderID], [t].[CustomerID], [t].[EmployeeID], [t].[OrderDate], [o1.Customer].[CustomerID], [o1.Customer].[Address], [o1.Customer].[City], [o1.Customer].[CompanyName], [o1.Customer].[ContactName], [o1.Customer].[ContactTitle], [o1.Customer].[Country], [o1.Customer].[Fax], [o1.Customer].[Phone], [o1.Customer].[PostalCode], [o1.Customer].[Region], [t0].[OrderID], [t0].[CustomerID], [t0].[EmployeeID], [t0].[OrderDate], [o2.Customer].[CustomerID], [o2.Customer].[Address], [o2.Customer].[City], [o2.Customer].[CompanyName], [o2.Customer].[ContactName], [o2.Customer].[ContactTitle], [o2.Customer].[Country], [o2.Customer].[Fax], [o2.Customer].[Phone], [o2.Customer].[PostalCode], [o2.Customer].[Region] FROM ( SELECT TOP(@__p_0) [o].[OrderID], [o].[CustomerID], [o].[EmployeeID], [o].[OrderDate] FROM [Orders] AS [o] ORDER BY [o].[CustomerID] ) AS [t] LEFT JOIN [Customers] AS [o1.Customer] ON [t].[CustomerID] = [o1.Customer].[CustomerID] CROSS JOIN ( SELECT [o0].[OrderID], [o0].[CustomerID], [o0].[EmployeeID], [o0].[OrderDate] FROM [Orders] AS [o0] ORDER BY 
[o0].[CustomerID] OFFSET 2 ROWS FETCH NEXT 2 ROWS ONLY ) AS [t0] LEFT JOIN [Customers] AS [o2.Customer] ON [t0].[CustomerID] = [o2.Customer].[CustomerID]"); } } public override void Include_duplicate_reference2(bool useString) { base.Include_duplicate_reference2(useString); if (SupportsOffset) { AssertSql( @"@__p_0='2' SELECT [t].[OrderID], [t].[CustomerID], [t].[EmployeeID], [t].[OrderDate], [o1.Customer].[CustomerID], [o1.Customer].[Address], [o1.Customer].[City], [o1.Customer].[CompanyName], [o1.Customer].[ContactName], [o1.Customer].[ContactTitle], [o1.Customer].[Country], [o1.Customer].[Fax], [o1.Customer].[Phone], [o1.Customer].[PostalCode], [o1.Customer].[Region], [t0].[OrderID], [t0].[CustomerID], [t0].[EmployeeID], [t0].[OrderDate] FROM ( SELECT TOP(@__p_0) [o].[OrderID], [o].[CustomerID], [o].[EmployeeID], [o].[OrderDate] FROM [Orders] AS [o] ORDER BY [o].[OrderID] ) AS [t] LEFT JOIN [Customers] AS [o1.Customer] ON [t].[CustomerID] = [o1.Customer].[CustomerID] CROSS JOIN ( SELECT [o0].[OrderID], [o0].[CustomerID], [o0].[EmployeeID], [o0].[OrderDate] FROM [Orders] AS [o0] ORDER BY [o0].[OrderID] OFFSET 2 ROWS FETCH NEXT 2 ROWS ONLY ) AS [t0]"); } } public override void Include_duplicate_reference3(bool useString) { base.Include_duplicate_reference3(useString); if (SupportsOffset) { AssertSql( @"@__p_0='2' SELECT [t].[OrderID], [t].[CustomerID], [t].[EmployeeID], [t].[OrderDate], [t0].[OrderID], [t0].[CustomerID], [t0].[EmployeeID], [t0].[OrderDate], [o2.Customer].[CustomerID], [o2.Customer].[Address], [o2.Customer].[City], [o2.Customer].[CompanyName], [o2.Customer].[ContactName], [o2.Customer].[ContactTitle], [o2.Customer].[Country], [o2.Customer].[Fax], [o2.Customer].[Phone], [o2.Customer].[PostalCode], [o2.Customer].[Region] FROM ( SELECT TOP(@__p_0) [o].[OrderID], [o].[CustomerID], [o].[EmployeeID], [o].[OrderDate] FROM [Orders] AS [o] ORDER BY [o].[OrderID] ) AS [t] CROSS JOIN ( SELECT [o0].[OrderID], [o0].[CustomerID], [o0].[EmployeeID], 
[o0].[OrderDate] FROM [Orders] AS [o0] ORDER BY [o0].[OrderID] OFFSET 2 ROWS FETCH NEXT 2 ROWS ONLY ) AS [t0] LEFT JOIN [Customers] AS [o2.Customer] ON [t0].[CustomerID] = [o2.Customer].[CustomerID]"); } } public override void Include_reference_when_projection(bool useString) { base.Include_reference_when_projection(useString); AssertSql( @"SELECT [o].[CustomerID] FROM [Orders] AS [o]"); } public override void Include_reference_with_filter_reordered(bool useString) { base.Include_reference_with_filter_reordered(useString); AssertSql( @"SELECT [o].[OrderID], [o].[CustomerID], [o].[EmployeeID], [o].[OrderDate], [o.Customer].[CustomerID], [o.Customer].[Address], [o.Customer].[City], [o.Customer].[CompanyName], [o.Customer].[ContactName], [o.Customer].[ContactTitle], [o.Customer].[Country], [o.Customer].[Fax], [o.Customer].[Phone], [o.Customer].[PostalCode], [o.Customer].[Region] FROM [Orders] AS [o] LEFT JOIN [Customers] AS [o.Customer] ON [o].[CustomerID] = [o.Customer].[CustomerID] WHERE [o].[CustomerID] = N'ALFKI'"); } public override void Include_reference_with_filter(bool useString) { base.Include_reference_with_filter(useString); AssertSql( @"SELECT [o].[OrderID], [o].[CustomerID], [o].[EmployeeID], [o].[OrderDate], [o.Customer].[CustomerID], [o.Customer].[Address], [o.Customer].[City], [o.Customer].[CompanyName], [o.Customer].[ContactName], [o.Customer].[ContactTitle], [o.Customer].[Country], [o.Customer].[Fax], [o.Customer].[Phone], [o.Customer].[PostalCode], [o.Customer].[Region] FROM [Orders] AS [o] LEFT JOIN [Customers] AS [o.Customer] ON [o].[CustomerID] = [o.Customer].[CustomerID] WHERE [o].[CustomerID] = N'ALFKI'"); } public override void Include_collection_dependent_already_tracked_as_no_tracking(bool useString) { base.Include_collection_dependent_already_tracked_as_no_tracking(useString); AssertSql( @"SELECT [o].[OrderID], [o].[CustomerID], [o].[EmployeeID], [o].[OrderDate] FROM [Orders] AS [o] WHERE [o].[CustomerID] = N'ALFKI'", // @"SELECT TOP(2) 
[c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[CustomerID] = N'ALFKI' ORDER BY [c].[CustomerID]", // @"SELECT [c.Orders].[OrderID], [c.Orders].[CustomerID], [c.Orders].[EmployeeID], [c.Orders].[OrderDate] FROM [Orders] AS [c.Orders] INNER JOIN ( SELECT TOP(1) [c0].[CustomerID] FROM [Customers] AS [c0] WHERE [c0].[CustomerID] = N'ALFKI' ORDER BY [c0].[CustomerID] ) AS [t] ON [c.Orders].[CustomerID] = [t].[CustomerID] ORDER BY [t].[CustomerID]"); } public override void Include_collection_dependent_already_tracked(bool useString) { base.Include_collection_dependent_already_tracked(useString); AssertSql( @"SELECT [o].[OrderID], [o].[CustomerID], [o].[EmployeeID], [o].[OrderDate] FROM [Orders] AS [o] WHERE [o].[CustomerID] = N'ALFKI'", // @"SELECT TOP(2) [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[CustomerID] = N'ALFKI' ORDER BY [c].[CustomerID]", // @"SELECT [c.Orders].[OrderID], [c.Orders].[CustomerID], [c.Orders].[EmployeeID], [c.Orders].[OrderDate] FROM [Orders] AS [c.Orders] INNER JOIN ( SELECT TOP(1) [c0].[CustomerID] FROM [Customers] AS [c0] WHERE [c0].[CustomerID] = N'ALFKI' ORDER BY [c0].[CustomerID] ) AS [t] ON [c.Orders].[CustomerID] = [t].[CustomerID] ORDER BY [t].[CustomerID]"); } public override void Include_reference_dependent_already_tracked(bool useString) { base.Include_reference_dependent_already_tracked(useString); AssertSql( @"SELECT [o].[OrderID], [o].[CustomerID], [o].[EmployeeID], [o].[OrderDate] FROM [Orders] AS [o] WHERE [o].[CustomerID] = N'ALFKI'", // @"SELECT [o].[OrderID], [o].[CustomerID], [o].[EmployeeID], [o].[OrderDate], [o.Customer].[CustomerID], [o.Customer].[Address], [o.Customer].[City], 
[o.Customer].[CompanyName], [o.Customer].[ContactName], [o.Customer].[ContactTitle], [o.Customer].[Country], [o.Customer].[Fax], [o.Customer].[Phone], [o.Customer].[PostalCode], [o.Customer].[Region] FROM [Orders] AS [o] LEFT JOIN [Customers] AS [o.Customer] ON [o].[CustomerID] = [o.Customer].[CustomerID]"); } public override void Include_reference_as_no_tracking(bool useString) { base.Include_reference_as_no_tracking(useString); AssertSql( @"SELECT [o].[OrderID], [o].[CustomerID], [o].[EmployeeID], [o].[OrderDate], [o.Customer].[CustomerID], [o.Customer].[Address], [o.Customer].[City], [o.Customer].[CompanyName], [o.Customer].[ContactName], [o.Customer].[ContactTitle], [o.Customer].[Country], [o.Customer].[Fax], [o.Customer].[Phone], [o.Customer].[PostalCode], [o.Customer].[Region] FROM [Orders] AS [o] LEFT JOIN [Customers] AS [o.Customer] ON [o].[CustomerID] = [o.Customer].[CustomerID]"); } public override void Include_collection_as_no_tracking2(bool useString) { base.Include_collection_as_no_tracking2(useString); AssertSql( @"@__p_0='5' SELECT TOP(@__p_0) [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] ORDER BY [c].[CustomerID]", // @"@__p_0='5' SELECT [c.Orders].[OrderID], [c.Orders].[CustomerID], [c.Orders].[EmployeeID], [c.Orders].[OrderDate] FROM [Orders] AS [c.Orders] INNER JOIN ( SELECT TOP(@__p_0) [c0].[CustomerID] FROM [Customers] AS [c0] ORDER BY [c0].[CustomerID] ) AS [t] ON [c.Orders].[CustomerID] = [t].[CustomerID] ORDER BY [t].[CustomerID]"); } public override void Include_with_complex_projection(bool useString) { base.Include_with_complex_projection(useString); AssertSql( @"SELECT [o].[CustomerID] AS [Id] FROM [Orders] AS [o]"); } public override void Include_with_take(bool useString) { base.Include_with_take(useString); AssertSql( @"@__p_0='10' SELECT TOP(@__p_0) [c].[CustomerID], [c].[Address], 
[c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] ORDER BY [c].[City] DESC, [c].[CustomerID]", // @"@__p_0='10' SELECT [c.Orders].[OrderID], [c.Orders].[CustomerID], [c.Orders].[EmployeeID], [c.Orders].[OrderDate] FROM [Orders] AS [c.Orders] INNER JOIN ( SELECT TOP(@__p_0) [c0].[CustomerID], [c0].[City] FROM [Customers] AS [c0] ORDER BY [c0].[City] DESC, [c0].[CustomerID] ) AS [t] ON [c.Orders].[CustomerID] = [t].[CustomerID] ORDER BY [t].[City] DESC, [t].[CustomerID]"); } public override void Include_with_skip(bool useString) { base.Include_with_skip(useString); if (SupportsOffset) { AssertSql( @"@__p_0='80' SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] ORDER BY [c].[ContactName], [c].[CustomerID] OFFSET @__p_0 ROWS", // @"@__p_0='80' SELECT [c.Orders].[OrderID], [c.Orders].[CustomerID], [c.Orders].[EmployeeID], [c.Orders].[OrderDate] FROM [Orders] AS [c.Orders] INNER JOIN ( SELECT [c0].[CustomerID], [c0].[ContactName] FROM [Customers] AS [c0] ORDER BY [c0].[ContactName], [c0].[CustomerID] OFFSET @__p_0 ROWS ) AS [t] ON [c.Orders].[CustomerID] = [t].[CustomerID] ORDER BY [t].[ContactName], [t].[CustomerID]"); } } [Theory(Skip = "SQLCE limitation")] public override void Then_include_collection_order_by_collection_column(bool useString) { base.Then_include_collection_order_by_collection_column(useString); AssertSql( @"SELECT TOP(1) [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[CustomerID] LIKE N'W' + N'%' AND (LEFT([c].[CustomerID], LEN(N'W')) = N'W') ORDER BY ( SELECT TOP(1) [oo].[OrderDate] FROM [Orders] AS [oo] WHERE [c].[CustomerID] = 
[oo].[CustomerID] ORDER BY [oo].[OrderDate] DESC ) DESC, [c].[CustomerID]", // @"SELECT [c.Orders].[OrderID], [c.Orders].[CustomerID], [c.Orders].[EmployeeID], [c.Orders].[OrderDate] FROM [Orders] AS [c.Orders] INNER JOIN ( SELECT TOP(1) [c0].[CustomerID], ( SELECT TOP(1) [oo1].[OrderDate] FROM [Orders] AS [oo1] WHERE [c0].[CustomerID] = [oo1].[CustomerID] ORDER BY [oo1].[OrderDate] DESC ) AS [c] FROM [Customers] AS [c0] WHERE [c0].[CustomerID] LIKE N'W' + N'%' AND (LEFT([c0].[CustomerID], LEN(N'W')) = N'W') ORDER BY ( SELECT TOP(1) [oo0].[OrderDate] FROM [Orders] AS [oo0] WHERE [c0].[CustomerID] = [oo0].[CustomerID] ORDER BY [oo0].[OrderDate] DESC ) DESC, [c0].[CustomerID] ) AS [t] ON [c.Orders].[CustomerID] = [t].[CustomerID] ORDER BY [t].[c] DESC, [t].[CustomerID], [c.Orders].[OrderID]", // @"SELECT [c.Orders.OrderDetails].[OrderID], [c.Orders.OrderDetails].[ProductID], [c.Orders.OrderDetails].[Discount], [c.Orders.OrderDetails].[Quantity], [c.Orders.OrderDetails].[UnitPrice] FROM [Order Details] AS [c.Orders.OrderDetails] INNER JOIN ( SELECT DISTINCT [c.Orders0].[OrderID], [t0].[c], [t0].[CustomerID] FROM [Orders] AS [c.Orders0] INNER JOIN ( SELECT TOP(1) [c1].[CustomerID], ( SELECT TOP(1) [oo4].[OrderDate] FROM [Orders] AS [oo4] WHERE [c1].[CustomerID] = [oo4].[CustomerID] ORDER BY [oo4].[OrderDate] DESC ) AS [c] FROM [Customers] AS [c1] WHERE [c1].[CustomerID] LIKE N'W' + N'%' AND (LEFT([c1].[CustomerID], LEN(N'W')) = N'W') ORDER BY ( SELECT TOP(1) [oo3].[OrderDate] FROM [Orders] AS [oo3] WHERE [c1].[CustomerID] = [oo3].[CustomerID] ORDER BY [oo3].[OrderDate] DESC ) DESC, [c1].[CustomerID] ) AS [t0] ON [c.Orders0].[CustomerID] = [t0].[CustomerID] ) AS [t1] ON [c.Orders.OrderDetails].[OrderID] = [t1].[OrderID] ORDER BY [t1].[c] DESC, [t1].[CustomerID], [t1].[OrderID]"); } public override void Include_collection_with_conditional_order_by(bool useString) { base.Include_collection_with_conditional_order_by(useString); AssertSql( @"SELECT [c].[CustomerID], 
[c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] ORDER BY CASE WHEN [c].[CustomerID] LIKE N'S' + N'%' AND (SUBSTRING([c].[CustomerID], 1, LEN(N'S')) = N'S') THEN 1 ELSE 2 END, [c].[CustomerID]", // @"SELECT [c.Orders].[OrderID], [c.Orders].[CustomerID], [c.Orders].[EmployeeID], [c.Orders].[OrderDate] FROM [Orders] AS [c.Orders] INNER JOIN ( SELECT [c0].[CustomerID], CASE WHEN [c0].[CustomerID] LIKE N'S' + N'%' AND (SUBSTRING([c0].[CustomerID], 1, LEN(N'S')) = N'S') THEN 1 ELSE 2 END AS [c] FROM [Customers] AS [c0] ) AS [t] ON [c.Orders].[CustomerID] = [t].[CustomerID] ORDER BY [t].[c], [t].[CustomerID]"); } private void AssertSql(params string[] expected) { //string[] expectedFixed = new string[expected.Length]; //int i = 0; //foreach (var item in expected) //{ // expectedFixed[i++] = item.Replace("\r\n", "\n"); //} Fixture.TestSqlLoggerFactory.AssertBaseline(expected); } protected override void ClearLog() => Fixture.TestSqlLoggerFactory.Clear(); } }
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. /*============================================================ ** ** Class: SortedList ** ** Purpose: Represents a collection of key/value pairs ** that are sorted by the keys and are accessible ** by key and by index. ** ===========================================================*/ using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.Globalization; namespace System.Collections { // The SortedList class implements a sorted list of keys and values. Entries in // a sorted list are sorted by their keys and are accessible both by key and by // index. The keys of a sorted list can be ordered either according to a // specific IComparer implementation given when the sorted list is // instantiated, or according to the IComparable implementation provided // by the keys themselves. In either case, a sorted list does not allow entries // with duplicate keys. // // A sorted list internally maintains two arrays that store the keys and // values of the entries. The capacity of a sorted list is the allocated // length of these internal arrays. As elements are added to a sorted list, the // capacity of the sorted list is automatically increased as required by // reallocating the internal arrays. The capacity is never automatically // decreased, but users can call either TrimToSize or // Capacity explicitly. // // The GetKeyList and GetValueList methods of a sorted list // provides access to the keys and values of the sorted list in the form of // List implementations. The List objects returned by these // methods are aliases for the underlying sorted list, so modifications // made to those lists are directly reflected in the sorted list, and vice // versa. 
// // The SortedList class provides a convenient way to create a sorted // copy of another dictionary, such as a Hashtable. For example: // // Hashtable h = new Hashtable(); // h.Add(...); // h.Add(...); // ... // SortedList s = new SortedList(h); // // The last line above creates a sorted list that contains a copy of the keys // and values stored in the hashtable. In this particular example, the keys // will be ordered according to the IComparable interface, which they // all must implement. To impose a different ordering, SortedList also // has a constructor that allows a specific IComparer implementation to // be specified. // [DebuggerTypeProxy(typeof(System.Collections.SortedList.SortedListDebugView))] [DebuggerDisplay("Count = {Count}")] [Serializable] [System.Runtime.CompilerServices.TypeForwardedFrom("mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089")] public class SortedList : IDictionary, ICloneable { private object[] keys; // Do not rename (binary serialization) private object[] values; // Do not rename (binary serialization) private int _size; // Do not rename (binary serialization) private int version; // Do not rename (binary serialization) private IComparer comparer; // Do not rename (binary serialization) private KeyList keyList; // Do not rename (binary serialization) private ValueList valueList; // Do not rename (binary serialization) [NonSerialized] private object _syncRoot; private const int _defaultCapacity = 16; // Copy of Array.MaxArrayLength internal const int MaxArrayLength = 0X7FEFFFFF; // Constructs a new sorted list. The sorted list is initially empty and has // a capacity of zero. Upon adding the first element to the sorted list the // capacity is increased to 16, and then increased in multiples of two as // required. The elements of the sorted list are ordered according to the // IComparable interface, which must be implemented by the keys of // all entries added to the sorted list. 
        public SortedList()
        {
            Init();
        }

        // Shared zero-capacity initialization used by the parameterless constructor.
        // NOTE: the default comparer is culture-sensitive (CurrentCulture), so key
        // ordering for string keys depends on the thread's culture at construction.
        private void Init()
        {
            keys = Array.Empty<Object>();
            values = Array.Empty<Object>();
            _size = 0;
            comparer = new Comparer(CultureInfo.CurrentCulture);
        }

        // Constructs a new sorted list. The sorted list is initially empty and has
        // a capacity of zero. Upon adding the first element to the sorted list the
        // capacity is increased to 16, and then increased in multiples of two as
        // required. The elements of the sorted list are ordered according to the
        // IComparable interface, which must be implemented by the keys of
        // all entries added to the sorted list.
        //
        public SortedList(int initialCapacity)
        {
            if (initialCapacity < 0)
                throw new ArgumentOutOfRangeException(nameof(initialCapacity), SR.ArgumentOutOfRange_NeedNonNegNum);
            keys = new object[initialCapacity];
            values = new object[initialCapacity];
            comparer = new Comparer(CultureInfo.CurrentCulture);
        }

        // Constructs a new sorted list with a given IComparer
        // implementation. The sorted list is initially empty and has a capacity of
        // zero. Upon adding the first element to the sorted list the capacity is
        // increased to 16, and then increased in multiples of two as required. The
        // elements of the sorted list are ordered according to the given
        // IComparer implementation. If comparer is null, the
        // elements are compared to each other using the IComparable
        // interface, which in that case must be implemented by the keys of all
        // entries added to the sorted list.
        //
        public SortedList(IComparer comparer)
            : this()
        {
            // A null comparer deliberately keeps the culture-aware default set by Init().
            if (comparer != null) this.comparer = comparer;
        }

        // Constructs a new sorted list with a given IComparer
        // implementation and a given initial capacity. The sorted list is
        // initially empty, but will have room for the given number of elements
        // before any reallocations are required. The elements of the sorted list
        // are ordered according to the given IComparer implementation. If
        // comparer is null, the elements are compared to each other using
        // the IComparable interface, which in that case must be implemented
        // by the keys of all entries added to the sorted list.
        //
        public SortedList(IComparer comparer, int capacity)
            : this(comparer)
        {
            // The Capacity setter performs the negative-value validation.
            Capacity = capacity;
        }

        // Constructs a new sorted list containing a copy of the entries in the
        // given dictionary. The elements of the sorted list are ordered according
        // to the IComparable interface, which must be implemented by the
        // keys of all entries in the given dictionary as well as keys
        // subsequently added to the sorted list.
        //
        public SortedList(IDictionary d)
            : this(d, null)
        {
        }

        // Constructs a new sorted list containing a copy of the entries in the
        // given dictionary. The elements of the sorted list are ordered according
        // to the given IComparer implementation. If comparer is
        // null, the elements are compared to each other using the
        // IComparable interface, which in that case must be implemented
        // by the keys of all entries in the given dictionary as well as keys
        // subsequently added to the sorted list.
        //
        public SortedList(IDictionary d, IComparer comparer)
            : this(comparer, (d != null ? d.Count : 0))
        {
            if (d == null)
                throw new ArgumentNullException(nameof(d), SR.ArgumentNull_Dictionary);
            // Capacity was sized to d.Count above, so both CopyTo calls fill the
            // arrays exactly.
            d.Keys.CopyTo(keys, 0);
            d.Values.CopyTo(values, 0);

            // Array.Sort(Array keys, Array values, IComparer comparer) does not exist in System.Runtime contract v4.0.10.0.
            // This works around that by sorting only on the keys and then assigning values accordingly.
            Array.Sort(keys, comparer);
            for (int i = 0; i < keys.Length; i++)
            {
                // Re-fetch each value by key from the source dictionary so values
                // line up with the now-sorted key order.
                values[i] = d[keys[i]];
            }
            _size = d.Count;
        }

        // Adds an entry with the given key and value to this sorted list. An
        // ArgumentException is thrown if the key is already present in the sorted list.
        //
        public virtual void Add(object key, object value)
        {
            if (key == null) throw new ArgumentNullException(nameof(key), SR.ArgumentNull_Key);
            int i = Array.BinarySearch(keys, 0, _size, key, comparer);
            if (i >= 0)
                // A non-negative result means the key already exists; report both
                // the stored key and the incoming one.
                throw new ArgumentException(SR.Format(SR.Argument_AddingDuplicate_OldAndNewKeys, GetKey(i), key));
            // ~i is the bitwise complement of the index of the first element greater
            // than key, i.e. the correct sorted insertion point.
            Insert(~i, key, value);
        }

        // Returns the capacity of this sorted list. The capacity of a sorted list
        // represents the allocated length of the internal arrays used to store the
        // keys and values of the list, and thus also indicates the maximum number
        // of entries the list can contain before a reallocation of the internal
        // arrays is required.
        //
        public virtual int Capacity
        {
            get
            {
                return keys.Length;
            }
            set
            {
                // Shrinking below the current element count would drop live entries.
                if (value < Count)
                {
                    throw new ArgumentOutOfRangeException(nameof(value), SR.ArgumentOutOfRange_SmallCapacity);
                }

                if (value != keys.Length)
                {
                    if (value > 0)
                    {
                        // Reallocate both parallel arrays, then copy the live
                        // entries before publishing the new arrays.
                        object[] newKeys = new object[value];
                        object[] newValues = new object[value];
                        if (_size > 0)
                        {
                            Array.Copy(keys, 0, newKeys, 0, _size);
                            Array.Copy(values, 0, newValues, 0, _size);
                        }
                        keys = newKeys;
                        values = newValues;
                    }
                    else
                    {
                        // size can only be zero here.
                        Debug.Assert(_size == 0, "Size is not zero");
                        keys = Array.Empty<Object>();
                        values = Array.Empty<Object>();
                    }
                }
            }
        }

        // Returns the number of entries in this sorted list.
        //
        public virtual int Count
        {
            get
            {
                return _size;
            }
        }

        // Returns a collection representing the keys of this sorted list. This
        // method returns the same object as GetKeyList, but typed as an
        // ICollection instead of an IList.
        //
        public virtual ICollection Keys
        {
            get
            {
                return GetKeyList();
            }
        }

        // Returns a collection representing the values of this sorted list. This
        // method returns the same object as GetValueList, but typed as an
        // ICollection instead of an IList.
        //
        public virtual ICollection Values
        {
            get
            {
                return GetValueList();
            }
        }

        // Is this SortedList read-only?
public virtual bool IsReadOnly { get { return false; } } public virtual bool IsFixedSize { get { return false; } } // Is this SortedList synchronized (thread-safe)? public virtual bool IsSynchronized { get { return false; } } // Synchronization root for this object. public virtual object SyncRoot { get { if (_syncRoot == null) { System.Threading.Interlocked.CompareExchange<Object>(ref _syncRoot, new object(), null); } return _syncRoot; } } // Removes all entries from this sorted list. public virtual void Clear() { // clear does not change the capacity version++; Array.Clear(keys, 0, _size); // Don't need to doc this but we clear the elements so that the gc can reclaim the references. Array.Clear(values, 0, _size); // Don't need to doc this but we clear the elements so that the gc can reclaim the references. _size = 0; } // Makes a virtually identical copy of this SortedList. This is a shallow // copy. IE, the Objects in the SortedList are not cloned - we copy the // references to those objects. public virtual object Clone() { SortedList sl = new SortedList(_size); Array.Copy(keys, 0, sl.keys, 0, _size); Array.Copy(values, 0, sl.values, 0, _size); sl._size = _size; sl.version = version; sl.comparer = comparer; // Don't copy keyList nor valueList. return sl; } // Checks if this sorted list contains an entry with the given key. // public virtual bool Contains(object key) { return IndexOfKey(key) >= 0; } // Checks if this sorted list contains an entry with the given key. // public virtual bool ContainsKey(object key) { // Yes, this is a SPEC'ed duplicate of Contains(). return IndexOfKey(key) >= 0; } // Checks if this sorted list contains an entry with the given value. The // values of the entries of the sorted list are compared to the given value // using the Object.Equals method. This method performs a linear // search and is substantially slower than the Contains // method. 
// public virtual bool ContainsValue(object value) { return IndexOfValue(value) >= 0; } // Copies the values in this SortedList to an array. public virtual void CopyTo(Array array, int arrayIndex) { if (array == null) throw new ArgumentNullException(nameof(array), SR.ArgumentNull_Array); if (array.Rank != 1) throw new ArgumentException(SR.Arg_RankMultiDimNotSupported, nameof(array)); if (arrayIndex < 0) throw new ArgumentOutOfRangeException(nameof(arrayIndex), SR.ArgumentOutOfRange_NeedNonNegNum); if (array.Length - arrayIndex < Count) throw new ArgumentException(SR.Arg_ArrayPlusOffTooSmall); for (int i = 0; i < Count; i++) { DictionaryEntry entry = new DictionaryEntry(keys[i], values[i]); array.SetValue(entry, i + arrayIndex); } } // Copies the values in this SortedList to an KeyValuePairs array. // KeyValuePairs is different from Dictionary Entry in that it has special // debugger attributes on its fields. internal virtual KeyValuePairs[] ToKeyValuePairsArray() { KeyValuePairs[] array = new KeyValuePairs[Count]; for (int i = 0; i < Count; i++) { array[i] = new KeyValuePairs(keys[i], values[i]); } return array; } // Ensures that the capacity of this sorted list is at least the given // minimum value. If the current capacity of the list is less than // min, the capacity is increased to twice the current capacity or // to min, whichever is larger. private void EnsureCapacity(int min) { int newCapacity = keys.Length == 0 ? 16 : keys.Length * 2; // Allow the list to grow to maximum possible capacity (~2G elements) before encountering overflow. // Note that this check works even when _items.Length overflowed thanks to the (uint) cast if ((uint)newCapacity > MaxArrayLength) newCapacity = MaxArrayLength; if (newCapacity < min) newCapacity = min; Capacity = newCapacity; } // Returns the value of the entry at the given index. 
        //
        public virtual object GetByIndex(int index)
        {
            if (index < 0 || index >= Count)
                throw new ArgumentOutOfRangeException(nameof(index), SR.ArgumentOutOfRange_Index);
            return values[index];
        }

        // Returns an IEnumerator for this sorted list. If modifications
        // made to the sorted list while an enumeration is in progress,
        // the MoveNext and Remove methods
        // of the enumerator will throw an exception.
        //
        IEnumerator IEnumerable.GetEnumerator()
        {
            // Same DictEntry-mode enumerator as the public GetEnumerator(), exposed
            // through the non-generic IEnumerable interface.
            return new SortedListEnumerator(this, 0, _size, SortedListEnumerator.DictEntry);
        }

        // Returns an IDictionaryEnumerator for this sorted list. If modifications
        // made to the sorted list while an enumeration is in progress,
        // the MoveNext and Remove methods
        // of the enumerator will throw an exception.
        //
        public virtual IDictionaryEnumerator GetEnumerator()
        {
            return new SortedListEnumerator(this, 0, _size, SortedListEnumerator.DictEntry);
        }

        // Returns the key of the entry at the given index.
        //
        public virtual object GetKey(int index)
        {
            if (index < 0 || index >= Count)
                throw new ArgumentOutOfRangeException(nameof(index), SR.ArgumentOutOfRange_Index);
            return keys[index];
        }

        // Returns an IList representing the keys of this sorted list. The
        // returned list is an alias for the keys of this sorted list, so
        // modifications made to the returned list are directly reflected in the
        // underlying sorted list, and vice versa. The elements of the returned
        // list are ordered in the same way as the elements of the sorted list. The
        // returned list does not support adding, inserting, or modifying elements
        // (the Add, AddRange, Insert, InsertRange,
        // Reverse, Set, SetRange, and Sort methods
        // throw exceptions), but it does allow removal of elements (through the
        // Remove and RemoveRange methods or through an enumerator).
        // Null is an invalid key value.
        //
        public virtual IList GetKeyList()
        {
            // The view is created lazily and cached; every call returns the same
            // live instance.
            if (keyList == null)
                keyList = new KeyList(this);
            return keyList;
        }

        // Returns an IList representing the values of this sorted list.
        The
        // returned list is an alias for the values of this sorted list, so
        // modifications made to the returned list are directly reflected in the
        // underlying sorted list, and vice versa. The elements of the returned
        // list are ordered in the same way as the elements of the sorted list. The
        // returned list does not support adding or inserting elements (the
        // Add, AddRange, Insert and InsertRange
        // methods throw exceptions), but it does allow modification and removal of
        // elements (through the Remove, RemoveRange, Set and
        // SetRange methods or through an enumerator).
        //
        public virtual IList GetValueList()
        {
            // Lazily create and cache the live view; the same instance is
            // returned on every call.
            if (valueList == null)
                valueList = new ValueList(this);
            return valueList;
        }

        // Returns the value associated with the given key. If an entry with the
        // given key is not found, the returned value is null.
        //
        public virtual object this[object key]
        {
            get
            {
                // NOTE: a null return is ambiguous — the key may be absent, or
                // present with a null value. IndexOfKey throws on a null key.
                int i = IndexOfKey(key);
                if (i >= 0)
                    return values[i];
                return null;
            }
            set
            {
                if (key == null) throw new ArgumentNullException(nameof(key), SR.ArgumentNull_Key);
                int i = Array.BinarySearch(keys, 0, _size, key, comparer);
                if (i >= 0)
                {
                    // Existing key: overwrite in place and invalidate enumerators.
                    values[i] = value;
                    version++;
                    return;
                }
                // Absent key: ~i is the sorted insertion point.
                Insert(~i, key, value);
            }
        }

        // Returns the index of the entry with a given key in this sorted list. The
        // key is located through a binary search, and thus the average execution
        // time of this method is proportional to Log2(size), where
        // size is the size of this sorted list. The returned value is -1 if
        // the given key does not occur in this sorted list. Null is an invalid
        // key value.
        //
        public virtual int IndexOfKey(object key)
        {
            if (key == null)
                throw new ArgumentNullException(nameof(key), SR.ArgumentNull_Key);
            int ret = Array.BinarySearch(keys, 0, _size, key, comparer);
            // Collapse all "not found" complements to the documented -1.
            return ret >= 0 ? ret : -1;
        }

        // Returns the index of the first occurrence of an entry with a given value
        // in this sorted list.
        The entry is located through a linear search, and
        // thus the average execution time of this method is proportional to the
        // size of this sorted list. The elements of the list are compared to the
        // given value using the Object.Equals method.
        //
        public virtual int IndexOfValue(object value)
        {
            return Array.IndexOf(values, value, 0, _size);
        }

        // Inserts an entry with a given key and value at a given index.
        // Private: callers (Add, indexer set) have already validated the key and
        // computed the correct sorted position.
        private void Insert(int index, object key, object value)
        {
            if (_size == keys.Length)
                EnsureCapacity(_size + 1);
            if (index < _size)
            {
                // Shift the tail right by one; Array.Copy handles the
                // overlapping source/destination ranges correctly.
                Array.Copy(keys, index, keys, index + 1, _size - index);
                Array.Copy(values, index, values, index + 1, _size - index);
            }
            keys[index] = key;
            values[index] = value;
            _size++;
            version++;
        }

        // Removes the entry at the given index. The size of the sorted list is
        // decreased by one.
        //
        public virtual void RemoveAt(int index)
        {
            if (index < 0 || index >= Count)
                throw new ArgumentOutOfRangeException(nameof(index), SR.ArgumentOutOfRange_Index);
            // _size is decremented first so the copy length below is the tail
            // length after removal.
            _size--;
            if (index < _size)
            {
                Array.Copy(keys, index + 1, keys, index, _size - index);
                Array.Copy(values, index + 1, values, index, _size - index);
            }
            // Null out the vacated last slots so the GC can reclaim the references.
            keys[_size] = null;
            values[_size] = null;
            version++;
        }

        // Removes an entry from this sorted list. If an entry with the specified
        // key exists in the sorted list, it is removed. An ArgumentNullException
        // (via IndexOfKey) is thrown if the key is null; a missing key is a no-op.
        //
        public virtual void Remove(object key)
        {
            int i = IndexOfKey(key);
            if (i >= 0)
                RemoveAt(i);
        }

        // Sets the value at an index to a given value. The previous value of
        // the given entry is overwritten.
        //
        public virtual void SetByIndex(int index, object value)
        {
            if (index < 0 || index >= Count)
                throw new ArgumentOutOfRangeException(nameof(index), SR.ArgumentOutOfRange_Index);
            values[index] = value;
            version++;
        }

        // Returns a thread-safe SortedList.
// public static SortedList Synchronized(SortedList list) { if (list == null) throw new ArgumentNullException(nameof(list)); return new SyncSortedList(list); } // Sets the capacity of this sorted list to the size of the sorted list. // This method can be used to minimize a sorted list's memory overhead once // it is known that no new elements will be added to the sorted list. To // completely clear a sorted list and release all memory referenced by the // sorted list, execute the following statements: // // sortedList.Clear(); // sortedList.TrimToSize(); // public virtual void TrimToSize() { Capacity = _size; } [Serializable] [System.Runtime.CompilerServices.TypeForwardedFrom("mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089")] private class SyncSortedList : SortedList { private SortedList _list; // Do not rename (binary serialization) private object _root; // Do not rename (binary serialization) internal SyncSortedList(SortedList list) { _list = list; _root = list.SyncRoot; } public override int Count { get { lock (_root) { return _list.Count; } } } public override object SyncRoot { get { return _root; } } public override bool IsReadOnly { get { return _list.IsReadOnly; } } public override bool IsFixedSize { get { return _list.IsFixedSize; } } public override bool IsSynchronized { get { return true; } } public override object this[object key] { get { lock (_root) { return _list[key]; } } set { lock (_root) { _list[key] = value; } } } public override void Add(object key, object value) { lock (_root) { _list.Add(key, value); } } public override int Capacity { get { lock (_root) { return _list.Capacity; } } } public override void Clear() { lock (_root) { _list.Clear(); } } public override object Clone() { lock (_root) { return _list.Clone(); } } public override bool Contains(object key) { lock (_root) { return _list.Contains(key); } } public override bool ContainsKey(object key) { lock (_root) { return _list.ContainsKey(key); } } public 
override bool ContainsValue(object key) { lock (_root) { return _list.ContainsValue(key); } } public override void CopyTo(Array array, int index) { lock (_root) { _list.CopyTo(array, index); } } [SuppressMessage("Microsoft.Contracts", "CC1055")] // Skip extra error checking to avoid *potential* AppCompat problems. public override object GetByIndex(int index) { lock (_root) { return _list.GetByIndex(index); } } public override IDictionaryEnumerator GetEnumerator() { lock (_root) { return _list.GetEnumerator(); } } [SuppressMessage("Microsoft.Contracts", "CC1055")] // Skip extra error checking to avoid *potential* AppCompat problems. public override object GetKey(int index) { lock (_root) { return _list.GetKey(index); } } public override IList GetKeyList() { lock (_root) { return _list.GetKeyList(); } } public override IList GetValueList() { lock (_root) { return _list.GetValueList(); } } public override int IndexOfKey(object key) { if (key == null) throw new ArgumentNullException(nameof(key), SR.ArgumentNull_Key); lock (_root) { return _list.IndexOfKey(key); } } [SuppressMessage("Microsoft.Contracts", "CC1055")] // Skip extra error checking to avoid *potential* AppCompat problems. public override int IndexOfValue(object value) { lock (_root) { return _list.IndexOfValue(value); } } [SuppressMessage("Microsoft.Contracts", "CC1055")] // Skip extra error checking to avoid *potential* AppCompat problems. public override void RemoveAt(int index) { lock (_root) { _list.RemoveAt(index); } } public override void Remove(object key) { lock (_root) { _list.Remove(key); } } [SuppressMessage("Microsoft.Contracts", "CC1055")] // Skip extra error checking to avoid *potential* AppCompat problems. 
public override void SetByIndex(int index, object value) { lock (_root) { _list.SetByIndex(index, value); } } internal override KeyValuePairs[] ToKeyValuePairsArray() { return _list.ToKeyValuePairsArray(); } public override void TrimToSize() { lock (_root) { _list.TrimToSize(); } } } private class SortedListEnumerator : IDictionaryEnumerator, ICloneable { private SortedList _sortedList; private object _key; private object _value; private int _index; private int _startIndex; // Store for Reset. private int _endIndex; private int _version; private bool _current; // Is the current element valid? private int _getObjectRetType; // What should GetObject return? internal const int Keys = 1; internal const int Values = 2; internal const int DictEntry = 3; internal SortedListEnumerator(SortedList sortedList, int index, int count, int getObjRetType) { _sortedList = sortedList; _index = index; _startIndex = index; _endIndex = index + count; _version = sortedList.version; _getObjectRetType = getObjRetType; _current = false; } public object Clone() => MemberwiseClone(); public virtual object Key { get { if (_version != _sortedList.version) throw new InvalidOperationException(SR.InvalidOperation_EnumFailedVersion); if (_current == false) throw new InvalidOperationException(SR.InvalidOperation_EnumOpCantHappen); return _key; } } public virtual bool MoveNext() { if (_version != _sortedList.version) throw new InvalidOperationException(SR.InvalidOperation_EnumFailedVersion); if (_index < _endIndex) { _key = _sortedList.keys[_index]; _value = _sortedList.values[_index]; _index++; _current = true; return true; } _key = null; _value = null; _current = false; return false; } public virtual DictionaryEntry Entry { get { if (_version != _sortedList.version) throw new InvalidOperationException(SR.InvalidOperation_EnumFailedVersion); if (_current == false) throw new InvalidOperationException(SR.InvalidOperation_EnumOpCantHappen); return new DictionaryEntry(_key, _value); } } public virtual 
object Current { get { if (_current == false) throw new InvalidOperationException(SR.InvalidOperation_EnumOpCantHappen); if (_getObjectRetType == Keys) return _key; else if (_getObjectRetType == Values) return _value; else return new DictionaryEntry(_key, _value); } } public virtual object Value { get { if (_version != _sortedList.version) throw new InvalidOperationException(SR.InvalidOperation_EnumFailedVersion); if (_current == false) throw new InvalidOperationException(SR.InvalidOperation_EnumOpCantHappen); return _value; } } public virtual void Reset() { if (_version != _sortedList.version) throw new InvalidOperationException(SR.InvalidOperation_EnumFailedVersion); _index = _startIndex; _current = false; _key = null; _value = null; } } [Serializable] [System.Runtime.CompilerServices.TypeForwardedFrom("mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089")] private class KeyList : IList { private SortedList sortedList; // Do not rename (binary serialization) internal KeyList(SortedList sortedList) { this.sortedList = sortedList; } public virtual int Count { get { return sortedList._size; } } public virtual bool IsReadOnly { get { return true; } } public virtual bool IsFixedSize { get { return true; } } public virtual bool IsSynchronized { get { return sortedList.IsSynchronized; } } public virtual object SyncRoot { get { return sortedList.SyncRoot; } } public virtual int Add(object key) { throw new NotSupportedException(SR.NotSupported_SortedListNestedWrite); // return 0; // suppress compiler warning } public virtual void Clear() { throw new NotSupportedException(SR.NotSupported_SortedListNestedWrite); } public virtual bool Contains(object key) { return sortedList.Contains(key); } public virtual void CopyTo(Array array, int arrayIndex) { if (array != null && array.Rank != 1) throw new ArgumentException(SR.Arg_RankMultiDimNotSupported, nameof(array)); // defer error checking to Array.Copy Array.Copy(sortedList.keys, 0, array, arrayIndex, 
sortedList.Count); } public virtual void Insert(int index, object value) { throw new NotSupportedException(SR.NotSupported_SortedListNestedWrite); } public virtual object this[int index] { get { return sortedList.GetKey(index); } set { throw new NotSupportedException(SR.NotSupported_KeyCollectionSet); } } public virtual IEnumerator GetEnumerator() { return new SortedListEnumerator(sortedList, 0, sortedList.Count, SortedListEnumerator.Keys); } public virtual int IndexOf(object key) { if (key == null) throw new ArgumentNullException(nameof(key), SR.ArgumentNull_Key); int i = Array.BinarySearch(sortedList.keys, 0, sortedList.Count, key, sortedList.comparer); if (i >= 0) return i; return -1; } public virtual void Remove(object key) { throw new NotSupportedException(SR.NotSupported_SortedListNestedWrite); } public virtual void RemoveAt(int index) { throw new NotSupportedException(SR.NotSupported_SortedListNestedWrite); } } [Serializable] [System.Runtime.CompilerServices.TypeForwardedFrom("mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089")] private class ValueList : IList { private SortedList sortedList; // Do not rename (binary serialization) internal ValueList(SortedList sortedList) { this.sortedList = sortedList; } public virtual int Count { get { return sortedList._size; } } public virtual bool IsReadOnly { get { return true; } } public virtual bool IsFixedSize { get { return true; } } public virtual bool IsSynchronized { get { return sortedList.IsSynchronized; } } public virtual object SyncRoot { get { return sortedList.SyncRoot; } } public virtual int Add(object key) { throw new NotSupportedException(SR.NotSupported_SortedListNestedWrite); } public virtual void Clear() { throw new NotSupportedException(SR.NotSupported_SortedListNestedWrite); } public virtual bool Contains(object value) { return sortedList.ContainsValue(value); } public virtual void CopyTo(Array array, int arrayIndex) { if (array != null && array.Rank != 1) throw new 
ArgumentException(SR.Arg_RankMultiDimNotSupported, nameof(array)); // defer error checking to Array.Copy Array.Copy(sortedList.values, 0, array, arrayIndex, sortedList.Count); } public virtual void Insert(int index, object value) { throw new NotSupportedException(SR.NotSupported_SortedListNestedWrite); } public virtual object this[int index] { get { return sortedList.GetByIndex(index); } set { throw new NotSupportedException(SR.NotSupported_SortedListNestedWrite); } } public virtual IEnumerator GetEnumerator() { return new SortedListEnumerator(sortedList, 0, sortedList.Count, SortedListEnumerator.Values); } public virtual int IndexOf(object value) { return Array.IndexOf(sortedList.values, value, 0, sortedList.Count); } public virtual void Remove(object value) { throw new NotSupportedException(SR.NotSupported_SortedListNestedWrite); } public virtual void RemoveAt(int index) { throw new NotSupportedException(SR.NotSupported_SortedListNestedWrite); } } // internal debug view class for sorted list internal class SortedListDebugView { private SortedList _sortedList; public SortedListDebugView(SortedList sortedList) { if (sortedList == null) { throw new ArgumentNullException(nameof(sortedList)); } _sortedList = sortedList; } [DebuggerBrowsable(DebuggerBrowsableState.RootHidden)] public KeyValuePairs[] Items { get { return _sortedList.ToKeyValuePairsArray(); } } } } }
/* Copyright (c) Citrix Systems, Inc. * All rights reserved. * * Redistribution and use in source and binary forms, * with or without modification, are permitted provided * that the following conditions are met: * * * Redistributions of source code must retain the above * copyright notice, this list of conditions and the * following disclaimer. * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the * following disclaimer in the documentation and/or other * materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. 
*/ using System; using System.Collections.Generic; using System.Globalization; using System.Linq; using System.Threading; using XenAdmin; using XenAdmin.Core; using XenAdmin.Network; using System.Diagnostics; using System.Web.Script.Serialization; namespace XenAPI { public partial class Host : IComparable<Host>, IEquatable<Host> { private static readonly log4net.ILog log = log4net.LogManager.GetLogger(System.Reflection.MethodBase.GetCurrentMethod().DeclaringType); public enum Edition { Free, PerSocket, //Added in Clearwater (PR-1589) XenDesktop, //Added in Clearwater (PR-1589) and is new form of "EnterpriseXD" StandardPerSocket, // Added in Creedence (standard-per-socket) Desktop, // Added in Creedence (desktop) Standard, // Added in Dundee/Violet (standard) EnterprisePerSocket, // Added in Creedence (enterprise-per-socket) EnterprisePerUser, // Added in Creedence (enterprise-per-user) DesktopPlus, // Added in Creedence (desktop-plus) DesktopCloud, // Added in Jura (desktop-cloud) Premium // Added in Indigo (premium) } public const string LicenseServerWebConsolePort = "8082"; public override string Name() { return name_label; } public static Edition GetEdition(string editionText) { switch (editionText) { case "xendesktop": return Edition.XenDesktop; case "per-socket": return Edition.PerSocket; case "enterprise-per-socket": case "premium-per-socket": return Edition.EnterprisePerSocket; case "enterprise-per-user": case "premium-per-user": return Edition.EnterprisePerUser; case "standard-per-socket": return Edition.StandardPerSocket; case "desktop": return Edition.Desktop; case "desktop-plus": return Edition.DesktopPlus; case "desktop-cloud": return Edition.DesktopCloud; case "premium": return Edition.Premium; case "standard": return Edition.Standard; case "basic": default: return Edition.Free; } } public bool CanSeeNetwork(XenAPI.Network network) { System.Diagnostics.Trace.Assert(network != null); // Special case for local networks. 
if (network.PIFs.Count == 0) return true; foreach (var pifRef in network.PIFs) { PIF pif = network.Connection.Resolve(pifRef); if (pif != null && pif.host != null && pif.host.opaque_ref == opaque_ref) return true; } return false; } public string GetEditionText(Edition edition) { switch (edition) { case Edition.XenDesktop: return "xendesktop"; case Edition.PerSocket: return "per-socket"; case Edition.EnterprisePerSocket: return Helpers.NaplesOrGreater(this) ? "premium-per-socket" : "enterprise-per-socket"; case Edition.EnterprisePerUser: return Helpers.NaplesOrGreater(this) ? "premium-per-user" : "enterprise-per-user"; case Edition.StandardPerSocket: return "standard-per-socket"; case Edition.Desktop: return "desktop"; case Edition.DesktopPlus: return "desktop-plus"; case Edition.DesktopCloud: return "desktop-cloud"; case Edition.Premium: return "premium"; case Edition.Standard: return "standard"; default: return Helpers.NaplesOrGreater(this) ? "express" : "free"; } } public string GetIscsiIqn() { if (Helpers.KolkataOrGreater(this)) { return iscsi_iqn; } return Get(other_config, "iscsi_iqn") ?? ""; } public void SetIscsiIqn(string value) { if (Helpers.KolkataOrGreater(this)) { iscsi_iqn = value; } else { other_config = SetDictionaryKey(other_config, "iscsi_iqn", value); } } public override string ToString() { return this.name_label; } public override string Description() { // i18n: CA-30372, CA-207273 if (name_description == "Default install of XenServer" || name_description == "Default install") return string.Format(Messages.DEFAULT_INSTALL_OF_XENSERVER, software_version.ContainsKey("product_brand") ? software_version["product_brand"] : BrandManager.ProductBrand); return name_description ?? ""; } /// <summary> /// The expiry date of this host's license in UTC. 
/// </summary>
        public virtual DateTime LicenseExpiryUTC()
        {
            // license_params["expiry"] is an ISO 8601 timestamp; fall back to a
            // far-future date when the key is missing or unparseable.
            if (license_params != null && license_params.ContainsKey("expiry") &&
                Util.TryParseIso8601DateTime(license_params["expiry"], out var result))
                return result;

            return new DateTime(2030, 1, 1);
        }

        // The Restrict* helpers below read per-feature flags from license_params.
        // NOTE(review): BoolKeyPreferTrue presumably treats a missing key as
        // restricted, while BoolKey treats it as unrestricted — confirm in the helper.

        public static bool RestrictRBAC(Host h)
        {
            return BoolKeyPreferTrue(h.license_params, "restrict_rbac");
        }

        public static bool RestrictDMC(Host h)
        {
            return BoolKeyPreferTrue(h.license_params, "restrict_dmc");
        }

        /// <summary>
        /// Added for Clearwater
        /// </summary>
        /// <param name="h"></param>
        /// <returns></returns>
        public static bool RestrictHotfixApply(Host h)
        {
            return BoolKeyPreferTrue(h.license_params, "restrict_hotfix_apply");
        }

        /// <summary>
        /// Restrict Automated Updates
        /// </summary>
        /// <param name="h">host</param>
        /// <returns></returns>
        public static bool RestrictBatchHotfixApply(Host h)
        {
            return BoolKeyPreferTrue(h.license_params, "restrict_batch_hotfix_apply");
        }

        public static bool RestrictCheckpoint(Host h)
        {
            return BoolKeyPreferTrue(h.license_params, "restrict_checkpoint");
        }

        public static bool RestrictCifs(Host h)
        {
            return BoolKeyPreferTrue(h.license_params, "restrict_cifs");
        }

        public static bool RestrictVendorDevice(Host h)
        {
            return BoolKeyPreferTrue(h.license_params, "restrict_pci_device_for_auto_update");
        }

        public static bool RestrictWLB(Host h)
        {
            return BoolKeyPreferTrue(h.license_params, "restrict_wlb");
        }

        public static bool RestrictVSwitchController(Host h)
        {
            return BoolKeyPreferTrue(h.license_params, "restrict_vswitch_controller");
        }

        public static bool RestrictSriovNetwork(Host h)
        {
            return BoolKeyPreferTrue(h.license_params, "restrict_network_sriov");
        }

        public static bool RestrictPooling(Host h)
        {
            return BoolKeyPreferTrue(h.license_params, "restrict_pooling");
        }

        public static bool RestrictVMSnapshotSchedule(Host h)
        {
            return BoolKeyPreferTrue(h.license_params, "restrict_vmss");
        }

        // VM appliances are never license-restricted.
        public static bool RestrictVMAppliances(Host h)
        {
            return false;
        }

        public static bool RestrictDR(Host h)
        {
            return BoolKeyPreferTrue(h.license_params, "restrict_dr");
        }

        public static bool RestrictConversion(Host h)
        {
            return BoolKeyPreferTrue(h.license_params, "restrict_xcm");
        }

        public static bool RestrictCrossPoolMigrate(Host h)
        {
            return BoolKeyPreferTrue(h.license_params, "restrict_storage_xen_motion");
        }

        public static bool RestrictChangedBlockTracking(Host h)
        {
            return BoolKeyPreferTrue(h.license_params, "restrict_cbt");
        }

        public virtual bool IsFreeLicense()
        {
            return edition == "free" || edition == "express";
        }

        public virtual bool IsFreeLicenseOrExpired()
        {
            // Only trust the license data once the cache is populated;
            // assume the worst (free/expired) otherwise.
            if (Connection != null && Connection.CacheIsPopulated)
                return IsFreeLicense() || LicenseExpiryUTC() < DateTime.UtcNow - Connection.ServerTimeOffset;
            return true;
        }

        // Note the inverted sense: HA is restricted unless "enable_xha" is present and true.
        public static bool RestrictHA(Host h)
        {
            return !BoolKey(h.license_params, "enable_xha");
        }

        public static bool RestrictPoolSecretRotation(Host h)
        {
            return BoolKeyPreferTrue(h.license_params, "restrict_pool_secret_rotation");
        }

        public static bool RestrictCertificateVerification(Host h)
        {
            return BoolKeyPreferTrue(h.license_params, "restrict_certificate_verification");
        }

        public static bool RestrictAlerts(Host h)
        {
            return BoolKeyPreferTrue(h.license_params, "restrict_email_alerting");
        }

        public static bool RestrictStorageChoices(Host h)
        {
            return BoolKeyPreferTrue(h.license_params, "restrict_netapp");
        }

        public static bool RestrictPerformanceGraphs(Host h)
        {
            return BoolKeyPreferTrue(h.license_params, "restrict_historical_performance");
        }

        public static bool RestrictCpuMasking(Host h)
        {
            return BoolKeyPreferTrue(h.license_params, "restrict_cpu_masking");
        }

        public static bool RestrictGpu(Host h)
        {
            return BoolKeyPreferTrue(h.license_params, "restrict_gpu");
        }

        public static bool RestrictUsbPassthrough(Host h)
        {
            return BoolKeyPreferTrue(h.license_params, "restrict_usb_passthrough");
        }

        public static bool RestrictVgpu(Host h)
        {
            return BoolKeyPreferTrue(h.license_params, "restrict_vgpu");
        }

        public static bool RestrictManagementOnVLAN(Host h)
        {
            return
BoolKeyPreferTrue(h.license_params, "restrict_management_on_vlan");
        }

        public static bool RestrictIntegratedGpuPassthrough(Host h)
        {
            return BoolKeyPreferTrue(h.license_params, "restrict_integrated_gpu_passthrough");
        }

        public static bool RestrictExportResourceData(Host h)
        {
            return BoolKeyPreferTrue(h.license_params, "restrict_export_resource_data");
        }

        // Restricted only if the key exists and is true.
        public static bool RestrictIntraPoolMigrate(Host h)
        {
            return BoolKey(h.license_params, "restrict_xen_motion");
        }

        /// <summary>
        /// Active directory is restricted only if the "restrict_ad" key exists and it is true
        /// </summary>
        public static bool RestrictAD(Host h)
        {
            return BoolKey(h.license_params, "restrict_ad");
        }

        public static bool RestrictReadCaching(Host h)
        {
            return BoolKeyPreferTrue(h.license_params, "restrict_read_caching");
        }

        public static bool RestrictHealthCheck(Host h)
        {
            return BoolKeyPreferTrue(h.license_params, "restrict_health_check");
        }

        /// <summary>
        /// Vss feature is restricted only if the "restrict_vss" key exists and it is true
        /// </summary>
        public static bool RestrictVss(Host h)
        {
            return BoolKey(h.license_params, "restrict_vss");
        }

        public static bool RestrictPoolSize(Host h)
        {
            return BoolKey(h.license_params, "restrict_pool_size");
        }

        public static bool RestrictPvsCache(Host h)
        {
            return BoolKeyPreferTrue(h.license_params, "restrict_pvs_proxy");
        }

        /// <summary>
        /// For Dundee and greater hosts: the feature is restricted only if the "restrict_ssl_legacy_switch" key exists and it is true
        /// For pre-Dundee hosts: the feature is restricted if the "restrict_ssl_legacy_switch" key is absent or it is true
        /// </summary>
        public static bool RestrictSslLegacySwitch(Host h)
        {
            return Helpers.DundeeOrGreater(h) ?
                BoolKey(h.license_params, "restrict_ssl_legacy_switch") :
                BoolKeyPreferTrue(h.license_params, "restrict_ssl_legacy_switch");
        }

        public static bool RestrictLivePatching(Host h)
        {
            return BoolKeyPreferTrue(h.license_params, "restrict_live_patching");
        }

        public static bool RestrictIGMPSnooping(Host h)
        {
            return BoolKeyPreferTrue(h.license_params, "restrict_igmp_snooping");
        }

        /// <summary>
        /// Ely and later expose a license flag; earlier hosts allow vCPU hotplug
        /// on an unexpired Premium edition only.
        /// </summary>
        public static bool RestrictVcpuHotplug(Host h)
        {
            if (Helpers.ElyOrGreater(h.Connection))
            {
                return BoolKeyPreferTrue(h.license_params, "restrict_set_vcpus_number_live");
            }

            // Pre-Ely hosts:
            // allowed on Premium edition only
            var hostEdition = GetEdition(h.edition);
            if (hostEdition == Edition.Premium)
            {
                return h.LicenseExpiryUTC() < DateTime.UtcNow - h.Connection.ServerTimeOffset; // restrict if the license has expired
            }

            return true;
        }

        /// <summary>
        /// The feature is restricted if the "restrict_rpu" key exists and it is true
        /// or if the key is absent and the host is unlicensed
        /// </summary>
        public static bool RestrictRpu(Host h)
        {
            return h.license_params.ContainsKey("restrict_rpu") ?
BoolKey(h.license_params, "restrict_rpu")
                : h.IsFreeLicenseOrExpired(); // restrict on Free edition or if the license has expired
        }

        public static bool RestrictCorosync(Host h)
        {
            return BoolKeyPreferTrue(h.license_params, "restrict_corosync");
        }

        #region Experimental Features

        public static bool CorosyncDisabled(Host h)
        {
            return RestrictCorosync(h) && FeatureDisabled(h, "corosync");
        }

        public static bool SriovNetworkDisabled(Host h)
        {
            return RestrictSriovNetwork(h) && FeatureDisabled(h, "network_sriov");
        }

        public static bool UefiBootDisabled(Host h)
        {
            return FeatureDisabled(h, "guefi");
        }

        public static bool UefiSecureBootDisabled(Host h)
        {
            return FeatureDisabled(h, "guefi-secureboot");
        }

        public static bool UefiBootExperimental(Host h)
        {
            return FeatureExperimental(h, "guefi");
        }

        public static bool UefiSecureBootExperimental(Host h)
        {
            return FeatureExperimental(h, "guefi-secureboot");
        }

        // True if the named feature exists on the host and is disabled.
        public static bool FeatureDisabled(Host h, string featureName)
        {
            foreach (var feature in h.Connection.ResolveAll(h.features))
            {
                if (feature.name_label.Equals(featureName, StringComparison.OrdinalIgnoreCase))
                    return !feature.enabled;
            }
            return false;
        }

        // True if the named feature exists on the host, is enabled, and is flagged experimental.
        public static bool FeatureExperimental(Host h, string featureName)
        {
            foreach (var feature in h.Connection.ResolveAll(h.features))
            {
                if (feature.name_label.Equals(featureName, StringComparison.OrdinalIgnoreCase))
                    return feature.enabled && feature.experimental;
            }
            return false;
        }

        #endregion

        /// <summary>
        /// True if this host has a PBD connecting it to the given SR.
        /// </summary>
        public bool HasPBDTo(SR sr)
        {
            foreach (XenRef<PBD> pbd in PBDs)
            {
                PBD thePBD = sr.Connection.Resolve<PBD>(pbd);
                if (thePBD != null && thePBD.SR.opaque_ref == sr.opaque_ref)
                {
                    return true;
                }
            }
            return false;
        }

        /// <summary>
        /// The PBD connecting this host to the given SR, or null if there is none.
        /// </summary>
        public PBD GetPBDTo(SR sr)
        {
            foreach (XenRef<PBD> pbd in PBDs)
            {
                PBD thePBD = sr.Connection.Resolve<PBD>(pbd);
                if (thePBD != null && thePBD.SR.opaque_ref == sr.opaque_ref)
                {
                    return thePBD;
                }
            }
            return null;
        }

        // Constants for other-config from CP-329
        public const String MULTIPATH = "multipathing";
        public const String MULTIPATH_HANDLE = "multipathhandle";
        public const String DMP = "dmp";

        /// <summary>
        /// Multipathing: a first-class field from Kolkata onwards, stored in
        /// other_config before that.
        /// </summary>
        public bool MultipathEnabled()
        {
            if (Helpers.KolkataOrGreater(this))
            {
                return multipathing;
            }
            return BoolKey(other_config, MULTIPATH);
        }

        public String MultipathHandle()
        {
            return Get(other_config, MULTIPATH_HANDLE);
        }

        public override int CompareTo(Host other)
        {
            // CA-20865 Sort in the following order:
            // * Coordinators first
            // * Then connected supporters
            // * Then disconnected servers
            // Within each group, in NaturalCompare order

            bool thisConnected = (Connection.IsConnected && Helpers.GetCoordinator(Connection) != null);
            bool otherConnected = (other.Connection.IsConnected && Helpers.GetCoordinator(other.Connection) != null);

            if (thisConnected && !otherConnected)
                return -1;
            else if (!thisConnected && otherConnected)
                return 1;
            else if (thisConnected)
            {
                bool thisIsCoordinator = IsCoordinator();
                bool otherIsCoordinator = other.IsCoordinator();

                if (thisIsCoordinator && !otherIsCoordinator)
                    return -1;
                else if (!thisIsCoordinator && otherIsCoordinator)
                    return 1;
            }

            return base.CompareTo(other);
        }

        /// <summary>
        /// True if this host is its pool's coordinator (compared by uuid).
        /// </summary>
        public virtual bool IsCoordinator()
        {
            Pool pool = Helpers.GetPoolOfOne(Connection);
            if (pool == null)
                return false;

            Host coordinator = Connection.Resolve<Host>(pool.master);
            return coordinator != null && coordinator.uuid == this.uuid;
        }

        /// <summary>
        /// Return this host's product version triplet (e.g. 5.6.100), or null if it can't be found.
        /// </summary>
        public virtual string ProductVersion()
        {
            return Get(software_version, "product_version");
        }

        // Read a marketing-version field from software_version, falling back to
        // ProductVersion() when the field is missing or empty.
        private string MarketingVersion(string field)
        {
            string s = Get(software_version, field);
            return string.IsNullOrEmpty(s) ? ProductVersion() : s;
        }

        /// <summary>
        /// Return this host's marketing version number (e.g. 5.6 Feature Pack 1),
        /// or ProductVersion (which can still be null) if it can't be found, including pre-Cowley hosts.
/// </summary>
        public string ProductVersionText()
        {
            return MarketingVersion("product_version_text");
        }

        /// <summary>
        /// Return this host's marketing version number in short form (e.g. 5.6 FP1),
        /// or ProductVersion (which can still be null) if it can't be found, including pre-Cowley hosts.
        /// </summary>
        public string ProductVersionTextShort()
        {
            return MarketingVersion("product_version_text_short");
        }

        /// <summary>
        /// Return this host's XCP version triplet (e.g. 1.0.50), or null if it can't be found,
        /// including all pre-Tampa hosts.
        /// </summary>
        public virtual string PlatformVersion()
        {
            return Get(software_version, "platform_version");
        }

        /// <summary>
        /// For legacy build numbers only (used to be integers + one char at the end)
        /// From Falcon, this property is not used.
        /// </summary>
        /// <remarks>
        /// Return the build number of this host, or -1 if none can be found. This will often be
        /// 0 or -1 for developer builds, so comparisons should generally treat those numbers as if
        /// they were brand new.
        /// </remarks>
        internal int BuildNumber()
        {
            Debug.Assert(!Helpers.ElyOrGreater(this));

            string bn = BuildNumberRaw();
            if (bn == null)
                return -1;

            // Strip the trailing non-digit suffix (e.g. "1234c" -> "1234") before parsing.
            while (bn.Length > 0 && !char.IsDigit(bn[bn.Length - 1]))
            {
                bn = bn.Substring(0, bn.Length - 1);
            }
            int result;
            if (int.TryParse(bn, out result))
                return result;
            else
                return -1;
        }

        /// <summary>
        /// Return the exact build_number of this host
        /// </summary>
        /// <remarks>
        /// null if not found
        /// </remarks>
        public virtual string BuildNumberRaw()
        {
            return Get(software_version, "build_number");
        }

        /// <summary>
        /// Return this host's product version and build number (e.g. 5.6.100.72258), or null if product version can't be found.
        /// </summary>
        public virtual string LongProductVersion()
        {
            string productVersion = ProductVersion();
            return productVersion != null
                ? string.Format("{0}.{1}", productVersion, Helpers.ElyOrGreater(this) ? BuildNumberRaw() : BuildNumber().ToString())
                : null;
        }

        /// <summary>
        /// Return the product_brand of this host, or null if none can be found.
        /// </summary>
        public string ProductBrand()
        {
            return Get(software_version, "product_brand");
        }

        /// <summary>
        /// The remote syslog target. May return null if not set on the server.
        /// </summary>
        public string GetSysLogDestination()
        {
            return logging != null && logging.ContainsKey("syslog_destination") ? logging["syslog_destination"] : null;
        }

        /// <summary>
        /// Set to null to unset
        /// </summary>
        public void SetSysLogDestination(string value)
        {
            logging = SetDictionaryKey(logging, "syslog_destination", value);
        }

        /// <summary>
        /// True if every patch that applies to this host has been applied
        /// (uuids compared case-insensitively).
        /// </summary>
        public static bool IsFullyPatched(Host host, IEnumerable<IXenConnection> connections)
        {
            List<Pool_patch> patches = Pool_patch.GetAllThatApply(host, connections);

            List<Pool_patch> appliedPatches = host.AppliedPatches();
            // Shortcut: assumes applied patches are a subset of applicable ones,
            // so equal counts imply equal sets.
            if (appliedPatches.Count == patches.Count)
                return true;

            foreach (Pool_patch patch in patches)
            {
                Pool_patch patch1 = patch;
                if (!appliedPatches.Exists(otherPatch => string.Equals(patch1.uuid, otherPatch.uuid, StringComparison.OrdinalIgnoreCase)))
                    return false;
            }

            return true;
        }

        public virtual List<Pool_patch> AppliedPatches()
        {
            List<Pool_patch> patches = new List<Pool_patch>();

            foreach (Host_patch hostPatch in Connection.ResolveAll(this.patches))
            {
                Pool_patch patch = Connection.Resolve(hostPatch.pool_patch);
                if (patch != null)
                    patches.Add(patch);
            }

            return patches;
        }

        public virtual List<Pool_update> AppliedUpdates()
        {
            var updates = new List<Pool_update>();

            foreach (var hostUpdate in Connection.ResolveAll(this.updates))
            {
                if (hostUpdate != null)
                    updates.Add(hostUpdate);
            }

            return updates;
        }

        public string XAPI_version()
        {
            return Get(software_version, "xapi");
        }

        public bool LinuxPackPresent()
        {
            return software_version.ContainsKey("xs:linux");
        }

        public bool HasCrashDumps()
        {
            return crashdumps != null && crashdumps.Count > 0;
        }

        /// <summary>
        /// True if this host's metrics report it as live.
        /// </summary>
        public bool IsLive()
        {
            if (Connection == null)
                return false;

            Host_metrics hm =
Connection.Resolve(metrics);
            return hm != null && hm.live;
        }

        public const string MAINTENANCE_MODE = "MAINTENANCE_MODE";

        public bool MaintenanceMode()
        {
            return BoolKey(other_config, MAINTENANCE_MODE);
        }

        private const string BOOT_TIME = "boot_time";

        /// <summary>
        /// The host's boot time as a Unix timestamp read from other_config,
        /// or 0.0 if absent or unparseable.
        /// </summary>
        public double BootTime()
        {
            if (other_config == null)
                return 0.0;

            if (!other_config.ContainsKey(BOOT_TIME))
                return 0.0;

            double bootTime;
            // NumberStyles.Any for consistency with AgentStartTime(); the value is
            // machine-generated, so always parse with the invariant culture.
            if (!double.TryParse(other_config[BOOT_TIME], NumberStyles.Any,
                                 CultureInfo.InvariantCulture, out bootTime))
                return 0.0;

            return bootTime;
        }

        public static double BootTime(Session session, string hostOpaqueRef)
        {
            var host = get_record(session, hostOpaqueRef);
            return host.BootTime();
        }

        /// <summary>
        /// Time since the host booted, adjusted by the server's clock offset,
        /// or null if the boot time is unknown.
        /// </summary>
        public PrettyTimeSpan Uptime()
        {
            double bootTime = BootTime();
            if (bootTime == 0.0)
                return null;
            return new PrettyTimeSpan(DateTime.UtcNow - Util.FromUnixTime(bootTime) - Connection.ServerTimeOffset);
        }

        private const string AGENT_START_TIME = "agent_start_time";

        /// <summary>
        /// The xapi agent's start time as a Unix timestamp read from other_config,
        /// or 0.0 if absent or unparseable.
        /// </summary>
        public double AgentStartTime()
        {
            if (other_config == null)
                return 0.0;

            if (!other_config.ContainsKey(AGENT_START_TIME))
                return 0.0;

            double agentStartTime;
            if (!double.TryParse(other_config[AGENT_START_TIME], NumberStyles.Any,
                                 CultureInfo.InvariantCulture, out agentStartTime))
                return 0.0;

            return agentStartTime;
        }

        public static double AgentStartTime(Session session, string hostOpaqueRef)
        {
            var host = get_record(session, hostOpaqueRef);
            return host.AgentStartTime();
        }

        /// <summary>
        /// Time since the xapi agent started, adjusted by the server's clock offset,
        /// or null if the start time is unknown.
        /// </summary>
        public PrettyTimeSpan AgentUptime()
        {
            double startTime = AgentStartTime();
            if (startTime == 0.0)
                return null;
            return new PrettyTimeSpan(DateTime.UtcNow - Util.FromUnixTime(startTime) - Connection.ServerTimeOffset);
        }

        // Get the path counts from the Multipath Boot From SAN feature (see PR-1034 and CP-1696).
        // Returns true if the Host.other_config contains the multipathed and mpath-boot keys,
        // and the mpath-boot key is parseable. In this case, current and max will contain the result;
        // otherwise they will contain zero.
        public bool GetBootPathCounts(out int current, out int max)
        {
            current = max = 0;
            return (BoolKey(other_config, "multipathed") &&
                    other_config.ContainsKey("mpath-boot") &&
                    PBD.ParsePathCounts(other_config["mpath-boot"], out current, out max));
        }

        public bool HasRunningVMs()
        {
            // 2 not 1, because the Control Domain doesn't count
            return resident_VMs != null && resident_VMs.Count >= 2;
        }

        public List<XenRef<VM>> GetRunningPvVMs()
        {
            var vms = from XenRef<VM> vmref in resident_VMs
                      let vm = Connection.Resolve(vmref)
                      where vm != null && vm.is_a_real_vm() && !vm.IsHVM()
                      select vmref;
            return vms.ToList();
        }

        public List<XenRef<VM>> GetRunningHvmVMs()
        {
            var vms = from XenRef<VM> vmref in resident_VMs
                      let vm = Connection.Resolve(vmref)
                      where vm != null && vm.is_a_real_vm() && vm.IsHVM()
                      select vmref;
            return vms.ToList();
        }

        public List<XenRef<VM>> GetRunningVMs()
        {
            var vms = from XenRef<VM> vmref in resident_VMs
                      let vm = Connection.Resolve(vmref)
                      where vm != null && vm.is_a_real_vm()
                      select vmref;
            return vms.ToList();
        }

        #region Save Evacuated VMs for later

        public const String MAINTENANCE_MODE_EVACUATED_VMS_MIGRATED = "MAINTENANCE_MODE_EVACUATED_VMS_MIGRATED";
        public const String MAINTENANCE_MODE_EVACUATED_VMS_SUSPENDED = "MAINTENANCE_MODE_EVACUATED_VMS_SUSPENDED";
        public const String MAINTENANCE_MODE_EVACUATED_VMS_HALTED = "MAINTENANCE_MODE_EVACUATED_VMS_HALTED";

        /// <summary>
        /// Save the list of VMs on this host, so we can try and put them back when finished.
        /// This may get run multiple times, after which some vms will have been suspended / shutdown.
/// </summary>
        /// <param name="session">Pass in the session you want to use for the other config writing</param>
        public void SaveEvacuatedVMs(Session session)
        {
            //Program.AssertOffEventThread();

            // NOTE: this local deliberately shadows the opaque_ref field; it is
            // this host's ref as resolved through the given session.
            XenRef<Host> opaque_ref = get_by_uuid(session, uuid);

            List<VM> migratedVMs = GetVMs(MAINTENANCE_MODE_EVACUATED_VMS_MIGRATED);
            List<VM> suspendedVMs = GetVMs(MAINTENANCE_MODE_EVACUATED_VMS_SUSPENDED);
            List<VM> haltedVMs = GetVMs(MAINTENANCE_MODE_EVACUATED_VMS_HALTED);

            List<VM> allVMs = new List<VM>();
            allVMs.AddRange(migratedVMs);
            allVMs.AddRange(suspendedVMs);
            allVMs.AddRange(haltedVMs);

            // First time round there will be no saved VMs,
            // (or less saved VMs than currently resident)
            // so just save them all as migrated
            // don't forget the control domain
            if (allVMs.Count < resident_VMs.Count - 1)
            {
                SaveVMList(session, opaque_ref, MAINTENANCE_MODE_EVACUATED_VMS_MIGRATED, Connection.ResolveAll(resident_VMs));
                return;
            }

            // We've been round once, so just make sure all the vms are in the correct list
            // and then save the lists again
            migratedVMs.Clear();
            suspendedVMs.Clear();
            haltedVMs.Clear();

            foreach (VM vm in allVMs)
            {
                switch (vm.power_state)
                {
                    case vm_power_state.Halted:
                        haltedVMs.Add(vm);
                        break;
                    case vm_power_state.Running:
                        migratedVMs.Add(vm);
                        break;
                    case vm_power_state.Suspended:
                        suspendedVMs.Add(vm);
                        break;
                }
            }

            SaveVMList(session, opaque_ref, MAINTENANCE_MODE_EVACUATED_VMS_MIGRATED, migratedVMs);
            SaveVMList(session, opaque_ref, MAINTENANCE_MODE_EVACUATED_VMS_HALTED, haltedVMs);
            SaveVMList(session, opaque_ref, MAINTENANCE_MODE_EVACUATED_VMS_SUSPENDED, suspendedVMs);
        }

        // Persist the uuids of the given VMs (excluding control domains) as a
        // comma-separated list under the given other_config key.
        private static void SaveVMList(Session session, String serverOpaqueRef, String key, List<VM> vms)
        {
            //Program.AssertOffEventThread();

            List<String> vmUUIDs = new List<String>();

            foreach (VM vm in vms)
            {
                if (vm.is_control_domain)
                    continue;
                vmUUIDs.Add(vm.uuid);
            }

            Host.remove_from_other_config(session, serverOpaqueRef, key);
            Host.add_to_other_config(session, serverOpaqueRef, key, String.Join(",", vmUUIDs.ToArray()));
        }

        // Look up the cached VMs whose uuids are stored under the given other_config key.
        private List<VM> GetVMs(String key)
        {
            List<VM> vms = new List<VM>();

            if (other_config == null || !other_config.ContainsKey(key))
                return vms;

            String vmUUIDs = other_config[key];
            if (String.IsNullOrEmpty(vmUUIDs))
                return vms;

            foreach (String vmUUID in vmUUIDs.Split(new char[] { ',' }))
                foreach (VM vm in Connection.Cache.VMs)
                    if (vm.uuid == vmUUID)
                    {
                        if (!vms.Contains(vm))
                            vms.Add(vm);
                        break;
                    }

            return vms;
        }

        public void ClearEvacuatedVMs(Session session)
        {
            var hostRef = get_by_uuid(session, uuid);
            ClearEvacuatedVMs(session, hostRef);
        }

        public static void ClearEvacuatedVMs(Session session, XenRef<Host> hostRef)
        {
            remove_from_other_config(session, hostRef, MAINTENANCE_MODE_EVACUATED_VMS_MIGRATED);
            remove_from_other_config(session, hostRef, MAINTENANCE_MODE_EVACUATED_VMS_HALTED);
            remove_from_other_config(session, hostRef, MAINTENANCE_MODE_EVACUATED_VMS_SUSPENDED);
        }

        public List<VM> GetMigratedEvacuatedVMs()
        {
            return GetEvacuatedVMs(MAINTENANCE_MODE_EVACUATED_VMS_MIGRATED, vm_power_state.Running);
        }

        public List<VM> GetSuspendedEvacuatedVMs()
        {
            return GetEvacuatedVMs(MAINTENANCE_MODE_EVACUATED_VMS_SUSPENDED, vm_power_state.Suspended);
        }

        public List<VM> GetHaltedEvacuatedVMs()
        {
            return GetEvacuatedVMs(MAINTENANCE_MODE_EVACUATED_VMS_HALTED, vm_power_state.Halted);
        }

        // Saved VMs whose power state no longer matches the expected one are
        // dropped from the result.
        private List<VM> GetEvacuatedVMs(String key, vm_power_state expectedPowerState)
        {
            List<VM> vms = GetVMs(key);

            foreach (VM vm in vms.ToArray())
                if (vm.power_state != expectedPowerState)
                    vms.Remove(vm);

            return vms;
        }

        #endregion

        /// <summary>
        /// Will return null if cannot find connection or any control domain in list of vms
        /// </summary>
        public VM ControlDomainZero()
        {
            if (Connection == null)
                return null;

            if (!Helper.IsNullOrEmptyOpaqueRef(control_domain))
                return Connection.Resolve(control_domain);

            var vms = Connection.ResolveAll(resident_VMs);
            return vms.FirstOrDefault(vm => vm.is_control_domain && vm.domid == 0);
        }

        /// <summary>
        /// Resident control domains other than the primary one; null if there is no connection.
        /// </summary>
        public IEnumerable<VM> OtherControlDomains()
        {
            if (Connection == null)
return null;

            var vms = Connection.ResolveAll(resident_VMs);

            if (!Helper.IsNullOrEmptyOpaqueRef(control_domain))
                return vms.Where(v => v.is_control_domain && v.opaque_ref != control_domain);

            return vms.Where(v => v.is_control_domain && v.domid != 0);
        }

        /// <summary>
        /// Interpret a value from the software_version dictionary as an int, or 0 if we couldn't parse it.
        /// </summary>
        private int GetSVAsInt(string key)
        {
            string s = Get(software_version, key);
            if (s == null)
                return 0;

            return (int)Helper.GetAPIVersion(s);
        }

        /// <summary>
        /// The xencenter_min as an int, or 0 if we couldn't parse it.
        /// </summary>
        public int XenCenterMin()
        {
            return GetSVAsInt("xencenter_min");
        }

        /// <summary>
        /// The xencenter_max as an int, or 0 if we couldn't parse it.
        /// </summary>
        public int XenCenterMax()
        {
            return GetSVAsInt("xencenter_max");
        }

        public string GetDatabaseSchema()
        {
            return Get(software_version, "db_schema");
        }

        /// <summary>
        /// The amount of memory free on the host. For George and earlier hosts, we use to use
        /// the obvious Host_metrics.memory_free. Since Midnight Ride, however, we use
        /// the same calculation as xapi, adding the used memory and the virtualisation overheads
        /// on each of the VMs. This is a more conservative estimate (i.e., it reports less memory
        /// free), but it's the one we need to make the memory go down to zero when ballooning
        /// takes place.
        /// </summary>
        public long memory_free_calc()
        {
            Host_metrics host_metrics = Connection.Resolve(this.metrics);
            if (host_metrics == null)
                return 0;

            long used = memory_overhead;
            foreach (VM vm in Connection.ResolveAll(resident_VMs))
            {
                used += vm.memory_overhead;
                VM_metrics vm_metrics = vm.Connection.Resolve(vm.metrics);
                if (vm_metrics != null)
                    used += vm_metrics.memory_actual;
            }

            // This hack is needed because of bug CA-32509. xapi uses a deliberately generous
            // estimate of VM.memory_overhead: but the low-level squeezer code doesn't (and can't)
            // know about the same calculation, and so uses some of this memory_overhead for the
            // VM's memory_actual. This causes up to 1MB of double-counting per VM.
            return ((host_metrics.memory_total > used) ? (host_metrics.memory_total - used) : 0);
        }

        /// <summary>
        /// The total of all the dynamic_minimum memories of all resident VMs other than the control domain.
        /// For non-ballonable VMs, we use the static_maximum instead, because the dynamic_minimum has no effect.
        /// </summary>
        public long tot_dyn_min()
        {
            long ans = 0;
            foreach (VM vm in Connection.ResolveAll(resident_VMs))
            {
                if (!vm.is_control_domain)
                    ans += vm.has_ballooning() ? vm.memory_dynamic_min : vm.memory_static_max;
            }
            return ans;
        }

        /// <summary>
        /// The total of all the dynamic_maximum memories of all resident VMs other than the control domain.
        /// For non-ballonable VMs, we use the static_maximum instead, because the dynamic_maximum has no effect.
        /// </summary>
        public long tot_dyn_max()
        {
            long ans = 0;
            foreach (VM vm in Connection.ResolveAll(resident_VMs))
            {
                if (!vm.is_control_domain)
                    ans += vm.has_ballooning() ? vm.memory_dynamic_max : vm.memory_static_max;
            }
            return ans;
        }

        /// <summary>
        /// The amount of available memory on the host. This is not the same as the amount of free memory, because
        /// it includes the memory that could be freed by reducing balloonable VMs to their dynamic_minimum memory.
        /// </summary>
        public long memory_available_calc()
        {
            Host_metrics host_metrics = Connection.Resolve(this.metrics);
            if (host_metrics == null)
                return 0;

            long avail = host_metrics.memory_total - tot_dyn_min() - xen_memory_calc();
            if (avail < 0)
                avail = 0; // I don't think this can happen, but I'm nervous about CA-32509: play it safe
            return avail;
        }

        /// <summary>
        /// The amount of memory used by Xen, including the control domain plus host and VM overheads.
/// Used to calculate this as total - free - tot_vm_mem, but that caused xen_mem to jump around
        /// during VM startup/shutdown because some changes happen before others.
        /// </summary>
        public long xen_memory_calc()
        {
            long xen_mem = memory_overhead;
            foreach (VM vm in Connection.ResolveAll(resident_VMs))
            {
                xen_mem += vm.memory_overhead;
                if (vm.is_control_domain)
                {
                    VM_metrics vmMetrics = vm.Connection.Resolve(vm.metrics);
                    if (vmMetrics != null)
                        xen_mem += vmMetrics.memory_actual;
                }
            }
            return xen_mem;
        }

        /// <summary>
        /// The control domain's actual memory, falling back to its dynamic minimum
        /// when no metrics are available; 0 if there is no control domain.
        /// </summary>
        public long dom0_memory()
        {
            long dom0_mem = 0;
            VM vm = ControlDomainZero();
            if (vm != null)
            {
                VM_metrics vmMetrics = vm.Connection.Resolve(vm.metrics);
                dom0_mem = vmMetrics != null ? vmMetrics.memory_actual : vm.memory_dynamic_min;
            }
            return dom0_mem;
        }

        /// <summary>
        /// The headroom between the control domain's static maximum and static minimum memory.
        /// </summary>
        public long dom0_memory_extra()
        {
            VM vm = ControlDomainZero();
            return vm != null ? vm.memory_static_max - vm.memory_static_min : 0;
        }

        /// <summary>
        /// Friendly string showing memory usage on the host
        /// </summary>
        public string HostMemoryString()
        {
            Host_metrics m = Connection.Resolve(metrics);
            if (m == null)
                return Messages.GENERAL_UNKNOWN;

            long ServerMBAvail = memory_available_calc();
            long ServerMBTotal = m.memory_total;

            return string.Format(Messages.GENERAL_MEMORY_SERVER_FREE,
                Util.MemorySizeStringSuitableUnits(ServerMBAvail, true),
                Util.MemorySizeStringSuitableUnits(ServerMBTotal, true));
        }

        /// <summary>
        /// A friendly string for the XenMemory on this host
        /// </summary>
        public string XenMemoryString()
        {
            if (Connection.Resolve(metrics) == null)
                return Messages.GENERAL_UNKNOWN;

            return Util.MemorySizeStringSuitableUnits(xen_memory_calc(), true);
        }

        /// <summary>
        /// A friendly string of the resident VM's memory usage, with each entry separated by a line break
        /// </summary>
        public string ResidentVMMemoryUsageString()
        {
            Host_metrics m = Connection.Resolve(metrics);
            if (m == null)
                return Messages.GENERAL_UNKNOWN;
            else
            {
                List<string> lines = new List<string>();

                foreach (VM vm in Connection.ResolveAll(resident_VMs))
                {
                    if (vm.is_control_domain)
                        continue;

                    VM_metrics VMMetrics = Connection.Resolve(vm.metrics);
                    if (VMMetrics == null)
                        continue;

                    string message = string.Format(Messages.GENERAL_MEMORY_VM_USED, vm.Name(),
                        Util.MemorySizeStringSuitableUnits(VMMetrics.memory_actual, true));
                    lines.Add(message);
                }

                return string.Join("\n", lines.ToArray());
            }
        }

        /// <summary>
        /// Wait about two minutes for all the PBDs on this host to become plugged:
        /// if they do not, try and plug them. (Refs: CA-41219, CA-41305, CA-66496).
        /// </summary>
        public void CheckAndPlugPBDs()
        {
            bool allPBDsReady = false;
            int timeout = 120;
            log.DebugFormat("Waiting for PBDs on host {0} to become plugged", Name());

            while (timeout > 0)
            {
                if (enabled) // if the Host is not yet enabled, pbd.currently_attached may not be accurate: see CA-66496.
                {
                    allPBDsReady = true;
                    foreach (var pbdRef in PBDs)
                    {
                        var pbd = Connection.Resolve(pbdRef);
                        if (pbd == null || pbd.currently_attached)
                            continue;

                        if (Helpers.StockholmOrGreater(this)) //CA-350406
                        {
                            var sr = Connection.Resolve(pbd.SR);
                            if (sr != null && sr.is_tools_sr)
                                continue;
                        }

                        allPBDsReady = false;
                        break;
                    }
                }

                if (allPBDsReady)
                    return;

                Thread.Sleep(1000);
                timeout--;
            }

            // Timed out: attempt an explicit plug of anything still unplugged.
            foreach (var pbdRef in PBDs)
            {
                var pbd = Connection.Resolve(pbdRef);
                if (pbd == null || pbd.currently_attached)
                    continue;

                if (Helpers.StockholmOrGreater(this))
                {
                    var sr = Connection.Resolve(pbd.SR);
                    if (sr != null && sr.is_tools_sr)
                        continue;
                }

                Session session = Connection.DuplicateSession();

                // If we still haven't plugged, then try and plug it - this will probably
                // fail, but at least we'll get a better error message.
try
                {
                    log.DebugFormat("Plugging PBD {0} on host {1}", pbd.Name(), Name());
                    PBD.plug(session, pbd.opaque_ref);
                }
                catch (Exception e)
                {
                    // Best-effort: log and carry on to the next PBD.
                    log.Debug(string.Format("Error plugging PBD {0} on host {1}", pbd.Name(), Name()), e);
                }
            }
        }

        /// <summary>
        /// Whether the host is running the vSwitch network stack
        /// </summary>
        public bool vSwitchNetworkBackend()
        {
            return software_version.ContainsKey("network_backend") &&
                   software_version["network_backend"] == "openvswitch";
        }

        /// <summary>
        /// The number of CPU sockets the host has
        /// Return 0 if a problem is found
        /// </summary>
        public virtual int CpuSockets()
        {
            const string key = "socket_count";
            const int defaultSockets = 0;

            if (cpu_info == null || !cpu_info.ContainsKey(key))
                return defaultSockets;

            int sockets;
            bool parsed = int.TryParse(cpu_info[key], out sockets);
            if (!parsed)
                return defaultSockets;

            return sockets;
        }

        /// <summary>
        /// The number of cpus the host has
        /// Return 0 if a problem is found
        /// </summary>
        public int CpuCount()
        {
            const string key = "cpu_count";
            const int defaultCpuCount = 0;

            if (cpu_info == null || !cpu_info.ContainsKey(key))
                return defaultCpuCount;

            int cpuCount;
            bool parsed = int.TryParse(cpu_info[key], out cpuCount);
            if (!parsed)
                return defaultCpuCount;

            return cpuCount;
        }

        /// <summary>
        /// The number of cores per socket the host has
        /// Return 0 if a problem is found
        /// </summary>
        public int CoresPerSocket()
        {
            var sockets = CpuSockets();
            var cpuCount = CpuCount();
            if (sockets > 0 && cpuCount > 0)
                return (cpuCount/sockets);

            return 0;
        }

        /// <summary>
        /// Is the host allowed to install hotfixes or are they restricted?
        /// </summary>
        public virtual bool CanApplyHotfixes()
        {
            return !Helpers.FeatureForbidden(Connection, RestrictHotfixApply);
        }

        /// <summary>
        /// Grace is either upgrade or regular
        /// </summary>
        public virtual bool InGrace()
        {
            return license_params.ContainsKey("grace");
        }

        internal override string LocationString()
        {
            //for standalone hosts we do not show redundant location info
            return Helpers.GetPool(Connection) == null ? string.Empty : base.LocationString();
        }

        public bool EnterpriseFeaturesEnabled()
        {
            var hostEdition = GetEdition(edition);
            return EligibleForSupport() && (hostEdition == Edition.EnterprisePerSocket || hostEdition == Edition.EnterprisePerUser
                || hostEdition == Edition.PerSocket);
        }

        public bool DesktopPlusFeaturesEnabled()
        {
            return GetEdition(edition) == Edition.DesktopPlus;
        }

        public bool DesktopFeaturesEnabled()
        {
            return GetEdition(edition) == Edition.Desktop;
        }

        public bool DesktopCloudFeaturesEnabled()
        {
            return GetEdition(edition) == Edition.DesktopCloud;
        }

        public bool PremiumFeaturesEnabled()
        {
            return GetEdition(edition) == Edition.Premium;
        }

        public bool StandardFeaturesEnabled()
        {
            return GetEdition(edition) == Edition.Standard;
        }

        public bool EligibleForSupport()
        {
            return GetEdition(edition) != Edition.Free;
        }

        #region Supplemental Packs

        // From http://scale.uk.xensource.com/confluence/display/engp/Supplemental+Pack+product+design+notes#SupplementalPackproductdesignnotes-XenAPI:
        // The supplemental packs that are installed on a host are listed in the host's Host.software_version field in the data model.
        // The keys of the entries have the form "<originator>:<name>", the value is "<description>, version <version>", appended by
        // ", build <build>" if the build number is present in the XML file, and further appended by ", homogeneous" if the
        // enforce-homogeneity attribute is present and set to true.
//
// Examples:
//    xs:main: Base Pack, version 5.5.900, build 19689c
//    xs:linux: Linux Pack, version 5.5.900, build 19689c, homogeneous

// Parsed representation of one supplemental-pack entry from Host.software_version.
public class SuppPack
{
    private string originator, name, description, version, build;
    private bool homogeneous;

    public string Originator { get { return originator; } }
    public string Name { get { return name; } }
    public string Description { get { return description; } }
    public string Version { get { return version; } }
    public string Build { get { return build; } }
    public bool Homogeneous { get { return homogeneous; } }

    // Key form "<originator>:<name>" as stored in software_version.
    public string OriginatorAndName
    {
        get { return originator + ":" + name; }
    }

    // True only when the constructor successfully parsed both key and value.
    private bool parsed = false;
    public bool IsValid
    {
        get { return parsed; }
    }

    public string LongDescription
    {
        get { return string.Format(Messages.SUPP_PACK_DESCRIPTION, description, version); }
    }

    /// <summary>
    /// Try to parse the supp pack information from one key of software_version
    /// </summary>
    public SuppPack(string key, string value)
    {
        // Parse the key
        string[] splitKey = key.Split(':');
        if (splitKey.Length != 2)
            return;
        originator = splitKey[0];
        name = splitKey[1];

        // Parse the value. The description may contain arbitrary text, so we have to be a bit subtle:
        // we first search from the end to find where the description ends.
        int x = value.LastIndexOf(", version ");
        if (x <= 0)
            return;
        description = value.Substring(0, x);

        // Everything after ", version " is "<version>[, build <build>][, homogeneous]".
        string val = value.Substring(x + 10);
        string[] delims = new string[] {", "};
        string[] splitValue = val.Split(delims, StringSplitOptions.None);
        if (splitValue.Length == 0 || splitValue.Length > 3)
            return;
        version = splitValue[0];
        if (splitValue.Length >= 2)
        {
            if (!splitValue[1].StartsWith("build "))
                return;
            build = splitValue[1].Substring(6);
        }
        if (splitValue.Length >= 3)
        {
            if (splitValue[2] != "homogeneous")
                return;
            homogeneous = true;
        }
        else
            homogeneous = false;

        // Mark the entry as valid only after every section parsed cleanly.
        parsed = true;
    }
}

/// <summary>
/// Return a list of the supplemental packs
/// </summary>
public List<SuppPack> SuppPacks()
{
    List<SuppPack> packs = new List<SuppPack>();
    if (software_version == null)
        return packs;
    // Every software_version entry is offered to the parser; invalid ones are skipped.
    foreach (string key in software_version.Keys)
    {
        SuppPack pack = new SuppPack(key, software_version[key]);
        if (pack.IsValid)
            packs.Add(pack);
    }
    return packs;
}

#endregion

/// <summary>
/// The PGPU that is the system display device or null
/// </summary>
public PGPU SystemDisplayDevice()
{
    var pGpus = Connection.ResolveAll(PGPUs);
    return pGpus.FirstOrDefault(pGpu => pGpu.is_system_display_device);
}

/// <summary>
/// Is the host allowed to enable/disable integrated GPU passthrough or is the feature unavailable/restricted?
/// </summary>
public bool CanEnableDisableIntegratedGpu()
{
    return Helpers.GpuCapability(Connection) &&
        !Helpers.FeatureForbidden(Connection, RestrictIntegratedGpuPassthrough);
}

// Calls the prepare_host_upgrade.py plugin to discover the version the host would
// upgrade to. Returns true when at least one of platform/product version was found.
// Best-effort: any plugin/deserialization failure is logged and reported as false.
public static bool TryGetUpgradeVersion(Host host, Dictionary<string, string> installMethodConfig, out string platformVersion, out string productVersion)
{
    platformVersion = productVersion = null;
    try
    {
        var result = call_plugin(host.Connection.Session, host.opaque_ref,
            "prepare_host_upgrade.py", "getVersion", installMethodConfig);
        var serializer = new JavaScriptSerializer();
        var version = (Dictionary<string, object>)serializer.DeserializeObject(result);
        platformVersion = version.ContainsKey("platform-version") ? (string)version["platform-version"] : null;
        productVersion = version.ContainsKey("product-version") ? (string)version["product-version"] : null;
        return platformVersion != null || productVersion != null;
    }
    catch (Exception exception)
    {
        log.WarnFormat("Plugin call prepare_host_upgrade.getVersion on {0} failed with {1}",
            host.Name(), exception.Message);
        return false;
    }
}

#region IEquatable<Host> Members

/// <summary>
/// Indicates whether the current object is equal to the specified object. This calls the implementation from XenObject.
/// This implementation is required for ToStringWrapper.
/// </summary>
public virtual bool Equals(Host other)
{
    return base.Equals(other);
}

#endregion
}
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Collections.ObjectModel;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Reflection.Metadata;
using System.Reflection.Metadata.Ecma335;
using System.Threading;
using Microsoft.CodeAnalysis.CSharp.Symbols;
using Microsoft.CodeAnalysis.CSharp.Symbols.Metadata.PE;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.CodeAnalysis.Emit;
using Microsoft.CodeAnalysis.ExpressionEvaluator;
using Microsoft.DiaSymReader;
using Microsoft.VisualStudio.Debugger.Evaluation;

namespace Microsoft.CodeAnalysis.CSharp.ExpressionEvaluator
{
    // Debugger expression-evaluator context: compiles user expressions against the
    // state (locals, hoisted locals, current frame) of a method or type being debugged.
    internal sealed class EvaluationContext : EvaluationContextBase
    {
        // Names of the synthesized container type and method that host compiled expressions.
        private const string TypeName = "<>x";
        private const string MethodName = "<>m0";
        internal const bool IsLocalScopeEndInclusive = false;

        internal readonly MethodContextReuseConstraints? MethodContextReuseConstraints;
        internal readonly CSharpCompilation Compilation;

        private readonly MethodSymbol _currentFrame;
        private readonly ImmutableArray<LocalSymbol> _locals;
        private readonly InScopeHoistedLocals _inScopeHoistedLocals;
        private readonly MethodDebugInfo<TypeSymbol, LocalSymbol> _methodDebugInfo;

        private EvaluationContext(
            MethodContextReuseConstraints? methodContextReuseConstraints,
            CSharpCompilation compilation,
            MethodSymbol currentFrame,
            ImmutableArray<LocalSymbol> locals,
            InScopeHoistedLocals inScopeHoistedLocals,
            MethodDebugInfo<TypeSymbol, LocalSymbol> methodDebugInfo)
        {
            Debug.Assert(inScopeHoistedLocals != null);
            Debug.Assert(methodDebugInfo != null);

            this.MethodContextReuseConstraints = methodContextReuseConstraints;
            this.Compilation = compilation;
            _currentFrame = currentFrame;
            _locals = locals;
            _inScopeHoistedLocals = inScopeHoistedLocals;
            _methodDebugInfo = methodDebugInfo;
        }

        /// <summary>
        /// Create a context for evaluating expressions at a type scope.
        /// </summary>
        /// <param name="previous">Previous context, if any, for possible re-use.</param>
        /// <param name="metadataBlocks">Module metadata</param>
        /// <param name="moduleVersionId">Module containing type</param>
        /// <param name="typeToken">Type metadata token</param>
        /// <returns>Evaluation context</returns>
        /// <remarks>
        /// No locals since locals are associated with methods, not types.
        /// </remarks>
        internal static EvaluationContext CreateTypeContext(
            CSharpMetadataContext previous,
            ImmutableArray<MetadataBlock> metadataBlocks,
            Guid moduleVersionId,
            int typeToken)
        {
            // Re-use the previous compilation if possible.
            var compilation = previous.Matches(metadataBlocks) ?
                previous.Compilation :
                metadataBlocks.ToCompilation();
            return CreateTypeContext(compilation, moduleVersionId, typeToken);
        }

        internal static EvaluationContext CreateTypeContext(
            CSharpCompilation compilation,
            Guid moduleVersionId,
            int typeToken)
        {
            Debug.Assert(MetadataTokens.Handle(typeToken).Kind == HandleKind.TypeDefinition);
            var currentType = compilation.GetType(moduleVersionId, typeToken);
            Debug.Assert((object)currentType != null);
            // Type scope has no real frame, so a synthesized context method stands in.
            var currentFrame = new SynthesizedContextMethodSymbol(currentType);
            return new EvaluationContext(
                null,
                compilation,
                currentFrame,
                default(ImmutableArray<LocalSymbol>),
                InScopeHoistedLocals.Empty,
                MethodDebugInfo<TypeSymbol, LocalSymbol>.None);
        }

        /// <summary>
        /// Create a context for evaluating expressions within a method scope.
        /// </summary>
        /// <param name="previous">Previous context, if any, for possible re-use.</param>
        /// <param name="metadataBlocks">Module metadata</param>
        /// <param name="symReader"><see cref="ISymUnmanagedReader"/> for PDB associated with <paramref name="moduleVersionId"/></param>
        /// <param name="moduleVersionId">Module containing method</param>
        /// <param name="methodToken">Method metadata token</param>
        /// <param name="methodVersion">Method version.</param>
        /// <param name="ilOffset">IL offset of instruction pointer in method</param>
        /// <param name="localSignatureToken">Method local signature token</param>
        /// <returns>Evaluation context</returns>
        internal static EvaluationContext CreateMethodContext(
            CSharpMetadataContext previous,
            ImmutableArray<MetadataBlock> metadataBlocks,
            object symReader,
            Guid moduleVersionId,
            int methodToken,
            int methodVersion,
            uint ilOffset,
            int localSignatureToken)
        {
            var offset = NormalizeILOffset(ilOffset);

            // Re-use the previous compilation if possible.
            CSharpCompilation compilation;
            if (previous.Matches(metadataBlocks))
            {
                // Re-use entire context if method scope has not changed.
                var previousContext = previous.EvaluationContext;
                if (previousContext != null &&
                    previousContext.MethodContextReuseConstraints.HasValue &&
                    previousContext.MethodContextReuseConstraints.GetValueOrDefault().AreSatisfied(moduleVersionId, methodToken, methodVersion, offset))
                {
                    return previousContext;
                }
                compilation = previous.Compilation;
            }
            else
            {
                compilation = metadataBlocks.ToCompilation();
            }

            return CreateMethodContext(
                compilation,
                symReader,
                moduleVersionId,
                methodToken,
                methodVersion,
                offset,
                localSignatureToken);
        }

        internal static EvaluationContext CreateMethodContext(
            CSharpCompilation compilation,
            object symReader,
            Guid moduleVersionId,
            int methodToken,
            int methodVersion,
            uint ilOffset,
            int localSignatureToken)
        {
            return CreateMethodContext(
                compilation,
                symReader,
                moduleVersionId,
                methodToken,
                methodVersion,
                NormalizeILOffset(ilOffset),
                localSignatureToken);
        }

        // Core implementation: resolves the current method frame from metadata, reads its
        // debug info from the PDB, and materializes the locals/hoisted-locals visible at ilOffset.
        private static EvaluationContext CreateMethodContext(
            CSharpCompilation compilation,
            object symReader,
            Guid moduleVersionId,
            int methodToken,
            int methodVersion,
            int ilOffset,
            int localSignatureToken)
        {
            var methodHandle = (MethodDefinitionHandle)MetadataTokens.Handle(methodToken);
            // A zero local-signature token means the method declares no locals.
            var localSignatureHandle = (localSignatureToken != 0) ?
                (StandaloneSignatureHandle)MetadataTokens.Handle(localSignatureToken) :
                default(StandaloneSignatureHandle);

            var currentFrame = compilation.GetMethod(moduleVersionId, methodHandle);
            Debug.Assert((object)currentFrame != null);
            var symbolProvider = new CSharpEESymbolProvider(compilation.SourceAssembly, (PEModuleSymbol)currentFrame.ContainingModule, currentFrame);

            var metadataDecoder = new MetadataDecoder((PEModuleSymbol)currentFrame.ContainingModule, currentFrame);
            var localInfo = metadataDecoder.GetLocalInfo(localSignatureHandle);

            var typedSymReader = (ISymUnmanagedReader3)symReader;

            var inScopeHoistedLocals = InScopeHoistedLocals.Empty;

            var debugInfo = MethodDebugInfo<TypeSymbol, LocalSymbol>.ReadMethodDebugInfo(typedSymReader, symbolProvider, methodToken, methodVersion, ilOffset, isVisualBasicMethod: false);
            var reuseSpan = debugInfo.ReuseSpan;

            var localsBuilder = ArrayBuilder<LocalSymbol>.GetInstance();
            MethodDebugInfo<TypeSymbol, LocalSymbol>.GetLocals(
                localsBuilder,
                symbolProvider,
                debugInfo.LocalVariableNames,
                localInfo,
                debugInfo.DynamicLocalMap,
                debugInfo.TupleLocalMap);

            if (!debugInfo.HoistedLocalScopeRecords.IsDefaultOrEmpty)
            {
                // NOTE: may also narrow reuseSpan to the scope of the hoisted locals.
                inScopeHoistedLocals = new CSharpInScopeHoistedLocals(debugInfo.GetInScopeHoistedLocalIndices(ilOffset, ref reuseSpan));
            }

            localsBuilder.AddRange(debugInfo.LocalConstants);

            return new EvaluationContext(
                new MethodContextReuseConstraints(moduleVersionId, methodToken, methodVersion, reuseSpan),
                compilation,
                currentFrame,
                localsBuilder.ToImmutableAndFree(),
                inScopeHoistedLocals,
                debugInfo);
        }

        internal CompilationContext CreateCompilationContext(CSharpSyntaxNode syntax)
        {
            return new CompilationContext(
                this.Compilation,
                _currentFrame,
                _locals,
                _inScopeHoistedLocals,
                _methodDebugInfo,
                syntax);
        }

        internal override CompileResult CompileExpression(
            string expr,
            DkmEvaluationFlags compilationFlags,
            ImmutableArray<Alias> aliases,
            DiagnosticBag diagnostics,
            out ResultProperties resultProperties,
            Microsoft.CodeAnalysis.CodeGen.CompilationTestData testData)
        {
            ReadOnlyCollection<string> formatSpecifiers;
            var syntax = Parse(expr, (compilationFlags & DkmEvaluationFlags.TreatAsExpression) != 0, diagnostics, out formatSpecifiers);
            if (syntax == null)
            {
                resultProperties = default(ResultProperties);
                return null;
            }

            var context = this.CreateCompilationContext(syntax);
            ResultProperties properties;
            var moduleBuilder = context.CompileExpression(TypeName, MethodName, aliases, testData, diagnostics, out properties);
            if (moduleBuilder == null)
            {
                resultProperties = default(ResultProperties);
                return null;
            }

            // Emit the synthesized assembly to an in-memory PE image and hand the raw bytes
            // (plus the synthesized entry method) back to the debugger.
            using (var stream = new MemoryStream())
            {
                Cci.PeWriter.WritePeToStream(
                    new EmitContext(moduleBuilder, null, diagnostics),
                    context.MessageProvider,
                    () => stream,
                    getPortablePdbStreamOpt: null,
                    nativePdbWriterOpt: null,
                    pdbPathOpt: null,
                    allowMissingMethodBodies: false,
                    isDeterministic: false,
                    cancellationToken: default(CancellationToken));

                if (diagnostics.HasAnyErrors())
                {
                    resultProperties = default(ResultProperties);
                    return null;
                }

                resultProperties = properties;
                return new CSharpCompileResult(
                    stream.ToArray(),
                    GetSynthesizedMethod(moduleBuilder),
                    formatSpecifiers: formatSpecifiers);
            }
        }

        private static MethodSymbol GetSynthesizedMethod(CommonPEModuleBuilder moduleBuilder)
        {
            var method = ((EEAssemblyBuilder)moduleBuilder).Methods.Single(m => m.MetadataName == MethodName);
            Debug.Assert(method.ContainingType.MetadataName == TypeName);
            return method;
        }

        private static CSharpSyntaxNode Parse(
            string expr,
            bool treatAsExpression,
            DiagnosticBag diagnostics,
            out ReadOnlyCollection<string> formatSpecifiers)
        {
            if (!treatAsExpression)
            {
                // Try to parse as a statement. If that fails, parse as an expression.
                var statementDiagnostics = DiagnosticBag.GetInstance();
                var statementSyntax = expr.ParseStatement(statementDiagnostics);
                Debug.Assert((statementSyntax == null) || !statementDiagnostics.HasAnyErrors());
                statementDiagnostics.Free();

                // Prefer to parse expression statements (except deconstruction-declarations) as expressions.
                // Once https://github.com/dotnet/roslyn/issues/15049 is fixed, we should parse d-declarations as expressions.
                // NOTE: IsKind is the null-tolerant Roslyn extension, so this is safe when statementSyntax is null.
                var isExpressionStatement = statementSyntax.IsKind(SyntaxKind.ExpressionStatement);
                var isDeconstructionDeclaration = isExpressionStatement && IsDeconstructionDeclaration((ExpressionStatementSyntax)statementSyntax);

                if (statementSyntax != null && (!isExpressionStatement || isDeconstructionDeclaration))
                {
                    formatSpecifiers = null;

                    if (statementSyntax.IsKind(SyntaxKind.LocalDeclarationStatement) || isDeconstructionDeclaration)
                    {
                        return statementSyntax;
                    }

                    diagnostics.Add(ErrorCode.ERR_ExpressionOrDeclarationExpected, Location.None);
                    return null;
                }
            }

            return expr.ParseExpression(diagnostics, allowFormatSpecifiers: true, formatSpecifiers: out formatSpecifiers);
        }

        private static bool IsDeconstructionDeclaration(ExpressionStatementSyntax expressionStatement)
        {
            if (!expressionStatement.Expression.IsKind(SyntaxKind.SimpleAssignmentExpression))
            {
                return false;
            }
            return ((AssignmentExpressionSyntax)expressionStatement.Expression).IsDeconstructionDeclaration();
        }

        internal override CompileResult CompileAssignment(
            string target,
            string expr,
            ImmutableArray<Alias> aliases,
            DiagnosticBag diagnostics,
            out ResultProperties resultProperties,
            Microsoft.CodeAnalysis.CodeGen.CompilationTestData testData)
        {
            var assignment = target.ParseAssignment(expr, diagnostics);
            if (assignment == null)
            {
                resultProperties = default(ResultProperties);
                return null;
            }

            var context = this.CreateCompilationContext(assignment);
            ResultProperties properties;
            var moduleBuilder = context.CompileAssignment(TypeName, MethodName, aliases, testData, diagnostics, out properties);
            if
            (moduleBuilder == null)
            {
                resultProperties = default(ResultProperties);
                return null;
            }

            // Same PE-emission path as CompileExpression, but assignments carry no format specifiers.
            using (var stream = new MemoryStream())
            {
                Cci.PeWriter.WritePeToStream(
                    new EmitContext(moduleBuilder, null, diagnostics),
                    context.MessageProvider,
                    () => stream,
                    getPortablePdbStreamOpt: null,
                    nativePdbWriterOpt: null,
                    pdbPathOpt: null,
                    allowMissingMethodBodies: false,
                    isDeterministic: false,
                    cancellationToken: default(CancellationToken));

                if (diagnostics.HasAnyErrors())
                {
                    resultProperties = default(ResultProperties);
                    return null;
                }

                resultProperties = properties;
                return new CSharpCompileResult(
                    stream.ToArray(),
                    GetSynthesizedMethod(moduleBuilder),
                    formatSpecifiers: null);
            }
        }

        private static readonly ReadOnlyCollection<byte> s_emptyBytes =
            new ReadOnlyCollection<byte>(Array.Empty<byte>());

        internal override ReadOnlyCollection<byte> CompileGetLocals(
            ArrayBuilder<LocalAndMethod> locals,
            bool argumentsOnly,
            ImmutableArray<Alias> aliases,
            DiagnosticBag diagnostics,
            out string typeName,
            Microsoft.CodeAnalysis.CodeGen.CompilationTestData testData)
        {
            var context = this.CreateCompilationContext(null);
            var moduleBuilder = context.CompileGetLocals(TypeName, locals, argumentsOnly, aliases, testData, diagnostics);
            ReadOnlyCollection<byte> assembly = null;

            if ((moduleBuilder != null) && (locals.Count > 0))
            {
                using (var stream = new MemoryStream())
                {
                    Cci.PeWriter.WritePeToStream(
                        new EmitContext(moduleBuilder, null, diagnostics),
                        context.MessageProvider,
                        () => stream,
                        getPortablePdbStreamOpt: null,
                        nativePdbWriterOpt: null,
                        pdbPathOpt: null,
                        allowMissingMethodBodies: false,
                        isDeterministic: false,
                        cancellationToken: default(CancellationToken));

                    if (!diagnostics.HasAnyErrors())
                    {
                        assembly = new ReadOnlyCollection<byte>(stream.ToArray());
                    }
                }
            }

            // On any failure return an empty assembly and no locals rather than partial results.
            if (assembly == null)
            {
                locals.Clear();
                assembly = s_emptyBytes;
            }

            typeName = TypeName;
            return assembly;
        }

        internal override bool HasDuplicateTypesOrAssemblies(Diagnostic diagnostic)
        {
            switch ((ErrorCode)diagnostic.Code)
            {
                case ErrorCode.ERR_DuplicateImport:
                case ErrorCode.ERR_DuplicateImportSimple:
                case ErrorCode.ERR_SameFullNameAggAgg:
                case ErrorCode.ERR_AmbigCall:
                    return true;
                default:
                    return false;
            }
        }

        internal override ImmutableArray<AssemblyIdentity> GetMissingAssemblyIdentities(Diagnostic diagnostic, AssemblyIdentity linqLibrary)
        {
            return GetMissingAssemblyIdentitiesHelper((ErrorCode)diagnostic.Code, diagnostic.Arguments, linqLibrary);
        }

        /// <remarks>
        /// Internal for testing.
        /// </remarks>
        internal static ImmutableArray<AssemblyIdentity> GetMissingAssemblyIdentitiesHelper(ErrorCode code, IReadOnlyList<object> arguments, AssemblyIdentity linqLibrary)
        {
            Debug.Assert(linqLibrary != null);

            switch (code)
            {
                case ErrorCode.ERR_NoTypeDef:
                case ErrorCode.ERR_GlobalSingleTypeNameNotFoundFwd:
                case ErrorCode.ERR_DottedTypeNameNotFoundInNSFwd:
                case ErrorCode.ERR_SingleTypeNameNotFoundFwd:
                case ErrorCode.ERR_NameNotInContextPossibleMissingReference: // Probably can't happen.
                    foreach (var argument in arguments)
                    {
                        var identity = (argument as AssemblyIdentity) ?? (argument as AssemblySymbol)?.Identity;
                        if (identity != null && !identity.Equals(MissingCorLibrarySymbol.Instance.Identity))
                        {
                            return ImmutableArray.Create(identity);
                        }
                    }
                    break;

                case ErrorCode.ERR_DottedTypeNameNotFoundInNS:
                    if (arguments.Count == 2)
                    {
                        var namespaceName = arguments[0] as string;
                        var containingNamespace = arguments[1] as NamespaceSymbol;
                        if (namespaceName != null && (object)containingNamespace != null &&
                            containingNamespace.ConstituentNamespaces.Any(n => n.ContainingAssembly.Identity.IsWindowsAssemblyIdentity()))
                        {
                            // This is just a heuristic, but it has the advantage of being portable, particularly
                            // across different versions of (desktop) windows.
                            var identity = new AssemblyIdentity($"{containingNamespace.ToDisplayString()}.{namespaceName}", contentType: System.Reflection.AssemblyContentType.WindowsRuntime);
                            return ImmutableArray.Create(identity);
                        }
                    }
                    break;

                case ErrorCode.ERR_NoSuchMemberOrExtension: // Commonly, but not always, caused by absence of System.Core.
                case ErrorCode.ERR_DynamicAttributeMissing:
                case ErrorCode.ERR_DynamicRequiredTypesMissing:
                // MSDN says these might come from System.Dynamic.Runtime
                case ErrorCode.ERR_QueryNoProviderStandard:
                case ErrorCode.ERR_ExtensionAttrNotFound: // Probably can't happen.
                    return ImmutableArray.Create(linqLibrary);

                case ErrorCode.ERR_BadAwaitArg_NeedSystem:
                    Debug.Assert(false, "Roslyn no longer produces ERR_BadAwaitArg_NeedSystem");
                    break;
            }

            return default(ImmutableArray<AssemblyIdentity>);
        }
    }
}
using System;
using System.Collections;
using System.Collections.Generic;
using System.Text;
using System.IO;
using System.Web;
using System.Web.UI;
using System.Web.Caching;

namespace Nohros.Net
{
    /// <summary>
    /// A FIFO queue of serialized server events ("sevens") that is flushed to the
    /// client as a single javascript/JSON object (see ToString on this class).
    /// </summary>
    [Serializable]
    public class Sevens : ICloneable
    {
        // Serialized JSON event fragments, in the order they were raised.
        Queue<string> sevens_;
        // NOTE(review): this default is never used by the parameterless ctor,
        // which delegates with capacity 0 — confirm the intended default.
        int capacity_ = 4;

        #region .ctor
        /// <summary>
        /// Initializes a new instance of the Sevens class that is empty and has the default initial capacity.
        /// </summary>
        public Sevens() : this(0)
        {
        }

        /// <summary>
        /// Initializes a new instance of the Sevens class by using the specified capacity.
        /// </summary>
        /// <param name="capacity">The initial number of events that the Sevens can contain</param>
        public Sevens(int capacity)
        {
            capacity_ = capacity;
            sevens_ = new Queue<string>(capacity);
        }
        #endregion

        #region ICloneable
        /// <summary>
        /// Creates a deep copy of the Sevens.
        /// </summary>
        /// <returns>A deep copy of the Sevens</returns>
        public object Clone()
        {
            Sevens clone = new Sevens(sevens_.Count);
            // Queue<string>.ToArray preserves FIFO order, so the clone replays identically.
            string[] sevens = sevens_.ToArray();
            for (int i = 0, j = sevens.Length; i < j; i++)
            {
                clone.sevens_.Enqueue(sevens[i]);
            }
            return clone;
        }
        #endregion

        /// <summary>
        /// Removes all events from the Sevens.
        /// </summary>
        public void Clear()
        {
            sevens_.Clear();
        }

        /// <summary>
        /// Merges the specified Sevens object into the current Sevens object.
        /// </summary>
        /// <param name="sevens">The Sevens object to be merged into the current Sevens object</param>
        /// <returns>The current instance, with the other object's events appended.</returns>
        /// <exception cref="ArgumentNullException"><paramref name="sevens"/> is null.</exception>
        public Sevens Merge(Sevens sevens)
        {
            if (sevens == null)
                throw new ArgumentNullException("sevens");

            // Merging an instance into itself is a no-op.
            if (this.Equals(sevens))
                return this;

            // BUG FIX: the original snapshotted this instance's own queue (sevens_),
            // which duplicated local events instead of appending the other object's.
            string[] sevens_str = sevens.sevens_.ToArray();
            for (int i = 0, j = sevens_str.Length; i < j; i++)
            {
                sevens_.Enqueue(sevens_str[i]);
            }
            return this;
        }

        /// <summary>
        /// Outputs the server events into a javascript object.
/// </summary> public override string ToString() { int num_events = Count; StringBuilder sb = new StringBuilder(); //if (!string.IsNullOrEmpty(this.error)) //return "{\"length\":0, \"error\" :\"" + this.error + "\"}"; if (num_events == 0) return "{\"length\":0, \"error\":null}"; sb.Append("{\"length\":\"").Append(num_events).Append("\", \"error\":null,"); sb.Append("\"actions\" : ["); // append the server events while (sevens_.Count > 0) sb.Append(sevens_.Dequeue()).Append(","); // remove the extra comma and close the array // of events and the events container object. sb.Remove(sb.Length - 1, 1).Append("]}"); return sb.ToString(); } #region Server events #region Client contents /// <summary> /// Append content to he inside of every matched element /// </summary> /// <param name="event_name">The name of the event</param> /// <param name="selector">The element selector in jquery-style</param> /// <param name="markup">The HTML content to append</param> protected void ClientContent(string event_name, string selector, string markup, params KeyValuePair<string, string>[] pairs) { if (string.IsNullOrEmpty(selector) || string.IsNullOrEmpty(markup)) throw new ArgumentNullException("Arguments could not be null"); string json; json = string.Concat( "{\"type\":\"", event_name, "\",\"selector\":\"", selector); if (pairs != null) { for (int i = 0, j = pairs.Length; i < j; i++) { KeyValuePair<string, string> pair = pairs[i]; json += string.Concat("\"", pair.Key, "\":\"", pair.Value, "\","); } } json += "\"markup\":\"" + markup + "\"}"; } /// <summary> /// Append content to he inside of every matched element /// </summary> public void AppendMarkup(string selector, string markup) { ClientContent("appendmkp", selector, markup); } /// <summary> /// Prepend content to he inside of every matched element /// </summary> public void PrependMarkup(string selector, string markup) { ClientContent("prependmkp", selector, markup); } public void AfterMarkup(string selector, string markup) { 
            ClientContent("aftermkp", selector, markup);
        }

        public void BeforeMarkup(string selector, string markup)
        {
            ClientContent("beforemkp", selector, markup);
        }

        public void ReplaceMarkup(string selector, string markup)
        {
            ClientContent("replacemkp", selector, markup);
        }

        public void SetValue(string selector, string value)
        {
            ClientContent("setval", selector, value);
        }

        public void SetText(string selector, string value)
        {
            ClientContent("settext", selector, value);
        }

        public void SetHtml(string selector, string value)
        {
            ClientContent("sethtml", selector, value);
        }

        public void FillTable(string selector, string value, bool inner)
        {
            FillTable(selector, value, null, inner);
        }

        // "inner" selects between the fillintable/filltable client event types.
        public void FillTable(string selector, string value, string constant, bool inner)
        {
            ClientContent((inner) ? "fillintable" : "filltable", selector, value, new KeyValuePair<string, string>("constant", constant));
        }
        #endregion

        // Queues an event that triggers the client-side debugger hook.
        public void Debugger()
        {
            sevens_.Enqueue("{\"type\":\"debugger\", \"selector\":\"debugger\"}");
        }

        /// <summary>
        /// Queues a focus event: gives or removes focus on the matched elements.
        /// </summary>
        /// <param name="selector">A string containing a selector expression</param>
        /// <param name="set">Whether focus should be set (true) or not (false)</param>
        public void Focus(string selector, bool set)
        {
            if (selector == null)
                throw new ArgumentNullException("selector");
            if (selector.Length == 0)
                throw new ArgumentOutOfRangeException("selector");

            sevens_.Enqueue(
                string.Concat(
                    "{\"type\":\"focus\",",
                    "\"selector\":\"", selector,
                    "\", \"set\":\"", set.ToString().ToLower(), "\"}")
                );
        }

        /// <summary>
        /// Redirects a browser to the specified URL.
        /// </summary>
        /// <param name="URL">The URL to redirects the browser</param>
        public void Redirect(string URL)
        {
            if (URL == null)
                throw new ArgumentNullException("URL");

            sevens_.Enqueue(
                string.Concat(
                    "{\"type\":\"redirect\",",
                    "\"selector\":\"none\",",
                    "\"url\":\"", URL, "\"}")
                );
        }

        /// <summary>
        /// Simulates a mouse click on a reset button for the specified form.
        /// </summary>
        /// <param name="selector">A string containing a selector expression</param>
        public void ResetForm(string selector)
        {
            if (string.IsNullOrEmpty(selector))
                throw new ArgumentNullException("Arguments could not be null");

            sevens_.Enqueue(
                string.Concat(
                    "{\"type\":\"resetfrm\",",
                    "\"selector\":\"", selector, "\"}")
                );
        }

        /// <summary>
        /// Set a attribute for the set of matched elements.
        /// </summary>
        /// <param name="selector">A string containing a selector expression</param>
        /// <param name="attribute">The name of the attribute to set</param>
        /// <param name="value">A value to set for the attribute</param>
        public void SetAttribute(string selector, string attribute, string value)
        {
            if (selector == null || attribute == null || value == null)
                Thrower.ThrowArgumentNullException(ExceptionArgument.any);

            sevens_.Enqueue(
                string.Concat(
                    "{\"type\":\"setattr\",",
                    "\"selector\":\"", selector, "\",",
                    "\"attribute\":\"", attribute, "\",",
                    "\"value\":\"", value, "\"}")
                );
        }

        /// <summary>
        /// Set one or more CSS properties for the set of matched elements.
        /// </summary>
        /// <param name="selector">A string conaining a selector expression</param>
        /// <param name="property_name">A CSS property name</param>
        /// <param name="value">A value to set for the property</param>
        public void SetCss(string selector, string property_name, string value)
        {
            if (selector == null || property_name == null || value == null)
                Thrower.ThrowArgumentNullException(ExceptionArgument.any);

            sevens_.Enqueue(
                string.Concat(
                    "{\"type\":\"setcss\",",
                    "\"selector\":\"", selector, "\",",
                    "\"cssattr\":\"", property_name, "\",",
                    "\"value\":\"", value, "\"}")
                );
        }

        /// <summary>
        /// Evaluates a expression after a specified number of milliseconds has elapsed.
/// </summary> /// <param name="timeout">A long that specifies the number of milliseconds</param> /// <param name="functionToCall">A javascript statement to execute</param> public void SetTimeout(long ms, string function_to_call) { if (function_to_call == null) throw new ArgumentNullException("function_to_call"); if (ms < 0) ms = 5000; sevens_.Enqueue( string.Concat( "{\"type\":\"settimeout\",", "\timeout\":\"", ms, "\",", "\functionToCall\":\"", function_to_call, "\"}") ); } public void ShowInfo(string title, string message) { if (string.IsNullOrEmpty(title) || string.IsNullOrEmpty(message)) throw new ArgumentNullException("Arguments could not be null"); // Sets the attribute of this class // //SerializeType("showinfo"); // Append the parameters that will be used by the client //SerializePair("title", title); //SerializeLastPair("message", message); } /// <summary> /// Trigger an event on every matched element /// </summary> /// <param name="selector">The name of the objects to trigger the event</param> /// <param name="evt">An event type to trigger</param> public void Trigger(string selector, string evt) { if (selector == null || evt == null) Thrower.ThrowArgumentNullException(ExceptionArgument.any); sevens_.Enqueue( string.Concat( "{\"type\":\"trigger\",", "\"selector\":\"", selector, "\",", "\"evt\":\"", evt, "\"}") ); } #endregion /// <summary> /// Gets the number of events contained in the Sevens. /// </summary> public int Count { get { return sevens_.Count; } } } }
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;

namespace UnityEditor.iOS.Xcode.GetSocial
{
    // Keys/values for the "idiom" member of asset catalog requirement dictionaries.
    internal class DeviceTypeRequirement
    {
        public static readonly string Key = "idiom";
        public static readonly string Any = "universal";
        public static readonly string iPhone = "iphone";
        public static readonly string iPad = "ipad";
        public static readonly string Mac = "mac";
        public static readonly string iWatch = "watch";
    }

    // Keys/values for the "memory" requirement member.
    internal class MemoryRequirement
    {
        public static readonly string Key = "memory";
        public static readonly string Any = "";
        public static readonly string Mem1GB = "1GB";
        public static readonly string Mem2GB = "2GB";
    }

    // Keys/values for the "graphics-feature-set" requirement member.
    internal class GraphicsRequirement
    {
        public static readonly string Key = "graphics-feature-set";
        public static readonly string Any = "";
        public static readonly string Metal1v2 = "metal1v2";
        public static readonly string Metal2v2 = "metal2v2";
    }

    // only used for image sets
    internal class SizeClassRequirement
    {
        public static readonly string HeightKey = "height-class";
        public static readonly string WidthKey = "width-class";
        public static readonly string Any = "";
        public static readonly string Compact = "compact";
        public static readonly string Regular = "regular";
    }

    // only used for image sets
    internal class ScaleRequirement
    {
        public static readonly string Key = "scale";
        public static readonly string Any = ""; // vector image
        public static readonly string X1 = "1x";
        public static readonly string X2 = "2x";
        public static readonly string X3 = "3x";
    }

    // A fluent builder for the key/value requirement dictionary attached to
    // asset catalog variants. Always starts with idiom = universal.
    internal class DeviceRequirement
    {
        internal Dictionary<string, string> values = new Dictionary<string, string>();

        public DeviceRequirement AddDevice(string device)
        {
            AddCustom(DeviceTypeRequirement.Key, device);
            return this;
        }

        public DeviceRequirement AddMemory(string memory)
        {
            AddCustom(MemoryRequirement.Key, memory);
            return this;
        }

        public DeviceRequirement AddGraphics(string graphics)
        {
            AddCustom(GraphicsRequirement.Key, graphics);
            return this;
        }

        public DeviceRequirement AddWidthClass(string sizeClass)
        {
            AddCustom(SizeClassRequirement.WidthKey, sizeClass);
            return this;
        }

        public DeviceRequirement AddHeightClass(string sizeClass)
        {
            AddCustom(SizeClassRequirement.HeightKey, sizeClass);
            return this;
        }

        public DeviceRequirement AddScale(string scale)
        {
            AddCustom(ScaleRequirement.Key, scale);
            return this;
        }

        // Last write wins: an existing value for the key is replaced.
        public DeviceRequirement AddCustom(string key, string value)
        {
            if (values.ContainsKey(key))
                values.Remove(key);
            values.Add(key, value);
            return this;
        }

        public DeviceRequirement()
        {
            values.Add("idiom", DeviceTypeRequirement.Any);
        }
    }

    // Root object modelling an .xcassets asset catalog on disk.
    internal class AssetCatalog
    {
        AssetFolder m_Root;

        public string path { get { return m_Root.path; } }
        public AssetFolder root { get { return m_Root; } }

        public AssetCatalog(string path, string authorId)
        {
            if (Path.GetExtension(path) != ".xcassets")
                throw new Exception("Asset catalogs must have xcassets extension");

            m_Root = new AssetFolder(path, null, authorId);
        }

        // Opens (creating as needed) the chain of folders containing the resource,
        // i.e. all path components except the final filename.
        AssetFolder OpenFolderForResource(string relativePath)
        {
            var pathItems = Utils.SplitPath(relativePath).ToList();

            // remove path filename
            pathItems.RemoveAt(pathItems.Count - 1);

            AssetFolder folder = root;
            foreach (var pathItem in pathItems)
                folder = folder.OpenFolder(pathItem);
            return folder;
        }

        // Checks if a dataset at the given path exists and returns it if it does.
        // Otherwise, creates a new dataset. Parent folders are created if needed.
        // Note: the path is filesystem path, not logical asset name formed
        // only from names of the folders that have "provides namespace" attribute.
        // If you want to put certain resources in folders with namespace, first
        // manually create the folders and then set the providesNamespace attribute.
        // OpenNamespacedFolder may help to do this.
public AssetDataSet OpenDataSet(string relativePath) { var folder = OpenFolderForResource(relativePath); return folder.OpenDataSet(Path.GetFileName(relativePath)); } public AssetImageSet OpenImageSet(string relativePath) { var folder = OpenFolderForResource(relativePath); return folder.OpenImageSet(Path.GetFileName(relativePath)); } public AssetImageStack OpenImageStack(string relativePath) { var folder = OpenFolderForResource(relativePath); return folder.OpenImageStack(Path.GetFileName(relativePath)); } // Checks if a folder with given path exists and returns it if it does. // Otherwise, creates a new folder. Parent folders are created if needed. public AssetFolder OpenFolder(string relativePath) { if (relativePath == null) return root; var pathItems = Utils.SplitPath(relativePath); if (pathItems.Length == 0) return root; AssetFolder folder = root; foreach (var pathItem in pathItems) folder = folder.OpenFolder(pathItem); return folder; } // Creates a directory structure with "provides namespace" attribute. // First, retrieves or creates the directory at relativeBasePath, creating parent // directories if needed. Effectively calls OpenFolder(relativeBasePath). // Then, relative to this directory, creates namespacePath directories with "provides // namespace" attribute set. Fails if the attribute can't be set. 
public AssetFolder OpenNamespacedFolder(string relativeBasePath, string namespacePath) { var folder = OpenFolder(relativeBasePath); var pathItems = Utils.SplitPath(namespacePath); foreach (var pathItem in pathItems) { folder = folder.OpenFolder(pathItem); folder.providesNamespace = true; } return folder; } public void Write() { m_Root.Write(); } } internal abstract class AssetCatalogItem { public readonly string name; public readonly string authorId; public string path { get { return m_Path; } } protected Dictionary<string, string> m_Properties = new Dictionary<string, string>(); protected string m_Path; public AssetCatalogItem(string name, string authorId) { if (name != null && name.Contains("/")) throw new Exception("Asset catalog item must not have slashes in name"); this.name = name; this.authorId = authorId; } protected JsonElementDict WriteInfoToJson(JsonDocument doc) { var info = doc.root.CreateDict("info"); info.SetInteger("version", 1); info.SetString("author", authorId); return info; } public abstract void Write(); } internal class AssetFolder : AssetCatalogItem { List<AssetCatalogItem> m_Items = new List<AssetCatalogItem>(); bool m_ProvidesNamespace = false; public bool providesNamespace { get { return m_ProvidesNamespace; } set { if (m_Items.Count > 0 && value != m_ProvidesNamespace) throw new Exception("Asset folder namespace providing status can't be "+ "changed after items have been added"); m_ProvidesNamespace = value; } } internal AssetFolder(string parentPath, string name, string authorId) : base(name, authorId) { if (name != null) m_Path = Path.Combine(parentPath, name); else m_Path = parentPath; } // Checks if a folder with given name exists and returns it if it does. // Otherwise, creates a new folder. 
public AssetFolder OpenFolder(string name) { var item = GetChild(name); if (item != null) { if (item is AssetFolder) return item as AssetFolder; throw new Exception("The given path is already occupied with an asset"); } var folder = new AssetFolder(m_Path, name, authorId); m_Items.Add(folder); return folder; } T GetExistingItemWithType<T>(string name) where T : class { var item = GetChild(name); if (item != null) { if (item is T) return item as T; throw new Exception("The given path is already occupied with an asset"); } return null; } // Checks if a dataset with given name exists and returns it if it does. // Otherwise, creates a new data set. public AssetDataSet OpenDataSet(string name) { var item = GetExistingItemWithType<AssetDataSet>(name); if (item != null) return item; var dataset = new AssetDataSet(m_Path, name, authorId); m_Items.Add(dataset); return dataset; } // Checks if an imageset with given name exists and returns it if it does. // Otherwise, creates a new image set. public AssetImageSet OpenImageSet(string name) { var item = GetExistingItemWithType<AssetImageSet>(name); if (item != null) return item; var imageset = new AssetImageSet(m_Path, name, authorId); m_Items.Add(imageset); return imageset; } // Checks if a image stack with given name exists and returns it if it does. // Otherwise, creates a new image stack. 
public AssetImageStack OpenImageStack(string name) { var item = GetExistingItemWithType<AssetImageStack>(name); if (item != null) return item; var imageStack = new AssetImageStack(m_Path, name, authorId); m_Items.Add(imageStack); return imageStack; } // Returns the requested item or null if not found public AssetCatalogItem GetChild(string name) { foreach (var item in m_Items) { if (item.name == name) return item; } return null; } void WriteJson() { if (!providesNamespace) return; // json is optional when namespace is not provided var doc = new JsonDocument(); WriteInfoToJson(doc); var props = doc.root.CreateDict("properties"); props.SetBoolean("provides-namespace", providesNamespace); doc.WriteToFile(Path.Combine(m_Path, "Contents.json")); } public override void Write() { if (Directory.Exists(m_Path)) Directory.Delete(m_Path, true); // ensure we start from clean state Directory.CreateDirectory(m_Path); WriteJson(); foreach (var item in m_Items) item.Write(); } } abstract class AssetCatalogItemWithVariants : AssetCatalogItem { protected List<VariantData> m_Variants = new List<VariantData>(); protected List<string> m_ODRTags = new List<string>(); protected AssetCatalogItemWithVariants(string name, string authorId) : base(name, authorId) { } protected class VariantData { public DeviceRequirement requirement; public string path; public VariantData(DeviceRequirement requirement, string path) { this.requirement = requirement; this.path = path; } } public bool HasVariant(DeviceRequirement requirement) { foreach (var item in m_Variants) { if (item.requirement.values == requirement.values) return true; } return false; } public void AddOnDemandResourceTag(string tag) { if (!m_ODRTags.Contains(tag)) m_ODRTags.Add(tag); } protected void AddVariant(VariantData newItem) { foreach (var item in m_Variants) { if (item.requirement.values == newItem.requirement.values) throw new Exception("The given requirement has been already added"); if (Path.GetFileName(item.path) == 
Path.GetFileName(path)) throw new Exception("Two items within the same set must not have the same file name"); } if (Path.GetFileName(newItem.path) == "Contents.json") throw new Exception("The file name must not be equal to Contents.json"); m_Variants.Add(newItem); } protected void WriteODRTagsToJson(JsonElementDict info) { if (m_ODRTags.Count > 0) { var tags = info.CreateArray("on-demand-resource-tags"); foreach (var tag in m_ODRTags) tags.AddString(tag); } } protected void WriteRequirementsToJson(JsonElementDict item, DeviceRequirement req) { foreach (var kv in req.values) { if (kv.Value != null && kv.Value != "") item.SetString(kv.Key, kv.Value); } } } internal class AssetDataSet : AssetCatalogItemWithVariants { class DataSetVariant : VariantData { public string id; public DataSetVariant(DeviceRequirement requirement, string path, string id) : base(requirement, path) { this.id = id; } } internal AssetDataSet(string parentPath, string name, string authorId) : base(name, authorId) { m_Path = Path.Combine(parentPath, name + ".dataset"); } // an exception is thrown is two equivalent requirements are added. // The same asset dataset must not have paths with equivalent filenames. 
// The identifier allows to identify which data variant is actually loaded (use // the typeIdentifer property of the NSDataAsset that was created from the data set) public void AddVariant(DeviceRequirement requirement, string path, string typeIdentifier) { foreach (DataSetVariant item in m_Variants) { if (item.id != null && typeIdentifier != null && item.id == typeIdentifier) throw new Exception("Two items within the same dataset must not have the same id"); } AddVariant(new DataSetVariant(requirement, path, typeIdentifier)); } public override void Write() { Directory.CreateDirectory(m_Path); var doc = new JsonDocument(); var info = WriteInfoToJson(doc); WriteODRTagsToJson(info); var data = doc.root.CreateArray("data"); foreach (DataSetVariant item in m_Variants) { var filename = Path.GetFileName(item.path); File.Copy(item.path, Path.Combine(m_Path, filename)); var docItem = data.AddDict(); docItem.SetString("filename", filename); WriteRequirementsToJson(docItem, item.requirement); if (item.id != null) docItem.SetString("universal-type-identifier", item.id); } doc.WriteToFile(Path.Combine(m_Path, "Contents.json")); } } internal class ImageAlignment { public int left = 0, right = 0, top = 0, bottom = 0; } internal class ImageResizing { public enum SlicingType { Horizontal, Vertical, HorizontalAndVertical } public enum ResizeMode { Stretch, Tile } public SlicingType type = SlicingType.HorizontalAndVertical; public int left = 0; // only valid for horizontal slicing public int right = 0; // only valid for horizontal slicing public int top = 0; // only valid for vertical slicing public int bottom = 0; // only valid for vertical slicing public ResizeMode centerResizeMode = ResizeMode.Stretch; public int centerWidth = 0; // only valid for vertical slicing public int centerHeight = 0; // only valid for horizontal slicing } // TODO: rendering intent property internal class AssetImageSet : AssetCatalogItemWithVariants { internal AssetImageSet(string assetCatalogPath, string 
name, string authorId) : base(name, authorId) { m_Path = Path.Combine(assetCatalogPath, name + ".imageset"); } class ImageSetVariant : VariantData { public ImageAlignment alignment = null; public ImageResizing resizing = null; public ImageSetVariant(DeviceRequirement requirement, string path) : base(requirement, path) { } } public void AddVariant(DeviceRequirement requirement, string path) { AddVariant(new ImageSetVariant(requirement, path)); } public void AddVariant(DeviceRequirement requirement, string path, ImageAlignment alignment, ImageResizing resizing) { var imageset = new ImageSetVariant(requirement, path); imageset.alignment = alignment; imageset.resizing = resizing; AddVariant(imageset); } void WriteAlignmentToJson(JsonElementDict item, ImageAlignment alignment) { var docAlignment = item.CreateDict("alignment-insets"); docAlignment.SetInteger("top", alignment.top); docAlignment.SetInteger("bottom", alignment.bottom); docAlignment.SetInteger("left", alignment.left); docAlignment.SetInteger("right", alignment.right); } static string GetSlicingMode(ImageResizing.SlicingType mode) { switch (mode) { case ImageResizing.SlicingType.Horizontal: return "3-part-horizontal"; case ImageResizing.SlicingType.Vertical: return "3-part-vertical"; case ImageResizing.SlicingType.HorizontalAndVertical: return "9-part"; } return ""; } static string GetCenterResizeMode(ImageResizing.ResizeMode mode) { switch (mode) { case ImageResizing.ResizeMode.Stretch: return "stretch"; case ImageResizing.ResizeMode.Tile: return "tile"; } return ""; } void WriteResizingToJson(JsonElementDict item, ImageResizing resizing) { var docResizing = item.CreateDict("resizing"); docResizing.SetString("mode", GetSlicingMode(resizing.type)); var docCenter = docResizing.CreateDict("center"); docCenter.SetString("mode", GetCenterResizeMode(resizing.centerResizeMode)); docCenter.SetInteger("width", resizing.centerWidth); docCenter.SetInteger("height", resizing.centerHeight); var docInsets = 
docResizing.CreateDict("cap-insets"); docInsets.SetInteger("top", resizing.top); docInsets.SetInteger("bottom", resizing.bottom); docInsets.SetInteger("left", resizing.left); docInsets.SetInteger("right", resizing.right); } public override void Write() { Directory.CreateDirectory(m_Path); var doc = new JsonDocument(); var info = WriteInfoToJson(doc); WriteODRTagsToJson(info); var images = doc.root.CreateArray("images"); foreach (ImageSetVariant item in m_Variants) { var filename = Path.GetFileName(item.path); File.Copy(item.path, Path.Combine(m_Path, filename)); var docItem = images.AddDict(); docItem.SetString("filename", filename); WriteRequirementsToJson(docItem, item.requirement); if (item.alignment != null) WriteAlignmentToJson(docItem, item.alignment); if (item.resizing != null) WriteResizingToJson(docItem, item.resizing); } doc.WriteToFile(Path.Combine(m_Path, "Contents.json")); } } /* A stack layer may either contain an image set or reference another imageset */ class AssetImageStackLayer : AssetCatalogItem { internal AssetImageStackLayer(string assetCatalogPath, string name, string authorId) : base(name, authorId) { m_Path = Path.Combine(assetCatalogPath, name + ".imagestacklayer"); m_Imageset = new AssetImageSet(m_Path, "Content", authorId); } AssetImageSet m_Imageset = null; string m_ReferencedName = null; public void SetReference(string name) { m_Imageset = null; m_ReferencedName = name; } public string ReferencedName() { return m_ReferencedName; } public AssetImageSet GetImageSet() { return m_Imageset; } public override void Write() { Directory.CreateDirectory(m_Path); var doc = new JsonDocument(); WriteInfoToJson(doc); if (m_ReferencedName != null) { var props = doc.root.CreateDict("properties"); var reference = props.CreateDict("content-reference"); reference.SetString("type", "image-set"); reference.SetString("name", m_ReferencedName); reference.SetString("matching-style", "fully-qualified-name"); } if (m_Imageset != null) m_Imageset.Write(); 
doc.WriteToFile(Path.Combine(m_Path, "Contents.json")); } } class AssetImageStack : AssetCatalogItem { List<AssetImageStackLayer> m_Layers = new List<AssetImageStackLayer>(); internal AssetImageStack(string assetCatalogPath, string name, string authorId) : base(name, authorId) { m_Path = Path.Combine(assetCatalogPath, name + ".imagestack"); } public AssetImageStackLayer AddLayer(string name) { foreach (var layer in m_Layers) { if (layer.name == name) throw new Exception("A layer with given name already exists"); } var newLayer = new AssetImageStackLayer(m_Path, name, authorId); m_Layers.Add(newLayer); return newLayer; } public override void Write() { Directory.CreateDirectory(m_Path); var doc = new JsonDocument(); WriteInfoToJson(doc); var docLayers = doc.root.CreateArray("layers"); foreach (var layer in m_Layers) { layer.Write(); var docLayer = docLayers.AddDict(); docLayer.SetString("filename", Path.GetFileName(layer.path)); } doc.WriteToFile(Path.Combine(m_Path, "Contents.json")); } } } // namespace UnityEditor.iOS.Xcode.GetSocial
using k8s;
using k8s.Models;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Orleans.Configuration;
using Orleans.Runtime;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;

namespace Orleans.Hosting.Kubernetes
{
    /// <summary>
    /// Reflects cluster configuration changes between Orleans and Kubernetes.
    /// </summary>
    public sealed class KubernetesClusterAgent : ILifecycleParticipant<ISiloLifecycle>
    {
        private readonly IOptionsMonitor<KubernetesHostingOptions> _options;
        private readonly ClusterOptions _clusterOptions;
        private readonly IClusterMembershipService _clusterMembershipService;
        private readonly KubernetesClientConfiguration _config;
        private readonly k8s.Kubernetes _client;
        private readonly string _podLabelSelector;
        private readonly string _podNamespace;
        private readonly string _podName;
        private readonly ILocalSiloDetails _localSiloDetails;
        private readonly ILogger<KubernetesClusterAgent> _logger;
        private readonly CancellationTokenSource _shutdownToken;
        // Released to wake MonitorKubernetesPods when monitoring is (re)enabled.
        private readonly SemaphoreSlim _pauseMonitoringSemaphore = new SemaphoreSlim(0);
        private volatile bool _enableMonitoring;
        private Task _runTask;

        public KubernetesClusterAgent(
            IClusterMembershipService clusterMembershipService,
            ILogger<KubernetesClusterAgent> logger,
            IOptionsMonitor<KubernetesHostingOptions> options,
            IOptions<ClusterOptions> clusterOptions,
            ILocalSiloDetails localSiloDetails)
        {
            _localSiloDetails = localSiloDetails;
            _logger = logger;
            _shutdownToken = new CancellationTokenSource();
            _options = options;
            _clusterOptions = clusterOptions.Value;
            _clusterMembershipService = clusterMembershipService;
            _config = _options.CurrentValue.GetClientConfiguration?.Invoke()
                ?? throw new ArgumentNullException(nameof(KubernetesHostingOptions) + "." + nameof(KubernetesHostingOptions.GetClientConfiguration));
            _client = new k8s.Kubernetes(_config);
            // Selects only the pods belonging to this service and cluster.
            _podLabelSelector = $"{KubernetesHostingOptions.ServiceIdLabel}={_clusterOptions.ServiceId},{KubernetesHostingOptions.ClusterIdLabel}={_clusterOptions.ClusterId}";
            _podNamespace = _options.CurrentValue.Namespace;
            _podName = _options.CurrentValue.PodName;
        }

        public void Participate(ISiloLifecycle lifecycle)
        {
            lifecycle.Subscribe(
                nameof(KubernetesClusterAgent),
                ServiceLifecycleStage.AfterRuntimeGrainServices,
                OnRuntimeInitializeStart,
                OnRuntimeInitializeStop);
        }

        /// <summary>
        /// Reconciles Orleans membership against the pods currently known to
        /// Kubernetes, then starts the two background monitoring loops.
        /// </summary>
        private async Task OnRuntimeInitializeStart(CancellationToken cancellation)
        {
            // Find the currently known cluster members first, before interrogating Kubernetes
            await _clusterMembershipService.Refresh();
            var snapshot = _clusterMembershipService.CurrentSnapshot.Members;

            // Find the pods which correspond to this cluster
            var pods = await _client.ListNamespacedPodAsync(
                namespaceParameter: _podNamespace,
                labelSelector: _podLabelSelector,
                cancellationToken: cancellation);

            var clusterPods = new HashSet<string>();
            clusterPods.Add(_podName);
            foreach (var pod in pods.Items)
            {
                clusterPods.Add(pod.Metadata.Name);
            }

            HashSet<string> known = new HashSet<string>();
            var knownMap = new Dictionary<string, ClusterMember>();
            known.Add(_podName);
            foreach (var member in snapshot.Values)
            {
                // Dead silos are not expected to have a live pod.
                if (member.Status == SiloStatus.Dead)
                {
                    continue;
                }

                known.Add(member.Name);
                knownMap[member.Name] = member;
            }

            // Pods which exist but have no corresponding silo.
            var unknown = new List<string>(clusterPods.Except(known));
            unknown.Sort();
            foreach (var pod in unknown)
            {
                _logger.LogWarning("Pod {PodName} does not correspond to any known silos", pod);

                // Delete the pod once it has been active long enough?
            }

            // Silos which are known but have no corresponding pod: declare them dead.
            var unmatched = new List<string>(known.Except(clusterPods));
            unmatched.Sort();
            foreach (var pod in unmatched)
            {
                var siloAddress = knownMap[pod];
                _logger.LogWarning("Silo {SiloAddress} does not correspond to any known pod. Marking it as dead.", siloAddress);
                await _clusterMembershipService.TryKill(siloAddress.SiloAddress);
            }

            // Start monitoring loop
            ThreadPool.UnsafeQueueUserWorkItem(_ => _runTask = Task.WhenAll(Task.Run(MonitorOrleansClustering), Task.Run(MonitorKubernetesPods)), null);
        }

        /// <summary>
        /// Signals shutdown and waits (bounded by one minute) for the
        /// monitoring loops to complete.
        /// </summary>
        public async Task OnRuntimeInitializeStop(CancellationToken cancellationToken)
        {
            _shutdownToken.Cancel();
            _enableMonitoring = false;
            _pauseMonitoringSemaphore.Release();
            if (_runTask is object)
            {
                await Task.WhenAny(_runTask, Task.Delay(TimeSpan.FromMinutes(1), cancellationToken));
            }
        }

        /// <summary>
        /// Watches Orleans membership updates; elects up to MaxAgents silos to
        /// monitor Kubernetes and deletes pods of silos declared dead.
        /// </summary>
        private async Task MonitorOrleansClustering()
        {
            var previous = _clusterMembershipService.CurrentSnapshot;
            while (!_shutdownToken.IsCancellationRequested)
            {
                try
                {
                    await foreach (var update in _clusterMembershipService.MembershipUpdates.WithCancellation(_shutdownToken.Token))
                    {
                        // Determine which silos should be monitoring Kubernetes
                        var chosenSilos = _clusterMembershipService.CurrentSnapshot.Members.Values
                            .Where(s => s.Status == SiloStatus.Active)
                            .OrderBy(s => s.SiloAddress)
                            .Take(_options.CurrentValue.MaxAgents)
                            .ToList();

                        var isLocalSiloChosen = chosenSilos.Any(s => s.SiloAddress.Equals(_localSiloDetails.SiloAddress));
                        if (!_enableMonitoring && isLocalSiloChosen)
                        {
                            _enableMonitoring = true;
                            _pauseMonitoringSemaphore.Release(1);
                        }
                        else if (_enableMonitoring && !isLocalSiloChosen)
                        {
                            // FIX: only disable monitoring when this silo is no
                            // longer among the chosen agents. Previously the
                            // 'else if (_enableMonitoring)' branch had no
                            // choice check, so monitoring was switched off on
                            // the very next membership update even while this
                            // silo was still elected.
                            _enableMonitoring = false;
                        }

                        if (_enableMonitoring && _options.CurrentValue.DeleteDefunctSiloPods)
                        {
                            var delta = update.CreateUpdate(previous);
                            foreach (var change in delta.Changes)
                            {
                                if (change.SiloAddress.Equals(_localSiloDetails.SiloAddress))
                                {
                                    // Ignore all changes for this silo
                                    continue;
                                }

                                if (change.Status == SiloStatus.Dead)
                                {
                                    try
                                    {
                                        if (_logger.IsEnabled(LogLevel.Information))
                                        {
                                            _logger.LogInformation("Silo {SiloAddress} is dead, proceeding to delete the corresponding pod, {PodName}, in namespace {PodNamespace}", change.SiloAddress, change.Name, _podNamespace);
                                        }

                                        await _client.DeleteNamespacedPodAsync(change.Name, _podNamespace);
                                    }
                                    catch (Exception exception)
                                    {
                                        _logger.LogError(exception, "Error deleting pod {PodName} in namespace {PodNamespace} corresponding to defunct silo {SiloAddress}", change.Name, _podNamespace, change.SiloAddress);
                                    }
                                }
                            }
                        }

                        previous = update;
                    }
                }
                catch (Exception exception) when (!(_shutdownToken.IsCancellationRequested && (exception is TaskCanceledException || exception is OperationCanceledException)))
                {
                    _logger.LogError(exception, "Error monitoring cluster changes");
                    if (!_shutdownToken.IsCancellationRequested)
                    {
                        // Back off before retrying.
                        await Task.Delay(5000);
                    }
                }
            }
        }

        /// <summary>
        /// Watches Kubernetes pod events and declares silos dead when their
        /// pods are deleted. Parks on the semaphore while monitoring is disabled.
        /// </summary>
        private async Task MonitorKubernetesPods()
        {
            while (!_shutdownToken.IsCancellationRequested)
            {
                try
                {
                    if (!_enableMonitoring)
                    {
                        // Pulse the semaphore to avoid spinning in a tight loop.
                        await _pauseMonitoringSemaphore.WaitAsync();
                        continue;
                    }

                    if (_shutdownToken.IsCancellationRequested)
                    {
                        break;
                    }

                    var pods = await _client.ListNamespacedPodWithHttpMessagesAsync(
                        namespaceParameter: _podNamespace,
                        labelSelector: _podLabelSelector,
                        watch: true,
                        cancellationToken: _shutdownToken.Token);
                    await foreach (var (eventType, pod) in pods.WatchAsync<V1PodList, V1Pod>(_shutdownToken.Token))
                    {
                        if (!_enableMonitoring || _shutdownToken.IsCancellationRequested)
                        {
                            break;
                        }

                        if (string.Equals(pod.Metadata.Name, _podName, StringComparison.Ordinal))
                        {
                            // Never declare ourselves dead this way.
                            continue;
                        }

                        if (eventType == WatchEventType.Modified)
                        {
                            // TODO: Remember silo addresses for pods are restarting/terminating
                        }

                        if (eventType == WatchEventType.Deleted)
                        {
                            if (this.TryMatchSilo(pod, out var member) && member.Status != SiloStatus.Dead)
                            {
                                if (_logger.IsEnabled(LogLevel.Information))
                                {
                                    _logger.LogInformation("Declaring server {Silo} dead since its corresponding pod, {Pod}, has been deleted", member.SiloAddress, pod.Metadata.Name);
                                }

                                await _clusterMembershipService.TryKill(member.SiloAddress);
                            }
                        }
                    }

                    if (_enableMonitoring && !_shutdownToken.IsCancellationRequested)
                    {
                        if (_logger.IsEnabled(LogLevel.Debug))
                        {
                            _logger.LogDebug("Unexpected end of stream from Kubernetes API. Will try again.");
                        }

                        await Task.Delay(5000);
                    }
                }
                catch (Exception exception)
                {
                    _logger.LogError(exception, "Error monitoring Kubernetes pods");
                    if (!_shutdownToken.IsCancellationRequested)
                    {
                        await Task.Delay(5000);
                    }
                }
            }
        }

        // Returns the cluster member whose name matches the pod's name, if any.
        private bool TryMatchSilo(V1Pod pod, out ClusterMember server)
        {
            var snapshot = _clusterMembershipService.CurrentSnapshot;
            foreach (var member in snapshot.Members)
            {
                if (string.Equals(member.Value.Name, pod.Metadata.Name, StringComparison.Ordinal))
                {
                    server = member.Value;
                    return true;
                }
            }

            server = default;
            return false;
        }
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator 1.0.0.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.

namespace Microsoft.Azure.Management.Relay
{
    using Azure;
    using Management;
    using Rest;
    using Rest.Azure;
    using Rest.Serialization;
    using Models;
    using Newtonsoft.Json;
    using System.Collections;
    using System.Collections.Generic;
    using System.Linq;
    using System.Net;
    using System.Net.Http;

    /// <summary>
    /// Use these API to manage Azure Relay resources through Azure Resources
    /// Manager.
    /// </summary>
    public partial class RelayManagementClient : ServiceClient<RelayManagementClient>, IRelayManagementClient, IAzureClient
    {
        /// <summary>
        /// The base URI of the service.
        /// </summary>
        public System.Uri BaseUri { get; set; }

        /// <summary>
        /// Gets or sets json serialization settings.
        /// </summary>
        public JsonSerializerSettings SerializationSettings { get; private set; }

        /// <summary>
        /// Gets or sets json deserialization settings.
        /// </summary>
        public JsonSerializerSettings DeserializationSettings { get; private set; }

        /// <summary>
        /// Credentials needed for the client to connect to Azure.
        /// </summary>
        public ServiceClientCredentials Credentials { get; private set; }

        /// <summary>
        /// Subscription credentials which uniquely identify Microsoft Azure
        /// subscription. The subscription ID forms part of the URI for every service
        /// call.
        /// </summary>
        public string SubscriptionId { get; set; }

        /// <summary>
        /// Client Api Version.
        /// </summary>
        public string ApiVersion { get; private set; }

        /// <summary>
        /// Gets or sets the preferred language for the response.
        /// </summary>
        public string AcceptLanguage { get; set; }

        /// <summary>
        /// Gets or sets the retry timeout in seconds for Long Running Operations.
        /// Default value is 30.
        /// </summary>
        public int? LongRunningOperationRetryTimeout { get; set; }

        /// <summary>
        /// When set to true a unique x-ms-client-request-id value is generated and
        /// included in each request. Default is true.
        /// </summary>
        public bool? GenerateClientRequestId { get; set; }

        /// <summary>
        /// Gets the IOperations.
        /// </summary>
        public virtual IOperations Operations { get; private set; }

        /// <summary>
        /// Gets the INamespacesOperations.
        /// </summary>
        public virtual INamespacesOperations Namespaces { get; private set; }

        /// <summary>
        /// Gets the IHybridConnectionsOperations.
        /// </summary>
        public virtual IHybridConnectionsOperations HybridConnections { get; private set; }

        /// <summary>
        /// Gets the IWCFRelaysOperations.
        /// </summary>
        public virtual IWCFRelaysOperations WCFRelays { get; private set; }

        /// <summary>
        /// Initializes a new instance of the RelayManagementClient class.
        /// </summary>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        protected RelayManagementClient(params System.Net.Http.DelegatingHandler[] handlers) : base(handlers)
        {
            Initialize();
        }

        /// <summary>
        /// Initializes a new instance of the RelayManagementClient class.
        /// </summary>
        /// <param name='rootHandler'>
        /// Optional. The http client handler used to handle http transport.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        protected RelayManagementClient(System.Net.Http.HttpClientHandler rootHandler, params System.Net.Http.DelegatingHandler[] handlers) : base(rootHandler, handlers)
        {
            Initialize();
        }

        /// <summary>
        /// Initializes a new instance of the RelayManagementClient class.
        /// </summary>
        /// <param name='baseUri'>
        /// Optional. The base URI of the service.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        /// <exception cref="System.ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        protected RelayManagementClient(System.Uri baseUri, params System.Net.Http.DelegatingHandler[] handlers) : this(handlers)
        {
            if (baseUri == null)
            {
                throw new System.ArgumentNullException("baseUri");
            }
            BaseUri = baseUri;
        }

        /// <summary>
        /// Initializes a new instance of the RelayManagementClient class.
        /// </summary>
        /// <param name='baseUri'>
        /// Optional. The base URI of the service.
        /// </param>
        /// <param name='rootHandler'>
        /// Optional. The http client handler used to handle http transport.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        /// <exception cref="System.ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        protected RelayManagementClient(System.Uri baseUri, System.Net.Http.HttpClientHandler rootHandler, params System.Net.Http.DelegatingHandler[] handlers) : this(rootHandler, handlers)
        {
            if (baseUri == null)
            {
                throw new System.ArgumentNullException("baseUri");
            }
            BaseUri = baseUri;
        }

        /// <summary>
        /// Initializes a new instance of the RelayManagementClient class.
        /// </summary>
        /// <param name='credentials'>
        /// Required. Credentials needed for the client to connect to Azure.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        /// <exception cref="System.ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        public RelayManagementClient(ServiceClientCredentials credentials, params System.Net.Http.DelegatingHandler[] handlers) : this(handlers)
        {
            if (credentials == null)
            {
                throw new System.ArgumentNullException("credentials");
            }
            Credentials = credentials;
            if (Credentials != null)
            {
                Credentials.InitializeServiceClient(this);
            }
        }

        /// <summary>
        /// Initializes a new instance of the RelayManagementClient class.
        /// </summary>
        /// <param name='credentials'>
        /// Required. Credentials needed for the client to connect to Azure.
        /// </param>
        /// <param name='rootHandler'>
        /// Optional. The http client handler used to handle http transport.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        /// <exception cref="System.ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        public RelayManagementClient(ServiceClientCredentials credentials, System.Net.Http.HttpClientHandler rootHandler, params System.Net.Http.DelegatingHandler[] handlers) : this(rootHandler, handlers)
        {
            if (credentials == null)
            {
                throw new System.ArgumentNullException("credentials");
            }
            Credentials = credentials;
            if (Credentials != null)
            {
                Credentials.InitializeServiceClient(this);
            }
        }

        /// <summary>
        /// Initializes a new instance of the RelayManagementClient class.
        /// </summary>
        /// <param name='baseUri'>
        /// Optional. The base URI of the service.
        /// </param>
        /// <param name='credentials'>
        /// Required. Credentials needed for the client to connect to Azure.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        /// <exception cref="System.ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        public RelayManagementClient(System.Uri baseUri, ServiceClientCredentials credentials, params System.Net.Http.DelegatingHandler[] handlers) : this(handlers)
        {
            if (baseUri == null)
            {
                throw new System.ArgumentNullException("baseUri");
            }
            if (credentials == null)
            {
                throw new System.ArgumentNullException("credentials");
            }
            BaseUri = baseUri;
            Credentials = credentials;
            if (Credentials != null)
            {
                Credentials.InitializeServiceClient(this);
            }
        }

        /// <summary>
        /// Initializes a new instance of the RelayManagementClient class.
        /// </summary>
        /// <param name='baseUri'>
        /// Optional. The base URI of the service.
        /// </param>
        /// <param name='credentials'>
        /// Required. Credentials needed for the client to connect to Azure.
        /// </param>
        /// <param name='rootHandler'>
        /// Optional. The http client handler used to handle http transport.
        /// </param>
        /// <param name='handlers'>
        /// Optional. The delegating handlers to add to the http client pipeline.
        /// </param>
        /// <exception cref="System.ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        public RelayManagementClient(System.Uri baseUri, ServiceClientCredentials credentials, System.Net.Http.HttpClientHandler rootHandler, params System.Net.Http.DelegatingHandler[] handlers) : this(rootHandler, handlers)
        {
            if (baseUri == null)
            {
                throw new System.ArgumentNullException("baseUri");
            }
            if (credentials == null)
            {
                throw new System.ArgumentNullException("credentials");
            }
            BaseUri = baseUri;
            Credentials = credentials;
            if (Credentials != null)
            {
                Credentials.InitializeServiceClient(this);
            }
        }

        /// <summary>
        /// An optional partial-method to perform custom initialization.
        /// </summary>
        partial void CustomInitialize();

        /// <summary>
        /// Initializes client properties.
        /// </summary>
        private void Initialize()
        {
            Operations = new Operations(this);
            Namespaces = new NamespacesOperations(this);
            HybridConnections = new HybridConnectionsOperations(this);
            WCFRelays = new WCFRelaysOperations(this);
            BaseUri = new System.Uri("https://management.azure.com");
            ApiVersion = "2016-07-01";
            AcceptLanguage = "en-US";
            LongRunningOperationRetryTimeout = 30;
            GenerateClientRequestId = true;
            SerializationSettings = new JsonSerializerSettings
            {
                Formatting = Formatting.Indented,
                DateFormatHandling = DateFormatHandling.IsoDateFormat,
                DateTimeZoneHandling = DateTimeZoneHandling.Utc,
                NullValueHandling = NullValueHandling.Ignore,
                ReferenceLoopHandling = ReferenceLoopHandling.Serialize,
                ContractResolver = new ReadOnlyJsonContractResolver(),
                Converters = new List<JsonConverter>
                    {
                        new Iso8601TimeSpanConverter()
                    }
            };
            SerializationSettings.Converters.Add(new TransformationJsonConverter());
            DeserializationSettings = new JsonSerializerSettings
            {
                DateFormatHandling = DateFormatHandling.IsoDateFormat,
                DateTimeZoneHandling = DateTimeZoneHandling.Utc,
                NullValueHandling = NullValueHandling.Ignore,
                ReferenceLoopHandling = ReferenceLoopHandling.Serialize,
                ContractResolver = new ReadOnlyJsonContractResolver(),
                Converters = new List<JsonConverter>
                    {
                        new Iso8601TimeSpanConverter()
                    }
            };
            // CustomInitialize is a partial method hook, intentionally invoked
            // before the Azure-specific deserialization converters are added so
            // user code may adjust the settings objects first.
            CustomInitialize();
            DeserializationSettings.Converters.Add(new TransformationJsonConverter());
            DeserializationSettings.Converters.Add(new CloudErrorJsonConverter());
        }
    }
}
namespace Volante
{
    using System;
    using System.Collections.Generic;
    using System.Text;

    /// <summary>Whether an index allows duplicate keys.</summary>
    public enum IndexType
    {
        Unique,
        NonUnique
    }

    public enum TransactionMode
    {
        /// <summary>
        /// Exclusive per-thread transaction: each thread accesses database in exclusive mode
        /// </summary>
        Exclusive,
        /// <summary>
        /// Cooperative mode; all threads share the same transaction. Commit will commit changes made
        /// by all threads. To make this schema work correctly, it is necessary to ensure (using locking)
        /// that no thread is performing update of the database while another one tries to perform commit.
        /// Rollback will undo the work of all threads.
        /// </summary>
        Cooperative,
        /// <summary>
        /// Serializable per-thread transaction. Unlike exclusive mode, threads can concurrently access database,
        /// but the effect will be the same as if they were working exclusively.
        /// To provide such behavior, the programmer should lock all accessed objects (or use hierarchical locking).
        /// When an object is updated, an exclusive lock should be set, otherwise a shared lock is enough.
        /// The lock should be preserved until the end of the transaction.
        /// </summary>
        Serializable,
#if WITH_REPLICATION
        /// <summary>
        /// Read only transaction which can be started at a replication slave node.
        /// It runs concurrently with receiving updates from the master node.
        /// </summary>
        ReplicationSlave
#endif
    }

    /// <summary>Kind of in-memory object cache used by the database (see <c>IDatabase.CacheKind</c>).</summary>
    public enum CacheType
    {
        Lru,
        Strong,
        Weak
    }

    /// <summary> Object db
    /// </summary>
    public interface IDatabase
    {
        /// <summary>Get/set database root. Database can have exactly one root.
        /// If you need several root objects and access them by name (as is possible
        /// in many other OODBMSes), create an index and use it as root object.
        /// Previous reference to the root object is rewritten but old root is not
        /// automatically deallocated.
        /// </summary>
        IPersistent Root { get; set; }

        /// <summary>Open the database
        /// </summary>
        /// <param name="filePath">path to the database file
        /// </param>
        /// <param name="cacheSizeInBytes">size of database cache, in bytes.
        /// Minimum size of the cache should be 64kB (64*1024 bytes).
        /// Larger cache usually leads to better performance. If the size is 0
        /// the cache is unlimited - and will grow to the size of the database.
        /// </param>
        void Open(String filePath, int cacheSizeInBytes);

        /// <summary>Open the database with default page pool size (4 MB)
        /// </summary>
        /// <param name="filePath">path to the database file
        /// </param>
        void Open(String filePath);

        /// <summary>Open the db
        /// </summary>
        /// <param name="file">object implementing IFile interface
        /// </param>
        /// <param name="cacheSizeInBytes">size of database cache, in bytes.
        /// Minimum size of the cache should be 64kB (64*1024 bytes).
        /// Larger cache usually leads to better performance. If the size is 0
        /// the cache is unlimited - and will grow to the size of the database.
        /// </param>
        void Open(IFile file, int cacheSizeInBytes);

        /// <summary>Open the database with default cache size
        /// </summary>
        /// <param name="file">user specific implementation of IFile interface
        /// </param>
        void Open(IFile file);

        /// <summary>Check if database is opened
        /// </summary>
        /// <returns><code>true</code> if database was opened by <code>open</code> method,
        /// <code>false</code> otherwise
        /// </returns>
        bool IsOpened { get; }

        /// <summary> Commit changes done by the last transaction. A transaction is started implicitly with the first update
        /// operation.
        /// </summary>
        void Commit();

        /// <summary> Rollback changes made by the last transaction
        /// </summary>
        void Rollback();

        /// <summary>
        /// Backup current state of database
        /// </summary>
        /// <param name="stream">output stream to which backup is done</param>
        void Backup(System.IO.Stream stream);

        /// <summary> Create new index.
        /// K parameter specifies key type, V - associated object type.
        /// </summary>
        /// <param name="indexType">whether index is unique (duplicate values of keys are not allowed)
        /// </param>
        /// <returns>persistent object implementing index
        /// </returns>
        /// <exception cref="Volante.DatabaseException">DatabaseException(DatabaseException.ErrorCode.UNSUPPORTED_INDEX_TYPE) exception if
        /// specified key type is not supported by implementation.
        /// </exception>
        IIndex<K, V> CreateIndex<K, V>(IndexType indexType) where V : class,IPersistent;

        /// <summary> Create new thick index (index with large number of duplicated keys).
        /// K parameter specifies key type, V - associated object type.
        /// </summary>
        /// <returns>persistent object implementing thick index
        /// </returns>
        /// <exception cref="Volante.DatabaseException">DatabaseException(DatabaseException.ErrorCode.UNSUPPORTED_INDEX_TYPE) exception if
        /// specified key type is not supported by implementation.
        /// </exception>
        IIndex<K, V> CreateThickIndex<K, V>() where V : class,IPersistent;

        /// <summary>
        /// Create new field index
        /// K parameter specifies key type, V - associated object type.
        /// </summary>
        /// <param name="fieldName">name of the index field.
        /// A field with such a name should be present in specified class <code>type</code>
        /// </param>
        /// <param name="indexType">whether index is unique (duplicate values of keys are not allowed)
        /// </param>
        /// <returns>persistent object implementing field index
        /// </returns>
        /// <exception cref="Volante.DatabaseException">DatabaseException(DatabaseException.INDEXED_FIELD_NOT_FOUND) if there is no such field in specified class,
        /// DatabaseException(DatabaseException.UNSUPPORTED_INDEX_TYPE) exception if type of specified field is not supported by implementation
        /// </exception>
        IFieldIndex<K, V> CreateFieldIndex<K, V>(string fieldName, IndexType indexType) where V : class,IPersistent;

        /// <summary>
        /// Create new multi-field index
        /// </summary>
        /// <param name="fieldNames">array of names of the fields. Fields with such names should be present in specified class <code>type</code>
        /// </param>
        /// <param name="indexType">whether index is unique (duplicate values of keys are not allowed)
        /// </param>
        /// <returns>persistent object implementing field index
        /// </returns>
        /// <exception cref="Volante.DatabaseException">DatabaseException(DatabaseException.INDEXED_FIELD_NOT_FOUND) if there is no such field in specified class,
        /// DatabaseException(DatabaseException.UNSUPPORTED_INDEX_TYPE) exception if type of specified field is not supported by implementation
        /// </exception>
        IMultiFieldIndex<V> CreateFieldIndex<V>(string[] fieldNames, IndexType indexType) where V : class,IPersistent;

#if WITH_OLD_BTREE
        /// <summary>
        /// Create new bit index. Bit index is used to select objects
        /// with a specified set of (boolean) properties.
        /// </summary>
        /// <returns>persistent object implementing bit index</returns>
        IBitIndex<T> CreateBitIndex<T>() where T : class,IPersistent;
#endif

        /// <summary>
        /// Create new spatial index with integer coordinates
        /// </summary>
        /// <returns>
        /// persistent object implementing spatial index
        /// </returns>
        ISpatialIndex<T> CreateSpatialIndex<T>() where T : class,IPersistent;

        /// <summary>
        /// Create new R2 spatial index
        /// </summary>
        /// <returns>
        /// persistent object implementing spatial index
        /// </returns>
        ISpatialIndexR2<T> CreateSpatialIndexR2<T>() where T : class,IPersistent;

        /// <summary>
        /// Create new sorted collection with specified comparator
        /// </summary>
        /// <param name="comparator">comparator class specifying order in the collection</param>
        /// <param name="indexType"> whether collection is unique (members with the same key value are not allowed)</param>
        /// <returns> persistent object implementing sorted collection</returns>
        ISortedCollection<K, V> CreateSortedCollection<K, V>(PersistentComparator<K, V> comparator, IndexType indexType) where V : class,IPersistent;

        /// <summary>
        /// Create new sorted collection. Members of this collection should implement
        /// <code>System.IComparable</code> interface and make it possible to compare
        /// collection members with each other as well as with a search key.
        /// </summary>
        /// <param name="indexType"> whether collection is unique (members with the same key value are not allowed)</param>
        /// <returns> persistent object implementing sorted collection</returns>
        ISortedCollection<K, V> CreateSortedCollection<K, V>(IndexType indexType) where V : class,IPersistent, IComparable<K>, IComparable<V>;

        /// <summary>Create set of references to persistent objects.
        /// </summary>
        /// <returns>empty set, members can be added to the set later.
        /// </returns>
        ISet<T> CreateSet<T>() where T : class,IPersistent;

        /// <summary>Create set of references to persistent objects.
        /// </summary>
        /// <param name="initialSize">initial size of the set</param>
        /// <returns>empty set, members can be added to the set later.
        /// </returns>
        ISet<T> CreateSet<T>(int initialSize) where T : class,IPersistent;

        /// <summary>Create one-to-many link.
        /// </summary>
        /// <returns>empty link, members can be added to the link later.
        /// </returns>
        ILink<T> CreateLink<T>() where T : class,IPersistent;

        /// <summary>Create one-to-many link with specified initial size.
        /// </summary>
        /// <param name="initialSize">initial size of the array</param>
        /// <returns>empty link with specified size
        /// </returns>
        ILink<T> CreateLink<T>(int initialSize) where T : class,IPersistent;

        /// <summary>Create dynamically extended array of references to persistent objects.
        /// It is intended to be used in classes using virtual properties to
        /// access components of persistent objects.
        /// </summary>
        /// <returns>new empty array, new members can be added to the array later.
        /// </returns>
        IPArray<T> CreateArray<T>() where T : class,IPersistent;

        /// <summary>Create dynamically extended array of references to persistent objects.
        /// It is intended to be used in classes using virtual properties to
        /// access components of persistent objects.
        /// </summary>
        /// <param name="initialSize">initially allocated size of the array</param>
        /// <returns>new empty array, new members can be added to the array later.
        /// </returns>
        IPArray<T> CreateArray<T>(int initialSize) where T : class,IPersistent;

        /// <summary> Create relation object. Unlike a link, which represents an embedded relation and is stored
        /// inside the owner object, this Relation object is a standalone persistent object
        /// containing references to the owner and the members of the relation
        /// </summary>
        /// <param name="owner">owner of the relation
        /// </param>
        /// <returns>object representing empty relation (relation with specified owner and no members),
        /// new members can be added to the link later.
        /// </returns>
        Relation<M, O> CreateRelation<M, O>(O owner) where M : class,IPersistent where O : class,IPersistent;

        /// <summary>
        /// Create new BLOB. Create object for storing large binary data.
        /// </summary>
        /// <returns>empty BLOB</returns>
        IBlob CreateBlob();

        /// <summary>
        /// Create new time series object.
        /// </summary>
        /// <param name="blockSize">number of elements in the block</param>
        /// <param name="maxBlockTimeInterval">maximal difference in system ticks (100 nanoseconds) between timestamps
        /// of the first and the last elements in a block.
        /// If the value of this parameter is too small, then most blocks will contain fewer elements
        /// than preallocated.
        /// If it is too large, then searching of blocks will be inefficient, because the index search
        /// will select a lot of extra blocks which do not contain any element from the
        /// specified range.
        /// Usually the value of this parameter should be set as
        /// (number of elements in block)*(tick interval)*2.
        /// Coefficient 2 here is used to compact possible holes in time series.
        /// For example, if we collect stocks data, we will have data only for working hours.
        /// If number of elements in block is 100, time series period is 1 day, then
        /// value of maxBlockTimeInterval can be set as 100*(24*60*60*10000000L)*2
        /// </param>
        /// <returns>new empty time series</returns>
        ITimeSeries<T> CreateTimeSeries<T>(int blockSize, long maxBlockTimeInterval) where T : ITimeSeriesTick;

#if WITH_PATRICIA
        /// <summary>
        /// Create PATRICIA trie (Practical Algorithm To Retrieve Information Coded In Alphanumeric)
        /// Tries are a kind of tree where each node holds a common part of one or more keys.
        /// PATRICIA trie is one of the many existing variants of the trie, which adds path compression
        /// by grouping common sequences of nodes together.
        /// This structure provides a very efficient way of storing values while maintaining the lookup time
        /// for a key in O(N) in the worst case, where N is the length of the longest key.
        /// This structure has its main use in IP routing software, but can provide an interesting alternative
        /// to other structures such as hashtables when memory space is of concern.
        /// </summary>
        /// <returns>created PATRICIA trie</returns>
        IPatriciaTrie<T> CreatePatriciaTrie<T>() where T : class,IPersistent;
#endif

        /// <summary> Commit transaction (if needed) and close the db
        /// </summary>
        void Close();

        /// <summary>Explicitly start garbage collection
        /// </summary>
        /// <returns>number of collected (deallocated) objects</returns>
        int Gc();

        /// <summary>
        /// Retrieve object by oid. This method should be used with care because
        /// if an object is deallocated, its oid can be reused. In this case
        /// GetObjectByOid() will return a reference to the new object which may have a
        /// different type.
        /// </summary>
        /// <param name="oid">object oid</param>
        /// <returns>reference to the object with specified oid</returns>
        IPersistent GetObjectByOid(int oid);

        /// <summary>
        /// Explicitly make object persistent. Usually objects are made persistent
        /// implicitly using the "persistency on reachability" approach, but this
        /// method allows to do it explicitly. If the object is already persistent, execution of
        /// this method has no effect.
        /// </summary>
        /// <param name="obj">object to be made persistent</param>
        /// <returns>oid assigned to the object</returns>
        int MakePersistent(IPersistent obj);

#if WITH_OLD_BTREE
        /// <summary>
        /// Use alternative implementation of B-Tree (not using direct access to database
        /// file pages). This implementation should be used in case of serializable per-thread transactions.
        /// New implementation of B-Tree will be used instead of old implementation
        /// if AlternativeBtree property is set. New B-Tree has incompatible format with
        /// old B-Tree, so you can not use an old database or XML export file with new indices.
        /// Alternative B-Tree is needed to provide serializable transactions (the old one can not be used).
        /// Also it provides better performance (about 3 times compared with old implementation) because
        /// of object caching. And B-Tree supports keys of user defined types.
        /// Default value: false
        /// </summary>
        bool AlternativeBtree { get; set; }
#endif

        /// <summary>Set/get kind of object cache.
        /// If cache is CacheType.Strong none of the loaded persistent objects
        /// can be deallocated from memory by garbage collection.
        /// CacheType.Weak and CacheType.Lru both use weak references, so loaded
        /// objects can be deallocated. Lru cache can also pin some number of
        /// recently used objects for improved performance.
        /// Default value: CacheType.Lru
        /// </summary>
        CacheType CacheKind { get; set; }

        /// <summary>Set/get initial size of object index. Bigger values increase
        /// initial size of database but reduce number of index reallocations.
        /// Default value: 1024
        /// </summary>
        int ObjectIndexInitSize { get; set; }

        /// <summary>Set/get initial size of object cache. Default value: 1319
        /// </summary>
        int ObjectCacheInitSize { get; set; }

        /// <summary>Set/get object allocation bitmap extension quantum. Memory
        /// is allocated by scanning a bitmap. If there is no hole large enough,
        /// then database is extended by this value. It should not be smaller
        /// than 64 KB.
        /// Default value: 104857 bytes (documented as "1 MB", but 1 MB is
        /// 1048576 bytes — NOTE(review): one of the two is a typo; verify the
        /// actual default against the implementation)
        /// </summary>
        long ExtensionQuantum { get; set; }

        /// <summary>Set/get threshold for initiation of garbage collection. By default garbage
        /// collection is disabled (threshold is set to Int64.MaxValue).
        /// If it is set to a value different from long.MaxValue, GC will be started each time the
        /// delta between total size of allocated and deallocated
        /// objects exceeds the specified threshold OR
        /// after reaching the end of the allocation bitmap in the allocator.
        /// The value is the delta between total size of allocated and deallocated objects since last GC or db opening.
        /// Default value: long.MaxValue
        /// </summary>
        long GcThreshold { get; set; }

        /// <summary>Set/get whether garbage collection is performed in a
        /// separate thread in order to not block main application.
        /// Default value: false
        /// </summary>
        bool BackgroundGc { get; set; }

        /// <summary>Set/get whether dynamic code generation is used to generate
        /// pack/unpack methods for persisted classes.
        /// If used, serialization/deserialization of classes that only have public
        /// fields will be faster. On the downside, those methods must be generated
        /// at startup, increasing startup time.
        /// Default value: false
        /// </summary>
        bool CodeGeneration { get; set; }

#if WITH_REPLICATION
        /// <summary>
        /// Request acknowledgement from slave that it received all data before transaction
        /// commit. If this option is not set, then the replication master node just writes
        /// data to the socket without worrying whether it reaches the slave node or not.
        /// When this option is set to true, the master node will wait during each transaction commit for acknowledgement
        /// from the slave node. This option must be either set or not set at both
        /// slave and master nodes. If it is set only on one of these nodes then behavior of
        /// the system is unpredicted. This option can be used both in synchronous and asynchronous replication
        /// mode. The only difference is that in the first case the main application thread will be blocked waiting
        /// for acknowledgment, while in the asynchronous mode a special replication thread will be blocked
        /// allowing the thread performing commit to proceed.
        /// Default value: false
        /// </summary>
        bool ReplicationAck { get; set; }
#endif

        /// <summary>Get database file.
        /// Should only be used to set FileListener.</summary>
        IFile File { get; }

        /// <summary>Get/set db listener. You can set <code>null</code> listener.
        /// </summary>
        DatabaseListener Listener { get; set; }

        /// <summary>
        /// Set class loader. This class loader will be used to locate classes for
        /// loaded class descriptors. If the class loader is not specified or
        /// it could not find the class, then the class will be searched in all active assemblies
        /// </summary>
        IClassLoader Loader { get; set; }

        /// <summary>
        /// Create persistent class wrapper. This wrapper will implement virtual properties
        /// defined in specified class or interface, performing transparent loading and storing of persistent object
        /// </summary>
        /// <param name="type">Class or interface type of instantiated object</param>
        /// <returns>Wrapper for the specified class, implementing all virtual properties defined
        /// in it
        /// </returns>
        IPersistent CreateClass(Type type);

        /// <summary>
        /// Begin per-thread transaction. Three types of per-thread transactions are supported:
        /// exclusive, cooperative and serializable. In case of exclusive transaction, only one
        /// thread can update the database. In cooperative mode, multiple transactions can work
        /// concurrently and the commit() method will be invoked only when transactions of all threads
        /// are terminated. Serializable transactions can also work concurrently. But unlike
        /// cooperative transactions, the threads are isolated from each other. Each thread
        /// has its own associated set of modified objects and committing the transaction will cause
        /// saving only of these objects to the database. To synchronize access to the objects
        /// in case of serializable transaction the programmer should use lock methods
        /// of the IResource interface. A shared lock should be set before read access to any object,
        /// and an exclusive lock - before write access.
        /// Locks will be automatically released when the
        /// transaction is committed (so the programmer should not explicitly invoke the unlock method).
        /// In this case it is guaranteed that transactions are serializable.
        /// It is not possible to use the <code>IPersistent.store()</code> method in
        /// serializable transactions. That is why it is also not possible to use Index and FieldIndex
        /// containers (since they are based on B-Tree and B-Tree directly accesses database pages
        /// and uses the <code>Store()</code> method to assign an oid to an inserted object).
        /// You should use <code>SortedCollection</code> based on T-Tree instead, or the alternative
        /// B-Tree implementation (set the AlternativeBtree property).
        /// </summary>
        /// <param name="mode"><code>TransactionMode.Exclusive</code>, <code>TransactionMode.Cooperative</code>,
        /// <code>TransactionMode.ReplicationSlave</code> or <code>TransactionMode.Serializable</code>
        /// </param>
        void BeginThreadTransaction(TransactionMode mode);

        /// <summary>
        /// End per-thread transaction started by beginThreadTransaction method.
        /// <ul>
        /// <li>If transaction is <i>exclusive</i>, this method commits the transaction and
        /// allows other thread to proceed.</li><li>
        /// If transaction is <i>serializable</i>, this method commits all changes done by this thread
        /// and releases all locks set by this thread.</li><li>
        /// If transaction is <i>cooperative</i>, this method decrements the counter of cooperative
        /// transactions and if it becomes zero - commits the work</li></ul>
        /// </summary>
        void EndThreadTransaction();

        /// <summary>
        /// End per-thread cooperative transaction with specified maximal delay of transaction
        /// commit. When a cooperative transaction is ended, data is not immediately committed to the
        /// disk (because another cooperative transaction can be active at this moment of time).
        /// Instead the cooperative transaction counter is decremented. Commit is performed
        /// only when this counter reaches zero.
        /// But in case of heavy load there can be a lot of
        /// requests and so a lot of active cooperative transactions, so the transaction counter never reaches zero.
        /// If a system crash happens, a large amount of work will be lost in this case.
        /// To prevent such a scenario, it is possible to specify a maximal delay of pending transaction commit.
        /// In this case, when the timeout has expired, new cooperative transactions will be blocked until the
        /// transaction is committed.
        /// </summary>
        /// <param name="maxDelay">maximal delay in milliseconds of committing transaction. Please notice, that Volante could
        /// not force other threads to commit their cooperative transactions when this timeout is expired. It will only
        /// block new cooperative transactions to make it possible for current transactions to complete their work.
        /// If <code>maxDelay</code> is 0, the current thread will be blocked until all other cooperative transactions are also finished
        /// and changes will be committed to the database.
        /// </param>
        void EndThreadTransaction(int maxDelay);

        /// <summary>
        /// Rollback per-thread transaction. It is safe to use this method only for exclusive transactions.
        /// In case of cooperative transactions, this method rolls back the results of all transactions.
        /// </summary>
        void RollbackThreadTransaction();

        /// <summary>
        /// Get database memory dump. This function returns a hashmap whose key is the classes
        /// of stored objects and value - a TypeMemoryUsage object which specifies the number of instances
        /// of a particular class in the db and total size of memory used by these instances.
        /// Size of internal database structures (object index, memory allocation bitmap) is associated with
        /// the <code>IDatabase</code> class. Size of class descriptors - with the <code>System.Type</code> class.
        /// <p>This method traverses the db as garbage collection does - starting from the root object
        /// and recursively visiting all reachable objects. So it reports statistics only for visible objects.
        /// If total database size is significantly larger than total size of all instances reported
        /// by this method, it means that there is garbage in the database. You can explicitly invoke
        /// the garbage collector in this case.</p>
        /// </summary>
        Dictionary<Type, TypeMemoryUsage> GetMemoryUsage();

        /// <summary>
        /// Get total size of all allocated objects in the database
        /// </summary>
        long UsedSize { get; }

        /// <summary>
        /// Get size of the database
        /// </summary>
        long DatabaseSize { get; }

        // Internal methods (used by the persistence machinery; not part of the public contract)
        void deallocateObject(IPersistent obj);
        void storeObject(IPersistent obj);
        void storeFinalizedObject(IPersistent obj);
        void loadObject(IPersistent obj);
        void modifyObject(IPersistent obj);
        void lockObject(IPersistent obj);
    }
}
using System.Net.Http;
using System.Threading.Tasks;
using System.IO;
using System.Threading;
using System;

namespace QarnotSDK
{
    /// <summary>
    /// This class manages the task life cycle: submission, monitor, delete.
    /// </summary>
    public abstract partial class AQTask
    {
        /// <summary>
        /// Reference to the api connection.
        /// </summary>
        protected Connection _api;

        /// <summary>
        /// The task resource uri (relative, of the form "tasks/{uuid}"; set by the constructors).
        /// </summary>
        protected string _uri = null;

        // Raw API representation of this task; populated at construction time.
        internal TaskApi _taskApi { get; set; }

        /// <summary>
        /// The inner Connection object.
        /// </summary>
        [InternalDataApiName(IsFilterable=false, IsSelectable=false)]
        public virtual Connection Connection { get { return _api; } }

        /// <summary>
        /// The task unique identifier. The Uuid is generated by the Api when the task is submitted.
        /// </summary>
        [InternalDataApiName(Name="Uuid")]
        public virtual Guid Uuid { get { return _taskApi.Uuid; } }

        // Parameterless constructor for deserialization / derived classes.
        internal AQTask() { }

        // Builds a task wrapper bound to an existing TaskApi instance.
        internal AQTask(Connection qapi, TaskApi taskApi) {
            _api = qapi;
            _uri = "tasks/" + taskApi.Uuid.ToString();
            _taskApi = taskApi;
        }

        // Async variant of the constructor logic; currently completes synchronously
        // (Task.FromResult) but is virtual so subclasses can do real async setup.
        internal virtual async Task<AQTask> InitializeAsync(Connection qapi, TaskApi taskApi) {
            _api = qapi;
            _uri = "tasks/" + taskApi.Uuid.ToString();
            _taskApi = taskApi;
            return await Task.FromResult(this);
        }

        #region public methods
        /// <summary>
        /// Abort a running task.
        /// </summary>
        /// <param name="cancellationToken">Optional token to cancel the request.</param>
        /// <returns></returns>
        /// <exception cref="Exception">Thrown when the connection is configured in read-only mode.</exception>
        public virtual async Task AbortAsync(CancellationToken cancellationToken = default(CancellationToken)) {
            if (_api.IsReadOnly) throw new Exception("Can't abort tasks, this connection is configured in read-only mode");
            // POST with no body; any API error is surfaced as an exception by LookForErrorAndThrowAsync.
            using (var response = await _api._client.PostAsync(_uri + "/abort", null, cancellationToken))
                await Utils.LookForErrorAndThrowAsync(_api._client, response, cancellationToken);
        }

        /// <summary>
        /// Delete the task. If the task is running, the task is aborted and deleted.
/// </summary> /// <param name="cancellationToken">Optional token to cancel the request.</param> /// <param name="failIfDoesntExist">If set to false and the task doesn't exist, no exception is thrown. Default is true.</param> /// <param name="purgeResources">Boolean to trigger all resource storages deletion. Default is false.</param> /// <param name="purgeResults">Boolean to trigger result storage deletion. Default is false.</param> /// <returns></returns> public abstract Task DeleteAsync(CancellationToken cancellationToken, bool failIfDoesntExist = false, bool purgeResources=false, bool purgeResults=false); /// <summary> /// Delete the task. If the task is running, the task is aborted and deleted. /// </summary> /// <param name="failIfDoesntExist">If set to false and the task doesn't exist, no exception is thrown. Default is true.</param> /// <param name="purgeResources">Boolean to trigger all resource storages deletion. Default is false.</param> /// <param name="purgeResults">Boolean to trigger result storage deletion. Default is false.</param> /// <returns></returns> public virtual async Task DeleteAsync(bool failIfDoesntExist = false, bool purgeResources=false, bool purgeResults=false) => await DeleteAsync(default(CancellationToken), failIfDoesntExist, purgeResources, purgeResults); #endregion #region resource updates/snapshots /// <summary> /// Request made on a running task to re-sync the resource buckets to the compute nodes. /// 1 - Upload new files on your resource bucket, /// 2 - Call this method, /// 3 - The new files will appear on all the compute nodes in the $DOCKER_WORKDIR folder /// Note: There is no way to know when the files are effectively transfered. This information is available on the compute node only. /// Note: The update is additive only: files deleted from the bucket will NOT be deleted from the task's resources directory. 
/// </summary> /// <param name="cancellationToken">Optional token to cancel the request.</param> /// <returns></returns> public virtual async Task UpdateResourcesAsync(CancellationToken cancellationToken = default(CancellationToken)) { if (_api.IsReadOnly) throw new Exception("Can't update resources, this connection is configured in read-only mode"); var reqMsg = new HttpRequestMessage(new HttpMethod("PATCH"), _uri); using (var response = await _api._client.SendAsync(reqMsg, cancellationToken)) await Utils.LookForErrorAndThrowAsync(_api._client, response, cancellationToken); } /// <summary> /// Request made on a running task to sync the result files in $DOCKER_WORKDIR on the compute node to the result bucket. /// Note: There is no way to know when all the results are effectively transfered. This information is available by monitoring the /// task ResultsCount or by checking the result bucket. /// </summary> /// <param name="cancellationToken">Optional token to cancel the request.</param> /// <returns></returns> public virtual async Task SnapshotAsync(CancellationToken cancellationToken = default(CancellationToken)) { await TriggerSnapshotAsync(cancellationToken: cancellationToken); } /// <summary> /// Start a snapshotting of the results with customized parameters. /// It can have a different whitelist, blacklist, bucket and bucket prefix than a normal snapshot. 
        /// </summary>
        /// <param name="whitelist">Specify a custom whitelist for this snapshot.</param>
        /// <param name="blacklist">Specify a custom blacklist for this snapshot.</param>
        /// <param name="bucket">Specify the name of a custom bucket this snapshot.</param>
        /// <param name="bucketPrefix">Specify a custom prefix for this snapshot.</param>
        /// <param name="cancellationToken">Optional token to cancel the request.</param>
        /// <returns></returns>
        /// <exception cref="Exception">Thrown when the connection is configured in read-only mode.</exception>
        public virtual async Task TriggerSnapshotAsync(string whitelist = null, string blacklist = null, QBucket bucket = null, string bucketPrefix = null, CancellationToken cancellationToken = default(CancellationToken)) {
            // Optionally normalize the bucket prefix before sending it to the API.
            if (_api._shouldSanitizeBucketPaths) {
                bucketPrefix = Utils.GetSanitizedBucketPath(bucketPrefix, _api._showBucketWarnings, false);
            }
            var s = new UniqueSnapshot() {
                Whitelist = whitelist,
                Blacklist = blacklist,
                Bucket = bucket?.UniqueId,
                BucketPrefix = bucketPrefix,
            };
            // NOTE(review): the read-only check happens after sanitization and payload
            // construction; harmless, but the sibling methods check it first — confirm intended.
            if (_api.IsReadOnly) throw new Exception("Can't request a snapshot, this connection is configured in read-only mode");
            using (var response = await _api._client.PostAsJsonAsync<UniqueSnapshot>(_uri + "/snapshot", s, cancellationToken))
                await Utils.LookForErrorAndThrowAsync(_api._client, response, cancellationToken);
        }

        /// <summary>
        /// Start a periodic snapshotting of the results.
        /// </summary>
        /// <param name="interval">Interval in seconds between two snapshots.</param>
        /// <param name="cancellationToken">Optional token to cancel the request.</param>
        /// <returns></returns>
        public virtual async Task SnapshotPeriodicAsync(uint interval, CancellationToken cancellationToken = default(CancellationToken)) {
            // Delegates to the customizable trigger with all defaults.
            await TriggerPeriodicSnapshotAsync(interval, cancellationToken: cancellationToken);
        }

        /// <summary>
        /// Start a periodic snapshotting of the results with customized parameters.
        /// It can have a different whitelist, blacklist, bucket and bucket prefix than a normal snapshot.
        /// </summary>
        /// <param name="interval">Interval in seconds between two snapshots. Values above int.MaxValue
        /// will make the internal Convert.ToInt32 throw an OverflowException.</param>
        /// <param name="whitelist">Specify a custom whitelist for this periodic snapshot.</param>
        /// <param name="blacklist">Specify a custom blacklist for this periodic snapshot.</param>
        /// <param name="bucket">Specify the name of a custom bucket for this periodic snapshot.</param>
        /// <param name="bucketPrefix">Specify a custom prefix for this periodic snapshot.</param>
        /// <param name="cancellationToken">Optional token to cancel the request.</param>
        /// <returns></returns>
        /// <exception cref="Exception">Thrown when the connection is configured in read-only mode.</exception>
        public virtual async Task TriggerPeriodicSnapshotAsync(uint interval, string whitelist = null, string blacklist = null, QBucket bucket = null, string bucketPrefix = null, CancellationToken cancellationToken = default(CancellationToken)) {
            // Optionally normalize the bucket prefix before sending it to the API.
            if (_api._shouldSanitizeBucketPaths) {
                bucketPrefix = Utils.GetSanitizedBucketPath(bucketPrefix, _api._showBucketWarnings, false);
            }
            var s = new PeriodicSnapshot() {
                // The wire format uses a signed int; throws OverflowException for interval > int.MaxValue.
                Interval = Convert.ToInt32(interval),
                Whitelist = whitelist,
                Blacklist = blacklist,
                Bucket = bucket?.UniqueId,
                BucketPrefix = bucketPrefix,
            };
            if (_api.IsReadOnly) throw new Exception("Can't configure snapshots, this connection is configured in read-only mode");
            using (var response = await _api._client.PostAsJsonAsync<PeriodicSnapshot>(_uri + "/snapshot/periodic", s, cancellationToken))
                await Utils.LookForErrorAndThrowAsync(_api._client, response, cancellationToken);
        }
        #endregion

        #region stdin/stdout
        /// <summary>
        /// Copies the full standard output to the given stream.
        /// Note: the standard output will rotate if it's too large.
/// </summary>
/// <param name="destinationStream">The destination stream.</param>
/// <param name="cancellationToken">Optional token to cancel the request.</param>
/// <returns></returns>
public virtual async Task CopyStdoutToAsync(Stream destinationStream, CancellationToken cancellationToken = default(CancellationToken))
{
    // GET the complete (possibly rotated) stdout and stream it into the caller's stream.
    using (var response = await _api._client.GetAsync(_uri + "/stdout", cancellationToken))
    {
        await Utils.LookForErrorAndThrowAsync(_api._client, response, cancellationToken);
        await response.Content.CopyToAsync(destinationStream);
    }
}

/// <summary>
/// Copies the full standard error to the given stream.
/// Note: the standard error will rotate if it's too large.
/// </summary>
/// <param name="destinationStream">The destination stream.</param>
/// <param name="cancellationToken">Optional token to cancel the request.</param>
/// <returns></returns>
public virtual async Task CopyStderrToAsync(Stream destinationStream, CancellationToken cancellationToken = default(CancellationToken))
{
    using (var response = await _api._client.GetAsync(_uri + "/stderr", cancellationToken))
    {
        await Utils.LookForErrorAndThrowAsync(_api._client, response, cancellationToken);
        await response.Content.CopyToAsync(destinationStream);
    }
}

/// <summary>
/// Copies the fresh new standard output since the last call to the given stream.
/// </summary>
/// <param name="destinationStream">The destination stream.</param>
/// <param name="cancellationToken">Optional token to cancel the request.</param>
/// <returns></returns>
public virtual async Task CopyFreshStdoutToAsync(Stream destinationStream, CancellationToken cancellationToken = default(CancellationToken))
{
    if (_api.IsReadOnly) throw new Exception("Can't retrieve fresh standard output, this connection is configured in read-only mode");
    // POST rather than GET — presumably advances the server-side "fresh" cursor
    // (hence the read-only guard above); confirm against the API reference.
    using (var response = await _api._client.PostAsync(_uri + "/stdout", null, cancellationToken))
    {
        await Utils.LookForErrorAndThrowAsync(_api._client, response, cancellationToken);
        await response.Content.CopyToAsync(destinationStream);
    }
}

/// <summary>
/// Copies the fresh new standard error since the last call to the given stream.
/// </summary>
/// <param name="destinationStream">The destination stream.</param>
/// <param name="cancellationToken">Optional token to cancel the request.</param>
/// <returns></returns>
public virtual async Task CopyFreshStderrToAsync(Stream destinationStream, CancellationToken cancellationToken = default(CancellationToken))
{
    if (_api.IsReadOnly) throw new Exception("Can't retrieve fresh standard error, this connection is configured in read-only mode");
    using (var response = await _api._client.PostAsync(_uri + "/stderr", null, cancellationToken))
    {
        await Utils.LookForErrorAndThrowAsync(_api._client, response, cancellationToken);
        await response.Content.CopyToAsync(destinationStream);
    }
}

/// <summary>
/// Returns the full standard output.
/// Note: the standard output will rotate if it's too large.
/// </summary>
/// <param name="cancellationToken">Optional token to cancel the request.</param>
/// <returns>The task standard output.</returns>
public virtual async Task<string> StdoutAsync(CancellationToken cancellationToken = default(CancellationToken))
{
    using (var buffer = new MemoryStream())
    {
        // Download the whole stdout into memory, then decode it as text.
        await CopyStdoutToAsync(buffer, cancellationToken);
        buffer.Seek(0, SeekOrigin.Begin);
        using (var reader = new StreamReader(buffer))
        {
            return await reader.ReadToEndAsync();
        }
    }
}

/// <summary>
/// Return the full standard error.
/// Note: the standard error will rotate if it's too large.
/// </summary>
/// <param name="cancellationToken">Optional token to cancel the request.</param>
/// <returns>The task standard error.</returns>
public virtual async Task<string> StderrAsync(CancellationToken cancellationToken = default(CancellationToken))
{
    using (var buffer = new MemoryStream())
    {
        // Download the whole stderr into memory, then decode it as text.
        await CopyStderrToAsync(buffer, cancellationToken);
        buffer.Seek(0, SeekOrigin.Begin);
        using (var reader = new StreamReader(buffer))
        {
            return await reader.ReadToEndAsync();
        }
    }
}

/// <summary>
/// Returns the fresh new standard output since the last call.
/// </summary>
/// <param name="cancellationToken">Optional token to cancel the request.</param>
/// <returns>The task fresh standard output.</returns>
public virtual async Task<string> FreshStdoutAsync(CancellationToken cancellationToken = default(CancellationToken))
{
    using (var buffer = new MemoryStream())
    {
        // Only the output produced since the previous call is retrieved.
        await CopyFreshStdoutToAsync(buffer, cancellationToken);
        buffer.Seek(0, SeekOrigin.Begin);
        using (var reader = new StreamReader(buffer))
        {
            return await reader.ReadToEndAsync();
        }
    }
}

/// <summary>
/// Returns the fresh new standard error since the last call.
/// </summary>
/// <param name="cancellationToken">Optional token to cancel the request.</param>
/// <returns>The task fresh standard error.</returns>
public virtual async Task<string> FreshStderrAsync(CancellationToken cancellationToken = default(CancellationToken))
{
    using (var buffer = new MemoryStream())
    {
        // Only the error output produced since the previous call is retrieved.
        await CopyFreshStderrToAsync(buffer, cancellationToken);
        buffer.Seek(0, SeekOrigin.Begin);
        using (var reader = new StreamReader(buffer))
        {
            return await reader.ReadToEndAsync();
        }
    }
}
#endregion
}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Security.Cryptography.X509Certificates;
using Microsoft.Win32.SafeHandles;

internal static partial class Interop
{
    // P/Invoke declarations for the native CryptoNative shim over libssl (OpenSSL).
    internal static partial class Ssl
    {
        // Return codes used by TLS extension (ALPN/NPN) callbacks; values mirror OpenSSL's ssl.h.
        internal const int SSL_TLSEXT_ERR_OK = 0;
        internal const int OPENSSL_NPN_NEGOTIATED = 1;
        internal const int SSL_TLSEXT_ERR_NOACK = 3;

        // Signature of the certificate verification callback (SSL_CTX_set_verify style).
        internal delegate int SslCtxSetVerifyCallback(int preverify_ok, IntPtr x509_ctx);

        [DllImport(Libraries.CryptoNative, EntryPoint = "CryptoNative_EnsureLibSslInitialized")]
        internal static extern void EnsureLibSslInitialized();

        [DllImport(Libraries.CryptoNative, EntryPoint = "CryptoNative_SslV2_3Method")]
        internal static extern IntPtr SslV2_3Method();

        [DllImport(Libraries.CryptoNative, EntryPoint = "CryptoNative_SslCreate")]
        internal static extern SafeSslHandle SslCreate(SafeSslContextHandle ctx);

        // Two overloads of the same entry point: one for managed callers holding a safe
        // handle, and a raw-IntPtr one for contexts where the safe handle is unavailable.
        [DllImport(Libraries.CryptoNative, EntryPoint = "CryptoNative_SslGetError")]
        internal static extern SslErrorCode SslGetError(SafeSslHandle ssl, int ret);

        [DllImport(Libraries.CryptoNative, EntryPoint = "CryptoNative_SslGetError")]
        internal static extern SslErrorCode SslGetError(IntPtr ssl, int ret);

        [DllImport(Libraries.CryptoNative, EntryPoint = "CryptoNative_SslSetQuietShutdown")]
        internal static extern void SslSetQuietShutdown(SafeSslHandle ssl, int mode);

        [DllImport(Libraries.CryptoNative, EntryPoint = "CryptoNative_SslDestroy")]
        internal static extern void SslDestroy(IntPtr ssl);

        [DllImport(Libraries.CryptoNative, EntryPoint = "CryptoNative_SslSetConnectState")]
        internal static extern void SslSetConnectState(SafeSslHandle ssl);

        [DllImport(Libraries.CryptoNative, EntryPoint = "CryptoNative_SslSetAcceptState")]
        internal static extern void SslSetAcceptState(SafeSslHandle ssl);

        [DllImport(Libraries.CryptoNative, EntryPoint = "CryptoNative_SslGetVersion")]
        private static extern IntPtr SslGetVersion(SafeSslHandle ssl);

        [DllImport(Libraries.CryptoNative, EntryPoint = "CryptoNative_SslGet0AlpnSelected")]
        internal static extern void SslGetAlpnSelected(SafeSslHandle ssl, out IntPtr protocol, out int len);

        /// <summary>
        /// Returns the ALPN protocol negotiated for the connection, or null when none was selected.
        /// </summary>
        internal static byte[] SslGetAlpnSelected(SafeSslHandle ssl)
        {
            IntPtr protocol;
            int len;
            SslGetAlpnSelected(ssl, out protocol, out len);

            if (len == 0)
                return null;

            // The native buffer is owned by OpenSSL (the "Get0" convention); copy it out.
            byte[] result = new byte[len];
            Marshal.Copy(protocol, result, 0, len);
            return result;
        }

        internal static string GetProtocolVersion(SafeSslHandle ssl)
        {
            // The native version string (e.g. "TLSv1.2") is marshaled as ANSI.
            return Marshal.PtrToStringAnsi(SslGetVersion(ssl));
        }

        [DllImport(Libraries.CryptoNative, EntryPoint = "CryptoNative_GetSslConnectionInfo")]
        internal static extern bool GetSslConnectionInfo(
            SafeSslHandle ssl,
            out int dataCipherAlg,
            out int keyExchangeAlg,
            out int dataHashAlg,
            out int dataKeySize,
            out int hashKeySize);

        [DllImport(Libraries.CryptoNative, EntryPoint = "CryptoNative_SslWrite")]
        internal static extern unsafe int SslWrite(SafeSslHandle ssl, byte* buf, int num);

        [DllImport(Libraries.CryptoNative, EntryPoint = "CryptoNative_SslRead")]
        internal static extern unsafe int SslRead(SafeSslHandle ssl, byte* buf, int num);

        [DllImport(Libraries.CryptoNative, EntryPoint = "CryptoNative_IsSslRenegotiatePending")]
        [return: MarshalAs(UnmanagedType.Bool)]
        internal static extern bool IsSslRenegotiatePending(SafeSslHandle ssl);

        [DllImport(Libraries.CryptoNative, EntryPoint = "CryptoNative_SslShutdown")]
        internal static extern int SslShutdown(IntPtr ssl);

        [DllImport(Libraries.CryptoNative, EntryPoint = "CryptoNative_SslShutdown")]
        internal static extern int SslShutdown(SafeSslHandle ssl);

        [DllImport(Libraries.CryptoNative, EntryPoint = "CryptoNative_SslSetBio")]
        internal static extern void SslSetBio(SafeSslHandle ssl, SafeBioHandle rbio, SafeBioHandle wbio);

        [DllImport(Libraries.CryptoNative, EntryPoint = "CryptoNative_SslDoHandshake")]
        internal static extern int SslDoHandshake(SafeSslHandle ssl);

        [DllImport(Libraries.CryptoNative, EntryPoint = "CryptoNative_IsSslStateOK")]
        [return: MarshalAs(UnmanagedType.Bool)]
        internal static extern bool IsSslStateOK(SafeSslHandle ssl);

        // NOTE: this is just an (unsafe) overload to the BioWrite method from Interop.Bio.cs.
        [DllImport(Libraries.CryptoNative, EntryPoint = "CryptoNative_BioWrite")]
        internal static extern unsafe int BioWrite(SafeBioHandle b, byte* data, int len);

        [DllImport(Libraries.CryptoNative, EntryPoint = "CryptoNative_SslGetPeerCertificate")]
        internal static extern SafeX509Handle SslGetPeerCertificate(SafeSslHandle ssl);

        [DllImport(Libraries.CryptoNative, EntryPoint = "CryptoNative_SslGetPeerCertChain")]
        internal static extern SafeSharedX509StackHandle SslGetPeerCertChain(SafeSslHandle ssl);

        [DllImport(Libraries.CryptoNative, EntryPoint = "CryptoNative_SslGetPeerFinished")]
        internal static extern int SslGetPeerFinished(SafeSslHandle ssl, IntPtr buf, int count);

        [DllImport(Libraries.CryptoNative, EntryPoint = "CryptoNative_SslGetFinished")]
        internal static extern int SslGetFinished(SafeSslHandle ssl, IntPtr buf, int count);

        [DllImport(Libraries.CryptoNative, EntryPoint = "CryptoNative_SslSessionReused")]
        internal static extern bool SslSessionReused(SafeSslHandle ssl);

        [DllImport(Libraries.CryptoNative, EntryPoint = "CryptoNative_SslAddExtraChainCert")]
        internal static extern bool SslAddExtraChainCert(SafeSslHandle ssl, SafeX509Handle x509);

        [DllImport(Libraries.CryptoNative, EntryPoint = "CryptoNative_SslGetClientCAList")]
        private static extern SafeSharedX509NameStackHandle SslGetClientCAList_private(SafeSslHandle ssl);

        /// <summary>
        /// Gets the list of client CA names; the returned stack shares native memory with
        /// the SSL handle, so it is parented to the handle to keep that memory alive.
        /// </summary>
        internal static SafeSharedX509NameStackHandle SslGetClientCAList(SafeSslHandle ssl)
        {
            Crypto.CheckValidOpenSslHandle(ssl);

            SafeSharedX509NameStackHandle handle = SslGetClientCAList_private(ssl);

            if (!handle.IsInvalid)
            {
                handle.SetParent(ssl);
            }

            return handle;
        }

        /// <summary>
        /// Appends the intermediate certificates of a built chain to the SSL handle.
        /// The loop skips index 0 (the leaf) and the last element (the root).
        /// </summary>
        internal static bool AddExtraChainCertificates(SafeSslHandle sslContext, X509Chain chain)
        {
            Debug.Assert(chain != null, "X509Chain should not be null");
            Debug.Assert(chain.ChainElements.Count > 0, "chain.Build should have already been called");

            for (int i = chain.ChainElements.Count - 2; i > 0; i--)
            {
                // Duplicate (up-ref) so the native side can take ownership of its own reference.
                SafeX509Handle dupCertHandle = Crypto.X509UpRef(chain.ChainElements[i].Certificate.Handle);
                Crypto.CheckValidOpenSslHandle(dupCertHandle);
                if (!SslAddExtraChainCert(sslContext, dupCertHandle))
                {
                    dupCertHandle.Dispose(); // we still own the safe handle; clean it up
                    return false;
                }
                dupCertHandle.SetHandleAsInvalid(); // ownership has been transferred to sslHandle; do not free via this safe handle
            }

            return true;
        }

        internal static class SslMethods
        {
            // Version-flexible method pointer; cached once per process.
            internal static readonly IntPtr SSLv23_method = SslV2_3Method();
        }

        internal enum SslErrorCode
        {
            SSL_ERROR_NONE = 0,
            SSL_ERROR_SSL = 1,
            SSL_ERROR_WANT_READ = 2,
            SSL_ERROR_WANT_WRITE = 3,
            SSL_ERROR_SYSCALL = 5,
            SSL_ERROR_ZERO_RETURN = 6,

            // NOTE: this SslErrorCode value doesn't exist in OpenSSL, but
            // we use it to distinguish when a renegotiation is pending.
            // Choosing an arbitrarily large value that shouldn't conflict
            // with any actual OpenSSL error codes
            SSL_ERROR_RENEGOTIATE = 29304
        }
    }
}

namespace Microsoft.Win32.SafeHandles
{
    /// <summary>
    /// Safe handle over a native SSL* connection object, owning its memory BIOs.
    /// </summary>
    internal sealed class SafeSslHandle : SafeHandle
    {
        private SafeBioHandle _readBio;
        private SafeBioHandle _writeBio;
        private bool _isServer;
        private bool _handshakeCompleted = false;

        // Pins managed state (ALPN protocol list) for the lifetime of the handle.
        private GCHandle _alpnHandle;

        public GCHandle AlpnHandle
        {
            set => _alpnHandle = value;
        }

        public bool IsServer
        {
            get { return _isServer; }
        }

        public SafeBioHandle InputBio
        {
            get { return _readBio; }
        }

        public SafeBioHandle OutputBio
        {
            get { return _writeBio; }
        }

        // Once set, ReleaseHandle will attempt a graceful TLS shutdown before destroying.
        internal void MarkHandshakeCompleted()
        {
            _handshakeCompleted = true;
        }

        public static SafeSslHandle Create(SafeSslContextHandle context, bool isServer)
        {
            SafeBioHandle readBio = Interop.Crypto.CreateMemoryBio();
            SafeBioHandle writeBio = Interop.Crypto.CreateMemoryBio();
            SafeSslHandle handle = Interop.Ssl.SslCreate(context);
            if (readBio.IsInvalid || writeBio.IsInvalid || handle.IsInvalid)
            {
                readBio.Dispose();
                writeBio.Dispose();
                handle.Dispose(); // will make IsInvalid==true if it's not already
                return handle;
            }
            handle._isServer = isServer;

            // SslSetBio will transfer ownership of the BIO handles to the SSL context
            try
            {
                readBio.TransferOwnershipToParent(handle);
                writeBio.TransferOwnershipToParent(handle);
                handle._readBio = readBio;
                handle._writeBio = writeBio;
                Interop.Ssl.SslSetBio(handle, readBio, writeBio);
            }
            catch (Exception exc)
            {
                // The only way this should be able to happen without thread aborts is if we hit OOMs while
                // manipulating the safe handles, in which case we may leak the bio handles.
                Debug.Fail("Unexpected exception while transferring SafeBioHandle ownership to SafeSslHandle", exc.ToString());
                throw;
            }

            if (isServer)
            {
                Interop.Ssl.SslSetAcceptState(handle);
            }
            else
            {
                Interop.Ssl.SslSetConnectState(handle);
            }
            return handle;
        }

        public override bool IsInvalid
        {
            get { return handle == IntPtr.Zero; }
        }

        protected override void Dispose(bool disposing)
        {
            if (disposing)
            {
                _readBio?.Dispose();
                _writeBio?.Dispose();
            }

            if (_alpnHandle.IsAllocated)
            {
                _alpnHandle.Free();
            }

            base.Dispose(disposing);
        }

        protected override bool ReleaseHandle()
        {
            if (_handshakeCompleted)
            {
                Disconnect();
            }

            IntPtr h = handle;
            SetHandle(IntPtr.Zero);
            Interop.Ssl.SslDestroy(h); // will free the handles underlying _readBio and _writeBio

            return true;
        }

        private void Disconnect()
        {
            Debug.Assert(!IsInvalid, "Expected a valid context in Disconnect");

            int retVal = Interop.Ssl.SslShutdown(handle);

            // Here, we are ignoring checking for <0 return values from Ssl_Shutdown,
            // since the underlying memory bio is already disposed, we are not
            // interested in reading or writing to it.
            if (retVal == 0)
            {
                // Do a bi-directional shutdown.
                retVal = Interop.Ssl.SslShutdown(handle);
            }
        }

        private SafeSslHandle() : base(IntPtr.Zero, true)
        {
        }

        internal SafeSslHandle(IntPtr validSslPointer, bool ownsHandle) : base(IntPtr.Zero, ownsHandle)
        {
            handle = validSslPointer;
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Collections.Generic;
using Internal.TypeSystem;

using Debug = System.Diagnostics.Debug;

namespace ILCompiler.DependencyAnalysis
{
    /// <summary>
    /// Little-endian binary emitter used to build the payload of an object node:
    /// raw bytes, relocations against symbols, and "reservations" — zero-filled
    /// placeholders that are patched exactly once later.
    /// </summary>
    public struct ObjectDataBuilder : Internal.Runtime.ITargetBinaryWriter
    {
        public ObjectDataBuilder(NodeFactory factory)
        {
            _target = factory.Target;
            _data = new ArrayBuilder<byte>();
            _relocs = new ArrayBuilder<Relocation>();
            Alignment = 1;
            DefinedSymbols = new ArrayBuilder<ISymbolNode>();
#if DEBUG
            _numReservations = 0;
#endif
        }

        private TargetDetails _target;
        private ArrayBuilder<Relocation> _relocs;
        private ArrayBuilder<byte> _data;
        internal int Alignment;
        internal ArrayBuilder<ISymbolNode> DefinedSymbols;

#if DEBUG
        // Count of outstanding reservations; must be back to zero by ToObjectData.
        private int _numReservations;
#endif

        public int CountBytes
        {
            get { return _data.Count; }
        }

        public int TargetPointerSize
        {
            get { return _target.PointerSize; }
        }

        // Alignment only ever grows; callers state a minimum requirement.
        public void RequireAlignment(int align)
        {
            Alignment = Math.Max(align, Alignment);
        }

        public void RequirePointerAlignment()
        {
            RequireAlignment(_target.PointerSize);
        }

        public void EmitByte(byte emit)
        {
            _data.Add(emit);
        }

        // All multi-byte emitters below write little-endian.
        public void EmitShort(short emit)
        {
            EmitByte((byte)(emit & 0xFF));
            EmitByte((byte)((emit >> 8) & 0xFF));
        }

        public void EmitInt(int emit)
        {
            EmitByte((byte)(emit & 0xFF));
            EmitByte((byte)((emit >> 8) & 0xFF));
            EmitByte((byte)((emit >> 16) & 0xFF));
            EmitByte((byte)((emit >> 24) & 0xFF));
        }

        public void EmitLong(long emit)
        {
            EmitByte((byte)(emit & 0xFF));
            EmitByte((byte)((emit >> 8) & 0xFF));
            EmitByte((byte)((emit >> 16) & 0xFF));
            EmitByte((byte)((emit >> 24) & 0xFF));
            EmitByte((byte)((emit >> 32) & 0xFF));
            EmitByte((byte)((emit >> 40) & 0xFF));
            EmitByte((byte)((emit >> 48) & 0xFF));
            EmitByte((byte)((emit >> 56) & 0xFF));
        }

        // Emits a pointer-sized integer for the compilation target.
        public void EmitNaturalInt(int emit)
        {
            if (_target.PointerSize == 8)
            {
                EmitLong(emit);
            }
            else
            {
                Debug.Assert(_target.PointerSize == 4);
                EmitInt(emit);
            }
        }

        // Emits an integer half the size of the target pointer.
        public void EmitHalfNaturalInt(short emit)
        {
            if (_target.PointerSize == 8)
            {
                EmitInt(emit);
            }
            else
            {
                Debug.Assert(_target.PointerSize == 4);
                EmitShort(emit);
            }
        }

        // Variable-length encoding: the number of low-order set bits in the first byte
        // selects a 1-, 2-, 3-, 4- or 5-byte encoding (escape byte 15 + full 4 bytes).
        public void EmitCompressedUInt(uint emit)
        {
            if (emit < 128)
            {
                EmitByte((byte)(emit * 2 + 0));
            }
            else if (emit < 128 * 128)
            {
                EmitByte((byte)(emit * 4 + 1));
                EmitByte((byte)(emit >> 6));
            }
            else if (emit < 128 * 128 * 128)
            {
                EmitByte((byte)(emit * 8 + 3));
                EmitByte((byte)(emit >> 5));
                EmitByte((byte)(emit >> 13));
            }
            else if (emit < 128 * 128 * 128 * 128)
            {
                EmitByte((byte)(emit * 16 + 7));
                EmitByte((byte)(emit >> 4));
                EmitByte((byte)(emit >> 12));
                EmitByte((byte)(emit >> 20));
            }
            else
            {
                EmitByte((byte)15);
                EmitInt((int)emit);
            }
        }

        public void EmitBytes(byte[] bytes)
        {
            _data.Append(bytes);
        }

        public void EmitZeroPointer()
        {
            _data.ZeroExtend(_target.PointerSize);
        }

        public void EmitZeros(int numBytes)
        {
            _data.ZeroExtend(numBytes);
        }

        // A Reservation is the byte offset (stored as an opaque enum value) of a
        // zero-filled placeholder that must be patched via the matching Emit* overload.
        private Reservation GetReservationTicket(int size)
        {
#if DEBUG
            _numReservations++;
#endif
            Reservation ticket = (Reservation)_data.Count;
            _data.ZeroExtend(size);
            return ticket;
        }

        private int ReturnReservationTicket(Reservation reservation)
        {
#if DEBUG
            Debug.Assert(_numReservations > 0);
            _numReservations--;
#endif
            return (int)reservation;
        }

        public Reservation ReserveByte()
        {
            return GetReservationTicket(1);
        }

        public void EmitByte(Reservation reservation, byte emit)
        {
            int offset = ReturnReservationTicket(reservation);
            _data[offset] = emit;
        }

        public Reservation ReserveShort()
        {
            return GetReservationTicket(2);
        }

        public void EmitShort(Reservation reservation, short emit)
        {
            int offset = ReturnReservationTicket(reservation);
            _data[offset] = (byte)(emit & 0xFF);
            _data[offset + 1] = (byte)((emit >> 8) & 0xFF);
        }

        public Reservation ReserveInt()
        {
            return GetReservationTicket(4);
        }

        public void EmitInt(Reservation reservation, int emit)
        {
            int offset = ReturnReservationTicket(reservation);
            _data[offset] = (byte)(emit & 0xFF);
            _data[offset + 1] = (byte)((emit >> 8) & 0xFF);
            _data[offset + 2] = (byte)((emit >> 16) & 0xFF);
            _data[offset + 3] = (byte)((emit >> 24) & 0xFF);
        }

        public void AddRelocAtOffset(ISymbolNode symbol, RelocType relocType, int offset, int delta = 0)
        {
            Relocation symbolReloc = new Relocation(relocType, offset, symbol, delta);
            _relocs.Add(symbolReloc);
        }

        public void EmitReloc(ISymbolNode symbol, RelocType relocType, int delta = 0)
        {
            AddRelocAtOffset(symbol, relocType, _data.Count, delta);

            // And add space for the reloc
            switch (relocType)
            {
                case RelocType.IMAGE_REL_BASED_REL32:
                case RelocType.IMAGE_REL_BASED_ABSOLUTE:
                    EmitInt(0);
                    break;
                case RelocType.IMAGE_REL_BASED_DIR64:
                    EmitLong(0);
                    break;
                default:
                    throw new NotImplementedException();
            }
        }

        // Emits a pointer-sized relocation appropriate for the target pointer width.
        public void EmitPointerReloc(ISymbolNode symbol, int delta = 0)
        {
            EmitReloc(symbol, (_target.PointerSize == 8) ? RelocType.IMAGE_REL_BASED_DIR64 : RelocType.IMAGE_REL_BASED_HIGHLOW, delta);
        }

        public ObjectNode.ObjectData ToObjectData()
        {
#if DEBUG
            // Every reservation must have been patched before the data is finalized.
            Debug.Assert(_numReservations == 0);
#endif

            ObjectNode.ObjectData returnData = new ObjectNode.ObjectData(_data.ToArray(),
                                                                         _relocs.ToArray(),
                                                                         Alignment,
                                                                         DefinedSymbols.ToArray());

            return returnData;
        }

        // Opaque ticket type; the underlying value is the reservation's byte offset.
        public enum Reservation { }
    }
}
// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.

using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using osu.Framework.Allocation;
using osuTK.Graphics;
using osu.Framework.Graphics;
using osu.Framework.Graphics.Containers;
using osu.Game.Graphics.Containers;
using osu.Game.Graphics.Cursor;
using osu.Game.Online.Chat;
using osu.Framework.Graphics.Shapes;
using osu.Game.Graphics;
using osu.Framework.Extensions.Color4Extensions;
using osu.Framework.Graphics.Sprites;
using osu.Game.Graphics.Sprites;

namespace osu.Game.Overlays.Chat
{
    /// <summary>
    /// Displays the message history of a single chat <see cref="Channel"/> as a scrollable
    /// flow of chat lines with day separators, reacting to live message events.
    /// </summary>
    public class DrawableChannel : Container
    {
        public readonly Channel Channel;
        protected FillFlowContainer ChatLineFlow;
        private OsuScrollContainer scroll;

        [Resolved]
        private OsuColour colours { get; set; }

        public DrawableChannel(Channel channel)
        {
            Channel = channel;
            RelativeSizeAxes = Axes.Both;
        }

        [BackgroundDependencyLoader]
        private void load()
        {
            Child = new OsuContextMenuContainer
            {
                RelativeSizeAxes = Axes.Both,
                Masking = true,
                Child = scroll = new OsuScrollContainer
                {
                    RelativeSizeAxes = Axes.Both,
                    // Some chat lines have effects that slightly protrude to the bottom,
                    // which we do not want to mask away, hence the padding.
                    Padding = new MarginPadding { Bottom = 5 },
                    Child = ChatLineFlow = new FillFlowContainer
                    {
                        Padding = new MarginPadding { Left = 20, Right = 20 },
                        RelativeSizeAxes = Axes.X,
                        AutoSizeAxes = Axes.Y,
                        Direction = FillDirection.Vertical,
                    }
                },
            };

            // Populate with any history already present, then subscribe to live updates.
            newMessagesArrived(Channel.Messages);

            Channel.NewMessagesArrived += newMessagesArrived;
            Channel.MessageRemoved += messageRemoved;
            Channel.PendingMessageResolved += pendingMessageResolved;
        }

        protected override void LoadComplete()
        {
            base.LoadComplete();
            scrollToEnd();
        }

        protected override void Dispose(bool isDisposing)
        {
            base.Dispose(isDisposing);

            // Unsubscribe so the channel's events don't keep this drawable alive.
            Channel.NewMessagesArrived -= newMessagesArrived;
            Channel.MessageRemoved -= messageRemoved;
            Channel.PendingMessageResolved -= pendingMessageResolved;
        }

        protected virtual ChatLine CreateChatLine(Message m) => new ChatLine(m);

        protected virtual DaySeparator CreateDaySeparator(DateTimeOffset time) => new DaySeparator(time)
        {
            Margin = new MarginPadding { Vertical = 10 },
            Colour = colours.ChatBlue.Lighten(0.7f),
        };

        private void newMessagesArrived(IEnumerable<Message> newMessages)
        {
            // Stick to the bottom when the user is already near it, the channel is empty,
            // or the batch contains a message sent locally by this user.
            bool shouldScrollToEnd = scroll.IsScrolledToEnd(10) || !chatLines.Any() || newMessages.Any(m => m is LocalMessage);

            // Add up to last Channel.MAX_HISTORY messages
            var displayMessages = newMessages.Skip(Math.Max(0, newMessages.Count() - Channel.MAX_HISTORY));

            Message lastMessage = chatLines.LastOrDefault()?.Message;

            foreach (var message in displayMessages)
            {
                // Insert a day separator whenever the (local) calendar date changes.
                if (lastMessage == null || lastMessage.Timestamp.ToLocalTime().Date != message.Timestamp.ToLocalTime().Date)
                    ChatLineFlow.Add(CreateDaySeparator(message.Timestamp));

                ChatLineFlow.Add(CreateChatLine(message));
                lastMessage = message;
            }

            // Lines not already expiring (LifetimeEnd still at its default) count toward the cap.
            var staleMessages = chatLines.Where(c => c.LifetimeEnd == double.MaxValue).ToArray();
            int count = staleMessages.Length - Channel.MAX_HISTORY;

            if (count > 0)
            {
                // Offset the scroll position by the removed height so the view doesn't jump.
                void expireAndAdjustScroll(Drawable d)
                {
                    scroll.OffsetScrollPosition(-d.DrawHeight);
                    d.Expire();
                }

                for (int i = 0; i < count; i++)
                    expireAndAdjustScroll(staleMessages[i]);

                // remove all adjacent day separators after stale message removal
                for (int i = 0; i < ChatLineFlow.Count - 1; i++)
                {
                    if (!(ChatLineFlow[i] is DaySeparator)) break;
                    if (!(ChatLineFlow[i + 1] is DaySeparator)) break;

                    expireAndAdjustScroll(ChatLineFlow[i]);
                }
            }

            if (shouldScrollToEnd)
                scrollToEnd();
        }

        private void pendingMessageResolved(Message existing, Message updated)
        {
            var found = chatLines.LastOrDefault(c => c.Message == existing);

            if (found != null)
            {
                Trace.Assert(updated.Id.HasValue, "An updated message was returned with no ID.");

                // Remove and re-add so the resolved message re-sorts into its final position.
                ChatLineFlow.Remove(found);
                found.Message = updated;
                ChatLineFlow.Add(found);
            }
        }

        private void messageRemoved(Message removed)
        {
            // Flash red and fade out rather than disappearing instantly.
            chatLines.FirstOrDefault(c => c.Message == removed)?.FadeColour(Color4.Red, 400).FadeOut(600).Expire();
        }

        private IEnumerable<ChatLine> chatLines => ChatLineFlow.Children.OfType<ChatLine>();

        // Scheduled after children so newly-added lines are measured before scrolling.
        private void scrollToEnd() => ScheduleAfterChildren(() => scroll.ScrollToEnd());

        /// <summary>
        /// A horizontal rule with the date, separating messages from different days.
        /// </summary>
        public class DaySeparator : Container
        {
            public float TextSize
            {
                get => text.Font.Size;
                set => text.Font = text.Font.With(size: value);
            }

            private float lineHeight = 2;

            public float LineHeight
            {
                get => lineHeight;
                set => lineHeight = leftBox.Height = rightBox.Height = value;
            }

            private readonly SpriteText text;
            private readonly Box leftBox;
            private readonly Box rightBox;

            public DaySeparator(DateTimeOffset time)
            {
                RelativeSizeAxes = Axes.X;
                AutoSizeAxes = Axes.Y;
                Child = new GridContainer
                {
                    RelativeSizeAxes = Axes.X,
                    AutoSizeAxes = Axes.Y,
                    ColumnDimensions = new[]
                    {
                        new Dimension(),
                        new Dimension(GridSizeMode.AutoSize),
                        new Dimension(),
                    },
                    RowDimensions = new[]
                    {
                        new Dimension(GridSizeMode.AutoSize),
                    },
                    Content = new[]
                    {
                        new Drawable[]
                        {
                            leftBox = new Box
                            {
                                Anchor = Anchor.Centre,
                                Origin = Anchor.Centre,
                                RelativeSizeAxes = Axes.X,
                                Height = lineHeight,
                            },
                            text = new OsuSpriteText
                            {
                                Margin = new MarginPadding { Horizontal = 10 },
                                Text = time.ToLocalTime().ToString("dd MMM yyyy"),
                            },
                            rightBox = new Box
                            {
                                Anchor = Anchor.Centre,
                                Origin = Anchor.Centre,
                                RelativeSizeAxes = Axes.X,
                                Height = lineHeight,
                            },
                        }
                    }
                };
            }
        }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System;
using System.Collections.Generic;
using System.Linq;
using System.Diagnostics.Contracts;
using Microsoft.Research.DataStructures;
using System.IO;

namespace Microsoft.Research.CodeAnalysis
{
    /// <summary>
    /// The manager for candidate postconditions
    /// </summary>
    [ContractClass(typeof(IPostConditionDispatcherContracts))]
    public interface IPostconditionDispatcher
    {
        /// <summary>
        /// Add the postconditions to the list of current candidate postconditions
        /// </summary>
        void AddPostconditions(IEnumerable<BoxedExpression> postconditions);

        /// <summary>
        /// Report, for the given method, the set of non-null fields as inferred
        /// </summary>
        void AddNonNullFields(object method, IEnumerable<object> fields);

        /// <summary>
        /// Get the non-null fields at the method exit point
        /// </summary>
        IEnumerable<object> GetNonNullFields(object method);

        /// <summary>
        /// Returns the list of postconditions for this method
        /// </summary>
        List<BoxedExpression> GeneratePostconditions();

        /// <summary>
        /// Suggest the postconditions.
        /// Returns how many postconditions have been suggested
        /// </summary>
        int SuggestPostconditions();

        /// <summary>
        /// Suggests the postconditions for the constructors of the type currently analyzed
        /// </summary>
        int SuggestNonNullFieldsForConstructors();

        /// <summary>
        /// When all the constructors of a given type are analyzed, it gets the != null object invariants
        /// </summary>
        /// <returns></returns>
        IEnumerable<BoxedExpression> SuggestNonNullObjectInvariantsFromConstructorsForward(bool doNotRecord = false);

        /// <summary>
        /// Infer the postconditions.
        /// Returns how many postconditions have been propagated to the callers
        /// </summary>
        int PropagatePostconditions();

        /// <summary>
        /// Infer postconditions for autoproperties, i.e., if we detect this.F != null, then it will attach the postcondition result != null to the autoproperty F.
        /// Returns how many postconditions have been installed
        /// </summary>
        /// <returns></returns>
        int PropagatePostconditionsForProperties();

        /// <summary>
        /// Returns true if the method may return null directly (e.g. "return null") or indirectly (e.g., "return foo()" where we know that foo may return null)
        /// </summary>
        /// <returns></returns>
        bool MayReturnNull(IFact facts, TimeOutChecker timeout);

        /// <summary>
        /// If we infer false for the method, and it contains user-postconditions, warn the user
        /// </summary>
        bool EmitWarningIfFalseIsInferred();
    }

    #region Contracts
    // Contract class: bodies only state Requires/Ensures; they are never executed.
    [ContractClassFor(typeof(IPostconditionDispatcher))]
    internal abstract class IPostConditionDispatcherContracts : IPostconditionDispatcher
    {
        public void AddPostconditions(IEnumerable<BoxedExpression> postconditions)
        {
            Contract.Requires(postconditions != null);
        }

        public void AddNonNullFields(object method, IEnumerable<object> fields)
        {
            Contract.Requires(method != null);

            throw new NotImplementedException();
        }

        public IEnumerable<object> GetNonNullFields(object method)
        {
            Contract.Requires(method != null);

            return null;
        }

        public List<BoxedExpression> GeneratePostconditions()
        {
            Contract.Ensures(Contract.Result<List<BoxedExpression>>() != null);

            throw new NotImplementedException();
        }

        public int SuggestPostconditions()
        {
            Contract.Ensures(Contract.Result<int>() >= 0);

            throw new NotImplementedException();
        }

        public IEnumerable<BoxedExpression> SuggestNonNullObjectInvariantsFromConstructorsForward(bool doNotRecord)
        {
            Contract.Ensures(Contract.Result<IEnumerable<BoxedExpression>>() != null);

            return null;
        }

        public int SuggestNonNullFieldsForConstructors()
        {
            Contract.Ensures(Contract.Result<int>() >= 0);

            throw new NotImplementedException();
        }

        public int PropagatePostconditions()
        {
            Contract.Ensures(Contract.Result<int>() >= 0);

            throw new NotImplementedException();
        }

        public int PropagatePostconditionsForProperties()
        {
            Contract.Ensures(Contract.Result<int>() >= 0);

            throw new NotImplementedException();
        }

        public bool MayReturnNull(IFact facts, TimeOutChecker timeout)
        {
            Contract.Requires(facts != null);

            throw new NotImplementedException();
        }

        public bool EmitWarningIfFalseIsInferred()
        {
            throw new NotImplementedException();
        }
    }
    #endregion

    /// <summary>
    /// Decorator over an <see cref="IPostconditionDispatcher"/> that counts how many
    /// postconditions were generated and how many were retained after filtering,
    /// for later reporting via <see cref="DumpStatistics"/>.
    /// </summary>
    [ContractVerification(true)]
    public class PostconditionDispatcherProfiler : IPostconditionDispatcher
    {
        #region Object Invariant
        [ContractInvariantMethod]
        private void ObjectInvariant()
        {
            Contract.Invariant(inner != null);
        }
        #endregion

        #region Statics
        // Per-thread counters (ThreadStatic), read by DumpStatistics.
        [ThreadStatic]
        static private int generated;
        [ThreadStatic]
        static private int retained;
        #endregion

        #region State
        private readonly IPostconditionDispatcher inner;
        // Guarantees 'retained' is only incremented once per profiler instance.
        private bool profilingAlreadyCollected;
        #endregion

        #region Constructor
        public PostconditionDispatcherProfiler(IPostconditionDispatcher dispatcher)
        {
            Contract.Requires(dispatcher != null);

            inner = dispatcher;
            profilingAlreadyCollected = false;
        }
        #endregion

        #region Implementation of IPostConditionDispatcher
        public void AddPostconditions(IEnumerable<BoxedExpression> postconditions)
        {
            // NOTE(review): 'postconditions' is enumerated here (Count) and again by the
            // inner dispatcher — double work if the sequence is lazy; verify that callers
            // pass materialized collections.
            generated += postconditions.Count();
            inner.AddPostconditions(postconditions);
        }

        public void AddNonNullFields(object method, IEnumerable<object> fields)
        {
            inner.AddNonNullFields(method, fields);
        }

        public IEnumerable<object> GetNonNullFields(object method)
        {
            return inner.GetNonNullFields(method);
        }

        public List<BoxedExpression> GeneratePostconditions()
        {
            return inner.GeneratePostconditions();
        }

        public int SuggestPostconditions()
        {
            return RecordProfilingInformation(inner.SuggestPostconditions());
        }

        public int SuggestNonNullFieldsForConstructors()
        {
            return inner.SuggestNonNullFieldsForConstructors();
        }

        public IEnumerable<BoxedExpression> SuggestNonNullObjectInvariantsFromConstructorsForward(bool doNotRecord = false)
        {
            return inner.SuggestNonNullObjectInvariantsFromConstructorsForward(doNotRecord);
        }

        public int PropagatePostconditions()
        {
            return RecordProfilingInformation(inner.PropagatePostconditions());
        }

        public int PropagatePostconditionsForProperties()
        {
            return RecordProfilingInformation(inner.PropagatePostconditionsForProperties());
        }

        public bool MayReturnNull(IFact facts, TimeOutChecker timeout)
        {
            return RecordProfilingInformation(inner.MayReturnNull(facts, timeout));
        }

        public bool EmitWarningIfFalseIsInferred()
        {
            // We do not record conditions on emitted false
            return inner.EmitWarningIfFalseIsInferred();
        }
        #endregion

        #region Dumping
        public static void DumpStatistics(IOutput output)
        {
            Contract.Requires(output != null);

            if (generated != 0)
            {
                output.WriteLine("Discovered {0} postconditions to suggest", generated);
                output.WriteLine("Retained {0} postconditions after filtering", retained);
            }
        }
        #endregion

        #region Profiling
        private int RecordProfilingInformation(int howMany)
        {
            Contract.Requires(howMany >= 0);
            Contract.Ensures(retained >= Contract.OldValue(retained));
            Contract.Ensures(Contract.Result<int>() == howMany);

            // Only the first recorded result contributes to 'retained'.
            if (!profilingAlreadyCollected)
            {
                retained += howMany;
                profilingAlreadyCollected = true;
            }

            return howMany;
        }

        private bool RecordProfilingInformation(bool mayReturnNull)
        {
            // We do not keep statistics for that -- for the moment?
            return mayReturnNull;
        }
        #endregion
    }
}
// ***********************************************************************
// Copyright (c) 2009 Charlie Poole
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// ***********************************************************************

using System;
using System.Collections;

namespace NUnit.Framework.Constraints
{
    /// <summary>
    /// ConstraintExpression represents a compound constraint in the
    /// process of being constructed from a series of syntactic elements.
    ///
    /// Individual elements are appended to the expression as they are
    /// recognized. Once an actual Constraint is appended, the expression
    /// returns a resolvable Constraint.
    /// </summary>
    public class ConstraintExpression : ConstraintExpressionBase
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="T:ConstraintExpression"/> class.
        /// </summary>
        public ConstraintExpression() { }

        /// <summary>
        /// Initializes a new instance of the <see cref="T:ConstraintExpression"/>
        /// class passing in a ConstraintBuilder, which may be pre-populated.
        /// </summary>
        /// <param name="builder">The builder.</param>
        public ConstraintExpression(ConstraintBuilder builder)
            : base( builder ) { }

        #region Not

        /// <summary>
        /// Returns a ConstraintExpression that negates any
        /// following constraint.
        /// </summary>
        public ConstraintExpression Not
        {
            get { return this.Append(new NotOperator()); }
        }

        /// <summary>
        /// Returns a ConstraintExpression that negates any
        /// following constraint. Synonym for <see cref="Not"/>.
        /// </summary>
        public ConstraintExpression No
        {
            get { return this.Append(new NotOperator()); }
        }

        #endregion

        #region All

        /// <summary>
        /// Returns a ConstraintExpression, which will apply
        /// the following constraint to all members of a collection,
        /// succeeding if all of them succeed.
        /// </summary>
        public ConstraintExpression All
        {
            get { return this.Append(new AllOperator()); }
        }

        #endregion

        #region Some

        /// <summary>
        /// Returns a ConstraintExpression, which will apply
        /// the following constraint to all members of a collection,
        /// succeeding if at least one of them succeeds.
        /// </summary>
        public ConstraintExpression Some
        {
            get { return this.Append(new SomeOperator()); }
        }

        #endregion

        #region None

        /// <summary>
        /// Returns a ConstraintExpression, which will apply
        /// the following constraint to all members of a collection,
        /// succeeding if all of them fail.
        /// </summary>
        public ConstraintExpression None
        {
            get { return this.Append(new NoneOperator()); }
        }

        #endregion

        #region Exactly(n)

        /// <summary>
        /// Returns a ConstraintExpression, which will apply
        /// the following constraint to all members of a collection,
        /// succeeding only if a specified number of them succeed.
        /// </summary>
        public ConstraintExpression Exactly(int expectedCount)
        {
            return this.Append(new ExactCountOperator(expectedCount));
        }

        #endregion

        #region Property

        /// <summary>
        /// Returns a new PropertyConstraintExpression, which will either
        /// test for the existence of the named property on the object
        /// being tested or apply any following constraint to that property.
        /// </summary>
        public ResolvableConstraintExpression Property(string name)
        {
            return this.Append(new PropOperator(name));
        }

        #endregion

        #region Length

        /// <summary>
        /// Returns a new ConstraintExpression, which will apply the following
        /// constraint to the Length property of the object being tested.
        /// </summary>
        public ResolvableConstraintExpression Length
        {
            get { return Property("Length"); }
        }

        #endregion

        #region Count

        /// <summary>
        /// Returns a new ConstraintExpression, which will apply the following
        /// constraint to the Count property of the object being tested.
        /// </summary>
        public ResolvableConstraintExpression Count
        {
            get { return Property("Count"); }
        }

        #endregion

        #region Message

        /// <summary>
        /// Returns a new ConstraintExpression, which will apply the following
        /// constraint to the Message property of the object being tested.
        /// </summary>
        public ResolvableConstraintExpression Message
        {
            get { return Property("Message"); }
        }

        #endregion

        #region InnerException

        /// <summary>
        /// Returns a new ConstraintExpression, which will apply the following
        /// constraint to the InnerException property of the object being tested.
        /// </summary>
        public ResolvableConstraintExpression InnerException
        {
            get { return Property("InnerException"); }
        }

        #endregion

        #region Attribute

        /// <summary>
        /// Returns a new AttributeConstraint checking for the
        /// presence of a particular attribute on an object.
        /// </summary>
        public ResolvableConstraintExpression Attribute(Type expectedType)
        {
            return this.Append(new AttributeOperator(expectedType));
        }

#if true
        /// <summary>
        /// Returns a new AttributeConstraint checking for the
        /// presence of a particular attribute on an object.
        /// </summary>
        public ResolvableConstraintExpression Attribute<T>()
        {
            return Attribute(typeof(T));
        }
#endif

        #endregion

        #region With

        /// <summary>
        /// With is currently a NOP - reserved for future use.
        /// </summary>
        public ConstraintExpression With
        {
            get { return this.Append(new WithOperator()); }
        }

        #endregion

        #region Matches

        /// <summary>
        /// Returns the constraint provided as an argument - used to allow
        /// custom constraints to easily participate in the syntax.
        /// </summary>
        public Constraint Matches(Constraint constraint)
        {
            return this.Append(constraint);
        }

#if (CLR_2_0 || CLR_4_0) && !NETCF && !NUNITLITE
        /// <summary>
        /// Returns the constraint provided as an argument - used to allow
        /// custom constraints to easily participate in the syntax.
        /// </summary>
        public Constraint Matches<T>(Predicate<T> predicate)
        {
            return this.Append(new PredicateConstraint<T>(predicate));
        }
#endif

        #endregion

        #region Null

        /// <summary>
        /// Returns a constraint that tests for null
        /// </summary>
        public NullConstraint Null
        {
            get { return (NullConstraint)this.Append(new NullConstraint()); }
        }

        #endregion

        #region True

        /// <summary>
        /// Returns a constraint that tests for True
        /// </summary>
        public TrueConstraint True
        {
            get { return (TrueConstraint)this.Append(new TrueConstraint()); }
        }

        #endregion

        #region False

        /// <summary>
        /// Returns a constraint that tests for False
        /// </summary>
        public FalseConstraint False
        {
            get { return (FalseConstraint)this.Append(new FalseConstraint()); }
        }

        #endregion

        #region Positive

        /// <summary>
        /// Returns a constraint that tests for a positive value
        /// </summary>
        public GreaterThanConstraint Positive
        {
            get { return (GreaterThanConstraint)this.Append(new GreaterThanConstraint(0)); }
        }

        #endregion

        #region Negative

        /// <summary>
        /// Returns a constraint that tests for a negative value
        /// </summary>
        public LessThanConstraint Negative
        {
            get { return (LessThanConstraint)this.Append(new LessThanConstraint(0)); }
        }

        #endregion

        #region NaN

        /// <summary>
        /// Returns a constraint that tests for NaN
        /// </summary>
        public NaNConstraint NaN
        {
            get { return (NaNConstraint)this.Append(new NaNConstraint()); }
        }

        #endregion

        #region Empty

        /// <summary>
        /// Returns a constraint that tests for empty
        /// </summary>
        public EmptyConstraint Empty
        {
            get { return (EmptyConstraint)this.Append(new EmptyConstraint()); }
        }

        #endregion

        #region Unique

        /// <summary>
        /// Returns a constraint that tests whether a collection
        /// contains all unique items.
        /// </summary>
        public UniqueItemsConstraint Unique
        {
            get { return (UniqueItemsConstraint)this.Append(new UniqueItemsConstraint()); }
        }

        #endregion

        #region BinarySerializable

#if !NETCF
        /// <summary>
        /// Returns a constraint that tests whether an object graph is serializable in binary format.
        /// </summary>
        public BinarySerializableConstraint BinarySerializable
        {
            get { return (BinarySerializableConstraint)this.Append(new BinarySerializableConstraint()); }
        }
#endif

        #endregion

        #region XmlSerializable

#if !NETCF_1_0
        /// <summary>
        /// Returns a constraint that tests whether an object graph is serializable in xml format.
        /// </summary>
        public XmlSerializableConstraint XmlSerializable
        {
            get { return (XmlSerializableConstraint)this.Append(new XmlSerializableConstraint()); }
        }
#endif

        #endregion

        #region EqualTo

        /// <summary>
        /// Returns a constraint that tests two items for equality
        /// </summary>
        public EqualConstraint EqualTo(object expected)
        {
            return (EqualConstraint)this.Append(new EqualConstraint(expected));
        }

        #endregion

        #region SameAs

        /// <summary>
        /// Returns a constraint that tests that two references are the same object
        /// </summary>
        public SameAsConstraint SameAs(object expected)
        {
            return (SameAsConstraint)this.Append(new SameAsConstraint(expected));
        }

        #endregion

        #region GreaterThan

        /// <summary>
        /// Returns a constraint that tests whether the
        /// actual value is greater than the supplied argument
        /// </summary>
        public GreaterThanConstraint GreaterThan(object expected)
        {
            return (GreaterThanConstraint)this.Append(new GreaterThanConstraint(expected));
        }

        #endregion

        #region GreaterThanOrEqualTo

        /// <summary>
        /// Returns a constraint that tests whether the
        /// actual value is greater than or equal to the supplied argument
        /// </summary>
        public GreaterThanOrEqualConstraint GreaterThanOrEqualTo(object expected)
        {
            return (GreaterThanOrEqualConstraint)this.Append(new GreaterThanOrEqualConstraint(expected));
        }

        /// <summary>
        /// Returns a constraint that tests whether the
        /// actual value is greater than or equal to the supplied argument
        /// </summary>
        public GreaterThanOrEqualConstraint AtLeast(object expected)
        {
            return (GreaterThanOrEqualConstraint)this.Append(new GreaterThanOrEqualConstraint(expected));
        }

        #endregion

        #region LessThan

        /// <summary>
        /// Returns a constraint that tests whether the
        /// actual value is less than the supplied argument
        /// </summary>
        public LessThanConstraint LessThan(object expected)
        {
            return (LessThanConstraint)this.Append(new LessThanConstraint(expected));
        }

        #endregion

        #region LessThanOrEqualTo

        /// <summary>
        /// Returns a constraint that tests whether the
        /// actual value is less than or equal to the supplied argument
        /// </summary>
        public LessThanOrEqualConstraint LessThanOrEqualTo(object expected)
        {
            return (LessThanOrEqualConstraint)this.Append(new LessThanOrEqualConstraint(expected));
        }

        /// <summary>
        /// Returns a constraint that tests whether the
        /// actual value is less than or equal to the supplied argument
        /// </summary>
        public LessThanOrEqualConstraint AtMost(object expected)
        {
            return (LessThanOrEqualConstraint)this.Append(new LessThanOrEqualConstraint(expected));
        }

        #endregion

        #region TypeOf

        /// <summary>
        /// Returns a constraint that tests whether the actual
        /// value is of the exact type supplied as an argument.
        /// </summary>
        public ExactTypeConstraint TypeOf(Type expectedType)
        {
            return (ExactTypeConstraint)this.Append(new ExactTypeConstraint(expectedType));
        }

#if true
        /// <summary>
        /// Returns a constraint that tests whether the actual
        /// value is of the exact type supplied as an argument.
        /// </summary>
        public ExactTypeConstraint TypeOf<T>()
        {
            return (ExactTypeConstraint)this.Append(new ExactTypeConstraint(typeof(T)));
        }
#endif

        #endregion

        #region InstanceOf

        /// <summary>
        /// Returns a constraint that tests whether the actual value
        /// is of the type supplied as an argument or a derived type.
        /// </summary>
        public InstanceOfTypeConstraint InstanceOf(Type expectedType)
        {
            return (InstanceOfTypeConstraint)this.Append(new InstanceOfTypeConstraint(expectedType));
        }

#if true
        /// <summary>
        /// Returns a constraint that tests whether the actual value
        /// is of the type supplied as an argument or a derived type.
        /// </summary>
        public InstanceOfTypeConstraint InstanceOf<T>()
        {
            return (InstanceOfTypeConstraint)this.Append(new InstanceOfTypeConstraint(typeof(T)));
        }
#endif

        #endregion

        #region AssignableFrom

        /// <summary>
        /// Returns a constraint that tests whether the actual value
        /// is assignable from the type supplied as an argument.
        /// </summary>
        public AssignableFromConstraint AssignableFrom(Type expectedType)
        {
            return (AssignableFromConstraint)this.Append(new AssignableFromConstraint(expectedType));
        }

#if true
        /// <summary>
        /// Returns a constraint that tests whether the actual value
        /// is assignable from the type supplied as an argument.
        /// </summary>
        public AssignableFromConstraint AssignableFrom<T>()
        {
            return (AssignableFromConstraint)this.Append(new AssignableFromConstraint(typeof(T)));
        }
#endif

        #endregion

        #region AssignableTo

        /// <summary>
        /// Returns a constraint that tests whether the actual value
        /// is assignable from the type supplied as an argument.
        /// </summary>
        public AssignableToConstraint AssignableTo(Type expectedType)
        {
            return (AssignableToConstraint)this.Append(new AssignableToConstraint(expectedType));
        }

#if true
        /// <summary>
        /// Returns a constraint that tests whether the actual value
        /// is assignable from the type supplied as an argument.
        /// </summary>
        public AssignableToConstraint AssignableTo<T>()
        {
            return (AssignableToConstraint)this.Append(new AssignableToConstraint(typeof(T)));
        }
#endif

        #endregion

        #region EquivalentTo

        /// <summary>
        /// Returns a constraint that tests whether the actual value
        /// is a collection containing the same elements as the
        /// collection supplied as an argument.
        /// </summary>
        public CollectionEquivalentConstraint EquivalentTo(IEnumerable expected)
        {
            return (CollectionEquivalentConstraint)this.Append(new CollectionEquivalentConstraint(expected));
        }

        #endregion

        #region SubsetOf

        /// <summary>
        /// Returns a constraint that tests whether the actual value
        /// is a subset of the collection supplied as an argument.
        /// </summary>
        public CollectionSubsetConstraint SubsetOf(IEnumerable expected)
        {
            return (CollectionSubsetConstraint)this.Append(new CollectionSubsetConstraint(expected));
        }

        #endregion

        #region Ordered

        /// <summary>
        /// Returns a constraint that tests whether a collection is ordered
        /// </summary>
        public CollectionOrderedConstraint Ordered
        {
            get { return (CollectionOrderedConstraint)this.Append(new CollectionOrderedConstraint()); }
        }

        #endregion

        #region Member

        /// <summary>
        /// Returns a new CollectionContainsConstraint checking for the
        /// presence of a particular object in the collection.
        /// </summary>
        public CollectionContainsConstraint Member(object expected)
        {
            return (CollectionContainsConstraint)this.Append(new CollectionContainsConstraint(expected));
        }

        /// <summary>
        /// Returns a new CollectionContainsConstraint checking for the
        /// presence of a particular object in the collection.
        /// </summary>
        public CollectionContainsConstraint Contains(object expected)
        {
            return (CollectionContainsConstraint)this.Append(new CollectionContainsConstraint(expected));
        }

        #endregion

        #region Contains

        /// <summary>
        /// Returns a new ContainsConstraint. This constraint
        /// will, in turn, make use of the appropriate second-level
        /// constraint, depending on the type of the actual argument.
        /// This overload is only used if the item sought is a string,
        /// since any other type implies that we are looking for a
        /// collection member.
        /// </summary>
        public ContainsConstraint Contains(string expected)
        {
            return (ContainsConstraint)this.Append(new ContainsConstraint(expected));
        }

        #endregion

        #region StringContaining

        /// <summary>
        /// Returns a constraint that succeeds if the actual
        /// value contains the substring supplied as an argument.
        /// </summary>
        public SubstringConstraint StringContaining(string expected)
        {
            return (SubstringConstraint)this.Append(new SubstringConstraint(expected));
        }

        /// <summary>
        /// Returns a constraint that succeeds if the actual
        /// value contains the substring supplied as an argument.
        /// </summary>
        public SubstringConstraint ContainsSubstring(string expected)
        {
            return (SubstringConstraint)this.Append(new SubstringConstraint(expected));
        }

        #endregion

        #region StartsWith

        /// <summary>
        /// Returns a constraint that succeeds if the actual
        /// value starts with the substring supplied as an argument.
        /// </summary>
        public StartsWithConstraint StartsWith(string expected)
        {
            return (StartsWithConstraint)this.Append(new StartsWithConstraint(expected));
        }

        /// <summary>
        /// Returns a constraint that succeeds if the actual
        /// value starts with the substring supplied as an argument.
        /// </summary>
        public StartsWithConstraint StringStarting(string expected)
        {
            return (StartsWithConstraint)this.Append(new StartsWithConstraint(expected));
        }

        #endregion

        #region EndsWith

        /// <summary>
        /// Returns a constraint that succeeds if the actual
        /// value ends with the substring supplied as an argument.
        /// </summary>
        public EndsWithConstraint EndsWith(string expected)
        {
            return (EndsWithConstraint)this.Append(new EndsWithConstraint(expected));
        }

        /// <summary>
        /// Returns a constraint that succeeds if the actual
        /// value ends with the substring supplied as an argument.
        /// </summary>
        public EndsWithConstraint StringEnding(string expected)
        {
            return (EndsWithConstraint)this.Append(new EndsWithConstraint(expected));
        }

        #endregion

        #region Matches

#if !NETCF
        /// <summary>
        /// Returns a constraint that succeeds if the actual
        /// value matches the regular expression supplied as an argument.
        /// </summary>
        public RegexConstraint Matches(string pattern)
        {
            return (RegexConstraint)this.Append(new RegexConstraint(pattern));
        }

        /// <summary>
        /// Returns a constraint that succeeds if the actual
        /// value matches the regular expression supplied as an argument.
        /// </summary>
        public RegexConstraint StringMatching(string pattern)
        {
            return (RegexConstraint)this.Append(new RegexConstraint(pattern));
        }
#endif

        #endregion

        #region SamePath

        /// <summary>
        /// Returns a constraint that tests whether the path provided
        /// is the same as an expected path after canonicalization.
        /// </summary>
        public SamePathConstraint SamePath(string expected)
        {
            return (SamePathConstraint)this.Append(new SamePathConstraint(expected));
        }

        #endregion

        #region SamePathOrUnder

        /// <summary>
        /// Returns a constraint that tests whether the path provided
        /// is the same path or under an expected path after canonicalization.
        /// </summary>
        public SamePathOrUnderConstraint SamePathOrUnder(string expected)
        {
            return (SamePathOrUnderConstraint)this.Append(new SamePathOrUnderConstraint(expected));
        }

        #endregion

        #region InRange

        /// <summary>
        /// Returns a constraint that tests whether the actual value falls
        /// within a specified range.
        /// </summary>
        public RangeConstraint InRange(IComparable from, IComparable to)
        {
            return (RangeConstraint)this.Append(new RangeConstraint(from, to));
        }

        #endregion
    }
}
using Lucene.Net.Support;
using System;
using System.Collections.Generic;
using NUnit.Framework;

namespace Lucene.Net.Util.Automaton
{
    /*
     * Licensed to the Apache Software Foundation (ASF) under one or more
     * contributor license agreements.  See the NOTICE file distributed with
     * this work for additional information regarding copyright ownership.
     * The ASF licenses this file to You under the Apache License, Version 2.0
     * (the "License"); you may not use this file except in compliance with
     * the License.  You may obtain a copy of the License at
     *
     *     http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */

    using Lucene.Net.Randomized.Generators;

    /// <summary>
    /// Utilities for testing automata.
    /// <p>
    /// Capable of generating random regular expressions,
    /// and automata, and also provides a number of very
    /// basic unoptimized implementations (*slow) for testing.
    /// </summary>
    public class AutomatonTestUtil
    {
        /// <summary>
        /// Returns a random regular expression string that is guaranteed to
        /// parse as a valid <see cref="RegExp"/> (including full unicode range).
        /// Loops until a randomly generated candidate both is valid UTF-16
        /// and compiles without error.
        /// </summary>
        public static string RandomRegexp(Random r)
        {
            while (true)
            {
                string regexp = RandomRegexpString(r);
                // we will also generate some undefined unicode queries
                if (!UnicodeUtil.ValidUTF16String(regexp.ToCharArray()))
                {
                    continue;
                }
                try
                {
                    new RegExp(regexp, RegExp.NONE);
                    return regexp;
                }
                catch (Exception)
                {
                    // FIX: exception variable was declared but unused (CS0168).
                    // Swallowing is deliberate: an invalid random regexp is
                    // expected; we simply retry with a new candidate.
                }
            }
        }

        // Builds a random candidate regexp string of length 0..19, mixing
        // literal code points (including surrogate pairs) with regexp
        // metacharacters. May produce an invalid pattern; callers must
        // validate (see RandomRegexp).
        private static string RandomRegexpString(Random r)
        {
            int end = r.Next(20);
            if (end == 0)
            {
                // allow 0 length
                return "";
            }
            char[] buffer = new char[end];
            for (int i = 0; i < end; i++)
            {
                int t = r.Next(15);
                if (0 == t && i < end - 1)
                {
                    // Make a surrogate pair
                    // High surrogate
                    buffer[i++] = (char)TestUtil.NextInt(r, 0xd800, 0xdbff);
                    // Low surrogate
                    buffer[i] = (char)TestUtil.NextInt(r, 0xdc00, 0xdfff);
                }
                else if (t <= 1)
                {
                    buffer[i] = (char)r.Next(0x80);
                }
                else if (2 == t)
                {
                    buffer[i] = (char)TestUtil.NextInt(r, 0x80, 0x800);
                }
                else if (3 == t)
                {
                    buffer[i] = (char)TestUtil.NextInt(r, 0x800, 0xd7ff);
                }
                else if (4 == t)
                {
                    buffer[i] = (char)TestUtil.NextInt(r, 0xe000, 0xffff);
                }
                else if (5 == t)
                {
                    buffer[i] = '.';
                }
                else if (6 == t)
                {
                    buffer[i] = '?';
                }
                else if (7 == t)
                {
                    buffer[i] = '*';
                }
                else if (8 == t)
                {
                    buffer[i] = '+';
                }
                else if (9 == t)
                {
                    buffer[i] = '(';
                }
                else if (10 == t)
                {
                    buffer[i] = ')';
                }
                else if (11 == t)
                {
                    buffer[i] = '-';
                }
                else if (12 == t)
                {
                    buffer[i] = '[';
                }
                else if (13 == t)
                {
                    buffer[i] = ']';
                }
                else if (14 == t)
                {
                    buffer[i] = '|';
                }
            }
            return new string(buffer, 0, end);
        }

        /// <summary>
        /// picks a random int code point, avoiding surrogates;
        /// throws IllegalArgumentException if this transition only
        /// accepts surrogates
        /// </summary>
        private static int GetRandomCodePoint(Random r, Transition t)
        {
            int code;
            if (t.Max < UnicodeUtil.UNI_SUR_HIGH_START || t.Min > UnicodeUtil.UNI_SUR_HIGH_END)
            {
                // easy: entire range is before or after surrogates
                code = t.Min + r.Next(t.Max - t.Min + 1);
            }
            else if (t.Min >= UnicodeUtil.UNI_SUR_HIGH_START)
            {
                if (t.Max > UnicodeUtil.UNI_SUR_LOW_END)
                {
                    // after surrogates
                    code = 1 + UnicodeUtil.UNI_SUR_LOW_END + r.Next(t.Max - UnicodeUtil.UNI_SUR_LOW_END);
                }
                else
                {
                    throw new System.ArgumentException("transition accepts only surrogates: " + t);
                }
            }
            else if (t.Max <= UnicodeUtil.UNI_SUR_LOW_END)
            {
                if (t.Min < UnicodeUtil.UNI_SUR_HIGH_START)
                {
                    // before surrogates
                    code = t.Min + r.Next(UnicodeUtil.UNI_SUR_HIGH_START - t.Min);
                }
                else
                {
                    throw new System.ArgumentException("transition accepts only surrogates: " + t);
                }
            }
            else
            {
                // range includes all surrogates: pick either side of the gap
                int gap1 = UnicodeUtil.UNI_SUR_HIGH_START - t.Min;
                int gap2 = t.Max - UnicodeUtil.UNI_SUR_LOW_END;
                int c = r.Next(gap1 + gap2);
                if (c < gap1)
                {
                    code = t.Min + c;
                }
                else
                {
                    code = UnicodeUtil.UNI_SUR_LOW_END + c - gap1 + 1;
                }
            }

            Assert.True(code >= t.Min && code <= t.Max && (code < UnicodeUtil.UNI_SUR_HIGH_START || code > UnicodeUtil.UNI_SUR_LOW_END), "code=" + code + " min=" + t.Min + " max=" + t.Max);
            return code;
        }

        /// <summary>
        /// Lets you retrieve random strings accepted
        /// by an Automaton.
        /// <p>
        /// Once created, call <seealso cref="#getRandomAcceptedString(Random)"/>
        /// to get a new string (in UTF-32 codepoints).
        /// </summary>
        public class RandomAcceptedStrings
        {
            // Marks transitions known to be on the shortest path to an accept
            // state; null when the automaton is a singleton.
            internal readonly IDictionary<Transition, bool?> LeadsToAccept;
            internal readonly Automaton a;

            // Pairing of a source state with one of its outgoing transitions,
            // used to reverse-map arrivals during the backwards BFS.
            private class ArrivingTransition
            {
                internal readonly State From;
                internal readonly Transition t;

                public ArrivingTransition(State from, Transition t)
                {
                    this.From = from;
                    this.t = t;
                }
            }

            public RandomAcceptedStrings(Automaton a)
            {
                this.a = a;
                if (!String.IsNullOrEmpty(a.Singleton))
                {
                    LeadsToAccept = null;
                    return;
                }

                // must use IdentityHashmap because two Transitions w/
                // different start nodes can be considered the same
                LeadsToAccept = new IdentityHashMap<Transition, bool?>();
                IDictionary<State, IList<ArrivingTransition>> allArriving = new Dictionary<State, IList<ArrivingTransition>>();

                LinkedList<State> q = new LinkedList<State>();
                HashSet<State> seen = new HashSet<State>();

                // reverse map the transitions, so we can quickly look
                // up all arriving transitions to a given state
                foreach (State s in a.NumberedStates)
                {
                    for (int i = 0; i < s.numTransitions; i++)
                    {
                        Transition t = s.TransitionsArray[i];
                        IList<ArrivingTransition> tl;
                        allArriving.TryGetValue(t.Dest, out tl);
                        if (tl == null)
                        {
                            tl = new List<ArrivingTransition>();
                            allArriving[t.Dest] = tl;
                        }
                        tl.Add(new ArrivingTransition(s, t));
                    }
                    if (s.Accept)
                    {
                        q.AddLast(s);
                        seen.Add(s);
                    }
                }

                // Breadth-first search, from accept states,
                // backwards:
                while (q.Count > 0)
                {
                    State s = q.First.Value;
                    q.RemoveFirst();
                    IList<ArrivingTransition> arriving;
                    allArriving.TryGetValue(s, out arriving);
                    if (arriving != null)
                    {
                        foreach (ArrivingTransition at in arriving)
                        {
                            State from = at.From;
                            if (!seen.Contains(from))
                            {
                                q.AddLast(from);
                                seen.Add(from);
                                LeadsToAccept[at.t] = true;
                            }
                        }
                    }
                }
            }

            public int[] GetRandomAcceptedString(Random r)
            {
                IList<int?> soFar = new List<int?>();
                if (a.IsSingleton)
                {
                    // accepts only one
                    var s = a.Singleton;
                    int charUpto = 0;
                    while (charUpto < s.Length)
                    {
                        int cp = Character.CodePointAt(s, charUpto);
                        charUpto += Character.CharCount(cp);
                        soFar.Add(cp);
                    }
                }
                else
                {
                    var s = a.InitialState;

                    while (true)
                    {
                        if (s.Accept)
                        {
                            if (s.numTransitions == 0)
                            {
                                // stop now
                                break;
                            }
                            else
                            {
                                if (r.NextBoolean())
                                {
                                    break;
                                }
                            }
                        }

                        if (s.numTransitions == 0)
                        {
                            throw new Exception("this automaton has dead states");
                        }

                        bool cheat = r.NextBoolean();

                        Transition t;
                        if (cheat)
                        {
                            // pick a transition that we know is the fastest
                            // path to an accept state
                            IList<Transition> toAccept = new List<Transition>();
                            for (int i = 0; i < s.numTransitions; i++)
                            {
                                Transition t0 = s.TransitionsArray[i];
                                if (LeadsToAccept.ContainsKey(t0))
                                {
                                    toAccept.Add(t0);
                                }
                            }
                            if (toAccept.Count == 0)
                            {
                                // this is OK -- it means we jumped into a cycle
                                t = s.TransitionsArray[r.Next(s.numTransitions)];
                            }
                            else
                            {
                                t = toAccept[r.Next(toAccept.Count)];
                            }
                        }
                        else
                        {
                            t = s.TransitionsArray[r.Next(s.numTransitions)];
                        }
                        soFar.Add(GetRandomCodePoint(r, t));
                        s = t.Dest;
                    }
                }

                return ArrayUtil.ToIntArray(soFar);
            }
        }

        /// <summary>
        /// return a random NFA/DFA for testing </summary>
        public static Automaton RandomAutomaton(Random random)
        {
            // get two random Automata from regexps
            Automaton a1 = (new RegExp(AutomatonTestUtil.RandomRegexp(random), RegExp.NONE)).ToAutomaton();
            if (random.NextBoolean())
            {
                a1 = BasicOperations.Complement(a1);
            }

            Automaton a2 = (new RegExp(AutomatonTestUtil.RandomRegexp(random), RegExp.NONE)).ToAutomaton();
            if (random.NextBoolean())
            {
                a2 = BasicOperations.Complement(a2);
            }

            // combine them in random ways
            switch (random.Next(4))
            {
                case 0:
                    return BasicOperations.Concatenate(a1, a2);

                case 1:
                    return BasicOperations.Union(a1, a2);

                case 2:
                    return BasicOperations.Intersection(a1, a2);

                default:
                    return BasicOperations.Minus(a1, a2);
            }
        }

        /// <summary>
        /// below are original, unoptimized implementations of DFA operations for testing.
        /// These are from brics automaton, full license (BSD) below:
        /// </summary>

        /*
         * dk.brics.automaton
         *
         * Copyright (c) 2001-2009 Anders Moeller
         * All rights reserved.
         *
         * Redistribution and use in source and binary forms, with or without
         * modification, are permitted provided that the following conditions
         * are met:
         * 1. Redistributions of source code must retain the above copyright
         *    notice, this list of conditions and the following disclaimer.
         * 2. Redistributions in binary form must reproduce the above copyright
         *    notice, this list of conditions and the following disclaimer in the
         *    documentation and/or other materials provided with the distribution.
         * 3. The name of the author may not be used to endorse or promote products
         *    derived from this software without specific prior written permission.
         *
         * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
         * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
         * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
         * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
         * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
         * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
         * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
         * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
         * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
         * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
         */

        /// <summary>
        /// Simple, original brics implementation of Brzozowski minimize()
        /// </summary>
        public static void MinimizeSimple(Automaton a)
        {
            if (!String.IsNullOrEmpty(a.Singleton))
            {
                return;
            }
            // Brzozowski: reverse + determinize, twice.
            DeterminizeSimple(a, SpecialOperations.Reverse(a));
            DeterminizeSimple(a, SpecialOperations.Reverse(a));
        }

        /// <summary>
        /// Simple, original brics implementation of determinize()
        /// </summary>
        public static void DeterminizeSimple(Automaton a)
        {
            if (a.Deterministic || a.Singleton != null)
            {
                return;
            }
            HashSet<State> initialset = new HashSet<State>();
            initialset.Add(a.InitialState);
            DeterminizeSimple(a, initialset);
        }

        /// <summary>
        /// Simple, original brics implementation of determinize()
        /// Determinizes the given automaton using the given set of initial states.
        /// </summary>
        public static void DeterminizeSimple(Automaton a, ISet<State> initialset)
        {
            int[] points = a.StartPoints;
            // subset construction
            IDictionary<ISet<State>, ISet<State>> sets = new Dictionary<ISet<State>, ISet<State>>();
            LinkedList<ISet<State>> worklist = new LinkedList<ISet<State>>();
            IDictionary<ISet<State>, State> newstate = new Dictionary<ISet<State>, State>();
            sets[initialset] = initialset;
            worklist.AddLast(initialset);
            a.InitialState = new State();
            newstate[initialset] = a.InitialState;
            while (worklist.Count > 0)
            {
                ISet<State> s = worklist.First.Value;
                worklist.RemoveFirst();
                State r = newstate[s];
                foreach (State q in s)
                {
                    if (q.Accept)
                    {
                        r.Accept = true;
                        break;
                    }
                }
                for (int n = 0; n < points.Length; n++)
                {
                    ISet<State> p = new HashSet<State>();
                    foreach (State q in s)
                    {
                        foreach (Transition t in q.Transitions)
                        {
                            if (t.Min <= points[n] && points[n] <= t.Max)
                            {
                                p.Add(t.Dest);
                            }
                        }
                    }
                    if (!sets.ContainsKey(p))
                    {
                        sets[p] = p;
                        worklist.AddLast(p);
                        newstate[p] = new State();
                    }
                    State q_ = newstate[p];
                    int min = points[n];
                    int max;
                    if (n + 1 < points.Length)
                    {
                        max = points[n + 1] - 1;
                    }
                    else
                    {
                        max = Character.MAX_CODE_POINT;
                    }
                    r.AddTransition(new Transition(min, max, q_));
                }
            }
            a.Deterministic = true;
            a.ClearNumberedStates();
            a.RemoveDeadTransitions();
        }

        /// <summary>
        /// Returns true if the language of this automaton is finite.
        /// <p>
        /// WARNING: this method is slow, it will blow up if the automaton is large.
        /// this is only used to test the correctness of our faster implementation.
        /// </summary>
        public static bool IsFiniteSlow(Automaton a)
        {
            if (!String.IsNullOrEmpty(a.Singleton))
            {
                return true;
            }
            return IsFiniteSlow(a.InitialState, new HashSet<State>());
        }

        /// <summary>
        /// Checks whether there is a loop containing s. (this is sufficient since
        /// there are never transitions to dead states.)
        /// </summary>
        // TODO: not great that this is recursive... in theory a
        // large automata could exceed java's stack
        private static bool IsFiniteSlow(State s, HashSet<State> path)
        {
            path.Add(s);
            foreach (Transition t in s.Transitions)
            {
                // A destination already on the current DFS path means a cycle,
                // hence an infinite language.
                if (path.Contains(t.Dest) || !IsFiniteSlow(t.Dest, path))
                {
                    return false;
                }
            }
            path.Remove(s);
            return true;
        }

        /// <summary>
        /// Checks that an automaton has no detached states that are unreachable
        /// from the initial state.
        /// </summary>
        public static void AssertNoDetachedStates(Automaton a)
        {
            int numStates = a.NumberOfStates;
            a.ClearNumberedStates(); // force recomputation of cached numbered states
            Assert.True(numStates == a.NumberOfStates, "automaton has " + (numStates - a.NumberOfStates) + " detached states");
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using Xunit;
using System;
using System.Collections;
using System.Collections.Specialized;

namespace System.Collections.Specialized.Tests
{
    /// <summary>
    /// Tests for the <see cref="NameValueCollection(NameValueCollection)"/> copy constructor:
    /// copying an empty collection, copying a filled collection, snapshot semantics
    /// (mutating the source must not affect the copy), and the null-argument contract.
    /// </summary>
    public class CtorNvcTests
    {
        [Fact]
        public void Test01()
        {
            NameValueCollection nvc;
            NameValueCollection nvc1;  // the collection passed as the ctor argument

            // simple string values
            string[] values =
            {
                "",
                " ",
                "a",
                "aa",
                "tExt",
                " SPaces",
                "1",
                "$%^#",
                "2222222222222222222222222",
                System.DateTime.Today.ToString(),
                Int32.MaxValue.ToString()
            };

            // names(keys) for simple string values
            string[] names =
            {
                "zero",
                "oNe",
                " ",
                "",
                "aA",
                "1",
                System.DateTime.Today.ToString(),
                "$%^#",
                Int32.MaxValue.ToString(),
                " spaces",
                "2222222222222222222222222"
            };

            nvc1 = new NameValueCollection();

            //
            // [] create from another empty collection
            //
            nvc = new NameValueCollection(nvc1);
            Assert.True(nvc != null, "Error, collection is null");
            Assert.True(nvc.Count == 0, string.Format("Error, Count = {0} ", nvc.Count));
            Assert.True(nvc.Get("key") == null, "Error, Get(some_key) returned non-null after default ctor");

            string[] keys = nvc.AllKeys;
            Assert.True(keys.Length == 0, string.Format("Error, AllKeys contains {0} keys after default ctor", keys.Length));

            //
            // Item(some_key) should return null
            //
            Assert.True(nvc["key"] == null, "Error, Item(some_key) returned non-null after default ctor");

            //
            // Add(string, string)
            //
            nvc.Add("Name", "Value");
            Assert.True(nvc.Count == 1, string.Format("Error, Count returned {0} instead of 1", nvc.Count));
            Assert.True(string.Equals(nvc["Name"], "Value", StringComparison.Ordinal), "Error, Item() returned unexpected value");

            //
            // Clear()
            //
            nvc.Clear();
            Assert.True(nvc.Count == 0, string.Format("Error, Count returned {0} instead of 0 after Clear()", nvc.Count));
            Assert.True(nvc["Name"] == null, "Error, Item() returned non-null value after Clear()");

            //
            // [] create from filled collection
            //
            int len = values.Length;
            for (int i = 0; i < len; i++)
            {
                nvc1.Add(names[i], values[i]);
            }
            // BUGFIX: the original failure message formatted nvc.Count (the stale, cleared copy)
            // instead of nvc1.Count (the collection actually being verified), and read
            // "Count = {0} after instead of {1}".
            Assert.True(nvc1.Count == len, string.Format("Error, Count = {0} instead of {1}", nvc1.Count, len));

            nvc = new NameValueCollection(nvc1);
            Assert.True(nvc.Count == nvc1.Count, string.Format("Error, Count = {0} instead of {1}", nvc.Count, nvc1.Count));

            string[] keys1 = nvc1.AllKeys;
            keys = nvc.AllKeys;
            Assert.True(keys1.Length == keys.Length,
                string.Format("Error, new collection Keys.Length is {0} instead of {1}", keys.Length, keys1.Length));
            for (int i = 0; i < keys1.Length; i++)
            {
                Assert.True(Array.IndexOf(keys, keys1[i]) >= 0, string.Format("Error, no key \"{0}\" in AllKeys", keys1[i]));
            }

            // Every key of the copy must map to exactly one value, equal to the source's value.
            for (int i = 0; i < keys.Length; i++)
            {
                string[] val = nvc.GetValues(keys[i]);
                // BUGFIX: the original message indexed keys1 with the loop variable of keys;
                // report the key that was actually checked.
                Assert.True((val.Length == 1) && string.Equals(val[0], nvc1.GetValues(keys[i])[0], StringComparison.Ordinal),
                    string.Format("Error, unexpected value at key \"{0}\"", keys[i]));
            }

            //
            // [] change argument collection - the copy must be an independent snapshot
            //

            string toChange = keys1[0];
            string init = nvc1[toChange];

            //
            // Change element
            //
            nvc1[toChange] = "new Value";
            Assert.True(string.Equals(nvc1[toChange], "new Value", StringComparison.Ordinal), "Error, failed to change element");
            Assert.True(string.Equals(nvc[toChange], init, StringComparison.Ordinal), "Error, changed element in new collection");

            //
            // Remove element
            //
            nvc1.Remove(toChange);
            Assert.True(nvc1.Count == len - 1, "Error, failed to remove element");
            Assert.True(nvc.Count == len, "Error, collection changed after argument change - removed element");
            keys = nvc.AllKeys;
            Assert.True(Array.IndexOf(keys, toChange) >= 0, "Error, collection changed after argument change - no key");

            //
            // [] invalid parameter
            //
            Assert.Throws<ArgumentNullException>(() => { nvc = new NameValueCollection((NameValueCollection)null); });
        }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System.Diagnostics;
using System.Diagnostics.Contracts;
using System.Text;

namespace System.IO
{
    /// <summary>Unix-specific half of the cross-platform <see cref="Path"/> partial class.</summary>
    public static partial class Path
    {
        // On Unix '/' serves as both the directory and the volume separator.
        public static readonly char DirectorySeparatorChar = '/';
        public static readonly char VolumeSeparatorChar = '/';
        public static readonly char PathSeparator = ':';

        private const string DirectorySeparatorCharAsString = "/";

        // Only NUL and '/' are invalid inside a Unix file name.
        private static readonly char[] InvalidFileNameChars = { '\0', '/' };

        private static readonly int MaxPath = Interop.Sys.MaxPath;
        private static readonly int MaxLongPath = MaxPath; // Unix has no separate "long path" limit

        private static bool IsDirectoryOrVolumeSeparator(char c)
        {
            // The directory separator is the same as the volume separator,
            // so we only need to check one.
            Debug.Assert(DirectorySeparatorChar == VolumeSeparatorChar);
            return PathInternal.IsDirectorySeparator(c);
        }

        /// <summary>
        /// Expands the given path to a fully qualified path: validates it, roots it against the
        /// current working directory if relative, and collapses "//", "/./" and "/../" segments.
        /// </summary>
        /// <exception cref="ArgumentNullException"><paramref name="path"/> is null.</exception>
        /// <exception cref="ArgumentException"><paramref name="path"/> is empty or contains invalid characters.</exception>
        /// <exception cref="PathTooLongException">The resulting path exceeds the system maximum.</exception>
        public static string GetFullPath(string path)
        {
            if (path == null)
                throw new ArgumentNullException(nameof(path));

            if (path.Length == 0)
                throw new ArgumentException(SR.Arg_PathIllegal);

            PathInternal.CheckInvalidPathChars(path);

            // Expand with current directory if necessary
            if (!IsPathRooted(path))
            {
                path = Combine(Interop.Sys.GetCwd(), path);
            }

            // We would ideally use realpath to do this, but it resolves symlinks, requires that the
            // file actually exist, and turns it into a full path, which we only want if fullCheck is true.
            string collapsedString = RemoveRelativeSegments(path);

            // collapsedString is already a string; the original called .ToString() redundantly here.
            Debug.Assert(collapsedString.Length < path.Length || collapsedString == path,
                "Either we've removed characters, or the string should be unmodified from the input path.");

            if (collapsedString.Length > MaxPath)
            {
                throw new PathTooLongException(SR.IO_PathTooLong);
            }

            // Collapsing everything (e.g. "/.." or "/a/..") leaves nothing; that means the root.
            string result = collapsedString.Length == 0 ? DirectorySeparatorCharAsString : collapsedString;
            return result;
        }

        /// <summary>
        /// Try to remove relative segments from the given path (without combining with a root).
        /// Removes "//", "/./", and "/../" sequences; returns the original string when nothing changed.
        /// </summary>
        /// <param name="skip">Skip the specified number of characters before evaluating.</param>
        private static string RemoveRelativeSegments(string path, int skip = 0)
        {
            bool flippedSeparator = false;

            // Remove "//", "/./", and "/../" from the path by copying each character to the output,
            // except the ones we're removing, such that the builder contains the normalized path
            // at the end.
            var sb = StringBuilderCache.Acquire(path.Length);
            if (skip > 0)
            {
                sb.Append(path, 0, skip);
            }

            int componentCharCount = 0;
            for (int i = skip; i < path.Length; i++)
            {
                char c = path[i];

                if (PathInternal.IsDirectorySeparator(c) && i + 1 < path.Length)
                {
                    componentCharCount = 0;

                    // Skip this character if it's a directory separator and if the next character is, too,
                    // e.g. "parent//child" => "parent/child"
                    if (PathInternal.IsDirectorySeparator(path[i + 1]))
                    {
                        continue;
                    }

                    // Skip this character and the next if it's referring to the current directory,
                    // e.g. "parent/./child" => "parent/child"
                    if ((i + 2 == path.Length || PathInternal.IsDirectorySeparator(path[i + 2])) &&
                        path[i + 1] == '.')
                    {
                        i++;
                        continue;
                    }

                    // Skip this character and the next two if it's referring to the parent directory,
                    // e.g. "parent/child/../grandchild" => "parent/grandchild"
                    if (i + 2 < path.Length &&
                        (i + 3 == path.Length || PathInternal.IsDirectorySeparator(path[i + 3])) &&
                        path[i + 1] == '.' && path[i + 2] == '.')
                    {
                        // Unwind back to the last slash (and if there isn't one, clear out everything).
                        int s;
                        for (s = sb.Length - 1; s >= 0; s--)
                        {
                            if (PathInternal.IsDirectorySeparator(sb[s]))
                            {
                                sb.Length = s;
                                break;
                            }
                        }
                        if (s < 0)
                        {
                            sb.Length = 0;
                        }

                        i += 2;
                        continue;
                    }
                }

                // Enforce the per-component length limit while copying.
                if (++componentCharCount > PathInternal.MaxComponentLength)
                {
                    throw new PathTooLongException(SR.IO_PathTooLong);
                }

                // Normalize the directory separator if needed
                if (c != Path.DirectorySeparatorChar && c == Path.AltDirectorySeparatorChar)
                {
                    c = Path.DirectorySeparatorChar;
                    flippedSeparator = true;
                }

                sb.Append(c);
            }

            if (flippedSeparator || sb.Length != path.Length)
            {
                return StringBuilderCache.GetStringAndRelease(sb);
            }
            else
            {
                // We haven't changed the source path, return the original
                StringBuilderCache.Release(sb);
                return path;
            }
        }

        private static string RemoveLongPathPrefix(string path)
        {
            return path; // nop.  There's nothing special about "long" paths on Unix.
        }

        /// <summary>
        /// Returns the temp directory: $TMPDIR if set (with a trailing slash ensured),
        /// otherwise "/tmp/".
        /// </summary>
        public static string GetTempPath()
        {
            const string TempEnvVar = "TMPDIR";
            const string DefaultTempPath = "/tmp/";

            // Get the temp path from the TMPDIR environment variable.
            // If it's not set, just return the default path.
            // If it is, return it, ensuring it ends with a slash.
            string path = Environment.GetEnvironmentVariable(TempEnvVar);
            return
                string.IsNullOrEmpty(path) ? DefaultTempPath :
                PathInternal.IsDirectorySeparator(path[path.Length - 1]) ? path :
                path + DirectorySeparatorChar;
        }

        private static string InternalGetTempFileName(bool checkHost)
        {
            const string Suffix = ".tmp";
            const int SuffixByteLength = 4;

            // mkstemps takes a char* and overwrites the XXXXXX with six characters
            // that'll result in a unique file name.
            string template = GetTempPath() + "tmpXXXXXX" + Suffix + "\0";
            byte[] name = Encoding.UTF8.GetBytes(template);

            // Create, open, and close the temp file.
            IntPtr fd = Interop.CheckIo(Interop.Sys.MksTemps(name, SuffixByteLength));
            Interop.Sys.Close(fd); // ignore any errors from close; nothing to do if cleanup isn't possible

            // 'name' is now the name of the file
            Debug.Assert(name[name.Length - 1] == '\0');
            return Encoding.UTF8.GetString(name, 0, name.Length - 1); // trim off the trailing '\0'
        }

        /// <summary>A Unix path is rooted iff it starts with '/'. Null returns false.</summary>
        public static bool IsPathRooted(string path)
        {
            if (path == null)
                return false;

            PathInternal.CheckInvalidPathChars(path);
            return path.Length > 0 && path[0] == DirectorySeparatorChar;
        }

        /// <summary>Returns "/" for rooted paths, "" for relative paths, null for null.</summary>
        public static string GetPathRoot(string path)
        {
            if (path == null)
                return null;
            return IsPathRooted(path) ? DirectorySeparatorCharAsString : String.Empty;
        }

        private static byte[] CreateCryptoRandomByteArray(int byteLength)
        {
            var arr = new byte[byteLength];
            if (!Interop.Crypto.GetRandomBytes(arr, arr.Length))
            {
                throw new InvalidOperationException(SR.InvalidOperation_Cryptography);
            }
            return arr;
        }
    }
}
//------------------------------------------------------------ // Copyright (c) Microsoft Corporation. All rights reserved. //------------------------------------------------------------ namespace System.ServiceModel.Security { using System; using System.Collections.Generic; using System.IdentityModel.Selectors; using System.IdentityModel.Tokens; using System.Runtime; using System.ServiceModel; using System.ServiceModel.Security.Tokens; using System.Xml; using StrEntry = WSSecurityTokenSerializer.StrEntry; using TokenEntry = WSSecurityTokenSerializer.TokenEntry; abstract class WSSecureConversation : WSSecurityTokenSerializer.SerializerEntries { WSSecurityTokenSerializer tokenSerializer; DerivedKeyTokenEntry derivedKeyEntry; protected WSSecureConversation(WSSecurityTokenSerializer tokenSerializer, int maxKeyDerivationOffset, int maxKeyDerivationLabelLength, int maxKeyDerivationNonceLength) { if (tokenSerializer == null) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("tokenSerializer"); } this.tokenSerializer = tokenSerializer; this.derivedKeyEntry = new DerivedKeyTokenEntry(this, maxKeyDerivationOffset, maxKeyDerivationLabelLength, maxKeyDerivationNonceLength); } public abstract SecureConversationDictionary SerializerDictionary { get; } public WSSecurityTokenSerializer WSSecurityTokenSerializer { get { return this.tokenSerializer; } } public override void PopulateTokenEntries(IList<TokenEntry> tokenEntryList) { if (tokenEntryList == null) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("tokenEntryList"); } tokenEntryList.Add(this.derivedKeyEntry); } public virtual bool IsAtDerivedKeyToken(XmlDictionaryReader reader) { return this.derivedKeyEntry.CanReadTokenCore(reader); } public virtual void ReadDerivedKeyTokenParameters(XmlDictionaryReader reader, SecurityTokenResolver tokenResolver, out string id, out string derivationAlgorithm, out string label, out int length, out byte[] nonce, out int offset, out int generation, out 
SecurityKeyIdentifierClause tokenToDeriveIdentifier, out SecurityToken tokenToDerive) { this.derivedKeyEntry.ReadDerivedKeyTokenParameters(reader, tokenResolver, out id, out derivationAlgorithm, out label, out length, out nonce, out offset, out generation, out tokenToDeriveIdentifier, out tokenToDerive); } public virtual SecurityToken CreateDerivedKeyToken(string id, string derivationAlgorithm, string label, int length, byte[] nonce, int offset, int generation, SecurityKeyIdentifierClause tokenToDeriveIdentifier, SecurityToken tokenToDerive) { return this.derivedKeyEntry.CreateDerivedKeyToken(id, derivationAlgorithm, label, length, nonce, offset, generation, tokenToDeriveIdentifier, tokenToDerive); } public virtual string DerivationAlgorithm { get { return SecurityAlgorithms.Psha1KeyDerivation; } } protected class DerivedKeyTokenEntry : WSSecurityTokenSerializer.TokenEntry { public const string DefaultLabel = "WS-SecureConversation"; WSSecureConversation parent; int maxKeyDerivationOffset; int maxKeyDerivationLabelLength; int maxKeyDerivationNonceLength; public DerivedKeyTokenEntry(WSSecureConversation parent, int maxKeyDerivationOffset, int maxKeyDerivationLabelLength, int maxKeyDerivationNonceLength) { if (parent == null) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("parent"); } this.parent = parent; this.maxKeyDerivationOffset = maxKeyDerivationOffset; this.maxKeyDerivationLabelLength = maxKeyDerivationLabelLength; this.maxKeyDerivationNonceLength = maxKeyDerivationNonceLength; } protected override XmlDictionaryString LocalName { get { return parent.SerializerDictionary.DerivedKeyToken; } } protected override XmlDictionaryString NamespaceUri { get { return parent.SerializerDictionary.Namespace; } } protected override Type[] GetTokenTypesCore() { return new Type[] { typeof(DerivedKeySecurityToken) }; } public override string TokenTypeUri { get { return parent.SerializerDictionary.DerivedKeyTokenType.Value; } } protected override string 
ValueTypeUri { get { return null; } } public override SecurityKeyIdentifierClause CreateKeyIdentifierClauseFromTokenXmlCore(XmlElement issuedTokenXml, SecurityTokenReferenceStyle tokenReferenceStyle) { TokenReferenceStyleHelper.Validate(tokenReferenceStyle); switch (tokenReferenceStyle) { case SecurityTokenReferenceStyle.Internal: return CreateDirectReference(issuedTokenXml, UtilityStrings.IdAttribute, UtilityStrings.Namespace, typeof(DerivedKeySecurityToken)); case SecurityTokenReferenceStyle.External: // DerivedKeys aren't referred to externally return null; default: throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentOutOfRangeException("tokenReferenceStyle")); } } // xml format //<DerivedKeyToken wsu:Id="..." wsse:Algorithm="..."> id required, alg optional (curr disallowed) // <SecurityTokenReference>...</SecurityTokenReference> - required // <Properties>...</Properties> - disallowed (optional in spec, but we disallow it) // choice begin - (schema requires a choice - we allow neither on read - we always write one) // <Generation>...</Generation> - optional // <Offset>...</Offset> - optional // choice end // <Length>...</Length> - optional - default 32 on read (default specified in spec, not in schema - we always write it) // <Label>...</Label> - optional // <Nonce>...</Nonce> - required (optional in spec, but we require it) //</DerivedKeyToken> public virtual void ReadDerivedKeyTokenParameters(XmlDictionaryReader reader, SecurityTokenResolver tokenResolver, out string id, out string derivationAlgorithm, out string label, out int length, out byte[] nonce, out int offset, out int generation, out SecurityKeyIdentifierClause tokenToDeriveIdentifier, out SecurityToken tokenToDerive) { if (tokenResolver == null) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("tokenResolver"); } id = reader.GetAttribute(XD.UtilityDictionary.IdAttribute, XD.UtilityDictionary.Namespace); derivationAlgorithm = 
reader.GetAttribute(XD.XmlSignatureDictionary.Algorithm, null); if (derivationAlgorithm == null) { derivationAlgorithm = parent.DerivationAlgorithm; } reader.ReadStartElement(); tokenToDeriveIdentifier = null; tokenToDerive = null; if (reader.IsStartElement(XD.SecurityJan2004Dictionary.SecurityTokenReference, XD.SecurityJan2004Dictionary.Namespace)) { tokenToDeriveIdentifier = parent.WSSecurityTokenSerializer.ReadKeyIdentifierClause(reader); tokenResolver.TryResolveToken(tokenToDeriveIdentifier, out tokenToDerive); } else { throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new XmlException(SR.GetString(SR.DerivedKeyTokenRequiresTokenReference))); } // no support for properties generation = -1; if (reader.IsStartElement(parent.SerializerDictionary.Generation, parent.SerializerDictionary.Namespace)) { reader.ReadStartElement(); generation = reader.ReadContentAsInt(); reader.ReadEndElement(); if (generation < 0) throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new XmlException(SR.GetString(SR.DerivedKeyInvalidGenerationSpecified, generation))); } offset = -1; if (reader.IsStartElement(parent.SerializerDictionary.Offset, parent.SerializerDictionary.Namespace)) { reader.ReadStartElement(); offset = reader.ReadContentAsInt(); reader.ReadEndElement(); if (offset < 0) throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new XmlException(SR.GetString(SR.DerivedKeyInvalidOffsetSpecified, offset))); } length = DerivedKeySecurityToken.DefaultDerivedKeyLength; if (reader.IsStartElement(parent.SerializerDictionary.Length, parent.SerializerDictionary.Namespace)) { reader.ReadStartElement(); length = reader.ReadContentAsInt(); reader.ReadEndElement(); } if ((offset == -1) && (generation == -1)) offset = 0; // verify that the offset is not larger than the max allowed DerivedKeySecurityToken.EnsureAcceptableOffset(offset, generation, length, this.maxKeyDerivationOffset); label = null; if (reader.IsStartElement(parent.SerializerDictionary.Label, 
parent.SerializerDictionary.Namespace)) { reader.ReadStartElement(); label = reader.ReadString(); reader.ReadEndElement(); } if (label != null && label.Length > this.maxKeyDerivationLabelLength) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperWarning(new MessageSecurityException(SR.GetString(SR.DerivedKeyTokenLabelTooLong, label.Length, this.maxKeyDerivationLabelLength))); } nonce = null; reader.ReadStartElement(parent.SerializerDictionary.Nonce, parent.SerializerDictionary.Namespace); nonce = reader.ReadContentAsBase64(); reader.ReadEndElement(); if (nonce != null && nonce.Length > this.maxKeyDerivationNonceLength) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperWarning(new MessageSecurityException(SR.GetString(SR.DerivedKeyTokenNonceTooLong, nonce.Length, this.maxKeyDerivationNonceLength))); } reader.ReadEndElement(); } public virtual SecurityToken CreateDerivedKeyToken(string id, string derivationAlgorithm, string label, int length, byte[] nonce, int offset, int generation, SecurityKeyIdentifierClause tokenToDeriveIdentifier, SecurityToken tokenToDerive) { if (tokenToDerive == null) { return new DerivedKeySecurityTokenStub(generation, offset, length, label, nonce, tokenToDeriveIdentifier, derivationAlgorithm, id); } else { return new DerivedKeySecurityToken(generation, offset, length, label, nonce, tokenToDerive, tokenToDeriveIdentifier, derivationAlgorithm, id); } } public override SecurityToken ReadTokenCore(XmlDictionaryReader reader, SecurityTokenResolver tokenResolver) { string id; string derivationAlgorithm; string label; int length; byte[] nonce; int offset; int generation; SecurityKeyIdentifierClause tokenToDeriveIdentifier; SecurityToken tokenToDerive; this.ReadDerivedKeyTokenParameters(reader, tokenResolver, out id, out derivationAlgorithm, out label, out length, out nonce, out offset, out generation, out tokenToDeriveIdentifier, out tokenToDerive); return CreateDerivedKeyToken(id, derivationAlgorithm, label, length, nonce, offset, 
generation, tokenToDeriveIdentifier, tokenToDerive); } public override void WriteTokenCore(XmlDictionaryWriter writer, SecurityToken token) { DerivedKeySecurityToken derivedKeyToken = token as DerivedKeySecurityToken; string serializerPrefix = parent.SerializerDictionary.Prefix.Value; writer.WriteStartElement(serializerPrefix, parent.SerializerDictionary.DerivedKeyToken, parent.SerializerDictionary.Namespace); if (derivedKeyToken.Id != null) { writer.WriteAttributeString(XD.UtilityDictionary.Prefix.Value, XD.UtilityDictionary.IdAttribute, XD.UtilityDictionary.Namespace, derivedKeyToken.Id); } if (derivedKeyToken.KeyDerivationAlgorithm != parent.DerivationAlgorithm) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new MessageSecurityException(SR.GetString(SR.UnsupportedKeyDerivationAlgorithm, derivedKeyToken.KeyDerivationAlgorithm))); } parent.WSSecurityTokenSerializer.WriteKeyIdentifierClause(writer, derivedKeyToken.TokenToDeriveIdentifier); // Don't support Properties element if (derivedKeyToken.Generation > 0 || derivedKeyToken.Offset > 0 || derivedKeyToken.Length != 32) { // this means they're both specified (offset must be gen * length) - we'll write generation if (derivedKeyToken.Generation >= 0 && derivedKeyToken.Offset >= 0) { writer.WriteStartElement(serializerPrefix, parent.SerializerDictionary.Generation, parent.SerializerDictionary.Namespace); writer.WriteValue(derivedKeyToken.Generation); writer.WriteEndElement(); } else if (derivedKeyToken.Generation != -1) { writer.WriteStartElement(serializerPrefix, parent.SerializerDictionary.Generation, parent.SerializerDictionary.Namespace); writer.WriteValue(derivedKeyToken.Generation); writer.WriteEndElement(); } else if (derivedKeyToken.Offset != -1) { writer.WriteStartElement(serializerPrefix, parent.SerializerDictionary.Offset, parent.SerializerDictionary.Namespace); writer.WriteValue(derivedKeyToken.Offset); writer.WriteEndElement(); } if (derivedKeyToken.Length != 32) { 
writer.WriteStartElement(serializerPrefix, parent.SerializerDictionary.Length, parent.SerializerDictionary.Namespace);
                        writer.WriteValue(derivedKeyToken.Length);
                        writer.WriteEndElement();
                    }
                }

                if (derivedKeyToken.Label != null)
                {
                    // BUGFIX: the Label element must be written under the Label dictionary string.
                    // The original wrote it under SerializerDictionary.Generation, emitting a second
                    // <Generation> element that ReadDerivedKeyTokenParameters (which looks for
                    // SerializerDictionary.Label) cannot round-trip.
                    writer.WriteStartElement(serializerPrefix, parent.SerializerDictionary.Label, parent.SerializerDictionary.Namespace);
                    writer.WriteString(derivedKeyToken.Label);
                    writer.WriteEndElement();
                }

                // Nonce is always written; the reader requires it.
                writer.WriteStartElement(serializerPrefix, parent.SerializerDictionary.Nonce, parent.SerializerDictionary.Namespace);
                writer.WriteBase64(derivedKeyToken.Nonce, 0, derivedKeyToken.Nonce.Length);
                writer.WriteEndElement();

                writer.WriteEndElement();
            }
        }

        // Base entry for serializing/deserializing <SecurityContextToken> elements; version-specific
        // subclasses supply the generation (instance) element handling.
        protected abstract class SecurityContextTokenEntry : WSSecurityTokenSerializer.TokenEntry
        {
            WSSecureConversation parent;
            SecurityContextCookieSerializer cookieSerializer;

            public SecurityContextTokenEntry(WSSecureConversation parent, SecurityStateEncoder securityStateEncoder, IList<Type> knownClaimTypes)
            {
                this.parent = parent;
                this.cookieSerializer = new SecurityContextCookieSerializer(securityStateEncoder, knownClaimTypes);
            }

            protected WSSecureConversation Parent
            {
                get { return this.parent; }
            }

            protected override XmlDictionaryString LocalName
            {
                get { return parent.SerializerDictionary.SecurityContextToken; }
            }

            protected override XmlDictionaryString NamespaceUri
            {
                get { return parent.SerializerDictionary.Namespace; }
            }

            protected override Type[] GetTokenTypesCore()
            {
                return new Type[] { typeof(SecurityContextSecurityToken) };
            }

            public override string TokenTypeUri
            {
                get { return parent.SerializerDictionary.SecurityContextTokenType.Value; }
            }

            protected override string ValueTypeUri
            {
                get { return null; }
            }

            public override SecurityKeyIdentifierClause CreateKeyIdentifierClauseFromTokenXmlCore(XmlElement issuedTokenXml,
                SecurityTokenReferenceStyle tokenReferenceStyle)
            {
                TokenReferenceStyleHelper.Validate(tokenReferenceStyle);

                switch (tokenReferenceStyle)
                {
                    case SecurityTokenReferenceStyle.Internal:
                        return
CreateDirectReference(issuedTokenXml, UtilityStrings.IdAttribute, UtilityStrings.Namespace, typeof(SecurityContextSecurityToken)); case SecurityTokenReferenceStyle.External: UniqueId contextId = null; UniqueId generation = null; foreach (XmlNode node in issuedTokenXml.ChildNodes) { XmlElement element = node as XmlElement; if (element != null) { if (element.LocalName == parent.SerializerDictionary.Identifier.Value && element.NamespaceURI == parent.SerializerDictionary.Namespace.Value) { contextId = XmlHelper.ReadTextElementAsUniqueId(element); } else if (CanReadGeneration(element)) { generation = ReadGeneration(element); } } } return new SecurityContextKeyIdentifierClause(contextId, generation); default: throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ArgumentOutOfRangeException("tokenReferenceStyle")); } } protected abstract bool CanReadGeneration(XmlDictionaryReader reader); protected abstract bool CanReadGeneration(XmlElement element); protected abstract UniqueId ReadGeneration(XmlDictionaryReader reader); protected abstract UniqueId ReadGeneration(XmlElement element); SecurityContextSecurityToken TryResolveSecurityContextToken(UniqueId contextId, UniqueId generation, string id, SecurityTokenResolver tokenResolver, out ISecurityContextSecurityTokenCache sctCache) { SecurityContextSecurityToken cachedSct = null; sctCache = null; if (tokenResolver is ISecurityContextSecurityTokenCache) { sctCache = ((ISecurityContextSecurityTokenCache)tokenResolver); cachedSct = sctCache.GetContext(contextId, generation); } else if (tokenResolver is AggregateSecurityHeaderTokenResolver) { // We will see if we have a ISecurityContextSecurityTokenCache in the // AggregateTokenResolver. We will hold the reference to the first sctCache // we find. 
AggregateSecurityHeaderTokenResolver aggregateTokenResolve = tokenResolver as AggregateSecurityHeaderTokenResolver; for (int i = 0; i < aggregateTokenResolve.TokenResolvers.Count; ++i) { ISecurityContextSecurityTokenCache oobTokenResolver = aggregateTokenResolve.TokenResolvers[i] as ISecurityContextSecurityTokenCache; if (oobTokenResolver == null) { continue; } if (sctCache == null) { sctCache = oobTokenResolver; } cachedSct = oobTokenResolver.GetContext(contextId, generation); if (cachedSct != null) { break; } } } if (cachedSct == null) { return null; } else if (cachedSct.Id == id) { return cachedSct; } else { return new SecurityContextSecurityToken(cachedSct, id); } } public override SecurityToken ReadTokenCore(XmlDictionaryReader reader, SecurityTokenResolver tokenResolver) { UniqueId contextId = null; byte[] encodedCookie = null; UniqueId generation = null; bool isCookieMode = false; Fx.Assert(reader.NodeType == XmlNodeType.Element, ""); // check if there is an id string id = reader.GetAttribute(XD.UtilityDictionary.IdAttribute, XD.UtilityDictionary.Namespace); SecurityContextSecurityToken sct = null; // There needs to be at least a contextId in here. 
reader.ReadFullStartElement(); reader.MoveToStartElement(parent.SerializerDictionary.Identifier, parent.SerializerDictionary.Namespace); contextId = reader.ReadElementContentAsUniqueId(); if (CanReadGeneration(reader)) { generation = ReadGeneration(reader); } if (reader.IsStartElement(parent.SerializerDictionary.Cookie, XD.DotNetSecurityDictionary.Namespace)) { isCookieMode = true; ISecurityContextSecurityTokenCache sctCache; sct = TryResolveSecurityContextToken(contextId, generation, id, tokenResolver, out sctCache); if (sct == null) { encodedCookie = reader.ReadElementContentAsBase64(); if (encodedCookie != null) { sct = cookieSerializer.CreateSecurityContextFromCookie(encodedCookie, contextId, generation, id, reader.Quotas); if (sctCache != null) { sctCache.AddContext(sct); } } } else { reader.Skip(); } } reader.ReadEndElement(); if (contextId == null) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new MessageSecurityException(SR.GetString(SR.NoSecurityContextIdentifier))); } if (sct == null && !isCookieMode) { ISecurityContextSecurityTokenCache sctCache; sct = TryResolveSecurityContextToken(contextId, generation, id, tokenResolver, out sctCache); } if (sct == null) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperWarning(new SecurityContextTokenValidationException(SR.GetString(SR.SecurityContextNotRegistered, contextId, generation))); } return sct; } protected virtual void WriteGeneration(XmlDictionaryWriter writer, SecurityContextSecurityToken sct) { } public override void WriteTokenCore(XmlDictionaryWriter writer, SecurityToken token) { SecurityContextSecurityToken sct = (token as SecurityContextSecurityToken); // serialize the name and any wsu:Id attribute writer.WriteStartElement(parent.SerializerDictionary.Prefix.Value, parent.SerializerDictionary.SecurityContextToken, parent.SerializerDictionary.Namespace); if (sct.Id != null) { writer.WriteAttributeString(XD.UtilityDictionary.Prefix.Value, XD.UtilityDictionary.IdAttribute, 
XD.UtilityDictionary.Namespace, sct.Id); } // serialize the context id writer.WriteStartElement(parent.SerializerDictionary.Prefix.Value, parent.SerializerDictionary.Identifier, parent.SerializerDictionary.Namespace); XmlHelper.WriteStringAsUniqueId(writer, sct.ContextId); writer.WriteEndElement(); WriteGeneration(writer, sct); // if cookie-mode, then it must have a cookie if (sct.IsCookieMode) { if (sct.CookieBlob == null) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new MessageSecurityException(SR.GetString(SR.NoCookieInSct))); } // if the token has a cookie, write it out writer.WriteStartElement(XD.DotNetSecurityDictionary.Prefix.Value, parent.SerializerDictionary.Cookie, XD.DotNetSecurityDictionary.Namespace); writer.WriteBase64(sct.CookieBlob, 0, sct.CookieBlob.Length); writer.WriteEndElement(); } writer.WriteEndElement(); } } public abstract class Driver : SecureConversationDriver { public Driver() { } protected abstract SecureConversationDictionary DriverDictionary { get; } public override XmlDictionaryString IssueAction { get { return DriverDictionary.RequestSecurityContextIssuance; } } public override XmlDictionaryString IssueResponseAction { get { return DriverDictionary.RequestSecurityContextIssuanceResponse; } } public override XmlDictionaryString RenewNeededFaultCode { get { return DriverDictionary.RenewNeededFaultCode; } } public override XmlDictionaryString BadContextTokenFaultCode { get { return DriverDictionary.BadContextTokenFaultCode; } } public override UniqueId GetSecurityContextTokenId(XmlDictionaryReader reader) { if (reader == null) throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("reader"); reader.ReadStartElement(DriverDictionary.SecurityContextToken, DriverDictionary.Namespace); UniqueId contextId = XmlHelper.ReadElementStringAsUniqueId(reader, DriverDictionary.Identifier, DriverDictionary.Namespace); while (reader.IsStartElement()) { reader.Skip(); } reader.ReadEndElement(); return contextId; } public 
override bool IsAtSecurityContextToken(XmlDictionaryReader reader) { if (reader == null) throw DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull("reader"); return reader.IsStartElement(DriverDictionary.SecurityContextToken, DriverDictionary.Namespace); } } } }
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.

using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Management.Automation.Internal;
using System.Management.Automation.Language;
using System.Management.Automation.Runspaces;
using System.Reflection;

using Dbg = System.Management.Automation.Diagnostics;

namespace System.Management.Automation
{
    /// <summary>
    /// A common base class for code shared between an interpreted (old) script block and a compiled (new) script block.
    /// </summary>
    internal abstract class ScriptCommandProcessorBase : CommandProcessorBase
    {
        // Constructor used when invoking a raw ScriptBlock (e.g. "& { ... }"): a synthetic
        // ScriptInfo with an empty name wraps the block so the base class has a CommandInfo.
        protected ScriptCommandProcessorBase(ScriptBlock scriptBlock, ExecutionContext context, bool useLocalScope, CommandOrigin origin, SessionStateInternal sessionState)
            : base(new ScriptInfo(string.Empty, scriptBlock, context))
        {
            this._dontUseScopeCommandOrigin = false;
            this._fromScriptFile = false;

            CommonInitialization(scriptBlock, context, useLocalScope, origin, sessionState);
        }

        // Constructor used when invoking a named script command (function, filter, script file):
        // the supplied IScriptCommandInfo carries both the CommandInfo and its ScriptBlock.
        protected ScriptCommandProcessorBase(IScriptCommandInfo commandInfo, ExecutionContext context, bool useLocalScope, SessionStateInternal sessionState)
            : base((CommandInfo)commandInfo)
        {
            Diagnostics.Assert(commandInfo != null, "commandInfo cannot be null");
            Diagnostics.Assert(commandInfo.ScriptBlock != null, "scriptblock cannot be null");

            // Script files get special exit/exception handling downstream; remember the origin kind.
            this._fromScriptFile = (this.CommandInfo is ExternalScriptInfo || this.CommandInfo is ScriptInfo);
            this._dontUseScopeCommandOrigin = true;

            CommonInitialization(commandInfo.ScriptBlock, context, useLocalScope, CommandOrigin.Internal, sessionState);
        }

        /// <summary>
        /// When executing a scriptblock, the command origin needs to be set for the current scope.
        /// If this true, then the scope origin will be set to the command origin. If it's false,
        /// then the scope origin will be set to Internal. This allows public functions to call
        /// private functions but still see $MyInvocation.CommandOrigin as $true.
        /// </summary>
        protected bool _dontUseScopeCommandOrigin;

        /// <summary>
        /// If true, then an exit exception will be rethrown instead of caught and processed...
        /// </summary>
        protected bool _rethrowExitException;

        /// <summary>
        /// This indicates whether exit is called during the execution of
        /// script block.
        /// </summary>
        /// <remarks>
        /// Exit command can be executed in any of begin/process/end blocks.
        ///
        /// If exit is called in one block (for example, begin), any subsequent
        /// blocks (for example, process and end) should not be executed.
        /// </remarks>
        protected bool _exitWasCalled;

        // The script block being executed; set once in CommonInitialization.
        protected ScriptBlock _scriptBlock;

        private ScriptParameterBinderController _scriptParameterBinderController;

        // Lazily-created binder that maps command-line arguments onto the script's
        // parameters; also wires up $args (UnboundArguments) and the invocation info.
        internal ScriptParameterBinderController ScriptParameterBinderController
        {
            get
            {
                if (_scriptParameterBinderController == null)
                {
                    // Set up the hashtable that will be used to hold all of the bound parameters...
                    _scriptParameterBinderController = new ScriptParameterBinderController(((IScriptCommandInfo)CommandInfo).ScriptBlock,
                        Command.MyInvocation, Context, Command, CommandScope);
                    _scriptParameterBinderController.CommandLineParameters.UpdateInvocationInfo(this.Command.MyInvocation);
                    this.Command.MyInvocation.UnboundArguments = _scriptParameterBinderController.DollarArgs;
                }

                return _scriptParameterBinderController;
            }
        }

        /// <summary>
        /// Helper function for setting up command object and commandRuntime object
        /// for script command processor.
        /// </summary>
        protected void CommonInitialization(ScriptBlock scriptBlock, ExecutionContext context, bool useLocalScope, CommandOrigin origin, SessionStateInternal sessionState)
        {
            Diagnostics.Assert(context != null, "execution context cannot be null");
            Diagnostics.Assert(context.Engine != null, "context.engine cannot be null");

            this.CommandSessionState = sessionState;
            this._context = context;
            // Consume the one-shot "rethrow exit" flag from the context and immediately reset it
            // so it applies only to this processor instance.
            this._rethrowExitException = this.Context.ScriptCommandProcessorShouldRethrowExit;
            this._context.ScriptCommandProcessorShouldRethrowExit = false;

            ScriptCommand scriptCommand = new ScriptCommand { CommandInfo = this.CommandInfo };

            this.Command = scriptCommand;
            // WinBlue: 219115
            // Set the command origin for the new ScriptCommand object since we're not
            // going through command discovery here where it's usually set.
            this.Command.CommandOriginInternal = origin;
            this.Command.commandRuntime = this.commandRuntime = new MshCommandRuntime(this.Context, this.CommandInfo, scriptCommand);

            // A new scope is created for normal invocation; dotting runs in the caller's scope.
            this.CommandScope = useLocalScope
                                    ? CommandSessionState.NewScope(this.FromScriptFile)
                                    : CommandSessionState.CurrentScope;

            this.UseLocalScope = useLocalScope;

            _scriptBlock = scriptBlock;

            // If the script has been dotted, throw an error if it's from a different language mode.
            // Unless it was a script loaded through -File, in which case the danger of dotting other
            // language modes (getting internal functions in the user's state) isn't a danger
            if ((!this.UseLocalScope) && (!this._rethrowExitException))
            {
                ValidateCompatibleLanguageMode(_scriptBlock, context.LanguageMode, Command.MyInvocation);
            }
        }

        /// <summary>
        /// Checks if user has requested help (for example passing "-?" parameter for a cmdlet)
        /// and if yes, then returns the help target to display.
        /// </summary>
        /// <param name="helpTarget">Help target to request.</param>
        /// <param name="helpCategory">Help category to request.</param>
        /// <returns><see langword="true"/> if user requested help; <see langword="false"/> otherwise.</returns>
        internal override bool IsHelpRequested(out string helpTarget, out HelpCategory helpCategory)
        {
            if (arguments != null && CommandInfo != null && !string.IsNullOrEmpty(CommandInfo.Name) && _scriptBlock != null)
            {
                foreach (CommandParameterInternal parameter in this.arguments)
                {
                    Dbg.Assert(parameter != null, "CommandProcessor.arguments shouldn't have any null arguments");
                    if (parameter.IsDashQuestion())
                    {
                        Dictionary<Ast, Token[]> scriptBlockTokenCache = new Dictionary<Ast, Token[]>();
                        string unused;
                        HelpInfo helpInfo = _scriptBlock.GetHelpInfo(context: Context, commandInfo: CommandInfo,
                            dontSearchOnRemoteComputer: false, scriptBlockTokenCache: scriptBlockTokenCache, helpFile: out unused, helpUriFromDotLink: out unused);
                        if (helpInfo == null)
                        {
                            break;
                        }

                        helpTarget = helpInfo.Name;
                        helpCategory = helpInfo.HelpCategory;
                        return true;
                    }
                }
            }

            return base.IsHelpRequested(out helpTarget, out helpCategory);
        }
    }

    /// <summary>
    /// This class implements a command processor for script related commands.
    /// </summary>
    /// <remarks>
    /// 1. Usage scenarios
    ///
    /// ScriptCommandProcessor is used for four kinds of commands.
    ///
    /// a. Functions and filters
    ///
    /// For example,
    ///
    ///   function foo($a) {$a}
    ///   foo "my text"
    ///
    /// Second command is an example of a function invocation.
    ///
    /// In this case, a FunctionInfo object is provided while constructing
    /// command processor.
    ///
    /// b. Script File
    ///
    /// For example,
    ///
    ///   . .\my.ps1
    ///
    /// In this case, a ExternalScriptInfo or ScriptInfo object is provided
    /// while constructing command processor.
    ///
    /// c. ScriptBlock
    ///
    /// For example,
    ///
    ///   . {$a = 5}
    ///
    /// In this case, a ScriptBlock object is provided while constructing command
    /// processor.
    ///
    /// d. Script Text
    ///
    /// This is used internally for directly running a text stream of script.
    ///
    /// 2. Design
    ///
    /// a. Script block
    ///
    /// No matter how a script command processor is created, core piece of information
    /// is always a ScriptBlock object, which can come from either a FunctionInfo object,
    /// a ScriptInfo object, or directly parsed from script text.
    ///
    /// b. Script scope
    ///
    /// A script block can be executed either in current scope or in a new scope.
    ///
    /// New scope created should be a scope supporting $script: in case the command
    /// processor is created from a script file.
    ///
    /// c. Begin/Process/End blocks
    ///
    /// Each script block can have one block of script for begin/process/end. These map
    /// to BeginProcessing, ProcessingRecord, and EndProcessing of cmdlet api.
    ///
    /// d. ExitException handling
    ///
    /// If the command processor is created based on a script file, its exit exception
    /// handling is different in the sense that it indicates an exitcode instead of killing
    /// current powershell session.
    /// </remarks>
    internal sealed class DlrScriptCommandProcessor : ScriptCommandProcessorBase
    {
        // Accumulates pipeline input so the process/end clauses can expose it as $input.
        private readonly ArrayList _input = new ArrayList();
        // $_ to use for the whole invocation; AutomationNull.Value means "not supplied".
        private readonly object _dollarUnderbar = AutomationNull.Value;
        private new ScriptBlock _scriptBlock;
        private MutableTuple _localsTuple;
        private bool _runOptimizedCode;
        private bool _argsBound;
        private bool _anyClauseExecuted;
        private FunctionContext _functionContext;

        internal DlrScriptCommandProcessor(ScriptBlock scriptBlock, ExecutionContext context, bool useNewScope, CommandOrigin origin, SessionStateInternal sessionState, object dollarUnderbar)
            : base(scriptBlock, context, useNewScope, origin, sessionState)
        {
            Init();
            _dollarUnderbar = dollarUnderbar;
        }

        internal DlrScriptCommandProcessor(ScriptBlock scriptBlock, ExecutionContext context, bool useNewScope, CommandOrigin origin, SessionStateInternal sessionState)
            : base(scriptBlock, context, useNewScope, origin, sessionState)
        {
            Init();
        }

        internal DlrScriptCommandProcessor(FunctionInfo functionInfo, ExecutionContext context, bool useNewScope, SessionStateInternal sessionState)
            : base(functionInfo, context, useNewScope, sessionState)
        {
            Init();
        }

        internal DlrScriptCommandProcessor(ScriptInfo scriptInfo, ExecutionContext context, bool useNewScope, SessionStateInternal sessionState)
            : base(scriptInfo, context, useNewScope, sessionState)
        {
            Init();
        }

        internal DlrScriptCommandProcessor(ExternalScriptInfo scriptInfo, ExecutionContext context, bool useNewScope, SessionStateInternal sessionState)
            : base(scriptInfo, context, useNewScope, sessionState)
        {
            Init();
        }

        // Shared constructor tail: compile the script block (optimized only when not
        // debugging and running in a fresh local scope) and create its locals tuple.
        private void Init()
        {
            _scriptBlock = base._scriptBlock;
            _obsoleteAttribute = _scriptBlock.ObsoleteAttribute;
            _runOptimizedCode = _scriptBlock.Compile(optimized: _context._debuggingMode <= 0 && UseLocalScope);
            _localsTuple = _scriptBlock.MakeLocalsTuple(_runOptimizedCode);

            if (UseLocalScope)
            {
                Diagnostics.Assert(CommandScope.LocalsTuple == null, "a newly created scope shouldn't have it's tuple set.");
                CommandScope.LocalsTuple = _localsTuple;
            }
        }

        /// <summary>
        /// Get the ObsoleteAttribute of the current command.
        /// </summary>
        internal override ObsoleteAttribute ObsoleteAttribute
        {
            get { return _obsoleteAttribute; }
        }

        private ObsoleteAttribute _obsoleteAttribute;

        // Populate the automatic variables ($MyInvocation, $PSScriptRoot, $PSCommandPath)
        // and build the FunctionContext the clause delegates will run against.
        internal override void Prepare(IDictionary psDefaultParameterValues)
        {
            _localsTuple.SetAutomaticVariable(AutomaticVariable.MyInvocation, this.Command.MyInvocation, _context);
            _scriptBlock.SetPSScriptRootAndPSCommandPath(_localsTuple, _context);
            _functionContext = new FunctionContext
            {
                _executionContext = _context,
                _outputPipe = commandRuntime.OutputPipe,
                _localsTuple = _localsTuple,
                _scriptBlock = _scriptBlock,
                _file = _scriptBlock.File,
                _debuggerHidden = _scriptBlock.DebuggerHidden,
                _debuggerStepThrough = _scriptBlock.DebuggerStepThrough,
                _sequencePoints = _scriptBlock.SequencePoints,
            };
        }

        /// <summary>
        /// Execute BeginProcessing part of command. It sets up the overall scope
        /// object for this command and runs the begin clause of the script block if
        /// it isn't empty.
        /// </summary>
        /// <exception cref="PipelineStoppedException">
        /// a terminating error occurred, or the pipeline was otherwise stopped
        /// </exception>
        internal override void DoBegin()
        {
            if (!RanBeginAlready)
            {
                RanBeginAlready = true;
                ScriptBlock.LogScriptBlockStart(_scriptBlock, Context.CurrentRunspace.InstanceId);

                // Even if there is no begin, we need to set up the execution scope for this script...
                SetCurrentScopeToExecutionScope();
                CommandProcessorBase oldCurrentCommandProcessor = Context.CurrentCommandProcessor;
                try
                {
                    Context.CurrentCommandProcessor = this;

                    if (_scriptBlock.HasBeginBlock)
                    {
                        RunClause(_runOptimizedCode ? _scriptBlock.BeginBlock : _scriptBlock.UnoptimizedBeginBlock, AutomationNull.Value, _input);
                    }
                }
                finally
                {
                    Context.CurrentCommandProcessor = oldCurrentCommandProcessor;
                    RestorePreviousScope();
                }
            }
        }

        internal override void ProcessRecord()
        {
            // Once exit has been observed in any clause, skip all remaining clauses.
            if (_exitWasCalled)
            {
                return;
            }

            // Defensive: begin may not have run yet on some invocation paths.
            if (!this.RanBeginAlready)
            {
                RanBeginAlready = true;

                if (_scriptBlock.HasBeginBlock)
                {
                    RunClause(_runOptimizedCode ? _scriptBlock.BeginBlock : _scriptBlock.UnoptimizedBeginBlock, AutomationNull.Value, _input);
                }
            }

            if (_scriptBlock.HasProcessBlock)
            {
                if (!IsPipelineInputExpected())
                {
                    RunClause(_runOptimizedCode ? _scriptBlock.ProcessBlock : _scriptBlock.UnoptimizedProcessBlock, null, _input);
                }
                else
                {
                    DoProcessRecordWithInput();
                }
            }
            else if (IsPipelineInputExpected())
            {
                // accumulate the input when working in "synchronous" mode
                Debug.Assert(this.Command.MyInvocation.PipelineIterationInfo != null); // this should have been allocated when the pipe was started
                if (this.CommandRuntime.InputPipe.ExternalReader == null)
                {
                    while (Read())
                    {
                        // accumulate all of the objects and execute at the end.
                        _input.Add(Command.CurrentPipelineObject);
                    }
                }
            }
        }

        internal override void Complete()
        {
            try
            {
                if (_exitWasCalled)
                {
                    return;
                }

                // process any items that may still be in the input pipeline
                if (_scriptBlock.HasProcessBlock && IsPipelineInputExpected())
                {
                    DoProcessRecordWithInput();
                }

                if (_scriptBlock.HasEndBlock)
                {
                    var endBlock = _runOptimizedCode ? _scriptBlock.EndBlock : _scriptBlock.UnoptimizedEndBlock;
                    if (this.CommandRuntime.InputPipe.ExternalReader == null)
                    {
                        if (IsPipelineInputExpected())
                        {
                            // read any items that may still be in the input pipe
                            while (Read())
                            {
                                _input.Add(Command.CurrentPipelineObject);
                            }
                        }

                        // run with accumulated input
                        RunClause(endBlock, AutomationNull.Value, _input);
                    }
                    else
                    {
                        // run with asynchronously updated $input enumerator
                        RunClause(endBlock, AutomationNull.Value, this.CommandRuntime.InputPipe.ExternalReader.GetReadEnumerator());
                    }
                }
            }
            finally
            {
                // If there's a clean block, end-of-scriptblock logging is deferred to CleanResource.
                if (!_scriptBlock.HasCleanBlock)
                {
                    ScriptBlock.LogScriptBlockEnd(_scriptBlock, Context.CurrentRunspace.InstanceId);
                }
            }
        }

        protected override void CleanResource()
        {
            if (_scriptBlock.HasCleanBlock && _anyClauseExecuted)
            {
                // The 'Clean' block doesn't write to pipeline.
                Pipe oldOutputPipe = _functionContext._outputPipe;
                _functionContext._outputPipe = new Pipe { NullPipe = true };

                try
                {
                    RunClause(
                        clause: _runOptimizedCode ? _scriptBlock.CleanBlock : _scriptBlock.UnoptimizedCleanBlock,
                        dollarUnderbar: AutomationNull.Value,
                        inputToProcess: AutomationNull.Value);
                }
                finally
                {
                    _functionContext._outputPipe = oldOutputPipe;
                    ScriptBlock.LogScriptBlockEnd(_scriptBlock, Context.CurrentRunspace.InstanceId);
                }
            }
        }

        private void DoProcessRecordWithInput()
        {
            // block for input and execute "process" block for all input objects
            Debug.Assert(this.Command.MyInvocation.PipelineIterationInfo != null); // this should have been allocated when the pipe was started
            var processBlock = _runOptimizedCode ? _scriptBlock.ProcessBlock : _scriptBlock.UnoptimizedProcessBlock;
            while (Read())
            {
                _input.Add(Command.CurrentPipelineObject);

                this.Command.MyInvocation.PipelineIterationInfo[this.Command.MyInvocation.PipelinePosition]++;

                RunClause(processBlock, Command.CurrentPipelineObject, _input);

                // now clear input for next iteration; also makes it clear for the end clause.
                _input.Clear();
            }
        }

        // Core clause driver: establishes scope origin, language mode, error-stream
        // redirection, $_ and $input, then invokes the compiled clause delegate.
        // All engine exceptions are translated/handled at the bottom of this method.
        private void RunClause(Action<FunctionContext> clause, object dollarUnderbar, object inputToProcess)
        {
            ExecutionContext.CheckStackDepth();

            _anyClauseExecuted = true;
            Pipe oldErrorOutputPipe = this.Context.ShellFunctionErrorOutputPipe;

            // If the script block has a different language mode than the current,
            // change the language mode.
            PSLanguageMode? oldLanguageMode = null;
            PSLanguageMode? newLanguageMode = null;
            if ((_scriptBlock.LanguageMode.HasValue) &&
                (_scriptBlock.LanguageMode != Context.LanguageMode))
            {
                oldLanguageMode = Context.LanguageMode;
                newLanguageMode = _scriptBlock.LanguageMode;
            }

            try
            {
                var oldScopeOrigin = this.Context.EngineSessionState.CurrentScope.ScopeOrigin;

                try
                {
                    this.Context.EngineSessionState.CurrentScope.ScopeOrigin =
                        this._dontUseScopeCommandOrigin ? CommandOrigin.Internal : this.Command.CommandOrigin;

                    // Set the language mode. We do this before EnterScope(), so that the language
                    // mode is appropriately applied for evaluation parameter defaults.
                    if (newLanguageMode.HasValue)
                    {
                        Context.LanguageMode = newLanguageMode.Value;
                    }

                    bool? oldLangModeTransitionStatus = null;
                    try
                    {
                        // If it's from ConstrainedLanguage to FullLanguage, indicate the transition before parameter binding takes place.
                        if (oldLanguageMode == PSLanguageMode.ConstrainedLanguage && newLanguageMode == PSLanguageMode.FullLanguage)
                        {
                            oldLangModeTransitionStatus = Context.LanguageModeTransitionInParameterBinding;
                            Context.LanguageModeTransitionInParameterBinding = true;
                        }

                        EnterScope();
                    }
                    finally
                    {
                        if (oldLangModeTransitionStatus.HasValue)
                        {
                            // Revert the transition state to old value after doing the parameter binding
                            Context.LanguageModeTransitionInParameterBinding = oldLangModeTransitionStatus.Value;
                        }
                    }

                    if (commandRuntime.ErrorMergeTo == MshCommandRuntime.MergeDataStream.Output)
                    {
                        Context.RedirectErrorPipe(commandRuntime.OutputPipe);
                    }
                    else if (commandRuntime.ErrorOutputPipe.IsRedirected)
                    {
                        Context.RedirectErrorPipe(commandRuntime.ErrorOutputPipe);
                    }

                    // Per-clause $_ wins over the invocation-wide $_ supplied at construction.
                    if (dollarUnderbar != AutomationNull.Value)
                    {
                        _localsTuple.SetAutomaticVariable(AutomaticVariable.Underbar, dollarUnderbar, _context);
                    }
                    else if (_dollarUnderbar != AutomationNull.Value)
                    {
                        _localsTuple.SetAutomaticVariable(AutomaticVariable.Underbar, _dollarUnderbar, _context);
                    }

                    if (inputToProcess != AutomationNull.Value)
                    {
                        if (inputToProcess == null)
                        {
                            inputToProcess = MshCommandRuntime.StaticEmptyArray.GetEnumerator();
                        }
                        else
                        {
                            IList list = inputToProcess as IList;
                            inputToProcess = (list != null)
                                                 ? list.GetEnumerator()
                                                 : LanguagePrimitives.GetEnumerator(inputToProcess);
                        }

                        _localsTuple.SetAutomaticVariable(AutomaticVariable.Input, inputToProcess, _context);
                    }

                    clause(_functionContext);
                }
                catch (TargetInvocationException tie)
                {
                    // DynamicInvoke wraps exceptions, unwrap them here.
                    throw tie.InnerException;
                }
                finally
                {
                    Context.ShellFunctionErrorOutputPipe = oldErrorOutputPipe;

                    if (oldLanguageMode.HasValue)
                    {
                        Context.LanguageMode = oldLanguageMode.Value;
                    }

                    Context.EngineSessionState.CurrentScope.ScopeOrigin = oldScopeOrigin;
                }
            }
            catch (ExitException ee)
            {
                // 'exit' in a script file sets $LASTEXITCODE and stops remaining clauses;
                // anywhere else (or when the rethrow flag is set) it propagates.
                if (!this.FromScriptFile || _rethrowExitException)
                {
                    throw;
                }

                this._exitWasCalled = true;

                int exitCode = (int)ee.Argument;
                this.Command.Context.SetVariable(SpecialVariables.LastExitCodeVarPath, exitCode);

                if (exitCode != 0)
                    this.commandRuntime.PipelineProcessor.ExecutionFailed = true;
            }
            catch (FlowControlException)
            {
                throw;
            }
            catch (RuntimeException e)
            {
                // This method always throws.
                ManageScriptException(e);
            }
            catch (Exception e)
            {
                // This cmdlet threw an exception, so wrap it and bubble it up.
                throw ManageInvocationException(e);
            }
        }

        // Bind command-line parameters exactly once per invocation, then expose them
        // to the script as $PSBoundParameters.
        private void EnterScope()
        {
            if (!_argsBound)
            {
                _argsBound = true;

                // Parameter binder may need to write warning messages for obsolete parameters
                using (commandRuntime.AllowThisCommandToWrite(false))
                {
                    this.ScriptParameterBinderController.BindCommandLineParameters(arguments);
                }

                _localsTuple.SetAutomaticVariable(AutomaticVariable.PSBoundParameters,
                    this.ScriptParameterBinderController.CommandLineParameters.GetValueToBindToPSBoundParameters(), _context);
            }
        }

        protected override void OnSetCurrentScope()
        {
            // When dotting a script, push the locals of automatic variables to
            // the 'DottedScopes' of the current scope.
            if (!UseLocalScope)
            {
                CommandSessionState.CurrentScope.DottedScopes.Push(_localsTuple);
            }
        }

        protected override void OnRestorePreviousScope()
        {
            // When dotting a script, pop the locals of automatic variables from
            // the 'DottedScopes' of the current scope.
            if (!UseLocalScope)
            {
                CommandSessionState.CurrentScope.DottedScopes.Pop();
            }
        }
    }
}
using System;
using System.IO;
using System.Linq;
using System.Net;
using System.Reflection;
using System.Text;
using System.Threading.Tasks;
using System.Xml;
using CoreProfiler.Timings;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Http.Extensions;
using Microsoft.Extensions.Logging;
using System.Collections.Generic;
using System.Net.Http;
using System.Web;

namespace CoreProfiler.Web
{
    /// <summary>
    /// ASP.NET Core middleware that starts/stops a CoreProfiler profiling session per request
    /// and serves the built-in result viewer under ~/coreprofiler/view (and /nanoprofiler/view).
    /// </summary>
    public class CoreProfilerMiddleware
    {
        private readonly RequestDelegate _next;
        private readonly ILogger _logger;

        // Header used to correlate a request with a parent/child profiling session.
        public const string XCorrelationId = "X-ET-Correlation-Id";
        private const string ViewUrl = "/coreprofiler/view";
        private const string ViewUrlNano = "/nanoprofiler/view";
        private const string Import = "import";
        private const string Export = "?export";
        private const string CorrelationId = "correlationId";

        /// <summary>
        /// The default Html of the view-result index page: ~/coreprofiler/view
        /// </summary>
        public static string ViewResultIndexHeaderHtml = "<h1>CoreProfiler Latest Profiling Results</h1>";

        /// <summary>
        /// The default Html of the view-result page: ~/coreprofiler/view/{uuid}
        /// </summary>
        public static string ViewResultHeaderHtml = "<h1>CoreProfiler Profiling Result</h1>";

        /// <summary>
        /// Tries to import drilldown result by remote address of the step
        /// </summary>
        public static bool TryToImportDrillDownResult;

        /// <summary>
        /// The handler to search for child profiling session by correlationId.
        /// </summary>
        public static Func<string, Guid?> DrillDownHandler { get; set; }

        /// <summary>
        /// The handler to search for parent profiling session by correlationId.
        /// </summary>
        public static Func<string, Guid?> DrillUpHandler { get; set; }

        public CoreProfilerMiddleware(RequestDelegate next, ILoggerFactory loggerFactory)
        {
            _next = next;
            _logger = loggerFactory.CreateLogger<CoreProfilerMiddleware>();
        }

        /// <summary>
        /// Profiles the request and, for GET requests under the viewer paths,
        /// renders the profiling result pages instead of calling the next middleware.
        /// </summary>
        public async Task Invoke(HttpContext context)
        {
            // disable view profiling if CircularBuffer is not enabled
            if (ProfilingSession.CircularBuffer == null)
            {
                await _next.Invoke(context);
                return;
            }

            ClearIfCurrentProfilingSessionStopped();

            var url = UriHelper.GetDisplayUrl(context.Request);
            ProfilingSession.Start(url);

            // set correlationId if exists in header
            var correlationId = GetCorrelationIdFromHeaders(context);
            if (!string.IsNullOrWhiteSpace(correlationId))
            {
                ProfilingSession.Current.AddField(CorrelationId, correlationId);
            }

            // only supports GET method for view results
            if (context.Request.Method != "GET")
            {
                try
                {
                    await _next.Invoke(context);
                }
                catch (System.Exception)
                {
                    // stop and save profiling results on error
                    using (ProfilingSession.Current.Step("Stop on Error")) { }

                    throw;
                }
                finally
                {
                    ProfilingSession.Stop();
                }

                return;
            }

            var path = context.Request.Path.ToString().TrimEnd('/');

            // generate baseViewPath
            string baseViewPath = null;
            var posStart = path.IndexOf(ViewUrl, StringComparison.OrdinalIgnoreCase);
            if (posStart < 0)
                posStart = path.IndexOf(ViewUrlNano, StringComparison.OrdinalIgnoreCase);
            if (posStart >= 0)
                baseViewPath = path.Substring(0, posStart) + ViewUrl;

            // prepend pathbase if specified
            baseViewPath = context.Request.PathBase + baseViewPath;

            if (path.EndsWith("/coreprofiler-resources/icons"))
            {
                context.Response.ContentType = "image/png";
                var iconsStream = GetType().GetTypeInfo().Assembly.GetManifestResourceStream("CoreProfiler.Web.icons.png");
                using (var br = new BinaryReader(iconsStream))
                {
                    await context.Response.Body.WriteAsync(br.ReadBytes((int)iconsStream.Length), 0, (int)iconsStream.Length);
                }
                return;
            }

            if (path.EndsWith("/coreprofiler-resources/css"))
            {
                context.Response.ContentType = "text/css";
                var cssStream = GetType().GetTypeInfo().Assembly.GetManifestResourceStream("CoreProfiler.Web.treeview_timeline.css");
                using (var sr = new StreamReader(cssStream))
                {
                    await context.Response.WriteAsync(sr.ReadToEnd());
                }
                return;
            }

            // view index of all latest results: ~/coreprofiler/view
            if (path.EndsWith(ViewUrl, StringComparison.OrdinalIgnoreCase)
                || path.EndsWith(ViewUrlNano, StringComparison.OrdinalIgnoreCase))
            {
                // try to handle import/export first
                var import = context.Request.Query[Import];
                if (Uri.IsWellFormedUriString(import, UriKind.Absolute))
                {
                    await ImportSessionsFromUrl(import);
                    return;
                }

                if (context.Request.QueryString.ToString() == Export)
                {
                    context.Response.ContentType = "application/json";
                    await context.Response.WriteAsync(ImportSerializer.SerializeSessions(ProfilingSession.CircularBuffer));
                    return;
                }

                var exportCorrelationId = context.Request.Query[CorrelationId];
                if (!string.IsNullOrEmpty(exportCorrelationId))
                {
                    context.Response.ContentType = "application/json";
                    var result = ProfilingSession.CircularBuffer.FirstOrDefault(
                        r => r.Data != null && r.Data.ContainsKey(CorrelationId) && r.Data[CorrelationId] == exportCorrelationId);
                    if (result != null)
                    {
                        await context.Response.WriteAsync(ImportSerializer.SerializeSessions(new[] { result }));
                        return;
                    }
                }

                // render result list view
                context.Response.ContentType = "text/html";

                var sb = new StringBuilder();
                sb.Append("<head>");
                sb.Append("<title>CoreProfiler Latest Profiling Results</title>");
                sb.Append("<style>th { width: 200px; text-align: left; } .gray { background-color: #eee; } .nowrap { white-space: nowrap;padding-right: 20px; vertical-align:top; } </style>");
                // BUGFIX: was "</head" — the closing tag was missing its '>' and produced malformed HTML.
                sb.Append("</head>");
                sb.Append("<body>");
                sb.Append(ViewResultIndexHeaderHtml);

                var tagFilter = context.Request.Query["tag"];
                if (!string.IsNullOrWhiteSpace(tagFilter))
                {
                    sb.Append("<div><strong>Filtered by tag:</strong> ");
                    sb.Append(tagFilter);
                    sb.Append("<br/><br /></div>");
                }

                sb.Append("<table>");
                sb.Append("<tr><th class=\"nowrap\">Time (UTC)</th><th class=\"nowrap\">Duration (ms)</th><th>Url</th></tr>");

                var latestResults = ProfilingSession.CircularBuffer.OrderByDescending(r => r.Started);
                var i = 0;
                foreach (var result in latestResults)
                {
                    if (!string.IsNullOrWhiteSpace(tagFilter)
                        && (result.Tags == null || !result.Tags.Contains<string>(tagFilter, StringComparer.OrdinalIgnoreCase)))
                    {
                        continue;
                    }

                    sb.Append("<tr");
                    if ((i++) % 2 == 1)
                    {
                        sb.Append(" class=\"gray\"");
                    }
                    sb.Append("><td class=\"nowrap\">");
                    sb.Append(result.Started.ToString("yyyy-MM-ddTHH:mm:ss.FFF"));
                    sb.Append("</td><td class=\"nowrap\">");
                    sb.Append(result.DurationMilliseconds);
                    sb.Append("</td><td><a href=\"");
                    sb.Append(baseViewPath);
                    sb.Append("/");
                    sb.Append(result.Id.ToString());
                    sb.Append("\" target=\"_blank\">");
                    sb.Append(result.Name.Replace("\r\n", " "));
                    sb.Append("</a></td></tr>");
                }

                sb.Append("</table>");
                sb.Append("</body>");

                await context.Response.WriteAsync(sb.ToString());
                return;
            }

            // view specific result by uuid: ~/coreprofiler/view/{uuid}
            if (path.IndexOf(ViewUrl, StringComparison.OrdinalIgnoreCase) >= 0
                || path.IndexOf(ViewUrlNano, StringComparison.OrdinalIgnoreCase) >= 0)
            {
                context.Response.ContentType = "text/html";

                var sb = new StringBuilder();
                sb.Append("<head>");
                sb.Append("<meta charset=\"utf-8\" />");
                sb.Append("<meta http-equiv=\"X-UA-Compatible\" content=\"IE=edge\" />");
                sb.Append("<title>CoreProfiler Profiling Result</title>");
                sb.Append("<link rel=\"stylesheet\" href=\"./coreprofiler-resources/css\" />");
                // BUGFIX: was "</head" — the closing tag was missing its '>' and produced malformed HTML.
                sb.Append("</head>");
                sb.Append("<body>");
                sb.Append("<h1>CoreProfiler Profiling Result</h1>");

                var uuid = path.Split('/').Last();
                var result = ProfilingSession.CircularBuffer.FirstOrDefault(
                    r => r.Id.ToString().ToLowerInvariant() == uuid.ToLowerInvariant());
                if (result != null)
                {
                    if (TryToImportDrillDownResult)
                    {
                        // try to import drill down results
                        foreach (var timing in result.Timings)
                        {
                            if (timing.Data == null || !timing.Data.ContainsKey(CorrelationId)) continue;

                            Guid parentResultId;
                            if (!Guid.TryParse(timing.Data[CorrelationId], out parentResultId)
                                || ProfilingSession.CircularBuffer.Any(r => r.Id == parentResultId)) continue;

                            string remoteAddress;
                            if (!timing.Data.TryGetValue("remoteAddress", out remoteAddress)) remoteAddress = timing.Name;

                            if (!Uri.IsWellFormedUriString(remoteAddress, UriKind.Absolute)) continue;
                            if (!remoteAddress.StartsWith("http", StringComparison.OrdinalIgnoreCase)) continue;

                            // strip query string and any trailing file segment from the remote address
                            var pos = remoteAddress.IndexOf("?");
                            if (pos > 0) remoteAddress = remoteAddress.Substring(0, pos);
                            if (remoteAddress.Split('/').Last().Contains("."))
                                remoteAddress = remoteAddress.Substring(0, remoteAddress.LastIndexOf("/"));

                            try
                            {
                                await ImportSessionsFromUrl(remoteAddress + "/coreprofiler/view?" + CorrelationId + "=" + parentResultId.ToString("N"));
                            }
                            catch (Exception ex)
                            {
                                System.Diagnostics.Debug.Write(ex.Message);
                                //ignore exceptions
                            }
                        }
                    }

                    // render result tree
                    sb.Append("<div class=\"css-treeview\">");

                    // print summary
                    sb.Append("<ul>");
                    sb.Append("<li class=\"summary\">");
                    PrintDrillUpLink(sb, result, baseViewPath);
                    sb.Append(result.Name.Replace("\r\n", " "));
                    sb.Append("</li>");
                    sb.Append("<li class=\"summary\">");
                    if (result.Data != null)
                    {
                        foreach (var keyValue in result.Data)
                        {
                            if (string.IsNullOrWhiteSpace(keyValue.Value)) continue;

                            sb.Append("<b>");
                            sb.Append(keyValue.Key);
                            sb.Append(": </b>");
                            var encodedValue = WebUtility.HtmlEncode(keyValue.Value);
                            if (keyValue.Key.EndsWith("Count") || keyValue.Key.EndsWith("Duration"))
                            {
                                sb.Append("<span class=\"");
                                sb.Append(keyValue.Key);
                                sb.Append("\">");
                                sb.Append(encodedValue);
                                sb.Append("</span>");
                            }
                            else
                            {
                                sb.Append(encodedValue);
                            }
                            sb.Append(" &nbsp; ");
                        }
                    }
                    sb.Append("<b>machine: </b>");
                    sb.Append(result.MachineName);
                    sb.Append(" &nbsp; ");
                    if (result.Tags != null && result.Tags.Any())
                    {
                        sb.Append("<b>tags: </b>");
                        sb.Append(string.Join(", ", result.Tags.Select(t => string.Format("<a href=\"{2}?tag={0}\">{1}</a>", HttpUtility.UrlEncode(t), t, baseViewPath))));
                        sb.Append(" &nbsp; ");
                    }
                    sb.Append("</li>");
                    sb.Append("</ul>");

                    var totalLength = result.DurationMilliseconds;
                    if (totalLength == 0)
                    {
                        // avoid divide-by-zero when scaling the timeline
                        totalLength = 1;
                    }
                    var factor = 300.0 / totalLength;

                    // print ruler
                    sb.Append("<ul>");
                    sb.Append("<li class=\"ruler\"><span style=\"width:300px\">0</span><span style=\"width:80px\">");
                    sb.Append(totalLength);
                    sb.Append(" (ms)</span><span style=\"width:20px\">&nbsp;</span><span style=\"width:60px\">Start</span><span style=\"width:60px\">Duration</span><span style=\"width:20px\">&nbsp;</span><span>Timing Hierarchy</span></li>");
                    sb.Append("</ul>");

                    // print timings
                    sb.Append("<ul class=\"timing\">");
                    PrintTimings(result, result.Id, sb, factor, baseViewPath);
                    sb.Append("</ul>");
                    sb.Append("</div>");

                    // print timing data popups
                    foreach (var timing in result.Timings)
                    {
                        if (timing.Data == null || !timing.Data.Any()) continue;

                        sb.Append("<aside id=\"data_");
                        sb.Append(timing.Id.ToString());
                        sb.Append("\" style=\"display:none\" class=\"modal\">");
                        sb.Append("<div>");
                        sb.Append("<h4><code>");
                        sb.Append(timing.Name.Replace("\r\n", " "));
                        sb.Append("</code></h4>");
                        sb.Append("<textarea>");
                        foreach (var keyValue in timing.Data)
                        {
                            if (string.IsNullOrWhiteSpace(keyValue.Value)) continue;

                            sb.Append(keyValue.Key);
                            sb.Append(":\r\n");

                            var value = keyValue.Value.Trim();
                            if (value.StartsWith("<"))
                            {
                                // assume it is XML
                                // try to format XML with indent
                                var doc = new XmlDocument();
                                try
                                {
                                    doc.LoadXml(value);
                                    var ms = new MemoryStream();
                                    var xwSettings = new XmlWriterSettings { Encoding = new UTF8Encoding(false), Indent = true, IndentChars = "\t" };
                                    using (var writer = XmlWriter.Create(ms, xwSettings))
                                    {
                                        doc.Save(writer);
                                        ms.Seek(0, SeekOrigin.Begin);
                                        using (var sr = new StreamReader(ms))
                                        {
                                            value = sr.ReadToEnd();
                                        }
                                    }
                                }
                                catch
                                {
                                    //squash exception - non-XML content is rendered as-is
                                }
                            }
                            sb.Append(value);
                            sb.Append("\r\n\r\n");
                        }
                        if (timing.Tags != null && timing.Tags.Any())
                        {
                            sb.Append("tags:\r\n");
                            sb.Append(timing.Tags);
                            sb.Append("\r\n");
                        }
                        sb.Append("</textarea>");
                        sb.Append("<a href=\"#close\" title=\"Close\" onclick=\"this.parentNode.parentNode.style.display='none'\">Close</a>");
                        sb.Append("</div>");
                        sb.Append("</aside>");
                    }
                }
                else
                {
                    sb.Append("Specified result does not exist!");
                }

                sb.Append("</body>");

                await context.Response.WriteAsync(sb.ToString());
                return;
            }

            try
            {
                await _next.Invoke(context);
            }
            catch (System.Exception)
            {
                // stop and save profiling results on error
                using (ProfilingSession.Current.Step("Stop on Error")) { }

                throw;
            }
            finally
            {
                ProfilingSession.Stop();
            }
        }

        #region Private Methods

        // Recursively renders all timings whose parent is parentId as nested <li> items.
        private void PrintTimings(ITimingSession session, Guid parentId, StringBuilder sb, double factor, string baseViewPath)
        {
            var timings = session.Timings.Where(s => s.ParentId == parentId);
            foreach (var timing in timings)
            {
                PrintTiming(session, timing, sb, factor, baseViewPath);
            }
        }

        // Renders one timing bar (clamped to the 300px ruler) plus its data/drill-down
        // links, then recurses into any child timings.
        private void PrintTiming(ITimingSession session, ITiming timing, StringBuilder sb, double factor, string baseViewPath)
        {
            sb.Append("<li><span class=\"timing\" style=\"padding-left: ");
            var start = Math.Floor(timing.StartMilliseconds * factor);
            if (start > 300)
            {
                start = 300;
            }
            sb.Append(start);
            sb.Append("px\"><span class=\"bar ");
            sb.Append(timing.Type);
            sb.Append("\" title=\"");
            sb.Append(WebUtility.HtmlEncode(timing.Name.Replace("\r\n", " ")));
            sb.Append("\" style=\"width: ");
            var width = (int)Math.Round(timing.DurationMilliseconds * factor);
            if (width > 300)
            {
                width = 300;
            }
            else if (width == 0)
            {
                width = 1;
            }
            sb.Append(width);
            sb.Append("px\"></span><span class=\"start\">+");
            sb.Append(timing.StartMilliseconds);
            sb.Append("</span><span class=\"duration\">");
            sb.Append(timing.DurationMilliseconds);
            sb.Append("</span></span>");

            var hasChildTimings = session.Timings.Any(s => s.ParentId == timing.Id);
            if (hasChildTimings)
            {
                sb.Append("<input type=\"checkbox\" id=\"t_");
                sb.Append(timing.Id.ToString());
                sb.Append("\" checked=\"checked\" /><label for=\"t_");
                sb.Append(timing.Id.ToString());
                sb.Append("\">");
                PrintDataLink(sb, timing);
                PrintDrillDownLink(sb, timing, baseViewPath);
                sb.Append(WebUtility.HtmlEncode(timing.Name.Replace("\r\n", " ")));
                sb.Append("</label>");
                sb.Append("<ul>");
                PrintTimings(session, timing.Id, sb, factor, baseViewPath);
                sb.Append("</ul>");
            }
            else
            {
                sb.Append("<span class=\"leaf\">");
                PrintDataLink(sb, timing);
                PrintDrillDownLink(sb, timing, baseViewPath);
                sb.Append(WebUtility.HtmlEncode(timing.Name.Replace("\r\n", " ")));
                sb.Append("</span>");
            }
            sb.Append("</li>");
        }

        // Renders the "[data]" link that opens the timing's data popup, if it has data.
        private void PrintDataLink(StringBuilder sb, ITiming timing)
        {
            if (timing.Data == null || !timing.Data.Any()) return;

            sb.Append("[<a href=\"#data_");
            sb.Append(timing.Id.ToString());
            sb.Append("\" onclick=\"document.getElementById('data_");
            sb.Append(timing.Id.ToString());
            sb.Append("').style.display='block';\" class=\"openModal\">data</a>] ");
        }

        // Renders a "[drill down]" link to the child session matched by correlationId,
        // using DrillDownHandler when supplied, otherwise searching the circular buffer.
        private void PrintDrillDownLink(StringBuilder sb, ITiming timing, string baseViewPath)
        {
            if (timing.Data == null || !timing.Data.ContainsKey(CorrelationId)) return;

            var correlationId = timing.Data[CorrelationId];

            Guid? drillDownSessionId = null;
            if (DrillDownHandler == null)
            {
                var drillDownSession = ProfilingSession.CircularBuffer.FirstOrDefault(s =>
                    s.Data != null && s.Data.ContainsKey(CorrelationId) && s.Data[CorrelationId] == correlationId);
                if (drillDownSession != null) drillDownSessionId = drillDownSession.Id;
            }
            else
            {
                drillDownSessionId = DrillDownHandler(correlationId);
            }

            if (!drillDownSessionId.HasValue) return;

            sb.Append("[<a href=\"");
            sb.Append(baseViewPath);
            sb.Append("/");
            sb.Append(drillDownSessionId);
            sb.Append("\">drill down</a>] ");
        }

        // Renders a "[drill up]" link to the parent session whose timings reference
        // this session's correlationId.
        private void PrintDrillUpLink(StringBuilder sb, ITimingSession session, string baseViewPath)
        {
            if (session.Data == null || !session.Data.ContainsKey(CorrelationId)) return;

            var correlationId = session.Data[CorrelationId];

            Guid? drillUpSessionId = null;
            if (DrillUpHandler == null)
            {
                var drillUpSession = ProfilingSession.CircularBuffer.FirstOrDefault(s =>
                    s.Timings != null && s.Timings.Any(t => t.Data != null && t.Data.ContainsKey(CorrelationId) && t.Data[CorrelationId] == correlationId));
                if (drillUpSession != null) drillUpSessionId = drillUpSession.Id;
            }
            else
            {
                drillUpSessionId = DrillUpHandler(correlationId);
            }

            if (!drillUpSessionId.HasValue) return;

            sb.Append("[<a href=\"");
            sb.Append(baseViewPath);
            sb.Append("/");
            sb.Append(drillUpSessionId);
            sb.Append("\">drill up</a>] ");
        }

        // Drops the ambient profiling session if a previous request left a stopped one behind.
        private static void ClearIfCurrentProfilingSessionStopped()
        {
            var profilingSession = ProfilingSession.Current;
            if (profilingSession == null)
            {
                return;
            }

            if (profilingSession.Profiler.IsStopped)
            {
                ProfilingSession.ProfilingSessionContainer.Clear();
            }
        }

        // Returns the first X-ET-Correlation-Id header value, or null when absent.
        private string GetCorrelationIdFromHeaders(HttpContext context)
        {
            if (context.Request.Headers.Keys.Contains(XCorrelationId))
            {
                var correlationIds = context.Request.Headers.GetCommaSeparatedValues(XCorrelationId);
                if (correlationIds != null)
                {
                    return correlationIds.FirstOrDefault();
                }
            }

            return null;
        }

        // Downloads serialized sessions from importUrl and merges any not already
        // present (by Id) into the circular buffer.
        // NOTE(review): creates a new HttpClient per call — acceptable for this
        // rarely-hit admin path, but a shared client would avoid socket churn.
        private async Task ImportSessionsFromUrl(string importUrl)
        {
            IEnumerable<ITimingSession> sessions = null;
            using (var httpClient = new HttpClient())
            {
                var response = await httpClient.GetAsync(importUrl);
                if (response.StatusCode == HttpStatusCode.OK)
                {
                    var content = await response.Content.ReadAsStringAsync();
                    sessions = ImportSerializer.DeserializeSessions(content);
                }
            }

            if (sessions == null)
            {
                return;
            }

            if (ProfilingSession.CircularBuffer == null)
            {
                return;
            }

            var existingIds = ProfilingSession.CircularBuffer.Select(session => session.Id).ToList();
            foreach (var session in sessions)
            {
                if (!existingIds.Contains(session.Id))
                {
                    ProfilingSession.CircularBuffer.Add(session);
                }
            }
        }

        #endregion
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System.Linq;
using System.Reflection;
using System.Collections.Generic;
using Xunit;

namespace System.Security.Cryptography.Encryption.Tests.Symmetric
{
    /// <summary>
    /// Exercises the SymmetricAlgorithm base-class contract (Key, IV, KeySize,
    /// BlockSize) through a minimal subclass ("Trivial") that declares
    /// non-contiguous legal key/block sizes and deterministic generated values.
    /// </summary>
    public static class TrivialTests
    {
        [Fact]
        public static void TestKey()
        {
            using (Trivial s = new Trivial())
            {
                Assert.Equal(0, s.KeySize);
                Assert.Throws<ArgumentNullException>(() => s.Key = null);

                {
                    // Testing automatic generation of Key.
                    // Reading Key on a fresh instance must trigger GenerateKey(),
                    // and the getter must return a defensive copy, not the stored array.
                    Trivial t = new Trivial();
                    byte[] generatedKey = t.Key;
                    Assert.Equal(generatedKey, Trivial.GeneratedKey);
                    Assert.False(Object.ReferenceEquals(generatedKey, Trivial.GeneratedKey));
                }

                // Testing KeySize and Key setter.
                // Valid sizes derive from Trivial.LegalKeySizes: 5 bytes (40 bits),
                // 13/19 bytes via the 13..22-step-6 range, and 101..104 bytes.
                int[] validKeySizes = { 40, 104, 152, 808, 816, 824, 832 };
                for (int keySize = -10; keySize < 200 * 8; keySize++)
                {
                    if (validKeySizes.Contains(keySize))
                    {
                        s.KeySize = keySize;
                        Assert.Equal(keySize, s.KeySize);
                    }
                    else
                    {
                        Assert.Throws<CryptographicException>(() => s.KeySize = keySize);
                    }

                    if (keySize >= 0)
                    {
                        // The Key setter validates byte-length * 8 against LegalKeySizes
                        // and must store a copy of the supplied array.
                        int keySizeInBytes = keySize / 8;
                        byte[] key = GenerateRandom(keySizeInBytes);
                        if (validKeySizes.Contains(keySizeInBytes * 8))
                        {
                            s.Key = key;
                            byte[] copyOfKey = s.Key;
                            Assert.Equal(key, copyOfKey);
                            Assert.False(Object.ReferenceEquals(key, copyOfKey));
                        }
                        else
                        {
                            Assert.Throws<CryptographicException>(() => s.Key = key);
                        }
                    }
                }

                // Test overflow
                try
                {
                    byte[] hugeKey = new byte[536870917];  // value chosen so that when multiplied by 8 (bits) it overflows to the value 40
                    Assert.Throws<CryptographicException>(() => s.Key = hugeKey);
                }
                catch (OutOfMemoryException) { } // in case there isn't enough memory at test-time to allocate the large array
            }
        }

        [Fact]
        public static void TestIv()
        {
            using (Trivial s = new Trivial())
            {
                Assert.Throws<ArgumentNullException>(() => s.IV = null);

                {
                    // Testing automatic generation of Iv.
                    // Reading IV must trigger GenerateIV() and return a defensive copy.
                    Trivial t = new Trivial();
                    t.BlockSize = 5 * 8;
                    byte[] generatedIv = t.IV;
                    Assert.Equal(generatedIv, Trivial.GeneratedIV);
                    Assert.False(Object.ReferenceEquals(generatedIv, Trivial.GeneratedIV));
                }

                // Testing IV property setter
                {
                    // The IV length must match BlockSize/8 exactly (5 bytes here).
                    s.BlockSize = 5 * 8;

                    {
                        byte[] iv = GenerateRandom(5);
                        s.IV = iv;
                        byte[] copyOfIv = s.IV;
                        Assert.Equal(iv, copyOfIv);
                        Assert.False(Object.ReferenceEquals(iv, copyOfIv));
                    }

                    {
                        // Wrong-length IV (6 bytes vs 5-byte block) must be rejected.
                        byte[] iv = GenerateRandom(6);
                        Assert.Throws<CryptographicException>(() => s.IV = iv);
                    }
                }
            }
            return;
        }

        [Fact]
        public static void TestBlockSize()
        {
            using (Trivial s = new Trivial())
            {
                Assert.Equal(0, s.BlockSize);

                // Testing BlockSizeSetter.
                // Same legal sizes as the key tests — Trivial declares identical
                // LegalBlockSizes and LegalKeySizes.
                int[] validBlockSizes = { 40, 104, 152, 808, 816, 824, 832 };
                for (int blockSize = -10; blockSize < 200 * 8; blockSize++)
                {
                    if (validBlockSizes.Contains(blockSize))
                    {
                        s.BlockSize = blockSize;
                        Assert.Equal(blockSize, s.BlockSize);
                    }
                    else
                    {
                        Assert.Throws<CryptographicException>(() => s.BlockSize = blockSize);
                    }
                }
            }
            return;
        }

        // Fills a buffer of the requested size with pseudo-random bytes.
        // Non-cryptographic Random is fine here: tests only need arbitrary data.
        private static byte[] GenerateRandom(int size)
        {
            byte[] data = new byte[size];
            Random r = new Random();
            for (int i = 0; i < size; i++)
            {
                data[i] = unchecked((byte)(r.Next()));
            }
            return data;
        }

        /// <summary>
        /// Minimal SymmetricAlgorithm whose transforms throw sentinel exceptions
        /// and whose generated Key/IV are fixed, so base-class property behavior
        /// can be observed in isolation.
        /// </summary>
        private class Trivial : SymmetricAlgorithm
        {
            public Trivial()
            {
                //
                // Although the desktop CLR allows overriding the LegalKeySizes property,
                // the BlockSize setter does not invoke the overriding method when validating
                // the blockSize. Instead, it accesses the underlying field (LegalKeySizesValue) directly.
                //
                // We've since removed this field from the public surface area (and fixed the BlockSize property
                // to call LegalKeySizes rather than the underlying field.) To make this test also run on the desktop, however,
                // we will also set the LegalKeySizesValue field if present.
                //
                FieldInfo legalBlockSizesValue = typeof(SymmetricAlgorithm).GetTypeInfo().GetDeclaredField("LegalBlockSizesValue");
                if (legalBlockSizesValue != null && legalBlockSizesValue.IsFamily)
                {
                    legalBlockSizesValue.SetValue(this, LegalBlockSizes);
                }
            }

            public override ICryptoTransform CreateDecryptor(byte[] rgbKey, byte[] rgbIV)
            {
                throw new CreateDecryptorNotImplementedException();
            }

            public override ICryptoTransform CreateEncryptor(byte[] rgbKey, byte[] rgbIV)
            {
                throw new CreateEncryptorNotImplementedException();
            }

            public override void GenerateIV()
            {
                IV = GeneratedIV;
            }

            public override void GenerateKey()
            {
                Key = GeneratedKey;
            }

            // Deliberately non-contiguous legal sizes, including a degenerate
            // range (max < min, skip 0) to exercise validation edge cases.
            public override KeySizes[] LegalBlockSizes
            {
                get
                {
                    return new KeySizes[]
                    {
                        new KeySizes(5*8, -99*8, 0*8),
                        new KeySizes(13*8, 22*8, 6*8),
                        new KeySizes(101*8, 104*8, 1*8),
                    };
                }
            }

            public override KeySizes[] LegalKeySizes
            {
                get
                {
                    return new KeySizes[]
                    {
                        new KeySizes(5*8, -99*8, 0*8),
                        new KeySizes(13*8, 22*8, 6*8),
                        new KeySizes(101*8, 104*8, 1*8),
                    };
                }
            }

            // Fixed "generated" values so tests can compare against them;
            // 13 bytes (104 bits) and 5 bytes (40 bits) are both legal sizes above.
            public static readonly byte[] GeneratedKey = GenerateRandom(13);
            public static readonly byte[] GeneratedIV = GenerateRandom(5);
        }

        // Sentinel exception types: tests detect which virtual was invoked.
        private class GenerateIvNotImplementedException : Exception { }
        private class GenerateKeyNotImplementedException : Exception { }
        private class CreateDecryptorNotImplementedException : Exception { }
        private class CreateEncryptorNotImplementedException : Exception { }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System;
using System.Linq.Expressions;
using Xunit;

namespace System.Linq.Tests
{
    /// <summary>
    /// Tests for Queryable.Average over every supported element type
    /// (int/long/float/double/decimal and their nullable forms), covering:
    /// null-source and null-selector argument validation, plain-array averaging
    /// (nulls ignored for nullable elements), and selector-based averaging.
    /// </summary>
    public class AverageTests : EnumerableBasedTests
    {
        // ---- float? ----

        [Fact]
        public void NullNFloatSource()
        {
            Assert.Throws<ArgumentNullException>("source", () => ((IQueryable<float?>)null).Average());
        }

        [Fact]
        public void NullNFloatSourceWithFunc()
        {
            Assert.Throws<ArgumentNullException>("source", () => ((IQueryable<float?>)null).Average(i => i));
        }

        [Fact]
        public void NullNFloatFunc()
        {
            Expression<Func<float?, float?>> selector = null;
            Assert.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<float?>().AsQueryable().Average(selector));
        }

        [Fact]
        public void MultipleNullableFloatSource()
        {
            // Null entries are excluded from both the sum and the count:
            // (5.5 + 0 + 15.5 + 40.5 - 23.5) / 5 = 7.6.
            float?[] source = { 5.5f, 0, null, null, null, 15.5f, 40.5f, null, null, -23.5f };
            float? expected = 7.6f;
            Assert.Equal(expected, source.AsQueryable().Average());
        }

        [Fact]
        public void NullableFloatFromSelector()
        {
            // The null num is ignored: (5.5 + 15.5) / 2 = 10.5.
            var source = new []
            {
                new { name = "Tim", num = (float?)5.5f },
                new { name = "John", num = (float?)15.5f },
                new { name = "Bob", num = default(float?) }
            };
            float? expected = 10.5f;
            Assert.Equal(expected, source.AsQueryable().Average(e => e.num));
        }

        // ---- int ----

        [Fact]
        public void NullIntSource()
        {
            Assert.Throws<ArgumentNullException>("source", () => ((IQueryable<int>)null).Average());
        }

        [Fact]
        public void NullIntSourceWithFunc()
        {
            Assert.Throws<ArgumentNullException>("source", () => ((IQueryable<int>)null).Average(i => i));
        }

        [Fact]
        public void NullIntFunc()
        {
            Expression<Func <int, int>> selector = null;
            Assert.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<int>().AsQueryable().Average(selector));
        }

        [Fact]
        public void MultipleIntSouce()
        {
            int[] source = { 5, -10, 15, 40, 28 };
            double expected = 15.6;
            Assert.Equal(expected, source.AsQueryable().Average());
        }

        [Fact]
        public void MultipleIntFromSelector()
        {
            var source = new []
            {
                new { name="Tim", num = 10 },
                new { name="John", num = -10 },
                new { name="Bob", num = 15 }
            };
            double expected = 5;
            Assert.Equal(expected, source.AsQueryable().Average(e => e.num));
        }

        // ---- int? ----

        [Fact]
        public void NullNIntSource()
        {
            Assert.Throws<ArgumentNullException>("source", () => ((IQueryable<int?>)null).Average());
        }

        [Fact]
        public void NullNIntSourceWithFunc()
        {
            Assert.Throws<ArgumentNullException>("source", () => ((IQueryable<int?>)null).Average(i => i));
        }

        [Fact]
        public void NullNIntFunc()
        {
            Expression<Func<int?, int?>> selector = null;
            Assert.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<int?>().AsQueryable().Average(selector));
        }

        [Fact]
        public void MultipleNullableIntSource()
        {
            int?[] source = { 5, -10, null, null, null, 15, 40, 28, null, null };
            double? expected = 15.6;
            Assert.Equal(expected, source.AsQueryable().Average());
        }

        [Fact]
        public void NullableIntFromSelector()
        {
            var source = new []
            {
                new { name = "Tim", num = (int?)10 },
                new { name = "John", num = default(int?) },
                new { name = "Bob", num = (int?)10 }
            };
            double? expected = 10;
            Assert.Equal(expected, source.AsQueryable().Average(e => e.num));
        }

        // ---- long ----

        [Fact]
        public void NullLongSource()
        {
            Assert.Throws<ArgumentNullException>("source", () => ((IQueryable<long>)null).Average());
        }

        [Fact]
        public void NullLongSourceWithFunc()
        {
            Assert.Throws<ArgumentNullException>("source", () => ((IQueryable<long>)null).Average(i => i));
        }

        [Fact]
        public void NullLongFunc()
        {
            Expression<Func<long, long>> selector = null;
            Assert.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<long>().AsQueryable().Average(selector));
        }

        [Fact]
        public void MultipleLongValues()
        {
            long[] source = { 5, -10, 15, 40, 28 };
            double expected = 15.6;
            Assert.Equal(expected, source.AsQueryable().Average());
        }

        [Fact]
        public void MultipleLongFromSelector()
        {
            var source = new []
            {
                new { name = "Tim", num = 40L },
                new { name = "John", num = 50L },
                new { name = "Bob", num = 60L }
            };
            double expected = 50;
            Assert.Equal(expected, source.AsQueryable().Average(e => e.num));
        }

        // ---- long? ----

        [Fact]
        public void NullNLongSource()
        {
            Assert.Throws<ArgumentNullException>("source", () => ((IQueryable<long?>)null).Average());
        }

        [Fact]
        public void NullNLongSourceWithFunc()
        {
            Assert.Throws<ArgumentNullException>("source", () => ((IQueryable<long?>)null).Average(i => i));
        }

        [Fact]
        public void NullNLongFunc()
        {
            Expression<Func<long?, long?>> selector = null;
            Assert.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<long?>().AsQueryable().Average(selector));
        }

        [Fact]
        public void MultipleNullableLongSource()
        {
            long?[] source = { 5, -10, null, null, null, 15, 40, 28, null, null };
            double? expected = 15.6;
            Assert.Equal(expected, source.AsQueryable().Average());
        }

        [Fact]
        public void NullableLongFromSelector()
        {
            var source = new []
            {
                new { name = "Tim", num = (long?)40L },
                new { name = "John", num = default(long?) },
                new { name = "Bob", num = (long?)30L }
            };
            double? expected = 35;
            Assert.Equal(expected, source.AsQueryable().Average(e => e.num));
        }

        // ---- double ----

        [Fact]
        public void NullDoubleSource()
        {
            Assert.Throws<ArgumentNullException>("source", () => ((IQueryable<double>)null).Average());
        }

        [Fact]
        public void NullDoubleSourceWithFunc()
        {
            Assert.Throws<ArgumentNullException>("source", () => ((IQueryable<double>)null).Average(i => i));
        }

        [Fact]
        public void NullDoubleFunc()
        {
            Expression<Func<double, double>> selector = null;
            Assert.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<double>().AsQueryable().Average(selector));
        }

        [Fact]
        public void MultipleDoubleValues()
        {
            double[] source = { 5.5, -10, 15.5, 40.5, 28.5 };
            double expected = 16;
            Assert.Equal(expected, source.AsQueryable().Average());
        }

        [Fact]
        public void MultipleDoubleFromSelector()
        {
            var source = new []
            {
                new { name = "Tim", num = 5.5},
                new { name = "John", num = 15.5},
                new { name = "Bob", num = 3.0}
            };
            double expected = 8.0;
            Assert.Equal(expected, source.AsQueryable().Average(e => e.num));
        }

        // ---- double? ----

        [Fact]
        public void NullNDoubleSource()
        {
            Assert.Throws<ArgumentNullException>("source", () => ((IQueryable<double?>)null).Average());
        }

        [Fact]
        public void NullNDoubleSourceWithFunc()
        {
            Assert.Throws<ArgumentNullException>("source", () => ((IQueryable<double?>)null).Average(i => i));
        }

        [Fact]
        public void NullNDoubleFunc()
        {
            Expression<Func<double?, double?>> selector = null;
            Assert.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<double?>().AsQueryable().Average(selector));
        }

        [Fact]
        public void MultipleNullableDoubleSource()
        {
            double?[] source = { 5.5, 0, null, null, null, 15.5, 40.5, null, null, -23.5 };
            double? expected = 7.6;
            Assert.Equal(expected, source.AsQueryable().Average());
        }

        [Fact]
        public void NullableDoubleFromSelector()
        {
            var source = new[]
            {
                new{ name = "Tim", num = (double?)5.5 },
                new{ name = "John", num = (double?)15.5 },
                new{ name = "Bob", num = default(double?) }
            };
            double? expected = 10.5;
            Assert.Equal(expected, source.AsQueryable().Average(e => e.num));
        }

        // ---- decimal ----

        [Fact]
        public void NullDecimalSource()
        {
            Assert.Throws<ArgumentNullException>("source", () => ((IQueryable<decimal>)null).Average());
        }

        [Fact]
        public void NullDecimalSourceWithFunc()
        {
            Assert.Throws<ArgumentNullException>("source", () => ((IQueryable<decimal>)null).Average(i => i));
        }

        [Fact]
        public void NullDecimalFunc()
        {
            Expression<Func<decimal, decimal>> selector = null;
            Assert.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<decimal>().AsQueryable().Average(selector));
        }

        [Fact]
        public void MultipleDecimalValues()
        {
            decimal[] source = { 5.5m, -10m, 15.5m, 40.5m, 28.5m };
            decimal expected = 16m;
            Assert.Equal(expected, source.AsQueryable().Average());
        }

        [Fact]
        public void MultipleDecimalFromSelector()
        {
            var source = new[]
            {
                new{ name = "Tim", num = 5.5m},
                new{ name = "John", num = 15.5m},
                new{ name = "Bob", num = 3.0m}
            };
            decimal expected = 8.0m;
            Assert.Equal(expected, source.AsQueryable().Average(e => e.num));
        }

        // ---- decimal? ----

        [Fact]
        public void NullNDecimalSource()
        {
            Assert.Throws<ArgumentNullException>("source", () => ((IQueryable<decimal?>)null).Average());
        }

        [Fact]
        public void NullNDecimalSourceWithFunc()
        {
            Assert.Throws<ArgumentNullException>("source", () => ((IQueryable<decimal?>)null).Average(i => i));
        }

        [Fact]
        public void NullNDecimalFunc()
        {
            Expression<Func<decimal?, decimal?>> selector = null;
            Assert.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<decimal?>().AsQueryable().Average(selector));
        }

        [Fact]
        public void MultipleNullableeDecimalSource()
        {
            decimal?[] source = { 5.5m, 0, null, null, null, 15.5m, 40.5m, null, null, -23.5m };
            decimal? expected = 7.6m;
            Assert.Equal(expected, source.AsQueryable().Average());
        }

        [Fact]
        public void NullableDecimalFromSelector()
        {
            var source = new[]
            {
                new{ name = "Tim", num = (decimal?)5.5m},
                new{ name = "John", num = (decimal?)15.5m},
                new{ name = "Bob", num = (decimal?)null}
            };
            decimal? expected = 10.5m;
            Assert.Equal(expected, source.AsQueryable().Average(e => e.num));
        }

        // ---- float ----

        [Fact]
        public void NullFloatSource()
        {
            Assert.Throws<ArgumentNullException>("source", () => ((IQueryable<float>)null).Average());
        }

        [Fact]
        public void NullFloatSourceWithFunc()
        {
            Assert.Throws<ArgumentNullException>("source", () => ((IQueryable<float>)null).Average(i => i));
        }

        [Fact]
        public void NullFloatFunc()
        {
            Expression<Func<float, float>> selector = null;
            Assert.Throws<ArgumentNullException>("selector", () => Enumerable.Empty<float>().AsQueryable().Average(selector));
        }

        [Fact]
        public void MultipleFloatValues()
        {
            float[] source = { 5.5f, -10f, 15.5f, 40.5f, 28.5f };
            float expected = 16f;
            Assert.Equal(expected, source.AsQueryable().Average());
        }

        [Fact]
        public void MultipleFloatFromSelector()
        {
            var source = new[]
            {
                new{ name = "Tim", num = 5.5f},
                new{ name = "John", num = 15.5f},
                new{ name = "Bob", num = 3.0f}
            };
            float expected = 8.0f;
            Assert.Equal(expected, source.AsQueryable().Average(e => e.num));
        }

        // ---- Smoke tests: {0, 2, 1} averages to 1 for every overload,
        // with and without a selector, checking the result type per element type. ----

        [Fact]
        public void Average1()
        {
            var val = (new int[] { 0, 2, 1 }).AsQueryable().Average();
            Assert.Equal((double)1, val);
        }

        [Fact]
        public void Average2()
        {
            var val = (new int?[] { 0, 2, 1 }).AsQueryable().Average();
            Assert.Equal((double)1, val);
        }

        [Fact]
        public void Average3()
        {
            var val = (new long[] { 0, 2, 1 }).AsQueryable().Average();
            Assert.Equal((double)1, val);
        }

        [Fact]
        public void Average4()
        {
            var val = (new long?[] { 0, 2, 1 }).AsQueryable().Average();
            Assert.Equal((double)1, val);
        }

        [Fact]
        public void Average5()
        {
            var val = (new float[] { 0, 2, 1 }).AsQueryable().Average();
            Assert.Equal((float)1, val);
        }

        [Fact]
        public void Average6()
        {
            var val = (new float?[] { 0, 2, 1 }).AsQueryable().Average();
            Assert.Equal((float)1, val);
        }

        [Fact]
        public void Average7()
        {
            var val = (new double[] { 0, 2, 1 }).AsQueryable().Average();
            Assert.Equal((double)1, val);
        }

        [Fact]
        public void Average8()
        {
            var val = (new double?[] { 0, 2, 1 }).AsQueryable().Average();
            Assert.Equal((double)1, val);
        }

        [Fact]
        public void Average9()
        {
            var val = (new decimal[] { 0, 2, 1 }).AsQueryable().Average();
            Assert.Equal((decimal)1, val);
        }

        [Fact]
        public void Average10()
        {
            var val = (new decimal?[] { 0, 2, 1 }).AsQueryable().Average();
            Assert.Equal((decimal)1, val);
        }

        [Fact]
        public void Average11()
        {
            var val = (new int[] { 0, 2, 1 }).AsQueryable().Average(n => n);
            Assert.Equal((double)1, val);
        }

        [Fact]
        public void Average12()
        {
            var val = (new int?[] { 0, 2, 1 }).AsQueryable().Average(n => n);
            Assert.Equal((double)1, val);
        }

        [Fact]
        public void Average13()
        {
            var val = (new long[] { 0, 2, 1 }).AsQueryable().Average(n => n);
            Assert.Equal((double)1, val);
        }

        [Fact]
        public void Average14()
        {
            var val = (new long?[] { 0, 2, 1 }).AsQueryable().Average(n => n);
            Assert.Equal((double)1, val);
        }

        [Fact]
        public void Average15()
        {
            var val = (new float[] { 0, 2, 1 }).AsQueryable().Average(n => n);
            Assert.Equal((float)1, val);
        }

        [Fact]
        public void Average16()
        {
            var val = (new float?[] { 0, 2, 1 }).AsQueryable().Average(n => n);
            Assert.Equal((float)1, val);
        }

        [Fact]
        public void Average17()
        {
            var val = (new double[] { 0, 2, 1 }).AsQueryable().Average(n => n);
            Assert.Equal((double)1, val);
        }

        [Fact]
        public void Average18()
        {
            var val = (new double?[] { 0, 2, 1 }).AsQueryable().Average(n => n);
            Assert.Equal((double)1, val);
        }

        [Fact]
        public void Average19()
        {
            var val = (new decimal[] { 0, 2, 1 }).AsQueryable().Average(n => n);
            Assert.Equal((decimal)1, val);
        }

        [Fact]
        public void Average20()
        {
            var val = (new decimal?[] { 0, 2, 1 }).AsQueryable().Average(n => n);
            Assert.Equal((decimal)1, val);
        }
    }
}
// ***********************************************************************
// Copyright (c) 2007 Charlie Poole
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// ***********************************************************************

using System;
using System.IO;
using System.Collections;
#if NET_2_0
using System.Collections.Generic;
#endif

namespace NUnit.Framework.Constraints
{
    /// <summary>
    /// EqualConstraint is able to compare an actual value with the
    /// expected value provided in its constructor. Two objects are
    /// considered equal if both are null, or if both have the same
    /// value. NUnit has special semantics for some object types.
    /// </summary>
    public class EqualConstraint : Constraint
    {
        #region Static and Instance Fields
        // NOTE(review): not referenced anywhere in this file — presumably a
        // registry used elsewhere or a leftover; confirm before removing.
        private static IDictionary constraintHelpers = new Hashtable();

        // The value supplied to the constructor, against which Matches() compares.
        private readonly object expected;

        /// <summary>
        /// If true, strings in error messages will be clipped
        /// </summary>
        private bool clipStrings = true;

        /// <summary>
        /// NUnitEqualityComparer used to test equality.
        /// </summary>
        private NUnitEqualityComparer comparer = new NUnitEqualityComparer();

        #region Message Strings
        // Format strings for the various failure-message shapes; the *_1 forms
        // are used when expected and actual have the same length/type, the *_2
        // forms when they differ.
        private static readonly string StringsDiffer_1 =
            "String lengths are both {0}. Strings differ at index {1}.";
        private static readonly string StringsDiffer_2 =
            "Expected string length {0} but was {1}. Strings differ at index {2}.";
        private static readonly string StreamsDiffer_1 =
            "Stream lengths are both {0}. Streams differ at offset {1}.";
        private static readonly string StreamsDiffer_2 =
            "Expected Stream length {0} but was {1}.";// Streams differ at offset {2}.";
        private static readonly string CollectionType_1 =
            "Expected and actual are both {0}";
        private static readonly string CollectionType_2 =
            "Expected is {0}, actual is {1}";
        private static readonly string ValuesDiffer_1 =
            "Values differ at index {0}";
        private static readonly string ValuesDiffer_2 =
            "Values differ at expected index {0}, actual index {1}";
        #endregion
        #endregion

        #region Constructor
        /// <summary>
        /// Initializes a new instance of the <see cref="EqualConstraint"/> class.
        /// </summary>
        /// <param name="expected">The expected value.</param>
        public EqualConstraint(object expected) : base(expected)
        {
            this.expected = expected;
        }
        #endregion

        #region Constraint Modifiers
        /// <summary>
        /// Flag the constraint to ignore case and return self.
        /// </summary>
        public EqualConstraint IgnoreCase
        {
            get
            {
                comparer.IgnoreCase = true;
                return this;
            }
        }

        /// <summary>
        /// Flag the constraint to suppress string clipping
        /// and return self.
        /// </summary>
        public EqualConstraint NoClip
        {
            get
            {
                clipStrings = false;
                return this;
            }
        }

        /// <summary>
        /// Flag the constraint to compare arrays as collections
        /// and return self.
        /// </summary>
        public EqualConstraint AsCollection
        {
            get
            {
                comparer.CompareAsCollection = true;
                return this;
            }
        }

        /// <summary>
        /// Flag the constraint to use a tolerance when determining equality.
        /// </summary>
        /// <param name="amount">Tolerance value to be used</param>
        /// <returns>Self.</returns>
        public EqualConstraint Within(object amount)
        {
            // A second Within() would silently discard the first tolerance,
            // so it is rejected explicitly.
            if (!comparer.Tolerance.IsEmpty)
                throw new InvalidOperationException("Within modifier may appear only once in a constraint expression");

            comparer.Tolerance = new Tolerance(amount);
            return this;
        }

        /// <summary>
        /// Switches the .Within() modifier to interpret its tolerance as
        /// a distance in representable values (see remarks).
        /// </summary>
        /// <returns>Self.</returns>
        /// <remarks>
        /// Ulp stands for "unit in the last place" and describes the minimum
        /// amount a given value can change. For any integers, an ulp is 1 whole
        /// digit. For floating point values, the accuracy of which is better
        /// for smaller numbers and worse for larger numbers, an ulp depends
        /// on the size of the number. Using ulps for comparison of floating
        /// point results instead of fixed tolerances is safer because it will
        /// automatically compensate for the added inaccuracy of larger numbers.
        /// </remarks>
        public EqualConstraint Ulps
        {
            get
            {
                comparer.Tolerance = comparer.Tolerance.Ulps;
                return this;
            }
        }

        /// <summary>
        /// Switches the .Within() modifier to interpret its tolerance as
        /// a percentage that the actual values is allowed to deviate from
        /// the expected value.
        /// </summary>
        /// <returns>Self</returns>
        public EqualConstraint Percent
        {
            get
            {
                comparer.Tolerance = comparer.Tolerance.Percent;
                return this;
            }
        }

        /// <summary>
        /// Causes the tolerance to be interpreted as a TimeSpan in days.
        /// </summary>
        /// <returns>Self</returns>
        public EqualConstraint Days
        {
            get
            {
                comparer.Tolerance = comparer.Tolerance.Days;
                return this;
            }
        }

        /// <summary>
        /// Causes the tolerance to be interpreted as a TimeSpan in hours.
        /// </summary>
        /// <returns>Self</returns>
        public EqualConstraint Hours
        {
            get
            {
                comparer.Tolerance = comparer.Tolerance.Hours;
                return this;
            }
        }

        /// <summary>
        /// Causes the tolerance to be interpreted as a TimeSpan in minutes.
        /// </summary>
        /// <returns>Self</returns>
        public EqualConstraint Minutes
        {
            get
            {
                comparer.Tolerance = comparer.Tolerance.Minutes;
                return this;
            }
        }

        /// <summary>
        /// Causes the tolerance to be interpreted as a TimeSpan in seconds.
        /// </summary>
        /// <returns>Self</returns>
        public EqualConstraint Seconds
        {
            get
            {
                comparer.Tolerance = comparer.Tolerance.Seconds;
                return this;
            }
        }

        /// <summary>
        /// Causes the tolerance to be interpreted as a TimeSpan in milliseconds.
        /// </summary>
        /// <returns>Self</returns>
        public EqualConstraint Milliseconds
        {
            get
            {
                comparer.Tolerance = comparer.Tolerance.Milliseconds;
                return this;
            }
        }

        /// <summary>
        /// Causes the tolerance to be interpreted as a TimeSpan in clock ticks.
        /// </summary>
        /// <returns>Self</returns>
        public EqualConstraint Ticks
        {
            get
            {
                comparer.Tolerance = comparer.Tolerance.Ticks;
                return this;
            }
        }

        /// <summary>
        /// Flag the constraint to use the supplied IComparer object.
        /// </summary>
        /// <param name="comparer">The IComparer object to use.</param>
        /// <returns>Self.</returns>
        [Obsolete("Replace with 'Using'")]
        public EqualConstraint Comparer(IComparer comparer)
        {
            return Using(comparer);
        }

        /// <summary>
        /// Flag the constraint to use the supplied IComparer object.
        /// </summary>
        /// <param name="comparer">The IComparer object to use.</param>
        /// <returns>Self.</returns>
        public EqualConstraint Using(IComparer comparer)
        {
            this.comparer.ExternalComparer = EqualityAdapter.For(comparer);
            return this;
        }

#if NET_2_0
        /// <summary>
        /// Flag the constraint to use the supplied IComparer object.
        /// </summary>
        /// <param name="comparer">The IComparer object to use.</param>
        /// <returns>Self.</returns>
        public EqualConstraint Using<T>(IComparer<T> comparer)
        {
            this.comparer.ExternalComparer = EqualityAdapter.For(comparer);
            return this;
        }

        /// <summary>
        /// Flag the constraint to use the supplied Comparison object.
        /// </summary>
        /// <param name="comparer">The IComparer object to use.</param>
        /// <returns>Self.</returns>
        public EqualConstraint Using<T>(Comparison<T> comparer)
        {
            this.comparer.ExternalComparer = EqualityAdapter.For(comparer);
            return this;
        }

        /// <summary>
        /// Flag the constraint to use the supplied IEqualityComparer object.
        /// </summary>
        /// <param name="comparer">The IComparer object to use.</param>
        /// <returns>Self.</returns>
        public EqualConstraint Using(IEqualityComparer comparer)
        {
            this.comparer.ExternalComparer = EqualityAdapter.For(comparer);
            return this;
        }

        /// <summary>
        /// Flag the constraint to use the supplied IEqualityComparer object.
        /// </summary>
        /// <param name="comparer">The IComparer object to use.</param>
        /// <returns>Self.</returns>
        public EqualConstraint Using<T>(IEqualityComparer<T> comparer)
        {
            this.comparer.ExternalComparer = EqualityAdapter.For(comparer);
            return this;
        }
#endif
        #endregion

        #region Public Methods
        /// <summary>
        /// Test whether the constraint is satisfied by a given value
        /// </summary>
        /// <param name="actual">The value to be tested</param>
        /// <returns>True for success, false for failure</returns>
        public override bool Matches(object actual)
        {
            // Store the actual value (base-class field) so the message writers
            // below can report it on failure.
            this.actual = actual;

            return comparer.ObjectsEqual( expected, actual );
        }

        /// <summary>
        /// Write a failure message. Overridden to provide custom
        /// failure messages for EqualConstraint.
        /// </summary>
        /// <param name="writer">The MessageWriter to write to</param>
        public override void WriteMessageTo(MessageWriter writer)
        {
            DisplayDifferences(writer, expected, actual, 0);
        }

        /// <summary>
        /// Write description of this constraint
        /// </summary>
        /// <param name="writer">The MessageWriter to write to</param>
        public override void WriteDescriptionTo(MessageWriter writer)
        {
            writer.WriteExpectedValue( expected );

            if (comparer.Tolerance != null && !comparer.Tolerance.IsEmpty)
            {
                writer.WriteConnector("+/-");
                writer.WriteExpectedValue(comparer.Tolerance);
            }

            if (comparer.IgnoreCase)
                writer.WriteModifier("ignoring case");
        }

        // Dispatches to the type-specific difference display: strings, then
        // collections (recursing with increasing depth for nested mismatches),
        // then streams, falling back to the writer's generic display.
        private void DisplayDifferences(MessageWriter writer, object expected, object actual, int depth)
        {
            if (expected is string && actual is string)
                DisplayStringDifferences(writer, (string)expected, (string)actual);
            else if (expected is ICollection && actual is ICollection)
                DisplayCollectionDifferences(writer, (ICollection)expected, (ICollection)actual, depth);
            else if (expected is Stream && actual is Stream)
                DisplayStreamDifferences(writer, (Stream)expected, (Stream)actual, depth);
            else if ( comparer.Tolerance != null )
                writer.DisplayDifferences( expected, actual, comparer.Tolerance );
            else
                writer.DisplayDifferences(expected, actual);
        }
        #endregion

        #region DisplayStringDifferences
        // Reports the first index at which the two strings diverge, choosing the
        // same-length or different-length message format as appropriate.
        private void DisplayStringDifferences(MessageWriter writer, string expected, string actual)
        {
            int mismatch = MsgUtils.FindMismatchPosition(expected, actual, 0, comparer.IgnoreCase);

            if (expected.Length == actual.Length)
                writer.WriteMessageLine(StringsDiffer_1, expected.Length, mismatch);
            else
                writer.WriteMessageLine(StringsDiffer_2, expected.Length, actual.Length, mismatch);

            writer.DisplayStringDifferences(expected, actual, mismatch, comparer.IgnoreCase, clipStrings);
        }
        #endregion

        #region DisplayStreamDifferences
        // Reports a stream-content mismatch; the failing offset (recorded by the
        // comparer during Matches) is only available when lengths are equal.
        private void DisplayStreamDifferences(MessageWriter writer, Stream expected, Stream actual, int depth)
        {
            if (expected.Length == actual.Length)
            {
                long offset = (long)comparer.FailurePoints[depth];
                writer.WriteMessageLine(StreamsDiffer_1, expected.Length, offset);
            }
            else
                writer.WriteMessageLine(StreamsDiffer_2, expected.Length, actual.Length);
        }
        #endregion

        #region DisplayCollectionDifferences
        /// <summary>
        /// Display the failure information for two collections that did not match.
        /// </summary>
        /// <param name="writer">The MessageWriter on which to display</param>
        /// <param name="expected">The expected collection.</param>
        /// <param name="actual">The actual collection</param>
        /// <param name="depth">The depth of this failure in a set of nested collections</param>
        private void DisplayCollectionDifferences(MessageWriter writer, ICollection expected, ICollection actual, int depth)
        {
            // FailurePoints is populated by the comparer during Matches(); one
            // entry per nesting level. -1 means no specific point was recorded.
            int failurePoint = comparer.FailurePoints.Count > depth
                ? (int)comparer.FailurePoints[depth]
                : -1;

            DisplayCollectionTypesAndSizes(writer, expected, actual, depth);

            if (failurePoint >= 0)
            {
                DisplayFailurePoint(writer, expected, actual, failurePoint, depth);
                if (failurePoint < expected.Count && failurePoint < actual.Count)
                    // Both collections have an element here: recurse to show the
                    // element-level difference at the next depth.
                    DisplayDifferences(
                        writer,
                        GetValueFromCollection(expected, failurePoint),
                        GetValueFromCollection(actual, failurePoint),
                        ++depth);
                else if (expected.Count < actual.Count)
                {
                    writer.Write( "  Extra:    " );
                    writer.WriteCollectionElements( actual, failurePoint, 3 );
                }
                else
                {
                    writer.Write( "  Missing:  " );
                    writer.WriteCollectionElements( expected, failurePoint, 3 );
                }
            }
        }

        /// <summary>
        /// Displays a single line showing the types and sizes of the expected
        /// and actual collections or arrays. If both are identical, the value is
        /// only shown once.
        /// </summary>
        /// <param name="writer">The MessageWriter on which to display</param>
        /// <param name="expected">The expected collection or array</param>
        /// <param name="actual">The actual collection or array</param>
        /// <param name="indent">The indentation level for the message line</param>
        private void DisplayCollectionTypesAndSizes(MessageWriter writer, ICollection expected, ICollection actual, int indent)
        {
            string sExpected = MsgUtils.GetTypeRepresentation(expected);
            //if (!(expected is Array))
            //    sExpected += string.Format(" with {0} elements", expected.Count);

            string sActual = MsgUtils.GetTypeRepresentation(actual);
            //if (!(actual is Array))
            //    sActual += string.Format(" with {0} elements", actual.Count);

            if (sExpected == sActual)
                writer.WriteMessageLine(indent, CollectionType_1, sExpected);
            else
                writer.WriteMessageLine(indent, CollectionType_2, sExpected, sActual);
        }

        /// <summary>
        /// Displays a single line showing the point in the expected and actual
        /// arrays at which the comparison failed. If the arrays have different
        /// structures or dimensions, both values are shown.
        /// </summary>
        /// <param name="writer">The MessageWriter on which to display</param>
        /// <param name="expected">The expected array</param>
        /// <param name="actual">The actual array</param>
        /// <param name="failurePoint">Index of the failure point in the underlying collections</param>
        /// <param name="indent">The indentation level for the message line</param>
        private void DisplayFailurePoint(MessageWriter writer, ICollection expected, ICollection actual, int failurePoint, int indent)
        {
            Array expectedArray = expected as Array;
            Array actualArray = actual as Array;

            int expectedRank = expectedArray != null ? expectedArray.Rank : 1;
            int actualRank = actualArray != null ? actualArray.Rank : 1;

            // A single shared index can be shown only when both sides are arrays
            // of the same rank and matching lengths in every dimension past the first.
            bool useOneIndex = expectedRank == actualRank;

            if (expectedArray != null && actualArray != null)
                for (int r = 1; r < expectedRank && useOneIndex; r++)
                    if (expectedArray.GetLength(r) != actualArray.GetLength(r))
                        useOneIndex = false;

            int[] expectedIndices = MsgUtils.GetArrayIndicesFromCollectionIndex(expected, failurePoint);
            if (useOneIndex)
            {
                writer.WriteMessageLine(indent, ValuesDiffer_1, MsgUtils.GetArrayIndicesAsString(expectedIndices));
            }
            else
            {
                int[] actualIndices = MsgUtils.GetArrayIndicesFromCollectionIndex(actual, failurePoint);
                writer.WriteMessageLine(indent, ValuesDiffer_2,
                    MsgUtils.GetArrayIndicesAsString(expectedIndices),
                    MsgUtils.GetArrayIndicesAsString(actualIndices));
            }
        }

        // Retrieves the element at a flat index from any ICollection:
        // multi-dimensional arrays translate the index to per-dimension indices,
        // IList uses the indexer, anything else is walked by enumeration.
        private static object GetValueFromCollection(ICollection collection, int index)
        {
            Array array = collection as Array;

            if (array != null && array.Rank > 1)
                return array.GetValue(MsgUtils.GetArrayIndicesFromCollectionIndex(array, index));

            if (collection is IList)
                return ((IList)collection)[index];

            foreach (object obj in collection)
                if (--index < 0)
                    return obj;

            return null;
        }
        #endregion
    }
}
// Copyright 2011 Microsoft Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

namespace Microsoft.Data.OData.Atom
{
    #region Namespaces
    using System;
    using System.Collections;
    using System.Collections.Generic;
    using System.Data.Services.Common;
    using System.Diagnostics;
    using System.Diagnostics.CodeAnalysis;
    using System.Globalization;
    using Microsoft.Data.Edm;
    using Microsoft.Data.OData.Metadata;
    using o = Microsoft.Data.OData;
    #endregion Namespaces

    /// <summary>
    /// Writer for the EPM syndication-only. Writes the EPM properties into ATOM metadata OM.
    /// </summary>
    internal sealed class EpmSyndicationWriter : EpmWriter
    {
        /// <summary>The EPM target tree to use.</summary>
        private readonly EpmTargetTree epmTargetTree;

        /// <summary>Atom entry metadata to write to.</summary>
        private readonly AtomEntryMetadata entryMetadata;

        /// <summary>
        /// Constructor.
        /// </summary>
        /// <param name="epmTargetTree">The EPM target tree to use.</param>
        /// <param name="atomOutputContext">The output context currently in use.</param>
        private EpmSyndicationWriter(EpmTargetTree epmTargetTree, ODataAtomOutputContext atomOutputContext)
            : base(atomOutputContext)
        {
            this.epmTargetTree = epmTargetTree;
            this.entryMetadata = new AtomEntryMetadata();
        }

        /// <summary>
        /// Writes the syndication part of EPM for an entry into ATOM metadata OM.
        /// </summary>
        /// <param name="epmTargetTree">The EPM target tree to use.</param>
        /// <param name="epmValueCache">The entry properties value cache to use to access the properties.</param>
        /// <param name="type">The type of the entry.</param>
        /// <param name="atomOutputContext">The output context currently in use.</param>
        /// <returns>The ATOM metadata OM with the EPM values populated.</returns>
        internal static AtomEntryMetadata WriteEntryEpm(
            EpmTargetTree epmTargetTree,
            EntryPropertiesValueCache epmValueCache,
            IEdmEntityTypeReference type,
            ODataAtomOutputContext atomOutputContext)
        {
            DebugUtils.CheckNoExternalCallers();
            Debug.Assert(epmTargetTree != null, "epmTargetTree != null");
            Debug.Assert(epmValueCache != null, "epmValueCache != null");
            Debug.Assert(type != null, "For any EPM to exist the metadata must be available.");

            // Factory entry point: a new writer instance is created per entry.
            EpmSyndicationWriter epmWriter = new EpmSyndicationWriter(epmTargetTree, atomOutputContext);
            return epmWriter.WriteEntryEpm(epmValueCache, type);
        }

        /// <summary>
        /// Creates a text ATOM value.
        /// </summary>
        /// <param name="textValue">The text value to use.</param>
        /// <param name="contentKind">The content kind of the value.</param>
        /// <returns>The Atom text value.</returns>
        private static AtomTextConstruct CreateAtomTextConstruct(string textValue, SyndicationTextContentKind contentKind)
        {
            AtomTextConstructKind kind;
            switch (contentKind)
            {
                case SyndicationTextContentKind.Plaintext:
                    kind = AtomTextConstructKind.Text;
                    break;
                case SyndicationTextContentKind.Html:
                    kind = AtomTextConstructKind.Html;
                    break;
                case SyndicationTextContentKind.Xhtml:
                    kind = AtomTextConstructKind.Xhtml;
                    break;
                default:
                    // Unreachable for valid metadata; reported as an internal error.
                    throw new ODataException(o.Strings.General_InternalError(InternalErrorCodes.EpmSyndicationWriter_CreateAtomTextConstruct));
            }

            return new AtomTextConstruct
            {
                Kind = kind,
                Text = textValue,
            };
        }

        /// <summary>
        /// Given an object returns the corresponding DateTimeOffset value through conversions.
        /// </summary>
        /// <param name="propertyValue">Object containing property value.</param>
        /// <param name="targetProperty">The target syndication property for the mapping (used for exception messages).</param>
        /// <param name="writerBehavior">The current settings to control the behavior of the writer.</param>
        /// <returns>DateTimeOffset after conversion.</returns>
        [SuppressMessage("Microsoft.Usage", "CA1801:ReviewUnusedParameters", MessageId = "writerBehavior", Justification = "Used in debug assert.")]
        private static DateTimeOffset CreateDateTimeValue(object propertyValue, SyndicationItemProperty targetProperty, ODataWriterBehavior writerBehavior)
        {
            Debug.Assert(
                writerBehavior.FormatBehaviorKind != ODataBehaviorKind.WcfDataServicesClient,
                "CreateDateTimeValue should not be used in WCF DS client mode.");

            // A null property value is mapped to the current time.
            if (propertyValue == null)
            {
                return DateTimeOffset.Now;
            }

            if (propertyValue is DateTimeOffset)
            {
                return (DateTimeOffset)propertyValue;
            }

            if (propertyValue is DateTime)
            {
                // DateTimeOffset takes care of DateTimes of Unspecified kind so we won't end up
                // with datetime without timezone info mapped to atom:updated or atom:published element.
                return new DateTimeOffset((DateTime)propertyValue);
            }

            string stringValue = propertyValue as string;
            if (stringValue != null)
            {
                // Try DateTimeOffset first, then DateTime, before giving up.
                DateTimeOffset date;
                if (!DateTimeOffset.TryParse(stringValue, out date))
                {
                    DateTime result;
                    if (!DateTime.TryParse(stringValue, out result))
                    {
                        throw new ODataException(o.Strings.EpmSyndicationWriter_DateTimePropertyCanNotBeConverted(targetProperty.ToString()));
                    }

                    return new DateTimeOffset(result);
                }

                return date;
            }

            try
            {
                // Last resort: generic conversion for any other CLR type.
                return new DateTimeOffset(Convert.ToDateTime(propertyValue, CultureInfo.InvariantCulture));
            }
            catch (Exception e)
            {
                // Only convert catchable conversion failures into an ODataException.
                if (!ExceptionUtils.IsCatchableExceptionType(e))
                {
                    throw;
                }

                throw new ODataException(o.Strings.EpmSyndicationWriter_DateTimePropertyCanNotBeConverted(targetProperty.ToString()));
            }
        }

        /// <summary>
        /// Given an object returns the corresponding string representation of the value.
        /// </summary>
        /// <param name="propertyValue">Object containing property value.</param>
        /// <param name="writerBehavior">The current settings to control the behavior of the writer.</param>
        /// <returns>String representation of the property value.</returns>
        [SuppressMessage("Microsoft.Usage", "CA1801:ReviewUnusedParameters", MessageId = "writerBehavior", Justification = "Used in debug assert.")]
        private static string CreateDateTimeStringValue(object propertyValue, ODataWriterBehavior writerBehavior)
        {
            Debug.Assert(
                writerBehavior.FormatBehaviorKind == ODataBehaviorKind.WcfDataServicesClient,
                "CreateDateTimeStringValue should only be used in WCF DS client mode.");

            if (propertyValue == null)
            {
                propertyValue = DateTimeOffset.Now;
            }

            if (propertyValue is DateTime)
            {
                // DateTimeOffset takes care of DateTimes of Unspecified kind so we won't end up
                // with datetime without timezone info mapped to atom:updated or atom:published element.
                propertyValue = new DateTimeOffset((DateTime)propertyValue);
            }

            // For DateTimeOffset values we need to use the ATOM format when translating them
            // to strings since the value will be used in ATOM metadata.
            if (propertyValue is DateTimeOffset)
            {
                return ODataAtomConvert.ToAtomString((DateTimeOffset)propertyValue);
            }

            return EpmWriterUtils.GetPropertyValueAsText(propertyValue);
        }

        /// <summary>
        /// Writes the syndication part of EPM for an entry into ATOM metadata OM.
        /// </summary>
        /// <param name="epmValueCache">The entry properties value cache to use to access the properties.</param>
        /// <param name="entityType">The type of the entry.</param>
        /// <returns>The ATOM metadata OM with the EPM values populated.</returns>
        private AtomEntryMetadata WriteEntryEpm(
            EntryPropertiesValueCache epmValueCache,
            IEdmEntityTypeReference entityType)
        {
            // If there are no syndication mappings, just return null.
            EpmTargetPathSegment syndicationRootSegment = this.epmTargetTree.SyndicationRoot;
            Debug.Assert(syndicationRootSegment != null, "EPM Target tree must always have syndication root.");
            if (syndicationRootSegment.SubSegments.Count == 0)
            {
                return null;
            }

            foreach (EpmTargetPathSegment targetSegment in syndicationRootSegment.SubSegments)
            {
                if (targetSegment.HasContent)
                {
                    // Leaf segment: map the property value directly onto the
                    // corresponding ATOM metadata member.
                    EntityPropertyMappingInfo epmInfo = targetSegment.EpmInfo;
                    Debug.Assert(
                        epmInfo != null && epmInfo.Attribute != null,
                        "If the segment has content it must have EpmInfo which in turn must have the EPM attribute");

                    object propertyValue = this.ReadEntryPropertyValue(
                        epmInfo,
                        epmValueCache,
                        entityType);
                    string textPropertyValue = EpmWriterUtils.GetPropertyValueAsText(propertyValue);

                    switch (epmInfo.Attribute.TargetSyndicationItem)
                    {
                        case SyndicationItemProperty.Updated:
                            // In WCF DS client mode the raw string form is preserved;
                            // otherwise the value is converted to a DateTimeOffset.
                            if (this.WriterBehavior.FormatBehaviorKind == ODataBehaviorKind.WcfDataServicesClient)
                            {
                                this.entryMetadata.UpdatedString = EpmSyndicationWriter.CreateDateTimeStringValue(propertyValue, this.WriterBehavior);
                            }
                            else
                            {
                                this.entryMetadata.Updated = EpmSyndicationWriter.CreateDateTimeValue(propertyValue, SyndicationItemProperty.Updated, this.WriterBehavior);
                            }

                            break;
                        case SyndicationItemProperty.Published:
                            if (this.WriterBehavior.FormatBehaviorKind == ODataBehaviorKind.WcfDataServicesClient)
                            {
                                this.entryMetadata.PublishedString = EpmSyndicationWriter.CreateDateTimeStringValue(propertyValue, this.WriterBehavior);
                            }
                            else
                            {
                                this.entryMetadata.Published = EpmSyndicationWriter.CreateDateTimeValue(propertyValue, SyndicationItemProperty.Published, this.WriterBehavior);
                            }

                            break;
                        case SyndicationItemProperty.Rights:
                            this.entryMetadata.Rights = EpmSyndicationWriter.CreateAtomTextConstruct(textPropertyValue, epmInfo.Attribute.TargetTextContentKind);
                            break;
                        case SyndicationItemProperty.Summary:
                            this.entryMetadata.Summary = EpmSyndicationWriter.CreateAtomTextConstruct(textPropertyValue, epmInfo.Attribute.TargetTextContentKind);
                            break;
                        case SyndicationItemProperty.Title:
                            this.entryMetadata.Title = EpmSyndicationWriter.CreateAtomTextConstruct(textPropertyValue, epmInfo.Attribute.TargetTextContentKind);
                            break;
                        default:
                            throw new ODataException(o.Strings.General_InternalError(InternalErrorCodes.EpmSyndicationWriter_WriteEntryEpm_ContentTarget));
                    }
                }
                else
                {
                    // Non-leaf segment (author/contributor): recurse into its children.
                    this.WriteParentSegment(targetSegment, epmValueCache, entityType);
                }
            }

            return this.entryMetadata;
        }

        /// <summary>
        /// Writes a non-leaf segment which has sub segments.
        /// </summary>
        /// <param name="targetSegment">The segment being written</param>
        /// <param name="epmValueCache">EPM value cache to use to get property values, or a primitive value</param>
        /// <param name="typeReference">The type of the entry or collection item.</param>
        private void WriteParentSegment(EpmTargetPathSegment targetSegment, object epmValueCache, IEdmTypeReference typeReference)
        {
            Debug.Assert(targetSegment != null, "targetSegment != null");

            if (targetSegment.SegmentName == AtomConstants.AtomAuthorElementName)
            {
                AtomPersonMetadata authorMetadata = this.WritePersonEpm(targetSegment, epmValueCache, typeReference);

                if (authorMetadata != null)
                {
                    // Lazily materialize the Authors list on first use.
                    List<AtomPersonMetadata> authors = (List<AtomPersonMetadata>)this.entryMetadata.Authors;
                    if (authors == null)
                    {
                        authors = new List<AtomPersonMetadata>();
                        this.entryMetadata.Authors = authors;
                    }

                    authors.Add(authorMetadata);
                }
            }
            else if (targetSegment.SegmentName == AtomConstants.AtomContributorElementName)
            {
                AtomPersonMetadata contributorMetadata = this.WritePersonEpm(targetSegment, epmValueCache, typeReference);

                if (contributorMetadata != null)
                {
                    // Lazily materialize the Contributors list on first use.
                    List<AtomPersonMetadata> contributors = (List<AtomPersonMetadata>)this.entryMetadata.Contributors;
                    if (contributors == null)
                    {
                        contributors = new List<AtomPersonMetadata>();
                        this.entryMetadata.Contributors = contributors;
                    }

                    contributors.Add(contributorMetadata);
                }
            }
            else
            {
                // Unhandled EpmTargetPathSegment.SegmentName.
                throw new ODataException(o.Strings.General_InternalError(InternalErrorCodes.EpmSyndicationWriter_WriteParentSegment_TargetSegmentName));
            }
        }

        /// <summary>
        /// Writes EPM value to a person construct (author or contributor).
        /// </summary>
        /// <param name="targetSegment">The target segment which points to either author or contributor element.</param>
        /// <param name="epmValueCache">EPM value cache to use to get property values, or a primitive value</param>
        /// <param name="typeReference">The type of the entry or collection item.</param>
        /// <returns>The person metadata or null if no person metadata should be written for this mapping.</returns>
        private AtomPersonMetadata WritePersonEpm(EpmTargetPathSegment targetSegment, object epmValueCache, IEdmTypeReference typeReference)
        {
            Debug.Assert(targetSegment != null, "targetSegment != null");
            Debug.Assert(
                targetSegment.SegmentName == AtomConstants.AtomAuthorElementName || targetSegment.SegmentName == AtomConstants.AtomContributorElementName,
                "targetSegment must be author or contributor.");

            AtomPersonMetadata personMetadata = null;

            foreach (EpmTargetPathSegment subSegment in targetSegment.SubSegments)
            {
                Debug.Assert(subSegment.HasContent, "sub segment of author segment must have content, there are no subsegments which don't have content under author.");
                Debug.Assert(
                    subSegment.EpmInfo != null && subSegment.EpmInfo.Attribute != null && subSegment.EpmInfo.Attribute.TargetSyndicationItem != SyndicationItemProperty.CustomProperty,
                    "We should never find a subsegment without EPM attribute or for custom mapping when writing syndication person EPM.");
                string textPropertyValue = this.GetPropertyValueAsText(subSegment, epmValueCache, typeReference);

                if (textPropertyValue == null)
                {
                    // In V2 we write the mapped properties always in-content when the value is null.
                    continue;
                }

                // Initialize the person element only if we actually need to write something to it.
                Debug.Assert(subSegment.EpmInfo != null && subSegment.EpmInfo.Attribute != null, "The author subsegment must have EPM info and EPM attribute.");
                switch (subSegment.EpmInfo.Attribute.TargetSyndicationItem)
                {
                    case SyndicationItemProperty.AuthorName:
                    case SyndicationItemProperty.ContributorName:
                        if (textPropertyValue != null)
                        {
                            if (personMetadata == null)
                            {
                                personMetadata = new AtomPersonMetadata();
                            }

                            personMetadata.Name = textPropertyValue;
                        }

                        break;
                    case SyndicationItemProperty.AuthorEmail:
                    case SyndicationItemProperty.ContributorEmail:
                        // Email and Uri are only written for non-empty values.
                        if (textPropertyValue != null && textPropertyValue.Length > 0)
                        {
                            if (personMetadata == null)
                            {
                                personMetadata = new AtomPersonMetadata();
                            }

                            personMetadata.Email = textPropertyValue;
                        }

                        break;
                    case SyndicationItemProperty.AuthorUri:
                    case SyndicationItemProperty.ContributorUri:
                        if (textPropertyValue != null && textPropertyValue.Length > 0)
                        {
                            if (personMetadata == null)
                            {
                                personMetadata = new AtomPersonMetadata();
                            }

                            personMetadata.UriFromEpm = textPropertyValue;
                        }

                        break;
                    default:
                        throw new ODataException(o.Strings.General_InternalError(InternalErrorCodes.EpmSyndicationWriter_WritePersonEpm));
                }
            }

            return personMetadata;
        }

        /// <summary>
        /// Given a target segment the method returns the text value of the property mapped to that segment to be used in EPM.
        /// </summary>
        /// <param name="targetSegment">The target segment to read the value for.</param>
        /// <param name="epmValueCache">EPM value cache to use to get property values, or a primitive value</param>
        /// <param name="typeReference">The type of the entry or collection item.</param>
        /// <returns>The test representation of the value, or the method throws if the text representation was not possible to obtain.</returns>
        private string GetPropertyValueAsText(
            EpmTargetPathSegment targetSegment,
            object epmValueCache,
            IEdmTypeReference typeReference)
        {
            Debug.Assert(targetSegment != null, "targetSegment != null");
            Debug.Assert(targetSegment.HasContent, "The target segment to read property for must have content.");
            Debug.Assert(targetSegment.EpmInfo != null, "The EPM info must be available on the target segment to read its property.");
            Debug.Assert(epmValueCache != null, "epmValueCache != null");
            Debug.Assert(typeReference != null, "typeReference != null");

            object propertyValue;
            EntryPropertiesValueCache entryPropertiesValueCache = epmValueCache as EntryPropertiesValueCache;

            if (entryPropertiesValueCache != null)
            {
                // Cache case: resolve the property through the entry's value cache.
                propertyValue = this.ReadEntryPropertyValue(
                    targetSegment.EpmInfo,
                    entryPropertiesValueCache,
                    typeReference.AsEntity());
            }
            else
            {
                // Primitive case: the value itself was passed in; validate its type.
                propertyValue = epmValueCache;
                ValidationUtils.ValidateIsExpectedPrimitiveType(propertyValue, typeReference);
            }

            return EpmWriterUtils.GetPropertyValueAsText(propertyValue);
        }
    }
}
using GitTools.Testing;
using GitVersion.Core.Tests.Helpers;
using GitVersion.Extensions;
using GitVersion.Model.Configuration;
using GitVersion.VersionCalculation;
using LibGit2Sharp;
using NUnit.Framework;

namespace GitVersion.Core.Tests.IntegrationTests;

/// <summary>
/// Integration scenarios covering version calculation on GitFlow hotfix branches.
/// </summary>
[TestFixture]
public class HotfixBranchScenarios : TestBase
{
    // Hotfix taken from the latest release on main, tagged, merged back to
    // main and develop; asserts the semver at every step.
    [Test]
    // This test actually validates #465 as well
    public void PatchLatestReleaseExample()
    {
        using var fixture = new BaseGitFlowRepositoryFixture("1.2.0");
        // create hotfix
        Commands.Checkout(fixture.Repository, MainBranch);
        Commands.Checkout(fixture.Repository, fixture.Repository.CreateBranch("hotfix-1.2.1"));

        fixture.Repository.MakeACommit();
        fixture.AssertFullSemver("1.2.1-beta.1+1");
        fixture.Repository.MakeACommit();
        fixture.AssertFullSemver("1.2.1-beta.1+2");
        fixture.Repository.ApplyTag("1.2.1-beta.1");
        fixture.AssertFullSemver("1.2.1-beta.1");
        fixture.Repository.MakeACommit();
        fixture.AssertFullSemver("1.2.1-beta.2+3");

        // Merge hotfix branch to main
        Commands.Checkout(fixture.Repository, MainBranch);
        fixture.Repository.MergeNoFF("hotfix-1.2.1", Generate.SignatureNow());
        fixture.AssertFullSemver("1.2.1+4");

        fixture.Repository.ApplyTag("1.2.1");
        fixture.AssertFullSemver("1.2.1");

        // Verify develop version
        Commands.Checkout(fixture.Repository, "develop");
        fixture.AssertFullSemver("1.3.0-alpha.1");

        fixture.Repository.MergeNoFF("hotfix-1.2.1", Generate.SignatureNow());
        fixture.AssertFullSemver("1.3.0-alpha.5");
    }

    // Verifies the "hotfixes/x.y.z" (plural, slash-separated) branch naming
    // also yields the hotfix version.
    [Test]
    public void CanTakeVersionFromHotfixesBranch()
    {
        using var fixture = new BaseGitFlowRepositoryFixture(r =>
        {
            r.MakeATaggedCommit("1.0.0");
            r.MakeATaggedCommit("1.1.0");
            r.MakeATaggedCommit("2.0.0");
        });

        // Merge hotfix branch to support
        Commands.Checkout(fixture.Repository, MainBranch);
        Commands.Checkout(fixture.Repository, fixture.Repository.CreateBranch("support-1.1", (Commit)fixture.Repository.Tags.Single(t => t.FriendlyName == "1.1.0").Target));
        fixture.AssertFullSemver("1.1.0");

        // create hotfix branch
        Commands.Checkout(fixture.Repository, fixture.Repository.CreateBranch("hotfixes/1.1.1"));
        fixture.AssertFullSemver("1.1.0"); // We are still on a tagged commit
        fixture.Repository.MakeACommit();
        fixture.AssertFullSemver("1.1.1-beta.1+1");
        fixture.Repository.MakeACommit();
        fixture.AssertFullSemver("1.1.1-beta.1+2");
    }

    // Hotfix created off a support branch for an older release, with a feature
    // branch and pull-request ref in the middle, then merged back to support.
    [Test]
    public void PatchOlderReleaseExample()
    {
        using var fixture = new BaseGitFlowRepositoryFixture(r =>
        {
            r.MakeATaggedCommit("1.0.0");
            r.MakeATaggedCommit("1.1.0");
            r.MakeATaggedCommit("2.0.0");
        });

        // Merge hotfix branch to support
        Commands.Checkout(fixture.Repository, MainBranch);
        var tag = fixture.Repository.Tags.Single(t => t.FriendlyName == "1.1.0");
        var supportBranch = fixture.Repository.CreateBranch("support-1.1", (Commit)tag.Target);
        Commands.Checkout(fixture.Repository, supportBranch);
        fixture.AssertFullSemver("1.1.0");

        // create hotfix branch
        Commands.Checkout(fixture.Repository, fixture.Repository.CreateBranch("hotfix-1.1.1"));
        fixture.AssertFullSemver("1.1.0"); // We are still on a tagged commit
        fixture.Repository.MakeACommit();
        fixture.AssertFullSemver("1.1.1-beta.1+1");
        fixture.Repository.MakeACommit();
        fixture.AssertFullSemver("1.1.1-beta.1+2");

        // Create feature branch off hotfix branch and complete
        Commands.Checkout(fixture.Repository, fixture.Repository.CreateBranch("feature/fix"));
        fixture.AssertFullSemver("1.1.1-fix.1+2");
        fixture.Repository.MakeACommit();
        fixture.AssertFullSemver("1.1.1-fix.1+3");

        fixture.Repository.CreatePullRequestRef("feature/fix", "hotfix-1.1.1", normalise: true, prNumber: 8);
        fixture.AssertFullSemver("1.1.1-PullRequest0008.4");
        Commands.Checkout(fixture.Repository, "hotfix-1.1.1");
        fixture.Repository.MergeNoFF("feature/fix", Generate.SignatureNow());
        fixture.AssertFullSemver("1.1.1-beta.1+4");

        // Merge hotfix into support branch to complete hotfix
        Commands.Checkout(fixture.Repository, "support-1.1");
        fixture.Repository.MergeNoFF("hotfix-1.1.1", Generate.SignatureNow());
        fixture.AssertFullSemver("1.1.1+5");
        fixture.Repository.ApplyTag("1.1.1");
        fixture.AssertFullSemver("1.1.1");

        // Verify develop version
        Commands.Checkout(fixture.Repository, "develop");
        fixture.AssertFullSemver("2.1.0-alpha.1");
        fixture.Repository.MergeNoFF("support-1.1", Generate.SignatureNow());
        fixture.AssertFullSemver("2.1.0-alpha.7");
    }

    /// <summary>
    /// Create a feature branch from a hotfix branch, and merge back, then delete it
    /// </summary>
    [Test]
    public void FeatureOnHotfixFeatureBranchDeleted()
    {
        var config = new Config
        {
            AssemblyVersioningScheme = AssemblyVersioningScheme.MajorMinorPatchTag,
            VersioningMode = VersioningMode.ContinuousDeployment
        };

        using var fixture = new EmptyRepositoryFixture();
        const string release450 = "release/4.5.0";
        const string hotfix451 = "hotfix/4.5.1";
        const string support45 = "support/4.5";
        const string tag450 = "4.5.0";
        const string featureBranch = "feature/some-bug-fix";

        fixture.Repository.MakeACommit("initial");
        fixture.Repository.CreateBranch("develop");
        Commands.Checkout(fixture.Repository, "develop");

        // create release branch
        fixture.Repository.CreateBranch(release450);
        Commands.Checkout(fixture.Repository, release450);
        fixture.AssertFullSemver("4.5.0-beta.0", config);
        fixture.Repository.MakeACommit("blabla");
        Commands.Checkout(fixture.Repository, "develop");
        fixture.Repository.MergeNoFF(release450, Generate.SignatureNow());
        Commands.Checkout(fixture.Repository, MainBranch);
        fixture.Repository.MergeNoFF(release450, Generate.SignatureNow());

        // create support branch
        fixture.Repository.CreateBranch(support45);
        Commands.Checkout(fixture.Repository, support45);
        fixture.Repository.ApplyTag(tag450);
        fixture.AssertFullSemver("4.5.0", config);

        // create hotfix branch
        fixture.Repository.CreateBranch(hotfix451);
        Commands.Checkout(fixture.Repository, hotfix451);

        // feature branch from hotfix
        fixture.Repository.CreateBranch(featureBranch);
        Commands.Checkout(fixture.Repository, featureBranch);
        fixture.Repository.MakeACommit("blabla"); // commit 1
        Commands.Checkout(fixture.Repository, hotfix451);
        fixture.Repository.MergeNoFF(featureBranch, Generate.SignatureNow()); // commit 2
        fixture.Repository.Branches.Remove(featureBranch);
        fixture.AssertFullSemver("4.5.1-beta.2", config);
    }

    /// <summary>
    /// Create a feature branch from a hotfix branch, and merge back, but don't delete it
    /// </summary>
    [Test]
    public void FeatureOnHotfixFeatureBranchNotDeleted()
    {
        var config = new Config
        {
            AssemblyVersioningScheme = AssemblyVersioningScheme.MajorMinorPatchTag,
            VersioningMode = VersioningMode.ContinuousDeployment
        };

        using var fixture = new EmptyRepositoryFixture();
        const string release450 = "release/4.5.0";
        const string hotfix451 = "hotfix/4.5.1";
        const string support45 = "support/4.5";
        const string tag450 = "4.5.0";
        const string featureBranch = "feature/some-bug-fix";

        fixture.Repository.MakeACommit("initial");
        fixture.Repository.CreateBranch("develop");
        Commands.Checkout(fixture.Repository, "develop");

        // create release branch
        fixture.Repository.CreateBranch(release450);
        Commands.Checkout(fixture.Repository, release450);
        fixture.AssertFullSemver("4.5.0-beta.0", config);
        fixture.Repository.MakeACommit("blabla");
        Commands.Checkout(fixture.Repository, "develop");
        fixture.Repository.MergeNoFF(release450, Generate.SignatureNow());
        Commands.Checkout(fixture.Repository, MainBranch);
        fixture.Repository.MergeNoFF(release450, Generate.SignatureNow());

        // create support branch
        fixture.Repository.CreateBranch(support45);
        Commands.Checkout(fixture.Repository, support45);
        fixture.Repository.ApplyTag(tag450);
        fixture.AssertFullSemver("4.5.0", config);

        // create hotfix branch
        fixture.Repository.CreateBranch(hotfix451);
        Commands.Checkout(fixture.Repository, hotfix451);

        // feature branch from hotfix
        fixture.Repository.CreateBranch(featureBranch);
        Commands.Checkout(fixture.Repository, featureBranch);
        fixture.Repository.MakeACommit("blabla"); // commit 1
        Commands.Checkout(fixture.Repository, hotfix451);
        fixture.Repository.MergeNoFF(featureBranch, Generate.SignatureNow()); // commit 2
        fixture.AssertFullSemver("4.5.1-beta.2", config);
    }
}
using System; using System.Collections.Generic; using System.Diagnostics.CodeAnalysis; using System.Globalization; using System.Linq; using System.Linq.Expressions; using System.Runtime.Versioning; using NuGet.Resources; using NuGet.V3Interop; namespace NuGet { public static class PackageRepositoryExtensions { public static IPackage FindPackage( this IPackageRepository repository, string packageId, SemanticVersion version, IPackageConstraintProvider constraintProvider, bool allowPrereleaseVersions, bool allowUnlisted) { if (repository == null) { throw new ArgumentNullException("repository"); } if (packageId == null) { throw new ArgumentNullException("packageId"); } // if an explicit version is specified, disregard the 'allowUnlisted' argument // and always allow unlisted packages. if (version != null) { allowUnlisted = true; } else if (!allowUnlisted && (constraintProvider == null || constraintProvider == NullConstraintProvider.Instance)) { var packageLatestLookup = repository as ILatestPackageLookup; if (packageLatestLookup != null) { IPackage package; if (packageLatestLookup.TryFindLatestPackageById(packageId, allowPrereleaseVersions, out package)) { return package; } } } // If the repository implements it's own lookup then use that instead. // This is an optimization that we use so we don't have to enumerate packages for // sources that don't need to. 
var packageLookup = repository as IPackageLookup; if (packageLookup != null && version != null) { return packageLookup.FindPackage(packageId, version); } IEnumerable<IPackage> packages = repository.FindPackagesById(packageId); packages = packages.ToList() .OrderByDescending(p => p.Version); if (!allowUnlisted) { packages = packages.Where(PackageExtensions.IsListed); } if (version != null) { packages = packages.Where(p => p.Version == version); } else if (constraintProvider != null) { packages = DependencyResolveUtility.FilterPackagesByConstraints(constraintProvider, packages, packageId, allowPrereleaseVersions); } return packages.FirstOrDefault(); } public static IDisposable StartOperation(this IPackageRepository self, string operation, string mainPackageId, string mainPackageVersion) { var packages = repository.FindPackages(packageId, versionSpec, allowPrereleaseVersions, allowUnlisted); if (constraintProvider != null) { packages = DependencyResolveUtility.FilterPackagesByConstraints(constraintProvider, packages, packageId, allowPrereleaseVersions); } IOperationAwareRepository repo = self as IOperationAwareRepository; if (repo != null) { return repo.StartOperation(operation, mainPackageId, mainPackageVersion); } return DisposableAction.NoOp; } public static bool Exists(this IPackageRepository repository, IPackageName package) { return repository.Exists(package.Id, package.Version); } public static bool Exists(this IPackageRepository repository, string packageId) { return Exists(repository, packageId, version: null); } public static bool Exists(this IPackageRepository repository, string packageId, SemanticVersion version) { IPackageLookup packageLookup = repository as IPackageLookup; if ((packageLookup != null) && !String.IsNullOrEmpty(packageId) && (version != null)) { return packageLookup.Exists(packageId, version); } return repository.FindPackage(packageId, version) != null; } public static bool TryFindPackage(this IPackageRepository repository, string 
packageId, SemanticVersion version, out IPackage package) { package = repository.FindPackage(packageId, version); return package != null; } public static IPackage FindPackage(this IPackageRepository repository, string packageId) { return repository.FindPackage(packageId, version: null); } public static IPackage FindPackage(this IPackageRepository repository, string packageId, SemanticVersion version) { // Default allow pre release versions to true here because the caller typically wants to find all packages in this scenario for e.g when checking if a // a package is already installed in the local repository. The same applies to allowUnlisted. return FindPackage(repository, packageId, version, NullConstraintProvider.Instance, allowPrereleaseVersions: true, allowUnlisted: true); } public static IPackage FindPackage(this IPackageRepository repository, string packageId, SemanticVersion version, bool allowPrereleaseVersions, bool allowUnlisted) { return FindPackage(repository, packageId, version, NullConstraintProvider.Instance, allowPrereleaseVersions, allowUnlisted); } public static IPackage FindPackage(this IPackageRepository repository, string packageId, IVersionSpec versionSpec, IPackageConstraintProvider constraintProvider, bool allowPrereleaseVersions, bool allowUnlisted) { var packages = repository.FindPackages(packageId, versionSpec, allowPrereleaseVersions, allowUnlisted); if (constraintProvider != null) { packages = FilterPackagesByConstraints(constraintProvider, packages, packageId, allowPrereleaseVersions); } return packages.FirstOrDefault(); } public static IEnumerable<IPackage> FindPackages(this IPackageRepository repository, IEnumerable<string> packageIds) { if (packageIds == null) { throw new ArgumentNullException("packageIds"); } // If we're in V3-land, find packages using that API var v3Repo = repository as IV3InteropRepository; if (v3Repo != null) { return packageIds.SelectMany(id => v3Repo.FindPackagesById(id)).ToList(); } else { return 
FindPackages(repository, packageIds, GetFilterExpression); } } public static IEnumerable<IPackage> FindPackagesById(this IPackageRepository repository, string packageId) { var directRepo = repository as IV3InteropRepository; if (directRepo != null) { return directRepo.FindPackagesById(packageId); } var serviceBasedRepository = repository as IPackageLookup; if (serviceBasedRepository != null) { return serviceBasedRepository.FindPackagesById(packageId).ToList(); } else { return FindPackagesByIdCore(repository, packageId); } } internal static IEnumerable<IPackage> FindPackagesByIdCore(IPackageRepository repository, string packageId) { var cultureRepository = repository as ICultureAwareRepository; if (cultureRepository != null) { packageId = packageId.ToLower(cultureRepository.Culture); } else { packageId = packageId.ToLower(CultureInfo.CurrentCulture); } return (from p in repository.GetPackages() where p.Id.ToLower() == packageId orderby p.Id select p).ToList(); } /// <summary> /// Since Odata dies when our query for updates is too big. We query for updates 10 packages at a time /// and return the full list of packages. 
/// </summary>
private static IEnumerable<IPackage> FindPackages<T>(
    this IPackageRepository repository,
    IEnumerable<T> items,
    Func<IEnumerable<T>, Expression<Func<IPackage, bool>>> filterSelector)
{
    // Batch the items 10 at a time so the generated OData filter stays small.
    const int batchSize = 10;
    while (items.Any())
    {
        IEnumerable<T> currentItems = items.Take(batchSize);
        Expression<Func<IPackage, bool>> filterExpression = filterSelector(currentItems);
        var query = repository.GetPackages()
            .Where(filterExpression)
            .OrderBy(p => p.Id);
        foreach (var package in query)
        {
            yield return package;
        }
        items = items.Skip(batchSize);
    }
}

/// <summary>
/// Finds all packages matching the id, newest version first, filtered by the
/// version spec and listed/prerelease switches.
/// </summary>
public static IEnumerable<IPackage> FindPackages(
    this IPackageRepository repository,
    string packageId,
    IVersionSpec versionSpec,
    bool allowPrereleaseVersions,
    bool allowUnlisted)
{
    if (repository == null)
    {
        throw new ArgumentNullException("repository");
    }
    if (packageId == null)
    {
        throw new ArgumentNullException("packageId");
    }
    IEnumerable<IPackage> packages = repository.FindPackagesById(packageId)
        .OrderByDescending(p => p.Version);
    if (!allowUnlisted)
    {
        packages = packages.Where(PackageExtensions.IsListed);
    }
    if (versionSpec != null)
    {
        packages = packages.FindByVersion(versionSpec);
    }
    packages = FilterPackagesByConstraints(NullConstraintProvider.Instance, packages, packageId, allowPrereleaseVersions);
    return packages;
}

/// <summary>
/// Finds the first (highest-version) package matching the id/version spec.
/// </summary>
public static IPackage FindPackage(
    this IPackageRepository repository,
    string packageId,
    IVersionSpec versionSpec,
    bool allowPrereleaseVersions,
    bool allowUnlisted)
{
    return repository.FindPackages(packageId, versionSpec, allowPrereleaseVersions, allowUnlisted).FirstOrDefault();
}

/// <summary>
/// From the candidate ids, returns the packages that declare a dependency on
/// <paramref name="package"/> satisfied by its version (and by any per-id constraint).
/// </summary>
public static IEnumerable<IPackage> FindCompatiblePackages(this IPackageRepository repository,
    IPackageConstraintProvider constraintProvider,
    IEnumerable<string> packageIds,
    IPackage package,
    FrameworkName targetFramework,
    bool allowPrereleaseVersions)
{
    return (from p in repository.FindPackages(packageIds)
            where allowPrereleaseVersions || p.IsReleaseVersion()
            let dependency = p.FindDependency(package.Id, targetFramework)
            let otherConstaint = constraintProvider.GetConstraint(p.Id)
            where dependency != null &&
                  dependency.VersionSpec.Satisfies(package.Version) &&
                  (otherConstaint == null || otherConstaint.Satisfies(package.Version))
            select p);
}

/// <summary>
/// Finds the dependency on <paramref name="packageId"/> (case-insensitive) among the
/// package's dependencies that are compatible with the target framework.
/// </summary>
public static PackageDependency FindDependency(this IPackageMetadata package, string packageId, FrameworkName targetFramework)
{
    return (from dependency in package.GetCompatiblePackageDependencies(targetFramework)
            where dependency.Id.Equals(packageId, StringComparison.OrdinalIgnoreCase)
            select dependency).FirstOrDefault();
}

/// <summary>
/// Searches the repository with no target-framework filter.
/// </summary>
public static IQueryable<IPackage> Search(this IPackageRepository repository, string searchTerm, bool allowPrereleaseVersions)
{
    return Search(repository, searchTerm, targetFrameworks: Enumerable.Empty<string>(), allowPrereleaseVersions: allowPrereleaseVersions);
}

/// <summary>
/// Searches the repository, delegating to IServiceBasedRepository when supported;
/// otherwise performs a local Find over GetPackages().
/// </summary>
public static IQueryable<IPackage> Search(this IPackageRepository repository, string searchTerm, IEnumerable<string> targetFrameworks, bool allowPrereleaseVersions, bool includeDelisted = false)
{
    if (targetFrameworks == null)
    {
        throw new ArgumentNullException("targetFrameworks");
    }
    var serviceBasedRepository = repository as IServiceBasedRepository;
    if (serviceBasedRepository != null)
    {
        return serviceBasedRepository.Search(searchTerm, targetFrameworks, allowPrereleaseVersions, includeDelisted);
    }
    // Ignore the target framework if the repository doesn't support searching
    var result = repository
        .GetPackages()
        .Find(searchTerm)
        .FilterByPrerelease(allowPrereleaseVersions);
    if (includeDelisted == false)
    {
        result = result.Where(p => p.IsListed());
    }
    return result.AsQueryable();
}

/// <summary>
/// Resolves a dependency with no constraint provider and lowest-version selection.
/// </summary>
public static IPackage ResolveDependency(this IPackageRepository repository, PackageDependency dependency, bool allowPrereleaseVersions, bool preferListedPackages)
{
    return ResolveDependency(repository, dependency, constraintProvider: null, allowPrereleaseVersions: allowPrereleaseVersions, preferListedPackages: preferListedPackages, dependencyVersion: DependencyVersion.Lowest);
}

public
static IPackage ResolveDependency(this IPackageRepository repository, PackageDependency dependency, IPackageConstraintProvider constraintProvider, bool allowPrereleaseVersions, bool preferListedPackages)
{
    // Overload defaulting to lowest-version dependency selection.
    return ResolveDependency(repository, dependency, constraintProvider, allowPrereleaseVersions, preferListedPackages, dependencyVersion: DependencyVersion.Lowest);
}

/// <summary>
/// Resolves a dependency, delegating to the repository's own IDependencyResolver
/// implementation when it provides one.
/// </summary>
public static IPackage ResolveDependency(this IPackageRepository repository, PackageDependency dependency, IPackageConstraintProvider constraintProvider, bool allowPrereleaseVersions, bool preferListedPackages, DependencyVersion dependencyVersion)
{
    IDependencyResolver dependencyResolver = repository as IDependencyResolver;
    if (dependencyResolver != null)
    {
        return dependencyResolver.ResolveDependency(dependency, constraintProvider, allowPrereleaseVersions, preferListedPackages, dependencyVersion);
    }
    return ResolveDependencyCore(repository, dependency, constraintProvider, allowPrereleaseVersions, preferListedPackages, dependencyVersion);
}

/// <summary>
/// Default dependency resolution: gathers candidates by id, applies constraints,
/// then optionally prefers listed packages before falling back to all candidates.
/// </summary>
internal static IPackage ResolveDependencyCore(
    this IPackageRepository repository,
    PackageDependency dependency,
    IPackageConstraintProvider constraintProvider,
    bool allowPrereleaseVersions,
    bool preferListedPackages,
    DependencyVersion dependencyVersion)
{
    if (repository == null)
    {
        throw new ArgumentNullException("repository");
    }
    if (dependency == null)
    {
        throw new ArgumentNullException("dependency");
    }
    IEnumerable<IPackage> packages = repository.FindPackagesById(dependency.Id).ToList();
    // Always filter by constraints when looking for dependencies
    packages = FilterPackagesByConstraints(constraintProvider, packages, dependency.Id, allowPrereleaseVersions);
    IList<IPackage> candidates = packages.ToList();
    if (preferListedPackages)
    {
        // pick among Listed packages first
        IPackage listedSelectedPackage = ResolveDependencyCore(
            candidates.Where(PackageExtensions.IsListed),
            dependency,
            dependencyVersion);
        if (listedSelectedPackage != null)
        {
            return listedSelectedPackage;
        }
    }
    // No listed match (or listed packages not preferred): consider all candidates.
    return ResolveDependencyCore(candidates, dependency, dependencyVersion);
}

/// <summary>
/// From the list of packages <paramref name="packages"/>, selects the package that best
/// matches the <paramref name="dependency"/>.
/// </summary>
/// <param name="packages">The list of packages.</param>
/// <param name="dependency">The dependency used to select package from the list.</param>
/// <param name="dependencyVersion">Indicates the method used to select dependency.
/// Applicable only when dependency.VersionSpec is not null.</param>
/// <returns>The selected package.</returns>
private static IPackage ResolveDependencyCore(
    IEnumerable<IPackage> packages,
    PackageDependency dependency,
    DependencyVersion dependencyVersion)
{
    // If version info was specified then use it
    if (dependency.VersionSpec != null)
    {
        packages = packages.FindByVersion(dependency.VersionSpec).OrderBy(p => p.Version);
        return packages.SelectDependency(dependencyVersion);
    }
    else
    {
        // BUG 840: If no version info was specified then pick the latest
        return packages.OrderByDescending(p => p.Version)
            .FirstOrDefault();
    }
}

/// <summary>
/// Returns updates for packages from the repository
/// </summary>
/// <param name="repository">The repository to search for updates</param>
/// <param name="packages">Packages to look for updates</param>
/// <param name="includePrerelease">Indicates whether to consider prerelease updates.</param>
/// <param name="includeAllVersions">Indicates whether to include all versions of an update as opposed to only including the latest version.</param>
public static IEnumerable<IPackage> GetUpdates(
    this IPackageRepository repository,
    IEnumerable<IPackageName> packages,
    bool includePrerelease,
    bool includeAllVersions,
    IEnumerable<FrameworkName> targetFrameworks = null,
    IEnumerable<IVersionSpec> versionConstraints = null)
{
    if (packages.IsEmpty())
    {
        return Enumerable.Empty<IPackage>();
    }
    // Delegate to the service-based implementation when the repository offers one.
    var serviceBasedRepository = repository as IServiceBasedRepository;
    return
serviceBasedRepository != null ?
    serviceBasedRepository.GetUpdates(packages, includePrerelease, includeAllVersions, targetFrameworks, versionConstraints) :
    repository.GetUpdatesCore(packages, includePrerelease, includeAllVersions, targetFrameworks, versionConstraints);
}

/// <summary>
/// Local update computation: for each input package, finds repository candidates with a
/// higher version that support the target frameworks and satisfy the per-package
/// version constraint. versionConstraints, when supplied, must be parallel to packages.
/// </summary>
public static IEnumerable<IPackage> GetUpdatesCore(
    this IPackageRepository repository,
    IEnumerable<IPackageName> packages,
    bool includePrerelease,
    bool includeAllVersions,
    IEnumerable<FrameworkName> targetFramework,
    IEnumerable<IVersionSpec> versionConstraints)
{
    List<IPackageName> packageList = packages.ToList();
    if (!packageList.Any())
    {
        return Enumerable.Empty<IPackage>();
    }
    IList<IVersionSpec> versionConstraintList;
    if (versionConstraints == null)
    {
        // No constraints supplied: a null entry per package (null = unconstrained below).
        versionConstraintList = new IVersionSpec[packageList.Count];
    }
    else
    {
        versionConstraintList = versionConstraints.ToList();
    }
    if (packageList.Count != versionConstraintList.Count)
    {
        throw new ArgumentException(NuGetResources.GetUpdatesParameterMismatch);
    }
    // These are the packages that we need to look at for potential updates.
    ILookup<string, IPackage> sourcePackages = GetUpdateCandidates(repository, packageList, includePrerelease)
        .ToList()
        .ToLookup(package => package.Id, StringComparer.OrdinalIgnoreCase);
    var results = new List<IPackage>();
    for (int i = 0; i < packageList.Count; i++)
    {
        var package = packageList[i];
        var constraint = versionConstraintList[i];
        var updates = from candidate in sourcePackages[package.Id]
                      where (candidate.Version > package.Version) &&
                            SupportsTargetFrameworks(targetFramework, candidate) &&
                            (constraint == null || constraint.Satisfies(candidate.Version))
                      select candidate;
        results.AddRange(updates);
    }
    if (!includeAllVersions)
    {
        // Keep only the latest version per id.
        return results.CollapseById();
    }
    return results;
}

// True when no target frameworks were requested, or the candidate supports at least one.
private static bool SupportsTargetFrameworks(IEnumerable<FrameworkName> targetFramework, IPackage package)
{
    return targetFramework.IsEmpty() || targetFramework.Any(t => VersionUtility.IsCompatible(t, package.GetSupportedFrameworks()));
}

/// <summary>
/// Returns a clone of the repository when it supports cloning; otherwise the
/// repository itself.
/// </summary>
public static IPackageRepository Clone(this IPackageRepository repository)
{
    var cloneableRepository = repository as ICloneableRepository;
    if (cloneableRepository != null)
    {
        return cloneableRepository.Clone();
    }
    return repository;
}

/// <summary>
/// Since odata dies when our query for updates is too big. We query for updates 10 packages at a time
/// and return the full list of candidates for updates.
/// </summary>
private static IEnumerable<IPackage> GetUpdateCandidates(
    IPackageRepository repository,
    IEnumerable<IPackageName> packages,
    bool includePrerelease)
{
    var query = FindPackages(repository, packages, GetFilterExpression);
    if (!includePrerelease)
    {
        query = query.Where(p => p.IsReleaseVersion());
    }
    // for updates, we never consider unlisted packages
    query = query.Where(PackageExtensions.IsListed);
    return query;
}

/// <summary>
/// For the list of input packages generate an expression like:
/// p => p.Id == 'package1id' or p.Id == 'package2id' or p.Id == 'package3id'... up to package n
/// </summary>
private static Expression<Func<IPackage, bool>> GetFilterExpression(IEnumerable<IPackageName> packages)
{
    return GetFilterExpression(packages.Select(p => p.Id));
}

[SuppressMessage("Microsoft.Globalization", "CA1304:SpecifyCultureInfo", MessageId = "System.String.ToLower", Justification = "This is for a linq query")]
private static Expression<Func<IPackage, bool>> GetFilterExpression(IEnumerable<string> ids)
{
    // Build an OR-chain of id equality checks over a single IPackageName parameter.
    ParameterExpression parameterExpression = Expression.Parameter(typeof(IPackageName));
    Expression expressionBody = ids.Select(id => GetCompareExpression(parameterExpression, id.ToLower()))
        .Aggregate(Expression.OrElse);
    return Expression.Lambda<Func<IPackage, bool>>(expressionBody, parameterExpression);
}

/// <summary>
/// Builds the expression: package.Id.ToLower() == "somepackageid"
/// </summary>
private static Expression GetCompareExpression(Expression parameterExpression, object value)
{
    // package.Id
    Expression propertyExpression = Expression.Property(parameterExpression, "Id");
    // .ToLower()
    Expression toLowerExpression = Expression.Call(propertyExpression, typeof(string).GetMethod("ToLower", Type.EmptyTypes));
    // == localPackage.Id
    return Expression.Equal(toLowerExpression, Expression.Constant(value));
}

/// <summary>
/// Applies the constraint provider's version constraint for the id, then optionally
/// removes prerelease versions.
/// </summary>
private static IEnumerable<IPackage> FilterPackagesByConstraints(
    IPackageConstraintProvider constraintProvider,
    IEnumerable<IPackage> packages,
    string packageId,
    bool allowPrereleaseVersions)
{
    constraintProvider = constraintProvider ?? NullConstraintProvider.Instance;
    // Filter packages by this constraint
    IVersionSpec constraint = constraintProvider.GetConstraint(packageId);
    if (constraint != null)
    {
        packages = packages.FindByVersion(constraint);
    }
    if (!allowPrereleaseVersions)
    {
        packages = packages.Where(p => p.IsReleaseVersion());
    }
    return packages;
}

/// <summary>
/// Selects the dependency package from the list of candidate packages
/// according to <paramref name="dependencyVersion"/>.
/// </summary> /// <param name="packages">The list of candidate packages.</param> /// <param name="dependencyVersion">The rule used to select the package from /// <paramref name="packages"/> </param> /// <returns>The selected package.</returns> /// <remarks>Precondition: <paramref name="packages"/> are ordered by ascending version.</remarks> internal static IPackage SelectDependency(this IEnumerable<IPackage> packages, DependencyVersion dependencyVersion) { if (packages == null || !packages.Any()) { return null; } if (dependencyVersion == DependencyVersion.Lowest) { return packages.FirstOrDefault(); } else if (dependencyVersion == DependencyVersion.Highest) { return packages.LastOrDefault(); } else if (dependencyVersion == DependencyVersion.HighestPatch) { var groups = from p in packages group p by new { p.Version.Version.Major, p.Version.Version.Minor } into g orderby g.Key.Major, g.Key.Minor select g; return (from p in groups.First() orderby p.Version descending select p).FirstOrDefault(); } else if (dependencyVersion == DependencyVersion.HighestMinor) { var groups = from p in packages group p by new { p.Version.Version.Major } into g orderby g.Key.Major select g; return (from p in groups.First() orderby p.Version descending select p).FirstOrDefault(); } throw new ArgumentOutOfRangeException("dependencyVersion"); } } }
using System;
using System.Collections;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Threading;

namespace Orleans.Runtime
{
    // This class implements an LRU cache of values. It keeps a bounded set of values and will
    // flush "old" values
    internal class LRU<TKey, TValue> : IEnumerable<KeyValuePair<TKey, TValue>>
    {
        // The following machinery is used to notify client objects when a key and its value
        // is being flushed from the cache.
        // The client's event handler is called after the key has been removed from the cache,
        // but when the cache is in a consistent state so that other methods on the cache may freely
        // be invoked.
        public event Action RaiseFlushEvent;

        // Monotonically increasing counter; higher Generation = more recently created/touched.
        private long nextGeneration = 0;
        // Eviction watermark: entries with Generation <= this are eligible for removal.
        private long generationToFree = 0;
        // Maximum age an entry may reach before reads treat it as expired.
        private readonly TimeSpan requiredFreshness;

        // We want this to be a reference type so that we can update the values in the cache
        // without having to call AddOrUpdate, which is a nuisance
        private class TimestampedValue : IEquatable<TimestampedValue>
        {
            // NOTE(review): CoarseStopwatch is declared elsewhere — presumably a cheap
            // monotonic timer started at construction; confirm its resolution guarantees.
            public readonly CoarseStopwatch Age;
            public readonly TValue Value;
            public long Generation;

            public TimestampedValue(LRU<TKey, TValue> l, TValue v)
            {
                Generation = Interlocked.Increment(ref l.nextGeneration);
                Value = v;
                Age = CoarseStopwatch.StartNew();
            }

            public override bool Equals(object obj) => obj is TimestampedValue value && Equals(value);

            public bool Equals(TimestampedValue other) => ReferenceEquals(this, other) || Age == other.Age && EqualityComparer<TValue>.Default.Equals(Value, other.Value) && Generation == other.Generation;

            public override int GetHashCode() => HashCode.Combine(Age, Value, Generation);
        }

        private readonly ConcurrentDictionary<TKey, TimestampedValue> cache = new();
        // Tracked separately from cache.Count so it can be maintained with Interlocked ops.
        private int count;

        public int Count => count;
        public int MaximumSize { get; }

        /// <summary>
        /// Creates a new LRU cache.
        /// </summary>
        /// <param name="maxSize">Maximum number of entries to allow.</param>
        /// <param name="maxAge">Maximum age of an entry.</param>
        public LRU(int maxSize, TimeSpan maxAge)
        {
            if (maxSize <= 0)
            {
                throw new ArgumentOutOfRangeException(nameof(maxSize), "LRU maxSize must be greater than 0");
            }
            MaximumSize = maxSize;
            requiredFreshness = maxAge;
        }

        /// <summary>
        /// Adds or replaces the value for a key, evicting old entries first if the
        /// cache is at capacity.
        /// </summary>
        public void Add(TKey key, TValue value)
        {
            var result = new TimestampedValue(this, value);
            AdjustSize();
            // add/update delegates can be called multiple times, but only the last result counts
            var added = false;
            cache.AddOrUpdate(key, _ =>
            {
                added = true;
                return result;
            }, (_, old) =>
            {
                added = false;
                // if multiple values are added at once for the same key, take the newest one
                return old.Age.Elapsed >= result.Age.Elapsed && old.Generation > result.Generation ? old : result;
            });
            if (added) Interlocked.Increment(ref count);
        }

        public bool ContainsKey(TKey key) => cache.ContainsKey(key);

        /// <summary>
        /// Removes the entry for a key; returns false when the key was not present.
        /// </summary>
        public bool RemoveKey(TKey key)
        {
            if (!cache.TryRemove(key, out _)) return false;
            Interlocked.Decrement(ref count);
            return true;
        }

        /// <summary>
        /// Conditionally removes a key: the predicate sees the current value and the
        /// removal only succeeds if that exact entry is still present (compare-and-remove).
        /// </summary>
        public bool TryRemove<T>(TKey key, Func<T, TValue, bool> predicate, T context)
        {
            if (!cache.TryGetValue(key, out var timestampedValue))
            {
                return false;
            }
            if (predicate(context, timestampedValue.Value) && TryRemove(key, timestampedValue))
            {
                Interlocked.Decrement(ref count);
                return true;
            }
            return false;
        }

        // Removes the key only if it still maps to this exact TimestampedValue instance.
        private bool TryRemove(TKey key, TimestampedValue value)
        {
            var entry = new KeyValuePair<TKey, TimestampedValue>(key, value);
#if NET5_0_OR_GREATER
            return cache.TryRemove(entry);
#else
            // Cast the dictionary to its interface type to access the explicitly implemented Remove method.
            var cacheDictionary = (IDictionary<TKey, TimestampedValue>)cache;
            return cacheDictionary.Remove(entry);
#endif
        }

        /// <summary>
        /// Empties the cache, raising the flush event once per entry present beforehand.
        /// </summary>
        public void Clear()
        {
            if (RaiseFlushEvent is { } FlushEvent)
            {
                foreach (var _ in cache) FlushEvent();
            }
            // not thread-safe: if anything is added, or even removed after addition, between Clear and Count, count may be off
            cache.Clear();
            Interlocked.Exchange(ref count, 0);
        }

        /// <summary>
        /// Looks up a key. Expired entries are removed (raising the flush event) and
        /// reported as a miss; hits refresh the entry's generation (LRU touch).
        /// </summary>
        public bool TryGetValue(TKey key, out TValue value)
        {
            if (cache.TryGetValue(key, out var result))
            {
                var age = result.Age.Elapsed;
                if (age > requiredFreshness)
                {
                    if (RemoveKey(key)) RaiseFlushEvent?.Invoke();
                }
                else
                {
                    result.Generation = Interlocked.Increment(ref nextGeneration);
                    value = result.Value;
                    return true;
                }
            }
            value = default;
            return false;
        }

        // Convenience lookup: default(TValue) on a miss.
        public TValue Get(TKey key)
        {
            TryGetValue(key, out var value);
            return value;
        }

        /// <summary>
        /// Remove all expired value from the LRU instance.
        /// </summary>
        public void RemoveExpired()
        {
            foreach (var entry in this.cache)
            {
                if (entry.Value.Age.Elapsed > requiredFreshness)
                {
                    if (RemoveKey(entry.Key)) RaiseFlushEvent?.Invoke();
                }
            }
        }

        // Evicts entries (raising the flush event) until the cache is below MaximumSize.
        // Eviction scans for any entry at or below the advancing generation watermark,
        // so it approximates — rather than exactly implements — least-recently-used order.
        private void AdjustSize()
        {
            while (Count >= MaximumSize)
            {
                long generationToDelete = Interlocked.Increment(ref generationToFree);
                foreach (var e in cache)
                {
                    if (e.Value.Generation <= generationToDelete)
                    {
                        if (RemoveKey(e.Key)) RaiseFlushEvent?.Invoke();
                        break;
                    }
                }
            }
        }

        public IEnumerator<KeyValuePair<TKey, TValue>> GetEnumerator()
        {
            return cache.Select(p => new KeyValuePair<TKey, TValue>(p.Key, p.Value.Value)).GetEnumerator();
        }

        IEnumerator IEnumerable.GetEnumerator() => GetEnumerator();
    }
}
// Track BuildR
// Available on the Unity3D Asset Store
// Copyright (c) 2013 Jasper Stocker http://support.jasperstocker.com
// For support contact email@jasperstocker.com
//
// THIS CODE AND INFORMATION ARE PROVIDED "AS IS" WITHOUT WARRANTY OF ANY
// KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A
// PARTICULAR PURPOSE.

#if UNITY_EDITOR
using System.IO;
using System.Text;
using System.Xml;
#endif
using UnityEngine;

/// <summary>
/// Top-level component for a Track BuildR track: owns the track data
/// (TrackBuildRTrack), drives the mesh generator, holds editor UI state, and
/// provides XML/KML import-export (editor only).
/// </summary>
[ExecuteInEditMode]
public class TrackBuildR : MonoBehaviour
{
    public static float VERSION_NUMBER = 1.2f;
    // Version the serialized data was saved with; compared in UpgradeData().
    public float version = VERSION_NUMBER;
    public TrackBuildRTrack track;
    public TrackBuildRGenerator generator;
    public GameObject trackEditorPreview = null;

    //CUSTOM EDITOR VALUES
    public enum modes { track, boundary, bumpers, textures, terrain, stunt, diagram, options, export }
    public enum pointModes { transform, controlpoint, trackup, trackpoint, add, remove }
    public enum boundaryModes { transform, controlpoint }
    public enum textureModes { track, boundary, offroad, bumpers }
    public enum terrainModes { mergeTerrain, conformTrack }
    public enum stuntModes
    {
        loop,
        jump,
        //twist, TODO: twist creation
        jumptwist
    }
    public modes mode = modes.track;
    public pointModes pointMode = pointModes.transform;
    public boundaryModes boundaryMode = boundaryModes.transform;
    public textureModes textureMode = textureModes.track;
    public stuntModes stuntMode = stuntModes.loop;
    public terrainModes terrainMode = terrainModes.mergeTerrain;

    //export
    public enum fileTypes { Fbx, Obj }
    public fileTypes fileType = fileTypes.Fbx;
    public string exportFilename = "exportedTrack";
    public bool copyTexturesIntoExportFolder = true;
    public bool exportCollider = true;
    public bool createPrefabOnExport = true;
    public bool includeTangents = false;

    //preview
    public float previewPercentage = 0;
    public bool previewForward = true;
    public float previewStartPoint = 0.0f;

    // Pass-through flags exposing the underlying track's generated state.
    public bool tangentsGenerated { get { return track.tangentsGenerated; } }
    public bool lightmapGenerated { get { return track.lightmapGenerated; } }
    public bool optimised { get { return track.optimised; } }

    /// <summary>
    /// Creates the initial track: a four-point closed loop with control points,
    /// the mesh generator, and the diagram quad used by the editor.
    /// </summary>
    public void Init()
    {
        track = gameObject.AddComponent<TrackBuildRTrack>();
        track.InitTextures();
        track.baseTransform = transform;
        TrackBuildRPoint p0 = gameObject.AddComponent<TrackBuildRPoint>();// ScriptableObject.CreateInstance<TrackBuildRPoint>();
        TrackBuildRPoint p1 = gameObject.AddComponent<TrackBuildRPoint>();//ScriptableObject.CreateInstance<TrackBuildRPoint>();
        TrackBuildRPoint p2 = gameObject.AddComponent<TrackBuildRPoint>();//ScriptableObject.CreateInstance<TrackBuildRPoint>();
        TrackBuildRPoint p3 = gameObject.AddComponent<TrackBuildRPoint>();//ScriptableObject.CreateInstance<TrackBuildRPoint>();
        p0.baseTransform = transform;
        p1.baseTransform = transform;
        p2.baseTransform = transform;
        p3.baseTransform = transform;
        // Four corners of a 40x40 starting loop on the XZ plane.
        p0.position = new Vector3(-20, 0, -20);
        p1.position = new Vector3(20, 0, -20);
        p2.position = new Vector3(20, 0, 20);
        p3.position = new Vector3(-20, 0, 20);
        p0.forwardControlPoint = new Vector3(0, 0, -20);
        p1.forwardControlPoint = new Vector3(40, 0, -20);
        p2.forwardControlPoint = new Vector3(0, 0, 20);
        p3.forwardControlPoint = new Vector3(-40, 0, 20);
        p0.leftForwardControlPoint = new Vector3(-15, 0, -20);
        p1.leftForwardControlPoint = new Vector3(25, 0, -20);
        p2.leftForwardControlPoint = new Vector3(5, 0, 20);
        p3.leftForwardControlPoint = new Vector3(-35, 0, 20);
        p0.rightForwardControlPoint = new Vector3(15, 0, -20);
        p1.rightForwardControlPoint = new Vector3(55, 0, -20);
        p2.rightForwardControlPoint = new Vector3(-5, 0, 20);
        p3.rightForwardControlPoint = new Vector3(-45, 0, 20);
        track.AddPoint(p0);
        track.AddPoint(p1);
        track.AddPoint(p2);
        track.AddPoint(p3);
        generator = gameObject.AddComponent<TrackBuildRGenerator>();
        ForceFullRecalculation();
        // Unit quad used to display the track diagram texture in the editor.
        track.diagramMesh = new Mesh();
        track.diagramMesh.vertices = new [] { new Vector3(-1, 0, -1), new Vector3(1, 0, -1), new Vector3(-1, 0, 1), new Vector3(1, 0, 1) };
        track.diagramMesh.uv = new [] { new Vector2(0, 0), new Vector2(1, 0), new Vector2(0, 1), new Vector2(1, 1) };
        track.diagramMesh.triangles = new [] { 1, 0, 2, 1, 2, 3 };
        track.diagramGO = new GameObject("Diagram");
        track.diagramGO.transform.parent = transform;
        track.diagramGO.transform.localPosition = Vector3.zero;
        track.diagramGO.AddComponent<MeshFilter>().mesh = track.diagramMesh;
        track.diagramMaterial = new Material(Shader.Find("Unlit/Texture"));
        track.diagramGO.AddComponent<MeshRenderer>().material = track.diagramMaterial;
        track.diagramGO.AddComponent<MeshCollider>().sharedMesh = track.diagramMesh;
    }

    /// <summary>
    /// Regenerates the track meshes and propagates this object's static flag to children.
    /// </summary>
    public void UpdateRender()
    {
        generator.track = track;
        generator.UpdateRender();
        foreach (Transform child in GetComponentsInChildren<Transform>())
        {
            child.gameObject.isStatic = gameObject.isStatic;
        }
    }

    /// <summary>
    /// Marks every point dirty, recalculates all curves and re-renders.
    /// </summary>
    public void ForceFullRecalculation()
    {
        int numberOfPoints = track.realNumberOfPoints;
        for (int i = 0; i < numberOfPoints; i++)
            track[i].isDirty = true;
        track.RecalculateCurves();
        UpdateRender();
    }

    public void GenerateSecondaryUVSet()
    {
        track.GenerateSecondaryUVSet();
    }

    public void GenerateTangents()
    {
        track.SolveTangents();
    }

    public void OptimseMeshes()
    {
        track.OptimseMeshes();
    }

    void OnDestroy()
    {
        track.Clear();
    }

    void OnDrawGizmos()
    {
        // NOTE(review): only guards on curve count — appears to draw nothing; possibly
        // a leftover stub. Confirm before removing.
        int numberOfPoints = track.numberOfCurves;
        if (numberOfPoints < 1)
            return;
    }

    public void Clear()
    {
        track.Clear();
    }

    void OnEnable()
    {
        // Diagram quad is an edit-time aid only: hide it in play mode.
        if (track != null)
            if (track.diagramGO != null)
                track.diagramGO.SetActive(!Application.isPlaying && track.showDiagram);
    }

    void Start()
    {
        if (!Application.isPlaying)
        {
            //only update the models when we're editing.
            UpgradeData();
        }
    }

    /// <summary>
    /// Migrates serialized data from older versions of Track BuildR; refuses data
    /// saved by a newer version.
    /// </summary>
    private void UpgradeData()
    {
        float currentVersion = VERSION_NUMBER;
        float dataVersion = version;
        if (currentVersion == dataVersion)
        {
            //The data matches the current version of Track Buildr - do nothing.
            return;
        }
        if (currentVersion < dataVersion)
        {
            Debug.LogError("Track BuildR v." + currentVersion + ": Great scot! This data is from the future! (version:" + dataVersion + ") - need to avoid contact to ensure the survival of the universe...");
            return;//don't touch ANYTHING!
        }
        if (dataVersion < 1.1f)
        {
#if UNITY_EDITOR
            if (UnityEditor.EditorUtility.DisplayDialog("WARNING", "Track BuildR 1.1 does not directly support upgrading from 1.0.\nContact me at email@jasperstocker.com for instructions on how to do this.\nIt's not hard to upgrade and\nremember to backup your project!\nJasper", "Ok, I'll email you"))
                Application.OpenURL("mailto:email@jasperstocker.com");
#endif
            Debug.LogWarning("Warning - There is no upgrade path to this version - sorry.");
        }
        version = currentVersion;
    }

#if UNITY_EDITOR
    /// <summary>
    /// Convert this camera path into an xml string for export
    /// </summary>
    /// <returns>A generated XML string</returns>
    public string ToXML()
    {
        StringBuilder sb = new StringBuilder();
        sb.AppendLine("<?xml version='1.0' encoding='ISO-8859-15'?>");
        sb.AppendLine("<!-- Unity3D Asset Track BuildR XML Exporter http://trackbuildr.jasperstocker.com -->");
        sb.AppendLine("<trackbuildr>");
        sb.AppendLine("<version>" + version + "</version>");
        sb.AppendLine("<name>" + name + "</name>");
        sb.AppendLine("<fileType>" + fileType + "</fileType>");
        sb.AppendLine("<exportFilename>" + exportFilename + "</exportFilename>");
        sb.AppendLine("<copyTexturesIntoExportFolder>" + copyTexturesIntoExportFolder + "</copyTexturesIntoExportFolder>");
        sb.AppendLine("<exportCollider>" + exportCollider + "</exportCollider>");
        sb.AppendLine("<createPrefabOnExport>" + createPrefabOnExport + "</createPrefabOnExport>");
        sb.AppendLine("<includeTangents>" + includeTangents + "</includeTangents>");
        sb.Append(track.ToXML());
        sb.AppendLine("</trackbuildr>");
        return sb.ToString();
    }

    /// <summary>
    /// Import XML data into this camera path overwriting the current data
    /// </summary>
    /// <param name="XMLPath">An XML file path</param>
    public void FromXML(string XMLPath)
    {
        Debug.Log("Import Track BuildR Track XML " + XMLPath);
        Clear();
        XmlDocument xml = new XmlDocument();
        using (StreamReader sr = new StreamReader(XMLPath))
        {
            xml.LoadXml(sr.ReadToEnd());
        }
        // NOTE(review): no schema validation — a malformed file throws from the raw
        // node accesses below. Editor-only code, so presumably acceptable.
        XmlNode trackNode = xml.SelectNodes("trackbuildr")[0];
        version = float.Parse(trackNode["version"].FirstChild.Value);
        if (trackNode["name"] != null)
            name = trackNode["name"].FirstChild.Value;
        if (trackNode["fileType"] != null)
        {
            fileType = (fileTypes)System.Enum.Parse(typeof(fileTypes), trackNode["fileType"].FirstChild.Value);
            exportFilename = trackNode["exportFilename"].FirstChild.Value;
            copyTexturesIntoExportFolder = bool.Parse(trackNode["copyTexturesIntoExportFolder"].FirstChild.Value);
            exportCollider = bool.Parse(trackNode["exportCollider"].FirstChild.Value);
            createPrefabOnExport = bool.Parse(trackNode["createPrefabOnExport"].FirstChild.Value);
            includeTangents = bool.Parse(trackNode["includeTangents"].FirstChild.Value);
        }
        //send data to track
        track.FromXML(trackNode.SelectSingleNode("track"));
    }

    /// <summary>
    /// Import XML data into this camera path overwriting the current data
    /// </summary>
    /// <param name="KMLPath">An Google Earth KML file path</param>
    public void FromKML(string KMLPath)
    {
        Debug.Log("Import Google Earth KML " + KMLPath);
        Clear();
        XmlDocument xml = new XmlDocument();
        using (StreamReader sr = new StreamReader(KMLPath))
        {
            xml.LoadXml(sr.ReadToEnd());
        }
        name = xml["kml"]["Document"]["Placemark"]["name"].FirstChild.Value;
        track.FromKML(xml["kml"]["Document"]["Placemark"]["LineString"]["coordinates"].FirstChild.Value);
    }
#endif
}
using System;
using System.Collections;
using System.Collections.Specialized;
using System.Configuration;
using System.IO;
using System.Text;
using System.Windows.Forms;
using System.Xml;
using System.Xml.Serialization;

namespace FeedBuilder
{
    /// <summary>
    /// Custom SettingsProvider that persists application settings to an XML file
    /// (Settings.xml) under the user's ApplicationData folder. Non-roaming values
    /// are stored under a per-machine-name element; roaming values under the root.
    /// </summary>
    public class FeedBuilderSettingsProvider : SettingsProvider, IApplicationSettingsProvider
    {
        //XML Root Node
        private const string SETTINGSROOT = "Settings";

        /// <summary>
        /// Saves current settings, then copies the settings file to another path.
        /// Errors are reported via a message box rather than thrown.
        /// </summary>
        public void SaveAs(string filename)
        {
            try
            {
                Settings.Default.Save();
                string source = Path.Combine(GetAppSettingsPath(), GetAppSettingsFilename());
                File.Copy(source, filename, true);
            }
            catch (Exception ex)
            {
                string msg = string.Format("An error occurred while saving the file: {0}{0}{1}", Environment.NewLine, ex.Message);
                MessageBox.Show(msg, "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
        }

        /// <summary>
        /// Replaces the settings file with an external copy (no-op when the source
        /// already is the settings file). Errors are reported via a message box.
        /// </summary>
        public void LoadFrom(string filename)
        {
            try
            {
                string dest = Path.Combine(GetAppSettingsPath(), GetAppSettingsFilename());
                if (filename == dest) return;
                Settings.Default.Reset();
                File.Copy(filename, dest, true);
            }
            catch (Exception ex)
            {
                string msg = string.Format("An error occurred while loading the file: {0}{0}{1}", Environment.NewLine, ex.Message);
                MessageBox.Show(msg, "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
        }

        public override void Initialize(string name, NameValueCollection col)
        {
            base.Initialize(ApplicationName, col);
            // Best-effort: make sure the settings folder exists; IO failures are ignored
            // here and handled again at save time.
            if (!Directory.Exists(GetAppSettingsPath()))
            {
                try
                {
                    Directory.CreateDirectory(GetAppSettingsPath());
                }
                catch (IOException) { }
            }
        }

        public override string ApplicationName
        {
            get { return "FeedBuilder"; }
            //Do nothing
            set { }
        }

        // Folder holding the settings file: %APPDATA%\FeedBuilder.
        public virtual string GetAppSettingsPath()
        {
            return Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData), ApplicationName);
        }

        public virtual string GetAppSettingsFilename()
        {
            return "Settings.xml";
        }

        /// <summary>
        /// Writes the supplied property values into the XML document and saves it.
        /// </summary>
        public override void SetPropertyValues(SettingsContext context, SettingsPropertyValueCollection propvals)
        {
            //Iterate through the settings to be stored
            //Only dirty settings are included in propvals, and only ones relevant to this provider
            foreach (SettingsPropertyValue propval in propvals)
            {
                SetValue(propval);
            }
            try
            {
                if (!Directory.Exists(GetAppSettingsPath()))
                    Directory.CreateDirectory(GetAppSettingsPath());
                SettingsXML.Save(Path.Combine(GetAppSettingsPath(), GetAppSettingsFilename()));
            }
            catch (Exception)
            {
                //Ignore if cant save, device been ejected
            }
        }

        /// <summary>
        /// Reads each requested property from the XML document into a value collection.
        /// </summary>
        public override SettingsPropertyValueCollection GetPropertyValues(SettingsContext context, SettingsPropertyCollection props)
        {
            //Create new collection of values
            SettingsPropertyValueCollection values = new SettingsPropertyValueCollection();
            //Iterate through the settings to be retrieved
            foreach (SettingsProperty setting in props)
            {
                SettingsPropertyValue value = new SettingsPropertyValue(setting)
                {
                    IsDirty = false,
                    SerializedValue = GetValue(setting)
                };
                values.Add(value);
            }
            return values;
        }

        private XmlDocument m_SettingsXML;

        // Lazily-loaded backing document; creates an empty document on any load failure.
        private XmlDocument SettingsXML
        {
            get
            {
                //If we dont hold an xml document, try opening one.
                //If it doesnt exist then create a new one ready.
                if (m_SettingsXML == null)
                {
                    m_SettingsXML = new XmlDocument();
                    try
                    {
                        m_SettingsXML.Load(Path.Combine(GetAppSettingsPath(), GetAppSettingsFilename()));
                        XmlNode node = m_SettingsXML.SelectSingleNode(string.Format("{0}/*", SETTINGSROOT));
                        // Adopt configuration if it is from another machine.
                        if (node != null && node.Name != Environment.MachineName)
                        {
                            // Re-home the first machine element under this machine's name so
                            // non-roaming settings copied from another machine still apply.
                            XmlNode machineNode = m_SettingsXML.CreateElement(Environment.MachineName);
                            while (node.ChildNodes.Count > 0)
                            {
                                machineNode.AppendChild(node.FirstChild);
                            }
                            node.ParentNode.AppendChild(machineNode);
                            node.ParentNode.RemoveChild(node);
                        }
                    }
                    catch (Exception)
                    {
                        //Create new document
                        XmlDeclaration dec = m_SettingsXML.CreateXmlDeclaration("1.0", "utf-8", string.Empty);
                        m_SettingsXML.AppendChild(dec);
                        XmlNode nodeRoot = m_SettingsXML.CreateNode(XmlNodeType.Element, SETTINGSROOT, "");
                        m_SettingsXML.AppendChild(nodeRoot);
                    }
                }
                return m_SettingsXML;
            }
        }

        /// <summary>
        /// Reads one setting from the XML document: InnerXml for collection types,
        /// InnerText otherwise. Any failure falls back to the property's default value.
        /// NOTE(review): IsRoaming is defined outside this view — presumably keyed off
        /// SettingsManageabilityAttribute; confirm.
        /// </summary>
        private string GetValue(SettingsProperty setting)
        {
            string ret = null;
            try
            {
                string path = IsRoaming(setting) ? string.Format("{0}/{1}", SETTINGSROOT, setting.Name) : string.Format("{0}/{1}/{2}", SETTINGSROOT, Environment.MachineName, setting.Name);
                if (setting.PropertyType.BaseType != null && setting.PropertyType.BaseType.Name == "CollectionBase")
                {
                    // Collections were serialized as XML fragments, so read raw InnerXml.
                    XmlNode selectSingleNode = SettingsXML.SelectSingleNode(path);
                    if (selectSingleNode != null) ret = selectSingleNode.InnerXml;
                }
                else
                {
                    XmlNode singleNode = SettingsXML.SelectSingleNode(path);
                    if (singleNode != null) ret = singleNode.InnerText;
                }
            }
            catch (Exception)
            {
                // Swallow and fall back to the declared default (empty string when none).
                ret = (setting.DefaultValue != null) ? setting.DefaultValue.ToString() : string.Empty;
            }
            return ret;
        }

        /// <summary>
        /// Writes one setting into the XML document, creating the setting element
        /// (and the machine-name element for non-roaming settings) as needed.
        /// </summary>
        private void SetValue(SettingsPropertyValue propVal)
        {
            XmlElement SettingNode;
            //Determine if the setting is roaming.
            //If roaming then the value is stored as an element under the root
            //Otherwise it is stored under a machine name node
            try
            {
                if (IsRoaming(propVal.Property))
                {
                    SettingNode = (XmlElement)SettingsXML.SelectSingleNode(SETTINGSROOT + "/" + propVal.Name);
                }
                else
                {
                    SettingNode = (XmlElement)SettingsXML.SelectSingleNode(SETTINGSROOT + "/" + Environment.MachineName + "/" + propVal.Name);
                }
            }
            catch (Exception)
            {
                SettingNode = null;
            }
            //Check to see if the node exists, if so then set its new value
            if ((SettingNode != null))
            {
                //SettingNode.InnerText = propVal.SerializedValue.ToString
                SetSerializedValue(SettingNode, propVal);
            }
            else
            {
                if (IsRoaming(propVal.Property))
                {
                    //Store the value as an element of the Settings Root Node
                    SettingNode = SettingsXML.CreateElement(propVal.Name);
                    //SettingNode.InnerText = propVal.SerializedValue.ToString
                    SetSerializedValue(SettingNode, propVal);
                    XmlNode selectSingleNode = SettingsXML.SelectSingleNode(SETTINGSROOT);
                    if (selectSingleNode != null) selectSingleNode.AppendChild(SettingNode);
                }
                else
                {
                    //Its machine specific, store as an element of the machine name node,
                    //creating a new machine name node if one doesnt exist.
                    XmlElement MachineNode;
                    try
                    {
                        MachineNode = (XmlElement)SettingsXML.SelectSingleNode(SETTINGSROOT + "/" + Environment.MachineName);
                    }
                    catch (Exception)
                    {
                        MachineNode = SettingsXML.CreateElement(Environment.MachineName);
                        XmlNode selectSingleNode = SettingsXML.SelectSingleNode(SETTINGSROOT);
                        if (selectSingleNode != null) selectSingleNode.AppendChild(MachineNode);
                    }
                    // SelectSingleNode returns null (not an exception) on a miss, so this
                    // second check is what actually creates the machine element normally.
                    if (MachineNode == null)
                    {
                        MachineNode = SettingsXML.CreateElement(Environment.MachineName);
                        XmlNode selectSingleNode = SettingsXML.SelectSingleNode(SETTINGSROOT);
                        if (selectSingleNode != null) selectSingleNode.AppendChild(MachineNode);
                    }
                    SettingNode = SettingsXML.CreateElement(propVal.Name);
                    //SettingNode.InnerText = propVal.SerializedValue.ToString
                    SetSerializedValue(SettingNode, propVal);
                    MachineNode.AppendChild(SettingNode);
                }
            }
        }

        /// <summary>
        /// Serializes the property value into the element: collections are
        /// XML-serialized (no declaration, no namespaces) into InnerXml; everything
        /// else uses the framework-serialized string. (Continues past this view.)
        /// </summary>
        private void SetSerializedValue(XmlElement node, SettingsPropertyValue propVal)
        {
            if (propVal.Property.PropertyType.BaseType != null && propVal.Property.PropertyType.BaseType.Name == "CollectionBase")
            {
                StringBuilder builder = new StringBuilder();
                XmlSerializerNamespaces ns = new XmlSerializerNamespaces();
                XmlWriterSettings xsettings = new XmlWriterSettings();
                ns.Add("", "");
                xsettings.OmitXmlDeclaration = true;
                XmlWriter xmlWriter = XmlWriter.Create(builder, xsettings);
                XmlSerializer s = new XmlSerializer(propVal.Property.PropertyType);
                s.Serialize(xmlWriter, propVal.PropertyValue, ns);
                xmlWriter.Close();
                node.InnerXml = builder.ToString();
            }
            else
                node.InnerText = propVal.SerializedValue != null ?
propVal.SerializedValue.ToString() : string.Empty; } private bool IsRoaming(SettingsProperty prop) { //Determine if the setting is marked as Roaming foreach (DictionaryEntry d in prop.Attributes) { Attribute a = (Attribute)d.Value; if (a is SettingsManageabilityAttribute) return true; } return false; } public void Reset(SettingsContext context) { string settingsFilePath = Path.Combine(GetAppSettingsPath(), GetAppSettingsFilename()); File.Delete(settingsFilePath); m_SettingsXML = null; } public SettingsPropertyValue GetPreviousVersion(SettingsContext context, SettingsProperty property) { return null; } public void Upgrade(SettingsContext context, SettingsPropertyCollection properties) { } } }
// *********************************************************************** // Copyright (c) 2012 Charlie Poole // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. // *********************************************************************** using System; using System.Collections.Generic; using System.Threading; using System.Reflection; using NUnit.Compatibility; using NUnit.Framework.Internal.Commands; using NUnit.Framework.Interfaces; namespace NUnit.Framework.Internal.Execution { /// <summary> /// A CompositeWorkItem represents a test suite and /// encapsulates the execution of the suite as well /// as all its child tests. 
/// </summary> public class CompositeWorkItem : WorkItem { // static Logger log = InternalTrace.GetLogger("CompositeWorkItem"); private TestSuite _suite; private TestSuiteResult _suiteResult; private ITestFilter _childFilter; private TestCommand _setupCommand; private TestCommand _teardownCommand; private List<WorkItem> _children; /// <summary> /// List of Child WorkItems /// </summary> public List<WorkItem> Children { get { return _children; } private set { _children = value; } } /// <summary> /// A count of how many tests in the work item have a value for the Order Property /// </summary> private int _countOrder; private CountdownEvent _childTestCountdown; /// <summary> /// Construct a CompositeWorkItem for executing a test suite /// using a filter to select child tests. /// </summary> /// <param name="suite">The TestSuite to be executed</param> /// <param name="childFilter">A filter used to select child tests</param> public CompositeWorkItem(TestSuite suite, ITestFilter childFilter) : base(suite) { _suite = suite; _suiteResult = Result as TestSuiteResult; _childFilter = childFilter; _countOrder = 0; } /// <summary> /// Method that actually performs the work. Overridden /// in CompositeWorkItem to do setup, run all child /// items and then do teardown. /// </summary> protected override void PerformWork() { // Inititialize actions, setup and teardown // We can't do this in the constructor because // the context is not available at that point. InitializeSetUpAndTearDownCommands(); if (!CheckForCancellation()) if (Test.RunState == RunState.Explicit && !_childFilter.IsExplicitMatch(Test)) SkipFixture(ResultState.Explicit, GetSkipReason(), null); else switch (Test.RunState) { default: case RunState.Runnable: case RunState.Explicit: // Assume success, since the result will be inconclusive // if there is no setup method to run or if the // context initialization fails. 
Result.SetResult(ResultState.Success); CreateChildWorkItems(); if (_children.Count > 0) { PerformOneTimeSetUp(); if (!CheckForCancellation()) switch (Result.ResultState.Status) { case TestStatus.Passed: RunChildren(); return; // Just return: completion event will take care // of TestFixtureTearDown when all tests are done. case TestStatus.Skipped: case TestStatus.Inconclusive: case TestStatus.Failed: SkipChildren(_suite, Result.ResultState.WithSite(FailureSite.Parent), "OneTimeSetUp: " + Result.Message); break; } // Directly execute the OneTimeFixtureTearDown for tests that // were skipped, failed or set to inconclusive in one time setup // unless we are aborting. if (Context.ExecutionStatus != TestExecutionStatus.AbortRequested) PerformOneTimeTearDown(); } break; case RunState.Skipped: SkipFixture(ResultState.Skipped, GetSkipReason(), null); break; case RunState.Ignored: SkipFixture(ResultState.Ignored, GetSkipReason(), null); break; case RunState.NotRunnable: SkipFixture(ResultState.NotRunnable, GetSkipReason(), GetProviderStackTrace()); break; } // Fall through in case nothing was run. // Otherwise, this is done in the completion event. WorkItemComplete(); } #region Helper Methods private bool CheckForCancellation() { if (Context.ExecutionStatus != TestExecutionStatus.Running) { Result.SetResult(ResultState.Cancelled, "Test cancelled by user"); return true; } return false; } private void InitializeSetUpAndTearDownCommands() { List<SetUpTearDownItem> setUpTearDownItems = _suite.TypeInfo != null ? CommandBuilder.BuildSetUpTearDownList(_suite.TypeInfo.Type, typeof(OneTimeSetUpAttribute), typeof(OneTimeTearDownAttribute)) : new List<SetUpTearDownItem>(); var actionItems = new List<TestActionItem>(); foreach (ITestAction action in Actions) { // Special handling here for ParameterizedMethodSuite is a bit ugly. 
However, // it is needed because Tests are not supposed to know anything about Action // Attributes (or any attribute) and Attributes don't know where they were // initially applied unless we tell them. // // ParameterizedMethodSuites and individual test cases both use the same // MethodInfo as a source of attributes. We handle the Test and Default targets // in the test case, so we don't want to doubly handle it here. bool applyToSuite = (action.Targets & ActionTargets.Suite) == ActionTargets.Suite || action.Targets == ActionTargets.Default && !(Test is ParameterizedMethodSuite); bool applyToTest = (action.Targets & ActionTargets.Test) == ActionTargets.Test && !(Test is ParameterizedMethodSuite); if (applyToSuite) actionItems.Add(new TestActionItem(action)); if (applyToTest) Context.UpstreamActions.Add(action); } _setupCommand = CommandBuilder.MakeOneTimeSetUpCommand(_suite, setUpTearDownItems, actionItems); _teardownCommand = CommandBuilder.MakeOneTimeTearDownCommand(_suite, setUpTearDownItems, actionItems); } private void PerformOneTimeSetUp() { try { _setupCommand.Execute(Context); // SetUp may have changed some things in the environment Context.UpdateContextFromEnvironment(); } catch (Exception ex) { if (ex is NUnitException || ex is TargetInvocationException) ex = ex.InnerException; Result.RecordException(ex, FailureSite.SetUp); } } private void RunChildren() { int childCount = _children.Count; if (childCount == 0) throw new InvalidOperationException("RunChildren called but item has no children"); _childTestCountdown = new CountdownEvent(childCount); foreach (WorkItem child in _children) { if (CheckForCancellation()) break; child.Completed += new EventHandler(OnChildCompleted); child.InitializeContext(new TestExecutionContext(Context)); Context.Dispatcher.Dispatch(child); childCount--; } if (childCount > 0) { while (childCount-- > 0) CountDownChildTest(); } } private void CreateChildWorkItems() { _children = new List<WorkItem>(); foreach (ITest test in 
_suite.Tests) { if (_childFilter.Pass(test)) { var child = WorkItem.CreateWorkItem(test, _childFilter); child.WorkerId = this.WorkerId; #if !PORTABLE && !SILVERLIGHT && !NETCF if (child.TargetApartment == ApartmentState.Unknown && TargetApartment != ApartmentState.Unknown) child.TargetApartment = TargetApartment; #endif if (test.Properties.ContainsKey(PropertyNames.Order)) { _children.Insert(0, child); _countOrder++; } else { _children.Add(child); } } } if (_countOrder !=0) SortChildren(); } private class WorkItemOrderComparer : IComparer<WorkItem> { /// <summary> /// Compares two objects and returns a value indicating whether one is less than, equal to, or greater than the other. /// </summary> /// <returns> /// A signed integer that indicates the relative values of <paramref name="x"/> and <paramref name="y"/>, as shown in the following table.Value Meaning Less than zero<paramref name="x"/> is less than <paramref name="y"/>.Zero<paramref name="x"/> equals <paramref name="y"/>.Greater than zero<paramref name="x"/> is greater than <paramref name="y"/>. /// </returns> /// <param name="x">The first object to compare.</param><param name="y">The second object to compare.</param> public int Compare(WorkItem x, WorkItem y) { var xKey = int.MaxValue; var yKey = int.MaxValue; if (x.Test.Properties.ContainsKey(PropertyNames.Order)) xKey =(int)x.Test.Properties[PropertyNames.Order][0]; if (y.Test.Properties.ContainsKey(PropertyNames.Order)) yKey =(int)y.Test.Properties[PropertyNames.Order][0]; return xKey.CompareTo(yKey); } } /// <summary> /// Sorts tests under this suite. 
/// </summary> private void SortChildren() { _children.Sort(0, _countOrder, new WorkItemOrderComparer()); } private void SkipFixture(ResultState resultState, string message, string stackTrace) { Result.SetResult(resultState.WithSite(FailureSite.SetUp), message, StackFilter.Filter(stackTrace)); SkipChildren(_suite, resultState.WithSite(FailureSite.Parent), "OneTimeSetUp: " + message); } private void SkipChildren(TestSuite suite, ResultState resultState, string message) { foreach (Test child in suite.Tests) { if (_childFilter.Pass(child)) { TestResult childResult = child.MakeTestResult(); childResult.SetResult(resultState, message); _suiteResult.AddResult(childResult); // Some runners may depend on getting the TestFinished event // even for tests that have been skipped at a higher level. Context.Listener.TestFinished(childResult); if (child.IsSuite) SkipChildren((TestSuite)child, resultState, message); } } } private void PerformOneTimeTearDown() { // Our child tests or even unrelated tests may have // executed on the same thread since the time that // this test started, so we have to re-establish // the proper execution environment this.Context.EstablishExecutionEnvironment(); _teardownCommand.Execute(this.Context); } private string GetSkipReason() { return (string)Test.Properties.Get(PropertyNames.SkipReason); } private string GetProviderStackTrace() { return (string)Test.Properties.Get(PropertyNames.ProviderStackTrace); } private object _completionLock = new object(); private void OnChildCompleted(object sender, EventArgs e) { lock (_completionLock) { WorkItem childTask = sender as WorkItem; if (childTask != null) { childTask.Completed -= new EventHandler(OnChildCompleted); _suiteResult.AddResult(childTask.Result); if (Context.StopOnError && childTask.Result.ResultState.Status == TestStatus.Failed) Context.ExecutionStatus = TestExecutionStatus.StopRequested; // Check to see if all children completed CountDownChildTest(); } } } private void CountDownChildTest() { 
_childTestCountdown.Signal(); if (_childTestCountdown.CurrentCount == 0) { if (Context.ExecutionStatus != TestExecutionStatus.AbortRequested) PerformOneTimeTearDown(); foreach (var childResult in _suiteResult.Children) if (childResult.ResultState == ResultState.Cancelled) { this.Result.SetResult(ResultState.Cancelled, "Cancelled by user"); break; } WorkItemComplete(); } } private static bool IsStaticClass(Type type) { return type.GetTypeInfo().IsAbstract && type.GetTypeInfo().IsSealed; } private object cancelLock = new object(); /// <summary> /// Cancel (abort or stop) a CompositeWorkItem and all of its children /// </summary> /// <param name="force">true if the CompositeWorkItem and all of its children should be aborted, false if it should allow all currently running tests to complete</param> public override void Cancel(bool force) { lock (cancelLock) { if (_children == null) return; foreach (var child in _children) { var ctx = child.Context; if (ctx != null) ctx.ExecutionStatus = force ? TestExecutionStatus.AbortRequested : TestExecutionStatus.StopRequested; if (child.State == WorkItemState.Running) child.Cancel(force); } } } #endregion } }
using System; using System.Windows.Forms; namespace UltraSFV { public partial class Options : Form { private UltraSFV _fm; #region Constructor public Options(UltraSFV parent) { _fm = parent; InitializeComponent(); ParseUserSettings(); } #endregion #region Form Events private void Options_Load(object sender, EventArgs e) { this.TopMost = Properties.Settings.Default.AlwaysOnTop; this.checkBoxUseRecycleBin.CheckedChanged += new EventHandler(checkBoxUseRecycleBin_CheckedChanged); } #endregion #region Button Events private void buttonOK_Click(object sender, EventArgs e) { SaveUserSettings(); this.DialogResult = DialogResult.OK; this.Close(); } private void buttonCancel_Click(object sender, EventArgs e) { this.DialogResult = DialogResult.Cancel; this.Close(); } #endregion #region CheckBox Events private void checkBoxDeleteBadFiles_CheckedChanged(object sender, EventArgs e) { if (checkBoxDeleteBadFiles.Checked) { checkBoxUseRecycleBin.Enabled = true; } else { checkBoxUseRecycleBin.Enabled = false; } } private void checkBoxUseRecycleBin_CheckedChanged(object sender, EventArgs e) { if (!checkBoxUseRecycleBin.Checked) { if (MessageBox.Show("Warning: Disabling this feature will cause bad files to be permanently deleted!\n\nAre you sure you want to do this?", "Dangerous Option", MessageBoxButtons.YesNo, MessageBoxIcon.Warning) == DialogResult.No) checkBoxUseRecycleBin.Checked = true; } } private void checkBoxCloseWhenCheckingFinished_CheckedChanged(object sender, EventArgs e) { if (checkBoxCloseWhenCheckingFinished.Checked) { checkBoxCloseOnlyWhenAllGood.Enabled = true; } else { checkBoxCloseOnlyWhenAllGood.Enabled = false; } } private void checkBoxKeepFileLog_CheckedChanged(object sender, EventArgs e) { if (checkBoxKeepFileLog.Checked) { checkBoxLogGood.Enabled = true; checkBoxLogBad.Enabled = true; checkBoxLogMissing.Enabled = true; checkBoxLogSkipped.Enabled = true; checkBoxLogLocked.Enabled = true; } else { checkBoxLogGood.Enabled = false; checkBoxLogBad.Enabled = false; 
checkBoxLogMissing.Enabled = false; checkBoxLogSkipped.Enabled = false; checkBoxLogLocked.Enabled = false; } } #endregion #region Parse/Save User Settings private void ParseUserSettings() { // General Tab checkBoxRememberWindowLocation.Checked = Properties.Settings.Default.RememberWindoLocation; checkBoxReuseWindows.Checked = Properties.Settings.Default.ReuseWindows; checkBoxEnableSounds.Checked = Properties.Settings.Default.EnableSounds; checkBoxAlwaysOnTop.Checked = Properties.Settings.Default.AlwaysOnTop; checkBoxCheckForUpdates.Checked = Properties.Settings.Default.CheckForUpdates; checkBoxKeepFileLog.Checked = Properties.Settings.Default.KeepFileLog; checkBoxLogGood.Checked = Properties.Settings.Default.LogGood; checkBoxLogBad.Checked = Properties.Settings.Default.LogBad; checkBoxLogMissing.Checked = Properties.Settings.Default.LogMissing; checkBoxLogSkipped.Checked = Properties.Settings.Default.LogSkipped; checkBoxLogLocked.Checked = Properties.Settings.Default.LogLocked; // File List Tab checkBoxShowColumn1.Checked = Properties.Settings.Default.ShowColumn1; checkBoxShowColumn2.Checked = Properties.Settings.Default.ShowColumn2; checkBoxShowColumn3.Checked = Properties.Settings.Default.ShowColumn3; checkBoxShowColumn4.Checked = Properties.Settings.Default.ShowColumn4; checkBoxShowColumn5.Checked = Properties.Settings.Default.ShowColumn5; checkBoxAutomaticallyScrollFileList.Checked = Properties.Settings.Default.AutomaticallyScrollFileList; checkBoxDoubleClickToOpen.Checked = Properties.Settings.Default.DoubleClickToOpen; checkBoxFullRowSelect.Checked = Properties.Settings.Default.ListFullRowSelect; checkBoxShowGridLines.Checked = Properties.Settings.Default.ListShowGridLines; checkBoxAlternateRowColors.Checked = Properties.Settings.Default.AlternateRowColors; labelColorSample.BackColor = Properties.Settings.Default.AlternateRowColor; // Checking Tab checkBoxRenameBadFiles.Checked = Properties.Settings.Default.RenameBadFiles; checkBoxDeleteBadFiles.Checked = 
Properties.Settings.Default.DeleteBadFiles; checkBoxUseRecycleBin.Checked = Properties.Settings.Default.UseRecycleBinWhenDeleting; checkBoxCloseWhenCheckingFinished.Checked = Properties.Settings.Default.CloseWhenCheckingFinished; checkBoxCloseOnlyWhenAllGood.Checked = Properties.Settings.Default.CloseOnlyWhenAllGood; // Creating Tab checkBoxPromptForFileName.Checked = Properties.Settings.Default.PromptForFileName; checkBoxAlertWhenFileCreated.Checked = Properties.Settings.Default.AlertWhenFileCreated; checkBoxCloseWhenCreatingFinished.Checked = Properties.Settings.Default.CloseWhenCreatingFinished; } private void SaveUserSettings() { // General Tab Properties.Settings.Default.RememberWindoLocation = checkBoxRememberWindowLocation.Checked; Properties.Settings.Default.ReuseWindows = checkBoxReuseWindows.Checked; Properties.Settings.Default.EnableSounds = checkBoxEnableSounds.Checked; Properties.Settings.Default.AlwaysOnTop = checkBoxAlwaysOnTop.Checked; Properties.Settings.Default.CheckForUpdates = checkBoxCheckForUpdates.Checked; Properties.Settings.Default.KeepFileLog = checkBoxKeepFileLog.Checked; Properties.Settings.Default.LogGood = checkBoxLogGood.Checked; Properties.Settings.Default.LogBad = checkBoxLogBad.Checked; Properties.Settings.Default.LogMissing = checkBoxLogMissing.Checked; Properties.Settings.Default.LogSkipped = checkBoxLogSkipped.Checked; Properties.Settings.Default.LogLocked = checkBoxLogLocked.Checked; // File List Tab Properties.Settings.Default.ShowColumn1 = checkBoxShowColumn1.Checked; Properties.Settings.Default.ShowColumn2 = checkBoxShowColumn2.Checked; Properties.Settings.Default.ShowColumn3 = checkBoxShowColumn3.Checked; Properties.Settings.Default.ShowColumn4 = checkBoxShowColumn4.Checked; Properties.Settings.Default.ShowColumn5 = checkBoxShowColumn5.Checked; Properties.Settings.Default.AutomaticallyScrollFileList = checkBoxAutomaticallyScrollFileList.Checked; Properties.Settings.Default.DoubleClickToOpen = 
checkBoxDoubleClickToOpen.Checked; Properties.Settings.Default.ListShowGridLines = checkBoxShowGridLines.Checked; Properties.Settings.Default.ListFullRowSelect = checkBoxFullRowSelect.Checked; Properties.Settings.Default.AlternateRowColors = checkBoxAlternateRowColors.Checked; Properties.Settings.Default.AlternateRowColor = labelColorSample.BackColor; // Checking Tab Properties.Settings.Default.RenameBadFiles = checkBoxRenameBadFiles.Checked; Properties.Settings.Default.DeleteBadFiles = checkBoxDeleteBadFiles.Checked; Properties.Settings.Default.UseRecycleBinWhenDeleting = checkBoxUseRecycleBin.Checked; Properties.Settings.Default.CloseWhenCheckingFinished = checkBoxCloseWhenCheckingFinished.Checked; Properties.Settings.Default.CloseOnlyWhenAllGood = checkBoxCloseOnlyWhenAllGood.Checked; // Creating Tab Properties.Settings.Default.PromptForFileName = checkBoxPromptForFileName.Checked; Properties.Settings.Default.AlertWhenFileCreated = checkBoxAlertWhenFileCreated.Checked; Properties.Settings.Default.CloseWhenCreatingFinished = checkBoxCloseWhenCreatingFinished.Checked; // Save it and update the main form Properties.Settings.Default.Save(); _fm.UpdateColumnsFromSettings(); } #endregion private void labelColorSample_Click(object sender, EventArgs e) { colorDialog1.Color = Properties.Settings.Default.AlternateRowColor; if (colorDialog1.ShowDialog() == DialogResult.OK) { labelColorSample.BackColor = colorDialog1.Color; } } } }
#region Copyright notice and license // Protocol Buffers - Google's data interchange format // Copyright 2008 Google Inc. All rights reserved. // http://github.com/jskeet/dotnet-protobufs/ // Original C++/Java/Python code: // http://code.google.com/p/protobuf/ // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
#endregion using System; using System.Collections; using System.Collections.Generic; using Google.ProtocolBuffers; using Google.ProtocolBuffers.TestProtos; using Microsoft.VisualStudio.TestTools.UnitTesting; namespace Google.ProtocolBuffers { [TestClass] public class ExtendableBuilderLiteTest { [TestMethod] public void TestHasExtensionT() { TestAllExtensionsLite.Builder builder = TestAllExtensionsLite.CreateBuilder() .SetExtension(UnitTestLiteProtoFile.OptionalInt32ExtensionLite, 123); Assert.IsTrue(builder.HasExtension(UnitTestLiteProtoFile.OptionalInt32ExtensionLite)); } [TestMethod] public void TestHasExtensionTMissing() { TestAllExtensionsLite.Builder builder = TestAllExtensionsLite.CreateBuilder(); Assert.IsFalse(builder.HasExtension(UnitTestLiteProtoFile.OptionalInt32ExtensionLite)); } [TestMethod] public void TestGetExtensionCountT() { TestAllExtensionsLite.Builder builder = TestAllExtensionsLite.CreateBuilder() .AddExtension(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite, 1) .AddExtension(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite, 2) .AddExtension(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite, 3); Assert.AreEqual(3, builder.GetExtensionCount(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite)); } [TestMethod] public void TestGetExtensionCountTEmpty() { TestAllExtensionsLite.Builder builder = TestAllExtensionsLite.CreateBuilder(); Assert.AreEqual(0, builder.GetExtensionCount(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite)); } [TestMethod] public void TestGetExtensionTNull() { TestAllExtensionsLite.Builder builder = TestAllExtensionsLite.CreateBuilder(); string value = builder.GetExtension(UnitTestLiteProtoFile.OptionalStringExtensionLite); Assert.IsNull(value); } [TestMethod] public void TestGetExtensionTValue() { TestAllExtensionsLite.Builder builder = TestAllExtensionsLite.CreateBuilder() .SetExtension(UnitTestLiteProtoFile.OptionalInt32ExtensionLite, 3); Assert.AreEqual(3, 
builder.GetExtension(UnitTestLiteProtoFile.OptionalInt32ExtensionLite)); } [TestMethod] public void TestGetExtensionTEmpty() { TestAllExtensionsLite.Builder builder = TestAllExtensionsLite.CreateBuilder(); Assert.AreEqual(0, builder.GetExtension(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite).Count); } [TestMethod] public void TestGetExtensionTList() { TestAllExtensionsLite.Builder builder = TestAllExtensionsLite.CreateBuilder() .AddExtension(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite, 1) .AddExtension(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite, 2) .AddExtension(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite, 3); IList<int> values = builder.GetExtension(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite); Assert.AreEqual(3, values.Count); } [TestMethod] public void TestGetExtensionTIndex() { TestAllExtensionsLite.Builder builder = TestAllExtensionsLite.CreateBuilder() .AddExtension(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite, 0) .AddExtension(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite, 1) .AddExtension(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite, 2); for (int i = 0; i < 3; i++) Assert.AreEqual(i, builder.GetExtension(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite, i)); } [TestMethod, ExpectedException(typeof(ArgumentOutOfRangeException))] public void TestGetExtensionTIndexOutOfRange() { TestAllExtensionsLite.Builder builder = TestAllExtensionsLite.CreateBuilder(); builder.GetExtension(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite, 0); } [TestMethod] public void TestSetExtensionTIndex() { TestAllExtensionsLite.Builder builder = TestAllExtensionsLite.CreateBuilder() .AddExtension(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite, 0) .AddExtension(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite, 1) .AddExtension(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite, 2); for (int i = 0; i < 3; i++) Assert.AreEqual(i, builder.GetExtension(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite, i)); 
builder.SetExtension(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite, 0, 5); builder.SetExtension(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite, 1, 6); builder.SetExtension(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite, 2, 7); for (int i = 0; i < 3; i++) Assert.AreEqual(5 + i, builder.GetExtension(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite, i)); } [TestMethod, ExpectedException(typeof(ArgumentOutOfRangeException))] public void TestSetExtensionTIndexOutOfRange() { TestAllExtensionsLite.Builder builder = TestAllExtensionsLite.CreateBuilder(); builder.SetExtension(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite, 0, -1); } [TestMethod] public void TestClearExtensionTList() { TestAllExtensionsLite.Builder builder = TestAllExtensionsLite.CreateBuilder() .AddExtension(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite, 0); Assert.AreEqual(1, builder.GetExtensionCount(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite)); builder.ClearExtension(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite); Assert.AreEqual(0, builder.GetExtensionCount(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite)); } [TestMethod] public void TestClearExtensionTValue() { TestAllExtensionsLite.Builder builder = TestAllExtensionsLite.CreateBuilder() .SetExtension(UnitTestLiteProtoFile.OptionalInt32ExtensionLite, 0); Assert.IsTrue(builder.HasExtension(UnitTestLiteProtoFile.OptionalInt32ExtensionLite)); builder.ClearExtension(UnitTestLiteProtoFile.OptionalInt32ExtensionLite); Assert.IsFalse(builder.HasExtension(UnitTestLiteProtoFile.OptionalInt32ExtensionLite)); } [TestMethod] public void TestIndexedByDescriptor() { TestAllExtensionsLite.Builder builder = TestAllExtensionsLite.CreateBuilder(); Assert.IsFalse(builder.HasExtension(UnitTestLiteProtoFile.OptionalInt32ExtensionLite)); builder[UnitTestLiteProtoFile.OptionalInt32ExtensionLite.Descriptor] = 123; Assert.IsTrue(builder.HasExtension(UnitTestLiteProtoFile.OptionalInt32ExtensionLite)); Assert.AreEqual(123, 
builder.GetExtension(UnitTestLiteProtoFile.OptionalInt32ExtensionLite)); } [TestMethod] public void TestIndexedByDescriptorAndOrdinal() { TestAllExtensionsLite.Builder builder = TestAllExtensionsLite.CreateBuilder() .AddExtension(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite, 0); Assert.AreEqual(1, builder.GetExtensionCount(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite)); IFieldDescriptorLite f = UnitTestLiteProtoFile.RepeatedInt32ExtensionLite.Descriptor; builder[f, 0] = 123; Assert.AreEqual(1, builder.GetExtensionCount(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite)); Assert.AreEqual(123, builder.GetExtension(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite, 0)); } [TestMethod, ExpectedException(typeof(ArgumentOutOfRangeException))] public void TestIndexedByDescriptorAndOrdinalOutOfRange() { TestAllExtensionsLite.Builder builder = TestAllExtensionsLite.CreateBuilder(); Assert.AreEqual(0, builder.GetExtensionCount(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite)); IFieldDescriptorLite f = UnitTestLiteProtoFile.RepeatedInt32ExtensionLite.Descriptor; builder[f, 0] = 123; } [TestMethod] public void TestClearFieldByDescriptor() { TestAllExtensionsLite.Builder builder = TestAllExtensionsLite.CreateBuilder() .AddExtension(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite, 0); Assert.AreEqual(1, builder.GetExtensionCount(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite)); IFieldDescriptorLite f = UnitTestLiteProtoFile.RepeatedInt32ExtensionLite.Descriptor; builder.ClearField(f); Assert.AreEqual(0, builder.GetExtensionCount(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite)); } [TestMethod] public void TestAddRepeatedFieldByDescriptor() { TestAllExtensionsLite.Builder builder = TestAllExtensionsLite.CreateBuilder() .AddExtension(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite, 0); Assert.AreEqual(1, builder.GetExtensionCount(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite)); IFieldDescriptorLite f = UnitTestLiteProtoFile.RepeatedInt32ExtensionLite.Descriptor; 
builder.AddRepeatedField(f, 123); Assert.AreEqual(2, builder.GetExtensionCount(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite)); Assert.AreEqual(123, builder.GetExtension(UnitTestLiteProtoFile.RepeatedInt32ExtensionLite, 1)); } [TestMethod] public void TestMissingExtensionsLite() { const int optionalInt32 = 12345678; TestAllExtensionsLite.Builder builder = TestAllExtensionsLite.CreateBuilder(); builder.SetExtension(UnitTestLiteProtoFile.OptionalInt32ExtensionLite, optionalInt32); builder.AddExtension(UnitTestLiteProtoFile.RepeatedDoubleExtensionLite, 1.1); builder.AddExtension(UnitTestLiteProtoFile.RepeatedDoubleExtensionLite, 1.2); builder.AddExtension(UnitTestLiteProtoFile.RepeatedDoubleExtensionLite, 1.3); TestAllExtensionsLite msg = builder.Build(); Assert.IsTrue(msg.HasExtension(UnitTestLiteProtoFile.OptionalInt32ExtensionLite)); Assert.AreEqual(3, msg.GetExtensionCount(UnitTestLiteProtoFile.RepeatedDoubleExtensionLite)); byte[] bits = msg.ToByteArray(); TestAllExtensionsLite copy = TestAllExtensionsLite.ParseFrom(bits); Assert.IsFalse(copy.HasExtension(UnitTestLiteProtoFile.OptionalInt32ExtensionLite)); Assert.AreEqual(0, copy.GetExtensionCount(UnitTestLiteProtoFile.RepeatedDoubleExtensionLite)); Assert.AreNotEqual(msg, copy); //The lite runtime removes all unknown fields and extensions byte[] copybits = copy.ToByteArray(); Assert.AreEqual(0, copybits.Length); } [TestMethod] public void TestMissingFieldsLite() { TestAllTypesLite msg = TestAllTypesLite.CreateBuilder() .SetOptionalInt32(123) .SetOptionalString("123") .Build(); byte[] bits = msg.ToByteArray(); TestAllExtensionsLite copy = TestAllExtensionsLite.ParseFrom(bits); Assert.AreNotEqual(msg, copy); //The lite runtime removes all unknown fields and extensions byte[] copybits = copy.ToByteArray(); Assert.AreEqual(0, copybits.Length); } } }
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for // license information. // // Code generated by Microsoft (R) AutoRest Code Generator 1.0.0.0 // Changes may cause incorrect behavior and will be lost if the code is // regenerated. namespace Microsoft.Azure.Management.Network { using Azure; using Management; using Rest; using Rest.Azure; using Rest.Serialization; using Models; using Newtonsoft.Json; using System.Collections; using System.Collections.Generic; using System.Linq; using System.Net; using System.Net.Http; using System.Threading; using System.Threading.Tasks; /// <summary> /// Composite Swagger for Network Client /// </summary> public partial class NetworkManagementClient : ServiceClient<NetworkManagementClient>, INetworkManagementClient, IAzureClient { /// <summary> /// The base URI of the service. /// </summary> public System.Uri BaseUri { get; set; } /// <summary> /// Gets or sets json serialization settings. /// </summary> public JsonSerializerSettings SerializationSettings { get; private set; } /// <summary> /// Gets or sets json deserialization settings. /// </summary> public JsonSerializerSettings DeserializationSettings { get; private set; } /// <summary> /// Credentials needed for the client to connect to Azure. /// </summary> public ServiceClientCredentials Credentials { get; private set; } /// <summary> /// The subscription credentials which uniquely identify the Microsoft Azure /// subscription. The subscription ID forms part of the URI for every service /// call. /// </summary> public string SubscriptionId { get; set; } /// <summary> /// Gets or sets the preferred language for the response. /// </summary> public string AcceptLanguage { get; set; } /// <summary> /// Gets or sets the retry timeout in seconds for Long Running Operations. /// Default value is 30. /// </summary> public int? 
LongRunningOperationRetryTimeout { get; set; } /// <summary> /// When set to true a unique x-ms-client-request-id value is generated and /// included in each request. Default is true. /// </summary> public bool? GenerateClientRequestId { get; set; } /// <summary> /// Gets the IApplicationGatewaysOperations. /// </summary> public virtual IApplicationGatewaysOperations ApplicationGateways { get; private set; } /// <summary> /// Gets the IExpressRouteCircuitAuthorizationsOperations. /// </summary> public virtual IExpressRouteCircuitAuthorizationsOperations ExpressRouteCircuitAuthorizations { get; private set; } /// <summary> /// Gets the IExpressRouteCircuitPeeringsOperations. /// </summary> public virtual IExpressRouteCircuitPeeringsOperations ExpressRouteCircuitPeerings { get; private set; } /// <summary> /// Gets the IExpressRouteCircuitsOperations. /// </summary> public virtual IExpressRouteCircuitsOperations ExpressRouteCircuits { get; private set; } /// <summary> /// Gets the IExpressRouteServiceProvidersOperations. /// </summary> public virtual IExpressRouteServiceProvidersOperations ExpressRouteServiceProviders { get; private set; } /// <summary> /// Gets the ILoadBalancersOperations. /// </summary> public virtual ILoadBalancersOperations LoadBalancers { get; private set; } /// <summary> /// Gets the INetworkInterfacesOperations. /// </summary> public virtual INetworkInterfacesOperations NetworkInterfaces { get; private set; } /// <summary> /// Gets the INetworkSecurityGroupsOperations. /// </summary> public virtual INetworkSecurityGroupsOperations NetworkSecurityGroups { get; private set; } /// <summary> /// Gets the ISecurityRulesOperations. /// </summary> public virtual ISecurityRulesOperations SecurityRules { get; private set; } /// <summary> /// Gets the INetworkWatchersOperations. /// </summary> public virtual INetworkWatchersOperations NetworkWatchers { get; private set; } /// <summary> /// Gets the IPacketCapturesOperations. 
/// </summary> public virtual IPacketCapturesOperations PacketCaptures { get; private set; } /// <summary> /// Gets the IPublicIPAddressesOperations. /// </summary> public virtual IPublicIPAddressesOperations PublicIPAddresses { get; private set; } /// <summary> /// Gets the IRouteFiltersOperations. /// </summary> public virtual IRouteFiltersOperations RouteFilters { get; private set; } /// <summary> /// Gets the IRouteFilterRulesOperations. /// </summary> public virtual IRouteFilterRulesOperations RouteFilterRules { get; private set; } /// <summary> /// Gets the IRouteTablesOperations. /// </summary> public virtual IRouteTablesOperations RouteTables { get; private set; } /// <summary> /// Gets the IRoutesOperations. /// </summary> public virtual IRoutesOperations Routes { get; private set; } /// <summary> /// Gets the IBgpServiceCommunitiesOperations. /// </summary> public virtual IBgpServiceCommunitiesOperations BgpServiceCommunities { get; private set; } /// <summary> /// Gets the IUsagesOperations. /// </summary> public virtual IUsagesOperations Usages { get; private set; } /// <summary> /// Gets the IVirtualNetworksOperations. /// </summary> public virtual IVirtualNetworksOperations VirtualNetworks { get; private set; } /// <summary> /// Gets the ISubnetsOperations. /// </summary> public virtual ISubnetsOperations Subnets { get; private set; } /// <summary> /// Gets the IVirtualNetworkPeeringsOperations. /// </summary> public virtual IVirtualNetworkPeeringsOperations VirtualNetworkPeerings { get; private set; } /// <summary> /// Gets the IVirtualNetworkGatewaysOperations. /// </summary> public virtual IVirtualNetworkGatewaysOperations VirtualNetworkGateways { get; private set; } /// <summary> /// Gets the IVirtualNetworkGatewayConnectionsOperations. /// </summary> public virtual IVirtualNetworkGatewayConnectionsOperations VirtualNetworkGatewayConnections { get; private set; } /// <summary> /// Gets the ILocalNetworkGatewaysOperations. 
/// </summary> public virtual ILocalNetworkGatewaysOperations LocalNetworkGateways { get; private set; } /// <summary> /// Initializes a new instance of the NetworkManagementClient class. /// </summary> /// <param name='handlers'> /// Optional. The delegating handlers to add to the http client pipeline. /// </param> protected NetworkManagementClient(params System.Net.Http.DelegatingHandler[] handlers) : base(handlers) { Initialize(); } /// <summary> /// Initializes a new instance of the NetworkManagementClient class. /// </summary> /// <param name='rootHandler'> /// Optional. The http client handler used to handle http transport. /// </param> /// <param name='handlers'> /// Optional. The delegating handlers to add to the http client pipeline. /// </param> protected NetworkManagementClient(System.Net.Http.HttpClientHandler rootHandler, params System.Net.Http.DelegatingHandler[] handlers) : base(rootHandler, handlers) { Initialize(); } /// <summary> /// Initializes a new instance of the NetworkManagementClient class. /// </summary> /// <param name='baseUri'> /// Optional. The base URI of the service. /// </param> /// <param name='handlers'> /// Optional. The delegating handlers to add to the http client pipeline. /// </param> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> protected NetworkManagementClient(System.Uri baseUri, params System.Net.Http.DelegatingHandler[] handlers) : this(handlers) { if (baseUri == null) { throw new System.ArgumentNullException("baseUri"); } BaseUri = baseUri; } /// <summary> /// Initializes a new instance of the NetworkManagementClient class. /// </summary> /// <param name='baseUri'> /// Optional. The base URI of the service. /// </param> /// <param name='rootHandler'> /// Optional. The http client handler used to handle http transport. /// </param> /// <param name='handlers'> /// Optional. The delegating handlers to add to the http client pipeline. 
/// </param> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> protected NetworkManagementClient(System.Uri baseUri, System.Net.Http.HttpClientHandler rootHandler, params System.Net.Http.DelegatingHandler[] handlers) : this(rootHandler, handlers) { if (baseUri == null) { throw new System.ArgumentNullException("baseUri"); } BaseUri = baseUri; } /// <summary> /// Initializes a new instance of the NetworkManagementClient class. /// </summary> /// <param name='credentials'> /// Required. Credentials needed for the client to connect to Azure. /// </param> /// <param name='handlers'> /// Optional. The delegating handlers to add to the http client pipeline. /// </param> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> public NetworkManagementClient(ServiceClientCredentials credentials, params System.Net.Http.DelegatingHandler[] handlers) : this(handlers) { if (credentials == null) { throw new System.ArgumentNullException("credentials"); } Credentials = credentials; if (Credentials != null) { Credentials.InitializeServiceClient(this); } } /// <summary> /// Initializes a new instance of the NetworkManagementClient class. /// </summary> /// <param name='credentials'> /// Required. Credentials needed for the client to connect to Azure. /// </param> /// <param name='rootHandler'> /// Optional. The http client handler used to handle http transport. /// </param> /// <param name='handlers'> /// Optional. The delegating handlers to add to the http client pipeline. 
/// </param> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> public NetworkManagementClient(ServiceClientCredentials credentials, System.Net.Http.HttpClientHandler rootHandler, params System.Net.Http.DelegatingHandler[] handlers) : this(rootHandler, handlers) { if (credentials == null) { throw new System.ArgumentNullException("credentials"); } Credentials = credentials; if (Credentials != null) { Credentials.InitializeServiceClient(this); } } /// <summary> /// Initializes a new instance of the NetworkManagementClient class. /// </summary> /// <param name='baseUri'> /// Optional. The base URI of the service. /// </param> /// <param name='credentials'> /// Required. Credentials needed for the client to connect to Azure. /// </param> /// <param name='handlers'> /// Optional. The delegating handlers to add to the http client pipeline. /// </param> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> public NetworkManagementClient(System.Uri baseUri, ServiceClientCredentials credentials, params System.Net.Http.DelegatingHandler[] handlers) : this(handlers) { if (baseUri == null) { throw new System.ArgumentNullException("baseUri"); } if (credentials == null) { throw new System.ArgumentNullException("credentials"); } BaseUri = baseUri; Credentials = credentials; if (Credentials != null) { Credentials.InitializeServiceClient(this); } } /// <summary> /// Initializes a new instance of the NetworkManagementClient class. /// </summary> /// <param name='baseUri'> /// Optional. The base URI of the service. /// </param> /// <param name='credentials'> /// Required. Credentials needed for the client to connect to Azure. /// </param> /// <param name='rootHandler'> /// Optional. The http client handler used to handle http transport. /// </param> /// <param name='handlers'> /// Optional. The delegating handlers to add to the http client pipeline. 
/// </param> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> public NetworkManagementClient(System.Uri baseUri, ServiceClientCredentials credentials, System.Net.Http.HttpClientHandler rootHandler, params System.Net.Http.DelegatingHandler[] handlers) : this(rootHandler, handlers) { if (baseUri == null) { throw new System.ArgumentNullException("baseUri"); } if (credentials == null) { throw new System.ArgumentNullException("credentials"); } BaseUri = baseUri; Credentials = credentials; if (Credentials != null) { Credentials.InitializeServiceClient(this); } } /// <summary> /// An optional partial-method to perform custom initialization. /// </summary> partial void CustomInitialize(); /// <summary> /// Initializes client properties. /// </summary> private void Initialize() { ApplicationGateways = new ApplicationGatewaysOperations(this); ExpressRouteCircuitAuthorizations = new ExpressRouteCircuitAuthorizationsOperations(this); ExpressRouteCircuitPeerings = new ExpressRouteCircuitPeeringsOperations(this); ExpressRouteCircuits = new ExpressRouteCircuitsOperations(this); ExpressRouteServiceProviders = new ExpressRouteServiceProvidersOperations(this); LoadBalancers = new LoadBalancersOperations(this); NetworkInterfaces = new NetworkInterfacesOperations(this); NetworkSecurityGroups = new NetworkSecurityGroupsOperations(this); SecurityRules = new SecurityRulesOperations(this); NetworkWatchers = new NetworkWatchersOperations(this); PacketCaptures = new PacketCapturesOperations(this); PublicIPAddresses = new PublicIPAddressesOperations(this); RouteFilters = new RouteFiltersOperations(this); RouteFilterRules = new RouteFilterRulesOperations(this); RouteTables = new RouteTablesOperations(this); Routes = new RoutesOperations(this); BgpServiceCommunities = new BgpServiceCommunitiesOperations(this); Usages = new UsagesOperations(this); VirtualNetworks = new VirtualNetworksOperations(this); Subnets = new 
SubnetsOperations(this); VirtualNetworkPeerings = new VirtualNetworkPeeringsOperations(this); VirtualNetworkGateways = new VirtualNetworkGatewaysOperations(this); VirtualNetworkGatewayConnections = new VirtualNetworkGatewayConnectionsOperations(this); LocalNetworkGateways = new LocalNetworkGatewaysOperations(this); BaseUri = new System.Uri("https://management.azure.com"); AcceptLanguage = "en-US"; LongRunningOperationRetryTimeout = 30; GenerateClientRequestId = true; SerializationSettings = new JsonSerializerSettings { Formatting = Formatting.Indented, DateFormatHandling = DateFormatHandling.IsoDateFormat, DateTimeZoneHandling = DateTimeZoneHandling.Utc, NullValueHandling = NullValueHandling.Ignore, ReferenceLoopHandling = ReferenceLoopHandling.Serialize, ContractResolver = new ReadOnlyJsonContractResolver(), Converters = new List<JsonConverter> { new Iso8601TimeSpanConverter() } }; SerializationSettings.Converters.Add(new TransformationJsonConverter()); DeserializationSettings = new JsonSerializerSettings { DateFormatHandling = DateFormatHandling.IsoDateFormat, DateTimeZoneHandling = DateTimeZoneHandling.Utc, NullValueHandling = NullValueHandling.Ignore, ReferenceLoopHandling = ReferenceLoopHandling.Serialize, ContractResolver = new ReadOnlyJsonContractResolver(), Converters = new List<JsonConverter> { new Iso8601TimeSpanConverter() } }; CustomInitialize(); DeserializationSettings.Converters.Add(new TransformationJsonConverter()); DeserializationSettings.Converters.Add(new CloudErrorJsonConverter()); } /// <summary> /// Checks whether a domain name in the cloudapp.net zone is available for use. /// </summary> /// <param name='location'> /// The location of the domain name. /// </param> /// <param name='domainNameLabel'> /// The domain name to be verified. It must conform to the following regular /// expression: ^[a-z][a-z0-9-]{1,61}[a-z0-9]$. /// </param> /// <param name='customHeaders'> /// Headers that will be added to request. 
/// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="ValidationException"> /// Thrown when a required parameter is null /// </exception> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> /// <return> /// A response object containing the response body and response headers. /// </return> public async Task<AzureOperationResponse<DnsNameAvailabilityResult>> CheckDnsNameAvailabilityWithHttpMessagesAsync(string location, string domainNameLabel = default(string), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (location == null) { throw new ValidationException(ValidationRules.CannotBeNull, "location"); } if (SubscriptionId == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.SubscriptionId"); } string apiVersion = "2016-12-01"; // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("location", location); tracingParameters.Add("domainNameLabel", domainNameLabel); tracingParameters.Add("apiVersion", apiVersion); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "CheckDnsNameAvailability", tracingParameters); } // Construct URL var _baseUrl = BaseUri.AbsoluteUri; var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? 
"" : "/")), "subscriptions/{subscriptionId}/providers/Microsoft.Network/locations/{location}/CheckDnsNameAvailability").ToString(); _url = _url.Replace("{location}", System.Uri.EscapeDataString(location)); _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(SubscriptionId)); List<string> _queryParameters = new List<string>(); if (domainNameLabel != null) { _queryParameters.Add(string.Format("domainNameLabel={0}", System.Uri.EscapeDataString(domainNameLabel))); } if (apiVersion != null) { _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(apiVersion))); } if (_queryParameters.Count > 0) { _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters); } // Create HTTP transport objects var _httpRequest = new System.Net.Http.HttpRequestMessage(); System.Net.Http.HttpResponseMessage _httpResponse = null; _httpRequest.Method = new System.Net.Http.HttpMethod("GET"); _httpRequest.RequestUri = new System.Uri(_url); // Set Headers if (GenerateClientRequestId != null && GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString()); } if (AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; // Set Credentials if (Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } 
cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200) { var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); CloudError _errorBody = SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, DeserializationSettings); if (_errorBody != null) { ex = new CloudException(_errorBody.Message); ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_httpResponse.Headers.Contains("x-ms-request-id")) { ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse<DnsNameAvailabilityResult>(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = SafeJsonConvert.DeserializeObject<DnsNameAvailabilityResult>(_responseContent, DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { 
_httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } } }
using System;
using System.Collections.Generic;
using System.Diagnostics;
using hw.Helper;
using JetBrains.Annotations;

// ReSharper disable CheckNamespace

namespace hw.DebugFormatter;

/// <summary>
///     Base class that provides debugger-friendly dumping of object state and
///     opt-in tracing of method entry/exit via a stack of trace switches.
/// </summary>
[Dump("Dump")]
[DebuggerDisplay("{" + nameof(DebuggerDumpString) + "}")]
public class Dumpable
{
    /// <summary>
    ///     One entry per StartMethodDump: the stack-frame count captured at entry
    ///     (used to verify balanced Start/End pairs) and whether tracing is enabled.
    /// </summary>
    sealed class MethodDumpTraceItem
    {
        internal int FrameCount { get; }
        internal bool Trace { get; }

        public MethodDumpTraceItem(int frameCount, bool trace)
        {
            FrameCount = frameCount;
            Trace = trace;
        }
    }

    /// <summary>
    ///     Global override: <c>true</c> inhibits method-dump tracing entirely,
    ///     <c>false</c> forces it on, <c>null</c> (default) leaves it controlled
    ///     by the Start/EndMethodDump switch stack.
    /// </summary>
    [PublicAPI]
    public static bool? IsMethodDumpTraceInhibited;

    static readonly Stack<MethodDumpTraceItem> MethodDumpTraceSwitches = new();

    /// <summary>
    ///     Gets a value indicating whether this instance is in dump.
    /// </summary>
    /// <value>
    ///     <c>true</c>
    ///     if this instance is in dump; otherwise,
    ///     <c>false</c>
    ///     .
    /// </value>
    /// created 23.09.2006 17:39
    [DisableDump]
    [PublicAPI]
    [DebuggerBrowsable(DebuggerBrowsableState.Never)]
    public bool IsInDump { get; set; }

    /// <summary>
    ///     generate dump string to be shown in debug windows
    /// </summary>
    /// <returns> </returns>
    [PublicAPI]
    public virtual string DebuggerDump() => Tracer.Dump(this);

    /// <summary>
    ///     Dump of the data members only; falls back to a placeholder when
    ///     dumping is not implemented for this type.
    /// </summary>
    [PublicAPI]
    public virtual string DumpData()
    {
        string result;
        try
        {
            result = Tracer.DumpData(this);
        }
        catch(Exception)
        {
            result = "<not implemented>";
        }

        return result;
    }

    /// <summary>
    ///     Type name plus data dump; emits a recursion marker instead of data
    ///     when called re-entrantly (isRecursion == true).
    /// </summary>
    protected virtual string Dump(bool isRecursion)
    {
        var surround = "<recursion>";
        if(!isRecursion)
            surround = DumpData().Surround("{", "}");
        return GetType().PrettyName() + surround;
    }

    /// <summary>
    ///     dump string to be shown in debug windows
    /// </summary>
    [DisableDump]
    [PublicAPI]
    [DebuggerBrowsable(DebuggerBrowsableState.Never)]
    public string DebuggerDumpString => DebuggerDump().Replace("\n", "\r\n");

    [DisableDump]
    [PublicAPI]
    // ReSharper disable once InconsistentNaming
    public string d => DebuggerDumpString;

    static bool IsMethodDumpTraceActive
    {
        get
        {
            if(IsMethodDumpTraceInhibited != null)
                return !IsMethodDumpTraceInhibited.Value;
            // Bug fix: Peek() on an empty stack throws InvalidOperationException.
            // Any logging helper called with a debugger attached but outside a
            // StartMethodDump/EndMethodDump pair used to crash here; now tracing
            // is simply treated as inactive in that case.
            return Debugger.IsAttached
                && MethodDumpTraceSwitches.Count > 0
                && MethodDumpTraceSwitches.Peek().Trace;
        }
    }

    /// <summary>
    ///     Method dump with break,
    /// </summary>
    /// <param name="p"> </param>
    /// <returns> </returns>
    [DebuggerHidden]
    [IsLoggingFunction]
    [PublicAPI]
    public static void NotImplementedFunction(params object[] p)
    {
        var os = Tracer.DumpMethodWithData("not implemented", null, p, 1);
        os.Log();
        Tracer.TraceBreak();
    }

    /// <summary>
    ///     Method start dump,
    /// </summary>
    /// <param name="name"> </param>
    /// <param name="value"> </param>
    /// <returns> </returns>
    [DebuggerHidden]
    [IsLoggingFunction]
    [PublicAPI]
    public static void Dump(string name, object value)
    {
        if(IsMethodDumpTraceActive)
        {
            var os = Tracer.DumpData("", new[] { name, value }, 1);
            os.Log();
        }
    }

    /// <summary>
    ///     Method start dump,
    /// </summary>
    /// <param name="name"> </param>
    /// <param name="getValue"> getValue is only evaluated when tracing is active. </param>
    /// <returns> </returns>
    [DebuggerHidden]
    [IsLoggingFunction]
    [PublicAPI]
    public static void Dump(string name, Func<object> getValue)
    {
        if(IsMethodDumpTraceActive)
        {
            var os = Tracer.DumpData("", new[] { name, getValue() }, 1);
            os.Log();
        }
    }

    // ReSharper disable once InconsistentNaming
    [PublicAPI]
    [IsLoggingFunction]
    public void t() => DebuggerDumpString.Log();

    /// <summary>
    ///     Full dump; marks the instance as "in dump" for the duration so that
    ///     re-entrant calls produce a recursion marker instead of looping.
    /// </summary>
    public string Dump()
    {
        var oldIsInDump = IsInDump;
        IsInDump = true;
        try
        {
            return Dump(oldIsInDump);
        }
        finally
        {
            IsInDump = oldIsInDump;
        }
    }

    // NOTE(review): intentionally empty; kept for source compatibility with callers
    // that expect a Dispose method. The class does not implement IDisposable.
    [PublicAPI]
    public void Dispose() { }

    /// <summary>
    ///     Method dump,
    /// </summary>
    /// <param name="rv"> </param>
    /// <param name="breakExecution"> </param>
    /// <returns> </returns>
    [DebuggerHidden]
    [IsLoggingFunction]
    protected static T ReturnMethodDump<T>(T rv, bool breakExecution = true)
    {
        if(IsMethodDumpTraceActive)
        {
            Tracer.IndentEnd();
            (Tracer.MethodHeader(stackFrameDepth: 1) + "[returns] " + Tracer.Dump(rv)).Log();
            if(breakExecution)
                Tracer.TraceBreak();
        }

        return rv;
    }

    /// <summary>
    ///     Method dump,
    /// </summary>
    [DebuggerHidden]
    [PublicAPI]
    [IsLoggingFunction]
    protected static void ReturnVoidMethodDump(bool breakExecution = true)
    {
        if(IsMethodDumpTraceActive)
        {
            Tracer.IndentEnd();
            (Tracer.MethodHeader(stackFrameDepth: 1) + "[returns]").Log();
            if(breakExecution)
                Tracer.TraceBreak();
        }
    }

    /// <summary>
    ///     Method dump,
    /// </summary>
    [DebuggerHidden]
    protected static void EndMethodDump()
    {
        if(!Debugger.IsAttached)
            return;

        CheckDumpLevel(1);
        // Bug fix: Pop() on an empty stack throws InvalidOperationException when
        // EndMethodDump is called without a matching StartMethodDump.
        if(MethodDumpTraceSwitches.Count > 0)
            MethodDumpTraceSwitches.Pop();
    }

    /// <summary>
    ///     Method dump with break,
    /// </summary>
    /// <param name="text"> </param>
    /// <param name="p"> </param>
    /// <returns> </returns>
    [DebuggerHidden]
    [PublicAPI]
    [IsLoggingFunction]
    protected static void DumpDataWithBreak(string text, params object[] p)
    {
        var os = Tracer.DumpData(text, p, 1);
        os.Log();
        Tracer.TraceBreak();
    }

    /// <summary>
    ///     Method start dump,
    /// </summary>
    /// <param name="trace"> </param>
    /// <param name="p"> </param>
    /// <returns> </returns>
    [DebuggerHidden]
    [IsLoggingFunction]
    protected void StartMethodDump(bool trace, params object[] p)
    {
        StartMethodDump(1, trace);
        if(!IsMethodDumpTraceActive)
            return;

        var os = Tracer.DumpMethodWithData("", this, p, 1);
        os.Log();
        Tracer.IndentStart();
    }

    [DebuggerHidden]
    [PublicAPI]
    protected void BreakExecution()
    {
        if(IsMethodDumpTraceActive)
            Tracer.TraceBreak();
    }

    /// <summary>
    ///     Method dump with break,
    /// </summary>
    /// <param name="text"> </param>
    /// <param name="p"> </param>
    /// <returns> </returns>
    [DebuggerHidden]
    [PublicAPI]
    [IsLoggingFunction]
    protected void DumpMethodWithBreak(string text, params object[] p)
    {
        var os = Tracer.DumpMethodWithData(text, this, p, 1);
        os.Log();
        Tracer.TraceBreak();
    }

    /// <summary>
    ///     Method dump with break,
    /// </summary>
    /// <param name="p"> </param>
    /// <returns> </returns>
    [DebuggerHidden]
    [IsLoggingFunction]
    protected void NotImplementedMethod(params object[] p)
    {
        // Inside a dump, throwing is preferred over recursively dumping again.
        if(IsInDump)
            throw new NotImplementedException();

        var os = Tracer.DumpMethodWithData("not implemented", this, p, 1);
        os.Log();
        Tracer.TraceBreak();
    }

    // Verifies that the frame count recorded at StartMethodDump matches the
    // current frame count, i.e. that Start/End pairs are balanced.
    static void CheckDumpLevel(int depth)
    {
        if(!Debugger.IsAttached)
            return;
        // Bug fix: guard against an empty switch stack (unbalanced EndMethodDump)
        // before peeking.
        if(MethodDumpTraceSwitches.Count == 0)
            return;

        var top = MethodDumpTraceSwitches.Peek();
        if(top.Trace)
            (top.FrameCount == Tracer.CurrentFrameCount(depth + 1)).Assert();
    }

    static void StartMethodDump(int depth, bool trace)
    {
        if(!Debugger.IsAttached)
            return;

        var frameCount = trace? Tracer.CurrentFrameCount(depth + 1) : 0;
        MethodDumpTraceSwitches.Push(new(frameCount, trace));
    }

    /// <summary>Convenience helper: returns its params array, useful for inline array literals.</summary>
    public static TValue[] T<TValue>(params TValue[] value) => value;
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System;
using System.Collections.Generic;
using System.Globalization;
using System.Runtime.Tests.Common;
using Xunit;

// Unit tests for System.Int32: construction, comparison, equality, formatting and parsing.
public static class Int32Tests
{
    [Fact]
    public static void TestCtorEmpty()
    {
        int i = new int();
        Assert.Equal(0, i);
    }

    [Fact]
    public static void TestCtorValue()
    {
        int i = 41;
        Assert.Equal(41, i);
    }

    [Fact]
    public static void TestMaxValue()
    {
        Assert.Equal(0x7FFFFFFF, int.MaxValue);
    }

    [Fact]
    public static void TestMinValue()
    {
        Assert.Equal(unchecked((int)0x80000000), int.MinValue);
    }

    // expected is the sign of 234.CompareTo(value), normalized to -1/0/1.
    [Theory]
    [InlineData(234, 0)]
    [InlineData(int.MinValue, 1)]
    [InlineData(-123, 1)]
    [InlineData(0, 1)]
    [InlineData(45, 1)]
    [InlineData(123, 1)]
    [InlineData(456, -1)]
    [InlineData(int.MaxValue, -1)]
    public static void TestCompareTo(int value, int expected)
    {
        int i = 234;
        int result = CompareHelper.NormalizeCompare(i.CompareTo(value));
        Assert.Equal(expected, result);
    }

    // IComparable.CompareTo: null compares greater-than by contract (returns 1).
    [Theory]
    [InlineData(null, 1)]
    [InlineData(234, 0)]
    [InlineData(int.MinValue, 1)]
    [InlineData(-123, 1)]
    [InlineData(0, 1)]
    [InlineData(45, 1)]
    [InlineData(123, 1)]
    [InlineData(456, -1)]
    [InlineData(int.MaxValue, -1)]
    public static void TestCompareToObject(object obj, int expected)
    {
        IComparable comparable = 234;
        int i = CompareHelper.NormalizeCompare(comparable.CompareTo(obj));
        Assert.Equal(expected, i);
    }

    [Fact]
    public static void TestCompareToObjectInvalid()
    {
        IComparable comparable = 234;
        Assert.Throws<ArgumentException>(null, () => comparable.CompareTo("a")); // Obj is not an int
    }

    [Theory]
    [InlineData(789, true)]
    [InlineData(-789, false)]
    [InlineData(0, false)]
    public static void TestEqualsObject(object obj, bool expected)
    {
        int i = 789;
        Assert.Equal(expected, i.Equals(obj));
    }

    [Theory]
    [InlineData(789, true)]
    [InlineData(-789, false)]
    [InlineData(0, false)]
    public static void TestEquals(int i2, bool expected)
    {
        int i = 789;
        Assert.Equal(expected, i.Equals(i2));
    }

    [Fact]
    public static void TestGetHashCode()
    {
        int i1 = 123;
        int i2 = 654;
        Assert.NotEqual(0, i1.GetHashCode());
        Assert.NotEqual(i1.GetHashCode(), i2.GetHashCode());
    }

    [Fact]
    public static void TestToString()
    {
        int i1 = 6310;
        Assert.Equal("6310", i1.ToString());

        int i2 = -8249;
        Assert.Equal("-8249", i2.ToString());
    }

    [Fact]
    public static void TestToStringFormatProvider()
    {
        var numberFormat = new NumberFormatInfo();

        int i1 = 6310;
        Assert.Equal("6310", i1.ToString(numberFormat));

        int i2 = -8249;
        Assert.Equal("-8249", i2.ToString(numberFormat));

        int i3 = -2468;
        // Changing the negative pattern doesn't do anything without also passing in a format string
        numberFormat.NumberNegativePattern = 0;
        Assert.Equal("-2468", i3.ToString(numberFormat));
    }

    [Fact]
    public static void TestToStringFormat()
    {
        int i1 = 6310;
        Assert.Equal("6310", i1.ToString("G"));

        int i2 = -8249;
        Assert.Equal("-8249", i2.ToString("g"));

        int i3 = -2468;
        Assert.Equal(string.Format("{0:N}", -2468.00), i3.ToString("N"));

        int i4 = 0x248;
        Assert.Equal("248", i4.ToString("x"));
    }

    [Fact]
    public static void TestToStringFormatFormatProvider()
    {
        var numberFormat = new NumberFormatInfo();

        int i1 = 6310;
        Assert.Equal("6310", i1.ToString("G", numberFormat));

        int i2 = -8249;
        Assert.Equal("-8249", i2.ToString("g", numberFormat));

        numberFormat.NegativeSign = "xx"; // setting it to trash to make sure it doesn't show up
        numberFormat.NumberGroupSeparator = "*";
        numberFormat.NumberNegativePattern = 0;
        int i3 = -2468;
        Assert.Equal("(2*468.00)", i3.ToString("N", numberFormat));
    }

    // Each row: input string, styles, format provider (null = default), expected value.
    public static IEnumerable<object[]> ParseValidData()
    {
        NumberFormatInfo defaultFormat = null;
        NumberStyles defaultStyle = NumberStyles.Integer;
        var emptyNfi = new NumberFormatInfo();

        var testNfi = new NumberFormatInfo();
        testNfi.CurrencySymbol = "$";

        yield return new object[] { "-2147483648", defaultStyle, defaultFormat, -2147483648 };
        yield return new object[] { "0", defaultStyle, defaultFormat, 0 };
        yield return new object[] { "123", defaultStyle, defaultFormat, 123 };
        yield return new object[] { " 123 ", defaultStyle, defaultFormat, 123 };
        yield return new object[] { "2147483647", defaultStyle, defaultFormat, 2147483647 };

        yield return new object[] { "123", NumberStyles.HexNumber, defaultFormat, 0x123 };
        yield return new object[] { "abc", NumberStyles.HexNumber, defaultFormat, 0xabc };
        yield return new object[] { "1000", NumberStyles.AllowThousands, defaultFormat, 1000 };
        yield return new object[] { "(123)", NumberStyles.AllowParentheses, defaultFormat, -123 }; // Parentheses = negative

        yield return new object[] { "123", defaultStyle, emptyNfi, 123 };

        yield return new object[] { "123", NumberStyles.Any, emptyNfi, 123 };
        yield return new object[] { "12", NumberStyles.HexNumber, emptyNfi, 0x12 };
        yield return new object[] { "$1,000", NumberStyles.Currency, testNfi, 1000 };
    }

    // Each row: input string, styles, format provider (null = default), expected exception type.
    public static IEnumerable<object[]> ParseInvalidData()
    {
        NumberFormatInfo defaultFormat = null;
        NumberStyles defaultStyle = NumberStyles.Integer;
        var emptyNfi = new NumberFormatInfo();

        var testNfi = new NumberFormatInfo();
        testNfi.CurrencySymbol = "$";
        testNfi.NumberDecimalSeparator = ".";

        yield return new object[] { null, defaultStyle, defaultFormat, typeof(ArgumentNullException) };
        yield return new object[] { "", defaultStyle, defaultFormat, typeof(FormatException) };
        yield return new object[] { " ", defaultStyle, defaultFormat, typeof(FormatException) };
        yield return new object[] { "Garbage", defaultStyle, defaultFormat, typeof(FormatException) };
        yield return new object[] { "abc", defaultStyle, defaultFormat, typeof(FormatException) }; // Hex value
        yield return new object[] { "1E23", defaultStyle, defaultFormat, typeof(FormatException) }; // Exponent
        yield return new object[] { "(123)", defaultStyle, defaultFormat, typeof(FormatException) }; // Parentheses
        yield return new object[] { 1000.ToString("C0"), defaultStyle, defaultFormat, typeof(FormatException) }; // Currency
        yield return new object[] { 1000.ToString("N0"), defaultStyle, defaultFormat, typeof(FormatException) }; // Thousands
        yield return new object[] { 678.90.ToString("F2"), defaultStyle, defaultFormat, typeof(FormatException) }; // Decimal

        yield return new object[] { "abc", NumberStyles.None, defaultFormat, typeof(FormatException) }; // Hex digits are not allowed with NumberStyles.None
        yield return new object[] { " 123 ", NumberStyles.None, defaultFormat, typeof(FormatException) }; // Trailing and leading whitespace

        yield return new object[] { "67.90", defaultStyle, testNfi, typeof(FormatException) }; // Decimal

        yield return new object[] { "-2147483649", defaultStyle, defaultFormat, typeof(OverflowException) }; // < MinValue
        yield return new object[] { "2147483648", defaultStyle, defaultFormat, typeof(OverflowException) }; // > MaxValue
    }

    [Theory, MemberData("ParseValidData")]
    public static void TestParse(string value, NumberStyles style, NumberFormatInfo nfi, int expected)
    {
        int i;
        //If no style is specified, use the (String) or (String, IFormatProvider) overload
        if (style == NumberStyles.Integer)
        {
            Assert.Equal(true, int.TryParse(value, out i));
            Assert.Equal(expected, i);

            Assert.Equal(expected, int.Parse(value));

            //If a format provider is specified, but the style is the default, use the (String, IFormatProvider) overload
            if (nfi != null)
            {
                Assert.Equal(expected, int.Parse(value, nfi));
            }
        }

        // If a format provider isn't specified, test the default one, using a new instance of NumberFormatInfo
        Assert.Equal(true, int.TryParse(value, style, nfi ?? new NumberFormatInfo(), out i));
        Assert.Equal(expected, i);

        //If a format provider isn't specified, test the default one, using the (String, NumberStyles) overload
        if (nfi == null)
        {
            Assert.Equal(expected, int.Parse(value, style));
        }
        Assert.Equal(expected, int.Parse(value, style, nfi ?? new NumberFormatInfo()));
    }

    [Theory, MemberData("ParseInvalidData")]
    public static void TestParseInvalid(string value, NumberStyles style, NumberFormatInfo nfi, Type exceptionType)
    {
        int i;
        //If no style is specified, use the (String) or (String, IFormatProvider) overload
        if (style == NumberStyles.Integer)
        {
            Assert.Equal(false, int.TryParse(value, out i));
            Assert.Equal(default(int), i);

            Assert.Throws(exceptionType, () => int.Parse(value));

            //If a format provider is specified, but the style is the default, use the (String, IFormatProvider) overload
            if (nfi != null)
            {
                Assert.Throws(exceptionType, () => int.Parse(value, nfi));
            }
        }

        // If a format provider isn't specified, test the default one, using a new instance of NumberFormatInfo
        Assert.Equal(false, int.TryParse(value, style, nfi ?? new NumberFormatInfo(), out i));
        Assert.Equal(default(int), i);

        //If a format provider isn't specified, test the default one, using the (String, NumberStyles) overload
        if (nfi == null)
        {
            Assert.Throws(exceptionType, () => int.Parse(value, style));
        }
        Assert.Throws(exceptionType, () => int.Parse(value, style, nfi ?? new NumberFormatInfo()));
    }
}
using EdiEngine.Common.Enums;
using EdiEngine.Common.Definitions;
using EdiEngine.Standards.X12_004010.Segments;

namespace EdiEngine.Standards.X12_004010.Maps
{
    // Segment/loop map for X12 004010 transaction set 404.
    // (By X12 convention 404 is the rail carrier shipment information set —
    // confirm against the published standard.)
    // Each entry declares a segment (or nested loop), its requirement
    // designator, and its maximum occurrence count, in transaction order.
    public class M_404 : MapLoop
    {
        public M_404() : base(null)
        {
            Content.AddRange(new MapBaseEntity[] {
                new ZC1() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                new BX() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                new BNX() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                new M3() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                new N9() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 30 },
                new CM() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 2 },
                new M1() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                new DTM() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 5 },
                new L_N7(this) { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 500 },
                new NA() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 10 },
                new F9() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                new D9() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                new L_N1(this) { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 10 },
                new L_S1(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 12 },
                new R2() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 13 },
                new R9() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                new L_E1(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 2 },
                new H3() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 20 },
                new PS() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 5 },
                new L_LX(this) { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 25 },
                new L_T1(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 64 },
                new L3() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                new LS() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                new L_LH1(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 100 },
                new LE() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                new PER() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 5 },
                new LH2() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 6 },
                new LHR() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                new LH6() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 5 },
                new XH() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                new X7() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 10 },
            });
        }

        //1000 — anchored by a mandatory N7 segment; repeats up to 500 times at the top level.
        public class L_N7 : MapLoop
        {
            public L_N7(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new N7() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new EM() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                    new L_VC(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 21 },
                    new M7() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 5 },
                    new N5() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                    new IC() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                    new IM() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                    new M12() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 2 },
                    new L_E1(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 2 },
                    new GA() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 15 },
                    new L_REF(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 99 },
                });
            }
        }

        //1100 — nested inside loop 1000; anchored by a mandatory VC segment.
        public class L_VC : MapLoop
        {
            public L_VC(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new VC() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new L_N1(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 2 },
                });
            }
        }

        //1110 — N1 name loop; also used directly at the transaction's top level.
        public class L_N1 : MapLoop
        {
            public L_N1(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new N1() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new N3() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 2 },
                    new N4() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                    new H3() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                });
            }
        }

        //1200 — E1 loop; also used directly at the transaction's top level.
        public class L_E1 : MapLoop
        {
            public L_E1(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new E1() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new E4() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                    new E5() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 13 },
                    new PI() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                });
            }
        }

        //1300 — REF reference loop nested inside loop 1000.
        public class L_REF : MapLoop
        {
            public L_REF(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new REF() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new N10() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 15 },
                    new L_N1_1(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 5 },
                });
            }
        }

        //1310 — reduced N1 loop used only inside loop 1300 (no H3, N3 max 1).
        public class L_N1_1 : MapLoop
        {
            public L_N1_1(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new N1() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new N3() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                    new N4() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                });
            }
        }

        //2000 — NOTE(review): declared but not referenced anywhere in this map's
        // Content; possibly retained to mirror the standard's loop 2000 — confirm.
        public class L_N1_2 : MapLoop
        {
            public L_N1_2(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new N1() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new N2() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 2 },
                    new N3() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 2 },
                    new N4() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                    new REF() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 2 },
                    new PER() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 2 },
                    new BL() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 12 },
                });
            }
        }

        //3000 — S1 loop, referenced at the top level.
        public class L_S1 : MapLoop
        {
            public L_S1(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new S1() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new S2() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 2 },
                    new S9() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                    new N1() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                    new N2() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                    new N3() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                    new N4() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                    new PER() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                });
            }
        }

        //4000 — NOTE(review): identical in content to L_E1 (loop 1200) and not
        // referenced anywhere in this map's Content — confirm whether it is needed.
        public class L_E1_1 : MapLoop
        {
            public L_E1_1(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new E1() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new E4() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                    new E5() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 13 },
                    new PI() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                });
            }
        }

        //5000 — LX loop, mandatory at the top level (up to 25 occurrences).
        public class L_LX : MapLoop
        {
            public L_LX(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new LX() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new L5() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 15 },
                    new L_L0(this) { ReqDes = RequirementDesignator.Optional, MaxOccurs = 25 },
                    new X1() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 6 },
                });
            }
        }

        //5100 — L0 loop nested inside loop 5000.
        public class L_L0 : MapLoop
        {
            public L_L0(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new L0() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new MEA() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 3 },
                    new L1() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 10 },
                    new PI() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 30 },
                });
            }
        }

        //6000 — T1 loop, referenced at the top level.
        public class L_T1 : MapLoop
        {
            public L_T1(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new T1() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new T2() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 30 },
                    new T3() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 12 },
                    new T6() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                    new T8() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 99 },
                });
            }
        }

        //7000 — LH1 loop (hazardous-material-style LH*/LF* segments per the
        // segment IDs), referenced at the top level.
        public class L_LH1 : MapLoop
        {
            public L_LH1(MapLoop parentLoop) : base(parentLoop)
            {
                Content.AddRange(new MapBaseEntity[] {
                    new LH1() { ReqDes = RequirementDesignator.Mandatory, MaxOccurs = 1 },
                    new LH2() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 4 },
                    new LH3() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 10 },
                    new LFH() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 20 },
                    new LEP() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 3 },
                    new LH4() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 1 },
                    new LHT() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 3 },
                    new LHR() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 5 },
                    new PER() { ReqDes = RequirementDesignator.Optional, MaxOccurs = 5 },
                });
            }
        }
    }
}
// <copyright file="PersistedQueueBase{T}.cs" company="Adrian Mos">
// Copyright (c) Adrian Mos with all rights reserved. Part of the IX Framework.
// </copyright>

using System.Collections;
using System.Diagnostics.CodeAnalysis;
using System.Runtime.Serialization;
using IX.StandardExtensions;
using IX.StandardExtensions.Contracts;
using IX.StandardExtensions.Threading;
using IX.System.Collections.Generic;
using IX.System.IO;
using JetBrains.Annotations;

namespace IX.Guaranteed.Collections;

/// <summary>
///     A base class for persisted queues. Each queued item is stored as one
///     serialized file under a "Data" subfolder; files that cannot be read or
///     removed are tracked and moved aside via a "Poison" subfolder.
/// </summary>
/// <typeparam name="T">The type of object in the queue.</typeparam>
/// <seealso cref="StandardExtensions.ComponentModel.DisposableBase" />
/// <seealso cref="System.Collections.Generic.IQueue{T}" />
[PublicAPI]
[SuppressMessage(
    "Design",
    "CA1010:Generic interface should also be implemented",
    Justification = "This is not necessary.")]
public abstract class PersistedQueueBase<T> : ReaderWriterSynchronizedBase, IQueue<T>
{
    #region Internal state

    private readonly IDirectory directoryShim;
    private readonly IFile fileShim;
    private readonly IPath pathShim;

    /// <summary>
    ///     The poisoned non-removable files list.
    /// </summary>
    // NOTE(review): not touched in this chunk; presumably appended to by
    // HandleFileLoadProblem (defined outside this view) — confirm.
    private readonly List<string> poisonedUnremovableFiles;

    private readonly DataContractSerializer serializer;

    #endregion

    #region Constructors and destructors

    /// <summary>
    ///     Initializes a new instance of the <see cref="PersistedQueueBase{T}" /> class.
    /// </summary>
    /// <param name="persistenceFolderPath">
    ///     The persistence folder path.
    /// </param>
    /// <param name="fileShim">
    ///     The file shim.
    /// </param>
    /// <param name="directoryShim">
    ///     The directory shim.
    /// </param>
    /// <param name="pathShim">
    ///     The path shim.
    /// </param>
    /// <param name="serializer">
    ///     The serializer.
    /// </param>
    /// <param name="timeout">
    ///     The timeout.
    /// </param>
    /// <exception cref="ArgumentNullException">
    ///     <paramref name="persistenceFolderPath" />
    ///     or
    ///     <paramref name="fileShim" />
    ///     or
    ///     <paramref name="directoryShim" />
    ///     or
    ///     <paramref name="pathShim" />
    ///     or
    ///     <paramref name="serializer" />
    ///     is <see langword="null" /> (<see langword="Nothing" /> in Visual Basic).
    /// </exception>
    /// <exception cref="ArgumentInvalidPathException">
    ///     The folder at <paramref name="persistenceFolderPath" /> does not exist, or is not accessible.
    /// </exception>
    protected PersistedQueueBase(
        string persistenceFolderPath,
        IFile fileShim,
        IDirectory directoryShim,
        IPath pathShim,
        DataContractSerializer serializer,
        TimeSpan timeout)
        : base(timeout)
    {
        // Dependency validation: Requires.NotNull assigns the validated value to
        // the out field, so validation and field initialization happen together.
        Requires.NotNull(
            out this.fileShim,
            fileShim);
        Requires.NotNull(
            out this.pathShim,
            pathShim);
        Requires.NotNull(
            out this.directoryShim,
            directoryShim);
        Requires.NotNull(
            out this.serializer,
            serializer);

        // Parameter validation
        directoryShim.RequiresExists(persistenceFolderPath);

        // Internal state
        this.poisonedUnremovableFiles = new List<string>();

        // Persistence folder paths
        var dataFolderPath = pathShim.Combine(
            persistenceFolderPath,
            "Data");
        this.DataFolderPath = dataFolderPath;
        var poisonFolderPath = pathShim.Combine(
            persistenceFolderPath,
            "Poison");
        this.PoisonFolderPath = poisonFolderPath;

        // Initialize folder paths — the parent must pre-exist (validated above),
        // but the two subfolders are created on demand.
        if (!directoryShim.Exists(dataFolderPath))
        {
            directoryShim.CreateDirectory(dataFolderPath);
        }

        if (!directoryShim.Exists(poisonFolderPath))
        {
            directoryShim.CreateDirectory(poisonFolderPath);
        }
    }

    #endregion

    #region Properties and indexers

    /// <summary>
    ///     Gets the number of elements contained in the <see cref="PersistedQueueBase{T}" />.
    /// </summary>
    /// <value>The count.</value>
    public abstract int Count { get; }

    /// <summary>
    ///     Gets a value indicating whether this queue is empty.
    /// </summary>
    /// <value>
    ///     <c>true</c> if this queue is empty; otherwise, <c>false</c>.
    /// </value>
    public bool IsEmpty => this.Count == 0;

    /// <summary>
    ///     Gets a value indicating whether access to the <see cref="PersistedQueueBase{T}" /> is synchronized (thread safe).
    /// </summary>
    /// <value>The is synchronized.</value>
    bool ICollection.IsSynchronized => true;

    /// <summary>
    ///     Gets an object that can be used to synchronize access to the <see cref="PersistedQueueBase{T}" />.
    /// </summary>
    /// <value>The synchronize root.</value>
    // NOTE(review): exposed for ICollection compatibility only; internal
    // synchronization uses the inherited reader/writer locks, not this object.
    object ICollection.SyncRoot { get; } = new();

    /// <summary>
    ///     Gets the data folder path.
    /// </summary>
    /// <value>The data folder path.</value>
    protected string DataFolderPath { get; }

    /// <summary>
    ///     Gets the folder shim.
    /// </summary>
    /// <value>The folder shim.</value>
    protected IDirectory DirectoryShim => this.directoryShim;

    /// <summary>
    ///     Gets the file shim.
    /// </summary>
    /// <value>The file shim.</value>
    protected IFile FileShim => this.fileShim;

    /// <summary>
    ///     Gets the path shim.
    /// </summary>
    /// <value>The path shim.</value>
    protected IPath PathShim => this.pathShim;

    /// <summary>
    ///     Gets the poison folder path.
    /// </summary>
    /// <value>The poison folder path.</value>
    protected string PoisonFolderPath { get; }

    /// <summary>
    ///     Gets the serializer.
    /// </summary>
    /// <value>The serializer.</value>
    protected DataContractSerializer Serializer => this.serializer;

    #endregion

    #region Methods

    #region Interface implementations

    /// <summary>
    ///     Copies the elements of the <see cref="PersistedQueueBase{T}" /> to an <see cref="Array" />, starting at a
    ///     particular <see cref="Array" /> index.
    /// </summary>
    /// <param name="array">
    ///     The one-dimensional <see cref="Array" /> that is the destination of the elements copied
    ///     from <see cref="PersistedQueueBase{T}" />. The <see cref="Array" /> must have zero-based indexing.
    /// </param>
    /// <param name="index">The zero-based index in <paramref name="array" /> at which copying begins.</param>
    public abstract void CopyTo(
        Array array,
        int index);

    /// <summary>
    ///     Returns an enumerator that iterates through the queue.
    /// </summary>
    /// <returns>An enumerator that can be used to iterate through the queue.</returns>
    public abstract IEnumerator<T> GetEnumerator();

    /// <summary>
    ///     Clears the queue of all elements.
    /// </summary>
    public abstract void Clear();

    /// <summary>
    ///     Verifies whether or not an item is contained in the queue.
    /// </summary>
    /// <param name="item">The item to verify.</param>
    /// <returns><see langword="true" /> if the item is queued, <see langword="false" /> otherwise.</returns>
    public abstract bool Contains(T item);

    /// <summary>
    ///     De-queues an item and removes it from the queue.
    /// </summary>
    /// <returns>The item that has been de-queued.</returns>
    public abstract T Dequeue();

    /// <summary>
    ///     Queues an item, adding it to the queue.
    /// </summary>
    /// <param name="item">The item to enqueue.</param>
    public abstract void Enqueue(T item);

    /// <summary>
    ///     Queues a range of elements, adding them to the queue.
    /// </summary>
    /// <param name="items">The item range to push.</param>
    /// <exception cref="ArgumentNullException"><paramref name="items" /> is <see langword="null" />.</exception>
    public void EnqueueRange(T[] items)
    {
        Requires.NotNull(
            items);

        // Items are enqueued one at a time; there is no all-or-nothing guarantee
        // across the range.
        foreach (T item in items)
        {
            this.Enqueue(item);
        }
    }

    /// <summary>
    ///     Queues a range of elements, adding them to the queue.
    /// </summary>
    /// <param name="items">The item range to enqueue.</param>
    /// <param name="startIndex">The start index.</param>
    /// <param name="count">The number of items to enqueue.</param>
    public void EnqueueRange(
        T[] items,
        int startIndex,
        int count)
    {
        Requires.NotNull(
            items);
        Requires.ValidArrayRange(
            in startIndex,
            in count,
            items);

        // A span view avoids copying the sub-range before enqueueing.
        var itemsRange = new ReadOnlySpan<T>(
            items,
            startIndex,
            count);
        foreach (T item in itemsRange)
        {
            this.Enqueue(item);
        }
    }

    /// <summary>
    ///     Peeks at the topmost element in the queue, without removing it.
    /// </summary>
    /// <returns>The item peeked at, if any.</returns>
    public abstract T Peek();

    /// <summary>
    ///     Copies all elements of the queue into a new array.
    /// </summary>
    /// <returns>The created array with all element of the queue.</returns>
    public abstract T[] ToArray();

    /// <summary>
    ///     Trims the excess free space from within the queue, reducing the capacity to the actual number of elements.
    /// </summary>
    // Intentionally a no-op by default; derived queues may override.
    public virtual void TrimExcess() { }

    /// <summary>
    ///     Attempts to de-queue an item and to remove it from queue.
    /// </summary>
    /// <param name="item">The item that has been de-queued, default if unsuccessful.</param>
    /// <returns>
    ///     <see langword="true" /> if an item is de-queued successfully, <see langword="false" /> otherwise, or if the
    ///     queue is empty.
    /// </returns>
    public abstract bool TryDequeue(out T item);

    /// <summary>
    ///     Attempts to peek at the current queue and return the item that is next in line to be dequeued.
    /// </summary>
    /// <param name="item">The item, or default if unsuccessful.</param>
    /// <returns>
    ///     <see langword="true" /> if an item is found, <see langword="false" /> otherwise, or if the queue is empty.
    /// </returns>
    public abstract bool TryPeek(out T item);

    /// <summary>
    ///     Returns an enumerator that iterates through the queue.
    /// </summary>
    /// <returns>An <see cref="T:System.Collections.IEnumerator" /> object that can be used to iterate through the queue.</returns>
    [ExcludeFromCodeCoverage]
    IEnumerator IEnumerable.GetEnumerator() => this.GetEnumerator();

    #endregion

    /// <summary>
    ///     Loads the topmost item from the folder, ensuring its deletion afterwards.
    /// </summary>
    /// <returns>An item, if one exists and can be loaded, a default value otherwise.</returns>
    /// <exception cref="InvalidOperationException">There are no more valid items in the folder.</exception>
    protected T LoadTopmostItem()
    {
        this.RequiresNotDisposed();

        using (this.WriteLock())
        {
            string[] files = this.GetPossibleDataFiles();
            var i = 0;
            string possibleFilePath;
            T obj;

            // Walk the candidate files in order; unreadable/corrupt files are
            // reported via HandleFileLoadProblem and skipped.
            while (true)
            {
                if (i >= files.Length)
                {
                    throw new InvalidOperationException();
                }

                possibleFilePath = files[i];

                try
                {
                    using Stream stream = this.FileShim.OpenRead(possibleFilePath);

                    obj = (T)(this.Serializer.ReadObject(stream) ?? throw new SerializationException());

                    break;
                }
                catch (IOException)
                {
                    this.HandleFileLoadProblem(possibleFilePath);
                    i++;
                }
                catch (UnauthorizedAccessException)
                {
                    this.HandleFileLoadProblem(possibleFilePath);
                    i++;
                }
                catch (SerializationException)
                {
                    this.HandleFileLoadProblem(possibleFilePath);
                    i++;
                }
            }

            // Best-effort deletion: a failure routes the file through the poison
            // handler but the successfully read item is still returned.
            try
            {
                this.FileShim.Delete(possibleFilePath);
            }
            catch (IOException)
            {
                this.HandleFileLoadProblem(possibleFilePath);
            }
            catch (UnauthorizedAccessException)
            {
                this.HandleFileLoadProblem(possibleFilePath);
            }
            // NOTE(review): Delete is not expected to throw SerializationException;
            // this catch looks carried over from the read block above — confirm.
            catch (SerializationException)
            {
                this.HandleFileLoadProblem(possibleFilePath);
            }

            return obj;
        }
    }

    /// <summary>
    ///     Tries the load topmost item and execute an action on it, deleting the topmost object data if the operation is
    ///     successful.
    /// </summary>
    /// <typeparam name="TState">The type of the state object to send to the action.</typeparam>
    /// <param name="actionToInvoke">The action to invoke.</param>
    /// <param name="state">The state object to pass to the invoked action.</param>
    /// <returns><see langword="true" /> if de-queuing and executing is successful, <see langword="false" /> otherwise.</returns>
    protected bool TryLoadTopmostItemWithAction<TState>(
        Action<TState, T> actionToInvoke,
        TState state)
    {
        Requires.NotNull(
            actionToInvoke);

        this.RequiresNotDisposed();

        // Read lock while loading; upgraded to a write lock only for the delete.
        using ReadWriteSynchronizationLocker locker = this.ReadWriteLock();

        string[] files = this.GetPossibleDataFiles();
        var i = 0;

        T obj;
        string possibleFilePath;

        while (true)
        {
            if (i >= files.Length)
            {
                return false;
            }

            possibleFilePath = files[i];

            try
            {
                using Stream stream = this.FileShim.OpenRead(possibleFilePath);

                obj = (T)(this.Serializer.ReadObject(stream) ?? throw new SerializationException());

                break;
            }
            catch (IOException)
            {
                this.HandleFileLoadProblem(possibleFilePath);
                i++;
            }
            catch (UnauthorizedAccessException)
            {
                this.HandleFileLoadProblem(possibleFilePath);
                i++;
            }
            catch (SerializationException)
            {
                this.HandleFileLoadProblem(possibleFilePath);
                i++;
            }
        }

        // Deliberate best-effort: if the action throws, the file is left on disk
        // (not deleted), so the item can be retried later.
        try
        {
            actionToInvoke(
                state,
                obj);
        }
        catch (Exception)
        {
            return false;
        }

        locker.Upgrade();

        try
        {
            this.FileShim.Delete(possibleFilePath);
        }
        catch (IOException)
        {
            this.HandleFileLoadProblem(possibleFilePath);
        }
        catch (UnauthorizedAccessException)
        {
            this.HandleFileLoadProblem(possibleFilePath);
        }
        // NOTE(review): see LoadTopmostItem — SerializationException from Delete
        // is unexpected; presumably a copy of the read block's handlers.
        catch (SerializationException)
        {
            this.HandleFileLoadProblem(possibleFilePath);
        }

        return true;
    }

    /// <summary>
    ///     Asynchronously tries the load topmost item and execute an action on it, deleting the topmost object data if the
    ///     operation is successful.
    /// </summary>
    /// <typeparam name="TState">The type of the state object to send to the action.</typeparam>
    /// <param name="actionToInvoke">The action to invoke.</param>
    /// <param name="state">The state object to pass to the invoked action.</param>
    /// <param name="cancellationToken">The cancellation token for this operation.</param>
    /// <returns><see langword="true" /> if de-queuing and executing is successful, <see langword="false" /> otherwise.</returns>
    [SuppressMessage(
        "Performance",
        "HAA0603:Delegate allocation from a method group",
        Justification = "Acceptable - we're doing a lot of allocation in the Task method anyway.")]
    protected async Task<bool> TryLoadTopmostItemWithActionAsync<TState>(
        Func<TState, T, Task> actionToInvoke,
        TState state,
        CancellationToken cancellationToken = default)
    {
        // TODO BREAKING: In next breaking-changes version, switch this to a ValueTask-returning method
        // Thin adapter over the ValueTask overload; the local function wraps the
        // Task-returning action.
        // NOTE(review): these awaits omit ConfigureAwait(false), unlike the
        // ValueTask overload below — confirm whether that is intentional.
        return await this.TryLoadTopmostItemWithActionAsync(
            InvokeActionLocal,
            state,
            cancellationToken);

        async ValueTask InvokeActionLocal(
            TState stateInternal,
            T obj)
        {
            await actionToInvoke(
                stateInternal,
                obj);
        }
    }

    /// <summary>
    ///     Asynchronously tries the load topmost item and execute an action on it, deleting the topmost object data if the
    ///     operation is successful.
    /// </summary>
    /// <typeparam name="TState">The type of the state object to send to the action.</typeparam>
    /// <param name="actionToInvoke">The action to invoke.</param>
    /// <param name="state">The state object to pass to the invoked action.</param>
    /// <param name="cancellationToken">The cancellation token for this operation.</param>
    /// <returns><see langword="true" /> if de-queuing and executing is successful, <see langword="false" /> otherwise.</returns>
    protected async ValueTask<bool> TryLoadTopmostItemWithActionAsync<TState>(
        Func<TState, T, ValueTask> actionToInvoke,
        TState state,
        CancellationToken cancellationToken = default)
    {
        Requires.NotNull(actionToInvoke);

        this.RequiresNotDisposed();

        using ReadWriteSynchronizationLocker locker = this.ReadWriteLock();

        string[] files = this.GetPossibleDataFiles();
        var i = 0;

        T obj;
        string possibleFilePath;

        while (true)
        {
            // Cancellation is only observed between file attempts, not mid-read.
            cancellationToken.ThrowIfCancellationRequested();

            if (i >= files.Length)
            {
                return false;
            }

            possibleFilePath = files[i];

            try
            {
#if FRAMEWORK_ADVANCED
                await using Stream stream = this.FileShim.OpenRead(possibleFilePath);
#else
                using Stream stream = this.FileShim.OpenRead(possibleFilePath);
#endif

                obj = (T)(this.Serializer.ReadObject(stream) ?? throw new SerializationException());

                break;
            }
            catch (IOException)
            {
                this.HandleFileLoadProblem(possibleFilePath);
                i++;
            }
            catch (UnauthorizedAccessException)
            {
                this.HandleFileLoadProblem(possibleFilePath);
                i++;
            }
            catch (SerializationException)
            {
                this.HandleFileLoadProblem(possibleFilePath);
                i++;
            }
        }

        // Deliberate best-effort: an action failure leaves the file on disk.
        try
        {
            await actionToInvoke(
                    state,
                    obj)
                .ConfigureAwait(false);
        }
        catch (Exception)
        {
            return false;
        }

        locker.Upgrade();

        try
        {
            await this.FileShim.DeleteAsync(
                possibleFilePath,
                cancellationToken);
        }
        catch (IOException)
        {
            this.HandleFileLoadProblem(possibleFilePath);
        }
        catch (UnauthorizedAccessException)
        {
            this.HandleFileLoadProblem(possibleFilePath);
        }
        // NOTE(review): SerializationException from DeleteAsync is unexpected;
        // presumably copied from the read block's handlers — confirm.
        catch (SerializationException)
        {
            this.HandleFileLoadProblem(possibleFilePath);
        }

        return true;
    }

    /// <summary>
    ///     Tries to load the topmost item and execute an action on it, deleting the topmost object data if the operation is
    ///     successful.
    /// </summary>
    /// <typeparam name="TState">The type of the state object to send to the action.</typeparam>
    /// <param name="predicate">The predicate.</param>
    /// <param name="actionToInvoke">The action to invoke.</param>
    /// <param name="state">The state object to pass to the invoked action.</param>
    /// <returns>The number of items that have been de-queued.</returns>
    /// <remarks>
    ///     <para>
    ///         Warning! This method has the potential of overrunning its read/write lock timeouts. Please ensure that the
    ///         <paramref name="predicate" /> method
    ///         filters out items in a way that limits the amount of data passing through.
    ///     </para>
    /// </remarks>
    protected int TryLoadWhilePredicateWithAction<TState>(
        Func<TState, T, bool> predicate,
        Action<TState, IEnumerable<T>> actionToInvoke,
        TState state)
    {
        Requires.NotNull(
            predicate);
        Requires.NotNull(
            actionToInvoke);

        this.RequiresNotDisposed();

        using ReadWriteSynchronizationLocker locker = this.ReadWriteLock();

        string[] files = this.GetPossibleDataFiles();
        var i = 0;

        // The two lists grow in lockstep: one object per successfully read path.
        var accumulatedObjects = new List<T>();
        var accumulatedPaths = new List<string>();

        while (i < files.Length)
        {
            var possibleFilePath = files[i];

            try
            {
                T obj;

                using (Stream stream = this.FileShim.OpenRead(possibleFilePath))
                {
                    obj = (T)(this.Serializer.ReadObject(stream) ?? throw new SerializationException());
                }

                // First item rejected by the predicate stops accumulation.
                if (!predicate(
                        state,
                        obj))
                {
                    break;
                }

                accumulatedObjects.Add(obj);
                accumulatedPaths.Add(possibleFilePath);

                i++;
            }
            catch (IOException)
            {
                this.HandleFileLoadProblem(possibleFilePath);
                i++;
            }
            catch (UnauthorizedAccessException)
            {
                this.HandleFileLoadProblem(possibleFilePath);
                i++;
            }
            catch (SerializationException)
            {
                this.HandleFileLoadProblem(possibleFilePath);
                i++;
            }
        }

        // Nothing accumulated: both lists are empty (they grow together), so this
        // effectively returns 0 without invoking the action.
        if (accumulatedObjects.Count <= 0)
        {
            return accumulatedPaths.Count;
        }

        // Deliberate best-effort: if the action throws, nothing is deleted and 0
        // is reported so the items can be retried later.
        try
        {
            actionToInvoke(
                state,
                accumulatedObjects);
        }
        catch (Exception)
        {
            return 0;
        }

        locker.Upgrade();

        foreach (var possibleFilePath in accumulatedPaths)
        {
            try
            {
                this.FileShim.Delete(possibleFilePath);
            }
            catch (IOException)
            {
                this.HandleFileLoadProblem(possibleFilePath);
            }
            catch (UnauthorizedAccessException)
            {
                this.HandleFileLoadProblem(possibleFilePath);
            }
            // NOTE(review): SerializationException from Delete is unexpected;
            // presumably copied from the read block's handlers — confirm.
            catch (SerializationException)
            {
                this.HandleFileLoadProblem(possibleFilePath);
            }
        }

        return accumulatedPaths.Count;
    }

    /// <summary>
    ///     Asynchronously tries to load the topmost item and execute an action on it, deleting the topmost object data if the
    ///     operation is successful.
    /// </summary>
    /// <typeparam name="TState">The type of the state object to send to the action.</typeparam>
    /// <param name="predicate">The predicate.</param>
    /// <param name="actionToInvoke">The action to invoke.</param>
    /// <param name="state">The state object to pass to the invoked action.</param>
    /// <param name="cancellationToken">The cancellation token for this operation.</param>
    /// <returns>The number of items that have been de-queued.</returns>
    /// <remarks>
    ///     <para>
    ///         Warning! This method has the potential of overrunning its read/write lock timeouts. Please ensure that the
    ///         <paramref name="predicate" /> method
    ///         filters out items in a way that limits the amount of data passing through.
    ///     </para>
    /// </remarks>
    [SuppressMessage(
        "Performance",
        "HAA0603:Delegate allocation from a method group",
        Justification = "Acceptable - we're doing a lot of allocation in the Task method anyway.")]
    protected async Task<int> TryLoadWhilePredicateWithActionAsync<TState>(
        Func<TState, T, Task<bool>> predicate,
        Action<TState, IEnumerable<T>> actionToInvoke,
        TState state,
        CancellationToken cancellationToken = default)
    {
        // TODO BREAKING: In next breaking-changes version, switch this to a ValueTask-returning method
        // Thin adapter over the ValueTask overload; the local function wraps the
        // Task-returning predicate.
        return await this.TryLoadWhilePredicateWithActionAsync(
            InvokePredicateLocal,
            actionToInvoke,
            state,
            cancellationToken);

        async ValueTask<bool> InvokePredicateLocal(
            TState stateInternal,
            T obj)
        {
            return await predicate(
                stateInternal,
                obj);
        }
    }

    /// <summary>
    ///     Asynchronously tries to load the topmost item and execute an action on it, deleting the topmost object data if the
    ///     operation is successful.
    /// </summary>
    /// <typeparam name="TState">The type of the state object to send to the action.</typeparam>
    /// <param name="predicate">The predicate.</param>
    /// <param name="actionToInvoke">The action to invoke.</param>
    /// <param name="state">The state object to pass to the invoked action.</param>
    /// <param name="cancellationToken">The cancellation token for this operation.</param>
    /// <returns>The number of items that have been de-queued.</returns>
    /// <remarks>
    ///     <para>
    ///         Warning! This method has the potential of overrunning its read/write lock timeouts. Please ensure that the
    ///         <paramref name="predicate" /> method
    ///         filters out items in a way that limits the amount of data passing through.
    ///     </para>
    /// </remarks>
    protected async ValueTask<int> TryLoadWhilePredicateWithActionAsync<TState>(
        Func<TState, T, ValueTask<bool>> predicate,
        Action<TState, IEnumerable<T>> actionToInvoke,
        TState state,
        CancellationToken cancellationToken = default)
    {
        Requires.NotNull(predicate);
        Requires.NotNull(actionToInvoke);

        this.RequiresNotDisposed();

        using ReadWriteSynchronizationLocker locker = this.ReadWriteLock();

        string[] files = this.GetPossibleDataFiles();
        var i = 0;

        var accumulatedObjects = new List<T>();
        var accumulatedPaths = new List<string>();

        while (i < files.Length)
        {
            // Soft cancellation inside the loop: stop accumulating, then let the
            // post-loop ThrowIfCancellationRequested calls surface it.
            if (cancellationToken.IsCancellationRequested)
            {
                break;
            }

            var possibleFilePath = files[i];

            try
            {
                T obj;

#if FRAMEWORK_ADVANCED
                await using (Stream stream = this.FileShim.OpenRead(possibleFilePath))
#else
                using (Stream stream = this.FileShim.OpenRead(possibleFilePath))
#endif
                {
                    obj = (T)(this.Serializer.ReadObject(stream) ?? throw new SerializationException());
                }

                if (!await predicate(
                            state,
                            obj)
                        .ConfigureAwait(false))
                {
                    break;
                }

                accumulatedObjects.Add(obj);
                accumulatedPaths.Add(possibleFilePath);

                i++;
            }
            catch (IOException)
            {
                this.HandleFileLoadProblem(possibleFilePath);
                i++;
            }
            catch (UnauthorizedAccessException)
            {
                this.HandleFileLoadProblem(possibleFilePath);
                i++;
            }
            catch (SerializationException)
            {
                this.HandleFileLoadProblem(possibleFilePath);
                i++;
            }
        }

        if (accumulatedObjects.Count <= 0)
        {
            cancellationToken.ThrowIfCancellationRequested();

            // Both lists grow together, so this is effectively 0.
            return accumulatedPaths.Count;
        }

        // Deliberate best-effort: an action failure leaves all files on disk.
        try
        {
            actionToInvoke(
                state,
                accumulatedObjects);
        }
        catch (Exception)
        {
            cancellationToken.ThrowIfCancellationRequested();

            return 0;
        }

        locker.Upgrade();

        foreach (var possibleFilePath in accumulatedPaths)
        {
            try
            {
                await this.FileShim.DeleteAsync(
                    possibleFilePath,
                    cancellationToken);
            }
            catch (IOException)
            {
                this.HandleFileLoadProblem(possibleFilePath);
            }
            catch (UnauthorizedAccessException)
            {
                this.HandleFileLoadProblem(possibleFilePath);
            }
            // NOTE(review): SerializationException from DeleteAsync is unexpected;
            // presumably copied from the read block's handlers — confirm.
            catch (SerializationException)
            {
                this.HandleFileLoadProblem(possibleFilePath);
            }
        }

        cancellationToken.ThrowIfCancellationRequested();

        return accumulatedPaths.Count;
    }

    /// <summary>
    ///     Asynchronously tries to load the topmost item and execute an action on it, deleting the topmost object data if the
    ///     operation is successful.
    /// </summary>
    /// <typeparam name="TState">The type of the state object to send to the action.</typeparam>
    /// <param name="predicate">The predicate.</param>
    /// <param name="actionToInvoke">The action to invoke.</param>
    /// <param name="state">The state object to pass to the invoked action.</param>
    /// <param name="cancellationToken">The cancellation token for this operation.</param>
    /// <returns>The number of items that have been de-queued.</returns>
    /// <remarks>
    ///     <para>
    ///         Warning! This method has the potential of overrunning its read/write lock timeouts.
Please ensure that the /// <paramref name="predicate" /> method /// filters out items in a way that limits the amount of data passing through. /// </para> /// </remarks> [SuppressMessage( "Performance", "HAA0603:Delegate allocation from a method group", Justification = "Acceptable - we're doing a lot of allocation in the Task method anyway.")] protected async Task<int> TryLoadWhilePredicateWithActionAsync<TState>( Func<TState, T, bool> predicate, Func<TState, IEnumerable<T>, Task> actionToInvoke, TState state, CancellationToken cancellationToken = default) { // TODO BREAKING: In next breaking-changes version, switch this to a ValueTask-returning method return await this.TryLoadWhilePredicateWithActionAsync( predicate, InvokeActionLocal, state, cancellationToken); async ValueTask InvokeActionLocal( TState stateInternal, IEnumerable<T> obj) { await actionToInvoke( stateInternal, obj); } } /// <summary> /// Asynchronously tries to load the topmost item and execute an action on it, deleting the topmost object data if the /// operation is successful. /// </summary> /// <typeparam name="TState">The type of the state object to send to the action.</typeparam> /// <param name="predicate">The predicate.</param> /// <param name="actionToInvoke">The action to invoke.</param> /// <param name="state">The state object to pass to the invoked action.</param> /// <param name="cancellationToken">The cancellation token for this operation.</param> /// <returns>The number of items that have been de-queued.</returns> /// <remarks> /// <para> /// Warning! This method has the potential of overrunning its read/write lock timeouts. Please ensure that the /// <paramref name="predicate" /> method /// filters out items in a way that limits the amount of data passing through. 
/// </para> /// </remarks> protected async ValueTask<int> TryLoadWhilePredicateWithActionAsync<TState>( Func<TState, T, bool> predicate, Func<TState, IEnumerable<T>, ValueTask> actionToInvoke, TState state, CancellationToken cancellationToken = default) { Requires.NotNull(predicate); Requires.NotNull(actionToInvoke); this.RequiresNotDisposed(); using ReadWriteSynchronizationLocker locker = this.ReadWriteLock(); string[] files = this.GetPossibleDataFiles(); var i = 0; var accumulatedObjects = new List<T>(); var accumulatedPaths = new List<string>(); while (i < files.Length) { if (cancellationToken.IsCancellationRequested) { break; } var possibleFilePath = files[i]; try { T obj; #if FRAMEWORK_ADVANCED await using (Stream stream = this.FileShim.OpenRead(possibleFilePath)) #else using (Stream stream = this.FileShim.OpenRead(possibleFilePath)) #endif { obj = (T)(this.Serializer.ReadObject(stream) ?? throw new SerializationException()); } if (!predicate( state, obj)) { break; } accumulatedObjects.Add(obj); accumulatedPaths.Add(possibleFilePath); i++; } catch (IOException) { this.HandleFileLoadProblem(possibleFilePath); i++; } catch (UnauthorizedAccessException) { this.HandleFileLoadProblem(possibleFilePath); i++; } catch (SerializationException) { this.HandleFileLoadProblem(possibleFilePath); i++; } } if (accumulatedObjects.Count <= 0) { cancellationToken.ThrowIfCancellationRequested(); return accumulatedPaths.Count; } try { await actionToInvoke( state, accumulatedObjects) .ConfigureAwait(false); } catch (Exception) { cancellationToken.ThrowIfCancellationRequested(); return 0; } locker.Upgrade(); foreach (var possibleFilePath in accumulatedPaths) { try { await this.FileShim.DeleteAsync( possibleFilePath, cancellationToken); } catch (IOException) { this.HandleFileLoadProblem(possibleFilePath); } catch (UnauthorizedAccessException) { this.HandleFileLoadProblem(possibleFilePath); } catch (SerializationException) { this.HandleFileLoadProblem(possibleFilePath); } } 
cancellationToken.ThrowIfCancellationRequested(); return accumulatedPaths.Count; } /// <summary> /// Asynchronously tries to load the topmost item and execute an action on it, deleting the topmost object data if the /// operation is successful. /// </summary> /// <typeparam name="TState">The type of the state object to send to the action.</typeparam> /// <param name="predicate">The predicate.</param> /// <param name="actionToInvoke">The action to invoke.</param> /// <param name="state">The state object to pass to the invoked action.</param> /// <param name="cancellationToken">The cancellation token for this operation.</param> /// <returns>The number of items that have been de-queued.</returns> /// <remarks> /// <para> /// Warning! This method has the potential of overrunning its read/write lock timeouts. Please ensure that the /// <paramref name="predicate" /> method /// filters out items in a way that limits the amount of data passing through. /// </para> /// </remarks> [SuppressMessage( "Performance", "HAA0603:Delegate allocation from a method group", Justification = "Acceptable - we're doing a lot of allocation in the Task method anyway.")] protected async Task<int> TryLoadWhilePredicateWithActionAsync<TState>( Func<TState, T, Task<bool>> predicate, Func<TState, IEnumerable<T>, Task> actionToInvoke, TState state, CancellationToken cancellationToken = default) { // TODO BREAKING: In next breaking-changes version, switch this to a ValueTask-returning method return await this.TryLoadWhilePredicateWithActionAsync( InvokePredicateLocal, InvokeActionLocal, state, cancellationToken); async ValueTask<bool> InvokePredicateLocal( TState stateInternal, T obj) { return await predicate( stateInternal, obj); } async ValueTask InvokeActionLocal( TState stateInternal, IEnumerable<T> obj) { await actionToInvoke( stateInternal, obj); } } /// <summary> /// Asynchronously tries to load the topmost item and execute an action on it, deleting the topmost object data if the /// 
operation is successful. /// </summary> /// <typeparam name="TState">The type of the state object to send to the action.</typeparam> /// <param name="predicate">The predicate.</param> /// <param name="actionToInvoke">The action to invoke.</param> /// <param name="state">The state object to pass to the invoked action.</param> /// <param name="cancellationToken">The cancellation token for this operation.</param> /// <returns>The number of items that have been de-queued.</returns> /// <remarks> /// <para> /// Warning! This method has the potential of overrunning its read/write lock timeouts. Please ensure that the /// <paramref name="predicate" /> method /// filters out items in a way that limits the amount of data passing through. /// </para> /// </remarks> protected async ValueTask<int> TryLoadWhilePredicateWithActionAsync<TState>( Func<TState, T, ValueTask<bool>> predicate, Func<TState, IEnumerable<T>, ValueTask> actionToInvoke, TState state, CancellationToken cancellationToken = default) { Requires.NotNull(predicate); Requires.NotNull(actionToInvoke); this.RequiresNotDisposed(); using ReadWriteSynchronizationLocker locker = this.ReadWriteLock(); string[] files = this.GetPossibleDataFiles(); var i = 0; var accumulatedObjects = new List<T>(); var accumulatedPaths = new List<string>(); while (i < files.Length) { if (cancellationToken.IsCancellationRequested) { break; } var possibleFilePath = files[i]; try { T obj; #if FRAMEWORK_ADVANCED await using (Stream stream = this.FileShim.OpenRead(possibleFilePath)) #else using (Stream stream = this.FileShim.OpenRead(possibleFilePath)) #endif { obj = (T)(this.Serializer.ReadObject(stream) ?? 
throw new SerializationException()); } if (!await predicate( state, obj) .ConfigureAwait(false)) { break; } accumulatedObjects.Add(obj); accumulatedPaths.Add(possibleFilePath); i++; } catch (IOException) { this.HandleFileLoadProblem(possibleFilePath); i++; } catch (UnauthorizedAccessException) { this.HandleFileLoadProblem(possibleFilePath); i++; } catch (SerializationException) { this.HandleFileLoadProblem(possibleFilePath); i++; } } if (accumulatedObjects.Count <= 0) { cancellationToken.ThrowIfCancellationRequested(); return accumulatedPaths.Count; } try { await actionToInvoke( state, accumulatedObjects) .ConfigureAwait(false); } catch (Exception) { cancellationToken.ThrowIfCancellationRequested(); return 0; } locker.Upgrade(); foreach (var possibleFilePath in accumulatedPaths) { try { await this.FileShim.DeleteAsync( possibleFilePath, cancellationToken); } catch (IOException) { this.HandleFileLoadProblem(possibleFilePath); } catch (UnauthorizedAccessException) { this.HandleFileLoadProblem(possibleFilePath); } catch (SerializationException) { this.HandleFileLoadProblem(possibleFilePath); } } cancellationToken.ThrowIfCancellationRequested(); return accumulatedPaths.Count; } /// <summary> /// Peeks at the topmost item in the folder. /// </summary> /// <returns>An item, if one exists and can be loaded, or an exception otherwise.</returns> /// <exception cref="InvalidOperationException">There are no more valid items in the folder.</exception> protected T PeekTopmostItem() { if (!this.TryPeekTopmostItem(out T item)) { throw new InvalidOperationException(); } return item; } /// <summary> /// Peeks at the topmost item in the folder. /// </summary> /// <param name="item">The item.</param> /// <returns> /// <see langword="true" /> if an item is found, <see langword="false" /> otherwise, or if the queue is empty. 
        /// </returns>
        protected bool TryPeekTopmostItem(out T item)
        {
            this.RequiresNotDisposed();

            // Peeking only reads; a read lock is sufficient.
            using (this.ReadLock())
            {
                string[] files = this.GetPossibleDataFiles();
                var i = 0;

                // Return the first file that deserializes successfully, poisoning failures.
                while (true)
                {
                    if (i >= files.Length)
                    {
                        // No loadable item remains.
                        item = default!;
                        return false;
                    }

                    var possibleFilePath = files[i];

                    try
                    {
                        using Stream stream = this.FileShim.OpenRead(possibleFilePath);

                        // A null deserialization result is treated the same as a corrupt file.
                        item = (T)(this.Serializer.ReadObject(stream) ?? throw new SerializationException());

                        return true;
                    }
                    catch (IOException)
                    {
                        this.HandleFileLoadProblem(possibleFilePath);
                        i++;
                    }
                    catch (UnauthorizedAccessException)
                    {
                        this.HandleFileLoadProblem(possibleFilePath);
                        i++;
                    }
                    catch (SerializationException)
                    {
                        this.HandleFileLoadProblem(possibleFilePath);
                        i++;
                    }
                }
            }
        }

        /// <summary>
        /// Loads the items from the folder.
        /// </summary>
        /// <returns>An item, if one exists and can be loaded, a default value otherwise.</returns>
        /// <remarks>
        /// <para>Warning! Not synchronized.</para>
        /// <para>
        /// This method is not synchronized between threads. Please ensure that you only use this method in a guaranteed
        /// one-time-access manner (such as a constructor).
        /// </para>
        /// </remarks>
        /// <exception cref="InvalidOperationException">There are no more valid items in the folder.</exception>
        protected IEnumerable<Tuple<T, string>> LoadValidItemObjectHandles()
        {
            // Lazy iterator: each file is opened only when the caller advances the enumeration.
            foreach (var possibleFilePath in this.GetPossibleDataFiles())
            {
                T obj;

                try
                {
                    using Stream stream = this.FileShim.OpenRead(possibleFilePath);

                    // A null deserialization result is treated the same as a corrupt file.
                    obj = (T)(this.Serializer.ReadObject(stream) ?? throw new SerializationException());
                }
                catch (IOException)
                {
                    // Unreadable/corrupt file: poison it and skip to the next one.
                    this.HandleFileLoadProblem(possibleFilePath);
                    continue;
                }
                catch (UnauthorizedAccessException)
                {
                    this.HandleFileLoadProblem(possibleFilePath);
                    continue;
                }
                catch (SerializationException)
                {
                    this.HandleFileLoadProblem(possibleFilePath);
                    continue;
                }

                yield return new Tuple<T, string>(
                    obj,
                    possibleFilePath);
            }
        }

        /// <summary>
        /// Saves the new item to the disk.
        /// </summary>
        /// <param name="item">The item to save.</param>
        /// <returns>The path of the newly-saved file.</returns>
        /// <exception cref="InvalidOperationException">
        /// We have reached the maximum number of items saved in the same femtosecond.
        /// This is theoretically not possible.
        /// </exception>
        [SuppressMessage(
            "Performance",
            "HAA0601:Value type to reference type conversion causing boxing allocation",
            Justification = "This is unavoidable, considering how the method works.")]
        protected string SaveNewItem(T item)
        {
            this.RequiresNotDisposed();

            using (this.WriteLock())
            {
                var i = 1;
                string filePath;

                DateTime now = DateTime.UtcNow;

                // File names are timestamp-based; the counter suffix disambiguates items
                // saved within the same timestamp tick.
                do
                {
                    filePath = this.PathShim.Combine(
                        this.DataFolderPath,
                        $"{now:yyyy.MM.dd.HH.mm.ss.fffffff}.{i}.dat");
                    i++;

                    if (i == int.MaxValue)
                    {
                        throw new InvalidOperationException();
                    }
                }
                while (this.FileShim.Exists(filePath));

                using (Stream stream = this.FileShim.Create(filePath))
                {
                    this.Serializer.WriteObject(
                        stream,
                        item!);
                }

                return filePath;
            }
        }

        /// <summary>
        /// Clears the data.
        /// </summary>
        protected void ClearData()
        {
            this.RequiresNotDisposed();

            using (this.WriteLock())
            {
                // Every data file is removed via the poison-handling path so that
                // undeletable files are tracked rather than silently left behind.
                foreach (var possibleFilePath in this.DirectoryShim.EnumerateFiles(
                        this.DataFolderPath,
                        "*.dat")
                    .ToArray())
                {
                    this.HandleImpossibleMoveToPoison(possibleFilePath);
                }
            }

            this.FixUnmovableReferences();
        }

        /// <summary>
        /// Gets the possible data files.
        /// </summary>
        /// <returns>An array of data file names.</returns>
        protected string[] GetPossibleDataFiles() =>
            this.DirectoryShim.EnumerateFiles(
                    this.DataFolderPath,
                    "*.dat")
                .Except(this.poisonedUnremovableFiles)
                .ToArray();

        /// <summary>
        /// Handles the file load problem.
        /// </summary>
        /// <param name="possibleFilePath">The possible file path.</param>
        private void HandleFileLoadProblem(string possibleFilePath)
        {
            var newFilePath = this.PathShim.Combine(
                this.PoisonFolderPath,
                this.PathShim.GetFileName(possibleFilePath));

            // Seemingly-redundant catch code below will be replaced at a later time with an opt-in-based logging solution
            // and a more try/finally general approach

            // If an item by the same name exists in the poison queue, delete it
            try
            {
                if (this.FileShim.Exists(newFilePath))
                {
                    this.FileShim.Delete(newFilePath);
                }
            }
            catch (IOException)
            {
                this.HandleImpossibleMoveToPoison(possibleFilePath);
                return;
            }
            catch (UnauthorizedAccessException)
            {
                this.HandleImpossibleMoveToPoison(possibleFilePath);
                return;
            }

            try
            {
                // Move to poison queue
                this.FileShim.Move(
                    possibleFilePath,
                    newFilePath);
            }
            catch (IOException)
            {
                // Could not move: fall back to deleting (or tracking) the original file.
                this.HandleImpossibleMoveToPoison(possibleFilePath);
            }
            catch (UnauthorizedAccessException)
            {
                this.HandleImpossibleMoveToPoison(possibleFilePath);
            }
        }

        /// <summary>
        /// Handles the situation where it is impossible to move a file to poison.
        /// </summary>
        /// <param name="possibleFilePath">The possible file path.</param>
        private void HandleImpossibleMoveToPoison(string possibleFilePath)
        {
            try
            {
                // If deletion was not possible, delete the offending item
                this.FileShim.Delete(possibleFilePath);
            }
            catch (IOException)
            {
                // Could not delete either: remember the file so it is excluded from
                // future data-file enumerations (see GetPossibleDataFiles).
                this.poisonedUnremovableFiles.Add(possibleFilePath);
            }
            catch (UnauthorizedAccessException)
            {
                this.poisonedUnremovableFiles.Add(possibleFilePath);
            }
        }

        /// <summary>
        /// Fixes the unmovable references.
        /// </summary>
        [SuppressMessage(
            "StyleCop.CSharp.LayoutRules",
            "SA1501:Statement should not be on a single line",
            Justification = "It's fine.")]
        private void FixUnmovableReferences()
        {
            // Forget tracked unremovable files that have since disappeared from disk.
            foreach (var file in this.poisonedUnremovableFiles.ToArray())
            {
                try
                {
                    if (!this.FileShim.Exists(file))
                    {
                        this.poisonedUnremovableFiles.Remove(file);
                    }
                }
                catch (IOException) { }
                catch (UnauthorizedAccessException) { }
            }
        }

        #endregion
    }
using System;
using System.Collections.Generic;
using System.Text;
using FlatRedBall;
using FlatRedBall.Input;
using FlatRedBall.AI.Pathfinding;
using FlatRedBall.Graphics.Animation;
using FlatRedBall.Graphics.Particle;
using FlatRedBall.Math.Geometry;
using FlatRedBall.Math.Splines;
using Cursor = FlatRedBall.Gui.Cursor;
using GuiManager = FlatRedBall.Gui.GuiManager;
using FlatRedBall.Localization;
using FlatRedBall.Instructions;

#if FRB_XNA || SILVERLIGHT
using Keys = Microsoft.Xna.Framework.Input.Keys;
using Vector3 = Microsoft.Xna.Framework.Vector3;
using Texture2D = Microsoft.Xna.Framework.Graphics.Texture2D;
using GlueTestProject.Entities;
using FlatRedBall.ManagedSpriteGroups;
#endif

namespace GlueTestProject.Screens
{
    // Engine-driven test screen: each check throws on failure, so a completed run
    // (IsActivityFinished set in CustomActivity) means all assertions passed.
    public partial class FlatRedBallTypeScreen
    {
        #region Fields

        // Flags ensuring each timed check in CustomActivity runs exactly once.
        bool mHasCheckedX = false;
        bool mHasCheckedPosition = false;
        bool mHasCheckedTextInterpolation = false;

        SpriteFrame layeredSpriteFrameInstantiatedInCode;
        SpriteFrame unlayeredSpriteFrameInstantiatedInCode;

        // NOTE(review): identifier is missing an 'e' ("Invisibl") - kept as-is to avoid churn.
        Sprite managedInvisiblTestSprite1;

        #endregion

        // Runs the one-time assertions and sets up the timed/instruction-based checks
        // that CustomActivity verifies later.
        void CustomInitialize()
        {
            if (!SpriteManager.OrderedSprites.Contains(this.SpriteObject))
            {
                throw new Exception("The Sprite object is not being added to the managers but it should be.");
            }
            if (ShapeCollectionFile.AxisAlignedRectangles.Contains(this.InvisibleRectangle) == false)
            {
                throw new Exception("The ShapeCollection does not contain the rectangle - possibly because it's being improperly cloned");
            }
            if (this.InvisibleRectangle.Visible)
            {
                throw new Exception("Rectangles that come from files that have their Visible set to false are still visible");
            }

            this.SceneInstanceSetFromFileAtRuntime = SceneFile;

            // Queue instructions; results are checked in CustomActivity after the delays elapse.
            SpriteWithInstructions.Set("X").To(4.0f).After(.1f);
            SpriteWithInstructions.Set("Position").To(Vector3.One).After(.25);

            TestingTextInterpolationInstance.CurrentTextValuesState = InterpolationEntity.TextValues.Transparent;
            TestingTextInterpolationInstance.InterpolateToState(InterpolationEntity.TextValues.Opaque, 1);

            this.DynamicallyAssignedSceneSourceFile = SceneOption1;
            if(this.DynamicallyAssignedSceneSourceFile != SceneOption1)
            {
                throw new Exception("Setting the source file does not do anything");
            }

            // The CameraModifyingEntity sets the Z.  This should persist
            if (SpriteManager.Camera.Z != CameraModifyingEntity.CameraZToSet)
            {
                throw new Exception("The CameraModifyingEntity should modify the Camera's Z, but it's not!");
            }

            ManuallyUpdatedEntityInstance.ConvertToManuallyUpdated();

            // let's make sure that this thing actually has a collision function:
            bool didCollide = CollisionEntityInstance.CollideAgainst(CollisionEntityInstance2);
            // And that it worked
            if (!didCollide)
            {
                throw new Exception("ICollidable entities aren't properly detecting collisions");
            }
            CollisionEntityInstance.CollideAgainstMove(CollisionEntityInstance2, 1, 0);
            CollisionEntityInstance.CollideAgainstBounce(CollisionEntityInstance2, 1, 0, 1);

            if (this.LayerInstance.Sprites.Contains(SpriteFrameInheritingEntityInstanceLayered.CenterSprite) == false)
            {
                throw new Exception("SpriteFrame-inheriting entities do not get put on Layers properly");
            }
            if (this.LayerInstance.Sprites.Contains(this.SpriteInheritingEntityInstanceLayered) == false)
            {
                throw new Exception("SpriteFrame-inheriting entities do not get put on Layers properly");
            }
            if (this.LayerInstance.Texts.Contains(TextInheritingEntityInstanceLayered) == false)
            {
                throw new Exception("SpriteFrame-inheriting entities do not get put on Layers properly");
            }

            SpriteFrameInheritingEntityInstanceLayered.MoveToLayer(LayerInstance2);
            SpriteInheritingEntityInstanceLayered.MoveToLayer(LayerInstance2);
            TextInheritingEntityInstanceLayered.MoveToLayer(LayerInstance2);

            // Make sure that entities with objects that are not instantiated can be moved to layers:
            NoInstantiationForMoveToLayer.MoveToLayer(LayerInstance2);

            if (this.LayerInstance2.Sprites.Contains(SpriteFrameInheritingEntityInstanceLayered.CenterSprite) == false)
            {
                throw new Exception("SpriteFrame-inheriting entities aren't moved to other layers properly");
            }
            if (this.LayerInstance2.Sprites.Contains(this.SpriteInheritingEntityInstanceLayered) == false)
            {
                throw new Exception("SpriteFrame-inheriting entities aren't moved to other layers properly");
            }
            if (this.LayerInstance2.Texts.Contains(TextInheritingEntityInstanceLayered) == false)
            {
                throw new Exception("SpriteFrame-inheriting entities aren't moved to other layers properly");
            }

            SetToInvisibleTestTextVisibility.Visible = false;
            if (SetToInvisibleTestTextVisibility.TextInstance.Visible)
            {
                throw new Exception("Setting an Entity that inherits from a FRB type to Invisible should set its IsContainer NOS to invisible too");
            }

            SpriteManager.AddToLayer(UnlayeredSpriteFrameNotAllSidesMoveToLayer, this.LayerInstance);

            // Now let's test SpriteFrames that we instantiate and add to layer right away
            layeredSpriteFrameInstantiatedInCode = new SpriteFrame(Aura, SpriteFrame.BorderSides.Left);
            SpriteManager.AddToLayer(layeredSpriteFrameInstantiatedInCode, LayerInstance);

            unlayeredSpriteFrameInstantiatedInCode = new SpriteFrame(Aura, SpriteFrame.BorderSides.Left);
            SpriteManager.AddToLayer(unlayeredSpriteFrameInstantiatedInCode, null);

            // Make sure AARects have the "all" reposition direction initially:
            var rectangle = new AxisAlignedRectangle();
            if(rectangle.RepositionDirections != RepositionDirections.All)
            {
                throw new Exception("AARects should have the All reposition direction, but they don't.");
            }

            managedInvisiblTestSprite1 = new Sprite();
            // January 25, 2015
            // A FRB user found a
            // crash bug occurring
            // if adding a Sprite as
            // a ManagedInvisibleSprite,
            // then later adding it to a Layer.
            // I was able to reproduce it in the
            // test project.  Before the bug was fixed
            // the engine would crash internally.
            SpriteManager.AddManagedInvisibleSprite(managedInvisiblTestSprite1);
            SpriteManager.AddToLayer(managedInvisiblTestSprite1, this.LayerInstance);

            // The IDrawableBatchEntity depends on draw calls happening, and we
            // want to make sure that they don't get skipped on platforms like iOS
            // where performance may be lower than PC. We're going to force draw calls
            // to happen after every activity.
            FlatRedBallServices.Game.IsFixedTimeStep = false;
        }

        // Verifies the timed instruction/interpolation results queued in CustomInitialize,
        // then ends the screen once all checks have run.
        void CustomActivity(bool firstTimeCalled)
        {
            // We need this screen to survive a while to make sure the emitter is emitting properly
            //if (!firstTimeCalled)
            //{
            //    IsActivityFinished = true;
            //}
            if(!firstTimeCalled)
            {
                if (!mHasCheckedX && this.PauseAdjustedSecondsSince(0) > .21f)
                {
                    if (SpriteWithInstructions.X != 4.0f)
                    {
                        throw new Exception("Property instructions are not working");
                    }
                    mHasCheckedX = true;
                }
                if (!mHasCheckedPosition && this.PauseAdjustedSecondsSince(0) > .51f)
                {
                    if (SpriteWithInstructions.Position != Vector3.One)
                    {
                        throw new Exception("Field instructions are not working");
                    }
                    mHasCheckedPosition = true;
                }
                if (!mHasCheckedTextInterpolation && this.PauseAdjustedSecondsSince(0) > .5f)
                {
                    if (TestingTextInterpolationInstance.TextInstanceX == 0)
                    {
                        throw new Exception("Text position interpolation over time doesn't work");
                    }
                    if (TestingTextInterpolationInstance.TextInstanceAlpha == 0)
                    {
                        throw new Exception("Text alpha interpolation over time doesn't work");
                    }
                    mHasCheckedTextInterpolation = true;
                }

                const float secondsToLast = .66f;
                if (this.PauseAdjustedSecondsSince(0) > secondsToLast && this.IDrawableBatchEntityInstance.HasFinishedTests)
                {
                    IsActivityFinished = true;
                }
            }
        }

        // Removes the objects this screen created manually in CustomInitialize.
        void CustomDestroy()
        {
            SpriteManager.RemoveSpriteFrame(layeredSpriteFrameInstantiatedInCode);
            SpriteManager.RemoveSpriteFrame(unlayeredSpriteFrameInstantiatedInCode);
            SpriteManager.RemoveSprite(managedInvisiblTestSprite1);
        }

        static void CustomLoadStaticContent(string contentManagerName)
        {
        }
    }
}
//---------------------------------------------------------------------
// <copyright file="UriWriter.cs" company="Microsoft">
//      Copyright (C) Microsoft Corporation. All rights reserved. See License.txt in the project root for license information.
// </copyright>
//---------------------------------------------------------------------

namespace Microsoft.OData.Client
{
    #region Namespaces
    using System;
    using System.Collections;
    using System.Collections.Generic;
    using System.Diagnostics;
    using System.Globalization;
    using System.Linq;
    using System.Linq.Expressions;
    using System.Reflection;
    using System.Text;
    using Microsoft.OData.Client.Metadata;
    #endregion Namespaces

    /// <summary>
    /// Translates resource bound expression trees into URIs.
    /// </summary>
    internal class UriWriter : DataServiceALinqExpressionVisitor
    {
        /// <summary>Data context used to generate type names for types.</summary>
        private readonly DataServiceContext context;

        /// <summary>stringbuilder for constructed URI.</summary>
        private readonly StringBuilder uriBuilder = new StringBuilder();

        /// <summary>The dictionary to store the alias.</summary>
        private readonly Dictionary<string, string> alias = new Dictionary<string, string>(StringComparer.Ordinal);

        /// <summary>the request data service version for the uri.</summary>
        private Version uriVersion;

        /// <summary>
        /// For caching query options to be grouped
        /// </summary>
        private Dictionary<string, List<string>> cachedQueryOptions = new Dictionary<string, List<string>>(StringComparer.Ordinal);

        /// <summary>
        /// Private constructor for creating UriWriter
        /// </summary>
        /// <param name='context'>Data context used to generate type names for types.</param>
        private UriWriter(DataServiceContext context)
        {
            Debug.Assert(context != null, "context != null");
            this.context = context;
            // Default version; visits may raise it (see WebUtil.RaiseVersion usage).
            this.uriVersion = Util.ODataVersion4;
        }

        /// <summary>
        /// Translates resource bound expression tree to a URI.
        /// </summary>
        /// <param name='context'>Data context used to generate type names for types.</param>
        /// <param name="addTrailingParens">flag to indicate whether generated URI should include () if leaf is ResourceSet</param>
        /// <param name="e">The expression to translate</param>
        /// <param name="uri">uri</param>
        /// <param name="version">version for query</param>
        internal static void Translate(DataServiceContext context, bool addTrailingParens, Expression e, out Uri uri, out Version version)
        {
            var writer = new UriWriter(context);
            writer.Visit(e);
            string fullUri = writer.uriBuilder.ToString();

            // Append any parameter aliases gathered during the visit as query
            // options, choosing '?' or '&' depending on whether a query string
            // already exists.
            if (writer.alias.Any())
            {
                if (fullUri.IndexOf(UriHelper.QUESTIONMARK) > -1)
                {
                    fullUri += UriHelper.AMPERSAND;
                }
                else
                {
                    fullUri += UriHelper.QUESTIONMARK;
                }

                foreach (var kv in writer.alias)
                {
                    fullUri += kv.Key;
                    fullUri += UriHelper.EQUALSSIGN;
                    fullUri += kv.Value;
                    fullUri += UriHelper.AMPERSAND;
                }

                // Trim the trailing ampersand added by the loop above.
                fullUri = fullUri.Substring(0, fullUri.Length - 1);
            }

            uri = UriUtil.CreateUri(fullUri, UriKind.Absolute);
            version = writer.uriVersion;
        }

        /// <summary>
        /// MethodCallExpression visit method
        /// </summary>
        /// <param name="m">The MethodCallExpression expression to visit</param>
        /// <returns>The visited MethodCallExpression expression </returns>
        internal override Expression VisitMethodCall(MethodCallExpression m)
        {
            // Any method call remaining at URI-writing time is untranslatable.
            throw Error.MethodNotSupported(m);
        }

        /// <summary>
        /// UnaryExpression visit method
        /// </summary>
        /// <param name="u">The UnaryExpression expression to visit</param>
        /// <returns>The visited UnaryExpression expression </returns>
        internal override Expression VisitUnary(UnaryExpression u)
        {
            throw new NotSupportedException(Strings.ALinq_UnaryNotSupported(u.NodeType.ToString()));
        }

        /// <summary>
        /// BinaryExpression visit method
        /// </summary>
        /// <param name="b">The BinaryExpression expression to visit</param>
        /// <returns>The visited BinaryExpression expression </returns>
        internal override Expression VisitBinary(BinaryExpression b)
        {
            throw new NotSupportedException(Strings.ALinq_BinaryNotSupported(b.NodeType.ToString()));
        }

        /// <summary>
        /// ConstantExpression visit method
        /// </summary>
        /// <param name="c">The ConstantExpression expression to visit</param>
        /// <returns>The visited ConstantExpression expression </returns>
        internal override Expression VisitConstant(ConstantExpression c)
        {
            throw new NotSupportedException(Strings.ALinq_ConstantNotSupported(c.Value));
        }

        /// <summary>
        /// TypeBinaryExpression visit method
        /// </summary>
        /// <param name="b">The TypeBinaryExpression expression to visit</param>
        /// <returns>The visited TypeBinaryExpression expression </returns>
        internal override Expression VisitTypeIs(TypeBinaryExpression b)
        {
            throw new NotSupportedException(Strings.ALinq_TypeBinaryNotSupported);
        }

        /// <summary>
        /// ConditionalExpression visit method
        /// </summary>
        /// <param name="c">The ConditionalExpression expression to visit</param>
        /// <returns>The visited ConditionalExpression expression </returns>
        internal override Expression VisitConditional(ConditionalExpression c)
        {
            throw new NotSupportedException(Strings.ALinq_ConditionalNotSupported);
        }

        /// <summary>
        /// ParameterExpression visit method
        /// </summary>
        /// <param name="p">The ParameterExpression expression to visit</param>
        /// <returns>The visited ParameterExpression expression </returns>
        internal override Expression VisitParameter(ParameterExpression p)
        {
            throw new NotSupportedException(Strings.ALinq_ParameterNotSupported);
        }

        /// <summary>
        /// MemberExpression visit method
        /// </summary>
        /// <param name="m">The MemberExpression expression to visit</param>
        /// <returns>The visited MemberExpression expression </returns>
        internal override Expression VisitMemberAccess(MemberExpression m)
        {
            throw new NotSupportedException(Strings.ALinq_MemberAccessNotSupported(m.Member.Name));
        }

        /// <summary>
        /// LambdaExpression visit method
        /// </summary>
        /// <param name="lambda">The LambdaExpression to visit</param>
        /// <returns>The visited
        /// LambdaExpression</returns>
        internal override Expression VisitLambda(LambdaExpression lambda)
        {
            // Lambdas, object construction and invocation expressions cannot be
            // represented in a URI; each override below rejects its node kind.
            throw new NotSupportedException(Strings.ALinq_LambdaNotSupported);
        }

        /// <summary>
        /// NewExpression visit method
        /// </summary>
        /// <param name="nex">The NewExpression to visit</param>
        /// <returns>The visited NewExpression</returns>
        internal override NewExpression VisitNew(NewExpression nex)
        {
            throw new NotSupportedException(Strings.ALinq_NewNotSupported);
        }

        /// <summary>
        /// MemberInitExpression visit method
        /// </summary>
        /// <param name="init">The MemberInitExpression to visit</param>
        /// <returns>The visited MemberInitExpression</returns>
        internal override Expression VisitMemberInit(MemberInitExpression init)
        {
            throw new NotSupportedException(Strings.ALinq_MemberInitNotSupported);
        }

        /// <summary>
        /// ListInitExpression visit method
        /// </summary>
        /// <param name="init">The ListInitExpression to visit</param>
        /// <returns>The visited ListInitExpression</returns>
        internal override Expression VisitListInit(ListInitExpression init)
        {
            throw new NotSupportedException(Strings.ALinq_ListInitNotSupported);
        }

        /// <summary>
        /// NewArrayExpression visit method
        /// </summary>
        /// <param name="na">The NewArrayExpression to visit</param>
        /// <returns>The visited NewArrayExpression</returns>
        internal override Expression VisitNewArray(NewArrayExpression na)
        {
            throw new NotSupportedException(Strings.ALinq_NewArrayNotSupported);
        }

        /// <summary>
        /// InvocationExpression visit method
        /// </summary>
        /// <param name="iv">The InvocationExpression to visit</param>
        /// <returns>The visited InvocationExpression</returns>
        internal override Expression VisitInvocation(InvocationExpression iv)
        {
            throw new NotSupportedException(Strings.ALinq_InvocationNotSupported);
        }

        /// <summary>
        /// NavigationPropertySingletonExpression visit method.
/// </summary>
        /// <param name="npse">NavigationPropertySingletonExpression expression to visit</param>
        /// <returns>Visited NavigationPropertySingletonExpression expression</returns>
        internal override Expression VisitNavigationPropertySingletonExpression(NavigationPropertySingletonExpression npse)
        {
            // Emit the source first, then "/<navigation property>", then any query options.
            this.Visit(npse.Source);
            this.uriBuilder.Append(UriHelper.FORWARDSLASH).Append(this.ExpressionToString(npse.MemberExpression, /*inPath*/ true));
            this.VisitQueryOptions(npse);
            return npse;
        }

        /// <summary>
        /// QueryableResourceExpression visit method.
        /// </summary>
        /// <param name="rse">QueryableResourceExpression expression to visit</param>
        /// <returns>Visited QueryableResourceExpression expression</returns>
        internal override Expression VisitQueryableResourceExpression(QueryableResourceExpression rse)
        {
            if ((ResourceExpressionType)rse.NodeType == ResourceExpressionType.ResourceNavigationProperty)
            {
                if (rse.IsOperationInvocation && !(rse.Source is QueryableResourceExpression))
                {
                    // Operation invocation whose source is not itself a resource set:
                    // re-run the eval/normalize/bind pipeline on the source before visiting it.
                    var normalizerRewrites = new Dictionary<Expression, Expression>(ReferenceEqualityComparer<Expression>.Instance);
                    var e = Evaluator.PartialEval(rse.Source);
                    e = ExpressionNormalizer.Normalize(e, normalizerRewrites);
                    e = ResourceBinder.Bind(e, this.context);
                    this.Visit(e);
                }
                else
                {
                    this.Visit(rse.Source);
                }

                this.uriBuilder.Append(UriHelper.FORWARDSLASH).Append(this.ExpressionToString(rse.MemberExpression, /*inPath*/ true));
            }
            else if (rse.MemberExpression != null)
            {
                // This is a resource set expression, so we should be at the
                // very beginning of the URI.
                Debug.Assert(this.uriBuilder.Length == 0, "The builder is not empty while we are adding a resourset");
                string entitySetName = (String)((ConstantExpression)rse.MemberExpression).Value;
                this.uriBuilder.Append(this.context.BaseUriResolver.GetEntitySetUri(entitySetName));
            }
            else
            {
                this.uriBuilder.Append(this.context.BaseUriResolver.BaseUriOrNull);
            }

            // Track the minimum protocol version this expression requires.
            WebUtil.RaiseVersion(ref this.uriVersion, rse.UriVersion);

            if (rse.ResourceTypeAs != null)
            {
                // Type-as casts become a "/Namespace.DerivedType" path segment.
                this.uriBuilder.Append(UriHelper.FORWARDSLASH);
                UriHelper.AppendTypeSegment(this.uriBuilder, rse.ResourceTypeAs, this.context, /*inPath*/ true, ref this.uriVersion);
            }

            if (rse.KeyPredicateConjuncts.Count > 0)
            {
                // Key predicates become "(key)" or "(k1=v1,k2=v2)" per the configured URL conventions.
                this.context.UrlConventions.AppendKeyExpression(rse.GetKeyProperties(), kvp => ClientTypeUtil.GetServerDefinedName(kvp.Key), kvp => kvp.Value.Value, this.uriBuilder);
            }

            if (rse.IsOperationInvocation)
            {
                this.VisitOperationInvocation(rse);
            }

            if (rse.CountOption == CountOption.CountSegment)
            {
                // append $count segment: /$count
                this.uriBuilder.Append(UriHelper.FORWARDSLASH).Append(UriHelper.DOLLARSIGN).Append(UriHelper.COUNT);
            }

            this.VisitQueryOptions(rse);
            return rse;
        }

        /// <summary>
        /// Visit Function Invocation
        /// </summary>
        /// <param name="rse">Resource Expression with function invocation</param>
        internal void VisitOperationInvocation(QueryableResourceExpression rse)
        {
            // Ensure the operation name is separated from the preceding path segment.
            if (!this.uriBuilder.ToString().EndsWith(UriHelper.FORWARDSLASH.ToString(), StringComparison.Ordinal))
            {
                this.uriBuilder.Append(UriHelper.FORWARDSLASH);
            }

            if (rse.IsOperationInvocation)
            {
                this.uriBuilder.Append(rse.OperationName);
                if (rse.IsAction)
                {
                    // Actions take parameters in the request body, not in the URI.
                    return;
                }

                // Functions: append "(name=value,...)".
                this.uriBuilder.Append(UriHelper.LEFTPAREN);
                bool needComma = false;
                KeyValuePair<string, string>[] parameters = rse.OperationParameters.ToArray();
                for (int i = 0; i < parameters.Length; ++i)
                {
                    KeyValuePair<string, string> param = parameters[i];
                    if (needComma)
                    {
                        this.uriBuilder.Append(UriHelper.COMMA);
                    }

                    this.uriBuilder.Append(param.Key);
                    this.uriBuilder.Append(UriHelper.EQUALSSIGN);

                    // non-primitive value, use alias.
                    if (!UriHelper.IsPrimitiveValue(param.Value))
                    {
                        // Probe "@name", "@name1", "@name2", ... until an unused alias is found.
                        string aliasName = UriHelper.ATSIGN + param.Key;
                        int count = 1;
                        while (this.alias.ContainsKey(aliasName))
                        {
                            aliasName = UriHelper.ATSIGN + param.Key + count;
                            count++;
                        }

                        this.uriBuilder.Append(aliasName);
                        this.alias.Add(aliasName, param.Value);
                    }
                    else
                    {
                        // primitive value, do not use alias.
                        this.uriBuilder.Append(param.Value);
                    }

                    needComma = true;
                }

                this.uriBuilder.Append(UriHelper.RIGHTPAREN);
            }
        }

        /// <summary>
        /// Visit Query options for Resource
        /// </summary>
        /// <param name="re">Resource Expression with query options</param>
        internal void VisitQueryOptions(ResourceExpression re)
        {
            if (re.HasQueryOptions)
            {
                this.uriBuilder.Append(UriHelper.QUESTIONMARK);

                QueryableResourceExpression rse = re as QueryableResourceExpression;
                if (rse != null)
                {
                    // Dispatch each sequence query option ($skip/$top/$orderby/$filter)
                    // to its typed overload; each caches its text for later emission.
                    IEnumerator options = rse.SequenceQueryOptions.GetEnumerator();
                    while (options.MoveNext())
                    {
                        Expression e = ((Expression)options.Current);
                        ResourceExpressionType et = (ResourceExpressionType)e.NodeType;
                        switch (et)
                        {
                            case ResourceExpressionType.SkipQueryOption:
                                this.VisitQueryOptionExpression((SkipQueryOptionExpression)e);
                                break;
                            case ResourceExpressionType.TakeQueryOption:
                                this.VisitQueryOptionExpression((TakeQueryOptionExpression)e);
                                break;
                            case ResourceExpressionType.OrderByQueryOption:
                                this.VisitQueryOptionExpression((OrderByQueryOptionExpression)e);
                                break;
                            case ResourceExpressionType.FilterQueryOption:
                                this.VisitQueryOptionExpression((FilterQueryOptionExpression)e);
                                break;
                            default:
                                Debug.Assert(false, "Unexpected expression type " + (int)et);
                                break;
                        }
                    }
                }

                if (re.ExpandPaths.Count > 0)
                {
                    this.VisitExpandOptions(re.ExpandPaths);
                }

                if (re.Projection != null && re.Projection.Paths.Count > 0)
                {
                    this.VisitProjectionPaths(re.Projection.Paths);
                }

                if (re.CountOption == CountOption.CountQuery)
                {
                    this.VisitCountQueryOptions();
                }

                if (re.CustomQueryOptions.Count > 0)
                {
                    this.VisitCustomQueryOptions(re.CustomQueryOptions);
                }

                // All options have been cached; now write them out in one pass.
                this.AppendCachedQueryOptionsToUriBuilder();
            }
        }

        /// <summary>
        /// SkipQueryOptionExpression visit method.
/// </summary> /// <param name="sqoe">SkipQueryOptionExpression expression to visit</param> internal void VisitQueryOptionExpression(SkipQueryOptionExpression sqoe) { this.AddAsCachedQueryOption(UriHelper.DOLLARSIGN + UriHelper.OPTIONSKIP, this.ExpressionToString(sqoe.SkipAmount, /*inPath*/ false)); } /// <summary> /// TakeQueryOptionExpression visit method. /// </summary> /// <param name="tqoe">TakeQueryOptionExpression expression to visit</param> internal void VisitQueryOptionExpression(TakeQueryOptionExpression tqoe) { this.AddAsCachedQueryOption(UriHelper.DOLLARSIGN + UriHelper.OPTIONTOP, this.ExpressionToString(tqoe.TakeAmount, /*inPath*/ false)); } /// <summary> /// FilterQueryOptionExpression visit method. /// </summary> /// <param name="fqoe">FilterQueryOptionExpression expression to visit</param> internal void VisitQueryOptionExpression(FilterQueryOptionExpression fqoe) { this.AddAsCachedQueryOption(UriHelper.DOLLARSIGN + UriHelper.OPTIONFILTER, this.ExpressionToString(fqoe.GetPredicate(), /*inPath*/ false)); } /// <summary> /// OrderByQueryOptionExpression visit method. /// </summary> /// <param name="oboe">OrderByQueryOptionExpression expression to visit</param> internal void VisitQueryOptionExpression(OrderByQueryOptionExpression oboe) { StringBuilder tmpBuilder = new StringBuilder(); int ii = 0; while (true) { var selector = oboe.Selectors[ii]; tmpBuilder.Append(this.ExpressionToString(selector.Expression, /*inPath*/ false)); if (selector.Descending) { tmpBuilder.Append(UriHelper.SPACE); tmpBuilder.Append(UriHelper.OPTIONDESC); } if (++ii == oboe.Selectors.Count) { break; } tmpBuilder.Append(UriHelper.COMMA); } this.AddAsCachedQueryOption(UriHelper.DOLLARSIGN + UriHelper.OPTIONORDERBY, tmpBuilder.ToString()); } /// <summary> /// VisitExpandOptions visit method. 
/// </summary> /// <param name="paths">Expand Paths</param> internal void VisitExpandOptions(List<string> paths) { StringBuilder tmpBuilder = new StringBuilder(); int ii = 0; while (true) { tmpBuilder.Append(paths[ii]); if (++ii == paths.Count) { break; } tmpBuilder.Append(UriHelper.COMMA); } this.AddAsCachedQueryOption(UriHelper.DOLLARSIGN + UriHelper.OPTIONEXPAND, tmpBuilder.ToString()); } /// <summary> /// ProjectionPaths visit method. /// </summary> /// <param name="paths">Projection Paths</param> internal void VisitProjectionPaths(List<string> paths) { StringBuilder tmpBuilder = new StringBuilder(); int ii = 0; while (true) { string path = paths[ii]; tmpBuilder.Append(path); if (++ii == paths.Count) { break; } tmpBuilder.Append(UriHelper.COMMA); } this.AddAsCachedQueryOption(UriHelper.DOLLARSIGN + UriHelper.OPTIONSELECT, tmpBuilder.ToString()); } /// <summary> /// VisitCountQueryOptions visit method. /// </summary> internal void VisitCountQueryOptions() { this.AddAsCachedQueryOption(UriHelper.DOLLARSIGN + UriHelper.OPTIONCOUNT, UriHelper.COUNTTRUE); } /// <summary> /// VisitCustomQueryOptions visit method. 
/// </summary> /// <param name="options">Custom query options</param> internal void VisitCustomQueryOptions(Dictionary<ConstantExpression, ConstantExpression> options) { List<ConstantExpression> keys = options.Keys.ToList(); List<ConstantExpression> values = options.Values.ToList(); for (int i = 0; i < keys.Count; i++) { string k = keys[i].Value + ""; string v = values[i].Value + ""; this.AddAsCachedQueryOption(k, v); } } /// <summary> /// Caches query option to be grouped /// </summary> /// <param name="optionKey">The key.</param> /// <param name="optionValue">The value</param> private void AddAsCachedQueryOption(string optionKey, string optionValue) { List<string> tmp = null; if (!this.cachedQueryOptions.TryGetValue(optionKey, out tmp)) { tmp = new List<string>(); this.cachedQueryOptions.Add(optionKey, tmp); } tmp.Add(optionValue); } /// <summary> /// Append all cached query options to uri. /// </summary> private void AppendCachedQueryOptionsToUriBuilder() { int i = 0; foreach (var queryOption in this.cachedQueryOptions) { if (i++ != 0) { this.uriBuilder.Append(UriHelper.AMPERSAND); } string keyStr = queryOption.Key; string valueStr = string.Join(",", queryOption.Value); this.uriBuilder.Append(keyStr); this.uriBuilder.Append(UriHelper.EQUALSSIGN); this.uriBuilder.Append(valueStr); } } /// <summary>Serializes an expression to a string.</summary> /// <param name="expression">Expression to serialize</param> /// <param name='inPath'>Whether or not the expression being written is part of the path of the URI.</param> /// <returns>The serialized expression.</returns> private string ExpressionToString(Expression expression, bool inPath) { return ExpressionWriter.ExpressionToString(this.context, expression, inPath, ref this.uriVersion); } } }
// Copyright (c) Microsoft Corporation. All rights reserved.
//
// Licensed under the MIT License. See LICENSE.txt in the project root for license information.

using System;
using System.Runtime.InteropServices;
using Microsoft.VisualStudio.TestPlatform.UnitTestFramework;
using System.Numerics;

namespace NumericsTests
{
    /// <summary>Unit tests for <see cref="System.Numerics.Vector4"/>.</summary>
    [TestClass()]
    public class Vector4Test
    {
        // A test for DistanceSquared (Vector4, Vector4)
        [TestMethod]
        public void Vector4DistanceSquaredTest()
        {
            Vector4 a = new Vector4(1.0f, 2.0f, 3.0f, 4.0f);
            Vector4 b = new Vector4(5.0f, 6.0f, 7.0f, 8.0f);

            // Each component differs by 4, so the squared distance is 4 * 4^2 = 64.
            float expected = 64.0f;
            float actual;

            actual = Vector4.DistanceSquared(a, b);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.DistanceSquared did not return the expected value.");
        }

        // A test for Distance (Vector4, Vector4)
        [TestMethod]
        public void Vector4DistanceTest()
        {
            Vector4 a = new Vector4(1.0f, 2.0f, 3.0f, 4.0f);
            Vector4 b = new Vector4(5.0f, 6.0f, 7.0f, 8.0f);

            // sqrt(64) = 8.
            float expected = 8.0f;
            float actual;

            actual = Vector4.Distance(a, b);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Distance did not return the expected value.");
        }

        // A test for Distance (Vector4, Vector4)
        // Distance from the same point
        [TestMethod]
        public void Vector4DistanceTest1()
        {
            Vector4 a = new Vector4(new Vector2(1.051f, 2.05f), 3.478f, 1.0f);
            Vector4 b = new Vector4(new Vector3(1.051f, 2.05f, 3.478f), 0.0f);
            b.W = 1.0f;

            float actual = Vector4.Distance(a, b);
            Assert.AreEqual(0.0f, actual, "Vector4.Distance did not return the expected value.");
        }

        // A test for Dot (Vector4, Vector4)
        [TestMethod]
        public void Vector4DotTest()
        {
            Vector4 a = new Vector4(1.0f, 2.0f, 3.0f, 4.0f);
            Vector4 b = new Vector4(5.0f, 6.0f, 7.0f, 8.0f);

            // 1*5 + 2*6 + 3*7 + 4*8 = 70.
            float expected = 70.0f;
            float actual;

            actual = Vector4.Dot(a, b);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Dot did not return the expected value.");
        }

        // A test for Dot (Vector4, Vector4)
        // Dot test for perpendicular vector
        [TestMethod]
        public void Vector4DotTest1()
        {
            // A cross product is perpendicular to both inputs, so the dot must be 0.
            Vector3 a = new Vector3(1.55f, 1.55f, 1);
            Vector3 b = new Vector3(2.5f, 3, 1.5f);
            Vector3 c = Vector3.Cross(a, b);

            Vector4 d = new Vector4(a, 0);
            Vector4 e = new Vector4(c, 0);

            float actual = Vector4.Dot(d, e);
            Assert.IsTrue(MathHelper.Equal(0.0f, actual), "Vector4.Dot did not return the expected value.");
        }

        // A test for Length ()
        [TestMethod]
        public void Vector4LengthTest()
        {
            Vector3 a = new Vector3(1.0f, 2.0f, 3.0f);
            float w = 4.0f;

            Vector4 target = new Vector4(a, w);

            // |(1,2,3,4)| = sqrt(1 + 4 + 9 + 16) = sqrt(30).
            float expected = (float)System.Math.Sqrt(30.0f);
            float actual;

            actual = target.Length();
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Length did not return the expected value.");
        }

        // A test for Length ()
        // Length test where length is zero
        [TestMethod]
        public void Vector4LengthTest1()
        {
            Vector4 target = new Vector4();

            float expected = 0.0f;
            float actual = target.Length();

            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Length did not return the expected value.");
        }

        // A test for LengthSquared ()
        [TestMethod]
        public void Vector4LengthSquaredTest()
        {
            Vector3 a = new Vector3(1.0f, 2.0f, 3.0f);
            float w = 4.0f;

            Vector4 target = new Vector4(a, w);

            // 1 + 4 + 9 + 16 = 30.
            float expected = 30;
            float actual;

            actual = target.LengthSquared();
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.LengthSquared did not return the expected value.");
        }

        // A test for Min (Vector4, Vector4)
        [TestMethod]
        public void Vector4MinTest()
        {
            Vector4 a = new Vector4(-1.0f, 4.0f, -3.0f, 1000.0f);
            Vector4 b = new Vector4(2.0f, 1.0f, -1.0f, 0.0f);

            // Min is computed per component.
            Vector4 expected = new Vector4(-1.0f, 1.0f, -3.0f, 0.0f);
            Vector4 actual;

            actual = Vector4.Min(a, b);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Min did not return the expected value.");
        }

        // A test for Max (Vector4, Vector4)
        [TestMethod]
        public void Vector4MaxTest()
        {
            Vector4 a = new Vector4(-1.0f, 4.0f, -3.0f, 1000.0f);
            Vector4 b = new Vector4(2.0f, 1.0f, -1.0f, 0.0f);

            // Max is computed per component.
            Vector4 expected = new Vector4(2.0f, 4.0f, -1.0f, 1000.0f);
            Vector4 actual;

            actual = Vector4.Max(a, b);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Max did not return the expected value.");
        }

        [TestMethod]
        public void Vector4MinMaxCodeCoverageTest()
        {
            Vector4 min = Vector4.Zero;
            Vector4 max = Vector4.One;
            Vector4 actual;

            // Min.
            actual = Vector4.Min(min, max);
            Assert.AreEqual(actual, min);

            actual = Vector4.Min(max, min);
            Assert.AreEqual(actual, min);

            // Max.
            actual = Vector4.Max(min, max);
            Assert.AreEqual(actual, max);

            actual = Vector4.Max(max, min);
            Assert.AreEqual(actual, max);
        }

        // A test for Clamp (Vector4, Vector4, Vector4)
        [TestMethod]
        public void Vector4ClampTest()
        {
            Vector4 a = new Vector4(0.5f, 0.3f, 0.33f, 0.44f);
            Vector4 min = new Vector4(0.0f, 0.1f, 0.13f, 0.14f);
            Vector4 max = new Vector4(1.0f, 1.1f, 1.13f, 1.14f);

            // Normal case.
            // Case N1: specified value is in the range.
            Vector4 expected = new Vector4(0.5f, 0.3f, 0.33f, 0.44f);
            Vector4 actual = Vector4.Clamp(a, min, max);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Clamp did not return the expected value.");

            // Normal case.
            // Case N2: specified value is bigger than max value.
            a = new Vector4(2.0f, 3.0f, 4.0f, 5.0f);
            expected = max;
            actual = Vector4.Clamp(a, min, max);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Clamp did not return the expected value.");

            // Case N3: specified value is smaller than max value.
            a = new Vector4(-2.0f, -3.0f, -4.0f, -5.0f);
            expected = min;
            actual = Vector4.Clamp(a, min, max);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Clamp did not return the expected value.");

            // Case N4: combination case.
            a = new Vector4(-2.0f, 0.5f, 4.0f, -5.0f);
            expected = new Vector4(min.X, a.Y, max.Z, min.W);
            actual = Vector4.Clamp(a, min, max);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Clamp did not return the expected value.");

            // User specified min value is bigger than max value.
            max = new Vector4(0.0f, 0.1f, 0.13f, 0.14f);
            min = new Vector4(1.0f, 1.1f, 1.13f, 1.14f);

            // Case W1: specified value is in the range.
            a = new Vector4(0.5f, 0.3f, 0.33f, 0.44f);
            expected = min;
            actual = Vector4.Clamp(a, min, max);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Clamp did not return the expected value.");

            // Normal case.
            // Case W2: specified value is bigger than max and min value.
            a = new Vector4(2.0f, 3.0f, 4.0f, 5.0f);
            expected = min;
            actual = Vector4.Clamp(a, min, max);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Clamp did not return the expected value.");

            // Case W3: specified value is smaller than min and max value.
            a = new Vector4(-2.0f, -3.0f, -4.0f, -5.0f);
            expected = min;
            actual = Vector4.Clamp(a, min, max);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Clamp did not return the expected value.");
        }

        // A test for Lerp (Vector4, Vector4, float)
        [TestMethod]
        public void Vector4LerpTest()
        {
            Vector4 a = new Vector4(1.0f, 2.0f, 3.0f, 4.0f);
            Vector4 b = new Vector4(5.0f, 6.0f, 7.0f, 8.0f);

            float t = 0.5f;

            // Midpoint of a and b.
            Vector4 expected = new Vector4(3.0f, 4.0f, 5.0f, 6.0f);
            Vector4 actual;

            actual = Vector4.Lerp(a, b, t);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Lerp did not return the expected value.");
        }

        // A test for Lerp (Vector4, Vector4, float)
        // Lerp test with factor zero
        [TestMethod]
        public void Vector4LerpTest1()
        {
            Vector4 a = new Vector4(new Vector3(1.0f, 2.0f, 3.0f), 4.0f);
            Vector4 b = new Vector4(4.0f, 5.0f, 6.0f, 7.0f);

            float t = 0.0f;
            Vector4 expected = new Vector4(1.0f, 2.0f, 3.0f, 4.0f);
            Vector4 actual = Vector4.Lerp(a, b, t);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Lerp did not return the expected value.");
        }

        // A test for Lerp (Vector4, Vector4, float)
        // Lerp test with factor one
        [TestMethod]
        public void Vector4LerpTest2()
        {
            Vector4 a = new Vector4(new Vector3(1.0f, 2.0f, 3.0f), 4.0f);
            Vector4 b = new Vector4(4.0f, 5.0f, 6.0f, 7.0f);

            float t = 1.0f;
            Vector4 expected = new Vector4(4.0f, 5.0f, 6.0f, 7.0f);
            Vector4 actual = Vector4.Lerp(a, b, t);
            Assert.IsTrue(MathHelper.Equal(expected, actual),
"Vector4.Lerp did not return the expected value.");
        }

        // A test for Lerp (Vector4, Vector4, float)
        // Lerp test with factor > 1
        [TestMethod]
        public void Vector4LerpTest3()
        {
            Vector4 a = new Vector4(new Vector3(0.0f, 0.0f, 0.0f), 0.0f);
            Vector4 b = new Vector4(4.0f, 5.0f, 6.0f, 7.0f);

            float t = 2.0f;
            // Extrapolates past b: 0 + (b - 0) * 2 = 2b.
            Vector4 expected = new Vector4(8.0f, 10.0f, 12.0f, 14.0f);
            Vector4 actual = Vector4.Lerp(a, b, t);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Lerp did not return the expected value.");
        }

        // A test for Lerp (Vector4, Vector4, float)
        // Lerp test with factor < 0
        [TestMethod]
        public void Vector4LerpTest4()
        {
            Vector4 a = new Vector4(new Vector3(0.0f, 0.0f, 0.0f), 0.0f);
            Vector4 b = new Vector4(4.0f, 5.0f, 6.0f, 7.0f);

            float t = -2.0f;
            // Extrapolates before a: 0 + (b - 0) * -2 = -2b.
            Vector4 expected = -(b * 2);
            Vector4 actual = Vector4.Lerp(a, b, t);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Lerp did not return the expected value.");
        }

        // A test for Lerp (Vector4, Vector4, float)
        // Lerp test from the same point
        [TestMethod]
        public void Vector4LerpTest5()
        {
            Vector4 a = new Vector4(4.0f, 5.0f, 6.0f, 7.0f);
            Vector4 b = new Vector4(4.0f, 5.0f, 6.0f, 7.0f);

            float t = 0.85f;
            Vector4 expected = a;
            Vector4 actual = Vector4.Lerp(a, b, t);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Lerp did not return the expected value.");
        }

        // A test for Transform (Vector2, Matrix4x4)
        [TestMethod]
        public void Vector4TransformTest1()
        {
            Vector2 v = new Vector2(1.0f, 2.0f);

            Matrix4x4 m =
                Matrix4x4.CreateRotationX(MathHelper.ToRadians(30.0f)) *
                Matrix4x4.CreateRotationY(MathHelper.ToRadians(30.0f)) *
                Matrix4x4.CreateRotationZ(MathHelper.ToRadians(30.0f));
            m.M41 = 10.0f;
            m.M42 = 20.0f;
            m.M43 = 30.0f;

            Vector4 expected = new Vector4(10.316987f, 22.183012f, 30.3660259f, 1.0f);
            Vector4 actual;

            actual = Vector4.Transform(v, m);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Transform did not return the expected value.");
        }

        // A test for Transform (Vector3, Matrix4x4)
        [TestMethod]
        public void Vector4TransformTest2()
        {
            Vector3 v = new Vector3(1.0f, 2.0f, 3.0f);

            Matrix4x4 m =
                Matrix4x4.CreateRotationX(MathHelper.ToRadians(30.0f)) *
                Matrix4x4.CreateRotationY(MathHelper.ToRadians(30.0f)) *
                Matrix4x4.CreateRotationZ(MathHelper.ToRadians(30.0f));
            m.M41 = 10.0f;
            m.M42 = 20.0f;
            m.M43 = 30.0f;

            Vector4 expected = new Vector4(12.19198728f, 21.53349376f, 32.61602545f, 1.0f);
            Vector4 actual;

            actual = Vector4.Transform(v, m);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "vector4.Transform did not return the expected value.");
        }

        // A test for Transform (Vector4, Matrix4x4)
        [TestMethod]
        public void Vector4TransformVector4Test()
        {
            Vector4 v = new Vector4(1.0f, 2.0f, 3.0f, 0.0f);

            Matrix4x4 m =
                Matrix4x4.CreateRotationX(MathHelper.ToRadians(30.0f)) *
                Matrix4x4.CreateRotationY(MathHelper.ToRadians(30.0f)) *
                Matrix4x4.CreateRotationZ(MathHelper.ToRadians(30.0f));
            m.M41 = 10.0f;
            m.M42 = 20.0f;
            m.M43 = 30.0f;

            // With W = 0 the translation row of the matrix has no effect.
            Vector4 expected = new Vector4(2.19198728f, 1.53349376f, 2.61602545f, 0.0f);
            Vector4 actual;

            actual = Vector4.Transform(v, m);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Transform did not return the expected value.");

            // With W = 1 the translation (10, 20, 30) is applied.
            v.W = 1.0f;

            expected = new Vector4(12.19198728f, 21.53349376f, 32.61602545f, 1.0f);
            actual = Vector4.Transform(v, m);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Transform did not return the expected value.");
        }

        // A test for Transform (Vector4, Matrix4x4)
        // Transform vector4 with zero matrix
        [TestMethod]
        public void Vector4TransformVector4Test1()
        {
            Vector4 v = new Vector4(1.0f, 2.0f, 3.0f, 0.0f);
            Matrix4x4 m = new Matrix4x4();
            Vector4 expected = new Vector4(0, 0, 0, 0);

            Vector4 actual = Vector4.Transform(v, m);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Transform did not return the expected value.");
        }

        // A test for Transform (Vector4, Matrix4x4)
        // Transform vector4 with identity matrix
        [TestMethod]
        public void Vector4TransformVector4Test2()
        {
            Vector4 v = new Vector4(1.0f, 2.0f, 3.0f, 0.0f);
            Matrix4x4 m = Matrix4x4.Identity;
            Vector4 expected = new Vector4(1.0f, 2.0f, 3.0f, 0.0f);

            Vector4 actual = Vector4.Transform(v, m);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Transform did not return the expected value.");
        }

        // A test for Transform (Vector3, Matrix4x4)
        // Transform Vector3 test
        [TestMethod]
        public void Vector4TransformVector3Test()
        {
            Vector3 v = new Vector3(1.0f, 2.0f, 3.0f);

            Matrix4x4 m =
                Matrix4x4.CreateRotationX(MathHelper.ToRadians(30.0f)) *
                Matrix4x4.CreateRotationY(MathHelper.ToRadians(30.0f)) *
                Matrix4x4.CreateRotationZ(MathHelper.ToRadians(30.0f));
            m.M41 = 10.0f;
            m.M42 = 20.0f;
            m.M43 = 30.0f;

            // Transforming a Vector3 must behave like transforming (v, 1).
            Vector4 expected = Vector4.Transform(new Vector4(v, 1.0f), m);
            Vector4 actual = Vector4.Transform(v, m);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Transform did not return the expected value.");
        }

        // A test for Transform (Vector3, Matrix4x4)
        // Transform vector3 with zero matrix
        [TestMethod]
        public void Vector4TransformVector3Test1()
        {
            Vector3 v = new Vector3(1.0f, 2.0f, 3.0f);
            Matrix4x4 m = new Matrix4x4();
            Vector4 expected = new Vector4(0, 0, 0, 0);

            Vector4 actual = Vector4.Transform(v, m);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Transform did not return the expected value.");
        }

        // A test for Transform (Vector3, Matrix4x4)
        // Transform vector3 with identity matrix
        [TestMethod]
        public void Vector4TransformVector3Test2()
        {
            Vector3 v = new Vector3(1.0f, 2.0f, 3.0f);
            Matrix4x4 m = Matrix4x4.Identity;
            Vector4 expected = new Vector4(1.0f, 2.0f, 3.0f, 1.0f);

            Vector4 actual = Vector4.Transform(v, m);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Transform did not return the expected value.");
        }

        // A test for Transform (Vector2, Matrix4x4)
        // Transform Vector2 test
        [TestMethod]
        public void Vector4TransformVector2Test()
        {
            Vector2 v = new Vector2(1.0f, 2.0f);

            Matrix4x4 m =
                Matrix4x4.CreateRotationX(MathHelper.ToRadians(30.0f)) *
                Matrix4x4.CreateRotationY(MathHelper.ToRadians(30.0f)) *
                Matrix4x4.CreateRotationZ(MathHelper.ToRadians(30.0f));
            m.M41 = 10.0f;
            m.M42 = 20.0f;
            m.M43 = 30.0f;

            // Transforming a Vector2 must behave like transforming (v, 0, 1).
            Vector4 expected = Vector4.Transform(new Vector4(v, 0.0f, 1.0f), m);
            Vector4 actual = Vector4.Transform(v, m);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Transform did not return the expected value.");
        }

        // A test for Transform (Vector2, Matrix4x4)
        // Transform Vector2 with zero matrix
        [TestMethod]
        public void Vector4TransformVector2Test1()
        {
            Vector2 v = new Vector2(1.0f, 2.0f);
            Matrix4x4 m = new Matrix4x4();
            Vector4 expected = new Vector4(0, 0, 0, 0);

            Vector4 actual = Vector4.Transform(v, m);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Transform did not return the expected value.");
        }

        // A test for Transform (Vector2, Matrix4x4)
        // Transform vector2 with identity matrix
        [TestMethod]
        public void Vector4TransformVector2Test2()
        {
            Vector2 v = new Vector2(1.0f, 2.0f);
            Matrix4x4 m = Matrix4x4.Identity;
            Vector4 expected = new Vector4(1.0f, 2.0f, 0, 1.0f);

            Vector4 actual = Vector4.Transform(v, m);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Transform did not return the expected value.");
        }

        // A test for Transform (Vector2, Quaternion)
        [TestMethod]
        public void Vector4TransformVector2QuatanionTest()
        {
            Vector2 v = new Vector2(1.0f, 2.0f);

            // Quaternion built from the same rotation must give the same result as the matrix.
            Matrix4x4 m =
                Matrix4x4.CreateRotationX(MathHelper.ToRadians(30.0f)) *
                Matrix4x4.CreateRotationY(MathHelper.ToRadians(30.0f)) *
                Matrix4x4.CreateRotationZ(MathHelper.ToRadians(30.0f));
            Quaternion q = Quaternion.CreateFromRotationMatrix(m);

            Vector4 expected = Vector4.Transform(v, m);
            Vector4 actual;

            actual = Vector4.Transform(v, q);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Transform did not return the expected value.");
        }

        // A test for Transform (Vector3, Quaternion)
        [TestMethod]
        public void Vector4TransformVector3Quaternion()
        {
            Vector3 v = new Vector3(1.0f, 2.0f, 3.0f);

            Matrix4x4 m =
                Matrix4x4.CreateRotationX(MathHelper.ToRadians(30.0f)) *
Matrix4x4.CreateRotationY(MathHelper.ToRadians(30.0f)) *
                Matrix4x4.CreateRotationZ(MathHelper.ToRadians(30.0f));
            Quaternion q = Quaternion.CreateFromRotationMatrix(m);

            // Quaternion rotation must agree with the equivalent matrix rotation.
            Vector4 expected = Vector4.Transform(v, m);
            Vector4 actual;

            actual = Vector4.Transform(v, q);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "vector4.Transform did not return the expected value.");
        }

        // A test for Transform (Vector4, Quaternion)
        [TestMethod]
        public void Vector4TransformVector4QuaternionTest()
        {
            Vector4 v = new Vector4(1.0f, 2.0f, 3.0f, 0.0f);

            Matrix4x4 m =
                Matrix4x4.CreateRotationX(MathHelper.ToRadians(30.0f)) *
                Matrix4x4.CreateRotationY(MathHelper.ToRadians(30.0f)) *
                Matrix4x4.CreateRotationZ(MathHelper.ToRadians(30.0f));
            Quaternion q = Quaternion.CreateFromRotationMatrix(m);

            Vector4 expected = Vector4.Transform(v, m);
            Vector4 actual;

            actual = Vector4.Transform(v, q);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Transform did not return the expected value.");

            // W passes through the quaternion rotation unchanged.
            v.W = 1.0f;

            expected.W = 1.0f;
            actual = Vector4.Transform(v, q);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Transform did not return the expected value.");
        }

        // A test for Transform (Vector4, Quaternion)
        // Transform vector4 with zero quaternion
        [TestMethod]
        public void Vector4TransformVector4QuaternionTest1()
        {
            Vector4 v = new Vector4(1.0f, 2.0f, 3.0f, 0.0f);
            Quaternion q = new Quaternion();
            Vector4 expected = v;

            Vector4 actual = Vector4.Transform(v, q);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Transform did not return the expected value.");
        }

        // A test for Transform (Vector4, Quaternion)
        // Transform vector4 with identity matrix
        [TestMethod]
        public void Vector4TransformVector4QuaternionTest2()
        {
            Vector4 v = new Vector4(1.0f, 2.0f, 3.0f, 0.0f);
            Quaternion q = Quaternion.Identity;
            Vector4 expected = new Vector4(1.0f, 2.0f, 3.0f, 0.0f);

            Vector4 actual = Vector4.Transform(v, q);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Transform did not return the expected value.");
        }

        // A test for Transform (Vector3, Quaternion)
        // Transform Vector3 test
        [TestMethod]
        public void Vector4TransformVector3QuaternionTest()
        {
            Vector3 v = new Vector3(1.0f, 2.0f, 3.0f);

            Matrix4x4 m =
                Matrix4x4.CreateRotationX(MathHelper.ToRadians(30.0f)) *
                Matrix4x4.CreateRotationY(MathHelper.ToRadians(30.0f)) *
                Matrix4x4.CreateRotationZ(MathHelper.ToRadians(30.0f));
            Quaternion q = Quaternion.CreateFromRotationMatrix(m);

            Vector4 expected = Vector4.Transform(v, m);
            Vector4 actual = Vector4.Transform(v, q);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Transform did not return the expected value.");
        }

        // A test for Transform (Vector3, Quaternion)
        // Transform vector3 with zero quaternion
        [TestMethod]
        public void Vector4TransformVector3QuaternionTest1()
        {
            Vector3 v = new Vector3(1.0f, 2.0f, 3.0f);
            Quaternion q = new Quaternion();
            // A Vector3 is promoted with W = 1 for the result.
            Vector4 expected = new Vector4(v, 1.0f);

            Vector4 actual = Vector4.Transform(v, q);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Transform did not return the expected value.");
        }

        // A test for Transform (Vector3, Quaternion)
        // Transform vector3 with identity quaternion
        [TestMethod]
        public void Vector4TransformVector3QuaternionTest2()
        {
            Vector3 v = new Vector3(1.0f, 2.0f, 3.0f);
            Quaternion q = Quaternion.Identity;
            Vector4 expected = new Vector4(1.0f, 2.0f, 3.0f, 1.0f);

            Vector4 actual = Vector4.Transform(v, q);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Transform did not return the expected value.");
        }

        // A test for Transform (Vector2, Quaternion)
        // Transform Vector2 by quaternion test
        [TestMethod]
        public void Vector4TransformVector2QuaternionTest()
        {
            Vector2 v = new Vector2(1.0f, 2.0f);

            Matrix4x4 m =
                Matrix4x4.CreateRotationX(MathHelper.ToRadians(30.0f)) *
                Matrix4x4.CreateRotationY(MathHelper.ToRadians(30.0f)) *
                Matrix4x4.CreateRotationZ(MathHelper.ToRadians(30.0f));
            Quaternion q = Quaternion.CreateFromRotationMatrix(m);

            Vector4 expected = Vector4.Transform(v, m);
            Vector4 actual = Vector4.Transform(v, q);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Transform did not return the expected value.");
        }

        // A test for Transform (Vector2, Quaternion)
        // Transform Vector2 with zero quaternion
        [TestMethod]
        public void Vector4TransformVector2QuaternionTest1()
        {
            Vector2 v = new Vector2(1.0f, 2.0f);
            Quaternion q = new Quaternion();
            Vector4 expected = new Vector4(1.0f, 2.0f, 0, 1.0f);

            Vector4 actual = Vector4.Transform(v, q);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Transform did not return the expected value.");
        }

        // A test for Transform (Vector2, Matrix4x4)
        // Transform vector2 with identity Quaternion
        [TestMethod]
        public void Vector4TransformVector2QuaternionTest2()
        {
            Vector2 v = new Vector2(1.0f, 2.0f);
            Quaternion q = Quaternion.Identity;
            Vector4 expected = new Vector4(1.0f, 2.0f, 0, 1.0f);

            Vector4 actual = Vector4.Transform(v, q);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Transform did not return the expected value.");
        }

        // A test for Normalize (Vector4)
        [TestMethod]
        public void Vector4NormalizeTest()
        {
            Vector4 a = new Vector4(1.0f, 2.0f, 3.0f, 4.0f);

            // Components divided by |a| = sqrt(30).
            Vector4 expected = new Vector4(
                0.1825741858350553711523232609336f,
                0.3651483716701107423046465218672f,
                0.5477225575051661134569697828008f,
                0.7302967433402214846092930437344f);
            Vector4 actual;

            actual = Vector4.Normalize(a);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Normalize did not return the expected value.");
        }

        // A test for Normalize (Vector4)
        // Normalize vector of length one
        [TestMethod]
        public void Vector4NormalizeTest1()
        {
            Vector4 a = new Vector4(1.0f, 0.0f, 0.0f, 0.0f);

            Vector4 expected = new Vector4(1.0f, 0.0f, 0.0f, 0.0f);
            Vector4 actual = Vector4.Normalize(a);
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.Normalize did not return the expected value.");
        }

        // A test for Normalize (Vector4)
        // Normalize vector of length zero
        [TestMethod]
        public void Vector4NormalizeTest2()
        {
            Vector4 a = new Vector4(0.0f, 0.0f, 0.0f, 0.0f);

            // Normalizing the zero vector divides by zero, producing NaN in every component.
            Vector4 expected = new Vector4(0.0f, 0.0f, 0.0f, 0.0f);
            Vector4 actual = Vector4.Normalize(a);
            Assert.IsTrue(float.IsNaN(actual.X) && float.IsNaN(actual.Y) && float.IsNaN(actual.Z) && float.IsNaN(actual.W), "Vector4.Normalize did not return the expected value.");
        }

        // A test for operator - (Vector4)
        [TestMethod]
        public void Vector4UnaryNegationTest()
        {
            Vector4 a = new Vector4(1.0f, 2.0f, 3.0f, 4.0f);

            Vector4 expected = new Vector4(-1.0f, -2.0f, -3.0f, -4.0f);
            Vector4 actual;

            actual = -a;
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.operator - did not return the expected value.");
        }

        // A test for operator - (Vector4, Vector4)
        [TestMethod]
        public void Vector4SubtractionTest()
        {
            Vector4 a = new Vector4(1.0f, 6.0f, 3.0f, 4.0f);
            Vector4 b = new Vector4(5.0f, 2.0f, 3.0f, 9.0f);

            Vector4 expected = new Vector4(-4.0f, 4.0f, 0.0f, -5.0f);
            Vector4 actual;

            actual = a - b;
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.operator - did not return the expected value.");
        }

        // A test for operator * (Vector4, float)
        [TestMethod]
        public void Vector4MultiplyTest()
        {
            Vector4 a = new Vector4(1.0f, 2.0f, 3.0f, 4.0f);
            float factor = 2.0f;

            Vector4 expected = new Vector4(2.0f, 4.0f, 6.0f, 8.0f);
            Vector4 actual;

            actual = a * factor;
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.operator * did not return the expected value.");
        }

        // A test for operator * (float, Vector4)
        [TestMethod]
        public void Vector4MultiplyTest4()
        {
            Vector4 a = new Vector4(1.0f, 2.0f, 3.0f, 4.0f);
            float factor = 2.0f;

            Vector4 expected = new Vector4(2.0f, 4.0f, 6.0f, 8.0f);
            Vector4 actual;

            actual = factor * a;
            Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.operator * did not return the expected value.");
        }

        // A test for operator * (Vector4, Vector4)
        [TestMethod]
        public void Vector4MultiplyTest1()
        {
            Vector4 a = new Vector4(1.0f, 2.0f, 3.0f, 4.0f);
            Vector4 b = new Vector4(5.0f, 6.0f, 7.0f, 8.0f);

            // Component-wise product.
            Vector4 expected = new Vector4(5.0f, 12.0f, 21.0f, 32.0f);
            Vector4 actual;
actual = a * b; Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.operator * did not return the expected value."); } // A test for operator / (Vector4, float) [TestMethod] public void Vector4DivisionTest() { Vector4 a = new Vector4(1.0f, 2.0f, 3.0f, 4.0f); float div = 2.0f; Vector4 expected = new Vector4(0.5f, 1.0f, 1.5f, 2.0f); Vector4 actual; actual = a / div; Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.operator / did not return the expected value."); } // A test for operator / (Vector4, Vector4) [TestMethod] public void Vector4DivisionTest1() { Vector4 a = new Vector4(1.0f, 6.0f, 7.0f, 4.0f); Vector4 b = new Vector4(5.0f, 2.0f, 3.0f, 8.0f); Vector4 expected = new Vector4(1.0f / 5.0f, 6.0f / 2.0f, 7.0f / 3.0f, 4.0f / 8.0f); Vector4 actual; actual = a / b; Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.operator / did not return the expected value."); } // A test for operator / (Vector4, Vector4) // Divide by zero [TestMethod] public void Vector4DivisionTest2() { Vector4 a = new Vector4(-2.0f, 3.0f, float.MaxValue, float.NaN); float div = 0.0f; Vector4 actual = a / div; Assert.IsTrue(float.IsNegativeInfinity(actual.X), "Vector4.operator / did not return the expected value."); Assert.IsTrue(float.IsPositiveInfinity(actual.Y), "Vector4.operator / did not return the expected value."); Assert.IsTrue(float.IsPositiveInfinity(actual.Z), "Vector4.operator / did not return the expected value."); Assert.IsTrue(float.IsNaN(actual.W), "Vector4.operator / did not return the expected value."); } // A test for operator / (Vector4, Vector4) // Divide by zero [TestMethod] public void Vector4DivisionTest3() { Vector4 a = new Vector4(0.047f, -3.0f, float.NegativeInfinity, float.MinValue); Vector4 b = new Vector4(); Vector4 actual = a / b; Assert.IsTrue(float.IsPositiveInfinity(actual.X), "Vector4.operator / did not return the expected value."); Assert.IsTrue(float.IsNegativeInfinity(actual.Y), "Vector4.operator / did not return the expected 
value."); Assert.IsTrue(float.IsNegativeInfinity(actual.Z), "Vector4.operator / did not return the expected value."); Assert.IsTrue(float.IsNegativeInfinity(actual.W), "Vector4.operator / did not return the expected value."); } // A test for operator + (Vector4, Vector4) [TestMethod] public void Vector4AdditionTest() { Vector4 a = new Vector4(1.0f, 2.0f, 3.0f, 4.0f); Vector4 b = new Vector4(5.0f, 6.0f, 7.0f, 8.0f); Vector4 expected = new Vector4(6.0f, 8.0f, 10.0f, 12.0f); Vector4 actual; actual = a + b; Assert.IsTrue(MathHelper.Equal(expected, actual), "Vector4.operator + did not return the expected value."); } // A test for Vector4 (float, float, float, float) [TestMethod] public void Vector4ConstructorTest() { float x = 1.0f; float y = 2.0f; float z = 3.0f; float w = 4.0f; Vector4 target = new Vector4(x, y, z, w); Assert.IsTrue(MathHelper.Equal(target.X, x) && MathHelper.Equal(target.Y, y) && MathHelper.Equal(target.Z, z) && MathHelper.Equal(target.W, w), "Vector4 constructor(x,y,z,w) did not return the expected value."); } // A test for Vector4 (Vector2, float, float) [TestMethod] public void Vector4ConstructorTest1() { Vector2 a = new Vector2(1.0f, 2.0f); float z = 3.0f; float w = 4.0f; Vector4 target = new Vector4(a, z, w); Assert.IsTrue(MathHelper.Equal(target.X, a.X) && MathHelper.Equal(target.Y, a.Y) && MathHelper.Equal(target.Z, z) && MathHelper.Equal(target.W, w), "Vector4 constructor(Vector2,z,w) did not return the expected value."); } // A test for Vector4 (Vector3, float) [TestMethod] public void Vector4ConstructorTest2() { Vector3 a = new Vector3(1.0f, 2.0f, 3.0f); float w = 4.0f; Vector4 target = new Vector4(a, w); Assert.IsTrue(MathHelper.Equal(target.X, a.X) && MathHelper.Equal(target.Y, a.Y) && MathHelper.Equal(target.Z, a.Z) && MathHelper.Equal(target.W, w), "Vector4 constructor(Vector3,w) did not return the expected value."); } // A test for Vector4 () // Constructor with no parameter [TestMethod] public void Vector4ConstructorTest4() { Vector4 
a = new Vector4(); Assert.AreEqual(a.X, 0.0f, "Vector4.constructor () did not return the expected value."); Assert.AreEqual(a.Y, 0.0f, "Vector4.constructor () did not return the expected value."); Assert.AreEqual(a.Z, 0.0f, "Vector4.constructor () did not return the expected value."); Assert.AreEqual(a.W, 0.0f, "Vector4.constructor () did not return the expected value."); } // A test for Vector4 () // Constructor with special floating values [TestMethod] public void Vector4ConstructorTest5() { Vector4 target = new Vector4(float.NaN, float.MaxValue, float.PositiveInfinity, float.Epsilon); Assert.IsTrue(float.IsNaN(target.X), "Vector4.constructor (float, float, float, float) did not return the expected value."); Assert.IsTrue(float.Equals(float.MaxValue, target.Y), "Vector4.constructor (float, float, float, float) did not return the expected value."); Assert.IsTrue(float.IsPositiveInfinity(target.Z), "Vector4.constructor (float, float, float, float) did not return the expected value."); Assert.IsTrue(float.Equals(float.Epsilon, target.W), "Vector4.constructor (float, float, float, float) did not return the expected value."); } // A test for ToString () // ToString test for Vector4 [TestMethod] public void Vector4ToStringTest() { Vector4 target = new Vector4(-1.0f, 2.2f, 3.3f, -4.4f); string expected = "{X:-1 Y:2.2 Z:3.3 W:-4.4}"; string actual; actual = target.ToString(); Assert.AreEqual(expected, actual, "Vector4.ToString did not return the expected value."); } // A test for Add (Vector4, Vector4) [TestMethod] public void Vector4AddTest() { Vector4 a = new Vector4(1.0f, 2.0f, 3.0f, 4.0f); Vector4 b = new Vector4(5.0f, 6.0f, 7.0f, 8.0f); Vector4 expected = new Vector4(6.0f, 8.0f, 10.0f, 12.0f); Vector4 actual; actual = Vector4.Add(a, b); Assert.AreEqual(expected, actual, "Vector4.Add did not return the expected value."); } // A test for Divide (Vector4, float) [TestMethod] public void Vector4DivideTest() { Vector4 a = new Vector4(1.0f, 2.0f, 3.0f, 4.0f); float div = 
2.0f; Vector4 expected = new Vector4(0.5f, 1.0f, 1.5f, 2.0f); Vector4 actual; actual = Vector4.Divide(a, div); Assert.AreEqual(expected, actual, "Vector4.Divide did not return the expected value."); } // A test for Divide (Vector4, Vector4) [TestMethod] public void Vector4DivideTest1() { Vector4 a = new Vector4(1.0f, 6.0f, 7.0f, 4.0f); Vector4 b = new Vector4(5.0f, 2.0f, 3.0f, 8.0f); Vector4 expected = new Vector4(1.0f / 5.0f, 6.0f / 2.0f, 7.0f / 3.0f, 4.0f / 8.0f); Vector4 actual; actual = Vector4.Divide(a, b); Assert.AreEqual(expected, actual, "Vector4.Divide did not return the expected value."); } // A test for Equals (object) [TestMethod] public void Vector4EqualsTest() { Vector4 a = new Vector4(1.0f, 2.0f, 3.0f, 4.0f); Vector4 b = new Vector4(1.0f, 2.0f, 3.0f, 4.0f); // case 1: compare between same values object obj = b; bool expected = true; bool actual = a.Equals(obj); Assert.AreEqual(expected, actual, "Vector4.Equals did not return the expected value."); // case 2: compare between different values b.X = 10.0f; obj = b; expected = false; actual = a.Equals(obj); Assert.AreEqual(expected, actual, "Vector4.Equals did not return the expected value."); // case 3: compare between different types. obj = new Quaternion(); expected = false; actual = a.Equals(obj); Assert.AreEqual(expected, actual, "Vector4.Equals did not return the expected value."); // case 3: compare against null. 
obj = null; expected = false; actual = a.Equals(obj); Assert.AreEqual(expected, actual, "Vector4.Equals did not return the expected value."); } // A test for GetHashCode () [TestMethod] public void Vector4GetHashCodeTest() { Vector4 target = new Vector4(1.0f, 2.0f, 3.0f, 4.0f); int expected = target.X.GetHashCode() + target.Y.GetHashCode() + target.Z.GetHashCode() + target.W.GetHashCode(); int actual; actual = target.GetHashCode(); Assert.AreEqual(expected, actual, "Vector4.GetHashCode did not return the expected value."); } // A test for Multiply (Vector4, float) [TestMethod] public void Vector4MultiplyTest2() { Vector4 a = new Vector4(1.0f, 2.0f, 3.0f, 4.0f); float factor = 2.0f; Vector4 expected = new Vector4(2.0f, 4.0f, 6.0f, 8.0f); Vector4 actual = Vector4.Multiply(a, factor); Assert.AreEqual(expected, actual, "Vector4.Multiply did not return the expected value."); } // A test for Multiply (Vector4, Vector4) [TestMethod] public void Vector4MultiplyTest3() { Vector4 a = new Vector4(1.0f, 2.0f, 3.0f, 4.0f); Vector4 b = new Vector4(5.0f, 6.0f, 7.0f, 8.0f); Vector4 expected = new Vector4(5.0f, 12.0f, 21.0f, 32.0f); Vector4 actual; actual = Vector4.Multiply(a, b); Assert.AreEqual(expected, actual, "Vector4.Multiply did not return the expected value."); } // A test for Negate (Vector4) [TestMethod] public void Vector4NegateTest() { Vector4 a = new Vector4(1.0f, 2.0f, 3.0f, 4.0f); Vector4 expected = new Vector4(-1.0f, -2.0f, -3.0f, -4.0f); Vector4 actual; actual = Vector4.Negate(a); Assert.AreEqual(expected, actual, "Vector4.Negate did not return the expected value."); } // A test for operator != (Vector4, Vector4) [TestMethod] public void Vector4InequalityTest() { Vector4 a = new Vector4(1.0f, 2.0f, 3.0f, 4.0f); Vector4 b = new Vector4(1.0f, 2.0f, 3.0f, 4.0f); // case 1: compare between same values bool expected = false; bool actual = a != b; Assert.AreEqual(expected, actual, "Vector4.operator != did not return the expected value."); // case 2: compare between 
different values b.X = 10.0f; expected = true; actual = a != b; Assert.AreEqual(expected, actual, "Vector4.operator != did not return the expected value."); } // A test for operator == (Vector4, Vector4) [TestMethod] public void Vector4EqualityTest() { Vector4 a = new Vector4(1.0f, 2.0f, 3.0f, 4.0f); Vector4 b = new Vector4(1.0f, 2.0f, 3.0f, 4.0f); // case 1: compare between same values bool expected = true; bool actual = a == b; Assert.AreEqual(expected, actual, "Vector4.operator == did not return the expected value."); // case 2: compare between different values b.X = 10.0f; expected = false; actual = a == b; Assert.AreEqual(expected, actual, "Vector4.operator == did not return the expected value."); } // A test for Subtract (Vector4, Vector4) [TestMethod] public void Vector4SubtractTest() { Vector4 a = new Vector4(1.0f, 6.0f, 3.0f, 4.0f); Vector4 b = new Vector4(5.0f, 2.0f, 3.0f, 9.0f); Vector4 expected = new Vector4(-4.0f, 4.0f, 0.0f, -5.0f); Vector4 actual; actual = Vector4.Subtract(a, b); Assert.AreEqual(expected, actual, "Vector4.Subtract did not return the expected value."); } // A test for UnitW [TestMethod] public void Vector4UnitWTest() { Vector4 val = new Vector4(0.0f, 0.0f, 0.0f, 1.0f); Assert.AreEqual(val, Vector4.UnitW, "Vector4.UnitW was not set correctly."); } // A test for UnitX [TestMethod] public void Vector4UnitXTest() { Vector4 val = new Vector4(1.0f, 0.0f, 0.0f, 0.0f); Assert.AreEqual(val, Vector4.UnitX, "Vector4.UnitX was not set correctly."); } // A test for UnitY [TestMethod] public void Vector4UnitYTest() { Vector4 val = new Vector4(0.0f, 1.0f, 0.0f, 0.0f); Assert.AreEqual(val, Vector4.UnitY, "Vector4.UnitY was not set correctly."); } // A test for UnitZ [TestMethod] public void Vector4UnitZTest() { Vector4 val = new Vector4(0.0f, 0.0f, 1.0f, 0.0f); Assert.AreEqual(val, Vector4.UnitZ, "Vector4.UnitZ was not set correctly."); } // A test for One [TestMethod] public void Vector4OneTest() { Vector4 val = new Vector4(1.0f, 1.0f, 1.0f, 1.0f); 
Assert.AreEqual(val, Vector4.One, "Vector4.One was not set correctly."); } // A test for Zero [TestMethod] public void Vector4ZeroTest() { Vector4 val = new Vector4(0.0f, 0.0f, 0.0f, 0.0f); Assert.AreEqual(val, Vector4.Zero, "Vector4.Zero was not set correctly."); } // A test for Equals (Vector4) [TestMethod] public void Vector4EqualsTest1() { Vector4 a = new Vector4(1.0f, 2.0f, 3.0f, 4.0f); Vector4 b = new Vector4(1.0f, 2.0f, 3.0f, 4.0f); // case 1: compare between same values bool expected = true; bool actual = a.Equals(b); Assert.AreEqual(expected, actual, "Vector4.Equals did not return the expected value."); // case 2: compare between different values b.X = 10.0f; expected = false; actual = a.Equals(b); Assert.AreEqual(expected, actual, "Vector4.Equals did not return the expected value."); } // A test for Vector4 (float) [TestMethod] public void Vector4ConstructorTest6() { float value = 1.0f; Vector4 target = new Vector4(value); Vector4 expected = new Vector4(value, value, value, value); Assert.AreEqual(expected, target, "Vector4.cstr did not return the expected value."); value = 2.0f; target = new Vector4(value); expected = new Vector4(value, value, value, value); Assert.AreEqual(expected, target, "Vector4.cstr did not return the expected value."); } // A test for Vector4 comparison involving NaN values [TestMethod] public void Vector4EqualsNanTest() { Vector4 a = new Vector4(float.NaN, 0, 0, 0); Vector4 b = new Vector4(0, float.NaN, 0, 0); Vector4 c = new Vector4(0, 0, float.NaN, 0); Vector4 d = new Vector4(0, 0, 0, float.NaN); Assert.IsFalse(a == Vector4.Zero); Assert.IsFalse(b == Vector4.Zero); Assert.IsFalse(c == Vector4.Zero); Assert.IsFalse(d == Vector4.Zero); Assert.IsTrue(a != Vector4.Zero); Assert.IsTrue(b != Vector4.Zero); Assert.IsTrue(c != Vector4.Zero); Assert.IsTrue(d != Vector4.Zero); Assert.IsFalse(a.Equals(Vector4.Zero)); Assert.IsFalse(b.Equals(Vector4.Zero)); Assert.IsFalse(c.Equals(Vector4.Zero)); Assert.IsFalse(d.Equals(Vector4.Zero)); // 
Counterintuitive result - IEEE rules for NaN comparison are weird! Assert.IsFalse(a.Equals(a)); Assert.IsFalse(b.Equals(b)); Assert.IsFalse(c.Equals(c)); Assert.IsFalse(d.Equals(d)); } // A test to make sure these types are blittable directly into GPU buffer memory layouts [TestMethod] public unsafe void Vector4SizeofTest() { Assert.AreEqual(16, sizeof(Vector4)); Assert.AreEqual(32, sizeof(Vector4_2x)); Assert.AreEqual(20, sizeof(Vector4PlusFloat)); Assert.AreEqual(40, sizeof(Vector4PlusFloat_2x)); } [StructLayout(LayoutKind.Sequential)] struct Vector4_2x { Vector4 a; Vector4 b; } [StructLayout(LayoutKind.Sequential)] struct Vector4PlusFloat { Vector4 v; float f; } [StructLayout(LayoutKind.Sequential)] struct Vector4PlusFloat_2x { Vector4PlusFloat a; Vector4PlusFloat b; } // A test to make sure the fields are laid out how we expect [TestMethod] public unsafe void Vector4FieldOffsetTest() { Vector4 value; Assert.AreEqual(0 + new IntPtr(&value).ToInt64(), new IntPtr(&value.X).ToInt64()); Assert.AreEqual(4 + new IntPtr(&value).ToInt64(), new IntPtr(&value.Y).ToInt64()); Assert.AreEqual(8 + new IntPtr(&value).ToInt64(), new IntPtr(&value.Z).ToInt64()); Assert.AreEqual(12 + new IntPtr(&value).ToInt64(), new IntPtr(&value.W).ToInt64()); } // A test to validate interop between .NET (System.Numerics) and WinRT (Microsoft.Graphics.Canvas.Numerics) [TestMethod] public void Vector4WinRTInteropTest() { Vector4 a = new Vector4(23, 42, 100, -1); Microsoft.Graphics.Canvas.Numerics.Vector4 b = a; Assert.AreEqual(a.X, b.X); Assert.AreEqual(a.Y, b.Y); Assert.AreEqual(a.Z, b.Z); Assert.AreEqual(a.W, b.W); Vector4 c = b; Assert.AreEqual(a, c); } } }
/*
 * Copyright (c) 2008, openmetaverse.org
 * All rights reserved.
 *
 * - Redistribution and use in source and binary forms, with or without
 *   modification, are permitted provided that the following conditions are met:
 *
 * - Redistributions of source code must retain the above copyright notice, this
 *   list of conditions and the following disclaimer.
 * - Neither the name of the Second Life Reverse Engineering Team nor the names
 *   of its contributors may be used to endorse or promote products derived from
 *   this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

using System;
using System.Text;
using OpenMetaverse;

namespace OpenMetaverse
{
    /// <summary>
    /// Identifies how a packet's ID is encoded on the wire: High packets use a
    /// 1-byte ID, Medium a 2-byte ID (0xFF prefix), and Low a 4-byte ID
    /// (0xFF 0xFF prefix followed by a big-endian ushort). See Header.ToBytes.
    /// </summary>
    public enum PacketFrequency : byte
    {
        /// <summary>4-byte packet ID (0xFF 0xFF + big-endian ushort)</summary>
        Low,
        /// <summary>2-byte packet ID (0xFF + 1 byte)</summary>
        Medium,
        /// <summary>1-byte packet ID</summary>
        High
    }
}

namespace OpenMetaverse.Packets
{
    /// <summary>
    /// Thrown when a packet could not be successfully deserialized
    /// </summary>
    public class MalformedDataException : ApplicationException
    {
        /// <summary>
        /// Default constructor
        /// </summary>
        public MalformedDataException() { }

        /// <summary>
        /// Constructor that takes an additional error message
        /// </summary>
        /// <param name="Message">An error message to attach to this exception</param>
        public MalformedDataException(string Message)
            : base(Message)
        {
            // Tag the exception so logs show it originated in packet decoding
            this.Source = "Packet decoding";
        }
    }

    /// <summary>
    /// The header of a message template packet. Holds packet flags, sequence
    /// number, packet ID, and any ACKs that will be appended at the end of
    /// the packet
    /// </summary>
    public struct Header
    {
        public bool Reliable;       // MSG_RELIABLE flag
        public bool Resent;         // MSG_RESENT flag
        public bool Zerocoded;      // MSG_ZEROCODED flag (payload is run-length encoded)
        public bool AppendedAcks;   // MSG_APPENDED_ACKS flag (ACK list trails the packet)
        public uint Sequence;       // Big-endian sequence number on the wire
        public ushort ID;           // Packet ID within its frequency band
        public PacketFrequency Frequency;
        // ACKs appended to this packet; left null by BuildHeader when
        // AppendedAcks is not set.
        public uint[] AckList;

        /// <summary>
        /// Serialize this header into a byte array: 1 flags byte, 4-byte
        /// big-endian sequence number, 1 extra (zero) byte, then the
        /// frequency-dependent packet ID (1, 2, or 4 bytes).
        /// </summary>
        /// <param name="bytes">Target byte array</param>
        /// <param name="i">Write position; updated to point past the header</param>
        public void ToBytes(byte[] bytes, ref int i)
        {
            byte flags = 0;
            if (Reliable) flags |= Helpers.MSG_RELIABLE;
            if (Resent) flags |= Helpers.MSG_RESENT;
            if (Zerocoded) flags |= Helpers.MSG_ZEROCODED;
            if (AppendedAcks) flags |= Helpers.MSG_APPENDED_ACKS;

            // Flags
            bytes[i++] = flags;

            // Sequence number
            Utils.UIntToBytesBig(Sequence, bytes, i);
            i += 4;

            // Extra byte
            bytes[i++] = 0;

            // Packet ID
            switch (Frequency)
            {
                case PacketFrequency.High:
                    // 1 byte ID
                    bytes[i++] = (byte)ID;
                    break;
                case PacketFrequency.Medium:
                    // 2 byte ID
                    bytes[i++] = 0xFF;
                    bytes[i++] = (byte)ID;
                    break;
                case PacketFrequency.Low:
                    // 4 byte ID
                    bytes[i++] = 0xFF;
                    bytes[i++] = 0xFF;
                    Utils.UInt16ToBytesBig(ID, bytes, i);
                    i += 2;
                    break;
            }
        }

        // Deserialize into this struct by overwriting it wholesale with the
        // result of BuildHeader (valid because Header is a struct).
        public void FromBytes(byte[] bytes, ref int pos, ref int packetEnd)
        {
            this = BuildHeader(bytes, ref pos, ref packetEnd);
        }

        /// <summary>
        /// Convert the AckList to a byte array, used for packet serializing
        /// </summary>
        /// <param name="bytes">Reference to the target byte array</param>
        /// <param name="i">Beginning position to start writing to in the byte
        /// array, will be updated with the ending position of the ACK list</param>
        public void AcksToBytes(byte[] bytes, ref int i)
        {
            // NOTE(review): assumes AckList is non-null — BuildHeader leaves it
            // null when no ACKs are appended, so callers presumably only invoke
            // this when ACKs exist; confirm at call sites.
            foreach (uint ack in AckList)
            {
                Utils.UIntToBytesBig(ack, bytes, i);
                i += 4;
            }
            // The count byte trails the ACK values (mirrors CreateAckList,
            // which reads the count from the last byte of the packet).
            if (AckList.Length > 0)
            {
                bytes[i++] = (byte)AckList.Length;
            }
        }

        /// <summary>
        /// Parse a wire-format header: flags byte, big-endian sequence number,
        /// extra byte, then a frequency-dependent packet ID, plus any trailing
        /// appended ACKs.
        /// </summary>
        /// <param name="bytes">Raw packet buffer</param>
        /// <param name="pos">Read position; advanced past the header (7, 8, or
        /// 10 bytes depending on frequency)</param>
        /// <param name="packetEnd">Index of the last payload byte; moved back
        /// past any appended ACK block</param>
        /// <returns>The decoded header</returns>
        public static Header BuildHeader(byte[] bytes, ref int pos, ref int packetEnd)
        {
            Header header;
            byte flags = bytes[pos];
            header.AppendedAcks = (flags & Helpers.MSG_APPENDED_ACKS) != 0;
            header.Reliable = (flags & Helpers.MSG_RELIABLE) != 0;
            header.Resent = (flags & Helpers.MSG_RESENT) != 0;
            header.Zerocoded = (flags & Helpers.MSG_ZEROCODED) != 0;
            header.Sequence = (uint)((bytes[pos + 1] << 24) + (bytes[pos + 2] << 16) + (bytes[pos + 3] << 8) + bytes[pos + 4]);

            // Set the frequency and packet ID number
            if (bytes[pos + 6] == 0xFF)
            {
                if (bytes[pos + 7] == 0xFF)
                {
                    header.Frequency = PacketFrequency.Low;
                    // NOTE(review): in a zero-coded packet a 0x00 at pos+8
                    // appears to be a run-length marker, so the real ID byte
                    // sits at pos+10 — confirm against the zero-coding spec.
                    if (header.Zerocoded && bytes[pos + 8] == 0) header.ID = bytes[pos + 10];
                    else header.ID = (ushort)((bytes[pos + 8] << 8) + bytes[pos + 9]);
                    pos += 10;
                }
                else
                {
                    header.Frequency = PacketFrequency.Medium;
                    header.ID = bytes[pos + 7];
                    pos += 8;
                }
            }
            else
            {
                header.Frequency = PacketFrequency.High;
                header.ID = bytes[pos + 6];
                pos += 7;
            }

            header.AckList = null;
            CreateAckList(ref header, bytes, ref packetEnd);

            return header;
        }

        /// <summary>
        /// If the AppendedAcks flag is set, read the trailing ACK block: the
        /// final byte is the ACK count, preceded by that many big-endian
        /// 4-byte ACKs read back-to-front. packetEnd is moved back past the
        /// consumed block.
        /// </summary>
        /// <param name="header">Header whose AckList will be populated</param>
        /// <param name="bytes">Raw packet buffer</param>
        /// <param name="packetEnd">Index of the last payload byte; decremented
        /// past the count byte and the ACK values</param>
        static void CreateAckList(ref Header header, byte[] bytes, ref int packetEnd)
        {
            if (header.AppendedAcks)
            {
                // Last byte of the packet is the number of appended ACKs
                int count = bytes[packetEnd--];
                header.AckList = new uint[count];

                // ACKs are read backwards from the end of the buffer
                for (int i = 0; i < count; i++)
                {
                    header.AckList[i] = (uint)(
                        (bytes[(packetEnd - i * 4) - 3] << 24) |
                        (bytes[(packetEnd - i * 4) - 2] << 16) |
                        (bytes[(packetEnd - i * 4) - 1] << 8) |
                        (bytes[(packetEnd - i * 4)]));
                }

                packetEnd -= (count * 4);
            }
        }
    }

    /// <summary>
    /// A block of data in a packet. Packets are composed of one or more blocks,
    /// each block containing one or more fields
    /// </summary>
    public abstract class PacketBlock
    {
        /// <summary>Current length of the data in this packet</summary>
        public abstract int Length { get; }

        /// <summary>
        /// Create a block from a byte array
        /// </summary>
        /// <param name="bytes">Byte array containing the serialized block</param>
        /// <param name="i">Starting position of the block in the byte array.
        /// This will point to the data after the end of the block when the
        /// call returns</param>
        public abstract void FromBytes(byte[] bytes, ref int i);

        /// <summary>
        /// Serialize this block into a byte array
        /// </summary>
        /// <param name="bytes">Byte array to serialize this block into</param>
        /// <param name="i">Starting position in the byte array to serialize to.
        /// This will point to the position directly after the end of the
        /// serialized block when the call returns</param>
        public abstract void ToBytes(byte[] bytes, ref int i);
    }
/*
 * Copyright 2010-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */

/*
 * Do not modify this file. This file is generated from the lambda-2014-11-11.normal.json service model.
 */
using System;
using System.Collections.Generic;
using System.Xml.Serialization;
using System.Text;
using System.IO;

using Amazon.Runtime;
using Amazon.Runtime.Internal;

namespace Amazon.Lambda.Model
{
    /// <summary>
    /// A complex type that describes function metadata.
    /// </summary>
    public partial class FunctionConfiguration
    {
        private long? _codeSize;

        /// <summary>
        /// Gets and sets the property CodeSize.
        /// <para>
        /// The size, in bytes, of the function .zip file you uploaded.
        /// </para>
        /// </summary>
        public long CodeSize
        {
            get => _codeSize.GetValueOrDefault();
            set => _codeSize = value;
        }

        // True once CodeSize has been explicitly assigned
        internal bool IsSetCodeSize() => _codeSize.HasValue;

        private string _configurationId;

        /// <summary>
        /// Gets and sets the property ConfigurationId.
        /// <para>
        /// A Lambda-assigned unique identifier for the current function code and related configuration.
        /// </para>
        /// </summary>
        public string ConfigurationId
        {
            get => _configurationId;
            set => _configurationId = value;
        }

        // True once ConfigurationId has been explicitly assigned
        internal bool IsSetConfigurationId() => _configurationId != null;

        private string _description;

        /// <summary>
        /// Gets and sets the property Description.
        /// <para>
        /// The user-provided description.
        /// </para>
        /// </summary>
        public string Description
        {
            get => _description;
            set => _description = value;
        }

        // True once Description has been explicitly assigned
        internal bool IsSetDescription() => _description != null;

        private string _functionARN;

        /// <summary>
        /// Gets and sets the property FunctionARN.
        /// <para>
        /// The Amazon Resource Name (ARN) assigned to the function.
        /// </para>
        /// </summary>
        public string FunctionARN
        {
            get => _functionARN;
            set => _functionARN = value;
        }

        // True once FunctionARN has been explicitly assigned
        internal bool IsSetFunctionARN() => _functionARN != null;

        private string _functionName;

        /// <summary>
        /// Gets and sets the property FunctionName.
        /// <para>
        /// The name of the function.
        /// </para>
        /// </summary>
        public string FunctionName
        {
            get => _functionName;
            set => _functionName = value;
        }

        // True once FunctionName has been explicitly assigned
        internal bool IsSetFunctionName() => _functionName != null;

        private string _handler;

        /// <summary>
        /// Gets and sets the property Handler.
        /// <para>
        /// The function Lambda calls to begin executing your function.
        /// </para>
        /// </summary>
        public string Handler
        {
            get => _handler;
            set => _handler = value;
        }

        // True once Handler has been explicitly assigned
        internal bool IsSetHandler() => _handler != null;

        private string _lastModified;

        /// <summary>
        /// Gets and sets the property LastModified.
        /// <para>
        /// The timestamp of the last time you updated the function.
        /// </para>
        /// </summary>
        public string LastModified
        {
            get => _lastModified;
            set => _lastModified = value;
        }

        // True once LastModified has been explicitly assigned
        internal bool IsSetLastModified() => _lastModified != null;

        private int? _memorySize;

        /// <summary>
        /// Gets and sets the property MemorySize.
        /// <para>
        /// The memory size, in MB, you configured for the function. Must be a multiple of 64
        /// MB.
        /// </para>
        /// </summary>
        public int MemorySize
        {
            get => _memorySize.GetValueOrDefault();
            set => _memorySize = value;
        }

        // True once MemorySize has been explicitly assigned
        internal bool IsSetMemorySize() => _memorySize.HasValue;

        private Mode _mode;

        /// <summary>
        /// Gets and sets the property Mode.
        /// <para>
        /// The type of the Lambda function you uploaded.
        /// </para>
        /// </summary>
        public Mode Mode
        {
            get => _mode;
            set => _mode = value;
        }

        // True once Mode has been explicitly assigned
        internal bool IsSetMode() => _mode != null;

        private string _role;

        /// <summary>
        /// Gets and sets the property Role.
        /// <para>
        /// The Amazon Resource Name (ARN) of the IAM role that Lambda assumes when it executes
        /// your function to access any other Amazon Web Services (AWS) resources.
        /// </para>
        /// </summary>
        public string Role
        {
            get => _role;
            set => _role = value;
        }

        // True once Role has been explicitly assigned
        internal bool IsSetRole() => _role != null;

        private Runtime _runtime;

        /// <summary>
        /// Gets and sets the property Runtime.
        /// <para>
        /// The runtime environment for the Lambda function.
        /// </para>
        /// </summary>
        public Runtime Runtime
        {
            get => _runtime;
            set => _runtime = value;
        }

        // True once Runtime has been explicitly assigned
        internal bool IsSetRuntime() => _runtime != null;

        private int? _timeout;

        /// <summary>
        /// Gets and sets the property Timeout.
        /// <para>
        /// The function execution time at which Lambda should terminate the function. Because
        /// the execution time has cost implications, we recommend you set this value based on
        /// your expected execution time. The default is 3 seconds.
        /// </para>
        /// </summary>
        public int Timeout
        {
            get => _timeout.GetValueOrDefault();
            set => _timeout = value;
        }

        // True once Timeout has been explicitly assigned
        internal bool IsSetTimeout() => _timeout.HasValue;
    }
}
using Avalonia.Controls;
using Avalonia.UnitTests;
using System;
using Xunit;

namespace Avalonia.Markup.Xaml.UnitTests.Xaml
{
    /// <summary>
    /// XAML-loader tests for <c>{Binding}</c> with <c>RelativeSource</c>
    /// (DataContext, Self, ancestor lookups) and the <c>$self</c>/<c>$parent</c>
    /// shorthand syntax, including negation (<c>!</c>) operators.
    /// </summary>
    public class BindingTests_RelativeSource : XamlTestBase
    {
        // RelativeSource=DataContext binds against the control's DataContext.
        [Fact]
        public void Binding_To_DataContext_Works()
        {
            using (UnitTestApplication.Start(TestServices.StyledWindow))
            {
                var xaml = @" <Window xmlns='https://github.com/avaloniaui' xmlns:x='http://schemas.microsoft.com/winfx/2006/xaml' xmlns:local='clr-namespace:Avalonia.Markup.Xaml.UnitTests.Xaml;assembly=Avalonia.Markup.Xaml.UnitTests'> <Button Name='button' Content='{Binding Foo, RelativeSource={RelativeSource DataContext}}'/> </Window>";
                var window = (Window)AvaloniaRuntimeXamlLoader.Load(xaml);
                var button = window.FindControl<Button>("button");

                button.DataContext = new { Foo = "foo" };
                window.ApplyTemplate();

                Assert.Equal("foo", button.Content);
            }
        }

        // RelativeSource=Self binds against the control itself (here: its own Name).
        [Fact]
        public void Binding_To_Self_Works()
        {
            using (UnitTestApplication.Start(TestServices.StyledWindow))
            {
                var xaml = @" <Window xmlns='https://github.com/avaloniaui' xmlns:x='http://schemas.microsoft.com/winfx/2006/xaml' xmlns:local='clr-namespace:Avalonia.Markup.Xaml.UnitTests.Xaml;assembly=Avalonia.Markup.Xaml.UnitTests'> <Button Name='button' Content='{Binding Name, RelativeSource={RelativeSource Self}}'/> </Window>";
                var window = (Window)AvaloniaRuntimeXamlLoader.Load(xaml);
                var button = window.FindControl<Button>("button");

                window.ApplyTemplate();

                Assert.Equal("button", button.Content);
            }
        }

        // AncestorType=Border with no level resolves to the *nearest* Border (border2).
        [Fact]
        public void Binding_To_First_Ancestor_Works()
        {
            using (UnitTestApplication.Start(TestServices.StyledWindow))
            {
                var xaml = @" <Window xmlns='https://github.com/avaloniaui' xmlns:x='http://schemas.microsoft.com/winfx/2006/xaml' xmlns:local='clr-namespace:Avalonia.Markup.Xaml.UnitTests.Xaml;assembly=Avalonia.Markup.Xaml.UnitTests'> <Border Name='border1'> <Border Name='border2'> <Button Name='button' Content='{Binding Name, RelativeSource={RelativeSource AncestorType=Border}}'/> </Border> </Border> </Window>";
                var window = (Window)AvaloniaRuntimeXamlLoader.Load(xaml);
                var button = window.FindControl<Button>("button");

                window.ApplyTemplate();
                window.Presenter.ApplyTemplate();

                Assert.Equal("border2", button.Content);
            }
        }

        // An ancestor lookup that specifies only AncestorLevel (no AncestorType)
        // is rejected at XAML-load time.
        [Fact]
        public void Binding_To_First_Ancestor_Without_AncestorType_Throws_Exception()
        {
            using (UnitTestApplication.Start(TestServices.StyledWindow))
            {
                var xaml = @" <Window xmlns='https://github.com/avaloniaui' xmlns:x='http://schemas.microsoft.com/winfx/2006/xaml' xmlns:local='clr-namespace:Avalonia.Markup.Xaml.UnitTests.Xaml;assembly=Avalonia.Markup.Xaml.UnitTests'> <Border Name='border1'> <ContentControl Name='contentControl'> <Button Name='button' Content='{Binding Name, RelativeSource={RelativeSource AncestorLevel=1}}'/> </ContentControl> </Border> </Window>";

                Assert.Throws<InvalidOperationException>(
                    () => AvaloniaRuntimeXamlLoader.Load(xaml));
            }
        }

        // $parent (no index/type) is equivalent to "first ancestor".
        [Fact]
        public void Binding_To_First_Ancestor_With_Shorthand_Works()
        {
            using (UnitTestApplication.Start(TestServices.StyledWindow))
            {
                var xaml = @" <Window xmlns='https://github.com/avaloniaui' xmlns:x='http://schemas.microsoft.com/winfx/2006/xaml' xmlns:local='clr-namespace:Avalonia.Markup.Xaml.UnitTests.Xaml;assembly=Avalonia.Markup.Xaml.UnitTests'> <Border Name='border1'> <Border Name='border2'> <Button Name='button' Content='{Binding $parent.Name}'/> </Border> </Border> </Window>";
                var window = (Window)AvaloniaRuntimeXamlLoader.Load(xaml);
                var button = window.FindControl<Button>("button");

                window.ApplyTemplate();

                Assert.Equal("border2", button.Content);
            }
        }

        // $parent walks the *logical* tree: the button's logical parent is the
        // ContentControl, not whatever visual wrapper may sit in between.
        [Fact]
        public void Binding_To_First_Ancestor_With_Shorthand_Uses_LogicalTree()
        {
            using (UnitTestApplication.Start(TestServices.StyledWindow))
            {
                var xaml = @" <Window xmlns='https://github.com/avaloniaui' xmlns:x='http://schemas.microsoft.com/winfx/2006/xaml' xmlns:local='clr-namespace:Avalonia.Markup.Xaml.UnitTests.Xaml;assembly=Avalonia.Markup.Xaml.UnitTests'> <Border Name='border'> <ContentControl Name='contentControl'> <Button Name='button' Content='{Binding $parent.Name}'/> </ContentControl> </Border> </Window>";
                var window = (Window)AvaloniaRuntimeXamlLoader.Load(xaml);
                var contentControl = window.FindControl<ContentControl>("contentControl");
                var button = window.FindControl<Button>("button");

                window.ApplyTemplate();

                Assert.Equal("contentControl", button.Content);
            }
        }

        // AncestorLevel=2 skips the nearest Border and resolves to border1.
        [Fact]
        public void Binding_To_Second_Ancestor_Works()
        {
            using (UnitTestApplication.Start(TestServices.StyledWindow))
            {
                var xaml = @" <Window xmlns='https://github.com/avaloniaui' xmlns:x='http://schemas.microsoft.com/winfx/2006/xaml' xmlns:local='clr-namespace:Avalonia.Markup.Xaml.UnitTests.Xaml;assembly=Avalonia.Markup.Xaml.UnitTests'> <Border Name='border1'> <Border Name='border2'> <Button Name='button' Content='{Binding Name, RelativeSource={RelativeSource AncestorType=Border, AncestorLevel=2}}'/> </Border> </Border> </Window>";
                var window = (Window)AvaloniaRuntimeXamlLoader.Load(xaml);
                var button = window.FindControl<Button>("button");

                window.ApplyTemplate();
                window.Presenter.ApplyTemplate();

                Assert.Equal("border1", button.Content);
            }
        }

        // $parent[1] (zero-based index) over the logical tree resolves to the
        // grand-parent ContentControl.
        [Fact]
        public void Binding_To_Second_Ancestor_With_Shorthand_Uses_LogicalTree()
        {
            using (UnitTestApplication.Start(TestServices.StyledWindow))
            {
                var xaml = @" <Window xmlns='https://github.com/avaloniaui' xmlns:x='http://schemas.microsoft.com/winfx/2006/xaml' xmlns:local='clr-namespace:Avalonia.Markup.Xaml.UnitTests.Xaml;assembly=Avalonia.Markup.Xaml.UnitTests'> <ContentControl Name='contentControl1'> <ContentControl Name='contentControl2'> <Button Name='button' Content='{Binding $parent[1].Name}'/> </ContentControl> </ContentControl> </Window>";
                var window = (Window)AvaloniaRuntimeXamlLoader.Load(xaml);
                var contentControl1 = window.FindControl<ContentControl>("contentControl1");
                var contentControl2 = window.FindControl<ContentControl>("contentControl2");
                var button = window.FindControl<Button>("button");

                window.ApplyTemplate();

                Assert.Equal("contentControl1", button.Content);
            }
        }

        // $parent[Border] selects the nearest ancestor of the given type.
        [Fact]
        public void Binding_To_Ancestor_Of_Type_With_Shorthand_Works()
        {
            using (UnitTestApplication.Start(TestServices.StyledWindow))
            {
                var xaml = @" <Window xmlns='https://github.com/avaloniaui' xmlns:x='http://schemas.microsoft.com/winfx/2006/xaml' xmlns:local='clr-namespace:Avalonia.Markup.Xaml.UnitTests.Xaml;assembly=Avalonia.Markup.Xaml.UnitTests'> <Border Name='border1'> <Border Name='border2'> <Button Name='button' Content='{Binding $parent[Border].Name}'/> </Border> </Border> </Window>";
                var window = (Window)AvaloniaRuntimeXamlLoader.Load(xaml);
                var button = window.FindControl<Button>("button");

                window.ApplyTemplate();

                Assert.Equal("border2", button.Content);
            }
        }

        // $parent[Border; 1] combines a type filter with a zero-based level.
        [Fact]
        public void Binding_To_Second_Ancestor_With_Shorthand_And_Type_Works()
        {
            using (UnitTestApplication.Start(TestServices.StyledWindow))
            {
                var xaml = @" <Window xmlns='https://github.com/avaloniaui' xmlns:x='http://schemas.microsoft.com/winfx/2006/xaml' xmlns:local='clr-namespace:Avalonia.Markup.Xaml.UnitTests.Xaml;assembly=Avalonia.Markup.Xaml.UnitTests'> <Border Name='border1'> <Border Name='border2'> <Button Name='button' Content='{Binding $parent[Border; 1].Name}'/> </Border> </Border> </Window>";
                var window = (Window)AvaloniaRuntimeXamlLoader.Load(xaml);
                var button = window.FindControl<Button>("button");

                window.ApplyTemplate();

                Assert.Equal("border1", button.Content);
            }
        }

        // $parent[1] without a type filter resolves to the second logical ancestor.
        [Fact]
        public void Binding_To_Second_Ancestor_With_Shorthand_Works()
        {
            using (UnitTestApplication.Start(TestServices.StyledWindow))
            {
                var xaml = @" <Window xmlns='https://github.com/avaloniaui' xmlns:x='http://schemas.microsoft.com/winfx/2006/xaml' xmlns:local='clr-namespace:Avalonia.Markup.Xaml.UnitTests.Xaml;assembly=Avalonia.Markup.Xaml.UnitTests'> <Border Name='border1'> <Border Name='border2'> <Button Name='button' Content='{Binding $parent[1].Name}'/> </Border> </Border> </Window>";
                var window = (Window)AvaloniaRuntimeXamlLoader.Load(xaml);
                var button = window.FindControl<Button>("button");

                window.ApplyTemplate();

                Assert.Equal("border1", button.Content);
            }
        }

        // AncestorType may use an XML namespace prefix (local:TestWindow).
        [Fact]
        public void Binding_To_Ancestor_With_Namespace_Works()
        {
            using (UnitTestApplication.Start(TestServices.StyledWindow))
            {
                var xaml = @" <local:TestWindow xmlns='https://github.com/avaloniaui' xmlns:x='http://schemas.microsoft.com/winfx/2006/xaml' xmlns:local='clr-namespace:Avalonia.Markup.Xaml.UnitTests.Xaml;assembly=Avalonia.Markup.Xaml.UnitTests' Title='title'> <Button Name='button' Content='{Binding Title, RelativeSource={RelativeSource AncestorType=local:TestWindow}}'/> </local:TestWindow>";
                var window = (TestWindow)AvaloniaRuntimeXamlLoader.Load(xaml);
                var button = window.FindControl<Button>("button");

                window.ApplyTemplate();
                window.Presenter.ApplyTemplate();

                Assert.Equal("title", button.Content);
            }
        }

        // '!' negates the bound value: IsDefault is false, so Content becomes true.
        [Fact]
        public void Shorthand_Binding_With_Negation_Works()
        {
            using (UnitTestApplication.Start(TestServices.StyledWindow))
            {
                var xaml = @" <Window xmlns='https://github.com/avaloniaui' xmlns:x='http://schemas.microsoft.com/winfx/2006/xaml' xmlns:local='clr-namespace:Avalonia.Markup.Xaml.UnitTests.Xaml;assembly=Avalonia.Markup.Xaml.UnitTests'> <Border Name='border1'> <Border Name='border2'> <Button Name='button' Content='{Binding !$self.IsDefault}'/> </Border> </Border> </Window>";
                var window = (Window)AvaloniaRuntimeXamlLoader.Load(xaml);
                var button = window.FindControl<Button>("button");

                window.ApplyTemplate();

#pragma warning disable xUnit2004 // Diagnostic mis-firing since button.Content isn't guaranteed to be a bool.
                Assert.Equal(true, button.Content);
#pragma warning restore xUnit2004
            }
        }

        // '!!' double-negates: boolifies the value, yielding the original false.
        [Fact]
        public void Shorthand_Binding_With_Multiple_Negation_Works()
        {
            using (UnitTestApplication.Start(TestServices.StyledWindow))
            {
                var xaml = @" <Window xmlns='https://github.com/avaloniaui' xmlns:x='http://schemas.microsoft.com/winfx/2006/xaml' xmlns:local='clr-namespace:Avalonia.Markup.Xaml.UnitTests.Xaml;assembly=Avalonia.Markup.Xaml.UnitTests'> <Border Name='border1'> <Border Name='border2'> <Button Name='button' Content='{Binding !!$self.IsDefault}'/> </Border> </Border> </Window>";
                var window = (Window)AvaloniaRuntimeXamlLoader.Load(xaml);
                var button = window.FindControl<Button>("button");

                window.ApplyTemplate();

#pragma warning disable xUnit2004 // Diagnostic mis-firing since button.Content isn't guaranteed to be a bool.
                Assert.Equal(false, button.Content);
#pragma warning restore xUnit2004
            }
        }
    }

    /// <summary>
    /// Minimal Window subclass used by <see cref="BindingTests_RelativeSource"/>
    /// to exercise namespace-qualified AncestorType lookups.
    /// </summary>
    public class TestWindow : Window
    {
    }
}
using System;
using System.Diagnostics;
using System.Text;

using i64 = System.Int64;
using u8 = System.Byte;
using u32 = System.UInt32;
using Pgno = System.UInt32;

namespace Community.CsharpSqlite
{
  using sqlite3_int64 = System.Int64;
  using DbPage = Sqlite3.PgHdr;

  public partial class Sqlite3
  {
    /*
    ** 2009 January 28
    **
    ** The author disclaims copyright to this source code.  In place of
    ** a legal notice, here is a blessing:
    **
    **    May you do good and not evil.
    **    May you find forgiveness for yourself and forgive others.
    **    May you share freely, never taking more than you give.
    **
    *************************************************************************
    ** This file contains the implementation of the sqlite3_backup_XXX()
    ** API functions and the related features.
    *************************************************************************
    **  Included in SQLite3 port to C#-SQLite;  2008 Noah B Hart
    **  C#-SQLite is an independent reimplementation of the SQLite software library
    **
    **  SQLITE_SOURCE_ID: 2011-05-19 13:26:54 ed1da510a239ea767a01dc332b667119fa3c908e
    **
    *************************************************************************
    */
    //#include "sqliteInt.h"
    //#include "btreeInt.h"

    /* Macro to find the minimum of two numeric values.
    */
#if !MIN
    //# define MIN(x,y) ((x)<(y)?(x):(y))
#endif

    /*
    ** Structure allocated for each backup operation.
    */
    public class sqlite3_backup
    {
      public sqlite3 pDestDb;          /* Destination database handle */
      public Btree pDest;              /* Destination b-tree file */
      public u32 iDestSchema;          /* Original schema cookie in destination */
      public int bDestLocked;          /* True once a write-transaction is open on pDest */

      public Pgno iNext;               /* Page number of the next source page to copy */
      public sqlite3 pSrcDb;           /* Source database handle */
      public Btree pSrc;               /* Source b-tree file */

      public int rc;                   /* Backup process error code */

      /* These two variables are set by every call to backup_step(). They are
      ** read by calls to backup_remaining() and backup_pagecount().
      */
      public Pgno nRemaining;          /* Number of pages left to copy */
      public Pgno nPagecount;          /* Total number of pages to copy */

      public int isAttached;           /* True once backup has been registered with pager */
      public sqlite3_backup pNext;     /* Next backup associated with source pager */
    };

    /*
    ** THREAD SAFETY NOTES:
    **
    **   Once it has been created using backup_init(), a single sqlite3_backup
    **   structure may be accessed via two groups of thread-safe entry points:
    **
    **     * Via the sqlite3_backup_XXX() API function backup_step() and
    **       backup_finish(). Both these functions obtain the source database
    **       handle mutex and the mutex associated with the source BtShared
    **       structure, in that order.
    **
    **     * Via the BackupUpdate() and BackupRestart() functions, which are
    **       invoked by the pager layer to report various state changes in
    **       the page cache associated with the source database. The mutex
    **       associated with the source database BtShared structure will always
    **       be held when either of these functions are invoked.
    **
    **   The other sqlite3_backup_XXX() API functions, backup_remaining() and
    **   backup_pagecount() are not thread-safe functions. If they are called
    **   while some other thread is calling backup_step() or backup_finish(),
    **   the values returned may be invalid. There is no way for a call to
    **   BackupUpdate() or BackupRestart() to interfere with backup_remaining()
    **   or backup_pagecount().
    **
    **   Depending on the SQLite configuration, the database handles and/or
    **   the Btree objects may have their own mutexes that require locking.
    **   Non-sharable Btrees (in-memory databases for example), do not have
    **   associated mutexes.
    */

    /*
    ** Return a pointer corresponding to database zDb (i.e. "main", "temp")
    ** in connection handle pDb. If such a database cannot be found, return
    ** a NULL pointer and write an error message to pErrorDb.
    **
    ** If the "temp" database is requested, it may need to be opened by this
    ** function. If an error occurs while doing so, return 0 and write an
    ** error message to pErrorDb.
    */
    static Btree findBtree( sqlite3 pErrorDb, sqlite3 pDb, string zDb )
    {
      int i = sqlite3FindDbName( pDb, zDb );

      if ( i == 1 ) /* index 1 is the "temp" database; it may need opening on demand */
      {
        Parse pParse;
        int rc = 0;
        pParse = new Parse();//sqlite3StackAllocZero(pErrorDb, sizeof(*pParse));
        if ( pParse == null )
        {
          sqlite3Error( pErrorDb, SQLITE_NOMEM, "out of memory" );
          rc = SQLITE_NOMEM;
        }
        else
        {
          pParse.db = pDb;
          if ( sqlite3OpenTempDatabase( pParse ) != 0 )
          {
            sqlite3Error( pErrorDb, pParse.rc, "%s", pParse.zErrMsg );
            rc = SQLITE_ERROR;
          }
          sqlite3DbFree( pErrorDb, ref pParse.zErrMsg );
          //sqlite3StackFree( pErrorDb, pParse );
        }
        if ( rc != 0 )
        {
          return null;
        }
      }

      if ( i < 0 )
      {
        sqlite3Error( pErrorDb, SQLITE_ERROR, "unknown database %s", zDb );
        return null;
      }

      return pDb.aDb[i].pBt;
    }

    /*
    ** Attempt to set the page size of the destination to match the page size
    ** of the source.
    */
    static int setDestPgsz( sqlite3_backup p )
    {
      int rc;
      rc = sqlite3BtreeSetPageSize( p.pDest, sqlite3BtreeGetPageSize( p.pSrc ), -1, 0 );
      return rc;
    }

    /*
    ** Create an sqlite3_backup process to copy the contents of zSrcDb from
    ** connection handle pSrcDb to zDestDb in pDestDb. If successful, return
    ** a pointer to the new sqlite3_backup object.
    **
    ** If an error occurs, NULL is returned and an error code and error message
    ** stored in database handle pDestDb.
    */
    static public sqlite3_backup sqlite3_backup_init(
    sqlite3 pDestDb,          /* Database to write to */
    string zDestDb,           /* Name of database within pDestDb */
    sqlite3 pSrcDb,           /* Database connection to read from */
    string zSrcDb             /* Name of database within pSrcDb */
    )
    {
      sqlite3_backup p;       /* Value to return */

      /* Lock the source database handle. The destination database
      ** handle is not locked in this routine, but it is locked in
      ** sqlite3_backup_step(). The user is required to ensure that no
      ** other thread accesses the destination handle for the duration
      ** of the backup operation. Any attempt to use the destination
      ** database connection while a backup is in progress may cause
      ** a malfunction or a deadlock.
      */
      sqlite3_mutex_enter( pSrcDb.mutex );
      sqlite3_mutex_enter( pDestDb.mutex );

      if ( pSrcDb == pDestDb )
      {
        sqlite3Error( pDestDb, SQLITE_ERROR, "source and destination must be distinct" );
        p = null;
      }
      else
      {
        /* Allocate space for a new sqlite3_backup object...
        ** EVIDENCE-OF: R-64852-21591 The sqlite3_backup object is created by a
        ** call to sqlite3_backup_init() and is destroyed by a call to
        ** sqlite3_backup_finish(). */
        p = new sqlite3_backup();// (sqlite3_backup)sqlite3_malloc( sizeof( sqlite3_backup ) );
        //if ( null == p )
        //{
        //  sqlite3Error( pDestDb, SQLITE_NOMEM, 0 );
        //}
      }

      /* If the allocation succeeded, populate the new object. */
      if ( p != null )
      {
        // memset( p, 0, sizeof( sqlite3_backup ) );
        p.pSrc = findBtree( pDestDb, pSrcDb, zSrcDb );
        p.pDest = findBtree( pDestDb, pDestDb, zDestDb );
        p.pDestDb = pDestDb;
        p.pSrcDb = pSrcDb;
        p.iNext = 1;
        p.isAttached = 0;

        if ( null == p.pSrc || null == p.pDest || setDestPgsz( p ) == SQLITE_NOMEM )
        {
          /* One (or both) of the named databases did not exist or an OOM
          ** error was hit.  The error has already been written into the
          ** pDestDb handle.  All that is left to do here is free the
          ** sqlite3_backup structure.
          */
          //sqlite3_free( ref p );
          p = null;
        }
      }
      if ( p != null )
      {
        p.pSrc.nBackup++;
      }

      sqlite3_mutex_leave( pDestDb.mutex );
      sqlite3_mutex_leave( pSrcDb.mutex );
      return p;
    }

    /*
    ** Argument rc is an SQLite error code. Return true if this error is
    ** considered fatal if encountered during a backup operation. All errors
    ** are considered fatal except for SQLITE_BUSY and SQLITE_LOCKED.
    */
    static bool isFatalError( int rc )
    {
      return ( rc != SQLITE_OK && rc != SQLITE_BUSY && ALWAYS( rc != SQLITE_LOCKED ) );
    }

    /*
    ** Parameter zSrcData points to a buffer containing the data for
    ** page iSrcPg from the source database. Copy this data into the
    ** destination database.
    */
    static int backupOnePage( sqlite3_backup p, Pgno iSrcPg, byte[] zSrcData )
    {
      Pager pDestPager = sqlite3BtreePager( p.pDest );
      int nSrcPgsz = sqlite3BtreeGetPageSize( p.pSrc );
      int nDestPgsz = sqlite3BtreeGetPageSize( p.pDest );
      int nCopy = MIN( nSrcPgsz, nDestPgsz );
      i64 iEnd = (i64)iSrcPg * (i64)nSrcPgsz;
#if !NOT_SQLITE_HAS_CODEC
      //#if SQLITE_HAS_CODEC
      int nSrcReserve = sqlite3BtreeGetReserve(p.pSrc);
      int nDestReserve = sqlite3BtreeGetReserve(p.pDest);
#endif
      int rc = SQLITE_OK;
      i64 iOff;

      Debug.Assert( p.bDestLocked != 0 );
      Debug.Assert( !isFatalError( p.rc ) );
      Debug.Assert( iSrcPg != PENDING_BYTE_PAGE( p.pSrc.pBt ) );
      Debug.Assert( zSrcData != null );

      /* Catch the case where the destination is an in-memory database and the
      ** page sizes of the source and destination differ.
      */
      if ( nSrcPgsz != nDestPgsz && sqlite3PagerIsMemdb( pDestPager ) )
      {
        rc = SQLITE_READONLY;
      }

#if !NOT_SQLITE_HAS_CODEC
      //#if SQLITE_HAS_CODEC
      /* Backup is not possible if the page size of the destination is changing
      ** and a codec is in use.
      */
      if ( nSrcPgsz != nDestPgsz && sqlite3PagerGetCodec( pDestPager ) != null )
      {
        rc = SQLITE_READONLY;
      }

      /* Backup is not possible if the number of bytes of reserve space differ
      ** between source and destination.  If there is a difference, try to
      ** fix the destination to agree with the source.  If that is not possible,
      ** then the backup cannot proceed.
      */
      if ( nSrcReserve != nDestReserve )
      {
        u32 newPgsz = (u32)nSrcPgsz;
        rc = sqlite3PagerSetPagesize( pDestPager, ref newPgsz, nSrcReserve );
        if ( rc == SQLITE_OK && newPgsz != nSrcPgsz )
          rc = SQLITE_READONLY;
      }
#endif

      /* This loop runs once for each destination page spanned by the source
      ** page. For each iteration, variable iOff is set to the byte offset
      ** of the destination page.
      */
      for ( iOff = iEnd - (i64)nSrcPgsz; rc == SQLITE_OK && iOff < iEnd; iOff += nDestPgsz )
      {
        DbPage pDestPg = null;
        u32 iDest = (u32)( iOff / nDestPgsz ) + 1;
        if ( iDest == PENDING_BYTE_PAGE( p.pDest.pBt ) )
          continue;
        if ( SQLITE_OK == ( rc = sqlite3PagerGet( pDestPager, iDest, ref pDestPg ) )
        && SQLITE_OK == ( rc = sqlite3PagerWrite( pDestPg ) )
        )
        {
          //string zIn = &zSrcData[iOff%nSrcPgsz];
          byte[] zDestData = sqlite3PagerGetData( pDestPg );
          //string zOut = &zDestData[iOff % nDestPgsz];

          /* Copy the data from the source page into the destination page.
          ** Then clear the Btree layer MemPage.isInit flag. Both this module
          ** and the pager code use this trick (clearing the first byte
          ** of the page 'extra' space to invalidate the Btree layers
          ** cached parse of the page). MemPage.isInit is marked
          ** "MUST BE FIRST" for this purpose.
          */
          Buffer.BlockCopy( zSrcData, (int)( iOff % nSrcPgsz ), zDestData, (int)( iOff % nDestPgsz ), nCopy );// memcpy( zOut, zIn, nCopy );
          sqlite3PagerGetExtra( pDestPg ).isInit = 0;// ( sqlite3PagerGetExtra( pDestPg ) )[0] = 0;
        }
        sqlite3PagerUnref( pDestPg );
      }

      return rc;
    }

    /*
    ** If pFile is currently larger than iSize bytes, then truncate it to
    ** exactly iSize bytes. If pFile is not larger than iSize bytes, then
    ** this function is a no-op.
    **
    ** Return SQLITE_OK if everything is successful, or an SQLite error
    ** code if an error occurs.
    */
    static int backupTruncateFile( sqlite3_file pFile, int iSize )
    {
      long iCurrent = 0;
      int rc = sqlite3OsFileSize( pFile, ref iCurrent );
      if ( rc == SQLITE_OK && iCurrent > iSize )
      {
        rc = sqlite3OsTruncate( pFile, iSize );
      }
      return rc;
    }

    /*
    ** Register this backup object with the associated source pager for
    ** callbacks when pages are changed or the cache invalidated.
    */
    static void attachBackupObject( sqlite3_backup p )
    {
      sqlite3_backup pp;
      Debug.Assert( sqlite3BtreeHoldsMutex( p.pSrc ) );
      pp = sqlite3PagerBackupPtr( sqlite3BtreePager( p.pSrc ) );
      /* Push p onto the head of the pager's singly-linked backup list. */
      p.pNext = pp;
      sqlite3BtreePager( p.pSrc ).pBackup = p; //*pp = p;
      p.isAttached = 1;
    }

    /*
    ** Copy nPage pages from the source b-tree to the destination.
    */
    static public int sqlite3_backup_step( sqlite3_backup p, int nPage )
    {
      int rc;
      int destMode;       /* Destination journal mode */
      int pgszSrc = 0;    /* Source page size */
      int pgszDest = 0;   /* Destination page size */

      sqlite3_mutex_enter( p.pSrcDb.mutex );
      sqlite3BtreeEnter( p.pSrc );
      if ( p.pDestDb != null )
      {
        sqlite3_mutex_enter( p.pDestDb.mutex );
      }

      rc = p.rc;
      if ( !isFatalError( rc ) )
      {
        Pager pSrcPager = sqlite3BtreePager( p.pSrc );     /* Source pager */
        Pager pDestPager = sqlite3BtreePager( p.pDest );   /* Dest pager */
        int ii;                                            /* Iterator variable */
        Pgno nSrcPage = 0;                                 /* Size of source db in pages */
        int bCloseTrans = 0;                               /* True if src db requires unlocking */

        /* If the source pager is currently in a write-transaction, return
        ** SQLITE_BUSY immediately.
        */
        if ( p.pDestDb != null && p.pSrc.pBt.inTransaction == TRANS_WRITE )
        {
          rc = SQLITE_BUSY;
        }
        else
        {
          rc = SQLITE_OK;
        }

        /* Lock the destination database, if it is not locked already. */
        if ( SQLITE_OK == rc && p.bDestLocked == 0
        && SQLITE_OK == ( rc = sqlite3BtreeBeginTrans( p.pDest, 2 ) )
        )
        {
          p.bDestLocked = 1;
          sqlite3BtreeGetMeta( p.pDest, BTREE_SCHEMA_VERSION, ref p.iDestSchema );
        }

        /* If there is no open read-transaction on the source database, open
        ** one now. If a transaction is opened here, then it will be closed
        ** before this function exits.
        */
        if ( rc == SQLITE_OK && !sqlite3BtreeIsInReadTrans( p.pSrc ) )
        {
          rc = sqlite3BtreeBeginTrans( p.pSrc, 0 );
          bCloseTrans = 1;
        }

        /* Do not allow backup if the destination database is in WAL mode
        ** and the page sizes are different between source and destination */
        pgszSrc = sqlite3BtreeGetPageSize( p.pSrc );
        pgszDest = sqlite3BtreeGetPageSize( p.pDest );
        destMode = sqlite3PagerGetJournalMode( sqlite3BtreePager( p.pDest ) );
        if ( SQLITE_OK == rc && destMode == PAGER_JOURNALMODE_WAL && pgszSrc != pgszDest )
        {
          rc = SQLITE_READONLY;
        }

        /* Now that there is a read-lock on the source database, query the
        ** source pager for the number of pages in the database.
        */
        nSrcPage = sqlite3BtreeLastPage( p.pSrc );
        Debug.Assert( nSrcPage >= 0 ); /* always true: Pgno is unsigned (kept from the C original) */
        for ( ii = 0; ( nPage < 0 || ii < nPage ) && p.iNext <= nSrcPage && 0 == rc; ii++ )
        {
          Pgno iSrcPg = p.iNext;                 /* Source page number */
          if ( iSrcPg != PENDING_BYTE_PAGE( p.pSrc.pBt ) )
          {
            DbPage pSrcPg = null;                /* Source page object */
            rc = sqlite3PagerGet( pSrcPager, (u32)iSrcPg, ref pSrcPg );
            if ( rc == SQLITE_OK )
            {
              rc = backupOnePage( p, iSrcPg, sqlite3PagerGetData( pSrcPg ) );
              sqlite3PagerUnref( pSrcPg );
            }
          }
          p.iNext++;
        }
        if ( rc == SQLITE_OK )
        {
          p.nPagecount = nSrcPage;
          p.nRemaining = ( nSrcPage + 1 - p.iNext );
          if ( p.iNext > nSrcPage )
          {
            rc = SQLITE_DONE;
          }
          else if ( 0 == p.isAttached )
          {
            attachBackupObject( p );
          }
        }

        /* Update the schema version field in the destination database. This
        ** is to make sure that the schema-version really does change in
        ** the case where the source and destination databases have the
        ** same schema version.
        */
        if ( rc == SQLITE_DONE
        && ( rc = sqlite3BtreeUpdateMeta( p.pDest, 1, p.iDestSchema + 1 ) ) == SQLITE_OK
        )
        {
          Pgno nDestTruncate;

          if ( p.pDestDb != null )
          {
            sqlite3ResetInternalSchema( p.pDestDb, -1 );
          }

          /* Set nDestTruncate to the final number of pages in the destination
          ** database. The complication here is that the destination page
          ** size may be different to the source page size.
          **
          ** If the source page size is smaller than the destination page size,
          ** round up. In this case the call to sqlite3OsTruncate() below will
          ** fix the size of the file. However it is important to call
          ** sqlite3PagerTruncateImage() here so that any pages in the
          ** destination file that lie beyond the nDestTruncate page mark are
          ** journalled by PagerCommitPhaseOne() before they are destroyed
          ** by the file truncation.
          */
          Debug.Assert( pgszSrc == sqlite3BtreeGetPageSize( p.pSrc ) );
          Debug.Assert( pgszDest == sqlite3BtreeGetPageSize( p.pDest ) );
          if ( pgszSrc < pgszDest )
          {
            int ratio = pgszDest / pgszSrc;
            nDestTruncate = (Pgno)( ( nSrcPage + ratio - 1 ) / ratio );
            if ( nDestTruncate == (int)PENDING_BYTE_PAGE( p.pDest.pBt ) )
            {
              nDestTruncate--;
            }
          }
          else
          {
            nDestTruncate = (Pgno)( nSrcPage * ( pgszSrc / pgszDest ) );
          }
          sqlite3PagerTruncateImage( pDestPager, nDestTruncate );

          if ( pgszSrc < pgszDest )
          {
            /* If the source page-size is smaller than the destination page-size,
            ** two extra things may need to happen:
            **
            **   * The destination may need to be truncated, and
            **
            **   * Data stored on the pages immediately following the
            **     pending-byte page in the source database may need to be
            **     copied into the destination database.
            */
            int iSize = (int)( pgszSrc * nSrcPage );
            sqlite3_file pFile = sqlite3PagerFile( pDestPager );
            i64 iOff;
            i64 iEnd;

            Debug.Assert( pFile != null );
            Debug.Assert( (i64)nDestTruncate * (i64)pgszDest >= iSize || (
            nDestTruncate == (int)( PENDING_BYTE_PAGE( p.pDest.pBt ) - 1 )
            && iSize >= PENDING_BYTE && iSize <= PENDING_BYTE + pgszDest
            ) );

            /* This call ensures that all data required to recreate the original
            ** database has been stored in the journal for pDestPager and the
            ** journal synced to disk. So at this point we may safely modify
            ** the database file in any way, knowing that if a power failure
            ** occurs, the original database will be reconstructed from the
            ** journal file.
            */
            rc = sqlite3PagerCommitPhaseOne( pDestPager, null, true );

            /* Write the extra pages and truncate the database file as required. */
            iEnd = MIN( PENDING_BYTE + pgszDest, iSize );
            for (
            iOff = PENDING_BYTE + pgszSrc;
            rc == SQLITE_OK && iOff < iEnd;
            iOff += pgszSrc
            )
            {
              PgHdr pSrcPg = null;
              u32 iSrcPg = (u32)( ( iOff / pgszSrc ) + 1 );
              rc = sqlite3PagerGet( pSrcPager, iSrcPg, ref pSrcPg );
              if ( rc == SQLITE_OK )
              {
                byte[] zData = sqlite3PagerGetData( pSrcPg );
                rc = sqlite3OsWrite( pFile, zData, pgszSrc, iOff );
              }
              sqlite3PagerUnref( pSrcPg );
            }
            if ( rc == SQLITE_OK )
            {
              rc = backupTruncateFile( pFile, (int)iSize );
            }

            /* Sync the database file to disk. */
            if ( rc == SQLITE_OK )
            {
              rc = sqlite3PagerSync( pDestPager );
            }
          }
          else
          {
            rc = sqlite3PagerCommitPhaseOne( pDestPager, null, false );
          }

          /* Finish committing the transaction to the destination database. */
          if ( SQLITE_OK == rc
          && SQLITE_OK == ( rc = sqlite3BtreeCommitPhaseTwo( p.pDest, 0 ) )
          )
          {
            rc = SQLITE_DONE;
          }
        }

        /* If bCloseTrans is true, then this function opened a read transaction
        ** on the source database. Close the read transaction here. There is
        ** no need to check the return values of the btree methods here, as
        ** "committing" a read-only transaction cannot fail.
        */
        if ( bCloseTrans != 0 )
        {
#if !NDEBUG || SQLITE_COVERAGE_TEST
          //TESTONLY( int rc2 );
          //TESTONLY( rc2  = ) sqlite3BtreeCommitPhaseOne(p.pSrc, 0);
          //TESTONLY( rc2 |= ) sqlite3BtreeCommitPhaseTwo(p.pSrc);
          int rc2;
          rc2 = sqlite3BtreeCommitPhaseOne( p.pSrc, "" );
          rc2 |= sqlite3BtreeCommitPhaseTwo( p.pSrc, 0 );
          Debug.Assert( rc2 == SQLITE_OK );
#else
          sqlite3BtreeCommitPhaseOne(p.pSrc, null);
          sqlite3BtreeCommitPhaseTwo(p.pSrc, 0);
#endif
        }

        if ( rc == SQLITE_IOERR_NOMEM )
        {
          rc = SQLITE_NOMEM;
        }
        p.rc = rc;
      }
      if ( p.pDestDb != null )
      {
        sqlite3_mutex_leave( p.pDestDb.mutex );
      }
      sqlite3BtreeLeave( p.pSrc );
      sqlite3_mutex_leave( p.pSrcDb.mutex );
      return rc;
    }

    /*
    ** Release all resources associated with an sqlite3_backup* handle.
    */
    static public int sqlite3_backup_finish( sqlite3_backup p )
    {
      sqlite3_backup pp;       /* Ptr to head of pagers backup list */
      sqlite3_mutex mutex;     /* Mutex to protect source database */
      int rc;                  /* Value to return */

      /* Enter the mutexes */
      if ( p == null )
        return SQLITE_OK;
      sqlite3_mutex_enter( p.pSrcDb.mutex );
      sqlite3BtreeEnter( p.pSrc );
      mutex = p.pSrcDb.mutex;
      if ( p.pDestDb != null )
      {
        sqlite3_mutex_enter( p.pDestDb.mutex );
      }

      /* Detach this backup from the source pager. */
      if ( p.pDestDb != null )
      {
        p.pSrc.nBackup--;
      }
      if ( p.isAttached != 0 )
      {
        pp = sqlite3PagerBackupPtr( sqlite3BtreePager( p.pSrc ) );
        while ( pp != p )
        {
          pp = ( pp ).pNext;
        }
        /* NOTE(review): the C original updates *pp (the link that pointed at p);
        ** this port always overwrites the pager's list head with p.pNext, which
        ** would drop other attached backups whenever p is not the head — confirm
        ** against the upstream sqlite3 backup.c before relying on multiple
        ** concurrent backups of one source. */
        sqlite3BtreePager( p.pSrc ).pBackup = p.pNext;
      }

      /* If a transaction is still open on the Btree, roll it back. */
      sqlite3BtreeRollback( p.pDest );

      /* Set the error code of the destination database handle. */
      rc = ( p.rc == SQLITE_DONE ) ? SQLITE_OK : p.rc;
      sqlite3Error( p.pDestDb, rc, 0 );

      /* Exit the mutexes and free the backup context structure. */
      if ( p.pDestDb != null )
      {
        sqlite3_mutex_leave( p.pDestDb.mutex );
      }
      sqlite3BtreeLeave( p.pSrc );
      if ( p.pDestDb != null )
      {
        /* EVIDENCE-OF: R-64852-21591 The sqlite3_backup object is created by a
        ** call to sqlite3_backup_init() and is destroyed by a call to
        ** sqlite3_backup_finish(). */
        //sqlite3_free( ref p );
      }
      sqlite3_mutex_leave( mutex );
      return rc;
    }

    /*
    ** Return the number of pages still to be backed up as of the most recent
    ** call to sqlite3_backup_step().
    */
    static int sqlite3_backup_remaining( sqlite3_backup p )
    {
      return (int)p.nRemaining;
    }

    /*
    ** Return the total number of pages in the source database as of the most
    ** recent call to sqlite3_backup_step().
    */
    static int sqlite3_backup_pagecount( sqlite3_backup p )
    {
      return (int)p.nPagecount;
    }

    /*
    ** This function is called after the contents of page iPage of the
    ** source database have been modified. If page iPage has already been
    ** copied into the destination database, then the data written to the
    ** destination is now invalidated. The destination copy of iPage needs
    ** to be updated with the new data before the backup operation is
    ** complete.
    **
    ** It is assumed that the mutex associated with the BtShared object
    ** corresponding to the source database is held when this function is
    ** called.
    */
    static void sqlite3BackupUpdate( sqlite3_backup pBackup, Pgno iPage, byte[] aData )
    {
      sqlite3_backup p;                  /* Iterator variable */
      for ( p = pBackup; p != null; p = p.pNext )
      {
        Debug.Assert( sqlite3_mutex_held( p.pSrc.pBt.mutex ) );
        if ( !isFatalError( p.rc ) && iPage < p.iNext )
        {
          /* The backup process p has already copied page iPage. But now it
          ** has been modified by a transaction on the source pager. Copy
          ** the new data into the backup.
          */
          int rc;
          Debug.Assert( p.pDestDb != null );
          sqlite3_mutex_enter( p.pDestDb.mutex );
          rc = backupOnePage( p, iPage, aData );
          sqlite3_mutex_leave( p.pDestDb.mutex );
          Debug.Assert( rc != SQLITE_BUSY && rc != SQLITE_LOCKED );
          if ( rc != SQLITE_OK )
          {
            p.rc = rc;
          }
        }
      }
    }

    /*
    ** Restart the backup process. This is called when the pager layer
    ** detects that the database has been modified by an external database
    ** connection. In this case there is no way of knowing which of the
    ** pages that have been copied into the destination database are still
    ** valid and which are not, so the entire process needs to be restarted.
    **
    ** It is assumed that the mutex associated with the BtShared object
    ** corresponding to the source database is held when this function is
    ** called.
    */
    static void sqlite3BackupRestart( sqlite3_backup pBackup )
    {
      sqlite3_backup p;                  /* Iterator variable */
      for ( p = pBackup; p != null; p = p.pNext )
      {
        Debug.Assert( sqlite3_mutex_held( p.pSrc.pBt.mutex ) );
        p.iNext = 1;
      }
    }

#if !SQLITE_OMIT_VACUUM
    /*
    ** Copy the complete content of pBtFrom into pBtTo.  A transaction
    ** must be active for both files.
    **
    ** The size of file pTo may be reduced by this operation. If anything
    ** goes wrong, the transaction on pTo is rolled back. If successful, the
    ** transaction is committed before returning.
    */
    static int sqlite3BtreeCopyFile( Btree pTo, Btree pFrom )
    {
      int rc;
      sqlite3_backup b;
      sqlite3BtreeEnter( pTo );
      sqlite3BtreeEnter( pFrom );

      /* Set up an sqlite3_backup object. sqlite3_backup.pDestDb must be set
      ** to 0. This is used by the implementations of sqlite3_backup_step()
      ** and sqlite3_backup_finish() to detect that they are being called
      ** from this function, not directly by the user.
      */
      b = new sqlite3_backup();// memset( &b, 0, sizeof( b ) );
      b.pSrcDb = pFrom.db;
      b.pSrc = pFrom;
      b.pDest = pTo;
      b.iNext = 1;

      /* 0x7FFFFFFF is the hard limit for the number of pages in a database
      ** file. By passing this as the number of pages to copy to
      ** sqlite3_backup_step(), we can guarantee that the copy finishes
      ** within a single call (unless an error occurs). The Debug.Assert() statement
      ** checks this assumption - (p.rc) should be set to either SQLITE_DONE
      ** or an error code.
      */
      sqlite3_backup_step( b, 0x7FFFFFFF );
      Debug.Assert( b.rc != SQLITE_OK );
      rc = sqlite3_backup_finish( b );
      if ( rc == SQLITE_OK )
      {
        pTo.pBt.pageSizeFixed = false;
      }

      sqlite3BtreeLeave( pFrom );
      sqlite3BtreeLeave( pTo );
      return rc;
    }
#endif //* SQLITE_OMIT_VACUUM */
  }
}
// *********************************************************************** // Copyright (c) 2007-2016 Charlie Poole // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. // *********************************************************************** #if PARALLEL using System; using System.Collections; using System.Collections.Concurrent; using System.Globalization; using System.Runtime.Serialization; using System.Threading; #if NET_2_0 || NET_3_5 using ManualResetEventSlim = System.Threading.ManualResetEvent; #endif using NUnit.Framework.Interfaces; namespace NUnit.Framework.Internal.Execution { #region Individual Event Classes /// <summary> /// NUnit.Core.Event is the abstract base for all stored events. /// An Event is the stored representation of a call to the /// ITestListener interface and is used to record such calls /// or to queue them for forwarding on another thread or at /// a later time. 
/// </summary>
public abstract class Event
{
    /// <summary>
    /// The Send method is implemented by derived classes to send the event to the specified listener.
    /// </summary>
    /// <param name="listener">The listener.</param>
    public abstract void Send(ITestListener listener);
}

/// <summary>
/// TestStartedEvent holds information needed to call the TestStarted method.
/// </summary>
public class TestStartedEvent : Event
{
    // The test whose start is being reported; captured so the call can be replayed later.
    private readonly ITest _test;

    /// <summary>
    /// Initializes a new instance of the <see cref="TestStartedEvent"/> class.
    /// </summary>
    /// <param name="test">The test.</param>
    public TestStartedEvent(ITest test)
    {
        _test = test;
    }

    /// <summary>
    /// Calls TestStarted on the specified listener.
    /// </summary>
    /// <param name="listener">The listener.</param>
    public override void Send(ITestListener listener)
    {
        listener.TestStarted(_test);
    }
}

/// <summary>
/// TestFinishedEvent holds information needed to call the TestFinished method.
/// </summary>
public class TestFinishedEvent : Event
{
    // The completed test result; captured so the call can be replayed later.
    private readonly ITestResult _result;

    /// <summary>
    /// Initializes a new instance of the <see cref="TestFinishedEvent"/> class.
    /// </summary>
    /// <param name="result">The result.</param>
    public TestFinishedEvent(ITestResult result)
    {
        _result = result;
    }

    /// <summary>
    /// Calls TestFinished on the specified listener.
    /// </summary>
    /// <param name="listener">The listener.</param>
    public override void Send(ITestListener listener)
    {
        listener.TestFinished(_result);
    }
}

/// <summary>
/// TestOutputEvent holds information needed to call the TestOutput method.
/// </summary>
public class TestOutputEvent : Event
{
    // The output payload; captured so the call can be replayed later.
    private readonly TestOutput _output;

    /// <summary>
    /// Initializes a new instance of the <see cref="TestOutputEvent"/> class.
    /// </summary>
    /// <param name="output">The output object.</param>
    public TestOutputEvent(TestOutput output)
    {
        _output = output;
    }

    /// <summary>
    /// Calls TestOutput on the specified listener.
    /// </summary>
    /// <param name="listener">The listener.</param>
    public override void Send(ITestListener listener)
    {
        listener.TestOutput(_output);
    }
}

#endregion

/// <summary>
/// A lock-free, multi-producer/multi-consumer queue of <see cref="Event"/>
/// items. Producers call <see cref="Enqueue"/>; consumers call
/// <see cref="Dequeue"/>, which can optionally block until an item arrives
/// or <see cref="Stop"/> is called.
/// </summary>
public class EventQueue
{
    // How many times a consumer busy-spins on an empty queue before falling
    // back to the (more expensive) kernel wait handle in Dequeue().
    private const int spinCount = 5;

//        static readonly Logger log = InternalTrace.GetLogger("EventQueue");

    // The underlying lock-free storage for pending events.
    private readonly ConcurrentQueue<Event> _queue = new ConcurrentQueue<Event>();

    /* This event is used solely for the purpose of having an optimized sleep cycle when
     * we have to wait on an external event (Add or Remove for instance)
     */
    private readonly ManualResetEventSlim _mreAdd = new ManualResetEventSlim(false);

    /* The whole idea is to use these two values in a transactional
     * way to track and manage the actual data inside the underlying lock-free collection
     * instead of directly working with it or using external locking.
     *
     * They are manipulated with CAS and are guaranteed to increase over time and use
     * of the instance thus preventing ABA problems.
     */
    private int _addId = int.MinValue;
    private int _removeId = int.MinValue;

    // 0 = running, 1 = stopped; flipped exactly once via Interlocked in Stop().
    private int _stopped;

    /// <summary>
    /// Gets the count of items in the queue.
    /// </summary>
    public int Count
    {
        get { return _queue.Count; }
    }

    /// <summary>
    /// Enqueues the specified event
    /// </summary>
    /// <param name="e">The event to enqueue.</param>
    public void Enqueue(Event e)
    {
        do
        {
            int cachedAddId = _addId;

            // Validate that we are the current enqueuer (claim an add "ticket"
            // via CAS; on contention, retry with a fresh ticket).
            if (Interlocked.CompareExchange(ref _addId, cachedAddId + 1, cachedAddId) != cachedAddId)
                continue;

            // Add to the collection
            _queue.Enqueue(e);

            // Wake up threads that may have been sleeping
            _mreAdd.Set();

            break;
        } while (true);

        Thread.Sleep(1);  // give EventPump thread a chance to process the event
    }

    /// <summary>
    /// Removes the first element from the queue and returns it (or <c>null</c>).
    /// </summary>
    /// <param name="blockWhenEmpty">
    /// If <c>true</c> and the queue is empty, the calling thread is blocked until
    /// either an element is enqueued, or <see cref="Stop"/> is called.
    /// </param>
    /// <returns>
    /// <list type="bullet">
    /// <item>
    /// <term>If the queue not empty</term>
    /// <description>the first element.</description>
    /// </item>
    /// <item>
    /// <term>otherwise, if <paramref name="blockWhenEmpty"/>==<c>false</c>
    /// or <see cref="Stop"/> has been called</term>
    /// <description><c>null</c>.</description>
    /// </item>
    /// </list>
    /// </returns>
    public Event Dequeue(bool blockWhenEmpty)
    {
        SpinWait sw = new SpinWait();

        do
        {
            // Snapshot both tickets; equal tickets mean the queue is
            // logically empty at this instant.
            int cachedRemoveId = _removeId;
            int cachedAddId = _addId;

            // Empty case
            if (cachedRemoveId == cachedAddId)
            {
                if (!blockWhenEmpty || _stopped != 0)
                    return null;

                // Spin a few times to see if something changes
                if (sw.Count <= spinCount)
                {
                    sw.SpinOnce();
                }
                else
                {
                    // Reset to wait for an enqueue
                    _mreAdd.Reset();

                    // Recheck for an enqueue to avoid a Wait
                    if (cachedRemoveId != _removeId || cachedAddId != _addId)
                    {
                        // Queue is not empty, set the event
                        _mreAdd.Set();
                        continue;
                    }

                    // Wait for something to happen. The 500ms bound means a
                    // wake-up that slips past the recheck only delays us, it
                    // never deadlocks: we re-poll on timeout.
                    _mreAdd.Wait(500);
                }

                continue;
            }

            // Validate that we are the current dequeuer (claim a remove
            // "ticket" via CAS; on contention, restart the loop).
            if (Interlocked.CompareExchange(ref _removeId, cachedRemoveId + 1, cachedRemoveId) != cachedRemoveId)
                continue;

            // Dequeue our work item. Holding a ticket guarantees an item is
            // (or will momentarily be) present, so retry until TryDequeue
            // succeeds, bailing out only on stop / non-blocking mode.
            Event e;
            while (!_queue.TryDequeue (out e))
            {
                if (!blockWhenEmpty || _stopped != 0)
                    return null;
            }

            return e;
        } while (true);
    }

    /// <summary>
    /// Stop processing of the queue
    /// </summary>
    public void Stop()
    {
        // Only the first caller flips the flag; it also wakes any blocked
        // consumers so they can observe _stopped and return null.
        if (Interlocked.CompareExchange(ref _stopped, 1, 0) == 0)
            _mreAdd.Set();
    }
}
}
#endif
/* transpiled with BefunCompile v1.3.0 (c) 2017 */
// NOTE(review): this class is machine-generated by BefunCompile (a Befunge
// compiler). The statements below mirror the compiled program's basic
// blocks, so normal readability conventions deliberately do not apply.
public static class Program
{
	// Compressed playfield data: base64 text whose decoded payload starts
	// with a round-count byte followed by that many rounds of gzip
	// compression (see zd/zs below).
	private static readonly string _g = "Ah+LCAAAAAAABADtl0OXJQqQhMt237LNLtu2bdu2bdu2re6ybduuW3bPmx8xu8lV5CIXeTLPifgIY2BA/rcgQG7ypupNpZz1Ak5o1HJVi8UCBNDiIP4F4lgrEnEWpi7h"+
		"OKnud0ytNnfKKe2g4FhSa59HslROfkZzOk3vEGy12/1bS/j3uoZl4t/q9jpW6+afqjcnYXMyGmD9SWUTn7Aw9n5utpfgpv29bjOSYdeQ4OL+1br7sPAapbHR4rfd9DnG"+
		"7xawvfkqge3hd717xHBoxu8eZ4DtN5vAdGTwOXp3s/QigVvj28ov03tSR+i3CmSai9KodwrwefZc8GZwWBYw1wIWH113xnnuac++cb46vgJpTq5fnNR9Q1XMvyvZdl9n"+
		"//Kfxmf09dQnbHEUlOwmdB0iSW97fRHx6/QAiQ6tchNwCOn5DXMIa8auntE4lg/bDtm1tz2jMrR5fmx3PTmBr0DM3/oNc/4CcMi2e7+QnV49T1q0v1yfO4vednHKRk2i"+
		"7567CQK2jpaGz3ZOYDH+RUJjiFOXpSVsDm07gQMdLw9TEjqXmA5R7wZ+DW6XZSQwrn5g2Q2z7LfEoQv8sep4Y6Yb3Ykw+xg9/ZPqKQvMXbt+Lpn2PJkIuQHeC6xa89kC"+
		"HP7SXFWPuNTTlXoaPh8N5wx/jv7aM6r3HAZa2ETEWW8ACcqGc0xWWhx4ffTJCV6iMSPv6lATOG0BAUqe5SWhkEdXCYOvo7A9I2dxAc/4/4y4ZnFf+Pj4pzmd5oA2TYw2"+
		"dPuaoeTpqr0nBEapm4c7H/HPmDY2oUkmTjssg4u0/F53Trg9ob8Zrlqfh4QTMmhYCNazgbrGCcMKDl81sW+ppPyOdIMbWlGfe4iPBf8YDP7FzNxFVzTx5JiGq2H6lL+c"+
		"jF5octs7jypobK+OZQE5dWqDPP2NelE/30vGXKJHcAIW3rbaV+PcGDdkm/RfVs0A/Eo8TfMyvg/eOutU/uTsvp0RGDksubK1GdgCdZTuk3ZN4dY5wIYB/ZuIuF8MbEMW"+
		"w3+GWRCnRhUcsMrqGx1qKyc2I3FpnCpKIiAleHjdd533reS//pvSuKOmffWvajo+iz3BTzfbCPca3NvRFQHWRZ3BYapyeLA5ggC5xGsTHU2BCIMmrqt+dJ3Mh1Np8N+j"+
		"LCPjHPyFtE2hEdFrMXd56K8dEp3VWtwdSkDyCJ2rE2r2vd/8/g0HXezxTO184lmaB8NQV01SxmKNmMYZ6ALCiHuq0Ff89Ago8ex/sT+T7diMR5D8G45Wjz4539ahDmDX"+
		"zfChowUqPw6HoGLGADHEGLq/a8s995Q0ekHyKYIR5QXatY1axjKA4HrHcWbsOPUbbK5HfO5RyCl9g3A1osnCRJy81wsmaaq9dpyXjmrQCazGF79XV/qd1tCmxsp61yfM"+
		"0u6ha4dXSIZrsefKHqhpr7rCgliq5Xi3I9cmK1FlKlECHYNrI4corQ+EDtNgfHEZCAtu9g0+3OLG7Fjdv1hsdzW296gPyg6zwlMG7uxzp+P34/7h8nY3opZr8iEoKqw3"+
		"HyyeRrrhICdRERR12Oa/DS0Cl7UtFmzdmXUqH5e040Li7gkUboYnudjYpbD4CYajb3Q/2bKoSkGB6Wz+XO1KZ1Oe/lgNbTDvTUC4g1BX0DMZlHyebWUDurHqqciZXTif"+
		"xNfzDIsAeP6c6yJ0OpZ73TRG93HTVakOAiF6+7qIKx7R5J0Je3veDGx/+lp0G+Ua91VPSsFR5jrOkrdURIUdmPcybJ9DgaYckvSRBtE3y+vvzji5WOYRYizxbqYC48f1"+
		"CPCjA0+S2dApJB135e6Fw42GKK/DqIz7xYOZQyEEXl1cDoR5jZOuYeWoL/CcxwZx419TPalvaZpEx6xNF4N/DxTVM4vpMD5iGTCL7Bocy+Ic13y+ah6BFmQCon2O6Uni"+
		"4X4mv17F788g6Ys89GrdDn2AgzmqIQbEBba1S+tyWET6romRiQq7XY7ioZRg8+Rn2rU1k3+mg5Y2lwr9NTE8oTBsRK80bYwqJQUGlTovJblJ+hBfHZ3EOfIV6Wo62ybH"+
		"LKRGxjRL+5ouzd/imH0fPWfdRgMGTYbBWXBvgZG4V65A5bnGdyLyqDQ5VZMmtOUjrpT0EcSRlG3T6oPsjUj4x1DMnVond8CV13BGR9b3mDc1FtH2laECA3Pgr3+HGQ61"+
		"cS3v6F+jpmKsP+7ihwQCcQI1x8vAb+llmLUxk4Zj3YxYp+TmMWTjk2H2GAucrDvmvFtUaEwPNVh21Mo3ISPkQ+R1r8fDEydtcA98+KRMZTbe2zSyRlHrbiDr1f6QdCYT"+
		"+6tURrsWqneVyEPVFOfxeROJdC/VAFQA4VT/dV+T7VAcK2G8vtk7TYjYr58Gen9y+7pDpJLoKEi0i3EVW4IoB+KM6LNgiYQRaaUl3f6DwRkO+e03S9Px3wjooBx4HpQQ"+
		"+r2ACfeS6droBGjpFVQftKnhpXXPxC5v/s91qtKzPABTukLeep1eNcke1ap21D5+TM3hyZHaZqzbimTv4CNQNmqY9BoDdo/0gjPL5OGEwkO3F46Hwb8a+ze6JJhXibCY"+
		"4F8mzyl0LS/fybfelR3LwuzaNmFLnvd0CK72x9LeIryLnIyxx4SZTTNy8e0EH32srHmM/MRDdnU9USaWfPH0IBbm5OErMSIzhn0eB7otOVtf6V1+x+rU2JTPFt84uOXI"+
		"8Un65dxlviRCMUc9MhKj1AFl+zhRCOSOKiOTlWTbMYzLZYkVEPkbZ3ne2VYJh9CvdXhKW/FvGcMoPfNT90hqVLOpI6TKB52DBaUnj+SHCST8zghPUt/ZMpDp6XHBPpb6"+
		"bCVWX6kK65jCUFBJRCoUugPa+sJDT23AA+qm68Fw3bo3YAoIhO+imuhJj8ch4X83rcIaWevPGamRZQmr/DS+SoYdfSQbI+MzvlNhAAxOECeKGwUTViBV1jbqE1ZdHM8U"+
		"JA+hxkO7eaQm5bxhpA8kzIlXt+tOwnZ2smmb89ZF7yQiqnyIf+hK7g36yfGPxY8wVuXtmCV93iPUmnt1hFlA2jhpsTo/wWCZmqaOa4ypPCOUngGaVXmmlLMnrw6nOSD3"+
		"oy9z5DDhl4IjOaLBqAgo2aMj8D0Exjkk448+8xKlsWJ0/yYnjWjzpUJmfItTMYji6bokGMD2yUlhBLR74bS5ghKj5RgFdUs6QjLQK22LcWrRUBIXaJ6lxtLXv4nW2L14"+
		"Gc0plnKARKIaqEtPrfPNVa1ghInaUUhvfUTChSyea5eNluB2tkbqAW4BWeDXtM1MaDki3YR7NdfNMV3IVsvMXmNmiVISkvz9SL9ag70LMsB6uFI/chg43mXieOEM0PIZ"+
		"dUWwHJQP0G7Xu9O8GW+T3BS3Oemr3poV6TpsOLOIkIBfIClw/7gCbVNiSZwh0ieL8ZAKWjYOk75NEjxLNiQRvBY0ZxOUKdKhAlcOwOXRlJsD9Div9kGNwCiS0sW41rQy"+
		"U/DyHNGuyNEONEXCIfZ7tCpHqOHmQH3O6ZSsd0S0t0ZTnDuh6TepZHC7nKGxcfsp365l6hbWDQG3TW8moGMW+/mQ5MY5yimhHqW95k2HCpRKvLljRhWK5zD1usrujhIY"+
		"IuXwOWfM7729SsGppEmTjl2ksXgrjiev6ywj0Lge6C+gL9GU+W/aB69II6Q8rkpV4LZtnrdk0sANbTp6mW85yT90fzSXgY1VFSYtKoJz+fS/EbgDLw9JAz/EdvCvkAVU"+
		"77kQM4mSA5Tb6EVFQA4Ftgoh5l0qR8J4SAhjfTA0KHhBUVdNugvEjLbpLRMU8xJ3VR5eiK9xXrctdje9t8m6RvKuH5t9oTAD8wBjvAjTpOkQ/TSKhyTMWnmBPOZvuURV"+
		"vIct9zaqCm+zkp6CxW2rpY59ofpfjydO2euR+smmbrouoGh8Q5mLUd1kQbwvbHQsggETSdnrRjzLUFiCa86/i5DSiH/gvhpQJzGxJi+UyEW/hVghsyAhXNskuGBgRHiN"+
		"yfPjjqiBDXyRwKXSZhRaGi+sJHrpdM8OsU2RhUaELVFEo1jiap4qMORwCw8HmVQhT5zSuBOuv3/2T5EWLlFUqjcycH2QMSmuAsFGCeB9GpOSZYvwmhui+hiOuUoeQ8Pv"+
		"efpZ+WtRamlHPOrMZfDvLaLahlvuILjgNlyab7XIpEV9UvSJcKR7sxQioaQ53OLbTvnuXIKeKvtHupB2gkW+R+UIMIpGXgtlr8hKmybpgJrmvTlm1CCiHRBlTi5skCtS"+
		"0GGIv1Adg6YbV3mX40AyMjgSEJh24/QHDeXqJTkHzRqlobx2SqdBjOIRkRkRgwe449jiJKALeGMSIb+kbA6ovXhOqQ2F0pS5fZGIxkonSbKjB8ARCJKkm5Okjq6Uqwib"+
		"q3uEiM9Z5EClolfJeaeGujh7Z67wCVSOe/vpgEuMzsQrZd7In/oceVgEb6bgnzTFfMcPTTaYRjkaRcvapa7gvZmtTD+tjWH3xTk3EthI4z2YWbhv8yRnCYNTuGCy3S8r"+
		"SMQFx6E/sqb6gpLguYnSx4m3KtZ2i78vK8UoOsOiSX9H6vAsr6NXRtGtaffuI5kkpVxsSQ8sfjETugXO65xmw2L4c2HQ9cRqR/Y/pPrRa8qFOdWm3K16tA8vVNjRuVtq"+
		"FuiDoYdzsf0mzrscDtL3fXkueREZmwDj/mnTosEAd8emnJDHXFqz/9EnpSXiwI1R38iEsmEXBaVO6EfLWhjhNVWD/AJsf4IkGvZat5SGgNgVGaPKXQvJZoFQp2gz/8Ru"+
		"M/lv9qtRNLnf0yIKEOC2k3luWTph3LIyOwEtkmEP3wuhTCh95ppYtdsrKApK9j/g9Ebv9YkGgx+B9ge/KSKJL/jTJFkUyMIKfVQ0i6E1WR7eLYwaPAv0batG4kIAiAh9"+
		"Fxzt0slmavS3codmSVb4fZa95lTQeGy0du58EO0wsIOUuBAeSii/EQiJs/4owpT2CxK1eAC/6B28Gh08/XjZfuttcpc6gBMyndcO07SwDbW8XihrMRT2QdGIYmAbpZlj"+
		"SeWo6+3M3hwK8FowV+qDmoHaUKhH4hDxk79ynH+0U/CLGpt8jCpcLInBCtlJL6Wb4YIGf6UqTMSEwxdzEwuOVsRvdZZmFNZkzMj3lF/8tt0G1OB/OGgGpEwJkEcMSWXI"+
		"vaDjZkKdj2N1n4fklZDGRCxFZRyQMORGCYfTHRgm+YPwhEJC62XJvApMiwfEaJ/PY2XhbOj+fQfXWQIcA4yXvXYI+HKo9WF15/yjL3AZxIrwY4K+Qe9AHVntkVVD+Nt/"+
		"r5vnRNIQ6hYuWe2srRKZBiKyr8Xct8vpZDOEQ0y6FYvXxnmCzmeJP+9S5WTwgfXA1WgZ7fOeF7R0wlXJmALyIPGcBsYJ2EjxEED0GrflbmFGXbR9i2XcFjHwkQxzpA2p"+
		"V9EFo/IivhFyFHKtiwy33mZnvHcFafBRKJyxmuUw/rVT5DODCWJM+K1DYb569zyTyxcwKq1Tb7mB2K+CmOEGqRI5bMiRGtmvtr0Y1xbLuNesX8/LUcwH1TMc9L9kIckz"+
		"/q5t3CCLc+YjsD2AToDyzayYl++TgrS6aiHFcCchp14u6v/j+kEJrDN2Sel39GyKrlDD2Ofl4UKQVtYft6PmjSkyyV6bODowOfCrtD29HJ5SWTQTdRbeBCePZIi4QEX6"+
		"wdGTxIUGlCzMujSuSburQPuGe01QQfhH+XN2iAASjM+Cpdtl9dh1M8IsOveV4+pP+okTbMIXJ4IdQTK3hD/3L92umszg4kn8EyYdjY7BrGnIGE/bFoKn9GrZqpsQI8TO"+
		"A+FMDqiKmUm0yCTGAOiy4qHkweDYVONITgOnx5AzLEt3cgVL+/C+h0cRsU0gq9PmNuGSEmnagRAUkNMD4i8PtFM8q2AXCLUUhcP0o5K2264ZOIrDE9eEAlLa7QOmcGzG"+
		"6PTd+8uiIevwhhEaypJ0LA9nFeu5jXzDf+6GAc8Lyikiim+MaB7qLYTegSQY1HTnXnaKMzB3vLOXtxfifYhMUA28dq2h6EggE5C1t97w3Qe49fOBrWgJcLJ9S/IFg6ul"+
		"QN5cjAHtZL0OqZCe/HXnBeeoP93MdPa/5HDv7V+iNYoQstWDUPOlUjvZ1PF9C+IIGXyLEU8xPtHlEWQSZn1XBFT42JJH1qtJtURC0u3VYclrvX+pRCagc/3IyjAUd870"+
		"Cd8zEP0AO0iLcoVPmwLZehSNjRue8A75/oObFjNEVHJqUh8slmZyLrHVHQuLOgyhSrKEQ20XOC3VIZJzaJAlqqcRLa4MiV5BZcpzWZMdTBWfxVs/XaKZTcjp7W2M1cQs"+
		"/q8QkSC2BIUMTZk/ihuRGRjynoa83sq/dNp36hw5fqoPF6C1XJavE75Y//S39/hrVbjCwT7lqZbsRgKB5i9zdgOD9bGY582WMEgPDXBqW+XsMC1NpZ58u874F7aJvBd8"+
		"6CSHwXiuUWdpg9/zUHQg/i6ZgpM4JANEZbjH5HGIQIScu8MxaPQ+T6Vaxn5VeNSeuWvyfZDTcV/Sw4ZQNG0lnbcoYQtNf7555jyHhcXSs5xSS7QA2h5jNOLsQk2LV8ME"+
		"KdFYAFizUgv6XuE9yjdMjHvOHGXfsaBc72DpsCe2DirNdZe0Ur9Kimsidwc8iAxujaL4S+mfgY8Sp98CFsn47t0cbdpFK8+CeowjdI2MA4MdENS9cyCE2sKT3bjOf8ID"+
		"ulAouUGP0MJwOhbeU8Fnh9srTB1QLbT6PXiQypRf3D6G5xq9DvNaA+Tb+MF6KECTp1zPP+PoDxzmdchC+qB0UTTgUqYX4Wryrm2y2+YFuHXAYH5TJB8N3tlVn1sJZG1X"+
		"gKeo79fKtJQmrcl9LhGZpM0Tow4VG6mgDuMrTm8FlyUG+nqG+VzEKBilp6wvnmRQ1+hWMHKalQw42xLxhCGqFycOjvNwHIK6Oiwvfd5PabWCjdrx+OwjtGpkBxi1kfLD"+
		"gnV54oyFu2ZoRjUczdASPRmtuM0fH2sNMsN3kAFAZ4lKevgtCqZOZJkBjoVUPfD2jdGYzxKZuPZzYQggOH1Gjb/eaHxOA9PEU8moiz+jvWjSHMts7ZcN8WOd3I1sQBin"+
		"Wm3Wku2QT2kRm9bty3QZ3JWoikiSrjaw43Z6U77psWw3WfZz1DqvMWcnQIYL5LzZOsy9pQ4d465HuoulJMqGbZhrPJox5XQxfTuLA5i4Q8hkcVt1yukA9sVvRE/hGSZp"+
		"+4XRvv73eukex/GLuT4vNtCKZ0YrbI1lnlyNbursQUQtIukPRB98ZeolyjSrszLBcNiQkUOmd5iFRYggPtScxv8mIY6lBrFCYdfHxxFUwZmlKWvuIGKNmSb11yW66U50"+
		"mwpiwX/faiczWXJmnK4UjXap7S72BwmP9Bb4kuXMbcVx6hktJ5dCoTo4VYeq4tl5IkCRS05a2SDXagN3KyRBOZEV2xcFNnHtIbG/8letRdKXQ0j4Nhq7mqII0+qtO8Ik"+
		"E2s4CbS+0mATBpNxUmNxoYaNLIeg/4OOt/VVQ6ZIlGb26elR8JgnM7SE/KMtzKqpxOPZ7j7tGL32CeZjE3bnu8pYqmGYaTIxN+lklJqP4d9wDI1EngkmsAYEdnNRoMmQ"+
		"CBzV6IR0P8iDU1AwjpHm4MJjLyXM251JQvUW3+F8jdamShCOH/4hrT4pqUsiLFToWChSGYYscpJo6ePeq2Ld5KAsSaqRC7gVbE5/Y2Kbxh/KiRK8bRZgjfAZ34PPNjlL"+
		"pKI9z3nd/5BC0PO4p3m3HYDgcJ1CuONfP05R6LPmpMv5i+wnP7XYKkxEQTkEKNe5g7K6z9iHQ7B0ms3oRqu/Nv8Wok+Yo/9Doc3zHmN35JYdqXjUL4p+sxthGzUROVCS"+
		"Y2hwmIL5cQSKWVfGJ/2YnZjgipXLmpsnzfp7zJyyNPzgWvkZ9oCyoxFvF1WcV68jje/eBsAgGMXIP0gFXm8P/P1YL4la5CfZiWM4U6wEL8FD1Whqh7AYdqoYugvmfrkr"+
		"z7uPXSivP1ZWS3PAo88tFfdw2gjOXqQbyGnvQAbnwz0X6JQItkutqWIFqTraSv/0XvH2QkoLnTjezt5zxkqKJJNl62mTn7SPPU5XhrATEUdN9nVjXibKLK7MepGiCTJJ"+
		"05RyCzLBbWax5JQyDCiiPfz7Z0ToXwkeThnagBSidrsr6FnUNfTm+yDxWI1ivUE3wbJs2gRFlbnokrAjH7NSQgspK9y+KboR0SKcy3v78FOUcNki91hN7AtRj97XVD+b"+
		"x5e9J1c/qB/WkXfxUuiIWLBOzbQZo1rwthw3ayh4OFYycy8aKwVq/I6L01XJ5GnIbkA+61JjIBYZzuQjVeo4ONZN4qcqWoOkpVAtbGmx2TXIRTTu9MO6A1lEz3Cvmutz"+
		"sApmie3WPMp3lP/xl50vM1/JS5AtLBQb83GGKQ5DuBh3fAXYj+0rDQumjzaokRp05r1IE16QMfFRkid65RHwtBWUvMGlolWJ8PjDG43eoH22qE4fLVaTSLTtmUoCB8ls"+
		"MUiLdZCSahi9OsXQaWOCHYvaxaeZ9uBHWTGeBjrSGFLbsiEcwgIbOfLqb8J1kPChbMRTp173vcLmGZ1hOzy6YkiAo5U4HQzC2GibCHTak7PUDMXNaIs8zAb87FIR1gwF"+
		"W4h8hkRzrTIoV26UjmsXslnnrvDfZYA31J5d4wIeG9sKOiF3qVcK0dp5GrLmMrhu4UCek/V9oloeaUPZm3hqzgp9v1RFwIwC+T3hqUqjDiWm3laeBFVxPZZG7/CBmGzQ"+
		"3rlrBI9T0T6ytZMq4quiLG4Gix7gFsWSyImOG4YnH8IEULCLu84Y4Rx/FH8Fqu7w55zqEJovJcVivTfZfgWvphl5lIhcHJUNxRReZZsMBBlaApTc26UR/n4LkA5alJTG"+
		"SFpK9ATpfplmJr7KjAcfzBAgNbDdlow+SLvkCiFEmoo8/5jEtUbQZYQ7ZJIcgTzn38LdUTmQFmdehYzpM15RlDWzuX6x8I4J73lUMLZ8Aoa58LdtmYHMFz/Abb0tQ2wd"+
		"vsGYQhK6z2VuDPvm+G9SxrmMvLCy5yWL87wnT5SoS+V8YVo0F6wgKjAvJwxTHoH8Q64TuGFZxPGfMUt8jRpsbBlxJSdG09NJfFPJUP6ZTJlnuVbAAo3w6jbBTaxtXT/j"+
		"JRmbar+mlN38DNHc9VXSVlde8CkqgpQwTVHc8Nvhbdwtyzz6c0QsGGcXtbBrnVlIYzzmIH5k9+2u/ZKWvGC+TpHbahaEzJv+G4/bZ/6cfkIvb9ZG8rruMICUaQ/R7Qbt"+
		"VS8z57gUW+DWlSopza4isdOBeNTwKhjmPbMoFQv8PU3rG1LLOwRtbuoONbD0yS7dl8eeuBhUsL/w+w2Qk5LGTiGjTnoSat6X1Zr21RZIa5MST0TeixNqhwXaqSvVy4M9"+
		"kTZ51ViSTCyfr1LKnsXYIvDizJ0CcvsAlcFA4kz2vmSMndrCBT60nvHokKyIqemnw1wcLtKfJHHOboRKgNBdzhT4ZPIOPunW/7Rjug5eovQfX5QK5UGjcOdOLun0N2Y0"+
		"c8kcSrJRUsqw0IKEehVC0XpIf6UmG6/nQ7cTo1LQ1u17N99KvOQfbwm3Eisg/BNeHLiR15ltpNyP4R86hWZ1BYwp9g/d32RXKKsq4MC1h7LznChmxIXrkMztIqEu+tXd"+
		"y7SHhLXLMfoe29iKvvwUahIHWlhBzJfEjpS7WxXqXNJzIjdJmg9tcvLiTcrJpr35DFQXknQFTNBpoGb8ux7TiNoC2J5xvnphuZPbK+3ANjIYOw/puDr/lK8GVdLXi5lK"+
		"aC8bE+tFsp9s+s1sllHSeYCAsTv4x3zZwuGzIpBtjm/oJ36GlvB4qHlCqb0gFSh7nnF2yQAE3IHjdwVGj+I7epGGG/PGBgRVyipgtEdc8TWCQvbFEoJHIZDMl5T1dEWe"+
		"XG8ifUzFnTdmmiJTGCkXq7P0YmJKx4rLkfB4Y3cdJeX9+iB8v4Rts9ptNBkYJenzWoYSJ/tZOHl6s7TqGZN//Ql9c5UI3mztX5Wdbwm8rk0LoV0+hGZCYCSYbEB6Plmc"+
		"qet9cER+AKWc+ZQTno9RVXr4RPVo1CSPUSoXvqD4HvRm7EmiNywsR1pOLfMahKV4kqNQmoPukSRGGvoVsnlH3bbZaL54oPV3cROa7VyinWLEpKmz8PNEXuk+DLqnPfcX"+
		"Vl+rsAcqtUWiHeqe6ShwGYP4XpXn9pCmNpIcWRq+PYJRljhR29LjLLVSFxq1aWmIc3jAjU01xJh+ER69fiN4hBkByKEkb7w8bVltCLGQpV3JoqnZ0WpvRtL/bqahvMrA"+
		"hcSoowFNzp/+AbANbVfJO5zmbi1lsa59kctyOnAifCNCpI/6xVvdqorgLUkcTwKDTHe+YLruWDtyyv10WXHtQpNz63k9+5Nkx+Toai6xmh4u50iuxMy+GWisoGHdhpTz"+
		"hMZAO7HUD84BrX4yM4B2Pp2urr2j7hGCUhcZhTAMPswW1PnCblWCODQ+AUl7jiY6sYApm2I1CnPUzLYUWJWNgs37j/lfyJ0LkRz626teGwFULZxs1IEgKQlMznvzzM3q"+
		"6X4d6b7akvSjiSUaUosw/ItjxlUWXBz3X+bkgO5JoU5oDI0KnJgApl6R9npzmB1cOR/LCo72FfSjIr6VwCo4/UJgf37z3IUE/mgShUc2uPWKlWB5pi8n+yr2yh8iEui/"+
		"0pAuPFy2fuLBmEvRqZ1u4YcsK657gpBmofqsqc/B0FaTtJWCpNmUGfjItcEJPw0ifrfjj65wk/TTlFBzw8p97ef2M5VXllnG+CFgU8Rp1DpI3bZpGI6/4ZRwiJQIVT0w"+
		"McaV8WOIeDMps96SyfNVyBBBthBKbrWkYukRjvHPVdKMGKq+bNb9rw8dZRd+/0PAQSrxMrslQzxnVu4urakaiqBnN1lz8a7Kq09XbcGjgcK1Ml7U0MzTIQ3m4jQ0qDzw"+
		"vUM355fmHVArMcvewfigvx7dRMcg/UVH08u5DRoSWusl5MuwFQRZHtU8z/TYNsJvWw2zkY7nxBcY1t5954aRegGvxpte2ZktP39+j6p2CHSHkMs24IFtcvddV2YIGawK"+
		"HhydAk7tc8mLftCKJWNtIWuPBDIQtoEMsTvWSm1oPPgS92pyMMdsOxFIPENddlTqWd4rrh7NRBNFtFIRlQ55vkDM6Kw1Xb3n/t2E+En1XLSh+IM8OwzyentpRczh+GtL"+
		"tLyqmkjwUYELCYqFgXwZIq13+BEMdWh9p1oWdt3SsIFrJ6yDz5HIrt+heEo6Hu84oEu4d8G4svRxmCetqFIGN1qkM68q3st1cmX4k5EzEBsIVqPdB1rfH5W1rKMTHJiK"+
		"AEvzFhKTuCJ6eLuJryXomP7xFAYFI0QuVed6+qnsAufyS0p1ItJtF8tl6DOzheCsJNKs4nhfmrn22CKlHoO1id0tMARPZgfbkX7l6L8/yhsnkvpTyw1PJ5GSR7BDRuJR"+
		"sQ8lPusUky52JT1Cp4Im2SuVzqTK3Zd2KJsAaoFLGqawsXDEBQcONkaVyL7hBc0cbSVWoKU0ENgPxydFQXZpyz32t9oNpBVblkI64+8vgma7141f3OS9Cs/aMPt6YOOS"+
		"DI6NZm7i0J/uJX++tSFIc427PQeFle1n9PdjaJLY8E+24/s0XZpJ7lFD5Ptim0VY2hfF/2jg+JQ3sphMBp13Cx7n4jupnmOzd0AfB5p2xStk5yv8MirPjB+OGns7hgq+"+
		"2qrSnijU40albR/PNwzaTr5pKceTkGdHDDuSB/tEq7pmxeO3znJmrN5BKGbZxtbWocy6VYSDaJyA4ImUG2vNGd9wqBYlQYGVCdNtiDpE6TGr+ADb8kJsbhB2eQ0O0rfY"+
		"vvCOnNK4Xu1IBr03gfVQQn6J/YxT662vBqPH8byX8O7mk3TBiUZZzUYRsfUT3GhpTiPJfJqHZ4YdLkHOcnddNNKZGvIJn0EGhzq8qdoHn9G6yGiP745rLQYEd6r/aZeV"+
		"4lKDIcXCEGO2KmRVuj7RV8QztshaxYkO4gzzU3ZNuQ5yImOSeQuNAzNK7KRIghxuC4nRNz8rrUf0qReswK0h6P0ShaGsmKBuvRG/NX65w7MK6xFIowXo80r1s+IZuDe/"+
		"iwc11nSfTWcUl0E1Jk7LvjVlfP0oGt7YhAe/5scb3pMLy4Y8iUQ/9TOpLypGiNnuFhB7hLuDs0TH20kPZmSbdKnevyoAb+61MZTOJ/1+MQ99cqY1JyC8dP8ivWpnWeGe"+
		"CbolBU/q7K/FlXLlmiULukZ4b1KTWsoT9rLh/PFQjzblhlERkqP9jr6iajpPb4mYZ4h3A2k7Dv5LMqV+EUlfLKUiWCQ+VRimFM/CJotqwCmIKNe/a6u3SZQCWiM/3T0x"+
		"t7rCkR3lAuf/SdRv0P5rCExYhxKRyZvRw0MiOFCkG0IVBbgOC3QJInhHxXOHqWmHIMLBsot49ArkhlYr7OxFkRTLuqb+9I1UJOaNNl9UJ4CEMGFXwQszJyLM/8F65IO7"+
		"S+LoenNIudmv5eJJmLZLgdTL1SF2u3jC21SVyMwfl7MHjPzWyejnvX9YAakyhnahDBk9BQX2maaHtZ0eFwSby8AipCcwQ3ddCM6b7rE9gUBRTYTRsTQ0i4Xqg7RVE8W1"+
		"IkemfW7Kd6iqyUXq2BQ7APbGilTgxBJHuswCuAMtszU8aHDl7ouWjp08wkKC8MNdtJDPpctteSOzalzQvtOl+Bvohi6p2Tm8hE8GtCDTCJCJhMG9u5zDyiMjdzFeV8pS"+
		"F+/oqcFKtM/l8tqFT3kMKa9L7EEU3TOCqITJd0VD1PW4YVnly3TPoqjXyvDURayrLpBGxgH4G0/5x/k8Pi6ymR0jHDStT3r0M9cjpLfLExtzPQ/4Zi2J+NoIKJjsz/48"+
		"/TyBAh3ZVcmtTGvm+Ixhx9Q69HRbMgqlpko30WEMbXSKB7DSfqcl0mOErY70XkDhBw2lfCGcbtNb6vocF2cZ6duUzlnhkWYx0c1CaT1WB2QRorHslJeRuizPqMKkCxWF"+
		"VwXIcL/nr/bM409b8E3pnfxmVXBSes1zyQyQ6prp9HY/gumxSeTsJyViQl8LscT7ZEt1GEnornD4IJskHnUUjLAK9i4xKTbwCiXs+ItrjtWp7WhvzotIqPXv684pagGx"+
		"pEFbP5TA8SFLT1nlDG48FchPCzvZ/lVQzZ08fvBYFRSXBR9lU2nxmzAFPUupzYK/0mpFcoXXUYU79TS3VNS7aWvei7JDir9k4ru1z58otFkIhcE0iq/+RukjvRUjXeJJ"+
		"NGvWTlLYXi/8QnHAeATUEbVwQkrNyw3gcetNQA5RshsWPhPRCwO4kIU6db7RcUAqpyhsFjezpu1q+Ltlv6mGLSE1denQ86UmOWpadpeLsrketC5MZHwvxzaMXlYVRcAv"+
		"BKQTZNY04iXVCBi4nmb/0j+Pe1s7c16xy1qQ6VD056erW/lbxM75E+yAKb30XJ8NNBmRtYGvF1wGxshu/ODZJUl7cUrqqcpdO4Ofm7mVYUeX6NgyOi7I930ahxWZgmi9"+
		"gU1cziDCU/s9SzT1WkHTbnZoIrRJM5ARj18ovYG9cZNXUiS5nK3/W696rzP+WzKRUT5Zjq0+oJUHuc7PfUYudKt+qerP3NiDYxbfQegwzoXRqNHE2r5XjKID0ZDvMh1/"+
		"mpYtVyqTAaTf2GpFLZxxvfcpO21PsYH7HqR3Fh98R/bKkaZjr1E9a8cMEaCUmcpuDCmGbFnHMx6Rb7M4c/Gm67nQkgrDA3JfnvULko31akyON7oLuQ+yVwMxbyaPEKZb"+
		"hdTRGHa3/1gL+0xY5H0yORSOFhbDxUNSE3kCrNisC9UOj6lVGn9p7GFl3ZSvx1PPQ+JPJ/agkt8ioF5dDcN0ziXF6YB6e6JnhZVDVfuc1qL6Aa2OuNPoLWs1LBy0Fx2h"+
		"/mxs8QmD1WeDKSIdV12Op1pViK2Nd9KiX/TGs0c7uC5C8mbYSVtrttltOiOFnA5IIg6nVYDqakGzVrN/HlEDt620wlBwd2iax+28qS9Ql1XDrSZLqPaZkpdsIFlZULCO"+
		"aEvaWq5awgZTI4tEUYu78hv7e4MsDAxX4jv49sFm5IQXy5efmahJLtTW/eaTfkPkyP06Blj4hzYXLYtQ5CCNkgdgGgo56j/lut+DmKQiHS2AWvdk7yEzDkgR7chvIpHb"+
		"aOVl5fMgwFZyWnGFcGCnuqe1G0xDePmPcux7UJpMjDFYGNhafAhFIy3RJluRm9lXUMdQsjPCpHzvfZDN+HuPQVp/poGnPhn6shdzRIzxQ2B0b+C11XuGTjJ7Ph0qYuWh"+
		"bDCVVdGRVsDMyD4vVsJYd1+9c+YcuSuYJ+HpE8yLk6bo76fEyHcLr+88wHtugYtebxPEcoT3LCCAjCHy8y+0Z/ERPwHnU+3g9IKrB0KtHhby7NiTwHlsedzkvdlVVezm"+
		"ZbMf+ljVOEBQlDg15S2FXKpqu8MPVbrAUJLbqIY9ZSyeG3Ipdf2SXHnQE4vY04xVRzzkKtdxXrX82solu4M1hgGbeh2SzsaaWAitsluBRIms40jn6bwE6N/ohrVjVeLb"+
		"bQ9JpvfMyVtKj2P9uqmg4n9Gf4cyqLuNYxVaa7KXppjPs0zM1f8w7Y3U5VCQbjOCoLR4iubuvJ6TQw7ltvdMGNGfbrjy4rPZxWMySKWmy+T/PSAqQzcNiBWUp9TnqxUf"+
		"k7M2KC+R1WBjhDFqXqmRxnS+obbKig2s04J1buNuQ9aLaIo9jbemqOrgg7hrWG9DSwNyFiQPacWAvzgwP8pT7Ei5J/sco7D5azwiRE8P9AffkHWVI8IWCvfcJ1a11xSH"+
		"QOJC4CyG2WicZhJvxZsgNjkt+3wD/kuqYJVa7ENjpZ18D9nvaPrHhwzcfRpWl7CZ9Kw6ZSmYHy8Z4ptmgZ0s1mr3DEYk6y1ZeqTrWryRFuVQoZNXHjFQMMMpVZONY8/4"+
		"ZypvotGJFRCg2OehO0pUZKu9CQNmY9v0Nz/jYNbWWKnVy2x2uz8XQuGhNX68teH3ZFDPZL1/VK1QTivgfjbumkISaT4KSqjV/KpKyESlnIW4ChDcQSO1Ngn7XdHceD/y"+
		"lE2mRHIfk1/IOtLxuWXBo61fOk6JWplRkw2gET213HK04NDZZPI9hcJAURSj1xlbU8vIsE7MVaLiJjeJB5fVI1ZM4W05veV/rbeMAHKvkzl5aSddigSPy8kUxBnslo+Y"+
		"mT/0zlIuipTM1BpPu8hvvGzjKvhgiFeg5UyUXTatbZF+FF+0sj0gT6gjbbe4mIEIORWlnFaTQQWaqXygk/z59YNvf1yeZpHZT20Sbn6KTrMGpWj8g/VCDte5q1fAGjP1"+
		"jv7yn03huXbL/shNMRxK9FhD8ZAgMCkGSWqNoOAvywrIKNk/FFVrKuVvBXNmbpZGT/rStGIqiCpK6KEkbitpyZlJTzYbQCFb/wcRz/arznp5kzvKOmwSG07YdOhzBIw3"+
		"BBq12MuTygaK6S6y1dtWhl1m3voA8iN2vJLO/Duwn7MmGF7bZ2w8yXa/ntGU+5ff1JmpVXI8JaH95Xhj9+so5wLDHL03ivvPnrIAl3eEc3dIAMDAbfWGql0SOuZQPa21"+
		"5KubTt03ink3B6C+GwJIDecMDT438coPOR4nMdAPHpk4cJ5zy0HKD5rneAA2p4WS54BfK2s1AqTkZ3+pKBF/qH2ctST8hiEJdhkymHiSjc4hVdgIB6J0lsdyGNRLXDQV"+
		"2srjE0wcglLSxR8sKNXmozT6RcafLFIcLqxcvutI5VommOyKfuWVSnkr+y160R0y1AggSVIBBCJ+/IGKuF4NKyqIAMFRvyYbBSBCCXG+yEB56t8QY04hn/BLyvZY/zMn"+
		"U/i8eulZRivN+uWIksEyLn+FaBHHSLgdZQU37Kq/2w9BdaRiQdL/JQRI7XbW2R9OMKMjnrhuXN+OJE82xhU+Vdjipb1TEcdx8G84PaGQ+3LVm5eB3NqSNu6sZ02aLviM"+
		"peko3EI+ZNGQx0aBFlZSjN5SMhHvK9Xx1nH3MtwLXsqY8OASTtnCUrIhj2+t7llwmu6df3ANDSpaCvuvTmn8bO8ApYfKlCveqKC52aPxpCZ4qMvtl2yksCpaamwAGCNs"+
		"0PE/sYL1pgi5ZAFFT/ud7LzmfhWnyE/oBH38NICrBe42oHPCBKeqQiue1RlZjvOdi0DlNHCewo0gekeJAe/kA9uVI6RjkiaB9BCPLfBQKCTWLElEt1dCGhJYsMNbd8cb"+
		"v9aq/mtPT7T7dsOYq/Z4Qp8s6LRH60awSDLMggiFmmplE6Q/H21X16GdcqbJVqLfy7+DRjYfZ/Y3ZJn2o2LXgRDVy5Q0f2Szyi3qU+Q5R6rRuHxNhE1QR4aE39+KOAby"+
		"1wRreLYT8Z64xXpNdQcpEVdbT22h6gpuleyBX3PazAyz7ucbQwBQ15XYrfwsFwn1NLgBJN2VYusrYyFHb46p9kp2P8D3GEg7X903Vo02nk872R5KXfkZJLPpQZEWPMap"+
		"SDSY8K8jfkQcedKTXATd6j8psXU0g3TdI9Nhytg6l5yySHPEYMLV823ohL7RMzXeBvnmqnV52BjE8BruwMwhr1vayO5M6FgAcrSuu9qjjCpEwhDlZA1aj1XZMiCAFEfw"+
		"+xErEsjZDp85osLq9Vi3XO5si8F2f2/7ocnkM6m3QnXpZPGZzo4RMzAIC8yvkZBgOA9Oi16ErcNussPNScUkl1zBS9RVKPu7bJLqWFCn2KqaGXOhcxnu52RBP2x5QCGC"+
		"olj/L/dTkfRvaK2w05Vr8iFQIkmJevE+Nj4iIfBUK4c9ioRROI56rvEdTDhZVJXX0FY6uXEp4ORaMbvinHt9HPBeemCueS2vnMBrWsfFtTwDCTIXPqRv7V6UVsLeaTje"+
		"i/wSZS2c53GFdKgxoQaS21ora562ryKvjq7GBp1OpkptH9hMGpU4dEsRgw5AtXq6hVU6tDmsvRcyrXAXVSJDvDZfvZ4QPA7VQHTHIhMT8mriv4y+WfCqo3x24EzuV03o"+
		"bUhDG1SPgTF94oWN75xfsdGHZZIEs+3HoglFrHSEy6yRv+giVYg6I3CW/tCxsiDAPjNco1RIl8mouUYCMVY3QQ6/OGGsvWQsYM/uDnAr4VVfCCUgQ0/vJ2MgYNfOZLNW"+
		"HOQzH0Dr72rn4vsrx/T393siKheWa28vtIMn5+4fsHUCn6RHMoDecRTBs9R1uggN22DvN50Cm5mp0vWAyI9g3l2q1Zp0epn79WaS62fVrZZlV6tJ0R/5Uzf02o2qixpX"+
		"rEeUsIy+yvCNuFQ+95QXWrEAz49sCUcqffHwwWIgCE0FQyVDgdGjDOwm7J92oMpozqKvoPpza3DTHx/smu0/Ek1ELat8kmrUIFcXV3T5Egbe7hQ5E8PZuXv2j2TS+m7t"+
		"Na0uYRswnYp2CWDjy0qzni8SPCmfNQlKxBeYv8kvRv1tLXhPTwTxEh6yj4sp8KvkBM3qqCbBzrOitcu5UREsitAEexCV6bnOB+ZeJOv0H1rs5q3a9nmPBpSkMGck1fF/"+
		"E/dADl80p5IHcoBdGLfaYvKf2uZenZ89PnM7BTAvKk2lsIL2QMcIs9Qh4/Dou2YGKVsGXNJSmi0BD6/2AyUO5SpQsmAnmMJybmJU4YC8uGnYee9a7PrY9A5c40yPnclg"+
		"SM7t0GVxEi3fnKlU9WfzoH29fHzCwsWjUy6EmUqpVsKRWRg+LAtZ2SR6BOmqj4ZeigI8K3LZF17jyie6OnP5mAjruTbgnlvWGjiiVOM1lM5PqR1PT8M8pRflPeUqkoXK"+
		"GM6aGW5aGHwGhp67POimFmF2yKBFhAYX6nHAUhbntKdHXW2ViSgJhnxy+88FYM1NEDyEiNuk01AOs+XBeHlKnmCU2PVofjKlyCJO71OveodMzORpq7TZNQoW88lHBak+"+
		"F1HrA1Fl00Oty6+PrO/AFb712Pgd1IOUciO+OGzsRECXCp4sCE8PiLzkktAzr7kOqE8XyfyRPQVTFYFEE7NGs62POLFbKN8b8rwUNcAlMbW6+JbeGsxZHfDrWeQ+nX9J"+
		"ycve57oEKfCkpNbIibnJJx98QTD7+ze16UeqHz2EcHq73EomoS7NHiBi9VRWQL9mB6GK4Ym2djlHGnz2UkSGi3kklDuN5bRs2NMU5iC8boMBVXiIQeb3SECLB5iyVI4d"+
		"ZorWi79Lf2pdOwBc2O0hPcLVmKvYUJVn0r86v54zhbZpXHEoaBFmL7849UxbfgP/AeBNUOYvuaHnPZNLcTbCx41Mr2mF3Es9Nt2hrLalXG6+JEG9NwtscX77cJVCdJM3"+
		"d5+psMuk8Jj6S3hrkMUYPcaUX/bbmDmohys3yCObT06ehl46OsYpZVXebD++Irc19ezWsAGZJGiTxBCAROGlmVExw8aUOhfl6BaA3uvha0Kt3TAP5sdNet0UAZ+iu3EO"+
		"b5vnn+FtvuT7orgNrmY2qNzVWxYRytVmUBOtSodjLpueSBB4C0m3refFwTaSyjzYtn6qV1CR3LpEcCkAbvcM1b57caLhFkY1niagOwOb9ZvthenqtDFu2zNvkGjKkpb4"+
		"JIdp96UE9s8Hxo4+hudLvUK2AAPNI5YmQqmv5ZyxQ2t8N35FMX9bBRWT5G7KsrdKYtsJI/WeiLHEnxmqZHXrbko9J1OweCycG5T0ubJ+GqTvOl8XSsBTk2xldZ4aebe3"+
		"tI6UQksSIMNEE497hCU2P/gLOVFO/7jfouDmyaTFAfLOkW8WHm8DYYpsJV9SBnYtDzoysUvPDcbWfRPzDLpVQiyf98OO2azaqgFH3VUGkzMkcdlIfukd2cpBzTw3uagr"+
		"gZfb2kt1YRJWsWDKxZmzHx7tAJJpAF+7X5B25JwqONqG954bjaiI1ublqHfU+cW8AVCgVPbD/ivnB+vwDiVIeq5XM3tgIqUSuW0vTGE9nlb2RJdcNPqt6iMg32oBTZbh"+
		"0IbgEsQ/wPVvKTqNP8Gu5GmdRAzyqTI+gz6HW0BSRRi5wvF1u6M2QYsjwZls0763ymlCb8ukOT055V0OCWN5jaBIln5zPQ7KTLuLETIiQeXJVaDJkqXi9gE2lWvsyIlQ"+
		"Hm3MFdqoqmtiyr9UvHgjETdjVYUGNQI4lrcTlYOGyR1jFJnFo+a682Vhl37kuh2jhyrrUMXMJQG5dOc5EVs++grjKZfc4wdZr+zzUSv9fuCw98161qr6CKK4OlMzj60u"+
		"ri1NmvdJaVuIaR20r7YiRht+9yM10nTmb/wE7vJakkAFi6uyhQ295ibRBhNa2QkuxocFdBVloeiB8BuuUc/64h9C0EpXC5xjr0a0BAklrgG5NY3J54dtc0xzbgANq2pZ"+
		"dYTVUlQOe2JBKXiV87xzYUWqhM2miT8fv40GzwCI7DKhdThZv7xlymx5oWqLsVF4pQivPUAqx3ulKnVzpf9FN5BiS3zb+GQjOsyJT8DfrQ84iXk6mVgfsoU3T8T9UBgf"+
		"zq1XWTqAfhHYopmxw3A6wrcJcB2NPCpfintbciqrSkupaESwatMOCqqkT5OhUsqroGWbfsladNPKTtkkXBBKP3Lcgd/zQjGRcYsKSarDcMg3J4vszwH/lvtDoLA9UeQO"+
		"sMyaq1rhHWoOgWyjla/zF/UZL3opFf8ykseKLoSeeo0NkQG9feYMMOKq1HSHY0SCvub7I38fix/g+a7MTLG2vKlHVj8TGq1LxGyrAK20nxad1Y4fQrxhYJecHJ24K385"+
		"uvGAhF31NA0Tw7epR2KpIoYQPGTULu34dFy0FZjQmqJu7LeWosLxEnZSAuilzIWfUhKU8bXgv1SckbbzQLJcWKV+koKbM+znc2YCKNhYTW54Cd559Y8xh0AH0/jQRqoS"+
		"QP/jG+nwqaWEdRHq9QkBrBwIybxUVu416/0uww4n5DwlejzP/MQYmVs7mv6cWDqnaaVlIjZCSz4MI7Tx4T9Tft9qDhvgmo/G8cH0CPwljpKWenhaZYcanCdxkHwdmMOF"+
		"D1YhN196x9tjU7rULRU7ANfPzhZh0aPXm33EekPs48GiqJYCq+loLbhuQRP6WpJKdN/UqY9PUFGhfIXSi+UV2P16tuAgFjeQerMzOd4BvTUqQgnItoc7WH/+DoHuUjgU"+
		"rM+dDkIG+7iawIIzxxzBUAhACf/4umbAy98T9jOE+4N1az7pOssbdJv+Q3WdGmlVboanQffAxatn5kVQj0fOuFJFBUjEturPpAl9TtNmqPM630GtHvrtNb3AaTUAbl8u"+
		"P+aaI2y4/tTklh5GHcdMvq4+5/Vk2/M+v6lG1VrJ37B6u1m1cEOVp8fMkt+GfmUGhj06jUYz2vV8FEQvHFBSWhl7ZCNGUbae+3BAbuaxb825L4tE925zQZXbqTXX9A7k"+
		"BOVPqjhAdItXunkKPR5fMz/cTcnTFYJWqe8rm1dXKM/EocaM2K6XMEsqs0tb9JkroWWf7oQUKRnU6l3q3jLn8X+d/LFN2PT3kt5Sjm7iw0i+SRt0nTkQYEtQEJwmfoZF"+
		"bwxV1OClngYvGQ2R7LO6VJMB+aYfTwEQJCn80WoB3g30BMKofDdmU0ItOXEnSOmIreFn5c0Az6p6XjrREqUD36oxMBNNXIrnBn89yRkzALdV+k9yOZHLxZvvr1V8fVja"+
		"jAmZFnV6TAPJc90eOReCERzlbpw/QGdMQiOBXr1GbD58UkqWQ6Fh7T1jpZKc+PLtPrm8JetD97P2T1StlxPOBGgHC5yFZgSCCgoq7FO8NAcpal6ypI+gEXyeiKVbS2qq"+
		"FI+uLOyaGguEACaFs+Yb2Fb1GWzekjmCmCGTN8zq/hPs2DdaneEYjHWch84bABK00op0aQLMUuh5sgvP+hdjeaMXtUKvYrlfFW42rSw+2HiOFw4Hk7Wnt4VK3SHXZy9s"+
		"wkYuxFuiPcnxTo9h3Mws9VBr2epo+okJfhkFV731WeF65YS/AXuheb6CMotLzXFUFWGReoWzj2XGFZJd2k1Jq3M06IAKFfmKzs4EbliNkcHrQiVaRfsbkCo9i8aAVKE4"+
		"fdRLPDwqb6nEony/wJeQHE1U8gZkt17xpvFBwOs00UTGPOJx3dme42yPe+Ws2/pSVsFHER+0jRntnjzyhI4XWpjq7mp6V54pIAHDghNKgnggWIT2zD/ZXlX+I9GvCn9p"+
		"+DdEctYlMBPQ9v5y1Chsxk3daGnPWK3OmFVRVleNRgTqjEvcX93e4rn1SMsZa8LXqmoqz+Xl6IQNG94faJkckZfOl77NnQrkzKxRdbOVBMvTxieAUDu55EZXTDnthWzH"+
		"c6KKBhmUKjOlqxSPLCoFx4L7gT3nvm+3puGoMU8s4exfcNEcl58zAtdUxKygNvRSyaqt4jxY59ZlBJ3g1QSQlVd771ftfLkORDm9dS5gJ/7rvUtpXHt9/nfx3P7+6W0Q"+
		"0LL2yvDvAdnA/0c/4DXuairg+8Hj8YxB4K/vN/nP+6vA31SGvs8T5L0fwoCPrX/fAv6fR0cG/zYJL/2+sdcc9vy/P04M/n75Zd5mNLzeHQbs+b+/b80Z/Nzs/f36aPuO"+
		"cAj4fr/K/fdTsBdWFHDMMAz/xftetvevTWD284OlvuBnk+P+b76/X8JOVUBfX2uDwO7fd7X6iIB/Jz8PGe+ndwLfT5h02Xd3kK/DV58Kz58d6v6/9/qeH2xoBWbX9P2H"+
		"ad4+TFINjgr2+mYerqgEDiVer2L/BXwDn7c6Du4+EANeCwJiMj5ifj78+P59VCQkCHQtBXziS22vfYwJLAh9UT1efStt3v1z9b7fWvu48Qg4dPj31rC3ZnDykXL6/WNx"+
		"RPfvaazvfenu3zQv0O7pTL+g/Ofi39t+390DUncAy9prwd+vo4zP5ykBNG2Pd7a/JDn/Hv5b3Lj1u30qIeBH4S7xk8P/++Um4N+sQh9D31/8+72H5++K9a1/J4Q/mx3A"+
		"v7tr/93g7srvT8Pr8/82h94uJfp33ltfz/UdMwb/L/6vhFn9P9h9dSn6RANQkP8BzUk35Q9BAAA=";

	// Decoded playfield: 500x180 cells stored row-major (index y*500+x),
	// one long per cell.
	private static readonly long[] g = System.Array.ConvertAll(zd(System.Convert.FromBase64String(_g)),b=>(long)b);

	// zd: strip the leading round-count byte, then gunzip the remainder
	// that many times (multi-round compression).
	private static byte[]zd(byte[]o){byte[]d=System.Linq.Enumerable.ToArray(System.Linq.Enumerable.Skip(o, 1));for(int i=0;i<o[0];i++)d=zs(d);return d;}

	// zs: one gzip decompression pass over a byte buffer.
	private static byte[]zs(byte[]o){using(var c=new System.IO.MemoryStream(o)) using(var z=new System.IO.Compression.GZipStream(c,System.IO.Compression.CompressionMode.Decompress)) using(var r=new System.IO.MemoryStream()){z.CopyTo(r);return r.ToArray();}}

	// gr: grid read; out-of-bounds coordinates yield 0.
	private static long gr(long x,long y){return(x>=0&&y>=0&&x<500&&y<180)?g[y*500+x]:0;}

	// gw: grid write; out-of-bounds writes are silently ignored.
	private static void gw(long x,long y,long v){if(x>=0&&y>=0&&x<500&&y<180)g[y*500+x]=v;}

	// td/tm: "total" division/modulo - a zero divisor yields 0 instead of
	// throwing (Befunge arithmetic semantics as emitted by the compiler).
	private static long td(long a,long b){ return (b==0)?0:(a/b); }
	private static long tm(long a,long b){ return (b==0)?0:(a%b); }

	// Operand stack; pop/peek of an empty stack returns 0.
	private static System.Collections.Generic.Stack<long> s=new System.Collections.Generic.Stack<long>();
	private static long sp(){ return (s.Count==0)?0:s.Pop(); }
	private static void sa(long v){ s.Push(v); }
	private static long sr(){ return (s.Count==0)?0:s.Peek(); }

	// Transpiled entry point. Labels _1.._25 are the basic blocks of the
	// original Befunge control-flow graph; t0..t4 are scratch temporaries.
	// NOTE(review): from the data flow this appears to be a minimal-path-sum
	// style dynamic program over an 80x80 grid of 4-digit numbers parsed in
	// block _25 (cf. the 6399 == 80*80-1 loop counter and the 252047376
	// sentinel value) - confirm against the original Befunge source.
	static void Main(string[]args)
	{
		long t0,t1,t2,t3,t4;
		gw(88,81,(gr(498,79)-48)+(((gr(497,79)-48)+(((gr(496,79)-48)+((gr(495,79)-48)*10))*10))*10));
		gw(88,179,252047376);
		sa(6399);
		sa(6399);
	// _1: per-cell parsing loop; falls through to _2 when the counter hits 0.
	_1:
		if(sp()!=0)goto _25;else goto _2;
	_2:
		gw(9,179,gr(9,81));
		sp();
		sa(79);
		sa(79);
	_3:
		if(sp()!=0)goto _24;else goto _4;
	_4:
		gw(2,0,1);
		sp();
		sa(1);
	_5:
		gw(3,0,0);
		sa(0);
	// _6.._14: relaxation sweeps over the 80-wide working area, keeping the
	// smaller accumulated value in the (x+9, y+100) table.
	_6:
		gw(4,0,gr(gr(2,0)+8,gr(3,0)+100));
		sa(gr(3,0));
		gw(5,0,gr(3,0));
	_7:
		t0=gr(4,0)+gr(gr(2,0)+9,gr(5,0)+2);
		t1=gr(4,0)+gr(gr(2,0)+9,gr(5,0)+2);
		gw(4,0,gr(4,0)+gr(gr(2,0)+9,gr(5,0)+2));
		t1=t1>gr(gr(2,0)+9,gr(5,0)+100)?1:0;
		if((t1)!=0)goto _9;else goto _8;
	_8:
		gw(gr(2,0)+9,gr(5,0)+100,t0);
		sa(sp()+1L);
		if(sr()!=80)goto _23;else goto _9;
	_9:
		sp();
		gw(4,0,gr(gr(2,0)+8,gr(3,0)+100));
		sa(gr(3,0));
		gw(5,0,gr(3,0));
	_10:
		t0=gr(4,0)+gr(gr(2,0)+9,gr(5,0)+2);
		t1=gr(4,0)+gr(gr(2,0)+9,gr(5,0)+2);
		gw(4,0,gr(4,0)+gr(gr(2,0)+9,gr(5,0)+2));
		t1=t1>gr(gr(2,0)+9,gr(5,0)+100)?1:0;
		if((t1)!=0)goto _12;else goto _11;
	_11:
		gw(gr(2,0)+9,gr(5,0)+100,t0);
		sa(sp()-1L);
		if(sr()!=-1)goto _22;else goto _12;
	_12:
		sp();
		sa(sp()+1L);
		if(sr()!=80)goto _21;else goto _13;
	_13:
		sp();
		sa(sp()+1L);
		if(sr()!=80)goto _14;else goto _15;
	_14:
		sa(sr());
		gw(2,0,sp());
		goto _5;
	// _15.._20: scan column 88, rows 100..178, for the smallest accumulated
	// value; _18 prints the result and terminates.
	_15:
		gw(7,0,gr(88,179));
		sp();
		sa(78);
		sa(gr(88,178));
		sa(gr(88,178)>gr(7,0)?1:0);
	_16:
		if(sp()!=0)goto _17;else goto _20;
	_17:
		sp();
		sa(sr());
		if(sp()!=0)goto _19;else goto _18;
	_18:
		System.Console.Out.Write(gr(7,0)+" ");
		sp();
		return;
	_19:
		sa(sp()-1L);
		sa(sr());
		sa(88);
		{long v0=sp();long v1=sp();sa(v0);sa(v1);}
		sa(sp()+100L);
		{long v0=sp();sa(gr(sp(),v0));}
		sa(sr()>gr(7,0)?1:0);
		goto _16;
	_20:
		gw(7,0,sp());
		sa(0);
		goto _17;
	_21:
		sa(sr());
		gw(3,0,sp());
		goto _6;
	_22:
		sa(sr());
		gw(5,0,sp());
		goto _10;
	_23:
		sa(sr());
		gw(5,0,sp());
		goto _7;
	_24:
		sa(sp()-1L);
		sa(sr());
		sa(sr());
		sa(9);
		{long v0=sp();long v1=sp();sa(v0);sa(v1);}
		sa(sp()+2L);
		{long v0=sp();sa(gr(sp(),v0));}
		{long v0=sp();long v1=sp();sa(v0);sa(v1);}
		sa(9);
		{long v0=sp();long v1=sp();sa(v0);sa(v1);}
		sa(sp()+100L);
		{long v0=sp();long v1=sp();gw(v1,v0,sp());}
		sa(sr());
		goto _3;
	// _25: read the next cell's four ASCII digits from the raw text grid,
	// combine them into one number at (i%80+9, i/80+2), and seed the DP
	// table at (i%80+9, i/80+100) with the 252047376 sentinel.
	_25:
		sa(sp()-1L);
		sa(sr());
		sa(sr());
		sa(sr());
		sa(sr());
		sa(((sr()%80)*5)+103);
		{long v0=sp();long v1=sp();sa(v0);sa(v1);}
		sa(sp()/80L);
		{long v0=sp();t0=gr(sp(),v0);}
		t0-=48;
		sa(sr());
		sa(((sr()%80)*5)+102);
		{long v0=sp();long v1=sp();sa(v0);sa(v1);}
		sa(sp()/80L);
		{long v0=sp();t1=gr(sp(),v0);}
		t1-=48;
		sa(sr());
		sa(((sr()%80)*5)+101);
		{long v0=sp();long v1=sp();sa(v0);sa(v1);}
		sa(sp()/80L);
		{long v0=sp();t2=gr(sp(),v0);}
		t2-=48;
		sa(((sr()%80)*5)+100);
		{long v0=sp();long v1=sp();sa(v0);sa(v1);}
		sa(sp()/80L);
		{long v0=sp();t3=gr(sp(),v0);}
		t3-=48;
		t3*=10;
		t4=t2+t3;
		t4*=10;
		t2=t1+t4;
		t2*=10;
		sa(t0+t2);
		{long v0=sp();long v1=sp();sa(v0);sa(v1);}
		sa((sr()%80)+9);
		{long v0=sp();long v1=sp();sa(v0);sa(v1);}
		sa(sp()/80L);
		sa(sp()+2L);
		{long v0=sp();long v1=sp();gw(v1,v0,sp());}
		sa(252047376);
		{long v0=sp();long v1=sp();sa(v0);sa(v1);}
		sa((sr()%80)+9);
		{long v0=sp();long v1=sp();sa(v0);sa(v1);}
		sa(sp()/80L);
		sa(sp()+100L);
		{long v0=sp();long v1=sp();gw(v1,v0,sp());}
		sa(sr());
		goto _1;
	}
}