context
stringlengths
2.52k
185k
gt
stringclasses
1 value
namespace Aurora.Migrations
{
    using System;
    using System.Data.Entity;
    using System.Data.Entity.Migrations;
    using System.Linq;
    using Aurora.Models.Derived;
    using Aurora.Models.Primitives;
    using Aurora.Entity;
    using System.Collections.Generic;

    /// <summary>
    /// EF code-first migrations configuration for <see cref="AuroraContext"/>.
    /// <see cref="Seed"/> populates reference data (campuses, roles, resources,
    /// departments) and a small set of demo users, companies, hiring profiles,
    /// students and progressions. Seeding is idempotent via AddOrUpdate.
    /// </summary>
    internal sealed class Configuration : DbMigrationsConfiguration<AuroraContext>
    {
        #region Campus Seed Values
        private Campus Bangalore = new Campus { CampusId = 1, CampusName = "Bangalore" };
        private Campus Coimbatore = new Campus { CampusId = 2, CampusName = "Coimbatore" };
        private Campus Amritapuri = new Campus { CampusId = 3, CampusName = "Amritapuri" };
        private Campus Mysore = new Campus { CampusId = 4, CampusName = "Mysore" };
        private Campus Kochi = new Campus { CampusId = 5, CampusName = "Kochi" };
        #endregion

        #region Role Seed Values
        private Role Developer = new Role { RoleId = 1, RoleName = "Developer" };
        private Role Admin = new Role { RoleId = 2, RoleName = "Administrator" };
        private Role SuperModerator = new Role { RoleId = 3, RoleName = "SuperModerator" };
        private Role Moderator = new Role { RoleId = 4, RoleName = "Moderator" };
        private Role User = new Role { RoleId = 5, RoleName = "User" };
        #endregion

        #region Resource Seed Values
        // NOTE: these field names intentionally shadow the type names Resource/Role/
        // Campus/Department/Permission; kept as-is to preserve the existing layout.
        private Resource Resource = new Resource { ResourceId = 1, ResourceName = "Resource Management" };
        private Resource Role = new Resource { ResourceId = 2, ResourceName = "Role Management" };
        private Resource Campus = new Resource { ResourceId = 3, ResourceName = "Campus Management" };
        private Resource Department = new Resource { ResourceId = 4, ResourceName = "Department Management" };
        private Resource Permission = new Resource { ResourceId = 5, ResourceName = "Permission Management" };
        #endregion

        #region Department Seed Values
        private Department CSE = new Department { DepartmentId = 1, DepartmentName = "Computer Science", IsUndergraduate = true };
        private Department ECE = new Department { DepartmentId = 2, DepartmentName = "Electronics and Communication", IsUndergraduate = true };
        private Department EEE = new Department { DepartmentId = 3, DepartmentName = "Electrical and Electronics", IsUndergraduate = true };
        private Department EIE = new Department { DepartmentId = 4, DepartmentName = "Electronics and Instrumentation", IsUndergraduate = true };
        private Department CIVIL = new Department { DepartmentId = 5, DepartmentName = "Civil", IsUndergraduate = true };
        private Department MECH = new Department { DepartmentId = 6, DepartmentName = "Mechanical", IsUndergraduate = true };
        #endregion

        #region Process Step Seed Values
        // No explicit ids: ProcessStep keys are database-generated on first save.
        private ProcessStep Written = new ProcessStep { ProcessStepName = "Written Test" };
        private ProcessStep TechnicalRound = new ProcessStep { ProcessStepName = "Technical Interview" };
        private ProcessStep HRRound = new ProcessStep { ProcessStepName = "HR Interview" };
        #endregion

        public Configuration()
        {
            AutomaticMigrationsEnabled = false;
        }

        /// <summary>
        /// Called after migrating to the latest version. Each seeding stage is
        /// followed by SaveChanges so later stages can look up the entities the
        /// earlier stages persisted (e.g. generated ids).
        /// </summary>
        protected override void Seed(AuroraContext context)
        {
            InitialPrimitivesSeed(context);
            context.SaveChanges();
            PermissionModelSeed(context);
            context.SaveChanges();
            UserModelSeed(context);
            context.SaveChanges();
            CompanyModelSeed(context);
            context.SaveChanges();
            HiringProcessSeed(context);
            context.SaveChanges();
            StudentModelSeed(context);
            context.SaveChanges();
            ProgressionModelSeed(context);
            context.SaveChanges();
        }

        // Seeds the flat reference tables; AddOrUpdate keys on the display name
        // so re-running the seed does not duplicate rows.
        private void InitialPrimitivesSeed(AuroraContext context)
        {
            context.Campuses.AddOrUpdate(t => t.CampusName, Bangalore, Coimbatore, Amritapuri, Mysore, Kochi);
            context.Roles.AddOrUpdate(t => t.RoleName, Developer, Admin, SuperModerator, Moderator, User);
            context.Resources.AddOrUpdate(t => t.ResourceName, Resource, Role, Campus, Department, Permission);
            context.Departments.AddOrUpdate(t => t.DepartmentName, CSE, EEE, ECE, EIE, MECH, CIVIL);
        }

        // Grants Developer and Admin full CRUD on every resource.
        private void PermissionModelSeed(AuroraContext context)
        {
            var Developer = context.Roles.First(t => t.RoleId == 1);
            var Admin = context.Roles.First(t => t.RoleId == 2);
            var Resource = context.Resources.First(t => t.ResourceId == 1);
            var Role = context.Resources.First(t => t.ResourceId == 2);
            var Campus = context.Resources.First(t => t.ResourceId == 3);
            var Department = context.Resources.First(t => t.ResourceId == 4);
            var Permission = context.Resources.First(t => t.ResourceId == 5);

            // Only seed when every prerequisite row was found.
            if (new object[] { Developer, Admin, Resource, Role, Campus, Department, Permission }.All(t => t != null))
            {
                context.Permissions.AddOrUpdate(
                    t => t.PermissionId,
                    new Permission { PermissionId = 1, Role = Developer, Resource = Resource, CanCreate = true, CanRead = true, CanUpdate = true, CanDelete = true },
                    new Permission { PermissionId = 2, Role = Developer, Resource = Role, CanCreate = true, CanRead = true, CanUpdate = true, CanDelete = true },
                    new Permission { PermissionId = 3, Role = Developer, Resource = Campus, CanCreate = true, CanRead = true, CanUpdate = true, CanDelete = true },
                    new Permission { PermissionId = 4, Role = Developer, Resource = Department, CanCreate = true, CanRead = true, CanUpdate = true, CanDelete = true },
                    new Permission { PermissionId = 5, Role = Developer, Resource = Permission, CanCreate = true, CanRead = true, CanUpdate = true, CanDelete = true },
                    new Permission { PermissionId = 6, Role = Admin, Resource = Resource, CanCreate = true, CanRead = true, CanUpdate = true, CanDelete = true },
                    new Permission { PermissionId = 7, Role = Admin, Resource = Role, CanCreate = true, CanRead = true, CanUpdate = true, CanDelete = true },
                    new Permission { PermissionId = 8, Role = Admin, Resource = Campus, CanCreate = true, CanRead = true, CanUpdate = true, CanDelete = true },
                    new Permission { PermissionId = 9, Role = Admin, Resource = Department, CanCreate = true, CanRead = true, CanUpdate = true, CanDelete = true },
                    new Permission { PermissionId = 10, Role = Admin, Resource = Permission, CanCreate = true, CanRead = true, CanUpdate = true, CanDelete = true }
                );
            }
        }

        // Seeds three demo users, all on campus 1 / department 1 / role 1.
        private void UserModelSeed(AuroraContext context)
        {
            var Campus = context.Campuses.Find(1);
            var Department = context.Departments.Find(1);
            var Role = context.Roles.Find(1);

            context.Users.AddOrUpdate(
                t => t.UserId,
                new User { UserId = 1, Campus = Campus, Department = Department, Role = Role, Username = "warlord1609", FirstName = "Bhargav", LastName = "Krishna", RegistrationNumber = "BL.EN.U4CSE09022", Email = "bhargavkrishna16@gmail.com", Contact = "+919844469276", ProfileLink = "https://www.facebook.com/GeekInfinite" },
                new User { UserId = 2, Campus = Campus, Department = Department, Role = Role, Username = "amritha.dilip", FirstName = "Amritha", LastName = "Dilip", RegistrationNumber = "BL.EN.U4CSE09501", Email = "amritha.dilip@gmail.com", Contact = "+917411099809", ProfileLink = "https://www.facebook.com/GeekInfinite" },
                new User { UserId = 3, Campus = Campus, Department = Department, Role = Role, Username = "aishwarya1411", FirstName = "Aishwarya", LastName = "Ramanathan", RegistrationNumber = "BL.EN.U4CSE09004", Email = "aishwarya1411@gmail.com", Contact = "+917411099809", ProfileLink = "https://www.facebook.com/GeekInfinite" }
            );
        }

        // Seeds one demo company with a single attached hiring profile.
        private void CompanyModelSeed(AuroraContext context)
        {
            var CSE = context.Departments.Find(1);
            var ECE = context.Departments.Find(2);
            var Bangalore = context.Campuses.Find(1);

            HiringProfile FacebookHiringProfile = new HiringProfile
            {
                Batch = 2013,
                CGPA = 6.0f,
                Comments = null,
                Departments = new List<Department> { CSE, ECE } as ICollection<Department>,
                HiringDate = DateTime.Now.AddMonths(5),
                HiringProfileName = "Facebook Hiring BTech 2013",
                TenthPercentage = 60,
                TwelvthPercentage = 60,
                UnderGraduateCTC = 1200000,
                VisitingCampus = Bangalore,
                JobProfile = "Junior Web Developer",
            };

            Company FacebookIndia = new Company
            {
                CompanyName = "Facebook India",
                CompanyProfile = "https://www.facebook.com/FacebookIndia",
                CompanyUrl = "https://www.facebook.com",
                CompanyDescription = "Facebook is an online social networking service, whose name stems from the colloquial name for the book given to students at the start of the academic year by some university administrations in the United States to help students get to know each other.\r\n" +
                    "It was founded in February 2004 by Mark Zuckerberg with his college roommates and fellow Harvard University students Eduardo Saverin, Andrew McCollum, Dustin Moskovitz and Chris Hughes. The website's membership was initially limited by the founders to Harvard students, but was expanded to other colleges in the Boston area, the Ivy League, and Stanford University.\r\n" +
                    "It gradually added support for students at various other universities before opening to high school students, and eventually to anyone aged 13 and over.\r\n" +
                    "Facebook now allows any users who declare themselves to be at least 13 years old to become registered users of the site.\r\n",
                CompanyAddress = "1 Hacker Way, Menlo Park, CA, United States.\r\n",
                HiringProfiles = new List<HiringProfile> { FacebookHiringProfile } as ICollection<HiringProfile>
            };

            context.Companies.AddOrUpdate(t => t.CompanyName, FacebookIndia);
        }

        // Attaches the interview pipeline to company 1's hiring profile.
        private void HiringProcessSeed(AuroraContext context)
        {
            var Facebook = context.HiringProfiles
                .Include("ProcessSteps")
                .Where(t => t.Company.CompanyId == 1)
                .FirstOrDefault();

            // FIX: FirstOrDefault() returns null when the profile was not seeded;
            // the original dereferenced it unconditionally and would throw a
            // NullReferenceException. Guard instead, matching the defensive style
            // already used in PermissionModelSeed.
            if (Facebook == null)
            {
                return;
            }

            Facebook.ProcessSteps.Add(Written);
            // FIX: the original added the same TechnicalRound instance twice.
            // Re-adding the same tracked entity cannot yield two distinct steps,
            // so the duplicate Add was a copy/paste error and has been removed.
            Facebook.ProcessSteps.Add(TechnicalRound);
            Facebook.ProcessSteps.Add(HRRound);
        }

        // Seeds one demo student, keyed by username for idempotence.
        private void StudentModelSeed(AuroraContext context)
        {
            var BLR = context.Campuses.Find(1);
            var CSE = context.Departments.Find(1);
            var USR = context.Roles.Find(5);

            var Vaibhav = new Student
            {
                Campus = BLR,
                CGPA = 8.0f,
                Contact = "+919964005183",
                Department = CSE,
                Email = "vaibhavkrishna29@outlook.com",
                FirstName = "Vaibhav",
                LastName = "Krishna",
                ProfileLink = "https://www.facebook.com/vaibhav.krishna.31",
                RegistrationNumber = "BL.EN.U4.CSE12002",
                Role = USR,
                TenthPercentage = 88,
                TwelvthPercentage = 92,
                Username = "vaibhav.krishna",
                Company = null
            };

            context.Students.AddOrUpdate(t => t.Username, Vaibhav);
        }

        // Records the demo student's progress through the hiring pipeline.
        private void ProgressionModelSeed(AuroraContext context)
        {
            var Facebook = context.HiringProfiles.Find(1);
            // Key 4 presumably follows the three seeded users because Student
            // appears to share the User key space — TODO confirm against the
            // model's inheritance mapping.
            var Vaibhav = context.Students.Find(4);

            // FIX: Find() returns null when the entity is absent; the original
            // dereferenced Facebook.ProcessSteps unconditionally.
            if (Facebook == null || Vaibhav == null)
            {
                return;
            }

            var StudentProgress = new StudentProgression
            {
                HiringProfile = Facebook,
                Student = Vaibhav,
                Total = Facebook.ProcessSteps.Count,
                Cleared = 1
            };

            context.StudentProgressions.AddOrUpdate(t => t.HiringId, StudentProgress);
        }
    }
}
// Copyright (c) Umbraco.
// See LICENSE for more details.

using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using Microsoft.Extensions.Logging;
using NUnit.Framework;
using Umbraco.Cms.Core.Cache;
using Umbraco.Cms.Core.Configuration.Models;
using Umbraco.Cms.Core.Models;
using Umbraco.Cms.Core.Persistence.Querying;
using Umbraco.Cms.Core.Scoping;
using Umbraco.Cms.Core.Services;
using Umbraco.Cms.Infrastructure.Persistence;
using Umbraco.Cms.Infrastructure.Persistence.Repositories.Implement;
using Umbraco.Cms.Tests.Common.Testing;
using Umbraco.Cms.Tests.Integration.Testing;

namespace Umbraco.Cms.Tests.Integration.Umbraco.Infrastructure.Persistence.Repositories
{
    /// <summary>
    /// Integration tests for <see cref="LanguageRepository"/>. Each test runs
    /// against a fresh schema (NewSchemaPerTest), so every test starts with the
    /// five languages created by <see cref="CreateTestData"/>: en-US (id 1,
    /// installed by default), da-DK (2), sv-SE (3), de-DE (4), pt-PT (5).
    /// Several assertions below depend on these exact ids and on there being
    /// exactly five languages.
    /// </summary>
    [TestFixture]
    [UmbracoTest(Database = UmbracoTestOptions.Database.NewSchemaPerTest)]
    public class LanguageRepositoryTest : UmbracoIntegrationTest
    {
        private GlobalSettings _globalSettings;

        [SetUp]
        public void SetUp()
        {
            CreateTestData();
            _globalSettings = new GlobalSettings();
        }

        [Test]
        public void Can_Perform_Get_On_LanguageRepository()
        {
            // Arrange
            IScopeProvider provider = ScopeProvider;
            using (IScope scope = provider.CreateScope())
            {
                // SQL trace enabled here only to aid debugging of this test.
                scope.Database.AsUmbracoDatabase().EnableSqlTrace = true;
                LanguageRepository repository = CreateRepository(provider);

                // Act - id 1 is the default en-US language created at install.
                ILanguage language = repository.Get(1);

                // Assert
                Assert.That(language, Is.Not.Null);
                Assert.That(language.HasIdentity, Is.True);
                Assert.That(language.CultureName, Is.EqualTo("English (United States)"));
                Assert.That(language.IsoCode, Is.EqualTo("en-US"));
                Assert.That(language.FallbackLanguageId, Is.Null);
            }
        }

        [Test]
        public void Can_Perform_Get_By_Iso_Code_On_LanguageRepository()
        {
            IScopeProvider provider = ScopeProvider;
            using (provider.CreateScope())
            {
                LanguageRepository repository = CreateRepository(provider);
                var au = CultureInfo.GetCultureInfo("en-AU");
                var language = (ILanguage)new Language(_globalSettings, au.Name)
                {
                    CultureName = au.DisplayName,
                    FallbackLanguageId = 1
                };
                repository.Save(language);

                // re-get by ISO code rather than id
                language = repository.GetByIsoCode(au.Name);

                // Assert
                Assert.That(language, Is.Not.Null);
                Assert.That(language.HasIdentity, Is.True);
                Assert.That(language.CultureName, Is.EqualTo(au.DisplayName));
                Assert.That(language.IsoCode, Is.EqualTo(au.Name));
                Assert.That(language.FallbackLanguageId, Is.EqualTo(1));
            }
        }

        [Test]
        public void Get_When_Id_Doesnt_Exist_Returns_Null()
        {
            // Arrange
            IScopeProvider provider = ScopeProvider;
            using (provider.CreateScope())
            {
                LanguageRepository repository = CreateRepository(provider);

                // Act - id 0 is never assigned
                ILanguage language = repository.Get(0);

                // Assert
                Assert.That(language, Is.Null);
            }
        }

        [Test]
        public void Can_Perform_GetAll_On_LanguageRepository()
        {
            // Arrange
            IScopeProvider provider = ScopeProvider;
            using (provider.CreateScope())
            {
                LanguageRepository repository = CreateRepository(provider);

                // Act
                IEnumerable<ILanguage> languages = repository.GetMany();

                // Assert - en-US plus the four languages seeded in CreateTestData
                Assert.That(languages, Is.Not.Null);
                Assert.That(languages.Any(), Is.True);
                Assert.That(languages.Any(x => x == null), Is.False);
                Assert.That(languages.Count(), Is.EqualTo(5));
            }
        }

        [Test]
        public void Can_Perform_GetAll_With_Params_On_LanguageRepository()
        {
            // Arrange
            IScopeProvider provider = ScopeProvider;
            using (provider.CreateScope())
            {
                LanguageRepository repository = CreateRepository(provider);

                // Act - restrict to the two requested ids
                IEnumerable<ILanguage> languages = repository.GetMany(1, 2);

                // Assert
                Assert.That(languages, Is.Not.Null);
                Assert.That(languages.Any(), Is.True);
                Assert.That(languages.Any(x => x == null), Is.False);
                Assert.That(languages.Count(), Is.EqualTo(2));
            }
        }

        [Test]
        public void Can_Perform_GetByQuery_On_LanguageRepository()
        {
            // Arrange
            IScopeProvider provider = ScopeProvider;
            using (IScope scope = provider.CreateScope())
            {
                LanguageRepository repository = CreateRepository(provider);

                // Act - query on ISO code; da-DK was seeded with CultureName "da-DK"
                IQuery<ILanguage> query = scope.SqlContext.Query<ILanguage>().Where(x => x.IsoCode == "da-DK");
                IEnumerable<ILanguage> result = repository.Get(query);

                // Assert
                Assert.That(result, Is.Not.Null);
                Assert.That(result.Any(), Is.True);
                Assert.That(result.FirstOrDefault().CultureName, Is.EqualTo("da-DK"));
            }
        }

        [Test]
        public void Can_Perform_Count_On_LanguageRepository()
        {
            // Arrange
            IScopeProvider provider = ScopeProvider;
            using (IScope scope = provider.CreateScope())
            {
                LanguageRepository repository = CreateRepository(provider);

                // Act - matches da-DK and de-DE from the seeded data
                IQuery<ILanguage> query = scope.SqlContext.Query<ILanguage>().Where(x => x.IsoCode.StartsWith("D"));
                int count = repository.Count(query);

                // Assert
                Assert.That(count, Is.EqualTo(2));
            }
        }

        [Test]
        public void Can_Perform_Add_On_LanguageRepository()
        {
            // Arrange
            IScopeProvider provider = ScopeProvider;
            using (provider.CreateScope())
            {
                LanguageRepository repository = CreateRepository(provider);

                // Act
                var languageBR = new Language(_globalSettings, "pt-BR") { CultureName = "pt-BR" };
                repository.Save(languageBR);

                // Assert
                Assert.That(languageBR.HasIdentity, Is.True);
                Assert.That(languageBR.Id, Is.EqualTo(6)); // With 5 existing entries the Id should be 6
                Assert.IsFalse(languageBR.IsDefault);
                Assert.IsFalse(languageBR.IsMandatory);
                Assert.IsNull(languageBR.FallbackLanguageId);
            }
        }

        [Test]
        public void Can_Perform_Add_On_LanguageRepository_With_Boolean_Properties()
        {
            // Arrange
            IScopeProvider provider = ScopeProvider;
            using (provider.CreateScope())
            {
                LanguageRepository repository = CreateRepository(provider);

                // Act - IsDefault/IsMandatory must round-trip through Save
                var languageBR = new Language(_globalSettings, "pt-BR") { CultureName = "pt-BR", IsDefault = true, IsMandatory = true };
                repository.Save(languageBR);

                // Assert
                Assert.That(languageBR.HasIdentity, Is.True);
                Assert.That(languageBR.Id, Is.EqualTo(6)); // With 5 existing entries the Id should be 6
                Assert.IsTrue(languageBR.IsDefault);
                Assert.IsTrue(languageBR.IsMandatory);
                Assert.IsNull(languageBR.FallbackLanguageId);
            }
        }

        [Test]
        public void Can_Perform_Add_On_LanguageRepository_With_Fallback_Language()
        {
            // Arrange
            IScopeProvider provider = ScopeProvider;
            using (provider.CreateScope())
            {
                LanguageRepository repository = CreateRepository(provider);

                // Act - fall back to en-US (id 1)
                var languageBR = new Language(_globalSettings, "pt-BR") { CultureName = "pt-BR", FallbackLanguageId = 1 };
                repository.Save(languageBR);

                // Assert
                Assert.That(languageBR.HasIdentity, Is.True);
                Assert.That(languageBR.Id, Is.EqualTo(6)); // With 5 existing entries the Id should be 6
                Assert.That(languageBR.FallbackLanguageId, Is.EqualTo(1));
            }
        }

        [Test]
        public void Can_Perform_Add_On_LanguageRepository_With_New_Default()
        {
            // Arrange - saving a new default language must clear the flag on the
            // previous default.
            IScopeProvider provider = ScopeProvider;
            using (provider.CreateScope())
            {
                LanguageRepository repository = CreateRepository(provider);
                var languageBR = (ILanguage)new Language(_globalSettings, "pt-BR") { CultureName = "pt-BR", IsDefault = true, IsMandatory = true };
                repository.Save(languageBR);
                var languageEN = new Language(_globalSettings, "en-AU") { CultureName = "en-AU" };
                repository.Save(languageEN);
                Assert.IsTrue(languageBR.IsDefault);
                Assert.IsTrue(languageBR.IsMandatory);

                // Act - a newer default supersedes languageBR
                var languageNZ = new Language(_globalSettings, "en-NZ") { CultureName = "en-NZ", IsDefault = true, IsMandatory = true };
                repository.Save(languageNZ);
                languageBR = repository.Get(languageBR.Id);

                // Assert
                Assert.IsFalse(languageBR.IsDefault);
                Assert.IsTrue(languageNZ.IsDefault);
            }
        }

        [Test]
        public void Can_Perform_Update_On_LanguageRepository()
        {
            // Arrange
            IScopeProvider provider = ScopeProvider;
            using (provider.CreateScope())
            {
                LanguageRepository repository = CreateRepository(provider);

                // Act - re-purpose language 5 (pt-PT) as pt-BR
                ILanguage language = repository.Get(5);
                language.IsoCode = "pt-BR";
                language.CultureName = "pt-BR";
                language.FallbackLanguageId = 1;
                repository.Save(language);

                ILanguage languageUpdated = repository.Get(5);

                // Assert
                Assert.That(languageUpdated, Is.Not.Null);
                Assert.That(languageUpdated.IsoCode, Is.EqualTo("pt-BR"));
                Assert.That(languageUpdated.CultureName, Is.EqualTo("pt-BR"));
                Assert.That(languageUpdated.FallbackLanguageId, Is.EqualTo(1));
            }
        }

        [Test]
        public void Perform_Update_With_Existing_Culture()
        {
            // Arrange - renaming a language to an ISO code already in use must fail
            IScopeProvider provider = ScopeProvider;
            using (provider.CreateScope())
            {
                LanguageRepository repository = CreateRepository(provider);

                // Act - da-DK already exists as language 2
                ILanguage language = repository.Get(5);
                language.IsoCode = "da-DK";
                language.CultureName = "da-DK";

                Assert.Throws<InvalidOperationException>(() => repository.Save(language));
            }
        }

        [Test]
        public void Can_Perform_Delete_On_LanguageRepository()
        {
            // Arrange
            IScopeProvider provider = ScopeProvider;
            using (provider.CreateScope())
            {
                LanguageRepository repository = CreateRepository(provider);

                // Act
                ILanguage language = repository.Get(3);
                repository.Delete(language);

                bool exists = repository.Exists(3);

                // Assert
                Assert.That(exists, Is.False);
            }
        }

        [Test]
        public void Can_Perform_Delete_On_LanguageRepository_With_Language_Used_As_Fallback()
        {
            // Arrange
            IScopeProvider provider = ScopeProvider;
            using (provider.CreateScope())
            {
                // Add language to delete as a fall-back language to another one
                LanguageRepository repository = CreateRepository(provider);
                ILanguage languageToFallbackFrom = repository.Get(5);
                languageToFallbackFrom.FallbackLanguageId = 2; // fall back to #2 (something we can delete)
                repository.Save(languageToFallbackFrom);

                // delete #2 - must succeed even though #5 falls back to it
                ILanguage languageToDelete = repository.Get(2);
                repository.Delete(languageToDelete);

                bool exists = repository.Exists(2); // has been deleted
                Assert.That(exists, Is.False);
            }
        }

        [Test]
        public void Can_Perform_Exists_On_LanguageRepository()
        {
            // Arrange
            IScopeProvider provider = ScopeProvider;
            using (provider.CreateScope())
            {
                LanguageRepository repository = CreateRepository(provider);

                // Act
                bool exists = repository.Exists(3);
                bool doesntExist = repository.Exists(10);

                // Assert
                Assert.That(exists, Is.True);
                Assert.That(doesntExist, Is.False);
            }
        }

        // Builds a repository bound to the given provider; caching is disabled so
        // every call hits the database.
        private LanguageRepository CreateRepository(IScopeProvider provider) =>
            new LanguageRepository((IScopeAccessor)provider, AppCaches.Disabled, LoggerFactory.CreateLogger<LanguageRepository>(), Microsoft.Extensions.Options.Options.Create(_globalSettings));

        // Seeds languages 2-5; ids are assigned sequentially by the service.
        private void CreateTestData()
        {
            // Id 1 is en-US - when Umbraco is installed
            ILocalizationService localizationService = GetRequiredService<ILocalizationService>();

            var languageDK = new Language(_globalSettings, "da-DK") { CultureName = "da-DK" };
            localizationService.Save(languageDK); // Id 2

            var languageSE = new Language(_globalSettings, "sv-SE") { CultureName = "sv-SE" };
            localizationService.Save(languageSE); // Id 3

            var languageDE = new Language(_globalSettings, "de-DE") { CultureName = "de-DE" };
            localizationService.Save(languageDE); // Id 4

            var languagePT = new Language(_globalSettings, "pt-PT") { CultureName = "pt-PT" };
            localizationService.Save(languagePT); // Id 5
        }
    }
}
// 
// Copyright (c) 2004-2016 Jaroslaw Kowalski <jaak@jkowalski.net>, Kim Christensen, Julian Verdurmen
// 
// All rights reserved.
// 
// Redistribution and use in source and binary forms, with or without 
// modification, are permitted provided that the following conditions 
// are met:
// 
// * Redistributions of source code must retain the above copyright notice, 
//   this list of conditions and the following disclaimer. 
// 
// * Redistributions in binary form must reproduce the above copyright notice,
//   this list of conditions and the following disclaimer in the documentation
//   and/or other materials provided with the distribution. 
// 
// * Neither the name of Jaroslaw Kowalski nor the names of its 
//   contributors may be used to endorse or promote products derived from this
//   software without specific prior written permission. 
// 
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE 
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS 
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN 
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) 
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF 
// THE POSSIBILITY OF SUCH DAMAGE.
// 

#if !SILVERLIGHT && !__IOS__ && !__ANDROID__

namespace NLog.Targets
{
    using System;
    using System.ComponentModel;
    using System.Diagnostics;
    using System.Globalization;
    using System.Security;
    using Internal.Fakeables;
    using NLog.Common;
    using NLog.Config;
    using NLog.Internal;
    using NLog.Layouts;

    /// <summary>
    /// Writes log message to the Event Log.
    /// </summary>
    /// <seealso href="https://github.com/nlog/nlog/wiki/EventLog-target">Documentation on NLog Wiki</seealso>
    /// <example>
    /// <p>
    /// To set up the target in the <a href="config.html">configuration file</a>,
    /// use the following syntax:
    /// </p>
    /// <code lang="XML" source="examples/targets/Configuration File/EventLog/NLog.config" />
    /// <p>
    /// This assumes just one target and a single rule. More configuration
    /// options are described <a href="config.html">here</a>.
    /// </p>
    /// <p>
    /// To set up the log target programmatically use code like this:
    /// </p>
    /// <code lang="C#" source="examples/targets/Configuration API/EventLog/Simple/Example.cs" />
    /// </example>
    [Target("EventLog")]
    public class EventLogTarget : TargetWithLayout, IInstallable
    {
        // Lazily created in GetEventLog and reused for every Write call.
        private EventLog eventLogInstance;

        /// <summary>
        /// Initializes a new instance of the <see cref="EventLogTarget"/> class.
        /// </summary>
        public EventLogTarget()
            : this(AppDomainWrapper.CurrentDomain)
        {
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="EventLogTarget"/> class.
        /// </summary>
        public EventLogTarget(IAppDomain appDomain)
        {
            // Default source is the AppDomain's friendly name; overridable via config.
            this.Source = appDomain.FriendlyName;
            this.Log = "Application";
            this.MachineName = ".";
            this.MaxMessageLength = 16384;
        }

        /// <summary>
        /// Gets or sets the name of the machine on which Event Log service is running.
        /// </summary>
        /// <docgen category='Event Log Options' order='10' />
        [DefaultValue(".")]
        public string MachineName { get; set; }

        /// <summary>
        /// Gets or sets the layout that renders event ID.
        /// </summary>
        /// <docgen category='Event Log Options' order='10' />
        public Layout EventId { get; set; }

        /// <summary>
        /// Gets or sets the layout that renders event Category.
        /// </summary>
        /// <docgen category='Event Log Options' order='10' />
        public Layout Category { get; set; }

        /// <summary>
        /// Optional entrytype. When not set, or when not convertable to <see cref="LogLevel"/> then determined by <see cref="NLog.LogLevel"/>
        /// </summary>
        public Layout EntryType { get; set; }

        /// <summary>
        /// Gets or sets the value to be used as the event Source.
        /// </summary>
        /// <remarks>
        /// By default this is the friendly name of the current AppDomain.
        /// </remarks>
        /// <docgen category='Event Log Options' order='10' />
        public Layout Source { get; set; }

        /// <summary>
        /// Gets or sets the name of the Event Log to write to. This can be System, Application or
        /// any user-defined name.
        /// </summary>
        /// <docgen category='Event Log Options' order='10' />
        [DefaultValue("Application")]
        public string Log { get; set; }

        // Backing field for MaxMessageLength; validated in the setter.
        private int maxMessageLength;

        /// <summary>
        /// Gets or sets the message length limit to write to the Event Log.
        /// </summary>
        /// <remarks><value>MaxMessageLength</value> cannot be zero or negative</remarks>
        [DefaultValue(16384)]
        public int MaxMessageLength
        {
            get { return this.maxMessageLength; }
            set
            {
                if (value <= 0)
                    throw new ArgumentException("MaxMessageLength cannot be zero or negative.");

                this.maxMessageLength = value;
            }
        }

        /// <summary>
        /// Gets or sets the action to take if the message is larger than the <see cref="MaxMessageLength"/> option.
        /// </summary>
        /// <docgen category='Event Log Overflow Action' order='10' />
        [DefaultValue(EventLogTargetOverflowAction.Truncate)]
        public EventLogTargetOverflowAction OnOverflow { get; set; }

        /// <summary>
        /// Performs installation which requires administrative permissions.
        /// </summary>
        /// <param name="installationContext">The installation context.</param>
        public void Install(InstallationContext installationContext)
        {
            var fixedSource = GetFixedSource();

            //always throw error to keep backwardscomp behavior.
            CreateEventSourceIfNeeded(fixedSource, true);
        }

        /// <summary>
        /// Performs uninstallation which requires administrative permissions.
        /// </summary>
        /// <param name="installationContext">The installation context.</param>
        public void Uninstall(InstallationContext installationContext)
        {
            var fixedSource = GetFixedSource();

            // A source containing layout renderers has no single fixed name to remove.
            if (string.IsNullOrEmpty(fixedSource))
            {
                InternalLogger.Debug("Skipping removing of event source because it contains layout renderers");
            }
            else
            {
                EventLog.DeleteEventSource(fixedSource, this.MachineName);
            }
        }

        /// <summary>
        /// Determines whether the item is installed.
        /// </summary>
        /// <param name="installationContext">The installation context.</param>
        /// <returns>
        /// Value indicating whether the item is installed or null if it is not possible to determine.
        /// </returns>
        public bool? IsInstalled(InstallationContext installationContext)
        {
            var fixedSource = GetFixedSource();

            if (!string.IsNullOrEmpty(fixedSource))
            {
                return EventLog.SourceExists(fixedSource, this.MachineName);
            }
            InternalLogger.Debug("Unclear if event source exists because it contains layout renderers");
            return null; //unclear!
        }

        /// <summary>
        /// Initializes the target. Re-creates the event source when its current
        /// log association differs from <see cref="Log"/>.
        /// </summary>
        protected override void InitializeTarget()
        {
            base.InitializeTarget();

            var fixedSource = GetFixedSource();
            if (string.IsNullOrEmpty(fixedSource))
            {
                InternalLogger.Debug("Skipping creation of event source because it contains layout renderers");
            }
            else
            {
                var currentSourceName = EventLog.LogNameFromSourceName(fixedSource, this.MachineName);
                if (!currentSourceName.Equals(this.Log, StringComparison.CurrentCultureIgnoreCase))
                {
                    // The source exists but is bound to a different log (or not bound);
                    // rebuild the association. Does not throw on failure here.
                    this.CreateEventSourceIfNeeded(fixedSource, false);
                }
            }
        }

        /// <summary>
        /// Writes the specified logging event to the event log.
        /// </summary>
        /// <param name="logEvent">The logging event.</param>
        protected override void Write(LogEventInfo logEvent)
        {
            string message = this.Layout.Render(logEvent);

            EventLogEntryType entryType = GetEntryType(logEvent);

            // NOTE(review): Convert.ToInt32/ToInt16 throw FormatException when the
            // EventId/Category layouts render non-numeric text.
            int eventId = 0;
            if (this.EventId != null)
            {
                eventId = Convert.ToInt32(this.EventId.Render(logEvent), CultureInfo.InvariantCulture);
            }

            short category = 0;
            if (this.Category != null)
            {
                category = Convert.ToInt16(this.Category.Render(logEvent), CultureInfo.InvariantCulture);
            }

            EventLog eventLog = GetEventLog(logEvent);

            // limitation of EventLog API: messages beyond MaxMessageLength must be
            // truncated, split into chunks, or discarded per OnOverflow.
            if (message.Length > this.MaxMessageLength)
            {
                if (OnOverflow == EventLogTargetOverflowAction.Truncate)
                {
                    message = message.Substring(0, this.MaxMessageLength);
                    eventLog.WriteEntry(message, entryType, eventId, category);
                }
                else if (OnOverflow == EventLogTargetOverflowAction.Split)
                {
                    // Each chunk becomes its own event log entry.
                    for (int offset = 0; offset < message.Length; offset += this.MaxMessageLength)
                    {
                        string chunk = message.Substring(offset, Math.Min(this.MaxMessageLength, (message.Length - offset)));
                        eventLog.WriteEntry(chunk, entryType, eventId, category);
                    }
                }
                else if (OnOverflow == EventLogTargetOverflowAction.Discard)
                {
                    //message will not be written
                    return;
                }
            }
            else
            {
                eventLog.WriteEntry(message, entryType, eventId, category);
            }
        }

        /// <summary>
        /// Get the entry type for logging the message.
        /// </summary>
        /// <param name="logEvent">The logging event - for rendering the <see cref="EntryType"/></param>
        /// <returns></returns>
        private EventLogEntryType GetEntryType(LogEventInfo logEvent)
        {
            if (this.EntryType != null)
            {
                //try parse, if fail, determine auto
                var value = this.EntryType.Render(logEvent);
                EventLogEntryType eventLogEntryType;
                if (EnumHelpers.TryParse(value, true, out eventLogEntryType))
                {
                    return eventLogEntryType;
                }
            }

            // determine auto from the event's level:
            // Error/Fatal -> Error, Warn -> Warning, everything else -> Information.
            if (logEvent.Level >= LogLevel.Error)
            {
                return EventLogEntryType.Error;
            }
            if (logEvent.Level >= LogLevel.Warn)
            {
                return EventLogEntryType.Warning;
            }
            return EventLogEntryType.Information;
        }

        /// <summary>
        /// Get the source, if and only if the source is fixed.
        /// </summary>
        /// <returns><c>null</c> when not <see cref="SimpleLayout.IsFixedText"/></returns>
        /// <remarks>Internal for unit tests</remarks>
        internal string GetFixedSource()
        {
            if (this.Source == null)
            {
                return null;
            }

            var simpleLayout = Source as SimpleLayout;
            if (simpleLayout != null && simpleLayout.IsFixedText)
            {
                return simpleLayout.FixedText;
            }
            return null;
        }

        /// <summary>
        /// Get the eventlog to write to.
        /// </summary>
        /// <param name="logEvent">Event if the source needs to be rendered.</param>
        /// <returns></returns>
        private EventLog GetEventLog(LogEventInfo logEvent)
        {
            // Created once; the rendered source of the first event is reused for
            // all subsequent events.
            return eventLogInstance ?? (eventLogInstance = new EventLog(this.Log, this.MachineName, this.Source.Render(logEvent)));
        }

        /// <summary>
        /// (re-)create a event source, if it isn't there. Works only with fixed sourcenames.
        /// </summary>
        /// <param name="fixedSource">sourcenaam. If source is not fixed (see <see cref="SimpleLayout.IsFixedText"/>, then pass <c>null</c> or emptystring.</param>
        /// <param name="alwaysThrowError">always throw an Exception when there is an error</param>
        private void CreateEventSourceIfNeeded(string fixedSource, bool alwaysThrowError)
        {
            if (string.IsNullOrEmpty(fixedSource))
            {
                InternalLogger.Debug("Skipping creation of event source because it contains layout renderers");
                //we can only create event sources if the source is fixed (no layout)
                return;
            }

            // if we throw anywhere, we remain non-operational
            try
            {
                if (EventLog.SourceExists(fixedSource, this.MachineName))
                {
                    string currentLogName = EventLog.LogNameFromSourceName(fixedSource, this.MachineName);
                    if (!currentLogName.Equals(this.Log, StringComparison.CurrentCultureIgnoreCase))
                    {
                        // re-create the association between Log and Source
                        EventLog.DeleteEventSource(fixedSource, this.MachineName);

                        var eventSourceCreationData = new EventSourceCreationData(fixedSource, this.Log)
                        {
                            MachineName = this.MachineName
                        };

                        EventLog.CreateEventSource(eventSourceCreationData);
                    }
                }
                else
                {
                    var eventSourceCreationData = new EventSourceCreationData(fixedSource, this.Log)
                    {
                        MachineName = this.MachineName
                    };

                    EventLog.CreateEventSource(eventSourceCreationData);
                }
            }
            catch (Exception exception)
            {
                InternalLogger.Error(exception, "Error when connecting to EventLog.");
                if (alwaysThrowError || exception.MustBeRethrown())
                {
                    throw;
                }
            }
        }
    }
}

#endif
#region License
// Copyright (c) Jeremy Skinner (http://www.jeremyskinner.co.uk)
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// The latest version of this file can be found at http://www.codeplex.com/FluentValidation
#endregion

namespace FluentValidation.Internal {
	using System;
	using System.Collections.Generic;
	using System.Linq;
	using System.Linq.Expressions;
	using System.Reflection;
	using Resources;
	using Results;
	using Validators;

	/// <summary>
	/// Defines a rule associated with a property.
	/// </summary>
	public class PropertyRule : IValidationRule {
		readonly List<IPropertyValidator> validators = new List<IPropertyValidator>();
		Func<CascadeMode> cascadeModeThunk = () => ValidatorOptions.CascadeMode;

		/// <summary>
		/// Property associated with this rule.
		/// </summary>
		public MemberInfo Member { get; private set; }

		/// <summary>
		/// Function that can be invoked to retrieve the value of the property.
		/// </summary>
		public Func<object, object> PropertyFunc { get; private set; }

		/// <summary>
		/// Expression that was used to create the rule.
		/// </summary>
		public LambdaExpression Expression { get; private set; }

		/// <summary>
		/// String source that can be used to retrieve the display name (if null, falls back to the property name)
		/// </summary>
		public IStringSource DisplayName { get; set; }

		/// <summary>
		/// Rule set that this rule belongs to (if specified)
		/// </summary>
		public string RuleSet { get; set; }

		/// <summary>
		/// Function that will be invoked if any of the validators associated with this rule fail.
		/// </summary>
		public Action<object> OnFailure { get; set; }

		/// <summary>
		/// The current validator being configured by this rule.
		/// </summary>
		public IPropertyValidator CurrentValidator { get; private set; }

		/// <summary>
		/// Type of the property being validated
		/// </summary>
		public Type TypeToValidate { get; private set; }

		/// <summary>
		/// Cascade mode for this rule.
		/// </summary>
		public CascadeMode CascadeMode {
			get { return cascadeModeThunk(); }
			set { cascadeModeThunk = () => value; }
		}

		/// <summary>
		/// Validators associated with this rule.
		/// </summary>
		public IEnumerable<IPropertyValidator> Validators {
			get { return validators.AsReadOnly(); }
		}

		/// <summary>
		/// Creates a new property rule.
		/// </summary>
		/// <param name="member">Property</param>
		/// <param name="propertyFunc">Function to get the property value</param>
		/// <param name="expression">Lambda expression used to create the rule</param>
		/// <param name="cascadeModeThunk">Function to get the cascade mode.</param>
		/// <param name="typeToValidate">Type to validate</param>
		/// <param name="containerType">Container type that owns the property</param>
		public PropertyRule(MemberInfo member, Func<object, object> propertyFunc, LambdaExpression expression, Func<CascadeMode> cascadeModeThunk, Type typeToValidate, Type containerType) {
			Member = member;
			PropertyFunc = propertyFunc;
			Expression = expression;
			OnFailure = x => { };
			TypeToValidate = typeToValidate;
			this.cascadeModeThunk = cascadeModeThunk;

			// Resolve the property name and display name via the globally configured resolvers.
			PropertyName = ValidatorOptions.PropertyNameResolver(containerType, member, expression);
			string displayName = ValidatorOptions.DisplayNameResolver(containerType, member, expression);
			if (!string.IsNullOrEmpty(displayName)) DisplayName = new StaticStringSource(displayName);
		}

		/// <summary>
		/// Creates a new property rule from a lambda expression.
		/// </summary>
		public static PropertyRule Create<T, TProperty>(Expression<Func<T, TProperty>> expression) {
			return Create(expression, () => ValidatorOptions.CascadeMode);
		}

		/// <summary>
		/// Creates a new property rule from a lambda expression.
		/// </summary>
		public static PropertyRule Create<T, TProperty>(Expression<Func<T, TProperty>> expression, Func<CascadeMode> cascadeModeThunk) {
			var member = expression.GetMember();
			var compiled = expression.Compile();
			return new PropertyRule(member, compiled.CoerceToNonGeneric(), expression, cascadeModeThunk, typeof(TProperty), typeof(T));
		}

		/// <summary>
		/// Adds a validator to the rule.
		/// </summary>
		public void AddValidator(IPropertyValidator validator) {
			CurrentValidator = validator;
			validators.Add(validator);
		}

		/// <summary>
		/// Replaces a validator in this rule. Used to wrap validators.
		/// </summary>
		public void ReplaceValidator(IPropertyValidator original, IPropertyValidator newValidator) {
			var index = validators.IndexOf(original);
			if (index > -1) {
				validators[index] = newValidator;
				if (ReferenceEquals(CurrentValidator, original)) {
					CurrentValidator = newValidator;
				}
			}
		}

		/// <summary>
		/// Remove a validator in this rule.
		/// </summary>
		public void RemoveValidator(IPropertyValidator original) {
			if (ReferenceEquals(CurrentValidator, original)) {
				// Fix: exclude the validator being removed when picking the replacement,
				// otherwise CurrentValidator could end up referencing the validator that is
				// about to be removed (when it happens to be the last element of the list).
				CurrentValidator = validators.LastOrDefault(x => !ReferenceEquals(x, original));
			}
			validators.Remove(original);
		}

		/// <summary>
		/// Clear all validators from this rule.
		/// </summary>
		public void ClearValidators() {
			CurrentValidator = null;
			validators.Clear();
		}

		/// <summary>
		/// Returns the property name for the property being validated.
		/// Returns null if it is not a property being validated (eg a method call)
		/// </summary>
		public string PropertyName { get; set; }

		/// <summary>
		/// Display name for the property.
		/// </summary>
		public string GetDisplayName() {
			if (DisplayName != null) {
				return DisplayName.GetString();
			}
			// Fall back to the property name, split on PascalCase boundaries ("FirstName" -> "First Name").
			return PropertyName.SplitPascalCase();
		}

		/// <summary>
		/// Performs validation using a validation context and returns a collection of Validation Failures.
		/// </summary>
		/// <param name="context">Validation Context</param>
		/// <returns>A collection of validation failures</returns>
		public virtual IEnumerable<ValidationFailure> Validate(ValidationContext context) {
			EnsureValidPropertyName();

			// Construct the full name of the property, taking into account overriden property names and the chain (if we're in a nested validator)
			string propertyName = BuildPropertyName(context);

			// Ensure that this rule is allowed to run.
			// The validatselector has the opportunity to veto this before any of the validators execute.
			if(! context.Selector.CanExecute(this, propertyName, context)) {
				yield break;
			}

			var cascade = cascadeModeThunk();
			bool hasAnyFailure = false;

			// Invoke each validator and collect its results.
			foreach (var validator in validators) {
				var results = InvokePropertyValidator(context, validator, propertyName);
				bool hasFailure = false;

				foreach (var result in results) {
					hasAnyFailure = true;
					hasFailure = true;
					yield return result;
				}

				// If there has been at least one failure, and our CascadeMode has been set to StopOnFirst
				// then don't continue to the next rule
				if (cascade == FluentValidation.CascadeMode.StopOnFirstFailure && hasFailure) {
					break;
				}
			}

			if (hasAnyFailure) {
				// Callback if there has been at least one property validator failed.
				OnFailure(context.InstanceToValidate);
			}
		}

		/// <summary>
		/// Invokes a property validator using the specified validation context.
		/// </summary>
		protected virtual IEnumerable<ValidationFailure> InvokePropertyValidator(ValidationContext context, IPropertyValidator validator, string propertyName) {
			var propertyContext = new PropertyValidatorContext(context, this, propertyName);
			return validator.Validate(propertyContext);
		}

		// A rule created from a method call (rather than a property access) has no
		// resolvable name; the user must supply one via WithName.
		private void EnsureValidPropertyName() {
			if (PropertyName == null && DisplayName == null) {
				throw new InvalidOperationException(string.Format("Property name could not be automatically determined for expression {0}. Please specify either a custom property name by calling 'WithName'.", Expression));
			}
		}

		// Prepends the property chain (for nested validators) to the property/display name.
		private string BuildPropertyName(ValidationContext context) {
			return context.PropertyChain.BuildPropertyName(PropertyName ?? DisplayName.GetString());
		}

		/// <summary>
		/// Applies a condition to this rule by wrapping validators in <see cref="DelegatingValidator"/>.
		/// </summary>
		public void ApplyCondition(Func<object, bool> predicate, ApplyConditionTo applyConditionTo = ApplyConditionTo.AllValidators) {
			// Default behaviour for When/Unless as of v1.3 is to apply the condition to all previous validators in the chain.
			if (applyConditionTo == ApplyConditionTo.AllValidators) {
				foreach (var validator in Validators.ToList()) {
					var wrappedValidator = new DelegatingValidator(predicate, validator);
					ReplaceValidator(validator, wrappedValidator);
				}
			}
			else {
				var wrappedValidator = new DelegatingValidator(predicate, CurrentValidator);
				ReplaceValidator(CurrentValidator, wrappedValidator);
			}
		}
	}
}
using Umbraco.Core.Configuration;
using System;
using Umbraco.Core.Configuration.UmbracoSettings;
using Umbraco.Core.IO;
using Umbraco.Core.Logging;
using Umbraco.Core.Persistence.Repositories;
using Umbraco.Core.Persistence.SqlSyntax;
using Umbraco.Core.Persistence.UnitOfWork;

namespace Umbraco.Core.Persistence
{
    /// <summary>
    /// Used to instantiate each repository type
    /// </summary>
    /// <remarks>
    /// Factory methods accepting a unit of work construct a new repository per call.
    /// Repositories documented "never cache" are created with a disabled cache helper.
    /// </remarks>
    public class RepositoryFactory
    {
        private readonly ILogger _logger;
        private readonly ISqlSyntaxProvider _sqlSyntax;
        private readonly CacheHelper _cacheHelper;
        private readonly IUmbracoSettingsSection _settings;

        #region Ctors

        /// <summary>
        /// Primary constructor - all dependencies supplied explicitly.
        /// </summary>
        public RepositoryFactory(CacheHelper cacheHelper, ILogger logger, ISqlSyntaxProvider sqlSyntax, IUmbracoSettingsSection settings)
        {
            if (cacheHelper == null) throw new ArgumentNullException("cacheHelper");
            if (logger == null) throw new ArgumentNullException("logger");
            //if (sqlSyntax == null) throw new ArgumentNullException("sqlSyntax");
            if (settings == null) throw new ArgumentNullException("settings");
            _cacheHelper = cacheHelper;
            _logger = logger;
            _sqlSyntax = sqlSyntax;
            _settings = settings;
        }

        // NOTE(review): the obsolete ctors below resolve dependencies from static
        // singletons (ApplicationContext.Current, LoggerResolver.Current, etc.), so they
        // can only be used once the application context has been initialized.
        [Obsolete("Use the ctor specifying all dependencies instead")]
        public RepositoryFactory()
            : this(ApplicationContext.Current.ApplicationCache, LoggerResolver.Current.Logger, SqlSyntaxContext.SqlSyntaxProvider, UmbracoConfig.For.UmbracoSettings())
        {
        }

        [Obsolete("Use the ctor specifying all dependencies instead")]
        public RepositoryFactory(CacheHelper cacheHelper)
            : this(cacheHelper, LoggerResolver.Current.Logger, SqlSyntaxContext.SqlSyntaxProvider, UmbracoConfig.For.UmbracoSettings())
        {
        }

        [Obsolete("Use the ctor specifying all dependencies instead, NOTE: disableAllCache has zero effect")]
        public RepositoryFactory(bool disableAllCache, CacheHelper cacheHelper)
            : this(cacheHelper, LoggerResolver.Current.Logger, SqlSyntaxContext.SqlSyntaxProvider, UmbracoConfig.For.UmbracoSettings())
        {
            // NOTE(review): the null-check and assignment below are redundant - the chained
            // ctor already performs both. Kept for byte-compatibility.
            if (cacheHelper == null) throw new ArgumentNullException("cacheHelper");
            _cacheHelper = cacheHelper;
        }

        [Obsolete("Use the ctor specifying all dependencies instead")]
        public RepositoryFactory(bool disableAllCache)
            : this(disableAllCache ? CacheHelper.CreateDisabledCacheHelper() : ApplicationContext.Current.ApplicationCache, LoggerResolver.Current.Logger, SqlSyntaxContext.SqlSyntaxProvider, UmbracoConfig.For.UmbracoSettings())
        {
        }

        #endregion

        public virtual IExternalLoginRepository CreateExternalLoginRepository(IDatabaseUnitOfWork uow)
        {
            return new ExternalLoginRepository(uow, _cacheHelper, _logger, _sqlSyntax);
        }

        public virtual IPublicAccessRepository CreatePublicAccessRepository(IDatabaseUnitOfWork uow)
        {
            return new PublicAccessRepository(uow, _cacheHelper, _logger, _sqlSyntax);
        }

        public virtual ITaskRepository CreateTaskRepository(IDatabaseUnitOfWork uow)
        {
            return new TaskRepository(uow,
                CacheHelper.CreateDisabledCacheHelper(), //never cache
                _logger, _sqlSyntax);
        }

        public virtual IAuditRepository CreateAuditRepository(IDatabaseUnitOfWork uow)
        {
            return new AuditRepository(uow,
                CacheHelper.CreateDisabledCacheHelper(), //never cache
                _logger, _sqlSyntax);
        }

        public virtual ITagRepository CreateTagRepository(IDatabaseUnitOfWork uow)
        {
            return new TagRepository(
                uow,
                _cacheHelper, _logger, _sqlSyntax);
        }

        public virtual IContentRepository CreateContentRepository(IDatabaseUnitOfWork uow)
        {
            return new ContentRepository(
                uow,
                _cacheHelper,
                _logger,
                _sqlSyntax,
                CreateContentTypeRepository(uow),
                CreateTemplateRepository(uow),
                CreateTagRepository(uow))
            {
                EnsureUniqueNaming = _settings.Content.EnsureUniqueNaming
            };
        }

        public virtual IContentTypeRepository CreateContentTypeRepository(IDatabaseUnitOfWork uow)
        {
            return new ContentTypeRepository(
                uow,
                _cacheHelper,
                _logger, _sqlSyntax,
                CreateTemplateRepository(uow));
        }

        public virtual IDataTypeDefinitionRepository CreateDataTypeDefinitionRepository(IDatabaseUnitOfWork uow)
        {
            // NOTE(review): _cacheHelper is passed twice - presumably the repository takes
            // two distinct cache parameters; verify against the repository's ctor signature.
            return new DataTypeDefinitionRepository(
                uow,
                _cacheHelper,
                _cacheHelper,
                _logger, _sqlSyntax,
                CreateContentTypeRepository(uow));
        }

        public virtual IDictionaryRepository CreateDictionaryRepository(IDatabaseUnitOfWork uow)
        {
            return new DictionaryRepository(
                uow,
                _cacheHelper,
                _logger,
                _sqlSyntax,
                CreateLanguageRepository(uow));
        }

        public virtual ILanguageRepository CreateLanguageRepository(IDatabaseUnitOfWork uow)
        {
            return new LanguageRepository(
                uow,
                _cacheHelper,
                _logger, _sqlSyntax);
        }

        public virtual IMediaRepository CreateMediaRepository(IDatabaseUnitOfWork uow)
        {
            return new MediaRepository(
                uow,
                _cacheHelper,
                _logger,
                _sqlSyntax,
                CreateMediaTypeRepository(uow),
                CreateTagRepository(uow))
            {
                EnsureUniqueNaming = _settings.Content.EnsureUniqueNaming
            };
        }

        public virtual IMediaTypeRepository CreateMediaTypeRepository(IDatabaseUnitOfWork uow)
        {
            return new MediaTypeRepository(
                uow,
                _cacheHelper,
                _logger, _sqlSyntax);
        }

        public virtual IRelationRepository CreateRelationRepository(IDatabaseUnitOfWork uow)
        {
            return new RelationRepository(
                uow,
                CacheHelper.CreateDisabledCacheHelper(), //never cache
                _logger, _sqlSyntax,
                CreateRelationTypeRepository(uow));
        }

        public virtual IRelationTypeRepository CreateRelationTypeRepository(IDatabaseUnitOfWork uow)
        {
            return new RelationTypeRepository(
                uow,
                CacheHelper.CreateDisabledCacheHelper(), //never cache
                _logger, _sqlSyntax);
        }

        public virtual IScriptRepository CreateScriptRepository(IUnitOfWork uow)
        {
            return new ScriptRepository(uow, new PhysicalFileSystem(SystemDirectories.Scripts), _settings.Content);
        }

        internal virtual IPartialViewRepository CreatePartialViewRepository(IUnitOfWork uow)
        {
            return new PartialViewRepository(uow);
        }

        internal virtual IPartialViewRepository CreatePartialViewMacroRepository(IUnitOfWork uow)
        {
            return new PartialViewMacroRepository(uow);
        }

        public virtual IStylesheetRepository CreateStylesheetRepository(IUnitOfWork uow, IDatabaseUnitOfWork db)
        {
            // NOTE(review): the 'db' parameter is not used here; it is presumably kept for
            // signature compatibility with callers - confirm before removing.
            return new StylesheetRepository(uow, new PhysicalFileSystem(SystemDirectories.Css));
        }

        public virtual ITemplateRepository CreateTemplateRepository(IDatabaseUnitOfWork uow)
        {
            return new TemplateRepository(uow,
                _cacheHelper,
                _logger, _sqlSyntax,
                new PhysicalFileSystem(SystemDirectories.Masterpages),
                new PhysicalFileSystem(SystemDirectories.MvcViews),
                _settings.Templates);
        }

        internal virtual ServerRegistrationRepository CreateServerRegistrationRepository(IDatabaseUnitOfWork uow)
        {
            return new ServerRegistrationRepository(
                uow,
                CacheHelper.CreateDisabledCacheHelper(), //never cache
                _logger, _sqlSyntax);
        }

        public virtual IUserTypeRepository CreateUserTypeRepository(IDatabaseUnitOfWork uow)
        {
            return new UserTypeRepository(
                uow,
                //There's not many user types but we query on users all the time so the result needs to be cached
                _cacheHelper,
                _logger, _sqlSyntax);
        }

        public virtual IUserRepository CreateUserRepository(IDatabaseUnitOfWork uow)
        {
            return new UserRepository(
                uow,
                //Need to cache users - we look up user information more than anything in the back office!
                _cacheHelper,
                _logger, _sqlSyntax,
                CreateUserTypeRepository(uow));
        }

        internal virtual IMacroRepository CreateMacroRepository(IDatabaseUnitOfWork uow)
        {
            return new MacroRepository(uow, _cacheHelper, _logger, _sqlSyntax);
        }

        public virtual IMemberRepository CreateMemberRepository(IDatabaseUnitOfWork uow)
        {
            return new MemberRepository(
                uow,
                _cacheHelper,
                _logger,
                _sqlSyntax,
                CreateMemberTypeRepository(uow),
                CreateMemberGroupRepository(uow),
                CreateTagRepository(uow));
        }

        public virtual IMemberTypeRepository CreateMemberTypeRepository(IDatabaseUnitOfWork uow)
        {
            return new MemberTypeRepository(uow,
                _cacheHelper,
                _logger, _sqlSyntax);
        }

        public virtual IMemberGroupRepository CreateMemberGroupRepository(IDatabaseUnitOfWork uow)
        {
            // NOTE(review): _cacheHelper appears twice here as well - confirm against the
            // MemberGroupRepository ctor signature.
            return new MemberGroupRepository(uow, _cacheHelper, _logger, _sqlSyntax, _cacheHelper);
        }

        public virtual IEntityRepository CreateEntityRepository(IDatabaseUnitOfWork uow)
        {
            return new EntityRepository(uow);
        }

        public IDomainRepository CreateDomainRepository(IDatabaseUnitOfWork uow)
        {
            return new DomainRepository(uow, _cacheHelper, _logger, _sqlSyntax,
                CreateContentRepository(uow),
                CreateLanguageRepository(uow));
        }

        public ITaskTypeRepository CreateTaskTypeRepository(IDatabaseUnitOfWork uow)
        {
            return new TaskTypeRepository(uow,
                CacheHelper.CreateDisabledCacheHelper(), //never cache
                _logger, _sqlSyntax);
        }
    }
}
using J2N.Runtime.CompilerServices;
using J2N.Threading.Atomic;
using Lucene.Net.Diagnostics;
using Lucene.Net.Index;
using Lucene.Net.Index.Extensions;
using Lucene.Net.Support;
using Lucene.Net.Util;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
using AssertionError = Lucene.Net.Diagnostics.AssertionException;
using Console = Lucene.Net.Util.SystemConsole;
using JCG = J2N.Collections.Generic;
#if FEATURE_SERIALIZABLE_EXCEPTIONS
using System.Runtime.Serialization;
#endif

namespace Lucene.Net.Store
{
    /*
     * Licensed to the Apache Software Foundation (ASF) under one or more
     * contributor license agreements.  See the NOTICE file distributed with
     * this work for additional information regarding copyright ownership.
     * The ASF licenses this file to You under the Apache License, Version 2.0
     * (the "License"); you may not use this file except in compliance with
     * the License.  You may obtain a copy of the License at
     *
     *     http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */

    /// <summary>
    /// Enum for controlling hard disk throttling.
    /// Set via <see cref="MockDirectoryWrapper.Throttling"/>
    /// <para/>
    /// WARNING: can make tests very slow.
    /// </summary>
    public enum Throttling
    {
        /// <summary>
        /// Always emulate a slow hard disk. Could be very slow! </summary>
        ALWAYS,

        /// <summary>
        /// Sometimes (2% of the time) emulate a slow hard disk. </summary>
        SOMETIMES,

        /// <summary>
        /// Never throttle output. </summary>
        NEVER
    }

    /// <summary>
    /// This is a Directory Wrapper that adds methods
    /// intended to be used only by unit tests.
    /// It also adds a number of features useful for testing:
    /// <list type="bullet">
    /// <item>
    /// <description>
    /// Instances created by <see cref="LuceneTestCase.NewDirectory()"/> are tracked
    /// to ensure they are disposed by the test.
    /// </description>
    /// </item>
    /// <item>
    /// <description>
    /// When a <see cref="MockDirectoryWrapper"/> is disposed, it will throw an exception if
    /// it has any open files against it (with a stacktrace indicating where
    /// they were opened from).
    /// </description>
    /// </item>
    /// <item>
    /// <description>
    /// When a <see cref="MockDirectoryWrapper"/> is disposed, it runs <see cref="Index.CheckIndex"/> to test if
    /// the index was corrupted.
    /// </description>
    /// </item>
    /// <item>
    /// <description>
    /// <see cref="MockDirectoryWrapper"/> simulates some "features" of Windows, such as
    /// refusing to write/delete to open files.
    /// </description>
    /// </item>
    /// </list>
    /// </summary>
    public class MockDirectoryWrapper : BaseDirectoryWrapper
    {
        // Hard limit on directory size (0 = unlimited); enforced elsewhere in the wrapper.
        internal long maxSize;

        // Max actual bytes used. this is set by MockRAMOutputStream:
        internal long maxUsedSize;
        internal double randomIOExceptionRate;
        internal double randomIOExceptionRateOnOpen;
        internal Random randomState;
        internal bool noDeleteOpenFile = true;
        internal bool assertNoDeleteOpenFile = false;
        internal bool preventDoubleWrite = true;
        internal bool trackDiskUsage = false;
        internal bool wrapLockFactory = true;
        internal bool allowRandomFileNotFoundException = true;
        internal bool allowReadingFilesStillOpenForWrite = false;
        private ISet<string> unSyncedFiles;
        private ISet<string> createdFiles;
        private ISet<string> openFilesForWrite = new JCG.HashSet<string>(StringComparer.Ordinal);
        internal ISet<string> openLocks = new ConcurrentHashSet<string>(StringComparer.Ordinal);
        internal volatile bool crashed;
        private ThrottledIndexOutput throttledOutput;
        private Throttling throttling = Throttling.SOMETIMES;
        protected LockFactory m_lockFactory;

        internal readonly AtomicInt32 inputCloneCount = new AtomicInt32();

        // use this for tracking files for crash.
        // additionally: provides debugging information in case you leave one open
        private readonly ConcurrentDictionary<IDisposable, Exception> openFileHandles = new ConcurrentDictionary<IDisposable, Exception>(IdentityEqualityComparer<IDisposable>.Default);

        // NOTE: we cannot initialize the Map here due to the
        // order in which our constructor actually does this
        // member initialization vs when it calls super.  It seems
        // like super is called, then our members are initialized:
        private IDictionary<string, int> openFiles;

        // Only tracked if noDeleteOpenFile is true: if an attempt
        // is made to delete an open file, we enroll it here.
        private ISet<string> openFilesDeleted;

        // Lazily initializes the tracking collections (see NOTE above on openFiles).
        private void Init()
        {
            lock (this)
            {
                if (openFiles == null)
                {
                    openFiles = new Dictionary<string, int>(StringComparer.Ordinal);
                    openFilesDeleted = new JCG.HashSet<string>(StringComparer.Ordinal);
                }
                if (createdFiles == null)
                {
                    createdFiles = new JCG.HashSet<string>(StringComparer.Ordinal);
                }
                if (unSyncedFiles == null)
                {
                    unSyncedFiles = new JCG.HashSet<string>(StringComparer.Ordinal);
                }
            }
        }

        public MockDirectoryWrapper(Random random, Directory @delegate)
            : base(@delegate)
        {
            // must make a private random since our methods are
            // called from different threads; else test failures may
            // not be reproducible from the original seed
            this.randomState = new Random(random.Next());
            this.throttledOutput = new ThrottledIndexOutput(ThrottledIndexOutput.MBitsToBytes(40 + randomState.Next(10)), 5 + randomState.Next(5), null);
            // force wrapping of lockfactory
            this.m_lockFactory = new MockLockFactoryWrapper(this, @delegate.LockFactory);
            Init();
        }

        public virtual int InputCloneCount => inputCloneCount;

        public virtual bool TrackDiskUsage
        {
            get => trackDiskUsage; // LUCENENET specific - added getter (to follow MSDN property guidelines)
            set => trackDiskUsage = value;
        }

        /// <summary>
        /// If set to true, we throw an <see cref="IOException"/> if the same
        /// file is opened by <see cref="CreateOutput(string, IOContext)"/>, ever.
        /// </summary>
        public virtual bool PreventDoubleWrite
        {
            get => preventDoubleWrite; // LUCENENET specific - added getter (to follow MSDN property guidelines)
            set => preventDoubleWrite = value;
        }

        /// <summary>
        /// If set to true (the default), when we throw random
        /// <see cref="IOException"/> on <see cref="OpenInput(string, IOContext)"/> or
        /// <see cref="CreateOutput(string, IOContext)"/>, we may
        /// sometimes throw <see cref="FileNotFoundException"/>.
/// </summary> public virtual bool AllowRandomFileNotFoundException { get => allowRandomFileNotFoundException; // LUCENENET specific - added getter (to follow MSDN property guidelines) set => allowRandomFileNotFoundException = value; } /// <summary> /// If set to true, you can open an inputstream on a file /// that is still open for writes. /// </summary> public virtual bool AllowReadingFilesStillOpenForWrite { get => allowRandomFileNotFoundException; // LUCENENET specific - added getter (to follow MSDN property guidelines) set => allowReadingFilesStillOpenForWrite = value; } // LUCENENET specific - de-nested Throttling enum public virtual Throttling Throttling { get => throttling; // LUCENENET specific - added getter (to follow MSDN property guidelines) set => throttling = value; } /// <summary> /// Returns true if <see cref="FilterDirectory.m_input"/> must sync its files. /// Currently, only <see cref="NRTCachingDirectory"/> requires sync'ing its files /// because otherwise they are cached in an internal <see cref="RAMDirectory"/>. If /// other directories require that too, they should be added to this method. /// </summary> private bool MustSync() { Directory @delegate = m_input; while (@delegate is FilterDirectory) { @delegate = ((FilterDirectory)@delegate).Delegate; } return @delegate is NRTCachingDirectory; } [MethodImpl(MethodImplOptions.NoInlining)] public override void Sync(ICollection<string> names) { lock (this) { MaybeYield(); MaybeThrowDeterministicException(); if (crashed) { throw new IOException("cannot sync after crash"); } // don't wear out our hardware so much in tests. 
                if (LuceneTestCase.Rarely(randomState) || MustSync())
                {
                    foreach (string name in names)
                    {
                        // randomly fail with IOE on any file
                        MaybeThrowIOException(name);
                        m_input.Sync(new[] { name });
                        unSyncedFiles.Remove(name);
                    }
                }
                else
                {
                    // Pretend the sync happened without touching the underlying directory.
                    unSyncedFiles.RemoveAll(names);
                }
            }
        }

        public long GetSizeInBytes()
        {
            lock (this)
            {
                if (m_input is RAMDirectory)
                {
                    return ((RAMDirectory)m_input).GetSizeInBytes();
                }
                else
                {
                    // hack: sum the on-disk file lengths for non-RAM directories.
                    long size = 0;
                    foreach (string file in m_input.ListAll())
                    {
                        size += m_input.FileLength(file);
                    }
                    return size;
                }
            }
        }

        /// <summary>
        /// Simulates a crash of OS or machine by overwriting
        /// unsynced files.
        /// </summary>
        public virtual void Crash()
        {
            lock (this)
            {
                crashed = true;
                openFiles = new Dictionary<string, int>(StringComparer.Ordinal);
                openFilesForWrite = new JCG.HashSet<string>(StringComparer.Ordinal);
                openFilesDeleted = new JCG.HashSet<string>(StringComparer.Ordinal);
                // The enumerator is taken over the OLD unSyncedFiles set before the field is
                // replaced below, so the iteration still walks the pre-crash contents.
                using (IEnumerator<string> it = unSyncedFiles.GetEnumerator())
                {
                    unSyncedFiles = new JCG.HashSet<string>(StringComparer.Ordinal);
                    // first force-close all files, so we can corrupt on windows etc.
                    // clone the file map, as these guys want to remove themselves on close.
                    var m = new JCG.Dictionary<IDisposable, Exception>(openFileHandles, IdentityEqualityComparer<IDisposable>.Default);
                    foreach (IDisposable f in m.Keys)
                    {
                        try
                        {
                            f.Dispose();
                        }
#pragma warning disable 168
                        catch (Exception ignored)
#pragma warning restore 168
                        {
                            //Debug.WriteLine("Crash(): f.Dispose() FAILED for {0}:\n{1}", f.ToString(), ignored.ToString());
                        }
                    }
                    // For each unsynced file, pick one of five kinds of damage at random.
                    while (it.MoveNext())
                    {
                        string name = it.Current;
                        int damage = randomState.Next(5);
                        string action = null;
                        if (damage == 0)
                        {
                            action = "deleted";
                            DeleteFile(name, true);
                        }
                        else if (damage == 1)
                        {
                            action = "zeroed";
                            // Zero out file entirely
                            long length = FileLength(name);
                            var zeroes = new byte[256];
                            long upto = 0;
                            using (IndexOutput @out = m_input.CreateOutput(name, LuceneTestCase.NewIOContext(randomState)))
                            {
                                while (upto < length)
                                {
                                    var limit = (int)Math.Min(length - upto, zeroes.Length);
                                    @out.WriteBytes(zeroes, 0, limit);
                                    upto += limit;
                                }
                            }
                        }
                        else if (damage == 2)
                        {
                            action = "partially truncated";
                            // Partially Truncate the file:

                            // First, make temp file and copy only half this
                            // file over:
                            string tempFileName;
                            while (true)
                            {
                                tempFileName = "" + randomState.Next();
                                if (!LuceneTestCase.SlowFileExists(m_input, tempFileName))
                                {
                                    break;
                                }
                            }
                            using (IndexOutput tempOut = m_input.CreateOutput(tempFileName, LuceneTestCase.NewIOContext(randomState)))
                            {
                                using (IndexInput ii = m_input.OpenInput(name, LuceneTestCase.NewIOContext(randomState)))
                                {
                                    tempOut.CopyBytes(ii, ii.Length / 2);
                                }
                            }

                            // Delete original and copy bytes back:
                            DeleteFile(name, true);
                            using (IndexOutput @out = m_input.CreateOutput(name, LuceneTestCase.NewIOContext(randomState)))
                            {
                                using (IndexInput ii = m_input.OpenInput(tempFileName, LuceneTestCase.NewIOContext(randomState)))
                                {
                                    @out.CopyBytes(ii, ii.Length);
                                }
                            }
                            DeleteFile(tempFileName, true);
                        }
                        else if (damage == 3)
                        {
                            // The file survived intact:
                            action = "didn't change";
                        }
                        else
                        {
                            action = "fully truncated";
                            // Totally truncate the file to zero bytes
                            DeleteFile(name, true);
                            using (IndexOutput @out = m_input.CreateOutput(name, LuceneTestCase.NewIOContext(randomState)))
                            {
                                @out.Length = 0;
                            }
                        }
                        if (LuceneTestCase.Verbose)
                        {
                            Console.WriteLine("MockDirectoryWrapper: " + action + " unsynced file: " + name);
                        }
                    }
                }
            }
        }

        public virtual void ClearCrash()
        {
            lock (this)
            {
                crashed = false;
                openLocks.Clear();
            }
        }

        public virtual long MaxSizeInBytes
        {
            get => maxSize;
            set => maxSize = value;
        }

        /// <summary>
        /// Returns the peak actual storage used (bytes) in this
        /// directory.
        /// </summary>
        public virtual long MaxUsedSizeInBytes => maxUsedSize;

        public virtual void ResetMaxUsedSizeInBytes()
        {
            this.maxUsedSize = GetRecomputedActualSizeInBytes();
        }

        /// <summary>
        /// Emulate Windows whereby deleting an open file is not
        /// allowed (raise <see cref="IOException"/>).
        /// </summary>
        public virtual bool NoDeleteOpenFile
        {
            get => noDeleteOpenFile;
            set => noDeleteOpenFile = value;
        }

        /// <summary>
        /// Trip a test assert if there is an attempt
        /// to delete an open file.
        /// </summary>
        public virtual bool AssertNoDeleteOpenFile
        {
            get => assertNoDeleteOpenFile;
            set => assertNoDeleteOpenFile = value;
        }

        /// <summary>
        /// If 0.0, no exceptions will be thrown.  Else this should
        /// be a double 0.0 - 1.0.  We will randomly throw an
        /// <see cref="IOException"/> on the first write to a <see cref="Stream"/> based
        /// on this probability.
        /// </summary>
        public virtual double RandomIOExceptionRate
        {
            get => randomIOExceptionRate;
            set => randomIOExceptionRate = value;
        }

        /// <summary>
        /// If 0.0, no exceptions will be thrown during <see cref="OpenInput(string, IOContext)"/>
        /// and <see cref="CreateOutput(string, IOContext)"/>.  Else this should
        /// be a double 0.0 - 1.0 and we will randomly throw an
        /// <see cref="IOException"/> in <see cref="OpenInput(string, IOContext)"/> and <see cref="CreateOutput(string, IOContext)"/> with
        /// this probability.
        /// </summary>
        public virtual double RandomIOExceptionRateOnOpen
        {
            get => randomIOExceptionRateOnOpen;
            set => randomIOExceptionRateOnOpen = value;
        }

        // Throws a random IOException with probability randomIOExceptionRate.
        internal virtual void MaybeThrowIOException(string message)
        {
            if (randomState.NextDouble() < randomIOExceptionRate)
            {
                if (LuceneTestCase.Verbose)
                {
                    Console.WriteLine(Thread.CurrentThread.Name + ": MockDirectoryWrapper: now throw random exception" + (message == null ? "" : " (" + message + ")"));
                }
                throw new IOException("a random IOException" + (message == null ? "" : " (" + message + ")"));
            }
        }

        // Throws a random IOException (or, when allowed, FileNotFound/DirectoryNotFound)
        // with probability randomIOExceptionRateOnOpen, used by OpenInput/CreateOutput.
        internal virtual void MaybeThrowIOExceptionOnOpen(string name)
        {
            if (randomState.NextDouble() < randomIOExceptionRateOnOpen)
            {
                if (LuceneTestCase.Verbose)
                {
                    Console.WriteLine(Thread.CurrentThread.Name + ": MockDirectoryWrapper: now throw random exception during open file=" + name);
                }
                if (allowRandomFileNotFoundException == false || randomState.NextBoolean())
                {
                    throw new IOException("a random IOException (" + name + ")");
                }
                else
                {
                    throw randomState.NextBoolean() ?
                        (IOException)new FileNotFoundException("a random IOException (" + name + ")") :
                        new DirectoryNotFoundException("a random IOException (" + name + ")");
                }
            }
        }

        [MethodImpl(MethodImplOptions.NoInlining)]
        public override void DeleteFile(string name)
        {
            lock (this)
            {
                MaybeYield();
                DeleteFile(name, false);
            }
        }

        // if there are any exceptions in OpenFileHandles
        // capture those as inner exceptions
        private Exception WithAdditionalErrorInformation(Exception t, string name, bool input)
        {
            lock (this)
            {
                foreach (var ent in openFileHandles)
                {
                    if (input && ent.Key is MockIndexInputWrapper && ((MockIndexInputWrapper)ent.Key).name.Equals(name, StringComparison.Ordinal))
                    {
                        t = CreateException(t, ent.Value);
                        break;
                    }
                    else if (!input && ent.Key is MockIndexOutputWrapper && ((MockIndexOutputWrapper)ent.Key).name.Equals(name, StringComparison.Ordinal))
                    {
                        t = CreateException(t, ent.Value);
                        break;
                    }
                }
                return t;
            }
        }

        // Rebuilds 'exception' with 'innerException' attached, preserving the concrete type.
        // NOTE(review): relies on the exception type exposing a (string, Exception) ctor.
        private Exception CreateException(Exception exception, Exception innerException)
        {
            return (Exception)Activator.CreateInstance(exception.GetType(), exception.Message, innerException);
        }

        // Randomly yields the current thread to shake out concurrency bugs.
        private void MaybeYield()
        {
            if (randomState.NextBoolean())
            {
#if !FEATURE_THREAD_YIELD
                Thread.Sleep(0);
#else
                Thread.Yield();
#endif
            }
        }

        [MethodImpl(MethodImplOptions.NoInlining)]
        private void DeleteFile(string name, bool forced)
        {
            lock (this)
            {
                MaybeYield();
                MaybeThrowDeterministicException();
                if (crashed && !forced)
                {
                    throw new IOException("cannot delete after crash");
                }
                if (unSyncedFiles.Contains(name))
                {
                    unSyncedFiles.Remove(name);
                }
                if (!forced && (noDeleteOpenFile || assertNoDeleteOpenFile))
                {
                    if (openFiles.ContainsKey(name))
                    {
                        openFilesDeleted.Add(name);
                        if (!assertNoDeleteOpenFile)
                        {
                            throw WithAdditionalErrorInformation(new IOException("MockDirectoryWrapper: file \"" + name + "\" is still open: cannot delete"), name, true);
                        }
                        else
                        {
                            throw WithAdditionalErrorInformation(new AssertionError("MockDirectoryWrapper: file \"" + name + "\" is still open: cannot delete"), name, true);
                        }
                    }
                    else
                    {
                        openFilesDeleted.Remove(name);
                    }
                }
                m_input.DeleteFile(name);
            }
        }

        public virtual ICollection<string> GetOpenDeletedFiles()
        {
            lock (this)
            {
                // Return a snapshot so callers cannot mutate internal state.
                return new JCG.HashSet<string>(openFilesDeleted, StringComparer.Ordinal);
            }
        }

        private bool failOnCreateOutput = true;

        public virtual bool FailOnCreateOutput
        {
            get => failOnCreateOutput; // LUCENENET specific - added getter (to follow MSDN property guidelines)
            set => failOnCreateOutput = value;
        }

        public override IndexOutput CreateOutput(string name, IOContext context)
        {
            lock (this)
            {
                MaybeThrowDeterministicException();
                MaybeThrowIOExceptionOnOpen(name);
                MaybeYield();
                if (failOnCreateOutput)
                {
                    MaybeThrowDeterministicException();
                }
                if (crashed)
                {
                    throw new IOException("cannot createOutput after crash");
                }
                Init();
                lock (this)
                {
                    // Enforce write-once semantics (segments.gen is legitimately rewritten).
                    if (preventDoubleWrite && createdFiles.Contains(name) && !name.Equals("segments.gen", StringComparison.Ordinal))
                    {
                        throw new IOException("file \"" + name + "\" was already written to");
                    }
                }
                if ((noDeleteOpenFile || assertNoDeleteOpenFile) && openFiles.ContainsKey(name))
                {
                    if (!assertNoDeleteOpenFile)
                    {
                        throw new IOException("MockDirectoryWrapper: file \"" + name + "\" is still open: cannot overwrite");
                    }
                    else
                    {
                        throw new AssertionError("MockDirectoryWrapper: file \"" + name + "\" is still open: cannot overwrite");
                    }
                }
                if (crashed)
                {
                    throw new IOException("cannot createOutput after crash");
                }
                unSyncedFiles.Add(name);
                createdFiles.Add(name);
                if (m_input is RAMDirectory)
                {
                    RAMDirectory ramdir = (RAMDirectory)m_input;
                    RAMFile file = new RAMFile(ramdir);
                    ramdir.m_fileMap.TryGetValue(name, out RAMFile existing);

                    // Enforce write once:
                    if (existing != null && !name.Equals("segments.gen", StringComparison.Ordinal) && preventDoubleWrite)
                    {
                        throw new IOException("file " + name + " already exists");
                    }
                    else
                    {
                        if (existing != null)
                        {
                            ramdir.m_sizeInBytes.AddAndGet(-existing.GetSizeInBytes()); // LUCENENET: GetAndAdd in Lucene, but we are not using the value
                            existing.directory = null;
                        }
                        ramdir.m_fileMap[name] = file;
                    }
                }
                //System.out.println(Thread.currentThread().getName() + ": MDW: create " + name);
                IndexOutput delegateOutput = m_input.CreateOutput(name, LuceneTestCase.NewIOContext(randomState, context));
                if (randomState.Next(10) == 0)
                {
                    // once in a while wrap the IO in a Buffered IO with random buffer sizes
                    delegateOutput = new BufferedIndexOutputWrapper(1 + randomState.Next(BufferedIndexOutput.DEFAULT_BUFFER_SIZE), delegateOutput);
                }
                IndexOutput io = new MockIndexOutputWrapper(this, delegateOutput, name);
                AddFileHandle(io, name, Handle.Output);
                openFilesForWrite.Add(name);

                // throttling REALLY slows down tests, so don't do it very often for SOMETIMES.
                // NOTE(review): '&&' binds tighter than '||', so the RateLimitedDirectoryWrapper
                // guard only applies to the SOMETIMES term and ALWAYS throttles unconditionally -
                // confirm this is the intended precedence.
                if (throttling == Throttling.ALWAYS || (throttling == Throttling.SOMETIMES && randomState.Next(50) == 0) && !(m_input is RateLimitedDirectoryWrapper))
                {
                    if (LuceneTestCase.Verbose)
                    {
                        Console.WriteLine("MockDirectoryWrapper: throttling indexOutput (" + name + ")");
                    }
                    return throttledOutput.NewFromDelegate(io);
                }
                else
                {
                    return io;
                }
            }
        }

        internal enum Handle
        {
            Input,
            Output,
            Slice
        }

        // Records an open file handle (with a capture of where it was opened from)
        // and bumps the per-name open count.
        internal void AddFileHandle(IDisposable c, string name, Handle handle)
        {
            //Trace.TraceInformation("Add {0} {1}", c, name);
            lock (this)
            {
                if (openFiles.TryGetValue(name, out int v))
                {
                    v++;
                    //Debug.WriteLine("Add {0} - {1} - {2}", c, name, v);
                    openFiles[name] = v;
                }
                else
                {
                    //Debug.WriteLine("Add {0} - {1} - {2}", c, name, 1);
                    openFiles[name] = 1;
                }
                // The Exception carries the stacktrace of the open site for leak diagnostics.
                openFileHandles[c] = new Exception("unclosed Index" + handle.ToString() + ": " + name);
            }
        }

        private bool failOnOpenInput = true;

        public virtual bool FailOnOpenInput
        {
            get => failOnOpenInput; // LUCENENET specific - added getter (to follow MSDN property guidelines)
            set => failOnOpenInput = value;
        }

        public override IndexInput OpenInput(string name, IOContext context)
        {
            lock (this)
            {
                MaybeThrowDeterministicException();
                MaybeThrowIOExceptionOnOpen(name);
                MaybeYield();
                if (failOnOpenInput)
                {
                    MaybeThrowDeterministicException();
                }
                if (!LuceneTestCase.SlowFileExists(m_input, name))
                {
                    throw randomState.NextBoolean() ?
                        (IOException)new FileNotFoundException(name + " in dir=" + m_input) :
                        new DirectoryNotFoundException(name + " in dir=" + m_input);
                }

                // cannot open a file for input if it's still open for
                // output, except for segments.gen and segments_N
                if (!allowReadingFilesStillOpenForWrite && openFilesForWrite.Contains(name) && !name.StartsWith("segments", StringComparison.Ordinal))
                {
                    throw WithAdditionalErrorInformation(new IOException("MockDirectoryWrapper: file \"" + name + "\" is still open for writing"), name, false);
                }

                IndexInput delegateInput = m_input.OpenInput(name, LuceneTestCase.NewIOContext(randomState, context));

                IndexInput ii;
                // Occasionally wrap the input with a slow-closing or slow-opening wrapper.
                int randomInt = randomState.Next(500);
                if (randomInt == 0)
                {
                    if (LuceneTestCase.Verbose)
                    {
                        Console.WriteLine("MockDirectoryWrapper: using SlowClosingMockIndexInputWrapper for file " + name);
                    }
                    ii = new SlowClosingMockIndexInputWrapper(this, name, delegateInput);
                }
                else if (randomInt == 1)
                {
                    if (LuceneTestCase.Verbose)
                    {
                        Console.WriteLine("MockDirectoryWrapper: using SlowOpeningMockIndexInputWrapper for file " + name);
                    }
                    ii = new SlowOpeningMockIndexInputWrapper(this, name, delegateInput);
                }
                else
                {
                    ii = new MockIndexInputWrapper(this, name, delegateInput);
                }
                AddFileHandle(ii, name, Handle.Input);
                return ii;
            }
        }

        /// <summary>
        /// Provided for testing purposes.  Use <see cref="GetSizeInBytes()"/> instead. </summary>
        public long GetRecomputedSizeInBytes()
        {
            lock (this)
            {
                if (!(m_input is RAMDirectory))
                {
                    return GetSizeInBytes();
                }
                long size = 0;
                foreach (RAMFile file in ((RAMDirectory)m_input).m_fileMap.Values)
                {
                    size += file.GetSizeInBytes();
                }
                return size;
            }
        }

        /// <summary>
        /// Like <see cref="GetRecomputedSizeInBytes()"/>, but, uses actual file
        /// lengths rather than buffer allocations (which are
        /// quantized up to nearest
        /// <see cref="RAMOutputStream.BUFFER_SIZE"/> (now 1024) bytes.
/// </summary> public long GetRecomputedActualSizeInBytes() { lock (this) { if (!(m_input is RAMDirectory)) { return GetSizeInBytes(); } long size = 0; foreach (RAMFile file in ((RAMDirectory)m_input).m_fileMap.Values) { size += file.Length; } return size; } } // NOTE: this is off by default; see LUCENE-5574 private bool assertNoUnreferencedFilesOnClose; public virtual bool AssertNoUnreferencedFilesOnClose // LUCENENET TODO: Rename AssertNoUnreferencedFilesOnDispose ? { get => assertNoUnreferencedFilesOnClose; // LUCENENET specific - added getter (to follow MSDN property guidelines) set => assertNoUnreferencedFilesOnClose = value; } /// <summary> /// Set to false if you want to return the pure lockfactory /// and not wrap it with <see cref="MockLockFactoryWrapper"/>. /// <para/> /// Be careful if you turn this off: <see cref="MockDirectoryWrapper"/> might /// no longer be able to detect if you forget to close an <see cref="IndexWriter"/>, /// and spit out horribly scary confusing exceptions instead of /// simply telling you that. /// </summary> public virtual bool WrapLockFactory { get => wrapLockFactory; // LUCENENET specific - added getter (to follow MSDN property guidelines) set => wrapLockFactory = value; } protected override void Dispose(bool disposing) { lock (this) { if (disposing) { // files that we tried to delete, but couldn't because readers were open. // all that matters is that we tried! 
(they will eventually go away) ISet<string> pendingDeletions = new JCG.HashSet<string>(openFilesDeleted, StringComparer.Ordinal); MaybeYield(); if (openFiles == null) { openFiles = new Dictionary<string, int>(StringComparer.Ordinal); openFilesDeleted = new JCG.HashSet<string>(StringComparer.Ordinal); } if (openFiles.Count > 0) { // print the first one as its very verbose otherwise Exception cause = openFileHandles.Values.FirstOrDefault(); // RuntimeException instead ofIOException because // super() does not throw IOException currently: throw new Exception("MockDirectoryWrapper: cannot close: there are still open files: " + Collections.ToString(openFiles), cause); } if (openLocks.Count > 0) { throw new Exception("MockDirectoryWrapper: cannot close: there are still open locks: " + Collections.ToString(openLocks)); } IsOpen = false; if (CheckIndexOnDispose) { randomIOExceptionRate = 0.0; randomIOExceptionRateOnOpen = 0.0; if (DirectoryReader.IndexExists(this)) { if (LuceneTestCase.Verbose) { Console.WriteLine("\nNOTE: MockDirectoryWrapper: now crush"); } Crash(); // corrupt any unsynced-files if (LuceneTestCase.Verbose) { Console.WriteLine("\nNOTE: MockDirectoryWrapper: now run CheckIndex"); } TestUtil.CheckIndex(this, CrossCheckTermVectorsOnDispose); // TODO: factor this out / share w/ TestIW.assertNoUnreferencedFiles if (assertNoUnreferencedFilesOnClose) { // now look for unreferenced files: discount ones that we tried to delete but could not ISet<string> allFiles = new JCG.HashSet<string>(ListAll()); allFiles.ExceptWith(pendingDeletions); string[] startFiles = allFiles.ToArray(/*new string[0]*/); IndexWriterConfig iwc = new IndexWriterConfig(LuceneTestCase.TEST_VERSION_CURRENT, null); iwc.SetIndexDeletionPolicy(NoDeletionPolicy.INSTANCE); new IndexWriter(m_input, iwc).Rollback(); string[] endFiles = m_input.ListAll(); ISet<string> startSet = new JCG.SortedSet<string>(startFiles, StringComparer.Ordinal); ISet<string> endSet = new JCG.SortedSet<string>(endFiles, 
StringComparer.Ordinal); if (pendingDeletions.Contains("segments.gen") && endSet.Contains("segments.gen")) { // this is possible if we hit an exception while writing segments.gen, we try to delete it // and it ends out in pendingDeletions (but IFD wont remove this). startSet.Add("segments.gen"); if (LuceneTestCase.Verbose) { Console.WriteLine("MDW: Unreferenced check: Ignoring segments.gen that we could not delete."); } } // its possible we cannot delete the segments_N on windows if someone has it open and // maybe other files too, depending on timing. normally someone on windows wouldnt have // an issue (IFD would nuke this stuff eventually), but we pass NoDeletionPolicy... foreach (string file in pendingDeletions) { if (file.StartsWith("segments", StringComparison.Ordinal) && !file.Equals("segments.gen", StringComparison.Ordinal) && endSet.Contains(file)) { startSet.Add(file); if (LuceneTestCase.Verbose) { Console.WriteLine("MDW: Unreferenced check: Ignoring segments file: " + file + " that we could not delete."); } SegmentInfos sis = new SegmentInfos(); try { sis.Read(m_input, file); } #pragma warning disable 168 catch (IOException ioe) #pragma warning restore 168 { // OK: likely some of the .si files were deleted } try { ISet<string> ghosts = new JCG.HashSet<string>(sis.GetFiles(m_input, false)); foreach (string s in ghosts) { if (endSet.Contains(s) && !startSet.Contains(s)) { if (Debugging.AssertsEnabled) Debugging.Assert(pendingDeletions.Contains(s)); if (LuceneTestCase.Verbose) { Console.WriteLine("MDW: Unreferenced check: Ignoring referenced file: " + s + " " + "from " + file + " that we could not delete."); } startSet.Add(s); } } } catch (Exception t) { Console.Error.WriteLine("ERROR processing leftover segments file " + file + ":"); Console.WriteLine(t.ToString()); } } } startFiles = startSet.ToArray(/*new string[0]*/); endFiles = endSet.ToArray(/*new string[0]*/); if (!Arrays.Equals(startFiles, endFiles)) { IList<string> removed = new List<string>(); 
foreach (string fileName in startFiles) { if (!endSet.Contains(fileName)) { removed.Add(fileName); } } IList<string> added = new List<string>(); foreach (string fileName in endFiles) { if (!startSet.Contains(fileName)) { added.Add(fileName); } } string extras; if (removed.Count != 0) { extras = "\n\nThese files were removed: " + Collections.ToString(removed); } else { extras = ""; } if (added.Count != 0) { extras += "\n\nThese files were added (waaaaaaaaaat!): " + Collections.ToString(added); } if (pendingDeletions.Count != 0) { extras += "\n\nThese files we had previously tried to delete, but couldn't: " + pendingDeletions; } if (Debugging.AssertsEnabled) Debugging.Assert(false, () => "unreferenced files: before delete:\n " + Arrays.ToString(startFiles) + "\n after delete:\n " + Arrays.ToString(endFiles) + extras); } DirectoryReader ir1 = DirectoryReader.Open(this); int numDocs1 = ir1.NumDocs; ir1.Dispose(); (new IndexWriter(this, new IndexWriterConfig(LuceneTestCase.TEST_VERSION_CURRENT, null))).Dispose(); DirectoryReader ir2 = DirectoryReader.Open(this); int numDocs2 = ir2.NumDocs; ir2.Dispose(); if (Debugging.AssertsEnabled) Debugging.Assert(numDocs1 == numDocs2, () => "numDocs changed after opening/closing IW: before=" + numDocs1 + " after=" + numDocs2); } } } m_input.Dispose(); // LUCENENET TODO: using blocks in this entire class } } } internal virtual void RemoveOpenFile(IDisposable c, string name) { //Trace.TraceInformation("Rem {0} {1}", c, name); lock (this) { if (openFiles.TryGetValue(name, out int v)) { if (v == 1) { //Debug.WriteLine("RemoveOpenFile OpenFiles.Remove {0} - {1}", c, name); openFiles.Remove(name); } else { v--; openFiles[name] = v; //Debug.WriteLine("RemoveOpenFile OpenFiles DECREMENT {0} - {1} - {2}", c, name, v); } } openFileHandles.TryRemove(c, out Exception _); } } public virtual void RemoveIndexOutput(IndexOutput @out, string name) { lock (this) { openFilesForWrite.Remove(name); RemoveOpenFile(@out, name); } } public virtual void 
RemoveIndexInput(IndexInput @in, string name) { lock (this) { RemoveOpenFile(@in, name); } } // LUCENENET specific - de-nested Failure internal List<Failure> failures; /// <summary> /// Add a <see cref="Failure"/> object to the list of objects to be evaluated /// at every potential failure point. /// </summary> public virtual void FailOn(Failure fail) { lock (this) { if (failures == null) { failures = new List<Failure>(); } failures.Add(fail); } } /// <summary> /// Iterate through the failures list, giving each object a /// chance to throw an <see cref="IOException"/>. /// </summary> internal virtual void MaybeThrowDeterministicException() { lock (this) { if (failures != null) { for (int i = 0; i < failures.Count; i++) { failures[i].Eval(this); } } } } public override string[] ListAll() { lock (this) { MaybeYield(); return m_input.ListAll(); } } [Obsolete("this method will be removed in 5.0")] public override bool FileExists(string name) { lock (this) { MaybeYield(); return m_input.FileExists(name); } } public override long FileLength(string name) { lock (this) { MaybeYield(); return m_input.FileLength(name); } } public override Lock MakeLock(string name) { lock (this) { MaybeYield(); return LockFactory.MakeLock(name); } } public override void ClearLock(string name) { lock (this) { MaybeYield(); LockFactory.ClearLock(name); } } public override void SetLockFactory(LockFactory lockFactory) { lock (this) { MaybeYield(); // sneaky: we must pass the original this way to the dir, because // some impls (e.g. FSDir) do instanceof here. 
m_input.SetLockFactory(lockFactory); // now set our wrapped factory here this.m_lockFactory = new MockLockFactoryWrapper(this, lockFactory); } } public override LockFactory LockFactory { get { lock (this) { MaybeYield(); if (wrapLockFactory) { return m_lockFactory; } else { return m_input.LockFactory; } } } } public override string GetLockID() { lock (this) { MaybeYield(); return m_input.GetLockID(); } } public override void Copy(Directory to, string src, string dest, IOContext context) { lock (this) { MaybeYield(); // randomize the IOContext here? m_input.Copy(to, src, dest, context); } } public override IndexInputSlicer CreateSlicer(string name, IOContext context) { MaybeYield(); if (!LuceneTestCase.SlowFileExists(m_input, name)) { throw randomState.NextBoolean() ? (IOException)new FileNotFoundException(name) : new DirectoryNotFoundException(name); } // cannot open a file for input if it's still open for // output, except for segments.gen and segments_N if (openFilesForWrite.Contains(name) && !name.StartsWith("segments", StringComparison.Ordinal)) { throw WithAdditionalErrorInformation(new IOException("MockDirectoryWrapper: file \"" + name + "\" is still open for writing"), name, false); } IndexInputSlicer delegateHandle = m_input.CreateSlicer(name, context); IndexInputSlicer handle = new IndexInputSlicerAnonymousInnerClassHelper(this, name, delegateHandle); AddFileHandle(handle, name, Handle.Slice); return handle; } private class IndexInputSlicerAnonymousInnerClassHelper : IndexInputSlicer { private readonly MockDirectoryWrapper outerInstance; private string name; private IndexInputSlicer delegateHandle; public IndexInputSlicerAnonymousInnerClassHelper(MockDirectoryWrapper outerInstance, string name, IndexInputSlicer delegateHandle) { this.outerInstance = outerInstance; this.name = name; this.delegateHandle = delegateHandle; } private int disposed = 0; protected override void Dispose(bool disposing) { if (0 == Interlocked.CompareExchange(ref this.disposed, 1, 
0)) { if (disposing) { delegateHandle.Dispose(); outerInstance.RemoveOpenFile(this, name); } } } public override IndexInput OpenSlice(string sliceDescription, long offset, long length) { outerInstance.MaybeYield(); IndexInput ii = new MockIndexInputWrapper(outerInstance, name, delegateHandle.OpenSlice(sliceDescription, offset, length)); outerInstance.AddFileHandle(ii, name, Handle.Input); return ii; } [Obsolete("Only for reading CFS files from 3.x indexes.")] public override IndexInput OpenFullSlice() { outerInstance.MaybeYield(); IndexInput ii = new MockIndexInputWrapper(outerInstance, name, delegateHandle.OpenFullSlice()); outerInstance.AddFileHandle(ii, name, Handle.Input); return ii; } } internal sealed class BufferedIndexOutputWrapper : BufferedIndexOutput { private readonly IndexOutput io; public BufferedIndexOutputWrapper(int bufferSize, IndexOutput io) : base(bufferSize) { this.io = io; } public override long Length => io.Length; protected internal override void FlushBuffer(byte[] b, int offset, int len) { io.WriteBytes(b, offset, len); } [Obsolete("(4.1) this method will be removed in Lucene 5.0")] public override void Seek(long pos) { Flush(); io.Seek(pos); } public override void Flush() { try { base.Flush(); } finally { io.Flush(); } } protected override void Dispose(bool disposing) { if (disposing) { try { base.Dispose(disposing); } finally { io.Dispose(); } } } } // LUCENENET specific - de-nested FakeIOException } /// <summary> /// Objects that represent fail-able conditions. Objects of a derived /// class are created and registered with the mock directory. After /// register, each object will be invoked once for each first write /// of a file, giving the object a chance to throw an <see cref="IOException"/>. /// </summary> public class Failure { /// <summary> /// Eval is called on the first write of every new file. 
/// </summary> public virtual void Eval(MockDirectoryWrapper dir) { } /// <summary> /// Reset should set the state of the failure to its default /// (freshly constructed) state. Reset is convenient for tests /// that want to create one failure object and then reuse it in /// multiple cases. This, combined with the fact that <see cref="Failure"/> /// subclasses are often anonymous classes makes reset difficult to /// do otherwise. /// <para/> /// A typical example of use is /// <code> /// Failure failure = new Failure() { ... }; /// ... /// mock.FailOn(failure.Reset()) /// </code> /// </summary> public virtual Failure Reset() { return this; } protected internal bool m_doFail; public virtual void SetDoFail() { m_doFail = true; } public virtual void ClearDoFail() { m_doFail = false; } } /// <summary> /// Use this when throwing fake <see cref="IOException"/>, /// e.g. from <see cref="Failure"/>. /// </summary> // LUCENENET: It is no longer good practice to use binary serialization. // See: https://github.com/dotnet/corefx/issues/23584#issuecomment-325724568 #if FEATURE_SERIALIZABLE_EXCEPTIONS [Serializable] #endif public class FakeIOException : IOException { public FakeIOException() { } // LUCENENET specific - added public constructor for serialization #if FEATURE_SERIALIZABLE_EXCEPTIONS /// <summary> /// Initializes a new instance of this class with serialized data. /// </summary> /// <param name="info">The <see cref="SerializationInfo"/> that holds the serialized object data about the exception being thrown.</param> /// <param name="context">The <see cref="StreamingContext"/> that contains contextual information about the source or destination.</param> protected FakeIOException(SerializationInfo info, StreamingContext context) : base(info, context) { } #endif } }
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

/******************************************************************************
 * This file is auto-generated from a template file by the GenerateTests.csx  *
 * script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make    *
 * changes, please update the corresponding template and run according to the *
 * directions listed in the file.                                             *
 ******************************************************************************/

using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;

namespace JIT.HardwareIntrinsics.X86
{
    public static partial class Program
    {
        // Entry point for the Sse2.CompareEqual(Vector128<double>, Vector128<double>) test.
        // Runs every scenario when SSE2 is supported; otherwise verifies the intrinsic
        // throws PlatformNotSupportedException. Throws if any scenario fails.
        private static void CompareEqualDouble()
        {
            var test = new SimpleBinaryOpTest__CompareEqualDouble();

            if (test.IsSupported)
            {
                // Validates basic functionality works, using Unsafe.Read
                test.RunBasicScenario_UnsafeRead();

                if (Sse2.IsSupported)
                {
                    // Validates basic functionality works, using Load
                    test.RunBasicScenario_Load();

                    // Validates basic functionality works, using LoadAligned
                    test.RunBasicScenario_LoadAligned();
                }

                // Validates calling via reflection works, using Unsafe.Read
                test.RunReflectionScenario_UnsafeRead();

                if (Sse2.IsSupported)
                {
                    // Validates calling via reflection works, using Load
                    test.RunReflectionScenario_Load();

                    // Validates calling via reflection works, using LoadAligned
                    test.RunReflectionScenario_LoadAligned();
                }

                // Validates passing a static member works
                test.RunClsVarScenario();

                // Validates passing a local works, using Unsafe.Read
                test.RunLclVarScenario_UnsafeRead();

                if (Sse2.IsSupported)
                {
                    // Validates passing a local works, using Load
                    test.RunLclVarScenario_Load();

                    // Validates passing a local works, using LoadAligned
                    test.RunLclVarScenario_LoadAligned();
                }

                // Validates passing the field of a local works
                test.RunLclFldScenario();

                // Validates passing an instance member works
                test.RunFldScenario();
            }
            else
            {
                // Validates we throw on unsupported hardware
                test.RunUnsupportedScenario();
            }

            if (!test.Succeeded)
            {
                throw new Exception("One or more scenarios did not complete as expected.");
            }
        }
    }

    // Test harness for Sse2.CompareEqual over Vector128<double> (CMPEQPD). Each scenario
    // invokes the intrinsic through a different code path (direct, reflection, loads,
    // locals, fields) and validates the element-wise all-ones/all-zeros result mask.
    public sealed unsafe class SimpleBinaryOpTest__CompareEqualDouble
    {
        // 16 bytes = one 128-bit XMM register.
        private const int VectorSize = 16;

        private const int Op1ElementCount = VectorSize / sizeof(Double);
        private const int Op2ElementCount = VectorSize / sizeof(Double);
        private const int RetElementCount = VectorSize / sizeof(Double);

        // Shared static scratch arrays: filled by the static ctor for _clsVar1/_clsVar2,
        // then refilled by the instance ctor for the fields and the data table.
        private static Double[] _data1 = new Double[Op1ElementCount];
        private static Double[] _data2 = new Double[Op2ElementCount];

        private static Vector128<Double> _clsVar1;
        private static Vector128<Double> _clsVar2;

        private Vector128<Double> _fld1;
        private Vector128<Double> _fld2;

        // NOTE(review): SimpleBinaryOpTest__DataTable is defined in the shared test
        // helpers (out of view here); presumably it pins the arrays and exposes the
        // in/out pointers — confirm it guarantees alignment for the LoadAligned scenarios.
        private SimpleBinaryOpTest__DataTable<Double, Double, Double> _dataTable;

        static SimpleBinaryOpTest__CompareEqualDouble()
        {
            var random = new Random();

            // Seed the static operands and snapshot them into the class-level vectors.
            for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = (double)(random.NextDouble()); }
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Double>, byte>(ref _clsVar1), ref Unsafe.As<Double, byte>(ref _data1[0]), VectorSize);
            for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = (double)(random.NextDouble()); }
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Double>, byte>(ref _clsVar2), ref Unsafe.As<Double, byte>(ref _data2[0]), VectorSize);
        }

        public SimpleBinaryOpTest__CompareEqualDouble()
        {
            Succeeded = true;

            var random = new Random();

            // Seed the instance-field operands.
            for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = (double)(random.NextDouble()); }
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Double>, byte>(ref _fld1), ref Unsafe.As<Double, byte>(ref _data1[0]), VectorSize);
            for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = (double)(random.NextDouble()); }
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector128<Double>, byte>(ref _fld2), ref Unsafe.As<Double, byte>(ref _data2[0]), VectorSize);

            // Fresh values for the pointer-based data table used by the remaining scenarios.
            for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = (double)(random.NextDouble()); }
            for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = (double)(random.NextDouble()); }
            _dataTable = new SimpleBinaryOpTest__DataTable<Double, Double, Double>(_data1, _data2, new Double[RetElementCount], VectorSize);
        }

        public bool IsSupported => Sse2.IsSupported;

        public bool Succeeded { get; set; }

        public void RunBasicScenario_UnsafeRead()
        {
            var result = Sse2.CompareEqual(
                Unsafe.Read<Vector128<Double>>(_dataTable.inArray1Ptr),
                Unsafe.Read<Vector128<Double>>(_dataTable.inArray2Ptr)
            );

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
        }

        public void RunBasicScenario_Load()
        {
            var result = Sse2.CompareEqual(
                Sse2.LoadVector128((Double*)(_dataTable.inArray1Ptr)),
                Sse2.LoadVector128((Double*)(_dataTable.inArray2Ptr))
            );

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
        }

        public void RunBasicScenario_LoadAligned()
        {
            var result = Sse2.CompareEqual(
                Sse2.LoadAlignedVector128((Double*)(_dataTable.inArray1Ptr)),
                Sse2.LoadAlignedVector128((Double*)(_dataTable.inArray2Ptr))
            );

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
        }

        public void RunReflectionScenario_UnsafeRead()
        {
            var result = typeof(Sse2).GetMethod(nameof(Sse2.CompareEqual), new Type[] { typeof(Vector128<Double>), typeof(Vector128<Double>) })
                                     .Invoke(null, new object[] {
                                         Unsafe.Read<Vector128<Double>>(_dataTable.inArray1Ptr),
                                         Unsafe.Read<Vector128<Double>>(_dataTable.inArray2Ptr)
                                     });

            Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Double>)(result));
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
        }

        public void RunReflectionScenario_Load()
        {
            var result = typeof(Sse2).GetMethod(nameof(Sse2.CompareEqual), new Type[] { typeof(Vector128<Double>), typeof(Vector128<Double>) })
                                     .Invoke(null, new object[] {
                                         Sse2.LoadVector128((Double*)(_dataTable.inArray1Ptr)),
                                         Sse2.LoadVector128((Double*)(_dataTable.inArray2Ptr))
                                     });

            Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Double>)(result));
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
        }

        public void RunReflectionScenario_LoadAligned()
        {
            var result = typeof(Sse2).GetMethod(nameof(Sse2.CompareEqual), new Type[] { typeof(Vector128<Double>), typeof(Vector128<Double>) })
                                     .Invoke(null, new object[] {
                                         Sse2.LoadAlignedVector128((Double*)(_dataTable.inArray1Ptr)),
                                         Sse2.LoadAlignedVector128((Double*)(_dataTable.inArray2Ptr))
                                     });

            Unsafe.Write(_dataTable.outArrayPtr, (Vector128<Double>)(result));
            ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
        }

        public void RunClsVarScenario()
        {
            var result = Sse2.CompareEqual(
                _clsVar1,
                _clsVar2
            );

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
        }

        public void RunLclVarScenario_UnsafeRead()
        {
            var left = Unsafe.Read<Vector128<Double>>(_dataTable.inArray1Ptr);
            var right = Unsafe.Read<Vector128<Double>>(_dataTable.inArray2Ptr);
            var result = Sse2.CompareEqual(left, right);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(left, right, _dataTable.outArrayPtr);
        }

        public void RunLclVarScenario_Load()
        {
            var left = Sse2.LoadVector128((Double*)(_dataTable.inArray1Ptr));
            var right = Sse2.LoadVector128((Double*)(_dataTable.inArray2Ptr));
            var result = Sse2.CompareEqual(left, right);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(left, right, _dataTable.outArrayPtr);
        }

        public void RunLclVarScenario_LoadAligned()
        {
            var left = Sse2.LoadAlignedVector128((Double*)(_dataTable.inArray1Ptr));
            var right = Sse2.LoadAlignedVector128((Double*)(_dataTable.inArray2Ptr));
            var result = Sse2.CompareEqual(left, right);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(left, right, _dataTable.outArrayPtr);
        }

        public void RunLclFldScenario()
        {
            var test = new SimpleBinaryOpTest__CompareEqualDouble();
            var result = Sse2.CompareEqual(test._fld1, test._fld2);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
        }

        public void RunFldScenario()
        {
            var result = Sse2.CompareEqual(_fld1, _fld2);

            Unsafe.Write(_dataTable.outArrayPtr, result);
            ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
        }

        public void RunUnsupportedScenario()
        {
            // On unsupported hardware the intrinsic must throw; success means we caught it.
            Succeeded = false;

            try
            {
                RunBasicScenario_UnsafeRead();
            }
            catch (PlatformNotSupportedException)
            {
                Succeeded = true;
            }
        }

        private void ValidateResult(Vector128<Double> left, Vector128<Double> right, void* result, [CallerMemberName] string method = "")
        {
            Double[] inArray1 = new Double[Op1ElementCount];
            Double[] inArray2 = new Double[Op2ElementCount];
            Double[] outArray = new Double[RetElementCount];

            Unsafe.Write(Unsafe.AsPointer(ref inArray1[0]), left);
            Unsafe.Write(Unsafe.AsPointer(ref inArray2[0]), right);
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Double, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), VectorSize);

            ValidateResult(inArray1, inArray2, outArray, method);
        }

        private void ValidateResult(void* left, void* right, void* result, [CallerMemberName] string method = "")
        {
            Double[] inArray1 = new Double[Op1ElementCount];
            Double[] inArray2 = new Double[Op2ElementCount];
            Double[] outArray = new Double[RetElementCount];

            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Double, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(left), VectorSize);
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Double, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(right), VectorSize);
            Unsafe.CopyBlockUnaligned(ref Unsafe.As<Double, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), VectorSize);

            ValidateResult(inArray1, inArray2, outArray, method);
        }

        // Each result lane must be the CMPEQPD mask: all bits set (-1 as Int64) when the
        // corresponding inputs compare equal, all bits clear (0) otherwise. The raw bits
        // are compared via DoubleToInt64Bits because the all-ones mask is a NaN pattern.
        private void ValidateResult(Double[] left, Double[] right, Double[] result, [CallerMemberName] string method = "")
        {
            if (BitConverter.DoubleToInt64Bits(result[0]) != ((left[0] == right[0]) ? -1 : 0))
            {
                Succeeded = false;
            }
            else
            {
                for (var i = 1; i < RetElementCount; i++)
                {
                    if (BitConverter.DoubleToInt64Bits(result[i]) != ((left[i] == right[i]) ? -1 : 0))
                    {
                        Succeeded = false;
                        break;
                    }
                }
            }

            if (!Succeeded)
            {
                Console.WriteLine($"{nameof(Sse2)}.{nameof(Sse2.CompareEqual)}<Double>(Vector128<Double>, Vector128<Double>): {method} failed:");
                Console.WriteLine($" left: ({string.Join(", ", left)})");
                Console.WriteLine($" right: ({string.Join(", ", right)})");
                Console.WriteLine($" result: ({string.Join(", ", result)})");
                Console.WriteLine();
            }
        }
    }
}
/*
 * MindTouch Core - open source enterprise collaborative networking
 * Copyright (c) 2006-2010 MindTouch Inc.
 * www.mindtouch.com  oss@mindtouch.com
 *
 * For community documentation and downloads visit www.opengarden.org;
 * please review the licensing section.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 * http://www.gnu.org/copyleft/gpl.html
 */

using System;
using System.Collections.Generic;

using MindTouch.Dream;
using MindTouch.Deki.Data;
using MindTouch.Xml;

namespace MindTouch.Deki.Logic {

    // Thin adapter that exposes the static ConfigBL settings API through the
    // IInstanceSettings interface for the current DekiContext instance.
    public class DekiInstanceSettings : IInstanceSettings {

        //--- Properties ---

        // Current license document; 'false' requests the non-cached/public view
        // (exact flag semantics live in LicenseBL.RetrieveCurrentLicense).
        public XDoc License { get { return LicenseBL.RetrieveCurrentLicense(false); } }

        //--- Methods ---

        // Returns the string setting for 'key', or 'def' when missing/empty.
        public string GetValue(string key, string def) {
            return ConfigBL.GetInstanceSettingsValue(key, def);
        }

        // Returns the setting for 'key' converted to T, or 'def' on missing value
        // or conversion failure.
        public T GetValue<T>(string key, T def) {
            return ConfigBL.GetInstanceSettingsValueAs<T>(key, def);
        }

        // Returns all instance settings as an XML document, excluding hidden entries.
        public XDoc GetAsDoc() {
            return ConfigBL.GetInstanceSettingsAsDoc(false);
        }

        // True once the current instance has reached the RUNNING state.
        public bool IsInitialized() {
            return DekiContext.Current.Instance.Status == DekiInstanceStatus.RUNNING;
        }
    }

    // Business logic for reading/writing per-instance configuration settings.
    // Settings come from three layers: the database (writable), instance-specific
    // config, and system-wide config; the latter two are applied as read-only
    // overrides on top of the db values (see OverrideSettings). Results are kept
    // in a short-lived (60s) per-instance cache.
    public static class ConfigBL {

        //--- Constants ---
        public const string UI_LOGO_UPLOADED = "ui/logo-uploaded";
        public const string UI_LOGO_URI = "ui/logo-uri";
        public const string LICENSE_STATE = "license/state";
        public const string LICENSE_EXPIRATION = "license/expiration";

        // Key suffixes marking pseudo-attributes in the flattened key/value form
        // of the config XML (e.g. "foo/bar/@readonly").
        public const string READONLY_SUFFIX = "/@readonly";
        public const string HIDDEN_SUFFIX = "/@hidden";
        public const string TEXT_SUFFIX = "/#text";

        // Per-instance cache keys for the settings dictionary and the two
        // rendered XDoc variants (with and without hidden entries).
        private const string CACHE_SETTINGS = "SETTINGS";
        private const string CACHE_SETTINGSDOC = "SETTINGSDOC";
        private const string CACHE_SETTINGSDOC_WITHHIDDEN = "SETTINGSDOCHIDDEN";

        // Keys that must be present in any settings document being saved.
        private static readonly string[] SETTINGS_REQUIREDKEYS = new string[] {
            "storage/type",
            "ui/language"
        };

        //--- Class Fields ---
        private static log4net.ILog _log = LogUtils.CreateLog();

        //--- Class Methods ---

        // Replaces the instance settings with the contents of 'doc'. Pseudo-attribute
        // keys (#text/@readonly/@hidden) are stripped before saving.
        public static void SetInstanceSettings(XDoc doc) {

            // Remove elements that contain no child elements or attributes
            Utils.RemoveEmptyNodes(doc);

            // convert document to ConfigValue dictionary
            Dictionary<string, ConfigValue> config = new Dictionary<string, ConfigValue>();
            foreach(KeyValuePair<string, string> entry in doc.ToKeyValuePairs()) {
                if(StringUtil.EndsWithInvariant(entry.Key, TEXT_SUFFIX) ||
                    StringUtil.EndsWithInvariant(entry.Key, READONLY_SUFFIX) ||
                    StringUtil.EndsWithInvariant(entry.Key, HIDDEN_SUFFIX)) {
                    continue;
                }
                config.Add(entry.Key, new ConfigValue(entry.Value));
            }

            //TODO MaxM: This will currently overwrite db settings that are overriden with the overridden value rather than leaving it alone.
            //OverrideSettings
            //for every key in given doc. if value is readonly in
            //GetInstanceSettings()[key]
            //then use value from ConfigDA.ReadInstanceSettings()[key]

            SetInstanceSettings(config);
        }

        // Validates, filters, and persists the given settings to the database.
        // Hidden entries and auto-generated keys are never written; the existing
        // api-key (if any) is preserved over whatever the caller supplied.
        public static void SetInstanceSettings(Dictionary<string, ConfigValue> settings) {

            // check that required fields are present
            foreach(string requiredKey in SETTINGS_REQUIREDKEYS) {
                if(!settings.ContainsKey(requiredKey)) {
                    throw new DreamBadRequestException(string.Format(DekiResources.MISSING_REQUIRED_CONFIG_KEY, requiredKey));
                }
            }

            //If there's an existing instance apikey, set it on the incoming settings.
            string currentApiKey = ConfigBL.GetInstanceSettingsValue("security/api-key", null);
            if(currentApiKey != null)
                settings["security/api-key"] = new ConfigValue(currentApiKey);

            // override global settings (i.e. static instance settings and system-wide settings)
            OverrideSettings(settings);

            // filter out settings that do not need to be saved
            List<KeyValuePair<string, string>> result = new List<KeyValuePair<string, string>>();
            foreach(KeyValuePair<string, ConfigValue> entry in settings) {

                //Hidden entries are not saved in the db.
                if(entry.Value.IsHidden) {
                    continue;
                }

                //Add auto-generated values here that should not be saved to the db!
                switch(entry.Key.ToLowerInvariant()) {
                case ConfigBL.UI_LOGO_URI:
                case ConfigBL.LICENSE_EXPIRATION:
                case ConfigBL.LICENSE_STATE:
                    continue;
                }

                // add element
                result.Add(new KeyValuePair<string, string>(entry.Key, entry.Value.Value));
            }

            // check that we have enough settings
            // NOTE(review): this guards against saving a near-empty document; it counts
            // the unfiltered 'settings', not the filtered 'result'.
            if(settings.Count < 2) {
                throw new DreamBadRequestException(DekiResources.ERROR_UPDATE_CONFIG_SETTINGS);
            }

            // update settings
            lock(DekiContext.Current.Instance.SettingsSyncRoot) {
                ClearConfigCache();
                DbUtils.CurrentSession.Config_WriteInstanceSettings(result);
            }
            DekiContext.Current.Instance.SettingsLastModified = DateTime.UtcNow;
        }

        // Returns the merged settings dictionary for the current instance, loading
        // from the db and applying overrides on a cache miss. Uses double-checked
        // locking on SettingsSyncRoot; the result is cached for 60 seconds.
        public static Dictionary<string, ConfigValue> GetInstanceSettings() {
            Dictionary<string, ConfigValue> result = DekiContext.Current.Instance.Cache.Get<Dictionary<string, ConfigValue>>(CACHE_SETTINGS, null);
            if(result != null) {
                return result;
            }
            lock(DekiContext.Current.Instance.SettingsSyncRoot) {

                // re-check inside the lock: another thread may have populated the cache
                result = DekiContext.Current.Instance.Cache.Get<Dictionary<string, ConfigValue>>(CACHE_SETTINGS, null);
                if(result == null) {
                    result = new Dictionary<string, ConfigValue>();

                    //Hidden settings are never entered into the db. This will filter them out.
                    foreach(KeyValuePair<string, ConfigValue> setting in DbUtils.CurrentSession.Config_ReadInstanceSettings()) {
                        if(StringUtil.EndsWithInvariant(setting.Key, HIDDEN_SUFFIX))
                            continue;
                        result[setting.Key] = setting.Value;
                    }

                    //Ensure that api key is not visible unless client has the key
                    ConfigValue apiKey = null;
                    if(result.TryGetValue("security/api-key", out apiKey)) {
                        apiKey.IsHidden = true;
                        apiKey.IsReadOnly = true;
                    }
                    OverrideSettings(result);
                    DekiContext.Current.Instance.Cache.Set(CACHE_SETTINGS, result, DateTime.UtcNow.AddSeconds(60));
                }
            }
            return result;
        }

        // Returns the value for 'key', or 'def' when the key is missing or its
        // value is null/empty. Locks the shared dictionary during the lookup.
        public static string GetInstanceSettingsValue(string key, string def) {
            Dictionary<string, ConfigValue> config = GetInstanceSettings();
            lock(config) {
                ConfigValue valueWrapper;
                if(config.TryGetValue(key, out valueWrapper)) {
                    if(!string.IsNullOrEmpty(valueWrapper.Value))
                        return valueWrapper.Value;
                }
                return def;
            }
        }

        // Returns the value for 'key' converted to T; falls back to 'def' when
        // the key is absent or conversion fails (failure is logged, not thrown).
        public static T GetInstanceSettingsValueAs<T>(string key, T def) {
            var value = GetInstanceSettingsValue(key, null);
            if(value == null) {
                return def;
            }
            try {
                return SysUtil.ChangeType<T>(value);
            } catch(Exception e) {
                _log.WarnExceptionFormat(e, "Unable to convert configuration setting '{0}' to type '{1}'", key, typeof(T));
                return def;
            }
        }

        // Nullable variant for value types: returns null when the key is absent
        // or the stored string cannot be converted to T.
        public static T? GetInstanceSettingsValueAs<T>(string key) where T : struct {
            string value = GetInstanceSettingsValue(key, null);
            if(value == null) {
                return null;
            }
            try {
                return SysUtil.ChangeType<T>(value);
            } catch(Exception e) {
                _log.WarnExceptionFormat(e, "Unexpected format of configuration setting '{0}'", key);
                return null;
            }
        }

        // Sets a single key to 'value', persisting the full settings set only
        // when the value actually changes (avoids needless db writes).
        public static void SetInstanceSettingsValue(string key, string value) {
            Dictionary<string, ConfigValue> config = CopyInstanceSettings();
            ConfigValue current;
            if(!config.TryGetValue(key, out current) || (current.Value != value)) {
                config[key] = new ConfigValue(value);
                SetInstanceSettings(config);
            }
        }

        // Deletes 'key' and every descendant key (i.e. keys starting with "key/"),
        // persisting only if something was actually removed.
        public static void DeleteInstanceSettingsValue(string key) {
            Dictionary<string, ConfigValue> config = CopyInstanceSettings();
            bool found = false;
            foreach(string existingKey in new List<string>(config.Keys)) {
                if((existingKey == key) || (StringUtil.StartsWithInvariant(existingKey, key) && (existingKey[key.Length] == '/'))) {
                    config.Remove(existingKey);
                    found = true;
                }
            }
            if(found) {
                SetInstanceSettings(config);
            }
        }

        // Shallow-copies the shared settings dictionary under its lock so callers
        // can mutate the copy without racing readers of the cached instance.
        private static Dictionary<string, ConfigValue> CopyInstanceSettings() {
            Dictionary<string, ConfigValue> config = GetInstanceSettings();
            lock(config) {
                Dictionary<string, ConfigValue> result = new Dictionary<string, ConfigValue>();
                foreach(KeyValuePair<string, ConfigValue> entry in config) {
                    result.Add(entry.Key, entry.Value);
                }
                return result;
            }
        }

        // Applies instance-specific and system-wide configuration plus computed
        // license values on top of 'instanceSettings', marking them read-only.
        // Keys flagged with the @hidden pseudo-attribute are marked hidden.
        private static void OverrideSettings(Dictionary<string, ConfigValue> instanceSettings) {
            List<KeyValuePair<string, string>> readOnlySettings = new List<KeyValuePair<string, string>>();

            // collect read-only settings that are instance specific
            if(!DekiContext.Current.Instance.Config.IsEmpty) {
                readOnlySettings.AddRange(DekiContext.Current.Instance.Config.ToKeyValuePairs());
            }

            // collect read-only settings that are system-wide
            var instanceManager = DekiContext.Current.Deki.Instancemanager;
            XDoc globalconfig = instanceManager.GetGlobalConfig();
            if(globalconfig != null && !globalconfig.IsEmpty) {
                readOnlySettings.AddRange(globalconfig.ToKeyValuePairs());
            }
            readOnlySettings.AddRange(instanceManager.GetGlobalServices());

            //Add custom computed readonly settings here

            //Current license status
            readOnlySettings.Add(new KeyValuePair<string, string>(LICENSE_STATE, LicenseBL.LicenseState.ToString()));
            if(LicenseBL.LicenseExpiration != DateTime.MaxValue) {
                readOnlySettings.Add(new KeyValuePair<string, string>(LICENSE_EXPIRATION, LicenseBL.LicenseExpiration.ToString(XDoc.RFC_DATETIME_FORMAT)));
            }

            List<string> hiddenKeys = new List<string>();

            // mark all 'readonly' settings as read-only
            foreach(KeyValuePair<string, string> setting in readOnlySettings) {

                // skip elements which have a @hidden attribute
                if(StringUtil.EndsWithInvariant(setting.Key, HIDDEN_SUFFIX)) {
                    hiddenKeys.Add(setting.Key.Substring(0, setting.Key.Length - HIDDEN_SUFFIX.Length));
                    continue;
                }

                //For backwards compatibility to older style config xml's (without the <wikis> element).
                if(StringUtil.StartsWithInvariant(setting.Key, "indexer/"))
                    continue;
                instanceSettings[setting.Key] = new ConfigValue(setting.Value, true, false);
            }
            foreach(string hiddenKey in hiddenKeys) {
                ConfigValue cv = null;
                if(instanceSettings.TryGetValue(hiddenKey, out cv))
                    cv.IsHidden = true;
            }
        }

        // Renders the settings as a <config> XDoc, optionally including hidden
        // entries, and decorates elements with @readonly/@hidden attributes.
        // The rendered document is cached for 60 seconds per variant.
        public static XDoc GetInstanceSettingsAsDoc(bool includeHidden) {
            var instance = DekiContext.Current.Instance;
            string cachekey = includeHidden ? CACHE_SETTINGSDOC_WITHHIDDEN : CACHE_SETTINGSDOC;
            XDoc result = instance.Cache.Get<XDoc>(cachekey, null);
            if(result == null) {
                Dictionary<string, ConfigValue> config = GetInstanceSettings();
                List<KeyValuePair<string, string>> items = new List<KeyValuePair<string, string>>();
                lock(config) {
                    foreach(KeyValuePair<string, ConfigValue> entry in config) {
                        if(entry.Value.IsHidden && !includeHidden) {
                            continue;
                        }

                        // check if overwritten setting was an element
                        int index = entry.Key.LastIndexOf('/');
                        bool isElement = ((index + 1) < entry.Key.Length) && (entry.Key[index + 1] != '@');
                        items.Add(new KeyValuePair<string, string>(entry.Key, entry.Value.Value));
                        if(isElement) {
                            if(entry.Value.IsReadOnly) {

                                // we need to add a 'readonly' attribute
                                items.Add(new KeyValuePair<string, string>(entry.Key + READONLY_SUFFIX, "true"));
                            }
                            if(entry.Value.IsHidden) {

                                // we need to add a 'hidden' attribute
                                items.Add(new KeyValuePair<string, string>(entry.Key + HIDDEN_SUFFIX, "true"));
                            }
                        }
                    }
                }

                //Ensure that attributes are after their associated elements to ensure that the #text and the @attribute are part of the same element rather than creating a new element with just the #text
                //after the attribute. Consider moving this to Dream XDocFactory.From
                items.Sort((left, right) => StringUtil.CompareInvariant(left.Key, right.Key));
                result = XDocFactory.From(items, "config");
                instance.Cache.Set(cachekey, result, DateTime.UtcNow.AddSeconds(60));
            }

            // TODO: remove the clone once cached settings come out of IKeyValueCache (i.e. are serialized)
            result = result.Clone();
            result.InsertValueAt("api/@href", DekiContext.Current.Deki.Self.Uri.AsPublicUri().ToString());
            return result;
        }

        // Evicts the settings dictionary and both rendered-doc variants from the
        // per-instance cache; called before every settings write.
        public static void ClearConfigCache() {
            var cache = DekiContext.Current.Instance.Cache;
            cache.Delete(CACHE_SETTINGS);
            cache.Delete(CACHE_SETTINGSDOC);
            cache.Delete(CACHE_SETTINGSDOC_WITHHIDDEN);
        }
    }
}
using System;
using System.Text;
using System.Linq;
using System.Collections.Generic;

// Pizza-order calculator: a fixed menu is embedded as a verbatim string; the
// program reads an order sentence from stdin and prints the total grams of
// each ingredient needed, sorted by ingredient name.
class Program
{
    // Menu data. Line layout matters to the parser below:
    //  - a line whose FIRST character is '-' starts a new pizza ("- Name"),
    //  - any other (indented) line is an ingredient of the current pizza
    //    ("- ingredient - Ng").
    // NOTE(review): ingredient lines must carry leading whitespace so that
    // line[0] != '-'; reconstructed here from the parser's constraints — TODO
    // confirm against the original file's exact whitespace.
    static readonly string[] PIZZA_LIST = @"- Mediteraneo
  - motsarela - 122g
  - domaten sos - 104g
  - presni domati - 30g
  - zeleni chushki - 69g
  - krave sirene - 195g
  - maslini - 56g
- Alfredo
  - smetana - 191g
  - motsarela - 194g
  - pileshko file - 12g
  - beybi spanak - 127g
- Vita
  - domaten sos - 59g
  - motsarela - 97g
  - beybi spanak - 147g
  - krave sirene - 28g
  - presni domati - 55g
- Margarita
  - domaten sos - 6g
  - motsarela - 152g
- Chikenita
  - domaten sos - 30g
  - motsarela - 17g
  - pileshko file - 199g
  - peperoni - 166g
  - domati - 144g
  - emental - 79g
- Dominos Spetsialna
  - domaten sos - 161g
  - motsarela - 137g
  - shunka - 181g
  - bekon - 37g
  - zeleni chushki - 158g
  - luk - 183g
  - gabi - 90g
- ChikChiRik
  - domaten sos - 133g
  - motsarela - 20g
  - krehko pile - 64g
  - topeno sirene - 166g
  - tsarevitsa - 0g
- Karbonara
  - smetana - 40g
  - motsarela - 158g
  - bekon - 141g
  - gabi - 160g
- Amerikan Hot
  - domaten sos - 82g
  - motsarela - 88g
  - peperoni - 149g
  - halapenyo - 197g
  - luk - 196g
- Gardan Klasik
  - domaten sos - 165g
  - motsarela - 165g
  - maslini - 194g
  - zeleni chushki - 123g
  - luk - 91g
  - presni domati - 86g
  - gabi - 8g
- Peperoni Klasik
  - domaten sos - 105g
  - motsarela - 98g
  - peperoni - 186g
- Barbekyu Pile
  - barbekyu sos - 195g
  - motsarela - 169g
  - bekon - 20g
  - krehko pile - 26g
- Barbekyu Klasik
  - barbekyu sos - 147g
  - motsarela - 175g
  - bekon - 150g
  - pikantno teleshko - 104g
- Nyu york
  - domaten sos - 177g
  - motsarela - 152g
  - bekon - 151g
  - chedar - 126g
  - presni gabi - 18g
- Shunka Klasik
  - domaten sos - 110g
  - motsarela - 74g
  - shunka - 26g
  - zeleni chushki - 166g
  - presni gabi - 150g
- Zverska
  - domaten sos - 147g
  - motsarela - 125g
  - shunka - 9g
  - bekon - 78g
  - pikantno teleshko - 55g
- Italianska
  - domaten sos - 32g
  - motsarela - 61g
  - pesto - 79g
  - parmezan - 132g
  - presni domati - 60g
  - bosilek - 30g
- Havay
  - domaten sos - 81g
  - motsarela - 180g
  - shunka - 90g
  - ananas - 47g
- Balgarska
  - motsarela - 45g
  - domaten sos - 175g
  - luk - 163g
  - maslini - 146g
  - zeleni chushki - 110g
  - krave sirene - 0g
  - selska nadenitsa - 195g
  - presni domati - 53g
  - rigan - 159g
- Formadzhi
  - domaten sos - 72g
  - motsarela - 98g
  - chedar - 194g
  - krave sirene - 82g
  - parmezan - 117g
- Ton
  - domaten sos - 54g
  - motsarela - 119g
  - riba ton - 190g
  - presni domati - 78g
  - luk - 36g
- Chorizana
  - domaten sos - 56g
  - motsarela - 102g
  - chorizo - 161g
  - pileshko file - 63g
  - krave sirene - 197g
  - presni domati - 17g
- Meat Mania
  - domaten sos - 1g
  - motsarela - 111g
  - shunka - 83g
  - bekon - 129g
  - teleshko - 70g
  - pileshko file - 86g
  - chorizo - 182g
- Unika
  - domaten sos - 185g
  - motsarela - 177g
  - parmezan - 114g
  - peperoni - 44g
  - gabi - 49g
  - presni domati - 86g
  - rukola - 42g
- Bene
  - domaten sos - 139g
  - kashkaval - 93g
  - shunka - 129g
  - tsarevitsa - 149g
  - maslini - 4g
- Bondzhorno
  - domaten sos - 99g
  - kashkaval - 23g
  - svinsko file - 199g
  - gabi - 86g
  - kiseli krastavichki - 91g
- Vegetarianska
  - domaten sos - 9g
  - kashkaval - 184g
  - gabi - 67g
  - chushki - 42g
  - luk - 42g
  - tsarevitsa - 124g
- Venetsiya
  - domaten sos - 148g
  - kashkaval - 47g
  - gabi - 43g
  - pusheni gardi - 32g
  - yaytse - 50g
  - luk - 122g
- Garda
  - domaten sos - 84g
  - kashkaval - 35g
  - pileshko role - 107g
  - pusheno sirene - 197g
  - chushki - 113g
  - maslini - 151g
- Kaltsone
  - domaten sos - 21g
  - kashkaval - 142g
  - shunka - 195g
  - gabi - 127g
- Kaprichoza
  - domaten sos - 40g
  - kashkaval - 22g
  - yaytse - 57g
  - presni domati - 14g
  - magadanoz - 160g
  - shunka - 10g
  - maslini - 88g
  - pusheni gardi - 22g
  - gabi - 35g
- Kompaniola
  - domaten sos - 16g
  - kashkaval - 87g
  - bekon - 111g
  - gabi - 198g
  - rigan - 95g
- Meksikana
  - domaten sos - 119g
  - kashkaval - 147g
  - gabi - 88g
  - lukanka - 155g
  - tsarevitsa - 153g
  - luk - 83g
  - lyuta chushka - 60g
- Morski darove
  - domaten sos - 89g
  - kashkaval - 66g
  - midi - 6g
  - kalmari - 175g
  - limon - 142g
  - zehtin - 113g
  - rigan - 121g
- Kastelo
  - domaten sok - 195g
  - shunka - 89g
  - bekon - 88g
  - pusheno sirene - 19g
  - kashkaval - 114g
  - pileshko role - 30g
  - maslini - 63g
  - kiseli krastavichki - 179g
- Prima Vera
  - domaten sos - 126g
  - kashkaval - 155g
  - shunka - 26g
  - gabi - 44g
  - domati - 22g
  - zehtin - 64g
  - bosilek - 4g
- Proshuto
  - domaten sos - 58g
  - kashkaval - 136g
  - shunka - 30g
  - proshuto - 100g
  - maslini - 136g
- Rimini
  - domaten sos - 55g
  - kashkaval - 81g
  - shunka - 4g
  - bekon - 23g
  - lukanka - 188g
  - chushki - 91g
  - gabi - 147g
  - tsarevitsa - 57g
  - yaytse - 60g
- San Marko
  - domaten sos - 129g
  - kashkaval - 28g
  - gabi - 106g
  - lukanka - 38g
  - pusheno sirene - 91g
  - zehtin - 62g
- Tono
  - domaten sos - 155g
  - kashkaval - 60g
  - riba ton - 131g
  - ratsi - 124g
  - luk - 100g
  - chesan - 142g
  - limon - 106g".Split('\n');

    static void Main()
    {
        // Build the menu: pizza name (lowercased) -> ingredient -> grams.
        var menu = new Dictionary<string, Dictionary<string, int>>();
        Dictionary<string, int> products = null;
        foreach(var line in PIZZA_LIST) {
            if(line[0] == '-') {
                // New pizza header line; every pizza implicitly includes 256g of dough.
                products = new Dictionary<string, int>();
                products.Add("testo", 256);
                menu.Add(line.Split('-')[1].Trim().ToLower(), products);
            } else {
                // Ingredient line: "  - <name> - <N>g".
                var spl = line.Split('-');
                var product = spl[1].Trim().ToLower();
                var weight = int.Parse(spl[2].Split('g')[0].Trim());
                products.Add(product, weight);
            }
        }

        // Read the whole order (possibly multi-line) into one string.
        var inputBuilder = new StringBuilder();
        string input;
        while(true) {
            input = Console.ReadLine();
            if(input == null) {
                break;
            }
            inputBuilder.Append(' ' + input);
        }
        input = inputBuilder.ToString();

        // Keep only the text up to the first '.', normalize commas to standalone
        // tokens, drop the leading word and any "pizza" filler words.
        int dotIndex = input.IndexOf('.');
        var words = input
            .Substring(0, dotIndex)
            .Replace(",", " , ")
            .Split(new char[]{' '}, StringSplitOptions.RemoveEmptyEntries)
            .Skip(1)
            .Select(x => x.ToLower())
            .Where(x => x != "pizza")
            .ToList();

        // Sentinel separator ("i" = "and") so the final item is flushed too.
        words.Add("i");

        // Token-by-token state machine:
        //   item     - name currently being accumulated (pizza or excluded ingredient)
        //   quantity - multiplier from the last number token (default 1)
        //   products - null while naming a pizza; set once the pizza is added so
        //              subsequent names ("bez X" / "N X") subtract ingredient X.
        var result = new SortedDictionary<string, int>();
        int quantity = 1;
        string item = null;
        products = null;
        foreach(var w in words) {
            if(w == "," || w == "i" || (w == "bez" && item != null) || (char.IsDigit(w[0]) && item != null)) {
                if(products == null) {
                    // Flush a pizza name: add all its ingredients times quantity.
                    products = menu[item];
                    foreach(var p in products) {
                        if(result.ContainsKey(p.Key)) {
                            result[p.Key] += p.Value * quantity;
                        } else {
                            result[p.Key] = p.Value * quantity;
                        }
                    }
                } else {
                    // Flush an excluded ingredient: subtract it times quantity.
                    result[item] -= products[item] * quantity;
                }
                item = null;
            } else if(char.IsDigit(w[0]) || w == "bez") {} else if(item == null) {
                item = w;
            } else {
                item += " " + w;
            }
            if(char.IsDigit(w[0])) {
                quantity = int.Parse(w);
            } else if(w == "," || w == "i") {
                // Separator ends the current pizza clause entirely.
                products = null;
                quantity = 1;
            }
        }

        // Print positive totals only (full exclusions can drive a count to 0).
        foreach(var x in result) {
            if(x.Value > 0) {
                Console.WriteLine("{0}: {1}g", x.Key, x.Value);
            }
        }
    }
}
using System;
using Cocos2D;
using Random = Cocos2D.CCRandom;

namespace tests
{
    // Base layer for all ease-action demos: creates the three test sprites,
    // the title label, and the back/restart/next navigation menu.
    public class EaseSpriteDemo : CCLayer
    {
        protected CCSprite m_grossini;
        protected CCSprite m_kathia;
        protected String m_strTitle;
        protected CCSprite m_tamara;

        // Overridden by each demo to supply the on-screen title text.
        public virtual String title()
        {
            return "No title";
        }

        public override void OnEnter()
        {
            base.OnEnter();

            m_grossini = new CCSprite(TestResource.s_pPathGrossini);
            m_tamara = new CCSprite(TestResource.s_pPathSister1);
            m_kathia = new CCSprite(TestResource.s_pPathSister2);

            AddChild(m_grossini, 3);
            AddChild(m_kathia, 2);
            AddChild(m_tamara, 1);

            var s = CCDirector.SharedDirector.WinSize;

            // Stack the three sprites on the left edge at different heights.
            m_grossini.Position = new CCPoint(60, 50);
            m_kathia.Position = new CCPoint(60, 150);
            m_tamara.Position = new CCPoint(60, 250);

            var label = new CCLabelTTF(title(), "arial", 32);
            AddChild(label);
            label.Position = new CCPoint(s.Width / 2, s.Height - 50);

            // Navigation menu: back / restart / next.
            var item1 = new CCMenuItemImage(TestResource.s_pPathB1, TestResource.s_pPathB2, backCallback);
            var item2 = new CCMenuItemImage(TestResource.s_pPathR1, TestResource.s_pPathR2, restartCallback);
            var item3 = new CCMenuItemImage(TestResource.s_pPathF1, TestResource.s_pPathF2, nextCallback);

            var menu = new CCMenu(item1, item2, item3);
            menu.Position = CCPoint.Zero;
            item1.Position = new CCPoint(s.Width / 2 - 100, 30);
            item2.Position = new CCPoint(s.Width / 2, 30);
            item3.Position = new CCPoint(s.Width / 2 + 100, 30);
            AddChild(menu, 1);
        }

        // Re-runs the current demo layer in a fresh scene.
        public void restartCallback(object pSender)
        {
            CCScene s = new EaseActionsTestScene();
            s.AddChild(EaseTest.restartEaseAction());
            CCDirector.SharedDirector.ReplaceScene(s);
        }

        // Advances to the next demo layer.
        public void nextCallback(object pSender)
        {
            CCScene s = new EaseActionsTestScene();
            s.AddChild(EaseTest.nextEaseAction());
            CCDirector.SharedDirector.ReplaceScene(s);
            ;
        }

        // Goes back to the previous demo layer.
        public void backCallback(object pSender)
        {
            CCScene s = new EaseActionsTestScene();
            s.AddChild(EaseTest.backEaseAction());
            CCDirector.SharedDirector.ReplaceScene(s);
        }

        // Layout helper for demos that only animate two sprites: hides kathia.
        public void positionForTwo()
        {
            m_grossini.Position = new CCPoint(60, 120);
            m_tamara.Position = new CCPoint(60, 220);
            m_kathia.Visible = false;
        }
    }

    // Demonstrates CCEaseIn/CCEaseOut and stopping tagged actions after a delay.
    public class SpriteEase : EaseSpriteDemo
    {
        public override void OnEnter()
        {
            base.OnEnter();

            var size = CCDirector.SharedDirector.WinSize;

            var move = new CCMoveBy (3, new CCPoint(size.Width - 130, 0));
            var move_back = (CCActionInterval) move.Reverse();

            var move_ease_in = new CCEaseIn((CCActionInterval) move.Copy(), 2.5f);
            var move_ease_in_back = move_ease_in.Reverse();

            var move_ease_out = new CCEaseOut((CCActionInterval) move.Copy(), 2.5f);
            var move_ease_out_back = move_ease_out.Reverse();

            var delay = new CCDelayTime (0.25f);

            var seq1 = new CCSequence(move, delay, move_back, (CCFiniteTimeAction) delay.Copy());
            var seq2 = new CCSequence(move_ease_in, (CCFiniteTimeAction) delay.Copy(), move_ease_in_back, (CCFiniteTimeAction) delay.Copy());
            var seq3 = new CCSequence(move_ease_out, (CCFiniteTimeAction) delay.Copy(), move_ease_out_back, (CCFiniteTimeAction) delay.Copy());

            // Tag all three actions so testStopAction can stop them by tag.
            var a2 = m_grossini.RunAction(new CCRepeatForever ((CCActionInterval)seq1));
            a2.Tag = 1;

            var a1 = m_tamara.RunAction(new CCRepeatForever ((CCActionInterval)seq2));
            a1.Tag = 1;

            var a = m_kathia.RunAction(new CCRepeatForever ((CCActionInterval)seq3));
            a.Tag = 1;

            Schedule(testStopAction, 6.25f);
        }

        public override String title()
        {
            return "EaseIn - EaseOut - Stop";
        }

        // One-shot scheduled callback: stops the tagged actions on all sprites.
        public void testStopAction(float dt)
        {
            Unschedule(testStopAction);
            m_kathia.StopActionByTag(1);
            m_tamara.StopActionByTag(1);
            m_grossini.StopActionByTag(1);
        }
    }

    // Demonstrates CCEaseInOut with three different rate parameters.
    public class SpriteEaseInOut : EaseSpriteDemo
    {
        public override void OnEnter()
        {
            base.OnEnter();

            var size = CCDirector.SharedDirector.WinSize;

            var move = new CCMoveBy (3, new CCPoint(size.Width - 130, 0));

            var move_ease_inout1 = new CCEaseInOut((CCActionInterval) move.Copy(), 0.65f);
            var move_ease_inout_back1 = move_ease_inout1.Reverse();

            var move_ease_inout2 = new CCEaseInOut((CCActionInterval) move.Copy(), 1.35f);
            var move_ease_inout_back2 = move_ease_inout2.Reverse();

            var move_ease_inout3 = new CCEaseInOut((CCActionInterval) move.Copy(), 1.0f);
            var move_ease_inout_back3 = move_ease_inout3.Reverse() as CCActionInterval;

            var delay = new CCDelayTime (0.25f);

            var seq1 = new CCSequence(move_ease_inout1, delay, move_ease_inout_back1, (CCFiniteTimeAction) delay.Copy());
            var seq2 = new CCSequence(move_ease_inout2, (CCFiniteTimeAction) delay.Copy(), move_ease_inout_back2, (CCFiniteTimeAction) delay.Copy());
            var seq3 = new CCSequence(move_ease_inout3, (CCFiniteTimeAction) delay.Copy(), move_ease_inout_back3, (CCFiniteTimeAction) delay.Copy());

            m_tamara.RunAction(new CCRepeatForever ((CCActionInterval)seq1));
            m_kathia.RunAction(new CCRepeatForever ((CCActionInterval)seq2));
            m_grossini.RunAction(new CCRepeatForever ((CCActionInterval)seq3));
        }

        public override String title()
        {
            return "EaseInOut and rates";
        }
    }

    // Demonstrates CCEaseExponentialIn/CCEaseExponentialOut.
    public class SpriteEaseExponential : EaseSpriteDemo
    {
        public override void OnEnter()
        {
            base.OnEnter();

            var s = CCDirector.SharedDirector.WinSize;

            var move = new CCMoveBy (3, new CCPoint(s.Width - 130, 0));
            var move_back = move.Reverse();

            var move_ease_in = new CCEaseExponentialIn((CCActionInterval) (move.Copy()));
            var move_ease_in_back = move_ease_in.Reverse();

            var move_ease_out = new CCEaseExponentialOut((CCActionInterval) (move.Copy()));
            var move_ease_out_back = move_ease_out.Reverse();

            var delay = new CCDelayTime (0.25f);

            var seq1 = new CCSequence(move, delay, move_back, (CCFiniteTimeAction) delay.Copy());
            var seq2 = new CCSequence(move_ease_in, (CCFiniteTimeAction) delay.Copy(), move_ease_in_back, (CCFiniteTimeAction) delay.Copy());
            var seq3 = new CCSequence(move_ease_out, (CCFiniteTimeAction) delay.Copy(), move_ease_out_back, (CCFiniteTimeAction) delay.Copy());

            m_grossini.RunAction(new CCRepeatForever (seq1));
            m_tamara.RunAction(new CCRepeatForever (seq2));
            m_kathia.RunAction(new CCRepeatForever (seq3));
        }

        public override String title()
        {
            return "ExpIn - ExpOut actions";
        }
    }

    // Demonstrates CCEaseExponentialInOut (two-sprite layout).
    public class SpriteEaseExponentialInOut : EaseSpriteDemo
    {
        public override void OnEnter()
        {
            base.OnEnter();

            var s = CCDirector.SharedDirector.WinSize;

            var move = new CCMoveBy (3, new CCPoint(s.Width - 130, 0));
            var move_back = move.Reverse();

            var move_ease = new CCEaseExponentialInOut((CCActionInterval) move.Copy());
            var move_ease_back = move_ease.Reverse(); //-. reverse()

            var delay = new CCDelayTime (0.25f);

            var seq1 = new CCSequence(move, delay, move_back, (CCFiniteTimeAction) delay.Copy());
            var seq2 = new CCSequence(move_ease, (CCFiniteTimeAction) delay.Copy(), move_ease_back, (CCFiniteTimeAction) delay.Copy());

            positionForTwo();

            m_grossini.RunAction(new CCRepeatForever (seq1));
            m_tamara.RunAction(new CCRepeatForever (seq2));
        }

        public override String title()
        {
            return "EaseExponentialInOut action";
        }
    }

    // Demonstrates CCEaseSineIn/CCEaseSineOut.
    public class SpriteEaseSine : EaseSpriteDemo
    {
        public override void OnEnter()
        {
            base.OnEnter();

            var s = CCDirector.SharedDirector.WinSize;

            var move = new CCMoveBy (3, new CCPoint(s.Width - 130, 0));
            var move_back = move.Reverse();

            var move_ease_in = new CCEaseSineIn((CCActionInterval) move.Copy());
            var move_ease_in_back = move_ease_in.Reverse();

            var move_ease_out = new CCEaseSineOut((CCActionInterval) move.Copy());
            var move_ease_out_back = move_ease_out.Reverse();

            var delay = new CCDelayTime (0.25f);

            var seq1 = new CCSequence(move, delay, move_back, (CCFiniteTimeAction) delay.Copy());
            var seq2 = new CCSequence(move_ease_in, (CCFiniteTimeAction) delay.Copy(), move_ease_in_back, (CCFiniteTimeAction) delay.Copy());
            var seq3 = new CCSequence(move_ease_out, (CCFiniteTimeAction) delay.Copy(), move_ease_out_back, (CCFiniteTimeAction) delay.Copy());

            m_grossini.RunAction(new CCRepeatForever (seq1));
            m_tamara.RunAction(new CCRepeatForever (seq2));
            m_kathia.RunAction(new CCRepeatForever (seq3));
        }

        public override String title()
        {
            return "EaseSineIn - EaseSineOut";
        }
    }

    // Demonstrates CCEaseSineInOut (two-sprite layout).
    public class SpriteEaseSineInOut : EaseSpriteDemo
    {
        public override void OnEnter()
        {
            base.OnEnter();

            var s = CCDirector.SharedDirector.WinSize;

            var move = new CCMoveBy (3, new CCPoint(s.Width - 130, 0));
            var move_back = move.Reverse();

            var move_ease = new CCEaseSineInOut((CCActionInterval) (move.Copy()));
            var move_ease_back = move_ease.Reverse();

            var delay = new CCDelayTime (0.25f);

            var seq1 = new CCSequence(move, delay, move_back, (CCFiniteTimeAction) delay.Copy());
            var seq2 = new CCSequence(move_ease, (CCFiniteTimeAction) delay.Copy(), move_ease_back, (CCFiniteTimeAction) delay.Copy());

            positionForTwo();

            m_grossini.RunAction(new CCRepeatForever (seq1));
            m_tamara.RunAction(new CCRepeatForever (seq2));
        }

        public override String title()
        {
            return "EaseSineInOut action";
        }
    }

    // Demonstrates CCEaseElasticIn/CCEaseElasticOut.
    public class SpriteEaseElastic : EaseSpriteDemo
    {
        public override void OnEnter()
        {
            base.OnEnter();

            var s = CCDirector.SharedDirector.WinSize;

            var move = new CCMoveBy (3, new CCPoint(s.Width - 130, 0));
            var move_back = move.Reverse();

            var move_ease_in = new CCEaseElasticIn((CCActionInterval) (move.Copy()));
            var move_ease_in_back = move_ease_in.Reverse();

            var move_ease_out = new CCEaseElasticOut((CCActionInterval) (move.Copy()));
            var move_ease_out_back = move_ease_out.Reverse();

            var delay = new CCDelayTime (0.25f);

            var seq1 = new CCSequence(move, delay, move_back, (CCFiniteTimeAction) delay.Copy());
            var seq2 = new CCSequence(move_ease_in, (CCFiniteTimeAction) delay.Copy(), move_ease_in_back, (CCFiniteTimeAction) delay.Copy());
            var seq3 = new CCSequence(move_ease_out, (CCFiniteTimeAction) delay.Copy(), move_ease_out_back, (CCFiniteTimeAction) delay.Copy());

            m_grossini.RunAction(new CCRepeatForever (seq1));
            m_tamara.RunAction(new CCRepeatForever (seq2));
            m_kathia.RunAction(new CCRepeatForever (seq3));
        }

        public override String title()
        {
            return "Elastic In - Out actions";
        }
    }

    // Demonstrates CCEaseElasticInOut with three different period parameters.
    public class SpriteEaseElasticInOut : EaseSpriteDemo
    {
        public override void OnEnter()
        {
            base.OnEnter();

            var s = CCDirector.SharedDirector.WinSize;

            var move = new CCMoveBy (3, new CCPoint(s.Width - 130, 0));

            var move_ease_inout1 = new CCEaseElasticInOut((CCActionInterval) (move.Copy()), 0.3f);
            var move_ease_inout_back1 = move_ease_inout1.Reverse();

            var move_ease_inout2 = new CCEaseElasticInOut((CCActionInterval) (move.Copy()), 0.45f);
            var move_ease_inout_back2 = move_ease_inout2.Reverse();

            var move_ease_inout3 = new CCEaseElasticInOut((CCActionInterval) (move.Copy()), 0.6f);
            var move_ease_inout_back3 = move_ease_inout3.Reverse();

            var delay = new CCDelayTime (0.25f);

            var seq1 = new CCSequence(move_ease_inout1, delay, move_ease_inout_back1, (CCFiniteTimeAction) delay.Copy());
            var seq2 = new CCSequence(move_ease_inout2, (CCFiniteTimeAction) delay.Copy(), move_ease_inout_back2, (CCFiniteTimeAction) delay.Copy());
            var seq3 = new CCSequence(move_ease_inout3, (CCFiniteTimeAction) delay.Copy(), move_ease_inout_back3, (CCFiniteTimeAction) delay.Copy());

            m_tamara.RunAction(new CCRepeatForever (seq1));
            m_kathia.RunAction(new CCRepeatForever (seq2));
            m_grossini.RunAction(new CCRepeatForever (seq3));
        }

        public override String title()
        {
            return "EaseElasticInOut action";
        }
    }

    // Demonstrates CCEaseBounceIn/CCEaseBounceOut.
    public class SpriteEaseBounce : EaseSpriteDemo
    {
        public override void OnEnter()
        {
            base.OnEnter();

            var s = CCDirector.SharedDirector.WinSize;

            var move = new CCMoveBy (3, new CCPoint(s.Width - 130, 0));
            var move_back = move.Reverse();

            var move_ease_in = new CCEaseBounceIn((CCActionInterval) (move.Copy()));
            var move_ease_in_back = move_ease_in.Reverse();

            var move_ease_out = new CCEaseBounceOut((CCActionInterval) (move.Copy()));
            var move_ease_out_back = move_ease_out.Reverse();

            var delay = new CCDelayTime (0.25f);

            var seq1 = new CCSequence(move, delay, move_back, (CCFiniteTimeAction) delay.Copy());
            var seq2 = new CCSequence(move_ease_in, (CCFiniteTimeAction) delay.Copy(), move_ease_in_back, (CCFiniteTimeAction) delay.Copy());
            var seq3 = new CCSequence(move_ease_out, (CCFiniteTimeAction) delay.Copy(), move_ease_out_back, (CCFiniteTimeAction) delay.Copy());

            m_grossini.RunAction(new CCRepeatForever (seq1));
            m_tamara.RunAction(new CCRepeatForever (seq2));
            m_kathia.RunAction(new CCRepeatForever (seq3));
        }

        public override String title()
        {
            return "Bounce In - Out actions";
        }
    }

    // Demonstrates CCEaseBounceInOut (two-sprite layout).
    public class SpriteEaseBounceInOut : EaseSpriteDemo
    {
        public override void OnEnter()
        {
            base.OnEnter();

            var s = CCDirector.SharedDirector.WinSize;

            var move = new CCMoveBy (3, new CCPoint(s.Width - 130, 0));
            var move_back = move.Reverse();

            var move_ease = new CCEaseBounceInOut((CCActionInterval) (move.Copy()));
            var move_ease_back = move_ease.Reverse();

            var delay = new CCDelayTime (0.25f);

            var seq1 = new CCSequence(move, delay, move_back, (CCFiniteTimeAction) delay.Copy());
            var seq2 = new CCSequence(move_ease, (CCFiniteTimeAction) delay.Copy(), move_ease_back, (CCFiniteTimeAction) delay.Copy());

            positionForTwo();

            m_grossini.RunAction(new CCRepeatForever (seq1));
            m_tamara.RunAction(new CCRepeatForever (seq2));
        }

        public override String title()
        {
            return "EaseBounceInOut action";
        }
    }

    // Demonstrates CCEaseBackIn/CCEaseBackOut.
    public class SpriteEaseBack : EaseSpriteDemo
    {
        public override void OnEnter()
        {
            base.OnEnter();

            var s = CCDirector.SharedDirector.WinSize;

            var move = new CCMoveBy (3, new CCPoint(s.Width - 130, 0));
            var move_back = move.Reverse();

            var move_ease_in = new CCEaseBackIn((CCActionInterval) (move.Copy()));
            var move_ease_in_back = move_ease_in.Reverse();

            var move_ease_out = new CCEaseBackOut((CCActionInterval) (move.Copy()));
            var move_ease_out_back = move_ease_out.Reverse();

            var delay = new CCDelayTime (0.25f);

            var seq1 = new CCSequence(move, delay, move_back, (CCFiniteTimeAction) delay.Copy());
            var seq2 = new CCSequence(move_ease_in, (CCFiniteTimeAction) delay.Copy(), move_ease_in_back, (CCFiniteTimeAction) delay.Copy());
            var seq3 = new CCSequence(move_ease_out, (CCFiniteTimeAction) delay.Copy(), move_ease_out_back, (CCFiniteTimeAction) delay.Copy());

            m_grossini.RunAction(new CCRepeatForever (seq1));
            m_tamara.RunAction(new CCRepeatForever (seq2));
            m_kathia.RunAction(new CCRepeatForever (seq3));
        }

        public override String title()
        {
            return "Back In - Out actions";
        }
    }

    // Demonstrates CCEaseBackInOut (two-sprite layout).
    public class SpriteEaseBackInOut : EaseSpriteDemo
    {
        public override void OnEnter()
        {
            base.OnEnter();

            var s = CCDirector.SharedDirector.WinSize;

            var move = new CCMoveBy (3, new CCPoint(s.Width - 130, 0));
            var move_back = move.Reverse();

            var move_ease = new CCEaseBackInOut((CCActionInterval) (move.Copy()));
            var move_ease_back = move_ease.Reverse() as CCActionInterval;

            var delay = new CCDelayTime (0.25f);

            var seq1 = new CCSequence(move, delay, move_back, (CCFiniteTimeAction) delay.Copy());
            var seq2 = new CCSequence(move_ease, (CCFiniteTimeAction) delay.Copy(), move_ease_back, (CCFiniteTimeAction) delay.Copy());

            positionForTwo();

            m_grossini.RunAction(new CCRepeatForever (seq1));
            m_tamara.RunAction(new CCRepeatForever (seq2));
        }

        public override String title()
        {
            return "EaseBackInOut action";
        }
    }

    // Demonstrates CCSpeed: a jump+rotate spawn whose playback speed is
    // re-randomized on all three sprites every second.
    public class SpeedTest : EaseSpriteDemo
    {
        public override void OnEnter()
        {
            base.OnEnter();

            var s = CCDirector.SharedDirector.WinSize;

            // rotate and jump
            var jump1 = new CCJumpBy (4, new CCPoint(-s.Width + 80, 0), 100, 4);
            var jump2 = jump1.Reverse();
            var rot1 = new CCRotateBy (4, 360 * 2);
            var rot2 = rot1.Reverse();

            var seq3_1 = new CCSequence(jump2, jump1);
            var seq3_2 = new CCSequence(rot1, rot2);
            var spawn = new CCSpawn(seq3_1, seq3_2);
            var action = new CCSpeed (new CCRepeatForever (spawn), 1.0f);
            action.Tag = EaseTest.kTagAction1;

            var action2 = (CCAction) (action.Copy());
            var action3 = (CCAction) (action.Copy());

            action2.Tag = EaseTest.kTagAction1;
            action3.Tag = EaseTest.kTagAction1;

            m_grossini.RunAction(action2);
            m_tamara.RunAction(action3);
            m_kathia.RunAction(action);

            Schedule(altertime, 1.0f);
        }

        // Scheduled every second: assigns each sprite's CCSpeed action a new
        // random speed in [0, 2).
        public void altertime(float dt)
        {
            var action1 = (CCSpeed) (m_grossini.GetActionByTag(EaseTest.kTagAction1));
            var action2 = (CCSpeed) (m_tamara.GetActionByTag(EaseTest.kTagAction1));
            var action3 = (CCSpeed) (m_kathia.GetActionByTag(EaseTest.kTagAction1));

            action1.Speed = CCRandom.Float_0_1() * 2;
            action2.Speed = CCRandom.Float_0_1() * 2;
            action3.Speed = CCRandom.Float_0_1() * 2;
        }

        public override String title()
        {
            return "Speed action";
        }
    }

    // Scene wrapper that hosts the current ease-demo layer.
    public class EaseActionsTestScene : TestScene
    {
        protected override void NextTestCase()
        {
        }

        protected override void PreviousTestCase()
        {
        }

        protected override void RestTestCase()
        {
        }

        public override void runThisTest()
        {
            var pLayer = EaseTest.nextEaseAction();
            AddChild(pLayer);

            CCDirector.SharedDirector.ReplaceScene(this);
        }
    }

    // Static registry/iterator over the demo layers; sceneIdx tracks the
    // currently displayed demo across next/back/restart navigation.
    public static class EaseTest
    {
        public const int MAX_LAYER = 13;
        public const int kTagAction1 = 1;
        public const int kTagAction2 = 2;
        public const int kTagSlider = 1;
        private static int sceneIdx = -1;

        // Factory: maps a demo index to a new layer instance (null if out of range).
        public static CCLayer createEaseLayer(int nIndex)
        {
            switch (nIndex)
            {
                case 0: return new SpriteEase();
                case 1: return new SpriteEaseInOut();
                case 2: return new SpriteEaseExponential();
                case 3: return new SpriteEaseExponentialInOut();
                case 4: return new SpriteEaseSine();
                case 5: return new SpriteEaseSineInOut();
                case 6: return new SpriteEaseElastic();
                case 7: return new SpriteEaseElasticInOut();
                case 8: return new SpriteEaseBounce();
                case 9: return new SpriteEaseBounceInOut();
                case 10: return new SpriteEaseBack();
                case 11: return new SpriteEaseBackInOut();
                case 12: return new SpeedTest();
            }
            return null;
        }

        // Advances to the next demo, wrapping around at MAX_LAYER.
        public static CCLayer nextEaseAction()
        {
            sceneIdx++;
            sceneIdx %= MAX_LAYER;

            var pLayer = createEaseLayer(sceneIdx);
            return pLayer;
        }

        // Steps back to the previous demo, wrapping below zero.
        public static CCLayer backEaseAction()
        {
            sceneIdx--;
            var total = MAX_LAYER;
            if (sceneIdx < 0)
                sceneIdx += total;

            var pLayer = createEaseLayer(sceneIdx);
            return pLayer;
        }

        // Re-creates the current demo layer without changing the index.
        public static CCLayer restartEaseAction()
        {
            var pLayer = createEaseLayer(sceneIdx);
            return pLayer;
        }
    }
}
// // Copyright 2010, Novell, Inc. // Copyright 2012 - 2013, Xamarin Inc. // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
//

using System;
using System.Reflection;
using System.Collections.Generic;
using System.Runtime.InteropServices;

using MonoMac.ObjCRuntime;

namespace MonoMac.Foundation {
	[StructLayout(LayoutKind.Sequential)]
	public partial class NSObject : INativeObject, IDisposable {
		// The assembly holding the platform bindings; a subclass defined in any
		// other assembly is treated as a user type (see InitializeObject).
		public static readonly Assembly MonoMacAssembly = typeof (NSObject).Assembly;

		// GCHandle for this managed peer, boxed as an IntPtr (IntPtr.Zero = none).
		// Strong while native code may hold the only reference, weak otherwise;
		// see SwitchGCHandle and the retain/release trampolines below.
		IntPtr gchandle;
		// True while managed code holds a reference to the native instance.
		bool has_managed_ref;
		// Guards the one-time creation of the trampoline function pointers.
		static object lock_obj = new object ();

		// Registers the freshly created/wrapped native instance with the runtime
		// and establishes the managed reference. 'alloced' is true when the
		// managed side allocated the native object itself (no extra retain needed).
		private void InitializeObject (bool alloced) {
			if (alloced && handle == IntPtr.Zero && Class.ThrowOnInitFailure) {
				if (ClassHandle == IntPtr.Zero)
					throw new Exception (string.Format ("Could not create an native instance of the type '{0}': the native class hasn't been loaded.\n" +
						"It is possible to ignore this condition by setting MonoMac.ObjCRuntime.Class.ThrowOnInitFailure to false.", GetType ().FullName));
				throw new Exception (string.Format ("Failed to create a instance of the native type '{0}'.\n" +
					"It is possible to ignore this condition by setting MonoMac.ObjCRuntime.Class.ThrowOnInitFailure to false.", new Class (ClassHandle).Name));
			}
			IsDirectBinding = (this.GetType ().Assembly == NSObject.MonoMacAssembly);
			Runtime.RegisterNSObject (this, handle);
			CreateManagedRef (!alloced);
		}

		// Asks the native object for its current retainCount.
		static int GetRetainCount (IntPtr @this) {
			return Messaging.int_objc_msgSend (@this, Selector.RetainCount);
		}

#if DEBUG_REF_COUNTING
		// Debug-only helpers backing the Console.WriteLine tracing below.
		static string GetClassName (IntPtr @this) {
			return new Class (@this).Name;
		}

		static bool HasManagedRef (IntPtr @this) {
			NSObject obj = Runtime.TryGetNSObject (@this);
			return obj != null && obj.has_managed_ref;
		}

		static int GetGCHandle (IntPtr @this) {
			NSObject obj = Runtime.TryGetNSObject (@this);
			return obj != null ? obj.gchandle.ToInt32 () : 0;
		}
#endif

		// Removes the handle -> managed-object mapping kept by the runtime.
		void UnregisterObject () {
			Runtime.NativeObjectHasDied (handle);
		}

		// Frees the GCHandle (if any) that was keeping this managed peer alive.
		void FreeGCHandle () {
			if (gchandle != IntPtr.Zero) {
#if DEBUG_REF_COUNTING
				Console.WriteLine ("\tGCHandle {0} destroyed for object 0x{1}", gchandle, handle.ToString ("x"));
#endif
				GCHandle.FromIntPtr (gchandle).Free ();
				gchandle = IntPtr.Zero;
			} else {
#if DEBUG_REF_COUNTING
				Console.WriteLine ("\tNo GCHandle for the object 0x{0}", handle.ToString ("x"));
#endif
			}
		}

		// Replaces the current GCHandle with a weak or strong one. A weak handle
		// (WeakTrackResurrection) lets the GC collect the peer once managed code
		// drops it; a strong (Normal) handle keeps the peer alive.
		void SwitchGCHandle (bool to_weak) {
			if (gchandle != IntPtr.Zero)
				GCHandle.FromIntPtr (gchandle).Free ();
			if (to_weak) {
				gchandle = GCHandle.ToIntPtr (GCHandle.Alloc (this, GCHandleType.WeakTrackResurrection));
			} else {
				gchandle = GCHandle.ToIntPtr (GCHandle.Alloc (this, GCHandleType.Normal));
			}
		}

		delegate IntPtr RetainTrampolineDelegate (IntPtr @this, IntPtr sel);
		delegate void ReleaseTrampolineDelegate (IntPtr @this, IntPtr sel);

		// Native function pointers for the trampolines, plus the delegates they
		// were created from (kept in static fields so they stay alive).
		static IntPtr RetainTrampolineFunctionPointer;
		static IntPtr ReleaseTrampolineFunctionPointer;
		static RetainTrampolineDelegate retainTrampoline;
		static ReleaseTrampolineDelegate releaseTrampoline;

		// Installs retain/release overrides on a user-defined class so the
		// trampolines below can observe reference-count transitions.
		static internal void OverrideRetainAndRelease (IntPtr @class) {
			// TODO: implement overriding of platform types too.
			lock (lock_obj) {
				if (ReleaseTrampolineFunctionPointer == IntPtr.Zero) {
					retainTrampoline = new RetainTrampolineDelegate (RetainTrampoline);
					releaseTrampoline = new ReleaseTrampolineDelegate (ReleaseTrampoline);
					RetainTrampolineFunctionPointer = Marshal.GetFunctionPointerForDelegate (retainTrampoline);
					ReleaseTrampolineFunctionPointer = Marshal.GetFunctionPointerForDelegate (releaseTrampoline);
				}
			}
			// "@@:" / "v@:" are the Objective-C type encodings for retain/release.
			Class.class_addMethod (@class, Selector.RetainHandle, RetainTrampolineFunctionPointer, "@@:");
			Class.class_addMethod (@class, Selector.ReleaseHandle, ReleaseTrampolineFunctionPointer, "v@:");
		}

		// True when the native instance is of a user-defined (managed) subclass
		// rather than a plain platform type.
		static bool IsUserType (IntPtr @this) {
			IntPtr cls = object_getClass (@this);

			if (Class.class_getMethodImplementation (cls, Selector.RetainHandle) == RetainTrampolineFunctionPointer)
				return true;

			// Unfortunately just checking if the retain trampoline is ours does not always work.
			// Instruments may add its own retain method, intercepting our own, causing this check to fail.
			// http://stackoverflow.com/questions/14324507/nsactiondispatcher-is-garbage-collected-when-instruments-is-attached
			//
			// Check if the class is in our list of custom types instead.
			//
			// TODO: Compare performance with the retain trampoline check above.
			//
			var type = Class.Lookup (cls, false);
			return type != null && Class.IsCustomType (type);
		}

		// Allocates the GCHandle for this peer and marks the managed ref as held.
		void CreateGCHandle (bool force_weak) {
			// force_weak is to avoid calling retainCount unless needed, since some classes (UIWebView in iOS 5)
			// will crash if retainCount is called before init. See bug #9261.
			// retainCount == 1 means managed code holds the only reference, so a
			// weak handle suffices; otherwise the handle must be strong.
			bool weak = force_weak || (GetRetainCount (handle) == 1);
			this.has_managed_ref = true;
			if (weak) {
				gchandle = GCHandle.ToIntPtr (GCHandle.Alloc (this, GCHandleType.WeakTrackResurrection));
			} else {
				gchandle = GCHandle.ToIntPtr (GCHandle.Alloc (this, GCHandleType.Normal));
			}
#if DEBUG_REF_COUNTING
			Console.WriteLine ("\tGCHandle created for 0x{0}: {1} (HasManagedRef: true)", handle.ToString ("x"), gchandle);
#endif
		}

		// Establishes the managed reference for this peer. 'retain' is true when
		// the native object must also be retained (i.e. we didn't alloc it).
		void CreateManagedRef (bool retain) {
			bool user_type = IsUserType (handle);
#if DEBUG_REF_COUNTING
			Console.WriteLine ("CreateManagedRef ({0} Handle=0x{1}) retainCount={2}; HasManagedRef={3} GCHandle={4} IsUserType={5}", GetClassName (handle), handle.ToString ("x"), GetRetainCount (handle), has_managed_ref, gchandle, user_type);
#endif
			if (user_type) {
				if (gchandle == IntPtr.Zero) {
					CreateGCHandle (!retain);
				} else {
#if DEBUG_REF_COUNTING
					Console.WriteLine ("GCHandle already exists for 0x{0}: {1}", handle.ToString ("x"), gchandle);
#endif
				}
			}

			if (retain)
				Messaging.void_objc_msgSend (handle, Selector.RetainHandle);
		}

		// Drops the managed reference and sends release to the native object.
		void ReleaseManagedRef () {
			var handle = this.handle; // local copy of the handle field
			bool user_type = IsUserType (handle);
#if DEBUG_REF_COUNTING
			Console.WriteLine ("ReleaseManagedRef ({0} Handle=0x{1}) retainCount={2}; HasManagedRef={3} GCHandle={4} IsUserType={5}", GetClassName (handle), handle.ToString ("x"), Messaging.int_objc_msgSend (handle, Selector.RetainCount), has_managed_ref, gchandle, user_type);
#endif
			if (user_type) {
				has_managed_ref = false;
			} else {
				/* If we're a wrapper type, we need to unregister here, since we won't enter the release trampoline */
				UnregisterObject ();
			}
			Messaging.void_objc_msgSend (handle, Selector.ReleaseHandle);
		}

		[DllImport ("/usr/lib/libobjc.dylib")]
		static extern IntPtr object_getClass (IntPtr @this);

		[DllImport ("/usr/lib/libobjc.dylib")]
		extern static IntPtr objc_msgSendSuper (ref objc_super super, IntPtr selector);

		// Invokes the implementation of 'sel' the class would have without our
		// override: walks up the superclass chain until the implementation
		// differs, then dispatches through objc_msgSendSuper.
		static IntPtr InvokeObjCMethodImplementation (IntPtr @this, IntPtr sel) {
			objc_super sup;
			IntPtr klass = object_getClass (@this);
			IntPtr sklass = Class.class_getSuperclass (klass);

			IntPtr imp = Class.class_getMethodImplementation (klass, sel);
			IntPtr simp = Class.class_getMethodImplementation (sklass, sel);

			while (imp == simp) {
				sklass = Class.class_getSuperclass (sklass);
				simp = Class.class_getMethodImplementation (sklass, sel);
			}

			sup.receiver = @this;
			sup.super = sklass;
			return objc_msgSendSuper (ref sup, sel);
		}

		// Installed via OverrideRetainAndRelease: runs when a user-type native
		// instance receives 'release'.
		static void ReleaseTrampoline (IntPtr @this, IntPtr sel) {
			int ref_count = Messaging.int_objc_msgSend (@this, Selector.RetainCount);
			NSObject obj = null;

#if DEBUG_REF_COUNTING
			Console.WriteLine ("ReleaseTrampoline ({0} Handle=0x{1}) retainCount={2}; HasManagedRef={3} GCHandle={4}", GetClassName (@this), @this.ToString ("x"), ref_count, HasManagedRef (@this), GetGCHandle (@this));
#endif

			/* Object is about to die. Unregister it and free any gchandles we may have */
			if (ref_count == 1) {
				obj = Runtime.TryGetNSObject (@this);
				if (obj != null) {
					obj.UnregisterObject ();
					obj.FreeGCHandle ();
				} else {
#if DEBUG_REF_COUNTING
					Console.WriteLine ("\tCould not find managed object");
#endif
				}
			}

			/*
			 * We need to decide if the gchandle should become a weak one.
			 * This happens if managed code will end up holding the only ref.
			 */
			if (ref_count == 2) {
				obj = Runtime.TryGetNSObject (@this);
				if (obj != null && obj.has_managed_ref)
					obj.SwitchGCHandle (true /* weak */);
			}

			InvokeObjCMethodImplementation (@this, sel);
		}

		// Installed via OverrideRetainAndRelease: runs when a user-type native
		// instance receives 'retain'.
		static IntPtr RetainTrampoline (IntPtr @this, IntPtr sel) {
			int ref_count = Messaging.int_objc_msgSend (@this, Selector.RetainCount);
			NSObject obj = null;
#if DEBUG_REF_COUNTING
			bool had_managed_ref = HasManagedRef (@this);
			int pre_gchandle = GetGCHandle (@this);
#endif

			/*
			 * We need to decide if the gchandle should become a strong one.
			 * This happens if managed code has a ref, and the current refcount is 1.
			 */
			if (ref_count == 1) {
				obj = Runtime.TryGetNSObject (@this);
				if (obj != null && obj.has_managed_ref)
					obj.SwitchGCHandle (false /* strong */);
			}

			@this = InvokeObjCMethodImplementation (@this, sel);

#if DEBUG_REF_COUNTING
			Console.WriteLine ("RetainTrampoline ({0} Handle=0x{1}) initial retainCount={2}; new retainCount={3} HadManagedRef={4} HasManagedRef={5} old GCHandle={6} new GCHandle={7}", Class.GetName (Messaging.intptr_objc_msgSend (@this, Selector.GetHandle ("class"))), @this.ToString ("x"), ref_count, Messaging.int_objc_msgSend (@this, Selector.RetainCount), had_managed_ref, HasManagedRef (@this), pre_gchandle, GetGCHandle (@this));
#endif
			return @this;
		}

		// Marks this instance as a direct binding (used for proxy objects).
		internal void SetAsProxy () {
			IsDirectBinding = true;
		}
	}
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System; using System.Collections.Generic; using System.Collections.Immutable; using System.Diagnostics; using System.IO; using System.Linq; using System.Threading; using System.Threading.Tasks; using System.Windows.Threading; using Microsoft.CodeAnalysis; using Microsoft.CodeAnalysis.Diagnostics; using Microsoft.CodeAnalysis.Editor.Shared.Options; using Microsoft.CodeAnalysis.Editor.Shared.Utilities; using Microsoft.CodeAnalysis.ErrorReporting; using Microsoft.CodeAnalysis.Host; using Microsoft.CodeAnalysis.Notification; using Microsoft.VisualStudio.ComponentModelHost; using Microsoft.VisualStudio.LanguageServices.Implementation.EditAndContinue; using Microsoft.VisualStudio.LanguageServices.Implementation.TaskList; using Microsoft.VisualStudio.Shell; using Microsoft.VisualStudio.Shell.Interop; using Microsoft.VisualStudio.Text; using Microsoft.VisualStudio.TextManager.Interop; using Microsoft.VisualStudio.Utilities; using Roslyn.Utilities; using Task = System.Threading.Tasks.Task; namespace Microsoft.VisualStudio.LanguageServices.Implementation.ProjectSystem { // NOTE: Microsoft.VisualStudio.LanguageServices.TypeScript.TypeScriptProject derives from AbstractProject. internal abstract partial class AbstractProject : ForegroundThreadAffinitizedObject, IVisualStudioHostProject { internal static object RuleSetErrorId = new object(); private readonly object _gate = new object(); #region Mutable fields accessed from foreground or background threads - need locking for access. 
private readonly List<ProjectReference> _projectReferences = new List<ProjectReference>(); private readonly List<VisualStudioMetadataReference> _metadataReferences = new List<VisualStudioMetadataReference>(); private readonly Dictionary<DocumentId, IVisualStudioHostDocument> _documents = new Dictionary<DocumentId, IVisualStudioHostDocument>(); private readonly Dictionary<string, IVisualStudioHostDocument> _documentMonikers = new Dictionary<string, IVisualStudioHostDocument>(StringComparer.OrdinalIgnoreCase); private readonly Dictionary<string, VisualStudioAnalyzer> _analyzers = new Dictionary<string, VisualStudioAnalyzer>(StringComparer.OrdinalIgnoreCase); private readonly Dictionary<DocumentId, IVisualStudioHostDocument> _additionalDocuments = new Dictionary<DocumentId, IVisualStudioHostDocument>(); /// <summary> /// The list of files which have been added to the project but we aren't tracking since they /// aren't real source files. Sometimes we're asked to add silly things like HTML files or XAML /// files, and if those are open in a strange editor we just bail. /// </summary> private readonly ISet<string> _untrackedDocuments = new HashSet<string>(StringComparer.OrdinalIgnoreCase); /// <summary> /// The path to a metadata reference that was converted to project references. /// </summary> private readonly Dictionary<string, ProjectReference> _metadataFileNameToConvertedProjectReference = new Dictionary<string, ProjectReference>(StringComparer.OrdinalIgnoreCase); private bool _pushingChangesToWorkspaceHosts; #endregion #region Mutable fields accessed only from the foreground thread - does not need locking for access. /// <summary> /// When a reference changes on disk we start a delayed task to update the <see cref="Workspace"/>. /// It is delayed for two reasons: first, there are often a bunch of change notifications in quick succession /// as the file is written. 
Second, we often get the first notification while something is still writing the /// file, so we're unable to actually load it. To avoid both of these issues, we wait five seconds before /// reloading the metadata. This <see cref="Dictionary{TKey, TValue}"/> holds on to /// <see cref="CancellationTokenSource"/>s that allow us to cancel the existing reload task if another file /// change comes in before we process it. /// </summary> private readonly Dictionary<VisualStudioMetadataReference, CancellationTokenSource> _donotAccessDirectlyChangedReferencesPendingUpdate = new Dictionary<VisualStudioMetadataReference, CancellationTokenSource>(); private Dictionary<VisualStudioMetadataReference, CancellationTokenSource> ChangedReferencesPendingUpdate { get { AssertIsForeground(); return _donotAccessDirectlyChangedReferencesPendingUpdate; } } #endregion // PERF: Create these event handlers once to be shared amongst all documents (the sender arg identifies which document and project) private static readonly EventHandler<bool> s_documentOpenedEventHandler = OnDocumentOpened; private static readonly EventHandler<bool> s_documentClosingEventHandler = OnDocumentClosing; private static readonly EventHandler s_documentUpdatedOnDiskEventHandler = OnDocumentUpdatedOnDisk; private static readonly EventHandler<bool> s_additionalDocumentOpenedEventHandler = OnAdditionalDocumentOpened; private static readonly EventHandler<bool> s_additionalDocumentClosingEventHandler = OnAdditionalDocumentClosing; private static readonly EventHandler s_additionalDocumentUpdatedOnDiskEventHandler = OnAdditionalDocumentUpdatedOnDisk; private readonly DiagnosticDescriptor _errorReadingRulesetRule = new DiagnosticDescriptor( id: IDEDiagnosticIds.ErrorReadingRulesetId, title: ServicesVSResources.ErrorReadingRuleset, messageFormat: ServicesVSResources.Error_reading_ruleset_file_0_1, category: FeaturesResources.Roslyn_HostError, defaultSeverity: DiagnosticSeverity.Error, isEnabledByDefault: true); public 
AbstractProject( VisualStudioProjectTracker projectTracker, Func<ProjectId, IVsReportExternalErrors> reportExternalErrorCreatorOpt, string projectSystemName, string projectFilePath, IVsHierarchy hierarchy, string language, Guid projectGuid, IServiceProvider serviceProvider, VisualStudioWorkspaceImpl visualStudioWorkspaceOpt, HostDiagnosticUpdateSource hostDiagnosticUpdateSourceOpt, ICommandLineParserService commandLineParserServiceOpt = null) { Contract.ThrowIfNull(projectSystemName); ServiceProvider = serviceProvider; Language = language; Hierarchy = hierarchy; Guid = projectGuid; var componentModel = (IComponentModel)serviceProvider.GetService(typeof(SComponentModel)); ContentTypeRegistryService = componentModel.GetService<IContentTypeRegistryService>(); this.RunningDocumentTable = (IVsRunningDocumentTable4)serviceProvider.GetService(typeof(SVsRunningDocumentTable)); this.DisplayName = projectSystemName; this.ProjectTracker = projectTracker; ProjectSystemName = projectSystemName; Workspace = visualStudioWorkspaceOpt; CommandLineParserService = commandLineParserServiceOpt; HostDiagnosticUpdateSource = hostDiagnosticUpdateSourceOpt; // Set the default value for last design time build result to be true, until the project system lets us know that it failed. LastDesignTimeBuildSucceeded = true; UpdateProjectDisplayNameAndFilePath(projectSystemName, projectFilePath); if (ProjectFilePath != null) { Version = VersionStamp.Create(File.GetLastWriteTimeUtc(ProjectFilePath)); } else { Version = VersionStamp.Create(); } Id = this.ProjectTracker.GetOrCreateProjectIdForPath(ProjectFilePath ?? 
ProjectSystemName, ProjectSystemName); if (reportExternalErrorCreatorOpt != null) { ExternalErrorReporter = reportExternalErrorCreatorOpt(Id); } if (visualStudioWorkspaceOpt != null) { if (Language == LanguageNames.CSharp || Language == LanguageNames.VisualBasic) { this.EditAndContinueImplOpt = new VsENCRebuildableProjectImpl(this); } this.MetadataService = visualStudioWorkspaceOpt.Services.GetService<IMetadataService>(); } UpdateAssemblyName(); } internal IServiceProvider ServiceProvider { get; } /// <summary> /// Indicates whether this project is a website type. /// </summary> public bool IsWebSite { get; protected set; } /// <summary> /// A full path to the project obj output binary, or null if the project doesn't have an obj output binary. /// </summary> internal string ObjOutputPath { get; private set; } /// <summary> /// A full path to the project bin output binary, or null if the project doesn't have an bin output binary. /// </summary> internal string BinOutputPath { get; private set; } public IRuleSetFile RuleSetFile { get; private set; } protected VisualStudioProjectTracker ProjectTracker { get; } protected IVsRunningDocumentTable4 RunningDocumentTable { get; } protected IVsReportExternalErrors ExternalErrorReporter { get; } internal HostDiagnosticUpdateSource HostDiagnosticUpdateSource { get; } public ProjectId Id { get; } public string Language { get; } private ICommandLineParserService CommandLineParserService { get; } /// <summary> /// The <see cref="IVsHierarchy"/> for this project. NOTE: May be null in Deferred Project Load cases. /// </summary> public IVsHierarchy Hierarchy { get; } /// <summary> /// Guid of the project /// /// it is not readonly since it can be changed while loading project /// </summary> public Guid Guid { get; protected set; } public Workspace Workspace { get; } public VersionStamp Version { get; } public IMetadataService MetadataService { get; } /// <summary> /// The containing directory of the project. 
Null if none exists (consider Venus.) /// </summary> protected string ContainingDirectoryPathOpt { get { var projectFilePath = this.ProjectFilePath; if (projectFilePath != null) { return Path.GetDirectoryName(projectFilePath); } else { return null; } } } /// <summary> /// The full path of the project file. Null if none exists (consider Venus.) /// Note that the project file path might change with project file rename. /// If you need the folder of the project, just use <see cref="ContainingDirectoryPathOpt" /> which doesn't change for a project. /// </summary> public string ProjectFilePath { get; private set; } /// <summary> /// The public display name of the project. This name is not unique and may be shared /// between multiple projects, especially in cases like Venus where the intellisense /// projects will match the name of their logical parent project. /// </summary> public string DisplayName { get; private set; } internal string AssemblyName { get; private set; } /// <summary> /// The name of the project according to the project system. In "regular" projects this is /// equivalent to <see cref="DisplayName"/>, but in Venus cases these will differ. The /// ProjectSystemName is the 2_Default.aspx project name, whereas the regular display name /// matches the display name of the project the user actually sees in the solution explorer. /// These can be assumed to be unique within the Visual Studio workspace. /// </summary> public string ProjectSystemName { get; } protected DocumentProvider DocumentProvider => this.ProjectTracker.DocumentProvider; protected VisualStudioMetadataReferenceManager MetadataReferenceProvider => this.ProjectTracker.MetadataReferenceProvider; protected IContentTypeRegistryService ContentTypeRegistryService { get; } /// <summary> /// Flag indicating if the latest design time build has succeeded for current project state. 
/// </summary> /// <remarks>Default value is true.</remarks> protected bool LastDesignTimeBuildSucceeded { get; private set; } internal VsENCRebuildableProjectImpl EditAndContinueImplOpt { get; private set; } /// <summary> /// Override this method to validate references when creating <see cref="ProjectInfo"/> for current state. /// By default, this method does nothing. /// </summary> protected virtual void ValidateReferences() { } public ProjectInfo CreateProjectInfoForCurrentState() { ValidateReferences(); lock (_gate) { var info = ProjectInfo.Create( this.Id, this.Version, this.DisplayName, this.AssemblyName ?? this.ProjectSystemName, this.Language, filePath: this.ProjectFilePath, outputFilePath: this.ObjOutputPath, compilationOptions: this.CurrentCompilationOptions, parseOptions: this.CurrentParseOptions, documents: _documents.Values.Select(d => d.GetInitialState()), metadataReferences: _metadataReferences.Select(r => r.CurrentSnapshot), projectReferences: _projectReferences, analyzerReferences: _analyzers.Values.Select(a => a.GetReference()), additionalDocuments: _additionalDocuments.Values.Select(d => d.GetInitialState())); return info.WithHasAllInformation(hasAllInformation: LastDesignTimeBuildSucceeded); } } protected void SetIntellisenseBuildResultAndNotifyWorkspaceHosts(bool succeeded) { // set intellisense related info LastDesignTimeBuildSucceeded = succeeded; if (PushingChangesToWorkspaceHosts) { // set workspace reference info ProjectTracker.NotifyWorkspaceHosts(host => (host as IVisualStudioWorkspaceHost2)?.OnHasAllInformation(Id, succeeded)); } } protected ImmutableArray<string> GetStrongNameKeyPaths() { var outputPath = this.ObjOutputPath; if (this.ContainingDirectoryPathOpt == null && outputPath == null) { return ImmutableArray<string>.Empty; } var builder = ArrayBuilder<string>.GetInstance(); if (this.ContainingDirectoryPathOpt != null) { builder.Add(this.ContainingDirectoryPathOpt); } if (outputPath != null) { 
builder.Add(Path.GetDirectoryName(outputPath)); } return builder.ToImmutableAndFree(); } public ImmutableArray<ProjectReference> GetCurrentProjectReferences() { lock (_gate) { return ImmutableArray.CreateRange(_projectReferences); } } public ImmutableArray<VisualStudioMetadataReference> GetCurrentMetadataReferences() { lock (_gate) { return ImmutableArray.CreateRange(_metadataReferences); } } public ImmutableArray<VisualStudioAnalyzer> GetCurrentAnalyzers() { lock (_gate) { return ImmutableArray.CreateRange(_analyzers.Values); } } public IVisualStudioHostDocument GetDocumentOrAdditionalDocument(DocumentId id) { IVisualStudioHostDocument doc; lock (_gate) { _documents.TryGetValue(id, out doc); if (doc == null) { _additionalDocuments.TryGetValue(id, out doc); } return doc; } } public ImmutableArray<IVisualStudioHostDocument> GetCurrentDocuments() { lock (_gate) { return _documents.Values.ToImmutableArrayOrEmpty(); } } public ImmutableArray<IVisualStudioHostDocument> GetCurrentAdditionalDocuments() { lock (_gate) { return _additionalDocuments.Values.ToImmutableArrayOrEmpty(); } } public bool ContainsFile(string moniker) { lock (_gate) { return _documentMonikers.ContainsKey(moniker); } } public IVisualStudioHostDocument GetCurrentDocumentFromPath(string filePath) { lock (_gate) { IVisualStudioHostDocument document; _documentMonikers.TryGetValue(filePath, out document); return document; } } public bool HasMetadataReference(string filename) { lock (_gate) { return _metadataReferences.Any(r => StringComparer.OrdinalIgnoreCase.Equals(r.FilePath, filename)); } } public VisualStudioMetadataReference TryGetCurrentMetadataReference(string filename) { // We must normalize the file path, since the paths we're comparing to are always normalized filename = FileUtilities.NormalizeAbsolutePath(filename); lock (_gate) { return _metadataReferences.SingleOrDefault(r => StringComparer.OrdinalIgnoreCase.Equals(r.FilePath, filename)); } } private void 
AddMetadataFileNameToConvertedProjectReference(string filePath, ProjectReference projectReference) { lock (_gate) { _metadataFileNameToConvertedProjectReference.Add(filePath, projectReference); } } private void UpdateMetadataFileNameToConvertedProjectReference(string filePath, ProjectReference projectReference) { lock (_gate) { _metadataFileNameToConvertedProjectReference[filePath] = projectReference; } } private bool RemoveMetadataFileNameToConvertedProjectReference(string filePath) { lock (_gate) { return _metadataFileNameToConvertedProjectReference.Remove(filePath); } } private bool TryGetMetadataFileNameToConvertedProjectReference(string filePath, out ProjectReference projectReference) { lock (_gate) { return _metadataFileNameToConvertedProjectReference.TryGetValue(filePath, out projectReference); } } private bool HasMetadataFileNameToConvertedProjectReference(string filePath) { lock (_gate) { return _metadataFileNameToConvertedProjectReference.ContainsKey(filePath); } } public bool CurrentProjectReferencesContains(ProjectId projectId) { lock (_gate) { return _projectReferences.Any(r => r.ProjectId == projectId); } } private bool TryGetAnalyzer(string analyzerAssemblyFullPath, out VisualStudioAnalyzer analyzer) { lock (_gate) { return _analyzers.TryGetValue(analyzerAssemblyFullPath, out analyzer); } } private void AddOrUpdateAnalyzer(string analyzerAssemblyFullPath, VisualStudioAnalyzer analyzer) { lock (_gate) { _analyzers[analyzerAssemblyFullPath] = analyzer; } } private void RemoveAnalyzer(string analyzerAssemblyFullPath) { lock (_gate) { _analyzers.Remove(analyzerAssemblyFullPath); } } public bool CurrentProjectAnalyzersContains(string fullPath) { lock (_gate) { return _analyzers.ContainsKey(fullPath); } } /// <summary> /// Returns a map from full path to <see cref="VisualStudioAnalyzer"/>. 
/// </summary>
public ImmutableDictionary<string, VisualStudioAnalyzer> GetProjectAnalyzersMap()
{
    lock (_gate)
    {
        return _analyzers.ToImmutableDictionary();
    }
}

/// <summary>
/// Derives an assembly name from an output path: strips the extension only for the
/// known output extensions, otherwise returns the file name unchanged.
/// </summary>
private static string GetAssemblyNameFromPath(string outputPath)
{
    Contract.Requires(outputPath != null);

    // dev11 sometimes gives us output path w/o extension, so removing extension becomes problematic
    if (outputPath.EndsWith(".exe", StringComparison.OrdinalIgnoreCase) ||
        outputPath.EndsWith(".dll", StringComparison.OrdinalIgnoreCase) ||
        outputPath.EndsWith(".netmodule", StringComparison.OrdinalIgnoreCase) ||
        outputPath.EndsWith(".winmdobj", StringComparison.OrdinalIgnoreCase))
    {
        return Path.GetFileNameWithoutExtension(outputPath);
    }
    else
    {
        return Path.GetFileName(outputPath);
    }
}

/// <summary>
/// Whether metadata references that point at another project's output may be converted
/// into project-to-project references. Controlled by an internal feature option.
/// </summary>
protected bool CanConvertToProjectReferences
{
    get
    {
        if (this.Workspace != null)
        {
            return this.Workspace.Options.GetOption(InternalFeatureOnOffOptions.ProjectReferenceConversion);
        }
        else
        {
            return InternalFeatureOnOffOptions.ProjectReferenceConversion.DefaultValue;
        }
    }
}

/// <summary>
/// Adds a metadata reference for the given file; if the file is the output of another
/// project in the solution, a project-to-project reference is added instead.
/// Always returns <c>VSConstants.S_OK</c> — see the comments below for why.
/// </summary>
protected int AddMetadataReferenceAndTryConvertingToProjectReferenceIfPossible(string filePath, MetadataReferenceProperties properties)
{
    // If this file is coming from a project, then we should convert it to a project reference instead
    AbstractProject project;
    if (this.CanConvertToProjectReferences && ProjectTracker.TryGetProjectByBinPath(filePath, out project))
    {
        var projectReference = new ProjectReference(project.Id, properties.Aliases, properties.EmbedInteropTypes);
        if (CanAddProjectReference(projectReference))
        {
            AddProjectReference(projectReference);
            AddMetadataFileNameToConvertedProjectReference(filePath, projectReference);
            return VSConstants.S_OK;
        }
    }

    // regardless whether the file exists or not, we still record it. one of reason
    // we do that is some cross language p2p references might be resolved
    // after they are already reported as metadata references. since we use bin path
    // as a way to discover them, if we don't previously record the reference ourselves,
    // cross p2p references won't be resolved as p2p references when we finally have
    // all required information.
    //
    // it looks like
    // 1. project system sometimes won't guarantee build dependency for intellisense build
    //    if it is cross language dependency
    // 2. output path of referenced cross language project might be changed to right one
    //    once it is already added as a metadata reference.
    //
    // but this has one consequence. even if a user adds a project in the solution as
    // a metadata reference explicitly, that dll will be automatically converted back to p2p
    // reference.
    //
    // unfortunately there is no way to prevent this using information we have since,
    // at this point, we don't know whether it is a metadata reference added because
    // we don't have enough information yet for p2p reference or user explicitly added it
    // as a metadata reference.
    AddMetadataReferenceCore(this.MetadataReferenceProvider.CreateMetadataReference(this, filePath, properties));

    // here, we change behavior compared to old C# language service. regardless of file being exist or not,
    // we will always return S_OK. this is to support cross language p2p reference better.
    //
    // this should make project system to cache all cross language p2p references regardless
    // whether it actually exist in disk or not.
    // (see Roslyn bug 7315 for history - http://vstfdevdiv:8080/DevDiv_Projects/Roslyn/_workitems?_a=edit&id=7315)
    //
    // after this point, Roslyn will take care of non-exist metadata reference.
    //
    // But, this doesn't solve the issue where actual metadata reference
    // (not cross language p2p reference) is missing at the time project is opened.
    //
    // in that case, msbuild filter those actual metadata references out, so project system doesn't know
    // path to the reference. since it doesn't know where dll is, it can't (or currently doesn't)
    // setup file change notification either to find out when dll becomes available.
    //
    // at this point, user has 2 ways to recover missing metadata reference once it becomes available.
    //
    // one way is explicitly clicking that missing reference from solution explorer reference node.
    // the other is building the project. at that point, project system will refresh references
    // which will discover new dll and connect to us. once it is connected, we will take care of it.
    return VSConstants.S_OK;
}

/// <summary>
/// Removes the reference previously recorded for this file path — either the converted
/// project reference or the plain metadata reference, whichever was recorded.
/// </summary>
protected void RemoveMetadataReference(string filePath)
{
    // Is this a reference we converted to a project reference?
    ProjectReference projectReference;
    if (TryGetMetadataFileNameToConvertedProjectReference(filePath, out projectReference))
    {
        // We converted this, so remove the project reference instead
        RemoveProjectReference(projectReference);
        Contract.ThrowIfFalse(RemoveMetadataFileNameToConvertedProjectReference(filePath));
    }

    // Just a metadata reference, so remove all of those
    var referenceToRemove = TryGetCurrentMetadataReference(filePath);
    if (referenceToRemove != null)
    {
        RemoveMetadataReferenceCore(referenceToRemove, disposeReference: true);
    }
}

/// <summary>
/// Adds a metadata reference to local state, notifies workspace hosts when pushing,
/// and subscribes to on-disk update notifications for the file.
/// </summary>
private void AddMetadataReferenceCore(VisualStudioMetadataReference reference)
{
    lock (_gate)
    {
        _metadataReferences.Add(reference);
    }

    if (_pushingChangesToWorkspaceHosts)
    {
        var snapshot = reference.CurrentSnapshot;
        this.ProjectTracker.NotifyWorkspaceHosts(host => host.OnMetadataReferenceAdded(this.Id, snapshot));
    }

    reference.UpdatedOnDisk += OnImportChanged;
}

/// <summary>
/// Removes a metadata reference from local state, notifies workspace hosts when pushing,
/// unsubscribes from update notifications, and optionally disposes the reference.
/// </summary>
private void RemoveMetadataReferenceCore(VisualStudioMetadataReference reference, bool disposeReference)
{
    lock (_gate)
    {
        _metadataReferences.Remove(reference);
    }

    if (_pushingChangesToWorkspaceHosts)
    {
        var snapshot = reference.CurrentSnapshot;
        this.ProjectTracker.NotifyWorkspaceHosts(host => host.OnMetadataReferenceRemoved(this.Id, snapshot));
    }

    reference.UpdatedOnDisk -= OnImportChanged;

    if
(disposeReference)
    {
        reference.Dispose();
    }
}

/// <summary>
/// Called when a referenced metadata file changes on disk.
/// </summary>
private void OnImportChanged(object sender, EventArgs e)
{
    AssertIsForeground();

    VisualStudioMetadataReference reference = (VisualStudioMetadataReference)sender;

    // Debounce: cancel any update already scheduled for this reference, then schedule a fresh one.
    CancellationTokenSource delayTaskCancellationTokenSource;
    if (ChangedReferencesPendingUpdate.TryGetValue(reference, out delayTaskCancellationTokenSource))
    {
        delayTaskCancellationTokenSource.Cancel();
    }

    delayTaskCancellationTokenSource = new CancellationTokenSource();
    ChangedReferencesPendingUpdate[reference] = delayTaskCancellationTokenSource;

    // Runs OnImportChangedAfterDelay back on the current (foreground) synchronization context
    // after a 5 second delay; the 'task' local itself is otherwise unused.
    var task = Task.Delay(TimeSpan.FromSeconds(5), delayTaskCancellationTokenSource.Token)
        .ContinueWith(
            OnImportChangedAfterDelay,
            reference,
            delayTaskCancellationTokenSource.Token,
            TaskContinuationOptions.None,
            TaskScheduler.FromCurrentSynchronizationContext());
}

/// <summary>
/// Runs after the debounce delay: refreshes the reference snapshot by removing and
/// re-adding the metadata reference, if this project still references the binary.
/// </summary>
private void OnImportChangedAfterDelay(Task previous, object state)
{
    AssertIsForeground();

    var reference = (VisualStudioMetadataReference)state;
    ChangedReferencesPendingUpdate.Remove(reference);

    lock (_gate)
    {
        // Ensure that we are still referencing this binary
        if (_metadataReferences.Contains(reference))
        {
            // remove the old metadata reference
            this.RemoveMetadataReferenceCore(reference, disposeReference: false);

            // Signal to update the underlying reference snapshot
            reference.UpdateSnapshot();

            // add it back (it will now be based on the new file contents)
            this.AddMetadataReferenceCore(reference);
        }
    }
}

private void OnAnalyzerChanged(object sender, EventArgs e)
{
    // Postpone handler's actions to prevent deadlock. This AnalyzeChanged event can
    // be invoked while the FileChangeService lock is held, and VisualStudioAnalyzer's
    // efforts to listen to file changes can lead to a deadlock situation.
    // Postponing the VisualStudioAnalyzer operations gives this thread the opportunity
    // to release the lock.
    Dispatcher.CurrentDispatcher.BeginInvoke(new Action(() =>
    {
        VisualStudioAnalyzer analyzer = (VisualStudioAnalyzer)sender;

        RemoveAnalyzerReference(analyzer.FullPath);
        AddAnalyzerReference(analyzer.FullPath);
    }));
}

// Internal for unit testing
internal void AddProjectReference(ProjectReference projectReference)
{
    // dev11 is sometimes calling us multiple times for the same data
    if (!CanAddProjectReference(projectReference))
    {
        return;
    }

    lock (_gate)
    {
        // always manipulate current state after workspace is told so it will correctly observe the initial state
        _projectReferences.Add(projectReference);
    }

    if (_pushingChangesToWorkspaceHosts)
    {
        // This project is already pushed to listening workspace hosts, but it's possible that our target
        // project hasn't been yet. Get the dependent project into the workspace as well.
        var targetProject = this.ProjectTracker.GetProject(projectReference.ProjectId);
        this.ProjectTracker.StartPushingToWorkspaceAndNotifyOfOpenDocuments(SpecializedCollections.SingletonEnumerable(targetProject));

        this.ProjectTracker.NotifyWorkspaceHosts(host => host.OnProjectReferenceAdded(this.Id, projectReference));
    }
}

/// <summary>
/// A reference can be added when it is not a self-reference, is not already present,
/// and would not create a cycle in the project-reference graph.
/// </summary>
protected bool CanAddProjectReference(ProjectReference projectReference)
{
    if (projectReference.ProjectId == this.Id)
    {
        // cannot self reference
        return false;
    }

    lock (_gate)
    {
        if (_projectReferences.Contains(projectReference))
        {
            // already have this reference
            return false;
        }
    }

    var project = this.ProjectTracker.GetProject(projectReference.ProjectId);
    if (project != null)
    {
        // cannot add a reference to a project that references us (it would make a cycle)
        return !project.TransitivelyReferences(this.Id);
    }

    return true;
}

private bool TransitivelyReferences(ProjectId projectId)
{
    return TransitivelyReferencesWorker(projectId, new HashSet<ProjectId>());
}

/// <summary>
/// Depth-first walk over the reference graph; <paramref name="visited"/> guards against cycles.
/// </summary>
private bool TransitivelyReferencesWorker(ProjectId projectId, HashSet<ProjectId> visited)
{
    visited.Add(this.Id);

    foreach (var pr in GetCurrentProjectReferences())
    {
        if (projectId == pr.ProjectId)
        {
return true;
        }

        if (!visited.Contains(pr.ProjectId))
        {
            var project = this.ProjectTracker.GetProject(pr.ProjectId);
            if (project != null)
            {
                if (project.TransitivelyReferencesWorker(projectId, visited))
                {
                    return true;
                }
            }
        }
    }

    return false;
}

protected void RemoveProjectReference(ProjectReference projectReference)
{
    lock (_gate)
    {
        Contract.ThrowIfFalse(_projectReferences.Remove(projectReference));
    }

    if (_pushingChangesToWorkspaceHosts)
    {
        this.ProjectTracker.NotifyWorkspaceHosts(host => host.OnProjectReferenceRemoved(this.Id, projectReference));
    }
}

/// <summary>
/// Document-opened handler: forwards to workspace hosts when the project is already
/// pushing changes; otherwise starts pushing for this project.
/// </summary>
private static void OnDocumentOpened(object sender, bool isCurrentContext)
{
    IVisualStudioHostDocument document = (IVisualStudioHostDocument)sender;
    AbstractProject project = (AbstractProject)document.Project;

    if (project._pushingChangesToWorkspaceHosts)
    {
        project.ProjectTracker.NotifyWorkspaceHosts(host => host.OnDocumentOpened(document.Id, document.GetOpenTextBuffer(), isCurrentContext));
    }
    else
    {
        StartPushingToWorkspaceAndNotifyOfOpenDocuments(project);
    }
}

private static void OnDocumentClosing(object sender, bool updateActiveContext)
{
    IVisualStudioHostDocument document = (IVisualStudioHostDocument)sender;
    AbstractProject project = (AbstractProject)document.Project;
    var projectTracker = project.ProjectTracker;

    if (project._pushingChangesToWorkspaceHosts)
    {
        projectTracker.NotifyWorkspaceHosts(host => host.OnDocumentClosed(document.Id, document.GetOpenTextBuffer(), document.Loader, updateActiveContext));
    }
}

private static void OnDocumentUpdatedOnDisk(object sender, EventArgs e)
{
    IVisualStudioHostDocument document = (IVisualStudioHostDocument)sender;
    AbstractProject project = (AbstractProject)document.Project;

    if (project._pushingChangesToWorkspaceHosts)
    {
        project.ProjectTracker.NotifyWorkspaceHosts(host => host.OnDocumentTextUpdatedOnDisk(document.Id));
    }
}

/// <summary>
/// Additional-document counterpart of <see cref="OnDocumentOpened"/>.
/// </summary>
private static void OnAdditionalDocumentOpened(object sender, bool isCurrentContext)
{
    IVisualStudioHostDocument document = (IVisualStudioHostDocument)sender;
    AbstractProject project = (AbstractProject)document.Project;

    if (project._pushingChangesToWorkspaceHosts)
    {
        project.ProjectTracker.NotifyWorkspaceHosts(host => host.OnAdditionalDocumentOpened(document.Id, document.GetOpenTextBuffer(), isCurrentContext));
    }
    else
    {
        StartPushingToWorkspaceAndNotifyOfOpenDocuments(project);
    }
}

private static void OnAdditionalDocumentClosing(object sender, bool notUsed)
{
    IVisualStudioHostDocument document = (IVisualStudioHostDocument)sender;
    AbstractProject project = (AbstractProject)document.Project;
    var projectTracker = project.ProjectTracker;

    if (project._pushingChangesToWorkspaceHosts)
    {
        projectTracker.NotifyWorkspaceHosts(host => host.OnAdditionalDocumentClosed(document.Id, document.GetOpenTextBuffer(), document.Loader));
    }
}

private static void OnAdditionalDocumentUpdatedOnDisk(object sender, EventArgs e)
{
    IVisualStudioHostDocument document = (IVisualStudioHostDocument)sender;
    AbstractProject project = (AbstractProject)document.Project;

    if (project._pushingChangesToWorkspaceHosts)
    {
        project.ProjectTracker.NotifyWorkspaceHosts(host => host.OnAdditionalDocumentTextUpdatedOnDisk(document.Id));
    }
}

/// <summary>
/// Registers a file with this project by creating a host document for it. Files open in
/// editors we cannot attach to are recorded as untracked instead.
/// </summary>
protected void AddFile(
    string filename,
    SourceCodeKind sourceCodeKind,
    Func<IVisualStudioHostDocument, bool> getIsCurrentContext,
    Func<uint, IReadOnlyList<string>> getFolderNames)
{
    // We can currently be on a background thread.
    // So, hookup the handlers when creating the standard text document, as we might receive these handler notifications on the UI thread.
    var document = this.DocumentProvider.TryGetDocumentForFile(
        this,
        filePath: filename,
        sourceCodeKind: sourceCodeKind,
        getFolderNames: getFolderNames,
        canUseTextBuffer: CanUseTextBuffer,
        updatedOnDiskHandler: s_documentUpdatedOnDiskEventHandler,
        openedHandler: s_documentOpenedEventHandler,
        closingHandler: s_documentClosingEventHandler);

    if (document == null)
    {
        // It's possible this file is open in some very strange editor. In that case, we'll just ignore it.
// This might happen if somebody decides to mark a non-source-file as something to compile.
        // TODO: Venus does this for .aspx/.cshtml files which is completely unnecessary for Roslyn. We should remove that code.
        AddUntrackedFile(filename);
        return;
    }

    AddDocument(document, getIsCurrentContext(document), hookupHandlers: false);
}

/// <summary>
/// Whether the given text buffer may back a host document for this project.
/// Always true here; derived projects may override to reject buffers.
/// </summary>
protected virtual bool CanUseTextBuffer(ITextBuffer textBuffer)
{
    return true;
}

/// <summary>
/// Records a file that belongs to the project but is not tracked as a document.
/// </summary>
protected void AddUntrackedFile(string filename)
{
    lock (_gate)
    {
        _untrackedDocuments.Add(filename);
    }
}

/// <summary>
/// Removes a file from the project: untracked files are simply forgotten; tracked files
/// must have a current document, otherwise an <see cref="InvalidOperationException"/> is thrown.
/// </summary>
protected void RemoveFile(string filename)
{
    lock (_gate)
    {
        // Remove this as an untracked file, if it is
        if (_untrackedDocuments.Remove(filename))
        {
            return;
        }
    }

    IVisualStudioHostDocument document = this.GetCurrentDocumentFromPath(filename);
    if (document == null)
    {
        // FIX: the message previously read "not a part of the finalProject" — a stray
        // identifier introduced by a bad rename; restore a readable user-facing message.
        throw new InvalidOperationException("The document is not a part of the project.");
    }

    RemoveDocument(document);
}

internal void AddDocument(IVisualStudioHostDocument document, bool isCurrentContext, bool hookupHandlers)
{
    // We do not want to allow message pumping/reentrancy when processing project system changes.
using (Dispatcher.CurrentDispatcher.DisableProcessing())
    {
        lock (_gate)
        {
            _documents.Add(document.Id, document);
            _documentMonikers.Add(document.Key.Moniker, document);
        }

        if (_pushingChangesToWorkspaceHosts)
        {
            this.ProjectTracker.NotifyWorkspaceHosts(host => host.OnDocumentAdded(document.GetInitialState()));

            if (document.IsOpen)
            {
                this.ProjectTracker.NotifyWorkspaceHosts(host => host.OnDocumentOpened(document.Id, document.GetOpenTextBuffer(), isCurrentContext));
            }
        }

        if (hookupHandlers)
        {
            document.Opened += s_documentOpenedEventHandler;
            document.Closing += s_documentClosingEventHandler;
            document.UpdatedOnDisk += s_documentUpdatedOnDiskEventHandler;
        }

        DocumentProvider.NotifyDocumentRegisteredToProjectAndStartToRaiseEvents(document);

        if (!_pushingChangesToWorkspaceHosts && document.IsOpen)
        {
            StartPushingToWorkspaceAndNotifyOfOpenDocuments();
        }
    }
}

/// <summary>
/// Removes a document from the project and tears its state down.
/// </summary>
internal void RemoveDocument(IVisualStudioHostDocument document)
{
    // We do not want to allow message pumping/reentrancy when processing project system changes.
    using (Dispatcher.CurrentDispatcher.DisableProcessing())
    {
        lock (_gate)
        {
            _documents.Remove(document.Id);
            _documentMonikers.Remove(document.Key.Moniker);
        }

        UninitializeDocument(document);
        OnDocumentRemoved(document.Key.Moniker);
    }
}

/// <summary>
/// Adds an additional (non-source) document; mirrors <see cref="AddDocument"/> but never hooks handlers.
/// </summary>
internal void AddAdditionalDocument(IVisualStudioHostDocument document, bool isCurrentContext)
{
    lock (_gate)
    {
        _additionalDocuments.Add(document.Id, document);
        _documentMonikers.Add(document.Key.Moniker, document);
    }

    if (_pushingChangesToWorkspaceHosts)
    {
        this.ProjectTracker.NotifyWorkspaceHosts(host => host.OnAdditionalDocumentAdded(document.GetInitialState()));

        if (document.IsOpen)
        {
            this.ProjectTracker.NotifyWorkspaceHosts(host => host.OnAdditionalDocumentOpened(document.Id, document.GetOpenTextBuffer(), isCurrentContext));
        }
    }

    DocumentProvider.NotifyDocumentRegisteredToProjectAndStartToRaiseEvents(document);

    if (!_pushingChangesToWorkspaceHosts && document.IsOpen)
    {
        StartPushingToWorkspaceAndNotifyOfOpenDocuments();
    }
}

internal void RemoveAdditionalDocument(IVisualStudioHostDocument document)
{
    lock (_gate)
    {
        _additionalDocuments.Remove(document.Id);
        _documentMonikers.Remove(document.Key.Moniker);
    }

    UninitializeAdditionalDocument(document);
}

/// <summary>
/// Disconnects the project from the host: cancels pending reference reloads, disposes
/// documents/references/analyzers, clears reported errors, and removes the project from the tracker.
/// </summary>
public virtual void Disconnect()
{
    AssertIsForeground();

    using (Workspace?.Services.GetService<IGlobalOperationNotificationService>()?.Start("Disconnect Project"))
    {
        lock (_gate)
        {
            // No sense in reloading any metadata references anymore.
            foreach (var cancellationTokenSource in ChangedReferencesPendingUpdate.Values)
            {
                cancellationTokenSource.Cancel();
            }

            ChangedReferencesPendingUpdate.Clear();

            var wasPushing = _pushingChangesToWorkspaceHosts;

            // disable pushing down to workspaces, so we don't get redundant workspace document removed events
            _pushingChangesToWorkspaceHosts = false;

            // The project is going away, so let's remove ourselves from the host. First, we
            // close and dispose of any remaining documents
            foreach (var document in _documents.Values)
            {
                UninitializeDocument(document);
            }

            foreach (var document in _additionalDocuments.Values)
            {
                UninitializeAdditionalDocument(document);
            }

            // Dispose metadata references.
            foreach (var reference in _metadataReferences)
            {
                reference.Dispose();
            }

            foreach (var analyzer in _analyzers.Values)
            {
                analyzer.Dispose();
            }

            // Make sure we clear out any external errors left when closing the project.
            ExternalErrorReporter?.ClearAllErrors();

            // Make sure we clear out any host errors left when closing the project.
            HostDiagnosticUpdateSource?.ClearAllDiagnosticsForProject(this.Id);

            ClearAnalyzerRuleSet();

            // reinstate pushing down to workspace, so the workspace project remove event fires
            _pushingChangesToWorkspaceHosts = wasPushing;

            this.ProjectTracker.RemoveProject(this);

            _pushingChangesToWorkspaceHosts = false;

            this.EditAndContinueImplOpt = null;
        }
    }
}

/// <summary>
/// Called when a new output path appears for another project: if a plain metadata
/// reference already points at it, convert that reference to a project reference.
/// </summary>
internal void TryProjectConversionForIntroducedOutputPath(string binPath, AbstractProject projectToReference)
{
    if (this.CanConvertToProjectReferences)
    {
        // We should not already have references for this, since we're only introducing the path for the first time
        Contract.ThrowIfTrue(HasMetadataFileNameToConvertedProjectReference(binPath));

        var metadataReference = TryGetCurrentMetadataReference(binPath);
        if (metadataReference != null)
        {
            var projectReference = new ProjectReference(
                projectToReference.Id,
                metadataReference.Properties.Aliases,
                metadataReference.Properties.EmbedInteropTypes);

            if (CanAddProjectReference(projectReference))
            {
                RemoveMetadataReferenceCore(metadataReference, disposeReference: true);
                AddProjectReference(projectReference);
                AddMetadataFileNameToConvertedProjectReference(binPath, projectReference);
            }
        }
    }
}

/// <summary>
/// Reverses a previous bin-path-based conversion when the output path disappears.
/// </summary>
internal void UndoProjectReferenceConversionForDisappearingOutputPath(string binPath)
{
    ProjectReference projectReference;
    if (TryGetMetadataFileNameToConvertedProjectReference(binPath, out projectReference))
    {
        // We
// converted this, so convert it back to a metadata reference
        // (NOTE(review): the leading "We" of this comment sits at the end of the previous line.)
        RemoveProjectReference(projectReference);

        var metadataReferenceProperties = new MetadataReferenceProperties(
            MetadataImageKind.Assembly,
            projectReference.Aliases,
            projectReference.EmbedInteropTypes);

        AddMetadataReferenceCore(MetadataReferenceProvider.CreateMetadataReference(this, binPath, metadataReferenceProperties));

        Contract.ThrowIfFalse(RemoveMetadataFileNameToConvertedProjectReference(binPath));
    }
}

/// <summary>
/// Updates the aliases on the reference recorded for this file, whether it is currently
/// a converted project reference or a plain metadata reference.
/// </summary>
protected void UpdateMetadataReferenceAliases(string file, ImmutableArray<string> aliases)
{
    file = FileUtilities.NormalizeAbsolutePath(file);

    // Have we converted these to project references?
    ProjectReference convertedProjectReference;
    if (TryGetMetadataFileNameToConvertedProjectReference(file, out convertedProjectReference))
    {
        var project = ProjectTracker.GetProject(convertedProjectReference.ProjectId);
        UpdateProjectReferenceAliases(project, aliases);
    }
    else
    {
        var existingReference = TryGetCurrentMetadataReference(file);
        Contract.ThrowIfNull(existingReference);

        var newProperties = existingReference.Properties.WithAliases(aliases);

        RemoveMetadataReferenceCore(existingReference, disposeReference: true);
        AddMetadataReferenceCore(this.MetadataReferenceProvider.CreateMetadataReference(this, file, newProperties));
    }
}

/// <summary>
/// Replaces the project reference to <paramref name="referencedProject"/> with one carrying
/// the new aliases, keeping the converted-reference bookkeeping in sync.
/// Single() will throw if the project is not currently referenced exactly once.
/// </summary>
protected void UpdateProjectReferenceAliases(AbstractProject referencedProject, ImmutableArray<string> aliases)
{
    var projectReference = GetCurrentProjectReferences().Single(r => r.ProjectId == referencedProject.Id);
    var newProjectReference = new ProjectReference(referencedProject.Id, aliases, projectReference.EmbedInteropTypes);

    // Is this a project with converted references? If so, make sure we track it
    string referenceBinPath = referencedProject.BinOutputPath;
    if (referenceBinPath != null && HasMetadataFileNameToConvertedProjectReference(referenceBinPath))
    {
        UpdateMetadataFileNameToConvertedProjectReference(referenceBinPath, newProjectReference);
    }

    // Remove the existing reference first
    RemoveProjectReference(projectReference);
    AddProjectReference(newProjectReference);
}

/// <summary>
/// Closes out a document: notifies hosts (if pushing), unhooks event handlers, disposes it.
/// </summary>
private void UninitializeDocument(IVisualStudioHostDocument document)
{
    if (_pushingChangesToWorkspaceHosts)
    {
        if (document.IsOpen)
        {
            this.ProjectTracker.NotifyWorkspaceHosts(host => host.OnDocumentClosed(document.Id, document.GetOpenTextBuffer(), document.Loader, updateActiveContext: true));
        }

        this.ProjectTracker.NotifyWorkspaceHosts(host => host.OnDocumentRemoved(document.Id));
    }

    document.Opened -= s_documentOpenedEventHandler;
    document.Closing -= s_documentClosingEventHandler;
    document.UpdatedOnDisk -= s_documentUpdatedOnDiskEventHandler;

    document.Dispose();
}

private void UninitializeAdditionalDocument(IVisualStudioHostDocument document)
{
    if (_pushingChangesToWorkspaceHosts)
    {
        if (document.IsOpen)
        {
            this.ProjectTracker.NotifyWorkspaceHosts(host => host.OnAdditionalDocumentClosed(document.Id, document.GetOpenTextBuffer(), document.Loader));
        }

        this.ProjectTracker.NotifyWorkspaceHosts(host => host.OnAdditionalDocumentRemoved(document.Id));
    }

    document.Opened -= s_additionalDocumentOpenedEventHandler;
    document.Closing -= s_additionalDocumentClosingEventHandler;
    document.UpdatedOnDisk -= s_additionalDocumentUpdatedOnDiskEventHandler;

    document.Dispose();
}

// Hook for derived projects; invoked after a document is removed from this project.
protected virtual void OnDocumentRemoved(string filePath)
{
}

internal void StartPushingToWorkspaceHosts()
{
    _pushingChangesToWorkspaceHosts = true;
}

internal void StopPushingToWorkspaceHosts()
{
    _pushingChangesToWorkspaceHosts = false;
}

internal void StartPushingToWorkspaceAndNotifyOfOpenDocuments()
{
    StartPushingToWorkspaceAndNotifyOfOpenDocuments(this);
}

internal bool PushingChangesToWorkspaceHosts
{
    get
    {
        return _pushingChangesToWorkspaceHosts;
    }
}

/// <summary>
/// Reports (or clears) a diagnostic describing a rule-set file that failed to load.
/// </summary>
protected void UpdateRuleSetError(IRuleSetFile ruleSetFile)
{
    if (this.HostDiagnosticUpdateSource == null)
    {
        return;
    }

    if (ruleSetFile == null ||
        ruleSetFile.GetException() == null)
    {
        this.HostDiagnosticUpdateSource.ClearDiagnosticsForProject(this.Id, RuleSetErrorId);
    }
    else
    {
        var messageArguments = new string[] { ruleSetFile.FilePath, ruleSetFile.GetException().Message };
        DiagnosticData diagnostic;
        if (DiagnosticData.TryCreate(_errorReadingRulesetRule, messageArguments, this.Id, this.Workspace, out diagnostic))
        {
            this.HostDiagnosticUpdateSource.UpdateDiagnosticsForProject(this.Id, RuleSetErrorId, SpecializedCollections.SingletonEnumerable(diagnostic));
        }
    }
}

/// <summary>
/// Records a new obj (intermediate) output path, refreshes the metadata reference resolver
/// and assembly name, and notifies hosts of the option/output-path changes.
/// </summary>
protected void SetObjOutputPathAndRelatedData(string objOutputPath)
{
    var currentObjOutputPath = this.ObjOutputPath;
    if (PathUtilities.IsAbsolute(objOutputPath) && !string.Equals(currentObjOutputPath, objOutputPath, StringComparison.OrdinalIgnoreCase))
    {
        // set obj output path
        this.ObjOutputPath = objOutputPath;

        // Workspace/services can be null for tests.
        if (this.MetadataService != null)
        {
            var newCompilationOptions = CurrentCompilationOptions.WithMetadataReferenceResolver(CreateMetadataReferenceResolver(
                metadataService: this.MetadataService,
                projectDirectory: this.ContainingDirectoryPathOpt,
                outputDirectory: Path.GetDirectoryName(objOutputPath)));
            SetOptionsCore(newCompilationOptions);
        }

        if (_pushingChangesToWorkspaceHosts)
        {
            this.ProjectTracker.NotifyWorkspaceHosts(host => host.OnOptionsChanged(this.Id, CurrentCompilationOptions, CurrentParseOptions));
            this.ProjectTracker.NotifyWorkspaceHosts(host => host.OnOutputFilePathChanged(this.Id, objOutputPath));
        }

        UpdateAssemblyName();
    }
}

private void UpdateAssemblyName()
{
    // set assembly name if changed
    // we use designTimeOutputPath to get assembly name since it is more reliable way to get the assembly name.
    // otherwise, friend assembly all get messed up.
var newAssemblyName = GetAssemblyNameFromPath(this.ObjOutputPath ?? this.ProjectSystemName);
    if (!string.Equals(AssemblyName, newAssemblyName, StringComparison.Ordinal))
    {
        AssemblyName = newAssemblyName;

        if (_pushingChangesToWorkspaceHosts)
        {
            this.ProjectTracker.NotifyWorkspaceHosts(host => host.OnAssemblyNameChanged(this.Id, newAssemblyName));
        }
    }
}

/// <summary>
/// Records a new bin (final) output path and updates the project tracker's bin-path index.
/// </summary>
protected void SetBinOutputPathAndRelatedData(string binOutputPath)
{
    // refresh final output path
    var currentBinOutputPath = this.BinOutputPath;
    if (binOutputPath != null && !string.Equals(currentBinOutputPath, binOutputPath, StringComparison.OrdinalIgnoreCase))
    {
        this.BinOutputPath = binOutputPath;

        // If the project has been hooked up with the project tracker, then update the bin path with the tracker.
        if (this.ProjectTracker.GetProject(Id) != null)
        {
            this.ProjectTracker.UpdateProjectBinPath(this, currentBinOutputPath, binOutputPath);
        }
    }
}

protected void UpdateProjectDisplayName(string newDisplayName)
{
    UpdateProjectDisplayNameAndFilePath(newDisplayName, newFilePath: null);
}

protected void UpdateProjectFilePath(string newFilePath)
{
    UpdateProjectDisplayNameAndFilePath(newDisplayName: null, newFilePath: newFilePath);
}

/// <summary>
/// Updates the display name and/or the project file path (null leaves a value unchanged).
/// A new file path must exist on disk. Hosts are notified only when something changed.
/// </summary>
protected void UpdateProjectDisplayNameAndFilePath(string newDisplayName, string newFilePath)
{
    bool updateMade = false;

    if (newDisplayName != null && this.DisplayName != newDisplayName)
    {
        this.DisplayName = newDisplayName;
        updateMade = true;
    }

    if (newFilePath != null && File.Exists(newFilePath) && this.ProjectFilePath != newFilePath)
    {
        Debug.Assert(PathUtilities.IsAbsolute(newFilePath));

        this.ProjectFilePath = newFilePath;
        updateMade = true;
    }

    if (updateMade && _pushingChangesToWorkspaceHosts)
    {
        this.ProjectTracker.NotifyWorkspaceHosts(host => host.OnProjectNameChanged(Id, this.DisplayName, this.ProjectFilePath));
    }
}

private static void StartPushingToWorkspaceAndNotifyOfOpenDocuments(AbstractProject project)
{
    // If a document is opened in a project but we haven't started pushing yet, we want to stop doing lazy
    // loading for this project and get it up to date so the user gets a fast experience there. If the file
    // was presented as open to us right away, then we'll never do this in OnDocumentOpened, so we should do
    // it here. It's important to do this after everything else happens in this method, so we don't get
    // strange ordering issues. It's still possible that this won't actually push changes if the workspace
    // host isn't ready to receive events yet.
    project.ProjectTracker.StartPushingToWorkspaceAndNotifyOfOpenDocuments(SpecializedCollections.SingletonEnumerable(project));
}

/// <summary>
/// Builds a resolver whose search paths are the project directory and/or output directory,
/// whichever of the two are available.
/// </summary>
private static MetadataReferenceResolver CreateMetadataReferenceResolver(IMetadataService metadataService, string projectDirectory, string outputDirectory)
{
    ImmutableArray<string> assemblySearchPaths;
    if (projectDirectory != null && outputDirectory != null)
    {
        assemblySearchPaths = ImmutableArray.Create(projectDirectory, outputDirectory);
    }
    else if (projectDirectory != null)
    {
        assemblySearchPaths = ImmutableArray.Create(projectDirectory);
    }
    else if (outputDirectory != null)
    {
        assemblySearchPaths = ImmutableArray.Create(outputDirectory);
    }
    else
    {
        assemblySearchPaths = ImmutableArray<string>.Empty;
    }

    return new WorkspaceMetadataFileReferenceResolver(metadataService, new RelativePathResolver(assemblySearchPaths, baseDirectory: projectDirectory));
}

#if DEBUG
public virtual bool Debug_VBEmbeddedCoreOptionOn
{
    get
    {
        return false;
    }
}
#endif

/// <summary>
/// Used for unit testing: don't crash the process if something bad happens.
/// </summary>
internal static bool CrashOnException = true;

/// <summary>
/// Exception filter for catch clauses: reports a fatal error (crashing in normal runs)
/// and always returns false so the exception is never actually caught.
/// </summary>
protected static bool FilterException(Exception e)
{
    if (CrashOnException)
    {
        FatalError.Report(e);
    }

    // Nothing fancy, so don't catch
    return false;
}

#region FolderNames
// Scratch list reused across calls (UI-thread only — see note in GetFolderNamesForFolder)
// and a cache of computed folder-name chains per hierarchy item id.
private readonly List<string> _tmpFolders = new List<string>();
private readonly Dictionary<uint, IReadOnlyList<string>> _folderNameMap = new Dictionary<uint, IReadOnlyList<string>>();

/// <summary>
/// Returns the folder-name chain for the parent of the given document item, or an empty
/// list when the document sits at the project root (or has no valid parent).
/// </summary>
public IReadOnlyList<string> GetFolderNamesFromHierarchy(uint documentItemID)
{
    object parentObj;
    if (documentItemID != (uint)VSConstants.VSITEMID.Nil && Hierarchy.GetProperty(documentItemID, (int)VsHierarchyPropID.Parent, out parentObj) == VSConstants.S_OK)
    {
        var parentID = UnboxVSItemId(parentObj);
        if (parentID != (uint)VSConstants.VSITEMID.Nil && parentID != (uint)VSConstants.VSITEMID.Root)
        {
            return GetFolderNamesForFolder(parentID);
        }
    }

    return SpecializedCollections.EmptyReadOnlyList<string>();
}

private IReadOnlyList<string> GetFolderNamesForFolder(uint folderItemID)
{
    // note: use of tmpFolders is assuming this API is called on UI thread only.
    _tmpFolders.Clear();

    IReadOnlyList<string> names;
    if (!_folderNameMap.TryGetValue(folderItemID, out names))
    {
        ComputeFolderNames(folderItemID, _tmpFolders, Hierarchy);
        names = _tmpFolders.ToImmutableArray();
        _folderNameMap.Add(folderItemID, names);
    }
    else
    {
        // verify names, and change map if we get a different set.
        // this is necessary because we only get document adds/removes from the project system
        // when a document name or folder name changes.
        ComputeFolderNames(folderItemID, _tmpFolders, Hierarchy);
        if (!Enumerable.SequenceEqual(names, _tmpFolders))
        {
            names = _tmpFolders.ToImmutableArray();
            _folderNameMap[folderItemID] = names;
        }
    }

    return names;
}

// Different hierarchies are inconsistent on whether they return ints or uints for VSItemIds.
// Technically it should be a uint. However, there's no enforcement of this, and marshalling
// from native to managed can end up resulting in boxed ints instead. Handle both here so
// we're resilient to however the IVsHierarchy was actually implemented.
private static uint UnboxVSItemId(object id)
{
    return id is uint ? (uint)id : unchecked((uint)(int)id);
}

/// <summary>
/// Walks up the hierarchy from a folder item, prepending each real folder name until the
/// root is reached. Synthetic shared-project nodes (names starting with '&lt;') are skipped.
/// </summary>
private static void ComputeFolderNames(uint folderItemID, List<string> names, IVsHierarchy hierarchy)
{
    object nameObj;
    if (hierarchy.GetProperty((uint)folderItemID, (int)VsHierarchyPropID.Name, out nameObj) == VSConstants.S_OK)
    {
        // For 'Shared' projects, IVSHierarchy returns a hierarchy item with < character in its name (i.e. <SharedProjectName>)
        // as a child of the root item. There is no such item in the 'visual' hierarchy in solution explorer and no such folder
        // is present on disk either. Since this is not a real 'folder', we exclude it from the contents of Document.Folders.
        // Note: The parent of the hierarchy item that contains < character in its name is VSITEMID.Root. So we don't need to
        // worry about accidental propagation out of the Shared project to any containing 'Solution' folders - the check for
        // VSITEMID.Root below already takes care of that.
        var name = (string)nameObj;
        if (!name.StartsWith("<", StringComparison.OrdinalIgnoreCase))
        {
            names.Insert(0, name);
        }
    }

    object parentObj;
    if (hierarchy.GetProperty((uint)folderItemID, (int)VsHierarchyPropID.Parent, out parentObj) == VSConstants.S_OK)
    {
        var parentID = UnboxVSItemId(parentObj);
        if (parentID != (uint)VSConstants.VSITEMID.Nil && parentID != (uint)VSConstants.VSITEMID.Root)
        {
            ComputeFolderNames(parentID, names, hierarchy);
        }
    }
}
#endregion
}
}
// Modified from xenko fast list
// Copyright (c) Xenko contributors (https://xenko.com) and Silicon Studio Corp. (https://www.siliconstudio.co.jp)
// Distributed under the MIT license. See the LICENSE.md file in the project root for more information.
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Diagnostics;
using System.Runtime.InteropServices;
#if !NETFX_CORE
namespace HelixToolkit.Wpf.SharpDX
#else
#if CORE
namespace HelixToolkit.SharpDX.Core
#else
namespace HelixToolkit.UWP
#endif
#endif
{
    /// <summary>
    /// Similar to <see cref="List{T}"/>, with direct access to underlying array.
    /// </summary>
    /// <typeparam name="T">The type of elements in the list.</typeparam>
    [DebuggerDisplay("Count = {" + nameof(Count) + "}")]
    public class FastList<T> : IList<T>, IReadOnlyList<T>, ICollection<T>, IEnumerable<T>, IEnumerable
    {
        // Fields
        private const int _defaultCapacity = 4;

        /// <summary>
        /// Gets the items from internal array. Make sure to access this array using <see cref="Count"/> instead of Array Length
        /// </summary>
        internal T[] Items { get; private set; }

        // Shared zero-length array so that empty lists allocate nothing.
        private static readonly T[] empty = new T[0];

        // Number of valid elements; Items.Length is the capacity.
        private int _size;

        /// <summary>Initializes an empty list with no backing allocation.</summary>
        public FastList()
        {
            Items = empty;
        }

        /// <summary>Initializes the list with a copy of <paramref name="collection"/>.</summary>
        /// <param name="collection">Source elements; copied in bulk when it is an <see cref="ICollection{T}"/>.</param>
        public FastList(IEnumerable<T> collection)
        {
            if (collection is ICollection<T> is2)
            {
                var count = is2.Count;
                Items = new T[count];
                is2.CopyTo(Items, 0);
                _size = count;
            }
            else
            {
                _size = 0;
                Items = new T[_defaultCapacity];
                using (var enumerator = collection.GetEnumerator())
                {
                    while (enumerator.MoveNext())
                    {
                        Add(enumerator.Current);
                    }
                }
            }
        }

        /// <summary>Initializes the list with the given initial capacity and zero elements.</summary>
        public FastList(int capacity)
        {
            Items = new T[capacity];
        }

        /// <summary>
        /// Gets or sets the size of the backing array. Setting a smaller value than
        /// <see cref="Count"/> silently drops trailing elements (unlike <see cref="List{T}"/>).
        /// </summary>
        public int Capacity
        {
            get
            {
                return Items.Length;
            }
            set
            {
                if (value != Items.Length)
                {
                    if (value > 0)
                    {
                        var destinationArray = new T[value];
                        if (_size > 0)
                        {
                            Array.Copy(Items, 0, destinationArray, 0, _size);
                        }
                        Items = destinationArray;
                    }
                    else
                    {
                        Items = empty;
                    }
                }
            }
        }

        #region IList<T> Members
        /// <summary>Appends an item, growing the backing array when full.</summary>
        public void Add(T item)
        {
            if (_size == Items.Length)
            {
                EnsureCapacity(_size + 1);
            }
            Items[_size++] = item;
        }

        /// <summary>
        /// Grows <see cref="Count"/> by <paramref name="index"/> elements without initializing them.
        /// The new slots may contain stale data from earlier use of the backing array.
        /// </summary>
        public void IncreaseCapacity(int index)
        {
            EnsureCapacity(_size + index);
            _size += index;
        }

        /// <summary>Removes all elements, clearing the backing array (slow clear).</summary>
        public void Clear()
        {
            Clear(false);
        }

        /// <summary>Determines whether the list contains <paramref name="item"/> using the default equality comparer.</summary>
        public bool Contains(T item)
        {
            if (item == null)
            {
                // Reference/null scan; EqualityComparer would box for nullable value types.
                for (var j = 0; j < _size; j++)
                {
                    if (Items[j] == null)
                    {
                        return true;
                    }
                }
                return false;
            }
            var comparer = EqualityComparer<T>.Default;
            for (var i = 0; i < _size; i++)
            {
                if (comparer.Equals(Items[i], item))
                {
                    return true;
                }
            }
            return false;
        }

        /// <summary>Copies all <see cref="Count"/> elements into <paramref name="array"/> starting at <paramref name="arrayIndex"/>.</summary>
        public void CopyTo(T[] array, int arrayIndex)
        {
            Array.Copy(Items, 0, array, arrayIndex, _size);
        }

        /// <summary>Returns the index of the first occurrence of <paramref name="item"/>, or -1.</summary>
        public int IndexOf(T item)
        {
            return Array.IndexOf(Items, item, 0, _size);
        }

        /// <summary>Inserts <paramref name="item"/> at <paramref name="index"/>, shifting later elements right.</summary>
        /// <exception cref="ArgumentOutOfRangeException">
        /// When <paramref name="index"/> is negative or greater than <see cref="Count"/>.
        /// </exception>
        public void Insert(int index, T item)
        {
            // FIX: validate the index the same way RemoveAt does. Previously an
            // out-of-range index either corrupted _size or surfaced as
            // IndexOutOfRangeException from the array access. Inserting at _size
            // (append) remains valid, matching List<T>.Insert semantics.
            if (index < 0 || index > _size)
                throw new ArgumentOutOfRangeException(nameof(index));
            if (_size == Items.Length)
            {
                EnsureCapacity(_size + 1);
            }
            if (index < _size)
            {
                Array.Copy(Items, index, Items, index + 1, _size - index);
            }
            Items[index] = item;
            _size++;
        }

        /// <summary>Removes the first occurrence of <paramref name="item"/>; returns whether anything was removed.</summary>
        public bool Remove(T item)
        {
            var index = IndexOf(item);
            if (index >= 0)
            {
                RemoveAt(index);
                return true;
            }
            return false;
        }

        /// <summary>Removes the element at <paramref name="index"/>, shifting later elements left.</summary>
        /// <exception cref="ArgumentOutOfRangeException">When <paramref name="index"/> is out of range.</exception>
        public void RemoveAt(int index)
        {
            if (index < 0 || index >= _size)
                throw new ArgumentOutOfRangeException(nameof(index));
            _size--;
            if (index < _size)
            {
                Array.Copy(Items, index + 1, Items, index, _size - index);
            }
            // Release the reference in the vacated slot so the GC can collect it.
            Items[_size] = default(T);
        }

        IEnumerator<T> IEnumerable<T>.GetEnumerator()
        {
            return new Enumerator(this);
        }

        IEnumerator IEnumerable.GetEnumerator()
        {
            return new Enumerator(this);
        }

        /// <summary>Gets the number of elements in the list.</summary>
        public int Count => _size;

        /// <summary>
        /// Gets or sets the element at <paramref name="index"/>. Unchecked against
        /// <see cref="Count"/> for speed; the caller must pass a valid index.
        /// </summary>
        public T this[int index]
        {
            get
            {
                return Items[index];
            }
            set
            {
                Items[index] = value;
            }
        }

        bool ICollection<T>.IsReadOnly => false;
        #endregion

        /// <summary>
        /// Clears this list with a fast-clear option.
        /// </summary>
        /// <param name="fastClear">if set to <c>true</c> this method only resets the count elements but doesn't clear items referenced already stored in the list.</param>
        public void Clear(bool fastClear)
        {
            Resize(0, fastClear);
        }

        /// <summary>
        /// Sets <see cref="Count"/> to <paramref name="newSize"/>. Growing does not initialize
        /// the new slots; shrinking clears the dropped slots unless <paramref name="fastClear"/> is set.
        /// </summary>
        public void Resize(int newSize, bool fastClear)
        {
            if (_size < newSize)
            {
                EnsureCapacity(newSize);
            }
            else if (!fastClear && _size - newSize > 0)
            {
                Array.Clear(Items, newSize, _size - newSize);
            }
            _size = newSize;
        }

        /// <summary>Appends all elements of <paramref name="collection"/>.</summary>
        public void AddRange(IEnumerable<T> collection)
        {
            InsertRange(_size, collection);
        }

        /// <summary>Returns a read-only wrapper around this list.</summary>
        public ReadOnlyCollection<T> AsReadOnly()
        {
            return new ReadOnlyCollection<T>(this);
        }

        /// <summary>Binary-searches the whole (sorted) list using the default comparer.</summary>
        public int BinarySearch(T item)
        {
            return BinarySearch(0, Count, item, null);
        }

        /// <summary>Binary-searches the whole (sorted) list using <paramref name="comparer"/>.</summary>
        public int BinarySearch(T item, IComparer<T> comparer)
        {
            return BinarySearch(0, Count, item, comparer);
        }

        /// <summary>Binary-searches a sorted sub-range; see <see cref="Array.BinarySearch(Array, int, int, object)"/> for return semantics.</summary>
        public int BinarySearch(int index, int count, T item, IComparer<T> comparer)
        {
            return Array.BinarySearch(Items, index, count, item, comparer);
        }

        /// <summary>Copies all elements into <paramref name="array"/> starting at index 0.</summary>
        public void CopyTo(T[] array)
        {
            CopyTo(array, 0);
        }

        /// <summary>Copies <paramref name="count"/> elements starting at <paramref name="index"/> into <paramref name="array"/>.</summary>
        public void CopyTo(int index, T[] array, int arrayIndex, int count)
        {
            Array.Copy(Items, index, array, arrayIndex, count);
        }

        /// <summary>
        /// Grows the backing array to at least <paramref name="min"/> elements,
        /// doubling the current capacity when that suffices.
        /// </summary>
        public void EnsureCapacity(int min)
        {
            if (Items.Length < min)
            {
                var num = (Items.Length == 0) ? _defaultCapacity : (Items.Length * 2);
                if (num < min)
                {
                    num = min;
                }
                Capacity = num;
            }
        }

        /// <summary>Returns whether any element satisfies <paramref name="match"/>.</summary>
        public bool Exists(Predicate<T> match)
        {
            return (FindIndex(match) != -1);
        }

        /// <summary>Returns the first element satisfying <paramref name="match"/>, or <c>default(T)</c>.</summary>
        public T Find(Predicate<T> match)
        {
            for (var i = 0; i < _size; i++)
            {
                if (match(Items[i]))
                {
                    return Items[i];
                }
            }
            return default(T);
        }

        /// <summary>Returns a new list of all elements satisfying <paramref name="match"/>.</summary>
        public FastList<T> FindAll(Predicate<T> match)
        {
            var list = new FastList<T>();
            for (var i = 0; i < _size; i++)
            {
                if (match(Items[i]))
                {
                    list.Add(Items[i]);
                }
            }
            return list;
        }

        /// <summary>Returns the index of the first element satisfying <paramref name="match"/>, or -1.</summary>
        public int FindIndex(Predicate<T> match)
        {
            return FindIndex(0, _size, match);
        }

        /// <summary>Returns the index of the first match at or after <paramref name="startIndex"/>, or -1.</summary>
        public int FindIndex(int startIndex, Predicate<T> match)
        {
            return FindIndex(startIndex, _size - startIndex, match);
        }

        /// <summary>Returns the index of the first match within [startIndex, startIndex + count), or -1.</summary>
        public int FindIndex(int startIndex, int count, Predicate<T> match)
        {
            var num = startIndex + count;
            for (var i = startIndex; i < num; i++)
            {
                if (match(Items[i]))
                {
                    return i;
                }
            }
            return -1;
        }

        /// <summary>Returns the last element satisfying <paramref name="match"/>, or <c>default(T)</c>.</summary>
        public T FindLast(Predicate<T> match)
        {
            for (var i = _size - 1; i >= 0; i--)
            {
                if (match(Items[i]))
                {
                    return Items[i];
                }
            }
            return default(T);
        }

        /// <summary>Returns the index of the last element satisfying <paramref name="match"/>, or -1.</summary>
        public int FindLastIndex(Predicate<T> match)
        {
            return FindLastIndex(_size - 1, _size, match);
        }

        /// <summary>Returns the index of the last match at or before <paramref name="startIndex"/>, or -1.</summary>
        public int FindLastIndex(int startIndex, Predicate<T> match)
        {
            return FindLastIndex(startIndex, startIndex + 1, match);
        }

        /// <summary>Returns the index of the last match scanning backwards from <paramref name="startIndex"/> over <paramref name="count"/> elements, or -1.</summary>
        public int FindLastIndex(int startIndex, int count, Predicate<T> match)
        {
            var num = startIndex - count;
            for (var i = startIndex; i > num; i--)
            {
                if (match(Items[i]))
                {
                    return i;
                }
            }
            return -1;
        }

        /// <summary>Invokes <paramref name="action"/> for each element in order.</summary>
        public void ForEach(Action<T> action)
        {
            for (var i = 0; i < _size; i++)
            {
                action(Items[i]);
            }
        }

        /// <summary>Returns a value-type enumerator (avoids allocation in foreach).</summary>
        public Enumerator GetEnumerator()
        {
            return new Enumerator(this);
        }

        /// <summary>Returns a new list containing <paramref name="count"/> elements starting at <paramref name="index"/>.</summary>
        public FastList<T> GetRange(int index, int count)
        {
            var list = new FastList<T>(count);
            Array.Copy(Items, index, list.Items, 0, count);
            list._size = count;
            return list;
        }

        /// <summary>Returns the index of the first occurrence of <paramref name="item"/> at or after <paramref name="index"/>, or -1.</summary>
        public int IndexOf(T item, int index)
        {
            return Array.IndexOf(Items, item, index, _size - index);
        }

        /// <summary>Returns the index of the first occurrence of <paramref name="item"/> within the given range, or -1.</summary>
        public int IndexOf(T item, int index, int count)
        {
            return Array.IndexOf(Items, item, index, count);
        }

        /// <summary>Inserts the elements of <paramref name="collection"/> at <paramref name="index"/>.</summary>
        public void InsertRange(int index, IEnumerable<T> collection)
        {
            if (collection is ICollection<T> is2)
            {
                var count = is2.Count;
                if (count > 0)
                {
                    EnsureCapacity(_size + count);
                    if (index < _size)
                    {
                        Array.Copy(Items, index, Items, index + count, _size - index);
                    }
                    if (this == is2)
                    {
                        // Inserting the list into itself: copy the two halves around the
                        // freshly opened gap (same approach as List<T> in the BCL).
                        Array.Copy(Items, 0, Items, index, index);
                        Array.Copy(Items, (index + count), Items, (index * 2), (_size - index));
                    }
                    else
                    {
                        is2.CopyTo(Items, index);
                    }
                    _size += count;
                }
            }
            else
            {
                using (var enumerator = collection.GetEnumerator())
                {
                    while (enumerator.MoveNext())
                    {
                        Insert(index++, enumerator.Current);
                    }
                }
            }
        }

        /// <summary>Returns the index of the last occurrence of <paramref name="item"/>, or -1.</summary>
        public int LastIndexOf(T item)
        {
            if (_size == 0)
            {
                return -1;
            }
            return LastIndexOf(item, _size - 1, _size);
        }

        /// <summary>Returns the index of the last occurrence of <paramref name="item"/> at or before <paramref name="index"/>, or -1.</summary>
        public int LastIndexOf(T item, int index)
        {
            return LastIndexOf(item, index, index + 1);
        }

        /// <summary>Returns the index of the last occurrence of <paramref name="item"/> in the backwards range, or -1.</summary>
        public int LastIndexOf(T item, int index, int count)
        {
            if (_size == 0)
            {
                return -1;
            }
            return Array.LastIndexOf(Items, item, index, count);
        }

        /// <summary>Removes every element satisfying <paramref name="match"/>; returns the number removed.</summary>
        public int RemoveAll(Predicate<T> match)
        {
            // In-place compaction, same algorithm as List<T>.RemoveAll.
            var index = 0;
            while ((index < _size) && !match(Items[index]))
            {
                index++;
            }
            if (index >= _size)
            {
                return 0;
            }
            var num2 = index + 1;
            while (num2 < _size)
            {
                while ((num2 < _size) && match(Items[num2]))
                {
                    num2++;
                }
                if (num2 < _size)
                {
                    Items[index++] = Items[num2++];
                }
            }
            Array.Clear(Items, index, _size - index);
            var num3 = _size - index;
            _size = index;
            return num3;
        }

        /// <summary>Removes <paramref name="count"/> elements starting at <paramref name="index"/>.</summary>
        public void RemoveRange(int index, int count)
        {
            if (count > 0)
            {
                _size -= count;
                if (index < _size)
                {
                    Array.Copy(Items, index + count, Items, index, _size - index);
                }
                Array.Clear(Items, _size, count);
            }
        }

        /// <summary>Reverses the whole list in place.</summary>
        public void Reverse()
        {
            Reverse(0, Count);
        }

        /// <summary>Reverses a sub-range in place.</summary>
        public void Reverse(int index, int count)
        {
            Array.Reverse(Items, index, count);
        }

        /// <summary>Sorts the whole list using the default comparer.</summary>
        public void Sort()
        {
            Array.Sort(Items, 0, Count);
        }

        /// <summary>Sorts the whole list using <paramref name="comparer"/>.</summary>
        public void Sort(IComparer<T> comparer)
        {
            Sort(0, Count, comparer);
        }

        /// <summary>Sorts the whole list using a <see cref="Comparison{T}"/> delegate.</summary>
        /// <remarks>
        /// FIX: this overload existed only as commented-out dead code referencing the
        /// internal BCL type Array.FunctorComparer. Re-implemented with the public
        /// <see cref="Comparer{T}.Create(Comparison{T})"/> factory (available since .NET 4.5).
        /// </remarks>
        public void Sort(Comparison<T> comparison)
        {
            if (_size > 0)
            {
                Array.Sort(Items, 0, _size, Comparer<T>.Create(comparison));
            }
        }

        /// <summary>Sorts a sub-range using <paramref name="comparer"/>.</summary>
        public void Sort(int index, int count, IComparer<T> comparer)
        {
            Array.Sort(Items, index, count, comparer);
        }

        /// <summary>Returns a new array containing exactly <see cref="Count"/> elements.</summary>
        public T[] ToArray()
        {
            var destinationArray = new T[_size];
            Array.Copy(Items, 0, destinationArray, 0, _size);
            return destinationArray;
        }

        /// <summary>Shrinks the backing array to exactly <see cref="Count"/> elements.</summary>
        public void TrimExcess()
        {
            if (Count == Capacity)
            {
                return;
            }
            var curr = Items;
            Items = Count == 0 ? empty : new T[Count];
            if (Count > 0)
            {
                Array.Copy(curr, 0, Items, 0, Count);
            }
            Capacity = Count;
        }

        /// <summary>Returns whether every element satisfies <paramref name="match"/>.</summary>
        public bool TrueForAll(Predicate<T> match)
        {
            for (var i = 0; i < _size; i++)
            {
                if (!match(Items[i]))
                {
                    return false;
                }
            }
            return true;
        }

        // Properties
        // Nested Types

        #region Nested type: Enumerator
        /// <summary>Value-type enumerator over a <see cref="FastList{T}"/>.</summary>
        [StructLayout(LayoutKind.Sequential)]
        public struct Enumerator : IEnumerator<T>, IDisposable, IEnumerator
        {
            private readonly FastList<T> list;
            private int index;
            private T current;

            internal Enumerator(FastList<T> list)
            {
                this.list = list;
                index = 0;
                current = default(T);
            }

            public void Dispose()
            {
            }

            public bool MoveNext()
            {
                var list = this.list;
                if (index < list._size)
                {
                    current = list.Items[index];
                    index++;
                    return true;
                }
                return MoveNextRare();
            }

            // Out-of-line end-of-sequence path keeps MoveNext small and inlinable.
            private bool MoveNextRare()
            {
                index = list._size + 1;
                current = default(T);
                return false;
            }

            public T Current => current;

            object IEnumerator.Current => Current;

            void IEnumerator.Reset()
            {
                index = 0;
                current = default(T);
            }
        }
        #endregion

        /// <summary>
        /// Fast add all from another <see cref="FastList{T}"/>.
        /// </summary>
        /// <param name="list">The list.</param>
        public void AddAll(FastList<T> list)
        {
            EnsureCapacity(_size + list.Count);
            Array.Copy(list.Items, 0, Items, Count, list.Count);
            _size += list.Count;
        }

        /// <summary>
        /// Gets the internal array used to hold data.
        /// </summary>
        /// <returns></returns>
        public T[] GetInternalArray()
        {
            return Items;
        }
    }
}
// 
// Copyright (c) 2004-2011 Jaroslaw Kowalski <jaak@jkowalski.net>
// 
// All rights reserved.
// 
// Redistribution and use in source and binary forms, with or without 
// modification, are permitted provided that the following conditions 
// are met:
// 
// * Redistributions of source code must retain the above copyright notice, 
//   this list of conditions and the following disclaimer. 
// 
// * Redistributions in binary form must reproduce the above copyright notice,
//   this list of conditions and the following disclaimer in the documentation
//   and/or other materials provided with the distribution. 
// 
// * Neither the name of Jaroslaw Kowalski nor the names of its 
//   contributors may be used to endorse or promote products derived from this
//   software without specific prior written permission. 
// 
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE 
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS 
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN 
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) 
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF 
// THE POSSIBILITY OF SUCH DAMAGE.
// 

namespace NLog.Targets.Wrappers
{
    using System;
    using System.ComponentModel;
    using System.Threading;
    using Common;
    using Internal;
    using System.Collections.Generic;

    /// <summary>
    /// Provides asynchronous, buffered execution of target writes.
    /// </summary>
    /// <seealso href="http://nlog-project.org/wiki/AsyncWrapper_target">Documentation on NLog Wiki</seealso>
    /// <remarks>
    /// <p>
    /// Asynchronous target wrapper allows the logger code to execute more quickly, by queueing
    /// messages and processing them in a separate thread. You should wrap targets
    /// that spend a non-trivial amount of time in their Write() method with asynchronous
    /// target to speed up logging.
    /// </p>
    /// <p>
    /// Because asynchronous logging is quite a common scenario, NLog supports a
    /// shorthand notation for wrapping all targets with AsyncWrapper. Just add async="true" to
    /// the &lt;targets/&gt; element in the configuration file.
    /// </p>
    /// <code lang="XML">
    /// <![CDATA[
    /// <targets async="true">
    ///    ... your targets go here ...
    /// </targets>
    /// ]]></code>
    /// </remarks>
    /// <example>
    /// <p>
    /// To set up the target in the <a href="config.html">configuration file</a>, 
    /// use the following syntax:
    /// </p>
    /// <code lang="XML" source="examples/targets/Configuration File/AsyncWrapper/NLog.config" />
    /// <p>
    /// The above examples assume just one target and a single rule. See below for
    /// a programmatic configuration that's equivalent to the above config file:
    /// </p>
    /// <code lang="C#" source="examples/targets/Configuration API/AsyncWrapper/Wrapping File/Example.cs" />
    /// </example>
    [Target("AsyncWrapper", IsWrapper = true)]
    public class AsyncTargetWrapper : WrapperTargetBase
    {
        // Guards lazyWriterTimer start/stop; the timer itself drives ProcessPendingEvents.
        private readonly object lockObject = new object();
        // One-shot timer; re-armed at the end of each ProcessPendingEvents pass.
        private Timer lazyWriterTimer;
        // Continuations waiting for a flush; drained by the next timer pass.
        private readonly Queue<AsyncContinuation> flushAllContinuations = new Queue<AsyncContinuation>();
        // Separate lock for the continuation queue so FlushAsync never blocks on timer work.
        private readonly object continuationQueueLock = new object();

        /// <summary>
        /// Initializes a new instance of the <see cref="AsyncTargetWrapper" /> class.
        /// </summary>
        public AsyncTargetWrapper()
            : this(null)
        {
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="AsyncTargetWrapper" /> class.
        /// </summary>
        /// <param name="wrappedTarget">The wrapped target.</param>
        public AsyncTargetWrapper(Target wrappedTarget)
            : this(wrappedTarget, 10000, AsyncTargetWrapperOverflowAction.Discard)
        {
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="AsyncTargetWrapper" /> class.
        /// </summary>
        /// <param name="wrappedTarget">The wrapped target.</param>
        /// <param name="queueLimit">Maximum number of requests in the queue.</param>
        /// <param name="overflowAction">The action to be taken when the queue overflows.</param>
        public AsyncTargetWrapper(Target wrappedTarget, int queueLimit, AsyncTargetWrapperOverflowAction overflowAction)
        {
            // Queue is created with placeholder values; the QueueLimit/OverflowAction
            // property setters below forward the caller's values to it.
            this.RequestQueue = new AsyncRequestQueue(10000, AsyncTargetWrapperOverflowAction.Discard);
            this.TimeToSleepBetweenBatches = 50;
            this.BatchSize = 100;
            this.WrappedTarget = wrappedTarget;
            this.QueueLimit = queueLimit;
            this.OverflowAction = overflowAction;
        }

        /// <summary>
        /// Gets or sets the number of log events that should be processed in a batch
        /// by the lazy writer thread.
        /// </summary>
        /// <docgen category='Buffering Options' order='100' />
        [DefaultValue(100)]
        public int BatchSize { get; set; }

        /// <summary>
        /// Gets or sets the time in milliseconds to sleep between batches.
        /// </summary>
        /// <docgen category='Buffering Options' order='100' />
        [DefaultValue(50)]
        public int TimeToSleepBetweenBatches { get; set; }

        /// <summary>
        /// Gets or sets the action to be taken when the lazy writer thread request queue count
        /// exceeds the set limit.
        /// </summary>
        /// <docgen category='Buffering Options' order='100' />
        [DefaultValue("Discard")]
        public AsyncTargetWrapperOverflowAction OverflowAction
        {
            get { return this.RequestQueue.OnOverflow; }
            set { this.RequestQueue.OnOverflow = value; }
        }

        /// <summary>
        /// Gets or sets the limit on the number of requests in the lazy writer thread request queue.
        /// </summary>
        /// <docgen category='Buffering Options' order='100' />
        [DefaultValue(10000)]
        public int QueueLimit
        {
            get { return this.RequestQueue.RequestLimit; }
            set { this.RequestQueue.RequestLimit = value; }
        }

        /// <summary>
        /// Gets the queue of lazy writer thread requests.
        /// </summary>
        internal AsyncRequestQueue RequestQueue { get; private set; }

        /// <summary>
        /// Waits for the lazy writer thread to finish writing messages.
        /// </summary>
        /// <param name="asyncContinuation">The asynchronous continuation.</param>
        protected override void FlushAsync(AsyncContinuation asyncContinuation)
        {
            // Only enqueues; the continuation runs on the next ProcessPendingEvents pass.
            lock (continuationQueueLock)
            {
                this.flushAllContinuations.Enqueue(asyncContinuation);
            }
        }

        /// <summary>
        /// Initializes the target by starting the lazy writer timer.
        /// </summary>
        protected override void InitializeTarget()
        {
            base.InitializeTarget();
            this.RequestQueue.Clear();
            // Created disabled (Infinite); StartLazyWriterTimer arms the first tick.
            this.lazyWriterTimer = new Timer(this.ProcessPendingEvents, null, Timeout.Infinite, Timeout.Infinite);
            this.StartLazyWriterTimer();
        }

        /// <summary>
        /// Shuts down the lazy writer timer.
        /// </summary>
        protected override void CloseTarget()
        {
            this.StopLazyWriterThread();
            // Drain anything still queued synchronously before the target closes.
            if (this.RequestQueue.RequestCount > 0)
            {
                ProcessPendingEvents(null);
            }

            base.CloseTarget();
        }

        /// <summary>
        /// Starts the lazy writer thread which periodically writes
        /// queued log messages.
        /// </summary>
        protected virtual void StartLazyWriterTimer()
        {
            lock (this.lockObject)
            {
                // Null after StopLazyWriterThread: the target is closing, do not re-arm.
                if (this.lazyWriterTimer != null)
                {
                    this.lazyWriterTimer.Change(this.TimeToSleepBetweenBatches, Timeout.Infinite);
                }
            }
        }

        /// <summary>
        /// Stops the lazy writer thread.
        /// </summary>
        protected virtual void StopLazyWriterThread()
        {
            lock (this.lockObject)
            {
                if (this.lazyWriterTimer != null)
                {
                    this.lazyWriterTimer.Change(Timeout.Infinite, Timeout.Infinite);
                    this.lazyWriterTimer = null;
                }
            }
        }

        /// <summary>
        /// Adds the log event to asynchronous queue to be processed by
        /// the lazy writer thread.
        /// </summary>
        /// <param name="logEvent">The log event.</param>
        /// <remarks>
        /// The <see cref="Target.PrecalculateVolatileLayouts"/> is called
        /// to ensure that the log event can be processed in another thread.
        /// </remarks>
        protected override void Write(AsyncLogEventInfo logEvent)
        {
            this.MergeEventProperties(logEvent.LogEvent);
            this.PrecalculateVolatileLayouts(logEvent.LogEvent);
            this.RequestQueue.Enqueue(logEvent);
        }

        // Timer callback: drains pending flush continuations, writes one batch per
        // continuation (or one BatchSize batch when no flush is pending), then re-arms
        // the timer in the finally block so processing continues even after errors.
        private void ProcessPendingEvents(object state)
        {
            AsyncContinuation[] continuations;
            lock (this.continuationQueueLock)
            {
                // A single null sentinel means "no flush requested, just write a batch".
                continuations = this.flushAllContinuations.Count > 0
                    ? this.flushAllContinuations.ToArray()
                    : new AsyncContinuation[] { null };
                this.flushAllContinuations.Clear();
            }

            try
            {
                foreach (var continuation in continuations)
                {
                    int count = this.BatchSize;
                    if (continuation != null)
                    {
                        // Flushing: take everything currently queued, not just one batch.
                        count = this.RequestQueue.RequestCount;
                        InternalLogger.Trace("Flushing {0} events.", count);
                    }

                    if (this.RequestQueue.RequestCount == 0)
                    {
                        if (continuation != null)
                        {
                            // NOTE(review): the continuation fires here for an empty queue,
                            // yet execution still falls through to the WriteAsyncLogEvents
                            // call below, whose flush callback invokes the same continuation
                            // again — looks like a possible double invocation; confirm
                            // against AsyncRequestQueue/WriteAsyncLogEvents semantics.
                            continuation(null);
                        }
                    }

                    AsyncLogEventInfo[] logEventInfos = this.RequestQueue.DequeueBatch(count);

                    if (continuation != null)
                    {
                        // write all events, then flush, then call the continuation
                        this.WrappedTarget.WriteAsyncLogEvents(logEventInfos, ex => this.WrappedTarget.Flush(continuation));
                    }
                    else
                    {
                        // just write all events
                        this.WrappedTarget.WriteAsyncLogEvents(logEventInfos);
                    }
                }
            }
            catch (Exception exception)
            {
                if (exception.MustBeRethrown())
                {
                    throw;
                }

                InternalLogger.Error("Error in lazy writer timer procedure: {0}", exception);
            }
            finally
            {
                // Always re-arm the one-shot timer so the next batch gets processed.
                this.StartLazyWriterTimer();
            }
        }
    }
}
using System;
using System.IO;
using UnityEngine;
using Pathfinding;
using Pathfinding.Util;
using Pathfinding.Serialization.JsonFx;

#if NETFX_CORE && !UNITY_EDITOR
#if !ASTAR_NO_ZIP
using Pathfinding.Ionic.Zip;
#else
using Pathfinding.Serialization.Zip;
#endif
#else
using CompatFileStream = System.IO.FileStream;
#if !ASTAR_NO_ZIP
using Pathfinding.Ionic.Zip;
#else
using Pathfinding.Serialization.Zip;
#endif
#endif

namespace Pathfinding.Serialization
{
	/** Holds information passed to custom graph serializers */
	public class GraphSerializationContext
	{
		// Maps serialized node ids back to node instances; only set when deserializing.
		private readonly GraphNode[] id2NodeMapping;

		/** Deserialization stream.
		 * Will only be set when deserializing
		 */
		public readonly BinaryReader reader;

		/** Serialization stream.
		 * Will only be set when serializing
		 */
		public readonly BinaryWriter writer;

		/** Index of the graph which is currently being processed */
		public readonly int graphIndex;

		// Deserialization constructor: provides the reader and the id -> node mapping.
		public GraphSerializationContext (BinaryReader reader, GraphNode[] id2NodeMapping, int graphIndex)
		{
			this.reader = reader;
			this.id2NodeMapping = id2NodeMapping;
			this.graphIndex = graphIndex;
		}

		// Serialization constructor: only the writer is available.
		public GraphSerializationContext (BinaryWriter writer)
		{
			this.writer = writer;
		}

		// Returns a stable integer identifier for a node (-1 represents a null node).
		public int GetNodeIdentifier (GraphNode node)
		{
			return node == null ? -1 : node.NodeIndex;
		}

		// Resolves an identifier written by GetNodeIdentifier back to a node instance.
		// Only valid while deserializing (requires id2NodeMapping).
		public GraphNode GetNodeFromIdentifier (int id)
		{
			if (id2NodeMapping == null) throw new Exception ("Calling GetNodeFromIdentifier when serializing");

			if (id == -1) return null;
			GraphNode node = id2NodeMapping[id];
			if (node == null) throw new Exception ("Invalid id");
			return node;
		}

#if ASTAR_NO_JSON
		/** Write a Vector3 */
		public void SerializeVector3 ( Vector3 v ) {
			writer.Write (v.x);
			writer.Write (v.y);
			writer.Write (v.z);
		}

		/** Read a Vector3 */
		public Vector3 DeserializeVector3 () {
			return new Vector3 (reader.ReadSingle (), reader.ReadSingle (), reader.ReadSingle ());
		}

		/** Read an Int32, or the default value if the stream has fewer than 4 bytes left */
		public int DeserializeInt (int defaultValue) {
			if (reader.BaseStream.Position <= reader.BaseStream.Length-4) {
				return reader.ReadInt32 ();
			} else {
				return defaultValue;
			}
		}

		/** Read a Single, or the default value if the stream has fewer than 4 bytes left */
		public float DeserializeFloat (float defaultValue) {
			if (reader.BaseStream.Position <= reader.BaseStream.Length-4) {
				return reader.ReadSingle ();
			} else {
				return defaultValue;
			}
		}

		/** Write a UnityEngine.Object */
		public void SerializeUnityObject ( UnityEngine.Object ob ) {
			// int.MaxValue is the sentinel for a null reference.
			if ( ob == null ) {
				writer.Write (int.MaxValue);
				return;
			}

			int inst = ob.GetInstanceID();
			string name = ob.name;
			string type = ob.GetType().AssemblyQualifiedName;
			string guid = "";

			//Write scene path if the object is a Component or GameObject
			Component component = ob as Component;
			GameObject go = ob as GameObject;

			if (component != null || go != null) {
				if (component != null && go == null) {
					go = component.gameObject;
				}

				UnityReferenceHelper helper = go.GetComponent<UnityReferenceHelper>();

				if (helper == null) {
					Debug.Log ("Adding UnityReferenceHelper to Unity Reference '"+ob.name+"'");
					helper = go.AddComponent<UnityReferenceHelper>();
				}

				//Make sure it has a unique GUID
				helper.Reset ();

				guid = helper.GetGUID ();
			}

			writer.Write(inst);
			writer.Write(name);
			writer.Write(type);
			writer.Write(guid);
		}

		/** Read a UnityEngine.Object */
		public UnityEngine.Object DeserializeUnityObject ( ) {
			int inst = reader.ReadInt32();

			// Null sentinel written by SerializeUnityObject.
			if ( inst == int.MaxValue ) {
				return null;
			}

			string name = reader.ReadString();
			string typename = reader.ReadString();
			string guid = reader.ReadString();

			System.Type type = System.Type.GetType (typename);

			if (type == null) {
				Debug.LogError ("Could not find type '"+typename+"'. Cannot deserialize Unity reference");
				return null;
			}

			// First try to resolve via UnityReferenceHelper GUIDs in the scene.
			if (!string.IsNullOrEmpty(guid)) {
				UnityReferenceHelper[] helpers = UnityEngine.Object.FindObjectsOfType(typeof(UnityReferenceHelper)) as UnityReferenceHelper[];

				for (int i=0;i<helpers.Length;i++) {
					if (helpers[i].GetGUID () == guid) {
						if (type == typeof(GameObject)) {
							return helpers[i].gameObject;
						} else {
							return helpers[i].GetComponent (type);
						}
					}
				}
			}

			//Try to load from resources
			UnityEngine.Object[] objs = Resources.LoadAll (name,type);

			for (int i=0;i<objs.Length;i++) {
				if (objs[i].name == name || objs.Length == 1) {
					return objs[i];
				}
			}

			return null;
		}
#endif
	}

	/** Handles low level serialization and deserialization of graph settings and data */
	public class AstarSerializer
	{
		private AstarData data;

#if !ASTAR_NO_JSON
		public JsonWriterSettings writerSettings;
		public JsonReaderSettings readerSettings;
#endif

		// Zip archive holding one entry per serialized graph plus metadata.
		private ZipFile zip;
		private MemoryStream str;

		// Metadata (versions, guids, type names) serialized alongside the graphs.
		private GraphMeta meta;

		private SerializeSettings settings;

		private NavGraph[] graphs;

		// Offset added to graph indices (used when appending graphs to existing data).
		private int graphIndexOffset;

		const string binaryExt = ".binary";

#if !ASTAR_NO_JSON
		const string jsonExt = ".json";
#else
		const string jsonExt = binaryExt;
#endif

		// Running checksum over all serialized entries.
		private uint checksum = 0xffffffff;

#if !ASTAR_NO_JSON
		System.Text.UTF8Encoding encoding=new System.Text.UTF8Encoding();
#endif

		static System.Text.StringBuilder _stringBuilder = new System.Text.StringBuilder();

		/** Returns a cached StringBuilder.
		 * This function only has one string builder cached and should
		 * thus only be called from a single thread and should not be called while using an earlier got string builder.
		 */
		static System.Text.StringBuilder GetStringBuilder () {
			_stringBuilder.Length = 0;
			return _stringBuilder;
		}

		public AstarSerializer (AstarData data) {
			this.data = data;
			settings = SerializeSettings.Settings;
		}

		public AstarSerializer (AstarData data, SerializeSettings settings) {
			this.data = data;
			this.settings = settings;
		}

		public void SetGraphIndexOffset (int offset) {
			graphIndexOffset = offset;
		}

		// Folds a serialized byte block into the running checksum.
		void AddChecksum (byte[] bytes) {
			checksum = Checksum.GetChecksum (bytes,checksum);
		}

		public uint GetChecksum () { return checksum; }

#region Serialize

		public void OpenSerialize () {
			// Create a new zip file, here we will store all the data
			zip = new ZipFile();
			zip.AlternateEncoding = System.Text.Encoding.UTF8;
			zip.AlternateEncodingUsage = ZipOption.Always;

#if !ASTAR_NO_JSON
			// Add some converters so that we can serialize some Unity types
			writerSettings = new JsonWriterSettings();
			writerSettings.AddTypeConverter (new VectorConverter());
			writerSettings.AddTypeConverter (new BoundsConverter());
			writerSettings.AddTypeConverter (new LayerMaskConverter());
			writerSettings.AddTypeConverter (new MatrixConverter());
			writerSettings.AddTypeConverter (new GuidConverter());
			writerSettings.AddTypeConverter (new UnityObjectConverter());

			writerSettings.PrettyPrint = settings.prettyPrint;
#endif
			meta = new GraphMeta();
		}

		public byte[] CloseSerialize () {
			// As the last step, serialize metadata
			byte[] bytes = SerializeMeta ();
			AddChecksum (bytes);
			zip.AddEntry("meta"+jsonExt,bytes);

#if !ASTAR_NO_ZIP
			// Set dummy dates on every file to prevent the binary data to change
			// for identical settings and graphs.
			// Prevents the scene from being marked as dirty in the editor
			// If ASTAR_NO_ZIP is defined this is not relevant since the replacement zip
			// implementation does not even store dates
			var dummy = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
			foreach (var entry in zip.Entries) {
				entry.AccessedTime = dummy;
				entry.CreationTime = dummy;
				entry.LastModified = dummy;
				entry.ModifiedTime = dummy;
			}
#endif

			// Save all entries to a single byte array
			var output = new MemoryStream();
			zip.Save(output);
			bytes = output.ToArray();
			output.Dispose();

			zip.Dispose();

			zip = null;
			return bytes;
		}

		public void SerializeGraphs (NavGraph[] _graphs) {
			if (graphs != null) throw new InvalidOperationException ("Cannot serialize graphs multiple times.");
			graphs = _graphs;

			if (zip == null) throw new NullReferenceException ("You must not call CloseSerialize before a call to this function");

			if (graphs == null) graphs = new NavGraph[0];

			for (int i=0;i<graphs.Length;i++) {
				//Ignore graph if null
				if (graphs[i] == null) continue;

				// Serialize the graph to a byte array
				byte[] bytes = Serialize(graphs[i]);

				AddChecksum (bytes);
				zip.AddEntry ("graph"+i+jsonExt,bytes);
			}
		}

		/** Serialize metadata about all graphs */
		byte[] SerializeMeta () {
			meta.version = AstarPath.Version;
			meta.graphs = data.graphs.Length;
			meta.guids = new string[data.graphs.Length];
			meta.typeNames = new string[data.graphs.Length];
			meta.nodeCounts = new int[data.graphs.Length];

			// For each graph, save the guid
			// of the graph and the type of it
			for (int i=0;i<data.graphs.Length;i++) {
				if (data.graphs[i] == null) continue;

				meta.guids[i] = data.graphs[i].guid.ToString();
				meta.typeNames[i] = data.graphs[i].GetType().FullName;
			}

#if !ASTAR_NO_JSON
			// Grab a cached string builder to avoid allocations
			var output = GetStringBuilder ();
			var writer = new JsonWriter (output,writerSettings);
			writer.Write (meta);

			return encoding.GetBytes (output.ToString());
#else
			// Serialize the metadata without using json for compatibility
			var mem = new System.IO.MemoryStream();
			var writer = new System.IO.BinaryWriter(mem);
			writer.Write ( "A*" ); // Magic string
			writer.Write (meta.version.Major);
			writer.Write (meta.version.Minor);
			writer.Write (meta.version.Build);
			writer.Write (meta.version.Revision);
			writer.Write (meta.graphs);

			writer.Write (meta.guids.Length);
			for ( int i = 0; i < meta.guids.Length; i++ ) writer.Write ( meta.guids[i] ?? "" );

			writer.Write (meta.typeNames.Length);
			for ( int i = 0; i < meta.typeNames.Length; i++ ) writer.Write ( meta.typeNames[i] ?? "" );

			writer.Write (meta.nodeCounts.Length);
			for ( int i = 0; i < meta.nodeCounts.Length; i++ ) writer.Write ( meta.nodeCounts[i] );

			return mem.ToArray();
#endif
		}

		/** Serializes the graph settings to JSON and returns the data */
		public byte[] Serialize (NavGraph graph) {
#if !ASTAR_NO_JSON
			// Grab a cached string builder to avoid allocations
			var output = GetStringBuilder ();
			var writer = new JsonWriter (output,writerSettings);
			writer.Write (graph);

			return encoding.GetBytes (output.ToString());
#else
			var mem = new System.IO.MemoryStream();
			var writer = new System.IO.BinaryWriter(mem);
			var ctx = new GraphSerializationContext (writer);
			graph.SerializeSettings (ctx);
			return mem.ToArray();
#endif
		}

		public void SerializeNodes () {
			if (!settings.nodes) return;
			if (graphs == null) throw new InvalidOperationException ("Cannot serialize nodes with no serialized graphs (call SerializeGraphs first)");

			for (int i=0;i<graphs.Length;i++) {
				byte[] bytes = SerializeNodes (i);
				AddChecksum (bytes);
				zip.AddEntry ("graph"+i+"_nodes"+binaryExt,bytes);
			}

			for (int i=0;i<graphs.Length;i++) {
				byte[] bytes = SerializeNodeConnections (i);
				AddChecksum (bytes);
				zip.AddEntry ("graph"+i+"_conns"+binaryExt,bytes);
			}
		}

		/** Not used anymore */
		byte[] SerializeNodes (int index) {
			return new byte[0];
		}

		public void SerializeExtraInfo () {
			if (!settings.nodes) return;

			int totCount = 0;
			for (int i=0;i<graphs.Length;i++) {
				if (graphs[i] == null) continue;
				graphs[i].GetNodes (node => {
totCount = Math.Max (node.NodeIndex, totCount); if (node.NodeIndex == -1) { Debug.LogError ("Graph contains destroyed nodes. This is a bug."); } return true; }); } { var stream = new MemoryStream (); var wr = new BinaryWriter (stream); wr.Write (totCount); int c = 0; for (int i=0;i<graphs.Length;i++) { if (graphs[i] == null) continue; graphs[i].GetNodes (node => { c = Math.Max (node.NodeIndex, c); wr.Write (node.NodeIndex); return true; }); } if (c != totCount) throw new Exception ("Some graphs are not consistent in their GetNodes calls, sequential calls give different results."); byte[] bytes = stream.ToArray (); #if NETFX_CORE wr.Dispose(); #else wr.Close (); #endif AddChecksum (bytes); zip.AddEntry ("graph_references"+binaryExt,bytes); } for (int i=0;i<graphs.Length;i++) { if (graphs[i] == null) continue; var stream = new MemoryStream (); var wr = new BinaryWriter (stream); var ctx = new GraphSerializationContext(wr); graphs[i].SerializeExtraInfo (ctx); byte[] bytes = stream.ToArray (); #if NETFX_CORE wr.Dispose(); #else wr.Close (); #endif AddChecksum (bytes); zip.AddEntry ("graph"+i+"_extra"+binaryExt,bytes); stream = new MemoryStream (); wr = new BinaryWriter (stream); ctx = new GraphSerializationContext(wr); graphs[i].GetNodes (delegate (GraphNode node) { node.SerializeReferences (ctx); return true; }); #if NETFX_CORE wr.Dispose(); #else wr.Close (); #endif bytes = stream.ToArray (); AddChecksum (bytes); zip.AddEntry ("graph"+i+"_references"+binaryExt,bytes); } } /** Serialize node connections for given graph index. 
* \note Not used anymore */ byte[] SerializeNodeConnections (int index) { return new byte[0]; } public void SerializeEditorSettings (GraphEditorBase[] editors) { if (editors == null || !settings.editorSettings) return; #if !ASTAR_NO_JSON for (int i=0;i<editors.Length;i++) { if (editors[i] == null) return; var output = GetStringBuilder (); var writer = new JsonWriter (output,writerSettings); writer.Write (editors[i]); var bytes = encoding.GetBytes (output.ToString()); //Less or equal to 2 bytes means that nothing was saved (file is "{}") if (bytes.Length <= 2) continue; AddChecksum(bytes); zip.AddEntry ("graph"+i+"_editor"+jsonExt,bytes); } #endif } #endregion #region Deserialize public bool OpenDeserialize (byte[] bytes) { #if !ASTAR_NO_JSON // Add some converters so that we can deserialize Unity builtin types readerSettings = new JsonReaderSettings(); readerSettings.AddTypeConverter (new VectorConverter()); readerSettings.AddTypeConverter (new BoundsConverter()); readerSettings.AddTypeConverter (new LayerMaskConverter()); readerSettings.AddTypeConverter (new MatrixConverter()); readerSettings.AddTypeConverter (new GuidConverter()); readerSettings.AddTypeConverter (new UnityObjectConverter()); #endif str = new MemoryStream(); str.Write(bytes,0,bytes.Length); str.Position = 0; try { zip = ZipFile.Read(str); } catch (Exception e) { //Catches exceptions when an invalid zip file is found Debug.LogWarning ("Caught exception when loading from zip\n"+e); str.Dispose (); return false; } meta = DeserializeMeta (zip["meta"+jsonExt]); if (FullyDefinedVersion(meta.version) > FullyDefinedVersion(AstarPath.Version)) { Debug.LogWarning ("Trying to load data from a newer version of the A* Pathfinding Project\nCurrent version: "+AstarPath.Version+" Data version: "+meta.version + "\nThis is usually fine as the stored data is usually backwards and forwards compatible." 
+ "\nHowever node data (not settings) can get corrupted between versions, so it is recommended " + "to recalculate any caches (those for faster startup) and resave any files. Even if it seems to load fine, it might cause subtle bugs.\n"); } else if (FullyDefinedVersion(meta.version) < FullyDefinedVersion(AstarPath.Version)) { Debug.LogWarning ("Trying to load data from an older version of the A* Pathfinding Project\nCurrent version: "+AstarPath.Version+" Data version: "+meta.version+ "\nThis is usually fine, it just means you have upgraded to a new version." + "\nHowever node data (not settings) can get corrupted between versions, so it is recommended " + "to recalculate any caches (those for faster startup) and resave any files. Even if it seems to load fine, it might cause subtle bugs.\n"); } return true; } /** Returns a version with all fields fully defined. * This is used because by default new Version(3,0,0) > new Version(3,0). * This is not the desired behaviour so we make sure that all fields are defined here */ static System.Version FullyDefinedVersion (System.Version v) { return new System.Version(Mathf.Max(v.Major, 0), Mathf.Max(v.Minor, 0), Mathf.Max(v.Build, 0), Mathf.Max(v.Revision, 0)); } public void CloseDeserialize () { str.Dispose(); zip.Dispose(); zip = null; str = null; } /** Deserializes graph settings. * \note Stored in files named "graph#.json" where # is the graph number. */ public NavGraph[] DeserializeGraphs () { // Allocate a list of graphs to be deserialized graphs = new NavGraph[meta.graphs]; int nonNull = 0; for (int i=0;i<meta.graphs;i++) { // Get the graph type from the metadata we deserialized earlier var tp = meta.GetGraphType(i); // Graph was null when saving, ignore if (System.Type.Equals (tp, null)) continue; nonNull++; var entry = zip["graph"+i+jsonExt]; if (entry == null) throw new FileNotFoundException ("Could not find data for graph "+i+" in zip. 
Entry 'graph+"+i+jsonExt+"' does not exist"); // Create a new graph of the right type NavGraph graph = data.CreateGraph(tp); graph.graphIndex = (uint)(i + graphIndexOffset); #if !ASTAR_NO_JSON var entryText = GetString(entry); var reader = new JsonReader(entryText,readerSettings); reader.PopulateObject (ref graph); #else var mem = new MemoryStream (); entry.Extract(mem); mem.Position = 0; var reader = new BinaryReader (mem); var ctx = new GraphSerializationContext(reader, null, i + graphIndexOffset); graph.DeserializeSettings (ctx); #endif graphs[i] = graph; if (graphs[i].guid.ToString () != meta.guids[i]) throw new Exception ("Guid in graph file not equal to guid defined in meta file. Have you edited the data manually?\n"+graphs[i].guid+" != "+meta.guids[i]); } // Remove any null entries from the list var compressed = new NavGraph[nonNull]; nonNull = 0; for ( int i=0;i<graphs.Length;i++) { if ( graphs[i] != null ) { compressed[nonNull] = graphs[i]; nonNull++; } } graphs = compressed; return graphs; } /** Deserializes extra graph info. * Extra graph info is specified by the graph types. * \see Pathfinding.NavGraph.DeserializeExtraInfo * \note Stored in files named "graph#_extra.binary" where # is the graph number. 
*/ public void DeserializeExtraInfo () { bool anySerialized = false; // Loop through all graphs and deserialize the extra info // if there is any such info in the zip file for (int i=0;i<graphs.Length;i++) { var entry = zip["graph"+i+"_extra"+binaryExt]; if (entry == null) continue; anySerialized = true; var str = new MemoryStream(); entry.Extract (str); str.Seek (0, SeekOrigin.Begin); var reader = new BinaryReader (str); var ctx = new GraphSerializationContext(reader, null, i + graphIndexOffset); // Call the graph to process the data graphs[i].DeserializeExtraInfo (ctx); } if (!anySerialized) { return; } // Sanity check // Make sure the graphs don't contain destroyed nodes int totCount = 0; for (int i=0;i<graphs.Length;i++) { if (graphs[i] == null) continue; graphs[i].GetNodes (delegate (GraphNode node) { totCount = Math.Max (node.NodeIndex, totCount); if (node.NodeIndex == -1) { Debug.LogError ("Graph contains destroyed nodes. This is a bug."); } return true; }); } { // Get the file containing the list of all node indices // This is correlated with the new indices of the nodes and a mapping from old to new // is done so that references can be resolved var entry = zip["graph_references"+binaryExt]; if (entry == null) throw new Exception ("Node references not found in the data. 
Was this loaded from an older version of the A* Pathfinding Project?"); var str = new MemoryStream(); entry.Extract (str); str.Seek (0, SeekOrigin.Begin); var reader = new BinaryReader (str); int count = reader.ReadInt32(); var int2Node = new GraphNode[count+1]; try { for (int i=0;i<graphs.Length;i++) { if (graphs[i] == null) continue; graphs[i].GetNodes (node => { int2Node[reader.ReadInt32()] = node; return true; }); } } catch (Exception e) { throw new Exception ("Some graph(s) has thrown an exception during GetNodes, or some graph(s) have deserialized more or fewer nodes than were serialized", e); } #if NETFX_CORE reader.Dispose(); #else reader.Close (); #endif // Deserialize node references for (int i=0;i<graphs.Length;i++) { if (graphs[i] == null) continue; entry = zip["graph"+i+"_references"+binaryExt]; if (entry == null) throw new Exception ("Node references for graph " +i + " not found in the data. Was this loaded from an older version of the A* Pathfinding Project?"); str = new MemoryStream(); entry.Extract (str); str.Seek (0, SeekOrigin.Begin); reader = new BinaryReader (str); var ctx = new GraphSerializationContext(reader, int2Node, i + graphIndexOffset); graphs[i].GetNodes (delegate (GraphNode node) { node.DeserializeReferences (ctx); return true; }); } } } /** Calls PostDeserialization on all loaded graphs */ public void PostDeserialization () { for (int i=0;i<graphs.Length;i++) { if (graphs[i] == null) continue; graphs[i].PostDeserialization(); } } /** Deserializes graph editor settings. * For future compatibility this method does not assume that the \a graphEditors array matches the #graphs array in order and/or count. * It searches for a matching graph (matching if graphEditor.target == graph) for every graph editor. * Multiple graph editors should not refer to the same graph.\n * \note Stored in files named "graph#_editor.json" where # is the graph number. 
*/ public void DeserializeEditorSettings (GraphEditorBase[] graphEditors) { #if !ASTAR_NO_JSON if (graphEditors == null) return; for (int i=0;i<graphEditors.Length;i++) { if (graphEditors[i] == null) continue; for (int j=0;j<graphs.Length;j++) { if (graphs[j] == null || graphEditors[i].target != graphs[j]) continue; ZipEntry entry = zip["graph"+j+"_editor"+jsonExt]; if (entry == null) continue; string entryText = GetString (entry); var reader = new JsonReader(entryText,readerSettings); GraphEditorBase graphEditor = graphEditors[i]; reader.PopulateObject (ref graphEditor); graphEditors[i] = graphEditor; break; } } #endif } /** Returns the data in the zip entry as a string */ private string GetString (ZipEntry entry) { var buffer = new MemoryStream(); entry.Extract(buffer); buffer.Position = 0; var reader = new StreamReader(buffer); string s = reader.ReadToEnd(); buffer.Position = 0; reader.Dispose(); return s; } private GraphMeta DeserializeMeta (ZipEntry entry) { if ( entry == null ) throw new Exception ("No metadata found in serialized data."); #if !ASTAR_NO_JSON string s = GetString (entry); var reader = new JsonReader(s,readerSettings); return (GraphMeta)reader.Deserialize(typeof(GraphMeta)); #else var meta = new GraphMeta(); var mem = new System.IO.MemoryStream(); entry.Extract (mem); mem.Position = 0; var reader = new System.IO.BinaryReader(mem); if ( reader.ReadString() != "A*" ) throw new System.Exception ("Invalid magic number in saved data"); int major = reader.ReadInt32 (); int minor = reader.ReadInt32 (); int build = reader.ReadInt32 (); int revision = reader.ReadInt32 (); // Required because when saving a version with a field not set, it will save it as -1 // and then the Version constructor will throw an exception (which we do not want) if ( major < 0 ) meta.version = new Version (0, 0); else if ( minor < 0 ) meta.version = new Version (major, 0); else if ( build < 0 ) meta.version = new Version (major, minor); else if ( revision < 0 ) meta.version = 
new Version (major, minor, build); else meta.version = new Version (major, minor, build, revision); meta.graphs = reader.ReadInt32 (); meta.guids = new string[reader.ReadInt32()]; for ( int i = 0; i < meta.guids.Length; i++ ) meta.guids[i] = reader.ReadString(); meta.typeNames = new string[reader.ReadInt32()]; for ( int i = 0; i < meta.typeNames.Length; i++ ) meta.typeNames[i] = reader.ReadString(); meta.nodeCounts = new int[reader.ReadInt32()]; for ( int i = 0; i < meta.nodeCounts.Length; i++ ) meta.nodeCounts[i] = reader.ReadInt32(); return meta; #endif } #endregion #region Utils /** Save the specified data at the specified path */ public static void SaveToFile (string path, byte[] data) { #if NETFX_CORE throw new System.NotSupportedException ("Cannot save to file on this platform"); #else using (var stream = new FileStream(path, FileMode.Create)) { stream.Write (data,0,data.Length); } #endif } /** Load the specified data from the specified path */ public static byte[] LoadFromFile (string path) { #if NETFX_CORE throw new System.NotSupportedException ("Cannot load from file on this platform"); #else using (var stream = new FileStream(path, FileMode.Open)) { var bytes = new byte[(int)stream.Length]; stream.Read (bytes,0,(int)stream.Length); return bytes; } #endif } #endregion } /** Metadata for all graphs included in serialization */ class GraphMeta { /** Project version it was saved with */ public Version version; /** Number of graphs serialized */ public int graphs; /** Guids for all graphs */ public string[] guids; /** Type names for all graphs */ public string[] typeNames; /** Number of nodes for every graph. Nodes are not necessarily serialized */ public int[] nodeCounts; /** Returns the Type of graph number \a i */ public Type GetGraphType (int i) { // The graph was null when saving. 
Ignore it if (String.IsNullOrEmpty(typeNames[i])) return null; #if ASTAR_FAST_NO_EXCEPTIONS || UNITY_WEBGL System.Type[] types = AstarData.DefaultGraphTypes; Type type = null; for (int j=0;j<types.Length;j++) { if (types[j].FullName == typeNames[i]) type = types[j]; } #else Type type = Type.GetType (typeNames[i]); #endif if (!System.Type.Equals (type, null)) return type; throw new Exception ("No graph of type '" + typeNames [i] + "' could be created, type does not exist"); } } /** Holds settings for how graphs should be serialized */ public class SerializeSettings { /** Enable to include node data. * If false, only settings will be saved */ public bool nodes = true; /** Use pretty printing for the json data. * Good if you want to open up the saved data and edit it manually */ public bool prettyPrint; /** Save editor settings. * \warning Only applicable when saving from the editor using the AstarPathEditor methods */ public bool editorSettings; /** Serialization settings for only saving graph settings */ public static SerializeSettings Settings { get { var s = new SerializeSettings(); s.nodes = false; return s; } } /** Serialization settings for saving everything that can be saved. * This includes all node data */ public static SerializeSettings All { get { var s = new SerializeSettings(); s.nodes = true; return s; } } } }
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.Classification;
using Microsoft.CodeAnalysis.Text;
using Microsoft.SourceBrowser.Common;

namespace Microsoft.SourceBrowser.HtmlGenerator
{
    public partial class Classification
    {
        // Classifies the whole document and returns a gap-free, merged, filtered sequence
        // of ranges covering the entire text. Returns an empty sequence if classification throws.
        public async Task<IEnumerable<Range>> Classify(Document document, SourceText text)
        {
            var span = TextSpan.FromBounds(0, text.Length);
            IEnumerable<ClassifiedSpan> classifiedSpans = null;
            try
            {
                classifiedSpans = await Classifier.GetClassifiedSpansAsync(document, span);
            }
            catch (Exception ex)
            {
                Log.Exception(ex, "Exception during Classification of document: " + document.FilePath);
                return Array.Empty<Range>();
            }

            // Roslyn 3.0.0 introduced `Symbol - Static` as an "additive" classification, meaning that multiple
            // classified spans will be emitted for the same TextSpan. This will filter out those classified spans
            // since they are "extra" information and do not represent the identifier type. This filter can be
            // removed after taking Roslyn 3.1.0 as the classifier will filter before returning classified spans.
            var ranges = classifiedSpans.Where(classifiedSpan =>
                classifiedSpan.ClassificationType != ClassificationTypeNames.StaticSymbol &&
                classifiedSpan.ClassificationType != ClassificationTypeNames.StringEscapeCharacter &&
                !classifiedSpan.ClassificationType.StartsWith("regex")).Select(classifiedSpan =>
                    new Range
                    {
                        ClassifiedSpan = classifiedSpan,
                        Text = text.GetSubText(classifiedSpan.TextSpan).ToString()
                    });
            ranges = Merge(text, ranges);
            ranges = FilterByClassification(ranges);
            ranges = FillGaps(text, ranges);
            return ranges;
        }

        // Maps each range's classification through FilterClassificationType,
        // dropping ranges whose classification maps to null (ignored kinds).
        private IEnumerable<Range> FilterByClassification(IEnumerable<Range> spans)
        {
            foreach (var span in spans)
            {
                string filtered = FilterClassificationType(span.ClassificationType);
                if (filtered != null)
                {
                    yield return new Range(filtered, span.TextSpan, span.Text);
                }
            }
        }

        // Coalesces runs of adjacent mergeable keyword spans into a single keyword range.
        // Lazy iterator: state (mergeStart/mergeEnd) is flushed when a non-mergeable span
        // arrives and once more after the input is exhausted.
        private IEnumerable<Range> Merge(SourceText text, IEnumerable<Range> spans)
        {
            int mergeStart = -1;
            int mergeEnd = -1;
            foreach (var span in spans)
            {
                if (IsMergeable(span))
                {
                    if (mergeStart == -1)
                    {
                        mergeStart = span.TextSpan.Start;
                    }

                    mergeEnd = span.TextSpan.End;
                }
                else
                {
                    if (mergeStart != -1)
                    {
                        var textSpan = new TextSpan(mergeStart, mergeEnd - mergeStart);
                        yield return CreateRange(
                            text,
                            textSpan,
                            Constants.ClassificationKeyword);
                        mergeStart = -1;
                    }

                    yield return span;
                }
            }

            // Flush a pending merged run that reached the end of the input
            if (mergeStart != -1)
            {
                var textSpan = new TextSpan(mergeStart, mergeEnd - mergeStart);
                yield return CreateRange(
                    text,
                    textSpan,
                    Constants.ClassificationKeyword);
            }
        }

        // A span can be merged when it is a keyword, excluding keywords that are
        // hyperlinked/handled individually elsewhere (this, base, new, var, partial, ...)
        private static bool IsMergeable(Range span)
        {
            return span.ClassificationType == Constants.RoslynClassificationKeyword
                && span.Text != "this"
                && span.Text != "base"
                && span.Text != "New"
                && span.Text != "new"
                && span.Text != "var"
                && span.Text != "partial"
                && span.Text != "Partial";
        }

        // Inserts unclassified (null-classification) ranges for every stretch of text
        // not covered by an incoming span, so the output covers the whole document.
        private IEnumerable<Range> FillGaps(SourceText text, IEnumerable<Range> spans)
        {
            int current = 0;
            Range previous = null;
            foreach (var span in spans)
            {
                int start = span.TextSpan.Start;
                if (start > current)
                {
                    var textSpan = new TextSpan(current, start - current);
                    yield return CreateRange(text, textSpan, null);
                }

                // Filter out duplicate classifications with the same span (see bug 17602).
                if (previous == null || span.TextSpan != previous.TextSpan)
                {
                    yield return span;
                }

                previous = span;
                current = span.TextSpan.End;
            }

            // Trailing gap after the last span, if any
            if (current < text.Length)
            {
                var textSpan = new TextSpan(current, text.Length - current);
                yield return CreateRange(text, textSpan, null);
            }
        }

        private Range CreateRange(SourceText text, TextSpan span, string classification)
        {
            return new Range(classification, span, text.GetSubText(span).ToString());
        }

        // Classifications with no visual representation in the generated HTML
        private static readonly HashSet<string> ignoreClassifications = new HashSet<string>(new[]
        {
            "operator",
            "operator - overloaded",
            "number",
            "punctuation",
            "preprocessor text",
            "xml literal - text"
        });

        // Maps Roslyn classification type names onto the generator's own classification constants
        private static readonly Dictionary<string, string> replaceClassifications = new Dictionary<string, string>
        {
            ["keyword"] = Constants.ClassificationKeyword,
            ["keyword - control"] = Constants.ClassificationKeyword,
            ["identifier"] = Constants.ClassificationIdentifier,
            ["field name"] = Constants.ClassificationIdentifier,
            ["enum member name"] = Constants.ClassificationIdentifier,
            ["constant name"] = Constants.ClassificationIdentifier,
            ["local name"] = Constants.ClassificationIdentifier,
            ["parameter name"] = Constants.ClassificationIdentifier,
            ["method name"] = Constants.ClassificationIdentifier,
            ["extension method name"] = Constants.ClassificationIdentifier,
            ["property name"] = Constants.ClassificationIdentifier,
            ["event name"] = Constants.ClassificationIdentifier,
            ["namespace name"] = Constants.ClassificationIdentifier,
            ["label name"] = Constants.ClassificationIdentifier,
            ["class name"] = Constants.ClassificationTypeName,
            ["struct name"] = Constants.ClassificationTypeName,
            ["interface name"] = Constants.ClassificationTypeName,
            ["enum name"] = Constants.ClassificationTypeName,
            ["delegate name"] = Constants.ClassificationTypeName,
            ["module name"] = Constants.ClassificationTypeName,
            ["type parameter name"] = Constants.ClassificationTypeName,
            ["preprocessor keyword"] = Constants.ClassificationPreprocessKeyword,
            ["xml doc comment - delimiter"] = Constants.ClassificationComment,
            ["xml doc comment - name"] = Constants.ClassificationComment,
            ["xml doc comment - text"] = Constants.ClassificationComment,
            ["xml doc comment - comment"] = Constants.ClassificationComment,
            ["xml doc comment - entity reference"] = Constants.ClassificationComment,
            ["xml doc comment - attribute name"] = Constants.ClassificationComment,
            ["xml doc comment - attribute quotes"] = Constants.ClassificationComment,
            ["xml doc comment - attribute value"] = Constants.ClassificationComment,
            ["xml doc comment - cdata section"] = Constants.ClassificationComment,
            ["xml literal - delimiter"] = Constants.ClassificationXmlLiteralDelimiter,
            ["xml literal - name"] = Constants.ClassificationXmlLiteralName,
            ["xml literal - attribute name"] = Constants.ClassificationXmlLiteralAttributeName,
            ["xml literal - attribute quotes"] = Constants.ClassificationXmlLiteralAttributeQuotes,
            ["xml literal - attribute value"] = Constants.ClassificationXmlLiteralAttributeValue,
            ["xml literal - entity reference"] = Constants.ClassificationXmlLiteralEntityReference,
            ["xml literal - cdata section"] = Constants.ClassificationXmlLiteralCDataSection,
            ["xml literal - processing instruction"] = Constants.ClassificationXmlLiteralProcessingInstruction,
            ["xml literal - embedded expression"] = Constants.ClassificationXmlLiteralEmbeddedExpression,
            ["xml literal - comment"] = Constants.ClassificationComment,
            ["comment"] = Constants.ClassificationComment,
            ["string"] = Constants.ClassificationLiteral,
            ["string - verbatim"] = Constants.ClassificationLiteral,
            ["excluded code"] = Constants.ClassificationExcludedCode,
        };

        // Returns null for ignored classifications, the mapped constant for known ones,
        // and ClassificationUnknown for anything unrecognized.
        public string FilterClassificationType(string classificationType)
        {
            if (classificationType == null || ignoreClassifications.Contains(classificationType))
            {
                return null;
            }

            if (classificationType == Constants.ClassificationKeyword)
            {
                return classificationType;
            }

            if (replaceClassifications.TryGetValue(classificationType, out string replacement))
            {
                return replacement;
            }

            return Constants.ClassificationUnknown;
        }
    }
}
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Net.Http.Headers;
using System.Web.Http;
using System.Web.Http.Description;
using MvcApplicationTest.Areas.HelpPage.Models;

namespace MvcApplicationTest.Areas.HelpPage
{
    /// <summary>
    /// Extension methods on <see cref="HttpConfiguration"/> for configuring the auto-generated help page
    /// (documentation provider, sample requests/responses and the shared sample generator).
    /// </summary>
    public static class HelpPageConfigurationExtensions
    {
        // Key prefix used when caching generated help page models in config.Properties
        private const string ApiModelPrefix = "MS_HelpPageApiModel_";

        /// <summary>
        /// Sets the documentation provider for help page.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="documentationProvider">The documentation provider.</param>
        public static void SetDocumentationProvider(this HttpConfiguration config, IDocumentationProvider documentationProvider)
        {
            config.Services.Replace(typeof(IDocumentationProvider), documentationProvider);
        }

        /// <summary>
        /// Sets the objects that will be used by the formatters to produce sample requests/responses.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sampleObjects">The sample objects.</param>
        public static void SetSampleObjects(this HttpConfiguration config, IDictionary<Type, object> sampleObjects)
        {
            config.GetHelpPageSampleGenerator().SampleObjects = sampleObjects;
        }

        /// <summary>
        /// Sets the sample request directly for the specified media type and action.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample request.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
        {
            // "*" acts as a wildcard matching any parameter list for this action
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, new[] { "*" }), sample);
        }

        /// <summary>
        /// Sets the sample request directly for the specified media type and action with parameters.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample request.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetSampleRequest(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Request, controllerName, actionName, parameterNames), sample);
        }

        /// <summary>
        /// Sets the sample request directly for the specified media type of the action.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample response.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, new[] { "*" }), sample);
        }

        /// <summary>
        /// Sets the sample response directly for the specified media type of the action with specific parameters.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample response.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetSampleResponse(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, SampleDirection.Response, controllerName, actionName, parameterNames), sample);
        }

        /// <summary>
        /// Sets the sample directly for all actions with the specified type and media type.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sample">The sample.</param>
        /// <param name="mediaType">The media type.</param>
        /// <param name="type">The parameter type or return type of an action.</param>
        public static void SetSampleForType(this HttpConfiguration config, object sample, MediaTypeHeaderValue mediaType, Type type)
        {
            config.GetHelpPageSampleGenerator().ActionSamples.Add(new HelpPageSampleKey(mediaType, type), sample);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate request samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, new[] { "*" }), type);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate request samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetActualRequestType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Request, controllerName, actionName, parameterNames), type);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate response samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, new[] { "*" }), type);
        }

        /// <summary>
        /// Specifies the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> returned as part of the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
        /// The help page will use this information to produce more accurate response samples.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="type">The type.</param>
        /// <param name="controllerName">Name of the controller.</param>
        /// <param name="actionName">Name of the action.</param>
        /// <param name="parameterNames">The parameter names.</param>
        public static void SetActualResponseType(this HttpConfiguration config, Type type, string controllerName, string actionName, params string[] parameterNames)
        {
            config.GetHelpPageSampleGenerator().ActualHttpMessageTypes.Add(new HelpPageSampleKey(SampleDirection.Response, controllerName, actionName, parameterNames), type);
        }

        /// <summary>
        /// Gets the help page sample generator.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <returns>The help page sample generator.</returns>
        public static HelpPageSampleGenerator GetHelpPageSampleGenerator(this HttpConfiguration config)
        {
            // Lazily creates and caches one generator per configuration instance
            return (HelpPageSampleGenerator)config.Properties.GetOrAdd(
                typeof(HelpPageSampleGenerator),
                k => new HelpPageSampleGenerator());
        }

        /// <summary>
        /// Sets the help page sample generator.
        /// </summary>
        /// <param name="config">The <see cref="HttpConfiguration"/>.</param>
        /// <param name="sampleGenerator">The help page sample generator.</param>
        public static void SetHelpPageSampleGenerator(this HttpConfiguration config, HelpPageSampleGenerator sampleGenerator)
        {
            config.Properties.AddOrUpdate(
                typeof(HelpPageSampleGenerator),
                k => sampleGenerator,
                (k, o) => sampleGenerator);
        }

        /// <summary>
        /// Gets the model that represents an API displayed on the help page. The model is initialized on the first call and cached for subsequent calls.
/// </summary> /// <param name="config">The <see cref="HttpConfiguration"/>.</param> /// <param name="apiDescriptionId">The <see cref="ApiDescription"/> ID.</param> /// <returns> /// An <see cref="HelpPageApiModel"/> /// </returns> public static HelpPageApiModel GetHelpPageApiModel(this HttpConfiguration config, string apiDescriptionId) { object model; string modelId = ApiModelPrefix + apiDescriptionId; if (!config.Properties.TryGetValue(modelId, out model)) { Collection<ApiDescription> apiDescriptions = config.Services.GetApiExplorer().ApiDescriptions; ApiDescription apiDescription = apiDescriptions.FirstOrDefault(api => String.Equals(api.GetFriendlyId(), apiDescriptionId, StringComparison.OrdinalIgnoreCase)); if (apiDescription != null) { HelpPageSampleGenerator sampleGenerator = config.GetHelpPageSampleGenerator(); model = GenerateApiModel(apiDescription, sampleGenerator); config.Properties.TryAdd(modelId, model); } } return (HelpPageApiModel)model; } [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as ErrorMessages.")] private static HelpPageApiModel GenerateApiModel(ApiDescription apiDescription, HelpPageSampleGenerator sampleGenerator) { HelpPageApiModel apiModel = new HelpPageApiModel(); apiModel.ApiDescription = apiDescription; try { foreach (var item in sampleGenerator.GetSampleRequests(apiDescription)) { apiModel.SampleRequests.Add(item.Key, item.Value); LogInvalidSampleAsError(apiModel, item.Value); } foreach (var item in sampleGenerator.GetSampleResponses(apiDescription)) { apiModel.SampleResponses.Add(item.Key, item.Value); LogInvalidSampleAsError(apiModel, item.Value); } } catch (Exception e) { apiModel.ErrorMessages.Add(String.Format(CultureInfo.CurrentCulture, "An exception has occurred while generating the sample. 
Exception Message: {0}", e.Message)); } return apiModel; } private static void LogInvalidSampleAsError(HelpPageApiModel apiModel, object sample) { InvalidSample invalidSample = sample as InvalidSample; if (invalidSample != null) { apiModel.ErrorMessages.Add(invalidSample.ErrorMessage); } } } }
// CodeContracts
//
// Copyright (c) Microsoft Corporation
//
// All rights reserved.
//
// MIT License
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

using System.Diagnostics.Contracts;
using System;
using System.Collections;
using System.Diagnostics;
using System.Reflection;
using System.Runtime.InteropServices;

namespace System.Collections.Generic
{
    // Code Contracts reference declaration for Dictionary<TKey,TValue>.
    // The method bodies here are placeholders (default(...) / NotImplementedException):
    // only the Contract.Ensures postconditions and the [ContractModel] members carry
    // meaning for the static checker. Do not call this code at runtime.
    public class Dictionary<TKey, TValue> : IDictionary<TKey, TValue>
    {
        // Empty dictionary, default capacity, default key comparer.
        public Dictionary()
        {
            Contract.Ensures(Count == 0);
        }

        // Copies the given dictionary using the default key comparer.
        // Throws ArgumentException (duplicate keys) or ArgumentNullException (dictionary is null).
        public Dictionary(IDictionary<TKey, TValue> dictionary)
        {
            Contract.Ensures(Count == dictionary.Count);
        }

        // Empty dictionary using the supplied comparer (null means the default EqualityComparer<TKey>).
        public Dictionary(IEqualityComparer<TKey> comparer)
        {
            Contract.Ensures(Count == 0);
        }

        // Empty dictionary with the given initial capacity.
        // Throws ArgumentOutOfRangeException when capacity < 0.
        public Dictionary(int capacity)
        {
            Contract.Ensures(Count == 0);
        }

        // Copies the given dictionary using the supplied comparer.
        // Throws ArgumentException (duplicate keys) or ArgumentNullException (dictionary is null).
        public Dictionary(IDictionary<TKey, TValue> dictionary, IEqualityComparer<TKey> comparer)
        {
            Contract.Ensures(Count == dictionary.Count);
        }

        // Empty dictionary with the given capacity and comparer.
        // Throws ArgumentOutOfRangeException when capacity < 0.
        public Dictionary(int capacity, IEqualityComparer<TKey> comparer)
        {
            Contract.Ensures(Count == 0);
        }

        // The comparer used for key equality and hashing; contractually never null.
        public IEqualityComparer<TKey> Comparer
        {
            get
            {
                Contract.Ensures(Contract.Result<IEqualityComparer<TKey>>() != null);
                return default(IEqualityComparer<TKey>);
            }
        }

        // Number of key/value pairs in the dictionary.
        public virtual int Count
        {
            get { return default(int); }
        }

        [Pure]
        public virtual bool ContainsKey(TKey key) { throw new NotImplementedException(); }

        [Pure]
        public bool ContainsValue(TValue value)
        {
            // A true result implies the dictionary is non-empty.
            var @this = (IDictionary<TKey, TValue>)this;
            Contract.Ensures(!Contract.Result<bool>() || @this.Count > 0);
            throw new NotImplementedException();
        }

        // Collection of keys; contractually never null.
        public Dictionary<TKey, TValue>.KeyCollection Keys
        {
            get
            {
                Contract.Ensures(Contract.Result<Dictionary<TKey, TValue>.KeyCollection>() != null);
                return default(Dictionary<TKey, TValue>.KeyCollection);
            }
        }

        // Collection of values; contractually never null.
        public Dictionary<TKey, TValue>.ValueCollection Values
        {
            get
            {
                Contract.Ensures(Contract.Result<Dictionary<TKey, TValue>.ValueCollection>() != null);
                return default(Dictionary<TKey, TValue>.ValueCollection);
            }
        }

        // NOTE(review): the original file carried the ISerializable GetObjectData(info, context)
        // documentation at this point, but the member itself is absent — presumably omitted
        // from the contract surface; confirm against the matching runtime assembly.

#if !SILVERLIGHT
        // ISerializable deserialization callback; raises the deserialization event when complete.
        // Throws SerializationException when the associated SerializationInfo is invalid.
        extern public virtual void OnDeserialization(object sender);
#endif

        // Collection of keys in a Dictionary<TKey,TValue>. Sealed.
        public sealed class KeyCollection : IEnumerable<TKey>
        {
            // Wraps the keys of the given dictionary.
            // Throws ArgumentNullException when dictionary is null.
            public KeyCollection(Dictionary<TKey, TValue> dictionary)
            {
                //return default(KeyCollection(Dictionary<TKey,TValue>)); should this return anything?
            }

            IEnumerator<TKey> IEnumerable<TKey>.GetEnumerator() { throw new NotImplementedException(); }

            IEnumerator IEnumerable.GetEnumerator() { throw new NotImplementedException(); }

            // Checker-only model of the underlying element sequence.
            [ContractModel]
            public object[] Model { get { throw new NotImplementedException(); } }

            public Dictionary<TKey, TValue>.KeyCollection.Enumerator GetEnumerator()
            {
                // Since this is not related to the interface implementation, we have to repeat
                // the interface implementation's postcondition here.
                Contract.Ensures(Contract.Result<Dictionary<TKey, TValue>.KeyCollection.Enumerator>().Model == this.Model);
                Contract.Ensures(Contract.Result<Dictionary<TKey, TValue>.KeyCollection.Enumerator>().CurrentIndex == -1);
                return default(Dictionary<TKey, TValue>.KeyCollection.Enumerator);
            }

            public struct Enumerator : IEnumerator<TKey>
            {
                public TKey Current
                {
                    get
                    {
                        // Current is the model element at the current cursor position.
                        Contract.Ensures((object)Contract.Result<TKey>() == this.Model[this.CurrentIndex]);
                        throw new NotImplementedException();
                    }
                }

                public void Dispose() { throw new NotImplementedException(); }

                object IEnumerator.Current { get { throw new NotImplementedException(); } }

                public bool MoveNext() { throw new NotImplementedException(); }

                void IEnumerator.Reset() { throw new NotImplementedException(); }

                // Checker-only model members mirroring the owning collection.
                [ContractModel]
                public object[] Model { get { throw new NotImplementedException(); } }

                [ContractModel]
                public int CurrentIndex { get { throw new NotImplementedException(); } }
            }
        }

        // Collection of values in a Dictionary<TKey,TValue>. Sealed.
        public sealed class ValueCollection : ICollection<TValue>
        {
            // Wraps the values of the given dictionary.
            // Throws ArgumentNullException when dictionary is null.
            public ValueCollection(Dictionary<TKey, TValue> dictionary) { }

            IEnumerator<TValue> IEnumerable<TValue>.GetEnumerator() { throw new NotImplementedException(); }

            IEnumerator IEnumerable.GetEnumerator() { throw new NotImplementedException(); }

            public Dictionary<TKey, TValue>.ValueCollection.Enumerator GetEnumerator()
            {
                // Since this is not related to the interface implementation, we have to repeat
                // the interface implementation's postcondition here.
                Contract.Ensures(Contract.Result<Dictionary<TKey, TValue>.ValueCollection.Enumerator>().Model == this.Model);
                Contract.Ensures(Contract.Result<Dictionary<TKey, TValue>.ValueCollection.Enumerator>().CurrentIndex == -1);
                return default(Dictionary<TKey, TValue>.ValueCollection.Enumerator);
            }

            // Checker-only model of the underlying element sequence.
            [ContractModel]
            public object[] Model { get { throw new NotImplementedException(); } }

            public struct Enumerator : IEnumerator<TValue>
            {
                public TValue Current
                {
                    get
                    {
                        Contract.Ensures((object)Contract.Result<TValue>() == this.Model[this.CurrentIndex]);
                        throw new NotImplementedException();
                    }
                }

                public void Dispose() { throw new NotImplementedException(); }

                object IEnumerator.Current { get { throw new NotImplementedException(); } }

                public bool MoveNext() { throw new NotImplementedException(); }

                void IEnumerator.Reset() { throw new NotImplementedException(); }

                [ContractModel]
                public object[] Model { get { throw new NotImplementedException(); } }

                [ContractModel]
                public int CurrentIndex { get { throw new NotImplementedException(); } }
            }

            public int Count { get { throw new NotImplementedException(); } }

            bool ICollection<TValue>.IsReadOnly { get { throw new NotImplementedException(); } }

            void ICollection<TValue>.Add(TValue item) { throw new NotImplementedException(); }

            void ICollection<TValue>.Clear() { throw new NotImplementedException(); }

            bool ICollection<TValue>.Contains(TValue item) { throw new NotImplementedException(); }

            public void CopyTo(TValue[] array, int arrayIndex) { throw new NotImplementedException(); }

            bool ICollection<TValue>.Remove(TValue item) { throw new NotImplementedException(); }
        }

        IEnumerator<KeyValuePair<TKey, TValue>> IEnumerable<KeyValuePair<TKey, TValue>>.GetEnumerator() { throw new NotImplementedException(); }

        IEnumerator IEnumerable.GetEnumerator() { throw new NotImplementedException(); }

        // Returns the struct enumerator for the dictionary.
        // NOTE(review): the original summary referenced List<T> — likely copied from the
        // List<T> contract file.
        [Pure]
        [GlobalAccess(false)]
        [Escapes(true, false)]
        public Dictionary<TKey,TValue>.Enumerator GetEnumerator()
        {
            // Since this is not related to the interface implementation, we have to repeat
            // the interface implementation's postcondition here.
            Contract.Ensures(Contract.Result<Dictionary<TKey, TValue>.Enumerator>().Model == this.Model);
            Contract.Ensures(Contract.Result<Dictionary<TKey, TValue>.Enumerator>().CurrentIndex == -1);
            return default(Dictionary<TKey,TValue>.Enumerator);
        }

        // Checker-only model of the underlying pair sequence.
        [ContractModel]
        public object[] Model { get { throw new NotImplementedException(); } }

        public struct Enumerator : IEnumerator<KeyValuePair<TKey, TValue>>
        {
            public void Dispose() { throw new NotImplementedException(); }

            public KeyValuePair<TKey,TValue> Current
            {
                get
                {
                    Contract.Ensures((object)Contract.Result<KeyValuePair<TKey, TValue>>() == this.Model[this.CurrentIndex]);
                    throw new NotImplementedException();
                }
            }

            object IEnumerator.Current { get { throw new NotImplementedException(); } }

            public bool MoveNext() { throw new NotImplementedException(); }

            void IEnumerator.Reset() { throw new NotImplementedException(); }

            #region IEnumerator Members

            [ContractModel]
            public object[] Model { get { throw new NotImplementedException(); } }

            [ContractModel]
            public int CurrentIndex { get { throw new NotImplementedException(); } }

            #endregion
        }

        ICollection<TKey> IDictionary<TKey, TValue>.Keys { get { throw new NotImplementedException(); } }

        ICollection<TValue> IDictionary<TKey, TValue>.Values { get { throw new NotImplementedException(); } }

        public TValue this[TKey key]
        {
            get { throw new NotImplementedException(); }
            set { throw new NotImplementedException(); }
        }

        public void Add(TKey key, TValue value) { throw new NotImplementedException(); }

        // Postcondition: the key is no longer present afterwards.
        // The constant `return true` is a placeholder, not the runtime contract for the return value.
        public bool Remove(TKey key)
        {
            Contract.Ensures(!this.ContainsKey(key));
            return true;
        }

        public bool TryGetValue(TKey key, out TValue value) { throw new NotImplementedException(); }

        bool ICollection<KeyValuePair<TKey,TValue>>.IsReadOnly { get { throw new NotImplementedException(); } }

        void ICollection<KeyValuePair<TKey, TValue>>.Add(KeyValuePair<TKey, TValue> item) { throw new NotImplementedException(); }

        public void Clear() { throw new NotImplementedException(); }

        bool ICollection<KeyValuePair<TKey, TValue>>.Contains(KeyValuePair<TKey, TValue> item) { throw new NotImplementedException(); }

        void ICollection<KeyValuePair<TKey, TValue>>.CopyTo(KeyValuePair<TKey, TValue>[] array, int arrayIndex) { throw new NotImplementedException(); }

        bool ICollection<KeyValuePair<TKey, TValue>>.Remove(KeyValuePair<TKey, TValue> item) { throw new NotImplementedException(); }
    }
}
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
// This class comes from https://github.com/aspnet/HttpAbstractions and was slightly modified to support good old .NET Framework 4.5.1

using System;
using System.Diagnostics;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;

namespace Microsoft.AspNet.WebUtilities
{
    // Read-only view over a BufferedReadStream that ends at the next multipart boundary.
    // Each body part of a multipart message is exposed to the consumer as one of these streams.
    internal class MultipartReaderStream : Stream
    {
        private readonly BufferedReadStream _innerStream;
        private readonly byte[] _boundaryBytes;       // boundary pattern to scan for (optionally preceded by CRLF)
        private readonly int _finalBoundaryLength;    // boundary length + 2 for the trailing "--" terminator
        private readonly long _innerOffset;           // inner-stream position corresponding to our position 0
        private long _position;
        private long _observedLength;                 // highest position read so far; reported as Length
        private bool _finished;                       // true once the boundary has been consumed

        /// <summary>
        /// Creates a stream that reads until it reaches the given boundary pattern.
        /// </summary>
        /// <param name="stream"></param>
        /// <param name="boundary"></param>
        public MultipartReaderStream(BufferedReadStream stream, string boundary, bool expectLeadingCrlf = true)
        {
            _innerStream = stream;
            _innerOffset = _innerStream.CanSeek ? _innerStream.Position : 0;
            // Interior boundaries are preceded by CRLF; the preamble/first boundary may not be.
            if (expectLeadingCrlf)
            {
                _boundaryBytes = Encoding.UTF8.GetBytes("\r\n--" + boundary);
            }
            else
            {
                _boundaryBytes = Encoding.UTF8.GetBytes("--" + boundary);
            }
            _finalBoundaryLength = _boundaryBytes.Length + 2; // Include the final '--' terminator.
        }

        // True once the terminating "--boundary--" (rather than an interior boundary) was seen.
        public bool FinalBoundaryFound { get; private set; }

        public override bool CanRead
        {
            get { return true; }
        }

        public override bool CanSeek
        {
            get { return _innerStream.CanSeek; }
        }

        public override bool CanWrite
        {
            get { return false; }
        }

        // Length only reflects data observed so far, since the part's size is unknown until the boundary is hit.
        public override long Length
        {
            get { return _observedLength; }
        }

        public override long Position
        {
            get { return _position; }
            set
            {
                if (value < 0)
                {
                    throw new ArgumentOutOfRangeException("value", value, "The Position must be positive.");
                }
                // NOTE(review): the guard allows value == _observedLength although the message
                // says "less than length" — confirm intended boundary semantics upstream.
                if (value > _observedLength)
                {
                    throw new ArgumentOutOfRangeException("value", value, "The Position must be less than length.");
                }
                _position = value;
                // Seeking backwards re-enables reading even after the boundary was reached.
                if (_position < _observedLength)
                {
                    _finished = false;
                }
            }
        }

        public override long Seek(long offset, SeekOrigin origin)
        {
            if (origin == SeekOrigin.Begin)
            {
                Position = offset;
            }
            else if (origin == SeekOrigin.Current)
            {
                Position = Position + offset;
            }
            else // if (origin == SeekOrigin.End)
            {
                Position = Length + offset;
            }
            return Position;
        }

        public override void SetLength(long value)
        {
            throw new NotSupportedException();
        }

        public override void Write(byte[] buffer, int offset, int count)
        {
            throw new NotSupportedException();
        }

#if DNX451
        public override IAsyncResult BeginWrite(byte[] buffer, int offset, int size, AsyncCallback callback, object state)
        {
            throw new NotSupportedException();
        }

        public override void EndWrite(IAsyncResult asyncResult)
        {
            throw new NotSupportedException();
        }
#endif

        public override Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
        {
            throw new NotSupportedException();
        }

        public override void Flush()
        {
            throw new NotSupportedException();
        }

        // Re-aligns the (seekable) inner stream with our logical position before reading.
        private void PositionInnerStream()
        {
            if (_innerStream.CanSeek && _innerStream.Position != (_innerOffset + _position))
            {
                _innerStream.Position = _innerOffset + _position;
            }
        }

        // Advances the logical position and tracks the furthest point observed; returns `read` for chaining.
        private int UpdatePosition(int read)
        {
            _position += read;
            if (_observedLength < _position)
            {
                _observedLength = _position;
            }
            return read;
        }

#if DNX451
        public override IAsyncResult BeginRead(byte[] buffer, int offset, int size, AsyncCallback callback, object state)
        {
            var tcs = new TaskCompletionSource<int>(state);
            InternalReadAsync(buffer, offset, size, callback, tcs);
            return tcs.Task;
        }

        // APM-over-TAP adapter: forwards to ReadAsync and completes the TaskCompletionSource.
        private async void InternalReadAsync(byte[] buffer, int offset, int size, AsyncCallback callback, TaskCompletionSource<int> tcs)
        {
            try
            {
                int read = await ReadAsync(buffer, offset, size);
                tcs.TrySetResult(read);
            }
            catch (Exception ex)
            {
                tcs.TrySetException(ex);
            }

            if (callback != null)
            {
                try
                {
                    callback(tcs.Task);
                }
                catch (Exception)
                {
                    // Suppress exceptions on background threads.
                }
            }
        }

        public override int EndRead(IAsyncResult asyncResult)
        {
            var task = (Task<int>)asyncResult;
            return task.GetAwaiter().GetResult();
        }
#endif

        // Reads up to the next boundary. Returns 0 once the boundary has been consumed.
        public override int Read(byte[] buffer, int offset, int count)
        {
            if (_finished)
            {
                return 0;
            }

            PositionInnerStream();
            if (!_innerStream.EnsureBuffered(_finalBoundaryLength))
            {
                throw new IOException("Unexpected end of stream.");
            }
            var bufferedData = _innerStream.BufferedData;

            // scan for a boundary match, full or partial.
            int matchOffset;
            int matchCount;
            int read;
            if (SubMatch(bufferedData, _boundaryBytes, out matchOffset, out matchCount))
            {
                // We found a possible match, return any data before it.
                if (matchOffset > bufferedData.Offset)
                {
                    read = _innerStream.Read(buffer, offset, Math.Min(count, matchOffset - bufferedData.Offset));
                    return UpdatePosition(read);
                }

                // A partial match at the buffer start would have been completed by EnsureBuffered above.
                Debug.Assert(matchCount == _boundaryBytes.Length);

                // "The boundary may be followed by zero or more characters of
                // linear whitespace. It is then terminated by either another CRLF"
                // or -- for the final boundary.
                byte[] boundary = new byte[_boundaryBytes.Length];
                read = _innerStream.Read(boundary, 0, boundary.Length);
                Debug.Assert(read == boundary.Length); // It should have all been buffered

                var remainder = _innerStream.ReadLine(lengthLimit: 100); // Whitespace may exceed the buffer.
                remainder = remainder.Trim();
                if (string.Equals("--", remainder, StringComparison.Ordinal))
                {
                    FinalBoundaryFound = true;
                }
                Debug.Assert(FinalBoundaryFound || string.Equals(string.Empty, remainder, StringComparison.Ordinal), "Un-expected data found on the boundary line: " + remainder);
                _finished = true;
                return 0;
            }

            // No possible boundary match within the buffered data, return the data from the buffer.
            read = _innerStream.Read(buffer, offset, Math.Min(count, bufferedData.Count));
            return UpdatePosition(read);
        }

        // Async counterpart of Read; buffered data is then drained synchronously.
        public override async Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
        {
            if (_finished)
            {
                return 0;
            }

            PositionInnerStream();
            if (!await _innerStream.EnsureBufferedAsync(_finalBoundaryLength, cancellationToken))
            {
                throw new IOException("Unexpected end of stream.");
            }
            var bufferedData = _innerStream.BufferedData;

            // scan for a boundary match, full or partial.
            int matchOffset;
            int matchCount;
            int read;
            if (SubMatch(bufferedData, _boundaryBytes, out matchOffset, out matchCount))
            {
                // We found a possible match, return any data before it.
                if (matchOffset > bufferedData.Offset)
                {
                    // Sync, it's already buffered
                    read = _innerStream.Read(buffer, offset, Math.Min(count, matchOffset - bufferedData.Offset));
                    return UpdatePosition(read);
                }

                Debug.Assert(matchCount == _boundaryBytes.Length);

                // "The boundary may be followed by zero or more characters of
                // linear whitespace. It is then terminated by either another CRLF"
                // or -- for the final boundary.
                byte[] boundary = new byte[_boundaryBytes.Length];
                read = _innerStream.Read(boundary, 0, boundary.Length);
                Debug.Assert(read == boundary.Length); // It should have all been buffered

                var remainder = await _innerStream.ReadLineAsync(lengthLimit: 100, cancellationToken: cancellationToken); // Whitespace may exceed the buffer.
                remainder = remainder.Trim();
                if (string.Equals("--", remainder, StringComparison.Ordinal))
                {
                    FinalBoundaryFound = true;
                }
                Debug.Assert(FinalBoundaryFound || string.Equals(string.Empty, remainder, StringComparison.Ordinal), "Un-expected data found on the boundary line: " + remainder);
                _finished = true;
                return 0;
            }

            // No possible boundary match within the buffered data, return the data from the buffer.
            read = _innerStream.Read(buffer, offset, Math.Min(count, bufferedData.Count));
            return UpdatePosition(read);
        }

        // Does Segment1 contain all of segment2, or does it end with the start of segment2?
        // 1: AAAAABBBBBCCCCC
        // 2: BBBBB
        // Or:
        // 1: AAAAABBB
        // 2: BBBBB
        private static bool SubMatch(ArraySegment<byte> segment1, byte[] matchBytes, out int matchOffset, out int matchCount)
        {
            matchCount = 0;
            for (matchOffset = segment1.Offset; matchOffset < segment1.Offset + segment1.Count; matchOffset++)
            {
                // countLimit caps the comparison at the end of segment1 so a suffix may partially match.
                int countLimit = segment1.Offset - matchOffset + segment1.Count;
                for (matchCount = 0; matchCount < matchBytes.Length && matchCount < countLimit; matchCount++)
                {
                    if (matchBytes[matchCount] != segment1.Array[matchOffset + matchCount])
                    {
                        matchCount = 0;
                        break;
                    }
                }
                if (matchCount > 0)
                {
                    break;
                }
            }
            return matchCount > 0;
        }
    }
}
using Arma3BEClient.Common.Core;
using Arma3BEClient.Common.Extensions;
using Arma3BEClient.Common.Logging;
using Arma3BEClient.Libs.Context;
using Arma3BEClient.Libs.ModelCompact;
using System;
using System.Collections.Generic;
using System.Data.Entity;
using System.Data.Entity.Migrations;
using System.Linq;
using System.Linq.Expressions;
using System.Threading.Tasks;

namespace Arma3BEClient.Libs.Repositories.Players
{
    /// <summary>
    /// Entity Framework backed repository for player records. Each operation opens
    /// its own short-lived <see cref="Arma3BeClientContext"/>; synchronous EF calls
    /// are wrapped in Task.Run to keep callers (presumably the UI thread) responsive.
    /// </summary>
    public class PlayerRepository : DisposeObject, IPlayerRepository
    {
        private readonly ILog _log = new Log();

        /// <summary>Loads every player row from the database.</summary>
        public async Task<IEnumerable<PlayerDto>> GetAllPlayersAsync()
        {
            using (var dc = new Arma3BeClientContext())
            {
                return await dc.Player.ToListAsync();
            }
        }

        /// <summary>
        /// Bulk import: inserts <paramref name="toAdd"/> in pages of 2000 rows, then
        /// fills in missing SteamId/Comment values for <paramref name="toUpdate"/>.
        /// Existing non-empty SteamId/Comment values are never overwritten.
        /// </summary>
        public Task ImportPlayersAsync(List<PlayerDto> toAdd, List<PlayerDto> toUpdate)
        {
            return Task.Run(() =>
            {
                _log.Info("ImportPlayersAsync - Import started");
                _log.Info($"ImportPlayersAsync - ToAdd {toAdd.Count}, ToUpdate {toUpdate.Count}");
                using (var dc = new Arma3BeClientContext())
                {
                    // Page the inserts so the change tracker does not balloon
                    // with one huge AddRange/SaveChanges round trip.
                    foreach (var _add in toAdd.Paged(2000))
                    {
                        var toAddDto = _add.Select(Map).ToArray();
                        dc.Player.AddRange(toAddDto);
                        dc.SaveChanges();
                    }
                }
                _log.Info($"Inserted {toAdd.Count}");
                _log.Info("ImportPlayersAsync - Update started");
                using (var dc = new Arma3BeClientContext())
                {
                    // NOTE(review): this materializes the entire Player table to
                    // build the Id lookup — acceptable for a one-off import, but
                    // heavy if the table is large.
                    var allPlayers = dc.Player.ToArray().ToDictionary(x => x.Id);
                    _log.Info($"Players loaded");
                    foreach (var player in toUpdate)
                    {
                        if (allPlayers.ContainsKey(player.Id))
                        {
                            var dto = allPlayers[player.Id];
                            // Only fill in blanks; never clobber existing values.
                            if (string.IsNullOrEmpty(dto.SteamId)) dto.SteamId = player.SteamId;
                            if (string.IsNullOrEmpty(dto.Comment)) dto.Comment = player.Comment;
                        }
                    }
                    dc.SaveChanges();
                }
                _log.Info("ImportPlayersAsync - Update finished");
            });
        }

        /// <summary>Queries players with an arbitrary caller-supplied predicate (translated to SQL by EF).</summary>
        public async Task<IEnumerable<PlayerDto>> GetPlayersAsync(Expression<Func<Player, bool>> expression)
        {
            return await Task.Run(() =>
            {
                using (var dc = new Arma3BeClientContext())
                {
                    return dc.Player.Where(expression).ToList();
                }
            });
        }

        /// <summary>Loads the players whose GUID is in <paramref name="guids"/> (translated to a SQL IN clause).</summary>
        public async Task<IEnumerable<PlayerDto>> GetPlayersAsync(IEnumerable<string> guids)
        {
            return await Task.Run(() =>
            {
                using (var dc = new Arma3BeClientContext())
                {
                    return dc.Player.Where(x => guids.Contains(x.GUID)).ToList();
                }
            });
        }

        /// <summary>Loads a single player by GUID, or null when not found.</summary>
        public async Task<PlayerDto> GetPlayerAsync(string guid)
        {
            return await Task.Run(() =>
            {
                using (var dc = new Arma3BeClientContext())
                {
                    return dc.Player.FirstOrDefault(x => x.GUID == guid);
                }
            });
        }

        /// <summary>
        /// Loads a player by GUID with bans (and their server info), notes and
        /// history eagerly included, so the entity is usable after the context
        /// is disposed. Returns null when not found.
        /// </summary>
        public Task<Player> GetPlayerInfoAsync(string guid)
        {
            return Task.Run(() =>
            {
                using (var dc = new Arma3BeClientContext())
                {
                    return dc.Player.Where(x => x.GUID == guid)
                        .Include(x => x.Bans)
                        .Include(x => x.Bans.Select(b => b.ServerInfo))
                        .Include(x => x.Notes)
                        .Include(x => x.PlayerHistory)
                        .FirstOrDefault();
                }
            });
        }

        /// <summary>Sets the comment of the player with the given GUID; no-op when the player does not exist.</summary>
        public Task UpdatePlayerCommentAsync(string guid, string comment)
        {
            return Task.Run(() =>
            {
                using (var dc = new Arma3BeClientContext())
                {
                    var dbp = dc.Player.FirstOrDefault(x => x.GUID == guid);
                    if (dbp != null)
                    {
                        dbp.Comment = comment;
                        dc.SaveChanges();
                    }
                }
            });
        }

        /// <summary>Batch-updates comments for the players keyed by Id in <paramref name="playersToUpdateComments"/>.</summary>
        public Task UpdateCommentAsync(Dictionary<Guid, string> playersToUpdateComments)
        {
            return Task.Run(() =>
            {
                var ids = playersToUpdateComments.Keys.ToArray();
                using (var dc = new Arma3BeClientContext())
                {
                    var players = dc.Player.Where(x => ids.Contains(x.Id));
                    foreach (var player in players)
                    {
                        player.Comment = playersToUpdateComments[player.Id];
                    }
                    dc.SaveChanges();
                }
            });
        }

        /// <summary>Upserts the given players by primary key (EF Migrations AddOrUpdate).</summary>
        public Task AddOrUpdateAsync(IEnumerable<PlayerDto> players)
        {
            return Task.Run(() =>
            {
                var playerList = players.Select(Map).ToArray();
                using (var dc = new Arma3BeClientContext())
                {
                    dc.Player.AddOrUpdate(playerList);
                    dc.SaveChanges();
                }
            });
        }

        /// <summary>Upserts the given player-history rows.</summary>
        public Task AddHistoryAsync(List<PlayerHistory> histories)
        {
            return Task.Run(() =>
            {
                using (var dc = new Arma3BeClientContext())
                {
                    dc.PlayerHistory.AddOrUpdate(histories.ToArray());
                    dc.SaveChanges();
                }
            });
        }

        /// <summary>Appends a free-text note to the player with the given Id.</summary>
        public Task AddNotesAsync(Guid id, string s)
        {
            return Task.Run(() =>
            {
                using (var dc = new Arma3BeClientContext())
                {
                    dc.Comments.Add(new Note()
                    {
                        PlayerId = id,
                        Text = s,
                    });
                    dc.SaveChanges();
                }
            });
        }

        /// <summary>Loads all players that do not yet have a SteamId (candidates for Steam lookup).</summary>
        public async Task<IEnumerable<PlayerDto>> GetAllPlayersWithoutSteamAsync()
        {
            return await Task.Run(() =>
            {
                using (var dc = new Arma3BeClientContext())
                {
                    return dc.Player.Where(x => string.IsNullOrEmpty(x.SteamId)).ToArray();
                }
            });
        }

        /// <summary>
        /// Stores resolved SteamIds for the players keyed by Id in <paramref name="found"/>.
        /// Only players with a GUID and a still-empty SteamId are touched.
        /// </summary>
        public Task SaveSteamIdAsync(Dictionary<Guid, string> found)
        {
            return Task.Run(() =>
            {
                var guids = found.Keys.ToArray();
                using (var dc = new Arma3BeClientContext())
                {
                    var players = dc.Player
                        .Where(x => string.IsNullOrEmpty(x.GUID) == false && string.IsNullOrEmpty(x.SteamId))
                        .Where(x => guids.Contains(x.Id)).ToArray();
                    foreach (var player in players)
                    {
                        if (found.ContainsKey(player.Id))
                        {
                            player.SteamId = found[player.Id];
                        }
                    }
                    dc.SaveChanges();
                }
            });
        }

        /// <summary>Copies a PlayerDto into a new Player entity, field by field.</summary>
        private Player Map(PlayerDto source)
        {
            return new Player()
            {
                Id = source.Id,
                Comment = source.Comment,
                GUID = source.GUID,
                LastIp = source.LastIp,
                LastSeen = source.LastSeen,
                Name = source.Name,
                SteamId = source.SteamId
            };
        }
    }
}
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using ERY.EMath;

namespace TightBindingSuite
{
    /// <summary>
    /// Command-line tool that reads eigenvalue files on a k-point grid and writes
    /// band energies interpolated along the k-path defined in a tight-binding input
    /// file. Output goes to "&lt;input&gt;.bands", one line per k-path point.
    /// </summary>
    class BandPath
    {
        public static void Main(string[] args)
        {
            // NOTE(review): developer-machine working directory. Guarded so the tool
            // no longer crashes on machines where the path does not exist; consider
            // removing this leftover entirely.
            const string devWorkingDir = "/home/eylvisaker/Calculations/rpa/tests";
            if (Directory.Exists(devWorkingDir))
                Directory.SetCurrentDirectory(devWorkingDir);

            using (BootStrap b = new BootStrap())
            {
                if (args.Length == 0)
                {
                    Console.WriteLine("Must specify tight binding input and eigenvalues files on command line.");
                    System.Environment.Exit(1);
                }

                string inputfile = b.GetInputFile("Band Path code", "bandpath", args);
                TightBinding tb = new TightBinding(inputfile);

                // First argument is the tight-binding input; the rest are eigenvalue files.
                var argsList = args.ToList();
                argsList.RemoveAt(0);

                new BandPath().Run(tb, argsList);
            }
        }

        /// <summary>Processes each eigenvalues file in turn.</summary>
        void Run(TightBinding tb, List<string> args)
        {
            foreach (var arg in args)
            {
                Console.WriteLine("Reading file " + arg);
                CreateBands(tb, arg);
            }
        }

        /// <summary>
        /// Builds an interpolation tetrahedron from the four grid k-points nearest
        /// to <paramref name="kpt"/> (nearness measured with periodic images, see
        /// CalcDistance).
        /// </summary>
        BandTetrahedron GetTetrahedron(TightBinding tb, KPoint kpt, KptList kpts)
        {
            List<Pair<int, double>> lst = new List<Pair<int, double>>();
            double[] weights = new double[kpts.Kpts.Count];

            // Weight each grid point by inverse distance; the epsilon avoids a
            // division by zero when kpt coincides with a grid point.
            for (int j = 0; j < kpts.Kpts.Count; j++)
            {
                double distance = CalcDistance(tb, kpts.Kpts[j].Value, kpt.Value);
                weights[j] = 1 / (distance + 0.00001);
            }
            for (int j = 0; j < weights.Length; j++)
            {
                lst.Add(new Pair<int, double>(j, weights[j]));
            }

            // Sort by descending weight and keep the four nearest points.
            lst.Sort((x, y) => { return y.Second.CompareTo(x.Second); });
            lst.RemoveRange(4, lst.Count - 4);

            List<int> ilist = lst.Select(x => x.First).ToList();

            BandTetrahedron retval = new BandTetrahedron(tb, kpt.Value, kpts, ilist);
            return retval;
        }

        /// <summary>
        /// Distance between two k-points, reduced by reciprocal lattice vectors so
        /// periodic images are treated as close.
        /// </summary>
        double CalcDistance(TightBinding tb, Vector3 v1, Vector3 v2)
        {
            Vector3 delta = v1 - v2;

            ShiftDelta(ref delta, tb.Lattice.G1);
            ShiftDelta(ref delta, tb.Lattice.G2);
            ShiftDelta(ref delta, tb.Lattice.G3);

            return delta.Magnitude;
        }

        /// <summary>Shifts delta by ±G when doing so shortens it (periodic reduction along one reciprocal vector).</summary>
        void ShiftDelta(ref Vector3 delta, Vector3 G)
        {
            if ((delta - G).Magnitude < delta.Magnitude)
            {
                delta -= G;
            }
            if ((delta + G).Magnitude < delta.Magnitude)
            {
                delta += G;
            }
        }

        /// <summary>
        /// Writes one line per k-path point: the point index followed by the
        /// interpolated energy for each band.
        /// </summary>
        void WriteBands(TightBinding tb, KptList kpts, StreamWriter w)
        {
            int bandCount = kpts.Kpts[0].Wavefunctions.Count;
            BandTetrahedron tet = null;

            for (int i = 0; i < tb.KPath.Kpts.Count; i++)
            {
                var kpt = tb.KPath.Kpts[i];

                if (tet == null || tet.Contains(kpt) == false)
                {
                    // BUGFIX: the result was previously discarded, leaving 'tet'
                    // null and crashing at tet.Interpolate below.
                    tet = GetTetrahedron(tb, kpt, kpts);
                }

                w.Write(i);
                w.Write(" ");

                for (int band = 0; band < bandCount; band++)
                {
                    // NOTE(review): Interpolate is called identically for every band;
                    // if BandTetrahedron exposes a per-band overload it should
                    // probably receive 'band' here — confirm against BandTetrahedron.
                    double energy = tet.Interpolate(kpt);

                    w.Write("{0} ", energy);
                }

                w.WriteLine();
            }
        }

        /// <summary>Counts the strictly positive weights.</summary>
        int CountWeights(double[] weights)
        {
            return weights.Count(x => x > 0);
        }

        /// <summary>Zeroes out weights below 0.9.</summary>
        void DropWeakValues(double[] weights)
        {
            for (int i = 0; i < weights.Length; i++)
            {
                if (weights[i] < 0.9)
                    weights[i] = 0;
            }
        }

        /// <summary>Rescales so the largest weight becomes 1.</summary>
        void MaximizeWeights(double[] weights)
        {
            double total = weights.Max();

            for (int i = 0; i < weights.Length; i++)
                weights[i] /= total;
        }

        /// <summary>Rescales so the weights sum to 1 (in-place on the pair objects).</summary>
        void NormalizeWeights(List<Pair<int, double>> weights)
        {
            double total = weights.Sum(x => x.Second);

            foreach (var x in weights)
            {
                x.Second /= total;
            }
        }

        /// <summary>Rescales so the weights sum to 1.</summary>
        void NormalizeWeights(double[] weights)
        {
            double total = weights.Sum();

            for (int i = 0; i < weights.Length; i++)
                weights[i] /= total;
        }

        /// <summary>
        /// Parses a whitespace-separated line of six integers: three grid divisions
        /// followed by three shift values.
        /// </summary>
        void ParseGrid(int[] grid, int[] shift, string line)
        {
            string[] elements = line.Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);

            for (int i = 0; i < 3; i++)
                grid[i] = int.Parse(elements[i]);
            for (int i = 0; i < 3; i++)
                shift[i] = int.Parse(elements[3 + i]);
        }

        /// <summary>
        /// Reads an eigenvalues file ("# Grid" header, grid line, "# Eigenvalues"
        /// header, then one k-point per line: kx ky kz e1 e2 ...) and writes the
        /// interpolated band structure to "&lt;name&gt;.bands".
        /// </summary>
        void CreateBands(TightBinding tb, string name)
        {
            using (StreamReader r = new StreamReader(name))
            {
                string line = r.ReadLine();

                if (line != "# Grid")
                {
                    Console.WriteLine("Not an eigenvalues file!");
                    System.Environment.Exit(3);
                }

                int[] grid = new int[3];
                int[] shift = new int[3];

                // BUGFIX: the grid numbers are on the line AFTER the "# Grid" header;
                // previously the header itself was handed to ParseGrid, which threw
                // on int.Parse("#").
                line = r.ReadLine();
                ParseGrid(grid, shift, line);

                // BUGFIX: advance to the "# Eigenvalues" header before validating it;
                // previously the stale "# Grid" line was compared, so this check
                // could never succeed.
                line = r.ReadLine();
                if (line != "# Eigenvalues")
                {
                    Console.WriteLine("Not an eigenvalues file!");
                    System.Environment.Exit(2);
                }

                KptList kpts = new KptList();

                kpts.Mesh = grid;
                kpts.Shift = shift;

                while (r.EndOfStream == false)
                {
                    line = r.ReadLine();
                    string[] elements = line.Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);

                    KPoint kpt = new KPoint(new Vector3(double.Parse(elements[0]), double.Parse(elements[1]), double.Parse(elements[2])));

                    // Remaining columns are one energy per band.
                    for (int i = 3; i < elements.Length; i++)
                    {
                        Wavefunction wfk = new Wavefunction(0);

                        wfk.Energy = double.Parse(elements[i]);

                        kpt.Wavefunctions.Add(wfk);
                    }

                    kpts.Kpts.Add(kpt);
                }

                CreateTetrahedronMesh(kpts);

                string outputfile = name + ".bands";

                using (StreamWriter w = new StreamWriter(outputfile))
                {
                    WriteBands(tb, kpts, w);
                }
            }
        }

        // Intentionally empty in the original; kept as a placeholder hook.
        void CreateTetrahedronMesh(KptList kpts)
        {
        }
    }
}
/* * Copyright (c) Contributors, http://opensimulator.org/ * See CONTRIBUTORS.TXT for a full list of copyright holders. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the OpenSimulator Project nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 */

using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Reflection;
using System.Xml;
using Nini.Config;
using log4net;
using OpenSim.Framework;
using OpenSim.Framework.Console;
using OpenSim.Data;
using OpenSim.Server.Base;
using OpenSim.Services.Interfaces;
using OpenSim.Services.Connectors.Hypergrid;
using GridRegion = OpenSim.Services.Interfaces.GridRegion;
using OpenMetaverse;

namespace OpenSim.Services.GridService
{
    /// <summary>
    /// Creates and removes hyperlink entries for regions hosted on OTHER grids:
    /// contacts the remote gatekeeper, claims local map coordinates, and stores the
    /// link in the region database. Also registers the related console commands.
    /// </summary>
    public class HypergridLinker : IHypergridLinker
    {
        private static readonly ILog m_log =
            LogManager.GetLogger(
            MethodBase.GetCurrentMethod().DeclaringType);

        // Target coordinates used by the "link-mapping" console command to relocate
        // XML-defined links onto this grid's map.
        private static uint m_autoMappingX = 0;
        private static uint m_autoMappingY = 0;
        private static bool m_enableAutoMapping = false;

        protected IRegionData m_Database;
        protected GridService m_GridService;
        protected IAssetService m_AssetService;
        protected GatekeeperServiceConnector m_GatekeeperConnector;

        protected UUID m_ScopeID = UUID.Zero;
//        protected bool m_Check4096 = true;
        protected string m_MapTileDirectory = string.Empty;
        protected string m_ThisGatekeeper = string.Empty;
        protected Uri m_ThisGatekeeperURI = null;

        protected GridRegion m_DefaultRegion;
        /// <summary>
        /// Lazily resolved fall-back region: first default hypergrid region, then
        /// any region, then a dummy at (1000,1000) if the grid has no regions at all.
        /// </summary>
        protected GridRegion DefaultRegion
        {
            get
            {
                if (m_DefaultRegion == null)
                {
                    List<GridRegion> defs = m_GridService.GetDefaultHypergridRegions(m_ScopeID);
                    if (defs != null && defs.Count > 0)
                        m_DefaultRegion = defs[0];
                    else
                    {
                        // Get any region
                        defs = m_GridService.GetRegionsByName(m_ScopeID, "", 1);
                        if (defs != null && defs.Count > 0)
                            m_DefaultRegion = defs[0];
                        else
                        {
                            // This shouldn't happen
                            m_DefaultRegion = new GridRegion(1000, 1000);
                            m_log.Error("[HYPERGRID LINKER]: Something is wrong with this grid. It has no regions?");
                        }
                    }
                }
                return m_DefaultRegion;
            }
        }

        /// <summary>
        /// Reads [GridService] configuration, normalizes this grid's gatekeeper URI,
        /// prepares the map-tile directory and registers console commands. Returns
        /// early (leaving the linker inert) when HypergridLinker is disabled.
        /// </summary>
        public HypergridLinker(IConfigSource config, GridService gridService, IRegionData db)
        {
            IConfig gridConfig = config.Configs["GridService"];
            if (gridConfig == null)
                return;

            if (!gridConfig.GetBoolean("HypergridLinker", false))
                return;

            m_Database = db;
            m_GridService = gridService;
            m_log.DebugFormat("[HYPERGRID LINKER]: Starting with db {0}", db.GetType());

            string assetService = gridConfig.GetString("AssetService", string.Empty);

            Object[] args = new Object[] { config };

            if (assetService != string.Empty)
                m_AssetService = ServerUtils.LoadPlugin<IAssetService>(assetService, args);

            string scope = gridConfig.GetString("ScopeID", string.Empty);
            if (scope != string.Empty)
                UUID.TryParse(scope, out m_ScopeID);

//            m_Check4096 = gridConfig.GetBoolean("Check4096", true);

            m_MapTileDirectory = gridConfig.GetString("MapTileDirectory", "maptiles");

            m_ThisGatekeeper = Util.GetConfigVarFromSections<string>(config, "GatekeeperURI",
                new string[] { "Startup", "Hypergrid", "GridService" }, String.Empty);
            // Legacy. Remove soon!
            m_ThisGatekeeper = gridConfig.GetString("Gatekeeper", m_ThisGatekeeper);
            try
            {
                m_ThisGatekeeperURI = new Uri(m_ThisGatekeeper);
            }
            catch
            {
                m_log.WarnFormat("[HYPERGRID LINKER]: Malformed URL in [GridService], variable Gatekeeper = {0}", m_ThisGatekeeper);
            }

            // NOTE(review): if the Uri constructor above threw, m_ThisGatekeeperURI
            // is still null here and this dereference raises NullReferenceException —
            // the catch only logs; confirm whether a malformed Gatekeeper should abort.
            m_ThisGatekeeper = m_ThisGatekeeperURI.AbsoluteUri;
            // Re-append the default port explicitly so string comparison against
            // gatekeeper URIs that spell out :80/:443 still matches.
            if (m_ThisGatekeeperURI.Port == 80)
                m_ThisGatekeeper = m_ThisGatekeeper.Trim(new char[] { '/', ' ' }) + ":80/";
            else if (m_ThisGatekeeperURI.Port == 443)
                m_ThisGatekeeper = m_ThisGatekeeper.Trim(new char[] { '/', ' ' }) + ":443/";

            m_GatekeeperConnector = new GatekeeperServiceConnector(m_AssetService);

            m_log.Debug("[HYPERGRID LINKER]: Loaded all services...");

            if (!string.IsNullOrEmpty(m_MapTileDirectory))
            {
                try
                {
                    Directory.CreateDirectory(m_MapTileDirectory);
                }
                catch (Exception e)
                {
                    m_log.WarnFormat("[HYPERGRID LINKER]: Could not create map tile storage directory {0}: {1}", m_MapTileDirectory, e);
                    m_MapTileDirectory = string.Empty;
                }
            }

            if (MainConsole.Instance != null)
            {
                MainConsole.Instance.Commands.AddCommand("Hypergrid", false, "link-region",
                    "link-region <Xloc> <Yloc> <ServerURI> [<RemoteRegionName>]",
                    "Link a HyperGrid Region. Examples for <ServerURI>: http://grid.net:8002/ or http://example.org/path/foo.php", RunCommand);
                MainConsole.Instance.Commands.AddCommand("Hypergrid", false, "link-region",
                    "link-region <Xloc> <Yloc> <RegionIP> <RegionPort> [<RemoteRegionName>]",
                    "Link a hypergrid region (deprecated)", RunCommand);
                MainConsole.Instance.Commands.AddCommand("Hypergrid", false, "unlink-region",
                    "unlink-region <local name>",
                    "Unlink a hypergrid region", RunCommand);
                MainConsole.Instance.Commands.AddCommand("Hypergrid", false, "link-mapping", "link-mapping [<x> <y>]",
                    "Set local coordinate to map HG regions to", RunCommand);
                MainConsole.Instance.Commands.AddCommand("Hypergrid", false, "show hyperlinks", "show hyperlinks",
                    "List the HG regions", HandleShow);
            }
        }

        #region Link Region

        // from map search
        /// <summary>
        /// Links a region found via map search, placing it at a RANDOM X coordinate
        /// (Y = 0) since the caller has no coordinate preference.
        /// </summary>
        public GridRegion LinkRegion(UUID scopeID, string regionDescriptor)
        {
            string reason = string.Empty;
            uint xloc = Util.RegionToWorldLoc((uint)random.Next(0, Int16.MaxValue));
            return TryLinkRegionToCoords(scopeID, regionDescriptor, (int)xloc, 0, out reason);
        }

        // NOTE(review): System.Random is not thread-safe; LinkRegion may be called
        // from service threads — confirm call sites or guard access.
        private static Random random = new Random();

        // From the command line link-region (obsolete) and the map
        private GridRegion TryLinkRegionToCoords(UUID scopeID, string mapName, int xloc, int yloc, out string reason)
        {
            return TryLinkRegionToCoords(scopeID, mapName, xloc, yloc, UUID.Zero, out reason);
        }

        /// <summary>True when the given server URI is this grid's own gatekeeper (exact string match).</summary>
        public bool IsLocalGrid(string serverURI)
        {
            return serverURI == m_ThisGatekeeper;
        }

        /// <summary>
        /// Parses a "server URI + region name" descriptor and attempts to create the
        /// hyperlink at the given map coordinates. Returns the linked region, or
        /// null with <paramref name="reason"/> set on failure.
        /// </summary>
        public GridRegion TryLinkRegionToCoords(UUID scopeID, string mapName, int xloc, int yloc, UUID ownerID, out string reason)
        {
            reason = string.Empty;
            GridRegion regInfo = null;

            string serverURI = string.Empty;
            string regionName = string.Empty;

            if (!Util.buildHGRegionURI(mapName, out serverURI, out regionName))
            {
                reason = "Wrong URI format for link-region";
                return null;
            }

            if (TryCreateLink(scopeID, xloc, yloc, regionName, 0, null, serverURI, ownerID, out regInfo, out reason))
            {
                regInfo.RegionName = serverURI + regionName;
                return regInfo;
            }

            return null;
        }

        private bool TryCreateLink(UUID scopeID, int xloc, int yloc, string remoteRegionName, uint externalPort, string externalHostName, UUID ownerID, out GridRegion regInfo, out string reason)
        {
            return TryCreateLink(scopeID, xloc, yloc, remoteRegionName, externalPort, externalHostName, null, ownerID, out regInfo, out reason);
        }

        /// <summary>Serializes link creation so two concurrent links cannot claim the same coordinates.</summary>
        private bool TryCreateLink(UUID scopeID, int xloc, int yloc, string remoteRegionName, uint externalPort, string externalHostName, string serverURI, UUID ownerID, out GridRegion regInfo, out string reason)
        {
            // NOTE(review): lock (this) is an anti-pattern (external code could take
            // the same lock); a private lock object would be safer — confirm no
            // external locking relies on this before changing.
            lock (this)
            {
                return TryCreateLinkImpl(scopeID, xloc, yloc, remoteRegionName, externalPort, externalHostName, serverURI, ownerID, out regInfo, out reason);
            }
        }

        /// <summary>
        /// Does the actual work: validates the target is not this grid, checks the
        /// coordinates are free, performs the gatekeeper link handshake, and stores
        /// the resulting hyperlink region.
        /// </summary>
        private bool TryCreateLinkImpl(UUID scopeID, int xloc, int yloc, string remoteRegionName, uint externalPort, string externalHostName, string serverURI, UUID ownerID, out GridRegion regInfo, out string reason)
        {
            m_log.InfoFormat("[HYPERGRID LINKER]: Link to {0} {1}, in <{2},{3}>",
                ((serverURI == null) ? (externalHostName + ":" + externalPort) : serverURI),
                remoteRegionName, Util.WorldToRegionLoc((uint)xloc), Util.WorldToRegionLoc((uint)yloc));

            reason = string.Empty;
            Uri uri = null;

            regInfo = new GridRegion();
            if (externalPort > 0)
                regInfo.HttpPort = externalPort;
            else
                regInfo.HttpPort = 80;
            if (externalHostName != null)
                regInfo.ExternalHostName = externalHostName;
            else
                regInfo.ExternalHostName = "0.0.0.0";
            if (serverURI != null)
            {
                regInfo.ServerURI = serverURI;
                // Prefer host/port parsed from the URI when one was supplied;
                // parse failures deliberately fall back to the values set above.
                try
                {
                    uri = new Uri(serverURI);
                    regInfo.ExternalHostName = uri.Host;
                    regInfo.HttpPort = (uint)uri.Port;
                }
                catch { }
            }

            if (remoteRegionName != string.Empty)
                regInfo.RegionName = remoteRegionName;

            regInfo.RegionLocX = xloc;
            regInfo.RegionLocY = yloc;
            regInfo.ScopeID = scopeID;
            regInfo.EstateOwner = ownerID;

            // Make sure we're not hyperlinking to regions on this grid!
            if (m_ThisGatekeeperURI != null)
            {
                if (regInfo.ExternalHostName == m_ThisGatekeeperURI.Host && regInfo.HttpPort == m_ThisGatekeeperURI.Port)
                {
                    m_log.InfoFormat("[HYPERGRID LINKER]: Cannot hyperlink to regions on the same grid");
                    reason = "Cannot hyperlink to regions on the same grid";
                    return false;
                }
            }
            else
                m_log.WarnFormat("[HYPERGRID LINKER]: Please set this grid's Gatekeeper's address in [GridService]!");

            // Check for free coordinates
            GridRegion region = m_GridService.GetRegionByPosition(regInfo.ScopeID, regInfo.RegionLocX, regInfo.RegionLocY);
            if (region != null)
            {
                m_log.WarnFormat("[HYPERGRID LINKER]: Coordinates <{0},{1}> are already occupied by region {2} with uuid {3}",
                    Util.WorldToRegionLoc((uint)regInfo.RegionLocX), Util.WorldToRegionLoc((uint)regInfo.RegionLocY),
                    region.RegionName, region.RegionID);
                reason = "Coordinates are already in use";
                return false;
            }

            // A hyperlink has no local simulator endpoint; use a placeholder.
            try
            {
                regInfo.InternalEndPoint = new IPEndPoint(IPAddress.Parse("0.0.0.0"), (int)0);
            }
            catch (Exception e)
            {
                m_log.Warn("[HYPERGRID LINKER]: Wrong format for link-region: " + e.Message);
                reason = "Internal error";
                return false;
            }

            // Finally, link it
            ulong handle = 0;
            UUID regionID = UUID.Zero;
            string externalName = string.Empty;
            string imageURL = string.Empty;
            int sizeX = (int)Constants.RegionSize;
            int sizeY = (int)Constants.RegionSize;
            if (!m_GatekeeperConnector.LinkRegion(regInfo, out regionID, out handle, out externalName, out imageURL, out reason, out sizeX, out sizeY))
                return false;

            if (regionID == UUID.Zero)
            {
                m_log.Warn("[HYPERGRID LINKER]: Unable to link region");
                reason = "Remote region could not be found";
                return false;
            }

            // Already linked? Reuse the existing record instead of storing twice.
            region = m_GridService.GetRegionByUUID(scopeID, regionID);
            if (region != null)
            {
                m_log.DebugFormat("[HYPERGRID LINKER]: Region already exists in coordinates <{0},{1}>",
                    Util.WorldToRegionLoc((uint)region.RegionLocX), Util.WorldToRegionLoc((uint)region.RegionLocY));
                regInfo = region;
                return true;
            }

            // We are now performing this check for each individual teleport in the EntityTransferModule instead. This
            // allows us to give better feedback when teleports fail because of the distance reason (which can't be
            // done here) and it also hypergrid teleports that are within range (possibly because the source grid
            // itself has regions that are very far apart).
//            uint x, y;
//            if (m_Check4096 && !Check4096(handle, out x, out y))
//            {
//                //RemoveHyperlinkRegion(regInfo.RegionID);
//                reason = "Region is too far (" + x + ", " + y + ")";
//                m_log.Info("[HYPERGRID LINKER]: Unable to link, region is too far (" + x + ", " + y + ")");
//                //return false;
//            }

            regInfo.RegionID = regionID;
            regInfo.RegionSizeX = sizeX;
            regInfo.RegionSizeY = sizeY;

            if (externalName == string.Empty)
                regInfo.RegionName = regInfo.ServerURI;
            else
                regInfo.RegionName = externalName;

            m_log.DebugFormat("[HYPERGRID LINKER]: naming linked region {0}, handle {1}", regInfo.RegionName, handle.ToString());

            // Get the map image
            regInfo.TerrainImage = GetMapImage(regionID, imageURL);

            // Store the origin's coordinates somewhere
            regInfo.RegionSecret = handle.ToString();

            AddHyperlinkRegion(regInfo, handle);
            m_log.InfoFormat("[HYPERGRID LINKER]: Successfully linked to region {0} at <{1},{2}> with image {3}",
                regInfo.RegionName, Util.WorldToRegionLoc((uint)regInfo.RegionLocX), Util.WorldToRegionLoc((uint)regInfo.RegionLocY), regInfo.TerrainImage);
            return true;
        }

        /// <summary>
        /// Removes the hyperlink whose local name matches <paramref name="mapName"/>.
        /// Only regions flagged as Hyperlink are eligible; returns false when no
        /// matching hyperlink exists.
        /// </summary>
        public bool TryUnlinkRegion(string mapName)
        {
            m_log.DebugFormat("[HYPERGRID LINKER]: Request to unlink {0}", mapName);
            GridRegion regInfo = null;

            List<RegionData> regions = m_Database.Get(Util.EscapeForLike(mapName), m_ScopeID);
            if (regions != null && regions.Count > 0)
            {
                OpenSim.Framework.RegionFlags rflags = (OpenSim.Framework.RegionFlags)Convert.ToInt32(regions[0].Data["flags"]);
                if ((rflags & OpenSim.Framework.RegionFlags.Hyperlink) != 0)
                {
                    regInfo = new GridRegion();
                    regInfo.RegionID = regions[0].RegionID;
                    regInfo.ScopeID = m_ScopeID;
                }
            }

            if (regInfo != null)
            {
                RemoveHyperlinkRegion(regInfo.RegionID);
                return true;
            }
            else
            {
                m_log.InfoFormat("[HYPERGRID LINKER]: Region {0} not found", mapName);
                return false;
            }
        }

        // Not currently used
//        /// <summary>
//        /// Cope with this viewer limitation.
//        /// </summary>
//        /// <param name="regInfo"></param>
//        /// <returns></returns>
//        public bool Check4096(ulong realHandle, out uint x, out uint y)
//        {
//            uint ux = 0, uy = 0;
//            Utils.LongToUInts(realHandle, out ux, out uy);
//            x = Util.WorldToRegionLoc(ux);
//            y = Util.WorldToRegionLoc(uy);
//
//            const uint limit = Util.RegionToWorldLoc(4096 - 1);
//            uint xmin = ux - limit;
//            uint xmax = ux + limit;
//            uint ymin = uy - limit;
//            uint ymax = uy + limit;
//
//            // World map boundary checks
//            if (xmin < 0 || xmin > ux)
//                xmin = 0;
//            if (xmax > int.MaxValue || xmax < ux)
//                xmax = int.MaxValue;
//            if (ymin < 0 || ymin > uy)
//                ymin = 0;
//            if (ymax > int.MaxValue || ymax < uy)
//                ymax = int.MaxValue;
//
//            // Check for any regions that are within the possible teleport range to the linked region
//            List<GridRegion> regions = m_GridService.GetRegionRange(m_ScopeID, (int)xmin, (int)xmax, (int)ymin, (int)ymax);
//            if (regions.Count == 0)
//            {
//                return false;
//            }
//            else
//            {
//                // Check for regions which are not linked regions
//                List<GridRegion> hyperlinks = m_GridService.GetHyperlinks(m_ScopeID);
//                IEnumerable<GridRegion> availableRegions = regions.Except(hyperlinks);
//                if (availableRegions.Count() == 0)
//                    return false;
//            }
//
//            return true;
//        }

        /// <summary>Persists the hyperlink with the Hyperlink/NoDirectLogin/RegionOnline flags set.</summary>
        private void AddHyperlinkRegion(GridRegion regionInfo, ulong regionHandle)
        {
            RegionData rdata = m_GridService.RegionInfo2RegionData(regionInfo);
            int flags = (int)OpenSim.Framework.RegionFlags.Hyperlink + (int)OpenSim.Framework.RegionFlags.NoDirectLogin + (int)OpenSim.Framework.RegionFlags.RegionOnline;
            rdata.Data["flags"] = flags.ToString();

            m_Database.Store(rdata);
        }

        private void RemoveHyperlinkRegion(UUID regionID)
        {
            m_Database.Delete(regionID);
        }

        /// <summary>Downloads the remote region's map tile via the gatekeeper connector.</summary>
        public UUID GetMapImage(UUID regionID, string imageURL)
        {
            return m_GatekeeperConnector.GetMapImage(regionID, imageURL, m_MapTileDirectory);
        }
        #endregion

        #region Console Commands

        /// <summary>Console handler for "show hyperlinks": lists all stored hyperlink regions.</summary>
        public void HandleShow(string module, string[] cmd)
        {
            if (cmd.Length != 2)
            {
                MainConsole.Instance.Output("Syntax: show hyperlinks");
                return;
            }
            List<RegionData> regions = m_Database.GetHyperlinks(UUID.Zero);
            if (regions == null || regions.Count < 1)
            {
                MainConsole.Instance.Output("No hyperlinks");
                return;
            }

            MainConsole.Instance.Output("Region Name");
            MainConsole.Instance.Output("Location Region UUID");
            MainConsole.Instance.Output(new string('-', 72));
            foreach (RegionData r in regions)
            {
                MainConsole.Instance.Output(
                    String.Format("{0}\n{2,-32} {1}\n",
                        r.RegionName, r.RegionID,
                        String.Format("{0},{1} ({2},{3})", r.posX, r.posY,
                            Util.WorldToRegionLoc((uint)r.posX), Util.WorldToRegionLoc((uint)r.posY)
                        )
                    )
                );
            }
            return;
        }

        /// <summary>Console dispatcher: strips the command word and forwards to RunHGCommand.</summary>
        public void RunCommand(string module, string[] cmdparams)
        {
            List<string> args = new List<string>(cmdparams);
            if (args.Count < 1)
                return;

            string command = args[0];
            args.RemoveAt(0);

            cmdparams = args.ToArray();

            RunHGCommand(command, cmdparams);
        }

        /// <summary>Handles the modern "link-region &lt;X&gt; &lt;Y&gt; &lt;ServerURI&gt; [name]" form.</summary>
        private void RunLinkRegionCommand(string[] cmdparams)
        {
            int xloc, yloc;
            string serverURI;
            string remoteName = null;
            xloc = (int)Util.RegionToWorldLoc((uint)Convert.ToInt32(cmdparams[0]));
            yloc = (int)Util.RegionToWorldLoc((uint)Convert.ToInt32(cmdparams[1]));
            serverURI = cmdparams[2];
            if (cmdparams.Length > 3)
                remoteName = string.Join(" ", cmdparams, 3, cmdparams.Length - 3);
            string reason = string.Empty;
            GridRegion regInfo;
            if (TryCreateLink(UUID.Zero, xloc, yloc, remoteName, 0, null, serverURI, UUID.Zero, out regInfo, out reason))
                MainConsole.Instance.Output("Hyperlink established");
            else
                MainConsole.Instance.Output("Failed to link region: " + reason);
        }

        /// <summary>
        /// Dispatches the hypergrid console commands, supporting three link-region
        /// syntaxes (URI, host:port[:name], and the deprecated host + port form) plus
        /// XML batch files, unlink-region and link-mapping.
        /// </summary>
        private void RunHGCommand(string command, string[] cmdparams)
        {
            if (command.Equals("link-mapping"))
            {
                if (cmdparams.Length == 2)
                {
                    try
                    {
                        m_autoMappingX = Convert.ToUInt32(cmdparams[0]);
                        m_autoMappingY = Convert.ToUInt32(cmdparams[1]);
                        m_enableAutoMapping = true;
                    }
                    catch (Exception)
                    {
                        m_autoMappingX = 0;
                        m_autoMappingY = 0;
                        m_enableAutoMapping = false;
                    }
                }
            }
            else if (command.Equals("link-region"))
            {
                if (cmdparams.Length < 3)
                {
                    if ((cmdparams.Length == 1) || (cmdparams.Length == 2))
                    {
                        LoadXmlLinkFile(cmdparams);
                    }
                    else
                    {
                        LinkRegionCmdUsage();
                    }
                    return;
                }

                //this should be the prefererred way of setting up hg links now
                if (cmdparams[2].StartsWith("http"))
                {
                    RunLinkRegionCommand(cmdparams);
                }
                else if (cmdparams[2].Contains(":"))
                {
                    // New format
                    string[] parts = cmdparams[2].Split(':');
                    if (parts.Length > 2)
                    {
                        // Insert remote region name
                        ArrayList parameters = new ArrayList(cmdparams);
                        parameters.Insert(3, parts[2]);
                        cmdparams = (string[])parameters.ToArray(typeof(string));
                    }
                    cmdparams[2] = "http://" + parts[0] + ':' + parts[1];

                    RunLinkRegionCommand(cmdparams);
                }
                else
                {
                    // old format
                    GridRegion regInfo;
                    uint xloc, yloc;
                    uint externalPort;
                    string externalHostName;
                    try
                    {
                        xloc = Convert.ToUInt32(cmdparams[0]);
                        yloc = Convert.ToUInt32(cmdparams[1]);
                        externalPort = Convert.ToUInt32(cmdparams[3]);
                        externalHostName = cmdparams[2];
                        //internalPort = Convert.ToUInt32(cmdparams[4]);
                        //remotingPort = Convert.ToUInt32(cmdparams[5]);
                    }
                    catch (Exception e)
                    {
                        MainConsole.Instance.Output("[HGrid] Wrong format for link-region command: " + e.Message);
                        LinkRegionCmdUsage();
                        return;
                    }

                    // Convert cell coordinates given by the user to meters
                    xloc = Util.RegionToWorldLoc(xloc);
                    yloc = Util.RegionToWorldLoc(yloc);
                    string reason = string.Empty;
                    if (TryCreateLink(UUID.Zero, (int)xloc, (int)yloc, string.Empty, externalPort, externalHostName, UUID.Zero, out regInfo, out reason))
                    {
                        // What is this? The GridRegion instance will be discarded anyway,
                        // which effectively ignores any local name given with the command.
                        //if (cmdparams.Length >= 5)
                        //{
                        //    regInfo.RegionName = "";
                        //    for (int i = 4; i < cmdparams.Length; i++)
                        //        regInfo.RegionName += cmdparams[i] + " ";
                        //}
                    }
                }
                return;
            }
            else if (command.Equals("unlink-region"))
            {
                if (cmdparams.Length < 1)
                {
                    UnlinkRegionCmdUsage();
                    return;
                }
                string region = string.Join(" ", cmdparams);
                if (TryUnlinkRegion(region))
                    MainConsole.Instance.Output("Successfully unlinked " + region);
                else
                    MainConsole.Instance.Output("Unable to unlink " + region + ", region not found.");
            }
        }

        /// <summary>
        /// Batch-links regions described in an XML config file (Nini XmlConfigSource),
        /// optionally skipping sections named in an "excludeList:" second argument.
        /// </summary>
        private void LoadXmlLinkFile(string[] cmdparams)
        {
            //use http://www.hgurl.com/hypergrid.xml for test
            try
            {
                XmlReader r = XmlReader.Create(cmdparams[0]);
                XmlConfigSource cs = new XmlConfigSource(r);
                string[] excludeSections = null;

                if (cmdparams.Length == 2)
                {
                    if (cmdparams[1].ToLower().StartsWith("excludelist:"))
                    {
                        // Strip the "excludelist:" prefix (12 characters) and split
                        // the remaining semicolon-separated section names.
                        string excludeString = cmdparams[1].ToLower();
                        excludeString = excludeString.Remove(0, 12);
                        char[] splitter = { ';' };

                        excludeSections = excludeString.Split(splitter);
                    }
                }

                for (int i = 0; i < cs.Configs.Count; i++)
                {
                    bool skip = false;
                    if ((excludeSections != null) && (excludeSections.Length > 0))
                    {
                        for (int n = 0; n < excludeSections.Length; n++)
                        {
                            if (excludeSections[n] == cs.Configs[i].Name.ToLower())
                            {
                                skip = true;
                                break;
                            }
                        }
                    }
                    if (!skip)
                    {
                        ReadLinkFromConfig(cs.Configs[i]);
                    }
                }
            }
            catch (Exception e)
            {
                m_log.Error(e.ToString());
            }
        }

        /// <summary>
        /// Creates one hyperlink from an XML config section, optionally remapping
        /// its coordinates via link-mapping and skipping entries whose real
        /// coordinates are too far from the requested ones.
        /// </summary>
        private void ReadLinkFromConfig(IConfig config)
        {
            GridRegion regInfo;
            uint xloc, yloc;
            uint externalPort;
            string externalHostName;
            uint realXLoc, realYLoc;

            xloc = Convert.ToUInt32(config.GetString("xloc", "0"));
            yloc = Convert.ToUInt32(config.GetString("yloc", "0"));
            externalPort = Convert.ToUInt32(config.GetString("externalPort", "0"));
            externalHostName = config.GetString("externalHostName", "");
            realXLoc = Convert.ToUInt32(config.GetString("real-xloc", "0"));
            realYLoc = Convert.ToUInt32(config.GetString("real-yloc", "0"));

            if (m_enableAutoMapping)
            {
                xloc = (xloc % 100) + m_autoMappingX;
                yloc = (yloc % 100) + m_autoMappingY;
            }

            // NOTE(review): these subtractions are on uints and can wrap around
            // rather than go negative — confirm the 3896-cell distance check
            // behaves as intended for coordinates on either side.
            if (((realXLoc == 0) && (realYLoc == 0)) ||
                (((realXLoc - xloc < 3896) || (xloc - realXLoc < 3896)) &&
                 ((realYLoc - yloc < 3896) || (yloc - realYLoc < 3896))))
            {
                xloc = Util.RegionToWorldLoc(xloc);
                yloc = Util.RegionToWorldLoc(yloc);
                string reason = string.Empty;
                if (TryCreateLink(UUID.Zero, (int)xloc, (int)yloc, string.Empty, externalPort, externalHostName, UUID.Zero, out regInfo, out reason))
                {
                    regInfo.RegionName = config.GetString("localName", "");
                }
                else
                    MainConsole.Instance.Output("Unable to link " + externalHostName + ": " + reason);
            }
        }

        private void LinkRegionCmdUsage()
        {
            MainConsole.Instance.Output("Usage: link-region <Xloc> <Yloc> <ServerURI> [<RemoteRegionName>]");
            MainConsole.Instance.Output("Usage (deprecated): link-region <Xloc> <Yloc> <HostName>:<HttpPort>[:<RemoteRegionName>]");
            MainConsole.Instance.Output("Usage (deprecated): link-region <Xloc> <Yloc> <HostName> <HttpPort> [<LocalName>]");
            MainConsole.Instance.Output("Usage: link-region <URI_of_xml> [<exclude>]");
        }

        private void UnlinkRegionCmdUsage()
        {
            MainConsole.Instance.Output("Usage: unlink-region <LocalName>");
        }

        #endregion
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator 1.0.1.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
//
// NOTE(review): this file is machine-generated; do not hand-edit logic here.
// Regenerate from the service specification instead.

namespace Microsoft.Azure.Management.RecoveryServices.Backup
{
    using Microsoft.Azure;
    using Microsoft.Azure.Management;
    using Microsoft.Azure.Management.RecoveryServices;
    using Microsoft.Rest;
    using Microsoft.Rest.Azure;
    using Models;
    using Newtonsoft.Json;
    using System.Collections;
    using System.Collections.Generic;
    using System.Linq;
    using System.Net;
    using System.Net.Http;
    using System.Threading;
    using System.Threading.Tasks;

    /// <summary>
    /// Operations operations.
    /// </summary>
    internal partial class Operations : IServiceOperations<RecoveryServicesBackupClient>, IOperations
    {
        /// <summary>
        /// Initializes a new instance of the Operations class.
        /// </summary>
        /// <param name='client'>
        /// Reference to the service client.
        /// </param>
        /// <exception cref="System.ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        internal Operations(RecoveryServicesBackupClient client)
        {
            if (client == null)
            {
                throw new System.ArgumentNullException("client");
            }
            Client = client;
        }

        /// <summary>
        /// Gets a reference to the RecoveryServicesBackupClient
        /// </summary>
        public RecoveryServicesBackupClient Client { get; private set; }

        /// <summary>
        /// Returns the list of available operations.
        /// </summary>
        /// <param name='resourceGroupName'>
        /// The name of the resource group where the recovery services vault is
        /// present.
        /// </param>
        /// <param name='customHeaders'>
        /// Headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        /// <exception cref="System.ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        /// <return>
        /// A response object containing the response body and response headers.
        /// </return>
        public async Task<AzureOperationResponse<IPage<ClientDiscoveryValueForSingleApi>>> ListWithHttpMessagesAsync(string resourceGroupName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
        {
            // Client-side parameter validation: fail fast before any network work.
            if (Client.SubscriptionId == null)
            {
                throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
            }
            if (resourceGroupName == null)
            {
                throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
            }
            // Tracing
            bool _shouldTrace = ServiceClientTracing.IsEnabled;
            string _invocationId = null;
            if (_shouldTrace)
            {
                _invocationId = ServiceClientTracing.NextInvocationId.ToString();
                Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
                tracingParameters.Add("resourceGroupName", resourceGroupName);
                tracingParameters.Add("cancellationToken", cancellationToken);
                ServiceClientTracing.Enter(_invocationId, this, "List", tracingParameters);
            }
            // Construct URL
            // NOTE(review): no api-version query parameter is appended here and
            // _queryParameters is never populated, so the suffix branch below is
            // effectively dead — presumably intentional in this generated client;
            // confirm against the service specification before relying on it.
            var _baseUrl = Client.BaseUri.AbsoluteUri;
            var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "Subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/operations").ToString();
            _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
            _url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
            List<string> _queryParameters = new List<string>();
            if (_queryParameters.Count > 0)
            {
                _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
            }
            // Create HTTP transport objects
            var _httpRequest = new HttpRequestMessage();
            HttpResponseMessage _httpResponse = null;
            _httpRequest.Method = new HttpMethod("GET");
            _httpRequest.RequestUri = new System.Uri(_url);
            // Set Headers
            if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
            {
                _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
            }
            if (Client.AcceptLanguage != null)
            {
                if (_httpRequest.Headers.Contains("accept-language"))
                {
                    _httpRequest.Headers.Remove("accept-language");
                }
                _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
            }
            // Caller-supplied headers override any defaults set above.
            if (customHeaders != null)
            {
                foreach(var _header in customHeaders)
                {
                    if (_httpRequest.Headers.Contains(_header.Key))
                    {
                        _httpRequest.Headers.Remove(_header.Key);
                    }
                    _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
                }
            }
            // Serialize Request
            string _requestContent = null;
            // Set Credentials
            if (Client.Credentials != null)
            {
                cancellationToken.ThrowIfCancellationRequested();
                await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
            }
            // Send Request
            if (_shouldTrace)
            {
                ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
            if (_shouldTrace)
            {
                ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
            }
            System.Net.HttpStatusCode _statusCode = _httpResponse.StatusCode;
            cancellationToken.ThrowIfCancellationRequested();
            string _responseContent = null;
            // Anything other than 200 is surfaced as a CloudException, enriched
            // with the service-provided CloudError body when it can be parsed.
            if ((int)_statusCode != 200)
            {
                var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
                try
                {
                    _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                    CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
                    if (_errorBody != null)
                    {
                        ex = new CloudException(_errorBody.Message);
                        ex.Body = _errorBody;
                    }
                }
                catch (JsonException)
                {
                    // Ignore the exception
                }
                ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
                ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
                if (_httpResponse.Headers.Contains("x-ms-request-id"))
                {
                    ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
                }
                if (_shouldTrace)
                {
                    ServiceClientTracing.Error(_invocationId, ex);
                }
                _httpRequest.Dispose();
                if (_httpResponse != null)
                {
                    _httpResponse.Dispose();
                }
                throw ex;
            }
            // Create Result
            var _result = new AzureOperationResponse<IPage<ClientDiscoveryValueForSingleApi>>();
            _result.Request = _httpRequest;
            _result.Response = _httpResponse;
            if (_httpResponse.Headers.Contains("x-ms-request-id"))
            {
                _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
            }
            // Deserialize Response
            if ((int)_statusCode == 200)
            {
                _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                try
                {
                    _result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<Page<ClientDiscoveryValueForSingleApi>>(_responseContent, Client.DeserializationSettings);
                }
                catch (JsonException ex)
                {
                    _httpRequest.Dispose();
                    if (_httpResponse != null)
                    {
                        _httpResponse.Dispose();
                    }
                    throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
                }
            }
            if (_shouldTrace)
            {
                ServiceClientTracing.Exit(_invocationId, _result);
            }
            return _result;
        }

        /// <summary>
        /// Returns the list of available operations.
        /// </summary>
        /// <param name='nextPageLink'>
        /// The NextLink from the previous successful call to List operation.
        /// </param>
        /// <param name='customHeaders'>
        /// Headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        /// <exception cref="System.ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        /// <return>
        /// A response object containing the response body and response headers.
        /// </return>
        public async Task<AzureOperationResponse<IPage<ClientDiscoveryValueForSingleApi>>> ListNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
        {
            if (nextPageLink == null)
            {
                throw new ValidationException(ValidationRules.CannotBeNull, "nextPageLink");
            }
            // Tracing
            bool _shouldTrace = ServiceClientTracing.IsEnabled;
            string _invocationId = null;
            if (_shouldTrace)
            {
                _invocationId = ServiceClientTracing.NextInvocationId.ToString();
                Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
                tracingParameters.Add("nextPageLink", nextPageLink);
                tracingParameters.Add("cancellationToken", cancellationToken);
                ServiceClientTracing.Enter(_invocationId, this, "ListNext", tracingParameters);
            }
            // Construct URL
            // nextPageLink is a complete, pre-escaped URL supplied by the service,
            // so it is substituted verbatim rather than built from the base URI.
            string _url = "{nextLink}";
            _url = _url.Replace("{nextLink}", nextPageLink);
            List<string> _queryParameters = new List<string>();
            if (_queryParameters.Count > 0)
            {
                _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
            }
            // Create HTTP transport objects
            var _httpRequest = new HttpRequestMessage();
            HttpResponseMessage _httpResponse = null;
            _httpRequest.Method = new HttpMethod("GET");
            _httpRequest.RequestUri = new System.Uri(_url);
            // Set Headers
            if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
            {
                _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
            }
            if (Client.AcceptLanguage != null)
            {
                if (_httpRequest.Headers.Contains("accept-language"))
                {
                    _httpRequest.Headers.Remove("accept-language");
                }
                _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
            }
            if (customHeaders != null)
            {
                foreach(var _header in customHeaders)
                {
                    if (_httpRequest.Headers.Contains(_header.Key))
                    {
                        _httpRequest.Headers.Remove(_header.Key);
                    }
                    _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
                }
            }
            // Serialize Request
            string _requestContent = null;
            // Set Credentials
            if (Client.Credentials != null)
            {
                cancellationToken.ThrowIfCancellationRequested();
                await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
            }
            // Send Request
            if (_shouldTrace)
            {
                ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
            if (_shouldTrace)
            {
                ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
            }
            System.Net.HttpStatusCode _statusCode = _httpResponse.StatusCode;
            cancellationToken.ThrowIfCancellationRequested();
            string _responseContent = null;
            if ((int)_statusCode != 200)
            {
                var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
                try
                {
                    _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                    CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
                    if (_errorBody != null)
                    {
                        ex = new CloudException(_errorBody.Message);
                        ex.Body = _errorBody;
                    }
                }
                catch (JsonException)
                {
                    // Ignore the exception
                }
                ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
                ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
                if (_httpResponse.Headers.Contains("x-ms-request-id"))
                {
                    ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
                }
                if (_shouldTrace)
                {
                    ServiceClientTracing.Error(_invocationId, ex);
                }
                _httpRequest.Dispose();
                if (_httpResponse != null)
                {
                    _httpResponse.Dispose();
                }
                throw ex;
            }
            // Create Result
            var _result = new AzureOperationResponse<IPage<ClientDiscoveryValueForSingleApi>>();
            _result.Request = _httpRequest;
            _result.Response = _httpResponse;
            if (_httpResponse.Headers.Contains("x-ms-request-id"))
            {
                _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
            }
            // Deserialize Response
            if ((int)_statusCode == 200)
            {
                _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                try
                {
                    _result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<Page<ClientDiscoveryValueForSingleApi>>(_responseContent, Client.DeserializationSettings);
                }
                catch (JsonException ex)
                {
                    _httpRequest.Dispose();
                    if (_httpResponse != null)
                    {
                        _httpResponse.Dispose();
                    }
                    throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
                }
            }
            if (_shouldTrace)
            {
                ServiceClientTracing.Exit(_invocationId, _result);
            }
            return _result;
        }

    }
}
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Reflection;

namespace Sample.CommandService.Areas.HelpPage
{
    /// <summary>
    /// This class will create an object of a given type and populate it with sample data.
    /// </summary>
    public class ObjectGenerator
    {
        // Number of elements generated for any collection/dictionary/array sample.
        private const int DefaultCollectionSize = 3;
        // Factory for primitive/simple types (int, string, Guid, ...); shared by all calls.
        private readonly SimpleTypeObjectGenerator SimpleObjectGenerator = new SimpleTypeObjectGenerator();

        /// <summary>
        /// Generates an object for a given type. The type needs to be public, have a public default constructor and settable public properties/fields. Currently it supports the following types:
        /// Simple types: <see cref="int"/>, <see cref="string"/>, <see cref="Enum"/>, <see cref="DateTime"/>, <see cref="Uri"/>, etc.
        /// Complex types: POCO types.
        /// Nullables: <see cref="Nullable{T}"/>.
        /// Arrays: arrays of simple types or complex types.
        /// Key value pairs: <see cref="KeyValuePair{TKey,TValue}"/>
        /// Tuples: <see cref="Tuple{T1}"/>, <see cref="Tuple{T1,T2}"/>, etc
        /// Dictionaries: <see cref="IDictionary{TKey,TValue}"/> or anything deriving from <see cref="IDictionary{TKey,TValue}"/>.
        /// Collections: <see cref="IList{T}"/>, <see cref="IEnumerable{T}"/>, <see cref="ICollection{T}"/>, <see cref="IList"/>, <see cref="IEnumerable"/>, <see cref="ICollection"/> or anything deriving from <see cref="ICollection{T}"/> or <see cref="IList"/>.
        /// Queryables: <see cref="IQueryable"/>, <see cref="IQueryable{T}"/>.
        /// </summary>
        /// <param name="type">The type.</param>
        /// <returns>An object of the given type.</returns>
        public object GenerateObject(Type type)
        {
            // The dictionary tracks already-created instances so circular
            // object graphs terminate instead of recursing forever.
            return GenerateObject(type, new Dictionary<Type, object>());
        }

        // Dispatches on the kind of type (simple, array, generic, dictionary,
        // collection, queryable, enum, POCO). Any failure yields null — by design,
        // sample generation is best-effort for the help page.
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Here we just want to return null if anything goes wrong.")]
        private object GenerateObject(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            try
            {
                if (SimpleTypeObjectGenerator.CanGenerateObject(type))
                {
                    return SimpleObjectGenerator.GenerateObject(type);
                }
                if (type.IsArray)
                {
                    return GenerateArray(type, DefaultCollectionSize, createdObjectReferences);
                }
                if (type.IsGenericType)
                {
                    return GenerateGenericType(type, DefaultCollectionSize, createdObjectReferences);
                }
                if (type == typeof(IDictionary))
                {
                    return GenerateDictionary(typeof(Hashtable), DefaultCollectionSize, createdObjectReferences);
                }
                if (typeof(IDictionary).IsAssignableFrom(type))
                {
                    return GenerateDictionary(type, DefaultCollectionSize, createdObjectReferences);
                }
                if (type == typeof(IList) || type == typeof(IEnumerable) || type == typeof(ICollection))
                {
                    return GenerateCollection(typeof(ArrayList), DefaultCollectionSize, createdObjectReferences);
                }
                if (typeof(IList).IsAssignableFrom(type))
                {
                    return GenerateCollection(type, DefaultCollectionSize, createdObjectReferences);
                }
                if (type == typeof(IQueryable))
                {
                    return GenerateQueryable(type, DefaultCollectionSize, createdObjectReferences);
                }
                if (type.IsEnum)
                {
                    return GenerateEnum(type);
                }
                if (type.IsPublic || type.IsNestedPublic)
                {
                    return GenerateComplexObject(type, createdObjectReferences);
                }
            }
            catch
            {
                // Returns null if anything fails
                return null;
            }
            return null;
        }

        // Handles closed generic types: Nullable<T>, KeyValuePair<,>, Tuple<...>,
        // generic collection/dictionary interfaces, and finally generic POCOs.
        private static object GenerateGenericType(Type type, int collectionSize, Dictionary<Type, object> createdObjectReferences)
        {
            Type genericTypeDefinition = type.GetGenericTypeDefinition();
            if (genericTypeDefinition == typeof(Nullable<>))
            {
                return GenerateNullable(type, createdObjectReferences);
            }
            if (genericTypeDefinition == typeof(KeyValuePair<,>))
            {
                return GenerateKeyValuePair(type, createdObjectReferences);
            }
            if (IsTuple(genericTypeDefinition))
            {
                return GenerateTuple(type, createdObjectReferences);
            }
            Type[] genericArguments = type.GetGenericArguments();
            if (genericArguments.Length == 1)
            {
                if (genericTypeDefinition == typeof(IList<>) ||
                    genericTypeDefinition == typeof(IEnumerable<>) ||
                    genericTypeDefinition == typeof(ICollection<>))
                {
                    // Interface requested — materialize with List<T>.
                    Type collectionType = typeof(List<>).MakeGenericType(genericArguments);
                    return GenerateCollection(collectionType, collectionSize, createdObjectReferences);
                }
                if (genericTypeDefinition == typeof(IQueryable<>))
                {
                    return GenerateQueryable(type, collectionSize, createdObjectReferences);
                }
                Type closedCollectionType = typeof(ICollection<>).MakeGenericType(genericArguments[0]);
                if (closedCollectionType.IsAssignableFrom(type))
                {
                    return GenerateCollection(type, collectionSize, createdObjectReferences);
                }
            }
            if (genericArguments.Length == 2)
            {
                if (genericTypeDefinition == typeof(IDictionary<,>))
                {
                    Type dictionaryType = typeof(Dictionary<,>).MakeGenericType(genericArguments);
                    return GenerateDictionary(dictionaryType, collectionSize, createdObjectReferences);
                }
                Type closedDictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments[0], genericArguments[1]);
                if (closedDictionaryType.IsAssignableFrom(type))
                {
                    return GenerateDictionary(type, collectionSize, createdObjectReferences);
                }
            }
            if (type.IsPublic || type.IsNestedPublic)
            {
                return GenerateComplexObject(type, createdObjectReferences);
            }
            return null;
        }

        // Builds a Tuple<...> by generating each generic argument; null when
        // every component failed to generate.
        private static object GenerateTuple(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            Type[] genericArgs = type.GetGenericArguments();
            object[] parameterValues = new object[genericArgs.Length];
            bool failedToCreateTuple = true;
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < genericArgs.Length; i++)
            {
                parameterValues[i] = objectGenerator.GenerateObject(genericArgs[i], createdObjectReferences);
                failedToCreateTuple &= parameterValues[i] == null;
            }
            if (failedToCreateTuple)
            {
                return null;
            }
            object result = Activator.CreateInstance(type, parameterValues);
            return result;
        }

        // True for all eight Tuple arities supported by the framework.
        private static bool IsTuple(Type genericTypeDefinition)
        {
            return genericTypeDefinition == typeof(Tuple<>) ||
                genericTypeDefinition == typeof(Tuple<,>) ||
                genericTypeDefinition == typeof(Tuple<,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,,,,>);
        }

        // Creates a KeyValuePair<K,V>; null when neither key nor value could be generated.
        private static object GenerateKeyValuePair(Type keyValuePairType, Dictionary<Type, object> createdObjectReferences)
        {
            Type[] genericArgs = keyValuePairType.GetGenericArguments();
            Type typeK = genericArgs[0];
            Type typeV = genericArgs[1];
            ObjectGenerator objectGenerator = new ObjectGenerator();
            object keyObject = objectGenerator.GenerateObject(typeK, createdObjectReferences);
            object valueObject = objectGenerator.GenerateObject(typeV, createdObjectReferences);
            if (keyObject == null && valueObject == null)
            {
                // Failed to create key and values
                return null;
            }
            object result = Activator.CreateInstance(keyValuePairType, keyObject, valueObject);
            return result;
        }

        // Fills an array of the element type; null when no element could be generated.
        private static object GenerateArray(Type arrayType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            Type type = arrayType.GetElementType();
            Array result = Array.CreateInstance(type, size);
            bool areAllElementsNull = true;
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object element = objectGenerator.GenerateObject(type, createdObjectReferences);
                result.SetValue(element, i);
                areAllElementsNull &= element == null;
            }
            if (areAllElementsNull)
            {
                return null;
            }
            return result;
        }

        // Populates a dictionary via its Add/Contains(-Key) methods discovered by
        // reflection; skips duplicate keys produced by the sample generator.
        private static object GenerateDictionary(Type dictionaryType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            Type typeK = typeof(object);
            Type typeV = typeof(object);
            if (dictionaryType.IsGenericType)
            {
                Type[] genericArgs = dictionaryType.GetGenericArguments();
                typeK = genericArgs[0];
                typeV = genericArgs[1];
            }
            object result = Activator.CreateInstance(dictionaryType);
            MethodInfo addMethod = dictionaryType.GetMethod("Add") ?? dictionaryType.GetMethod("TryAdd");
            MethodInfo containsMethod = dictionaryType.GetMethod("Contains") ?? dictionaryType.GetMethod("ContainsKey");
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object newKey = objectGenerator.GenerateObject(typeK, createdObjectReferences);
                if (newKey == null)
                {
                    // Cannot generate a valid key
                    return null;
                }
                bool containsKey = (bool)containsMethod.Invoke(result, new object[] { newKey });
                if (!containsKey)
                {
                    object newValue = objectGenerator.GenerateObject(typeV, createdObjectReferences);
                    addMethod.Invoke(result, new object[] { newKey, newValue });
                }
            }
            return result;
        }

        // Returns the first declared enum member, or null for an empty enum.
        private static object GenerateEnum(Type enumType)
        {
            Array possibleValues = Enum.GetValues(enumType);
            if (possibleValues.Length > 0)
            {
                return possibleValues.GetValue(0);
            }
            return null;
        }

        // Wraps a generated list/array in AsQueryable, using the generic overload
        // when the requested queryable type is generic.
        private static object GenerateQueryable(Type queryableType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            bool isGeneric = queryableType.IsGenericType;
            object list;
            if (isGeneric)
            {
                Type listType = typeof(List<>).MakeGenericType(queryableType.GetGenericArguments());
                list = GenerateCollection(listType, size, createdObjectReferences);
            }
            else
            {
                list = GenerateArray(typeof(object[]), size, createdObjectReferences);
            }
            if (list == null)
            {
                return null;
            }
            if (isGeneric)
            {
                Type argumentType = typeof(IEnumerable<>).MakeGenericType(queryableType.GetGenericArguments());
                MethodInfo asQueryableMethod = typeof(Queryable).GetMethod("AsQueryable", new[] { argumentType });
                return asQueryableMethod.Invoke(null, new[] { list });
            }
            return Queryable.AsQueryable((IEnumerable)list);
        }

        // Populates any concrete collection via its Add method; null when no
        // element could be generated.
        private static object GenerateCollection(Type collectionType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            Type type = collectionType.IsGenericType ?
                collectionType.GetGenericArguments()[0] :
                typeof(object);
            object result = Activator.CreateInstance(collectionType);
            MethodInfo addMethod = collectionType.GetMethod("Add");
            bool areAllElementsNull = true;
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object element = objectGenerator.GenerateObject(type, createdObjectReferences);
                addMethod.Invoke(result, new object[] { element });
                areAllElementsNull &= element == null;
            }
            if (areAllElementsNull)
            {
                return null;
            }
            return result;
        }

        // Nullable<T> samples are represented by a boxed T (or null).
        private static object GenerateNullable(Type nullableType, Dictionary<Type, object> createdObjectReferences)
        {
            Type type = nullableType.GetGenericArguments()[0];
            ObjectGenerator objectGenerator = new ObjectGenerator();
            return objectGenerator.GenerateObject(type, createdObjectReferences);
        }

        // Instantiates a POCO (default ctor required for reference types) and
        // fills its public settable properties and fields. The instance is
        // registered in createdObjectReferences BEFORE population so circular
        // references resolve to the same instance.
        private static object GenerateComplexObject(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            object result = null;

            if (createdObjectReferences.TryGetValue(type, out result))
            {
                // The object has been created already, just return it. This will handle the circular reference case.
                return result;
            }

            if (type.IsValueType)
            {
                result = Activator.CreateInstance(type);
            }
            else
            {
                ConstructorInfo defaultCtor = type.GetConstructor(Type.EmptyTypes);
                if (defaultCtor == null)
                {
                    // Cannot instantiate the type because it doesn't have a default constructor
                    return null;
                }

                result = defaultCtor.Invoke(new object[0]);
            }
            createdObjectReferences.Add(type, result);
            SetPublicProperties(type, result, createdObjectReferences);
            SetPublicFields(type, result, createdObjectReferences);
            return result;
        }

        // Assigns a generated value to every writable public instance property.
        private static void SetPublicProperties(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
        {
            PropertyInfo[] properties = type.GetProperties(BindingFlags.Public | BindingFlags.Instance);
            ObjectGenerator objectGenerator = new ObjectGenerator();
            foreach (PropertyInfo property in properties)
            {
                if (property.CanWrite)
                {
                    object propertyValue = objectGenerator.GenerateObject(property.PropertyType, createdObjectReferences);
                    property.SetValue(obj, propertyValue, null);
                }
            }
        }

        // Assigns a generated value to every public instance field.
        private static void SetPublicFields(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
        {
            FieldInfo[] fields = type.GetFields(BindingFlags.Public | BindingFlags.Instance);
            ObjectGenerator objectGenerator = new ObjectGenerator();
            foreach (FieldInfo field in fields)
            {
                object fieldValue = objectGenerator.GenerateObject(field.FieldType, createdObjectReferences);
                field.SetValue(obj, fieldValue);
            }
        }

        // Table-driven factory for simple/primitive types. The incrementing
        // _index makes successive samples distinguishable (e.g. "sample string 1").
        private class SimpleTypeObjectGenerator
        {
            private long _index = 0;
            private static readonly Dictionary<Type, Func<long, object>> DefaultGenerators = InitializeGenerators();

            [SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity", Justification = "These are simple type factories and cannot be split up.")]
            private static Dictionary<Type, Func<long, object>> InitializeGenerators()
            {
                return new Dictionary<Type, Func<long, object>>
                {
                    { typeof(Boolean), index => true },
                    { typeof(Byte), index => (Byte)64 },
                    { typeof(Char), index => (Char)65 },
                    { typeof(DateTime), index => DateTime.Now },
                    { typeof(DateTimeOffset), index => new DateTimeOffset(DateTime.Now) },
                    { typeof(DBNull), index => DBNull.Value },
                    { typeof(Decimal), index => (Decimal)index },
                    { typeof(Double), index => (Double)(index + 0.1) },
                    { typeof(Guid), index => Guid.NewGuid() },
                    { typeof(Int16), index => (Int16)(index % Int16.MaxValue) },
                    { typeof(Int32), index => (Int32)(index % Int32.MaxValue) },
                    { typeof(Int64), index => (Int64)index },
                    { typeof(Object), index => new object() },
                    { typeof(SByte), index => (SByte)64 },
                    { typeof(Single), index => (Single)(index + 0.1) },
                    {
                        typeof(String), index =>
                        {
                            return String.Format(CultureInfo.CurrentCulture, "sample string {0}", index);
                        }
                    },
                    {
                        typeof(TimeSpan), index =>
                        {
                            return TimeSpan.FromTicks(1234567);
                        }
                    },
                    { typeof(UInt16), index => (UInt16)(index % UInt16.MaxValue) },
                    { typeof(UInt32), index => (UInt32)(index % UInt32.MaxValue) },
                    { typeof(UInt64), index => (UInt64)index },
                    {
                        typeof(Uri), index =>
                        {
                            return new Uri(String.Format(CultureInfo.CurrentCulture, "http://webapihelppage{0}.com", index));
                        }
                    },
                };
            }

            public static bool CanGenerateObject(Type type)
            {
                return DefaultGenerators.ContainsKey(type);
            }

            public object GenerateObject(Type type)
            {
                return DefaultGenerators[type](++_index);
            }
        }
    }
}
using System;
using System.Collections;
using System.Diagnostics;

namespace OTFontFile
{
    /// <summary>
    /// A 32-bit Unicode scalar value ("big" character), convertible to/from
    /// uint and constructible from a UTF-16 surrogate pair.
    /// </summary>
    public struct BigUn
    {
        uint m_char32;

        BigUn(char c) { m_char32 = c; }
        BigUn(uint char32) { m_char32 = char32; }
        BigUn(char SurrogateHigh, char SurrogateLow)
        {
            m_char32 = (uint)SurrogatePairToUnicodeScalar(SurrogateHigh, SurrogateLow);
        }

        /// <summary>True when c is in the high-surrogate range U+D800..U+DBFF.</summary>
        public static bool IsHighSurrogate(char c) { return (c >= 0xd800 && c <= 0xdbff); }

        /// <summary>True when c is in the low-surrogate range U+DC00..U+DFFF.</summary>
        public static bool IsLowSurrogate(char c) { return (c >= 0xdc00 && c <= 0xdfff); }

        /// <summary>
        /// Combines a UTF-16 surrogate pair into the corresponding Unicode scalar.
        /// </summary>
        /// <exception cref="ArgumentOutOfRangeException">
        /// Thrown when either char is outside its surrogate range.
        /// </exception>
        public static BigUn SurrogatePairToUnicodeScalar(char SurrogateHigh, char SurrogateLow)
        {
            // validate parameters
            if (!IsHighSurrogate(SurrogateHigh))
            {
                throw new ArgumentOutOfRangeException("SurrogateHigh");
            }
            if (!IsLowSurrogate(SurrogateLow))
            {
                throw new ArgumentOutOfRangeException("SurrogateLow");
            }

            // calculate and return value (standard UTF-16 decoding formula)
            uint retval = ((uint)SurrogateHigh - 0xd800) * 0x0400 + ((uint)SurrogateLow - 0xdc00) + 0x10000;
            return (BigUn)retval;
        }

        static public explicit operator uint(BigUn char32) { return char32.m_char32; }
        static public explicit operator BigUn(uint char32)
        {
            BigUn bu = new BigUn(char32);
            return bu;
        }

        static public bool operator < (BigUn bg1, BigUn bg2) { return bg1.m_char32 < bg2.m_char32; }
        static public bool operator > (BigUn bg1, BigUn bg2) { return bg1.m_char32 > bg2.m_char32; }
        static public bool operator == (BigUn bg1, BigUn bg2) { return bg1.m_char32 == bg2.m_char32; }
        static public bool operator != (BigUn bg1, BigUn bg2) { return bg1.m_char32 != bg2.m_char32; }

        public override bool Equals(Object o) { return this == (BigUn)o; }

        public override int GetHashCode()
        {
            // FIX: the original returned this.GetHashCode(), which recurses
            // infinitely and throws StackOverflowException. Hash on the wrapped
            // value, consistent with operator ==.
            return (int)m_char32;
        }
    }

    /// <summary>
    /// OpenType F2Dot14 fixed-point number: a signed 2.14 value stored in a short.
    /// </summary>
    public struct OTF2Dot14
    {
        private short valAsShort;

        public OTF2Dot14(short valAsShort)
        {
            this.valAsShort = valAsShort;
        }

        public short ValAsShort
        {
            get { return this.valAsShort; }
            set { this.valAsShort = value; }
        }

        // NOTE(review): both accessors take Math.Abs first, so the sign is
        // discarded; presumably callers only use these on non-negative values —
        // confirm before relying on them for negative F2Dot14 numbers.
        public ushort Mantissa
        {
            get
            {
                int sh = Math.Abs(this.valAsShort);
                int mantissa = (sh >> 14);
                return (ushort)mantissa;
            }
        }

        public ushort Fraction
        {
            get
            {
                int sh = Math.Abs(this.valAsShort);
                int fraction = sh & 0x3fff;
                return (ushort)fraction;
            }
        }

        /// <summary>Converts to the real value: raw / 2^14.</summary>
        public static explicit operator double(OTF2Dot14 valAsShort)
        {
            return ((double)valAsShort.ValAsShort / (double)0x4000);
        }

        public override int GetHashCode()
        {
            // FIX: the original returned (int)(uint)this, which routes through
            // the double conversion and then an unchecked double->uint cast —
            // unspecified for negative values. The raw short is a stable hash
            // consistent with operator ==.
            return valAsShort;
        }

        static public bool operator == (OTF2Dot14 f1, OTF2Dot14 f2)
        {
            // (struct operands are never null; the checks are kept for parity
            // with the other value types in this file)
            if ((object)f1 == null && (object)f2 == null)
            {
                return true;
            }
            else if ((object)f1 == null || (object)f2 == null)
            {
                return false;
            }
            else
            {
                return (f1.valAsShort == f2.valAsShort);
            }
        }

        static public bool operator != (OTF2Dot14 f1, OTF2Dot14 f2)
        {
            return (!(f1 == f2));
        }

        public override bool Equals(object obj)
        {
            return (this == (OTF2Dot14)obj);
        }
    }

    /// <summary>
    /// OpenType Fixed (16.16) number: signed 16-bit mantissa plus unsigned
    /// 16-bit fraction.
    /// </summary>
    public struct OTFixed
    {
        public short mantissa;
        public ushort fraction;

        public OTFixed(short Mantissa, ushort Fraction)
        {
            mantissa = Mantissa;
            fraction = Fraction;
        }

        public OTFixed(double fixValue)
        {
            mantissa = (short)Math.Round(fixValue, 0);
            fraction = (ushort)Math.Round((fixValue - mantissa) * 65536, 0);
        }

        /// <summary>Packs the value into its 32-bit wire representation.</summary>
        public uint GetUint()
        {
            return (uint)(mantissa << 16 | fraction);
        }

        /// <summary>Converts to the real value: mantissa + fraction/65536.</summary>
        public double GetDouble()
        {
            return (double)mantissa + (double)fraction / 65536.0;
        }

        /// <summary>Hex rendering of the packed value, e.g. "0x00010000" for 1.0.</summary>
        public string GetHexString()
        {
            // FIX: the original used unpadded "X" for both halves, so e.g.
            // mantissa=1, fraction=1 rendered ambiguously as "0x11". Pad each
            // 16-bit half to four hex digits.
            return "0x" + mantissa.ToString("X4") + fraction.ToString("X4");
        }

        public override string ToString()
        {
            double number = Math.Round(this.GetDouble(), 3);
            return number.ToString();
        }

        static public bool operator == (OTFixed f1, OTFixed f2)
        {
            if ((object)f1 == null && (object)f2 == null)
            {
                return true;
            }
            else if ((object)f1 == null || (object)f2 == null)
            {
                return false;
            }
            else
            {
                return (f1.GetUint() == f2.GetUint());
            }
        }

        static public bool operator != (OTFixed f1, OTFixed f2)
        {
            return (!(f1 == f2));
        }

        public override bool Equals(object obj)
        {
            return (this == (OTFixed)obj);
        }

        public override int GetHashCode()
        {
            return (int)this.GetUint();
        }
    }

    /// <summary>
    /// Four-byte OpenType table tag, convertible to/from byte[], uint and string.
    /// </summary>
    public class OTTag
    {
        /***************
         * constructors
         */

        public OTTag(byte[] tagbuf)
        {
            m_tag = new byte[4];
            for (int i = 0; i < 4; i++)
            {
                m_tag[i] = tagbuf[i];
            }
        }

        public OTTag(byte[] tagbuf, uint offset)
        {
            m_tag = new byte[4];
            for (int i = 0; i < 4; i++)
            {
                m_tag[i] = tagbuf[i + offset];
            }
        }

        /************************
         * operators
         */

        static public implicit operator byte[](OTTag tag)
        {
            return tag.GetBytes();
        }

        static public implicit operator OTTag(uint tagvalue)
        {
            // big-endian unpack: most significant byte first
            byte[] buf = new byte[4];
            buf[0] = (byte)((tagvalue & 0xff000000) >> 24);
            buf[1] = (byte)((tagvalue & 0x00ff0000) >> 16);
            buf[2] = (byte)((tagvalue & 0x0000ff00) >> 8);
            buf[3] = (byte)(tagvalue & 0x000000ff);
            return new OTTag(buf);
        }

        static public implicit operator uint(OTTag tag)
        {
            byte[] buf = tag.GetBytes();
            uint tagValue = 0;
            tagValue += (uint)(buf[0] << 24);
            tagValue += (uint)(buf[1] << 16);
            tagValue += (uint)(buf[2] << 8);
            tagValue += (uint)(buf[3]);
            return tagValue;
        }

        // NOTE(review): assumes the string has at least 4 chars; shorter input
        // throws IndexOutOfRangeException — confirm callers always pass 4-char tags.
        static public implicit operator OTTag(string value)
        {
            byte[] tagbuf = new byte[4];
            for (int i = 0; i < 4; i++)
            {
                tagbuf[i] = (byte)value[i];
            }
            return new OTTag(tagbuf);
        }

        static public implicit operator string (OTTag tag)
        {
            byte[] buf = tag.GetBytes();
            string s = "";
            for (int i = 0; i < 4; i++)
            {
                s += (char)buf[i];
            }
            return s;
        }

        static public bool operator == (OTTag t1, OTTag t2)
        {
            if ((object)t1 == null && (object)t2 == null)
            {
                return true;
            }
            else if ((object)t1 == null || (object)t2 == null)
            {
                return false;
            }
            else
            {
                uint u1 = t1;
                uint u2 = t2;
                return (u1 == u2);
            }
        }

        static public bool operator != (OTTag t1, OTTag t2)
        {
            return (!(t1 == t2));
        }

        /*****************
         * public methods
         */

        public override bool Equals(object obj)
        {
            return (this == (OTTag)obj);
        }

        public override int GetHashCode()
        {
            return (int)(uint)this;
        }

        public byte[] GetBytes()
        {
            return m_tag;
        }

        /// <summary>
        /// Valid per the OpenType spec: all four bytes printable ASCII (0x20..0x7E).
        /// </summary>
        public bool IsValid()
        {
            bool bRet = true;
            for (int i = 0; i < 4; i++)
            {
                if (m_tag[i] < 32 || m_tag[i] > 126)
                {
                    bRet = false;
                    break;
                }
            }
            return bRet;
        }

        /**************
         * member data
         */

        byte[] m_tag;
    }

    /// <summary>
    /// One 16-byte entry of the sfnt table directory: tag, checksum, offset, length.
    /// </summary>
    public class DirectoryEntry
    {
        public DirectoryEntry()
        {
            m_buf = new MBOBuffer(16);
        }

        public DirectoryEntry(MBOBuffer buf)
        {
            Debug.Assert(buf.GetLength() == 16);
            m_buf = buf;
        }

        // Byte offsets of the fields within the 16-byte entry.
        public enum FieldOffsets
        {
            tag      = 0,
            checkSum = 4,
            offset   = 8,
            length   = 12
        }

        /// <summary>Deep copy (allocates a fresh tag from the source bytes).</summary>
        public DirectoryEntry(DirectoryEntry obj)
        {
            tag = new OTTag(obj.tag.GetBytes());
            checkSum = obj.checkSum;
            offset = obj.offset;
            length = obj.length;
        }

        public OTTag tag
        {
            get {return new OTTag(m_buf.GetBuffer());}
            set {m_buf.SetTag(value, (uint)FieldOffsets.tag);}
        }

        public uint checkSum
        {
            get {return m_buf.GetUint((uint)FieldOffsets.checkSum);}
            set {m_buf.SetUint(value, (uint)FieldOffsets.checkSum);}
        }

        public uint offset
        {
            get {return m_buf.GetUint((uint)FieldOffsets.offset);}
            set {m_buf.SetUint(value, (uint)FieldOffsets.offset);}
        }

        public uint length
        {
            get {return m_buf.GetUint((uint)FieldOffsets.length);}
            set {m_buf.SetUint(value, (uint)FieldOffsets.length);}
        }

        public MBOBuffer m_buf;
    }

    /// <summary>
    /// The 12-byte sfnt offset table (font header) plus its directory entries.
    /// </summary>
    public class OffsetTable
    {
        // constructor
        public OffsetTable(MBOBuffer buf)
        {
            Debug.Assert(buf.GetLength() == 12);
            m_buf = buf;
            DirectoryEntries = new System.Collections.ArrayList();
        }

        public OffsetTable(OTFixed version, ushort nTables)
        {
            m_buf = new MBOBuffer(12);
            sfntVersion = version;
            numTables = nTables;
            if (nTables != 0)
            {
                // these values are truly undefined when numTables is zero
                // since there is no power of 2 that is less that or equal to zero
                searchRange = (ushort)(util.MaxPower2LE(nTables) * 16);
                entrySelector = util.Log2(util.MaxPower2LE(nTables));
                rangeShift = (ushort)(nTables * 16 - searchRange);
            }
            DirectoryEntries = new System.Collections.ArrayList();
        }

        // Byte offsets of the fields within the 12-byte offset table.
        public enum FieldOffsets
        {
            sfntVersion   = 0,
            numTables     = 4,
            searchRange   = 6,
            entrySelector = 8,
            rangeShift    = 10
        }

        /// <summary>Checksum over the 12 header bytes only.</summary>
        public uint CalcOffsetTableChecksum()
        {
            return m_buf.CalcChecksum();
        }

        /// <summary>
        /// Sum of all directory-entry fields; relies on unchecked uint wraparound,
        /// as the sfnt checksum algorithm requires.
        /// </summary>
        public uint CalcDirectoryEntriesChecksum()
        {
            uint sum = 0;
            for (int i = 0; i < DirectoryEntries.Count; i++)
            {
                DirectoryEntry de = (DirectoryEntry)DirectoryEntries[i];
                sum += de.tag + de.checkSum + de.offset + de.length;
            }
            return sum;
        }

        // accessors

        public OTFixed sfntVersion
        {
            get {return m_buf.GetFixed((uint)FieldOffsets.sfntVersion);}
            set {m_buf.SetFixed(value, (uint)FieldOffsets.sfntVersion);}
        }

        public ushort numTables
        {
            get {return m_buf.GetUshort((uint)FieldOffsets.numTables);}
            set {m_buf.SetUshort(value, (uint)FieldOffsets.numTables);}
        }

        public ushort searchRange
        {
            get {return m_buf.GetUshort((uint)FieldOffsets.searchRange);}
            set {m_buf.SetUshort(value, (uint)FieldOffsets.searchRange);}
        }

        public ushort entrySelector
        {
            get {return m_buf.GetUshort((uint)FieldOffsets.entrySelector);}
            set {m_buf.SetUshort(value, (uint)FieldOffsets.entrySelector);}
        }

        public ushort rangeShift
        {
            get {return m_buf.GetUshort((uint)FieldOffsets.rangeShift);}
            set {m_buf.SetUshort(value, (uint)FieldOffsets.rangeShift);}
        }

        // member data
        public MBOBuffer m_buf;
        public System.Collections.ArrayList DirectoryEntries;
    }
}
using Qwack.Core.Basic;
using Qwack.Dates;
using Qwack.Math;
using Qwack.Math.Interpolation;
using static Qwack.Math.Statistics;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Qwack.Transport.BasicTypes;
using static System.Math;
using Qwack.Transport.TransportObjects.MarketData.VolSurfaces;

namespace Qwack.Options.VolSurfaces
{
    /// <summary>
    /// A volatility surface which returns a value interpolated from a grid of vols for varying strikes and maturities
    /// Strikes can be either absolute or delta type
    /// Interpolation method for strike and time dimensions can be specified seperately
    /// </summary>
    public class GridVolSurface : IVolSurface, IATMVolSurface
    {
        // Query results are memoised per (strike, maturity, forward) key; the
        // caches are never invalidated, so mutating the surface after first use
        // would serve stale values.
        private readonly bool _allowCaching = true;
        private readonly ConcurrentDictionary<string, double> _absVolCache = new();
        private readonly ConcurrentDictionary<string, double> _deltaVolCache = new();

        public Frequency OverrideSpotLag { get; set; }
        public string Name { get; set; }
        public DateTime OriginDate { get; set; }
        public double[] Strikes { get; set; }
        public StrikeType StrikeType { get; set; }
        public Interpolator1DType StrikeInterpolatorType { get; set; } = Interpolator1DType.LinearFlatExtrap;
        // Expiries expressed as year fractions from OriginDate (see Build).
        public double[] ExpiriesDouble { get; set; }
        public Interpolator1DType TimeInterpolatorType { get; set; } = Interpolator1DType.LinearInVariance;
        // One row of vols per expiry; columns line up with Strikes.
        public double[][] Volatilities { get; set; }
        public DateTime[] Expiries { get; set; }
        public string[] PillarLabels { get; set; }
        public DayCountBasis TimeBasis { get; set; } = DayCountBasis.Act365F;
        // When set, delta-strike lookups are clamped into [FlatDeltaPoint, 1-FlatDeltaPoint],
        // i.e. the smile is held flat in the extreme wings.
        public bool FlatDeltaSmileInExtreme { get; set; }
        public double FlatDeltaPoint { get; set; } = 0.001;
        public Currency Currency { get; set; }
        public string AssetId { get; set; }
        public IInterpolator2D LocalVolGrid { get; set; }

        // One strike-dimension interpolator per expiry row (built in Build).
        internal IInterpolator1D[] _interpolators;

        public GridVolSurface() { }

        public GridVolSurface(DateTime originDate, double[] strikes, DateTime[] expiries, double[][] vols,
            StrikeType strikeType, Interpolator1DType strikeInterpType, Interpolator1DType timeInterpType,
            DayCountBasis timeBasis, string[] pillarLabels = null):base()
        {
            StrikeType = strikeType;
            StrikeInterpolatorType = strikeInterpType;
            TimeInterpolatorType = timeInterpType;
            TimeBasis = timeBasis;

            // Default pillar labels are the ISO dates of the expiries.
            if (pillarLabels == null)
                PillarLabels = expiries.Select(x => x.ToString("yyyy-MM-dd")).ToArray();
            else
                PillarLabels = pillarLabels;

            Build(originDate, strikes, expiries, vols);
        }

        // Rehydrates a surface from its serialised transport object.
        public GridVolSurface(TO_GridVolSurface transportObject, ICurrencyProvider currencyProvider)
            :this(transportObject.OriginDate,transportObject.Strikes,transportObject.Expiries,transportObject.Volatilities,transportObject.StrikeType,
                 transportObject.StrikeInterpolatorType, transportObject.TimeInterpolatorType, transportObject.TimeBasis, transportObject.PillarLabels)
        {
            Currency = currencyProvider.GetCurrency(transportObject.Currency);
            AssetId = transportObject.AssetId;
            Name = transportObject.Name;
        }

        /// <summary>
        /// Stores the grid and pre-computes the per-expiry strike interpolators
        /// and the year-fraction expiries.
        /// </summary>
        public void Build(DateTime originDate, double[] strikes, DateTime[] expiries, double[][] vols)
        {
            OriginDate = originDate;
            Strikes = strikes;
            Expiries = expiries;
            Volatilities = vols;
            ExpiriesDouble = Expiries.Select(t => TimeBasis.CalculateYearFraction(originDate, t)).ToArray();

            _interpolators = vols.Select((v, ix) =>
                InterpolatorFactory.GetInterpolator(Strikes, v, StrikeInterpolatorType)).ToArray();
        }

        /// <summary>
        /// Vol for an absolute strike. If the grid is delta-strike based, the
        /// matching delta strike is root-found with Brent's method first.
        /// </summary>
        public double GetVolForAbsoluteStrike(double strike, double maturity, double forward)
        {
            var key = $"{strike:f6}~{maturity:f3}~{forward:f6}";
            if (_allowCaching && _absVolCache.TryGetValue(key, out var vol))
                return vol;

            if (StrikeType == StrikeType.Absolute)
            {
                // Interpolate this strike across expiries, then in time.
                var interpForStrike = InterpolatorFactory.GetInterpolator(ExpiriesDouble,
                    _interpolators.Select(x => x.Interpolate(strike)).ToArray(),
                    TimeInterpolatorType);
                vol = interpForStrike.Interpolate(maturity);
            }
            else
            {
                var fwd = forward;
                //var cp = strike < 0 ? OptionType.Put : OptionType.Call;

                // Residual between the absolute strike implied by a candidate
                // (negated) delta strike and the requested strike.
                Func<double, double> testFunc = (deltaK =>
                {
                    // clamp into the flat-wing region when configured
                    var dkModified = FlatDeltaSmileInExtreme ?
                        Min(1.0-FlatDeltaPoint,Max(deltaK, FlatDeltaPoint)) :
                        deltaK;
                    var interpForStrike = InterpolatorFactory.GetInterpolator(ExpiriesDouble,
                        _interpolators.Select(x => x.Interpolate(-dkModified)).ToArray(),
                        TimeInterpolatorType);
                    var vol2 = interpForStrike.Interpolate(maturity);
                    var absK = BlackFunctions.AbsoluteStrikefromDeltaKAnalytic(fwd, deltaK, 0, maturity, vol2);
                    return absK - strike;
                });

                var hiK = FlatDeltaSmileInExtreme ? 1.0 - FlatDeltaPoint : 0.999999999;
                var loK = FlatDeltaSmileInExtreme ? FlatDeltaPoint : 0.000000001;

                // Solve in negative-delta space, then flip the sign back.
                var solvedStrike = -Math.Solvers.Brent.BrentsMethodSolve(testFunc, -hiK, -loK, 1e-12);

                if (solvedStrike == loK || solvedStrike == hiK) //out of bounds
                {
                    // pick whichever bound lands closer to the forward
                    var upperK = testFunc(-loK);
                    var lowerK = testFunc(-hiK);
                    if (Abs(upperK - fwd) < Abs(lowerK - fwd))
                        solvedStrike = loK;
                    else
                        solvedStrike = hiK;
                }

                var interpForSolvedStrike = InterpolatorFactory.GetInterpolator(ExpiriesDouble,
                    _interpolators.Select(x => x.Interpolate(solvedStrike)).ToArray(),
                    TimeInterpolatorType);
                vol = interpForSolvedStrike.Interpolate(maturity);
            }

            if (_allowCaching)
                _absVolCache[key] = vol;
            return vol;
        }

        public double GetVolForAbsoluteStrike(double strike, DateTime expiry, double forward) =>
            GetVolForAbsoluteStrike(strike, TimeBasis.CalculateYearFraction(OriginDate, expiry), forward);

        /// <summary>
        /// Call vol minus put vol at the given (unsigned) delta strike.
        /// </summary>
        public double RiskReversal(double deltaStrike, double maturity, double forward)
        {
            var callVol = GetVolForDeltaStrike(deltaStrike, maturity, forward);
            var putVol = GetVolForDeltaStrike(-deltaStrike, maturity, forward);
            return callVol - putVol;
        }

        /// <summary>
        /// Vol for a (signed) delta strike; negative means put delta. If the grid
        /// is absolute-strike based, the matching absolute strike is root-found.
        /// </summary>
        public double GetVolForDeltaStrike(double deltaStrike, double maturity, double forward)
        {
            if (deltaStrike > 1.0 || deltaStrike < -1.0)
                throw new ArgumentOutOfRangeException($"Delta strike must be in range -1.0 < x < 1.0 - value was {deltaStrike}");

            var key = $"{deltaStrike:f6}~{maturity:f3}~{forward:f6}";
            if (_allowCaching && _deltaVolCache.TryGetValue(key, out var vol))
                return vol;

            if (StrikeType == StrikeType.ForwardDelta)
            {
                var interpForStrike = InterpolatorFactory.GetInterpolator(ExpiriesDouble,
                    _interpolators.Select(x => x.Interpolate(deltaStrike)).ToArray(),
                    TimeInterpolatorType);
                vol = interpForStrike.Interpolate(maturity);
            }
            else
            {
                var fwd = forward;
                var cp = deltaStrike < 0 ? OptionType.Put : OptionType.Call;

                // Residual between the Black delta of a candidate absolute strike
                // and the requested (unsigned) delta.
                Func<double, double> testFunc = (absK =>
                {
                    var interpForStrike = InterpolatorFactory.GetInterpolator(ExpiriesDouble,
                        _interpolators.Select(x => x.Interpolate(absK)).ToArray(),
                        TimeInterpolatorType);
                    var vol2 = interpForStrike.Interpolate(maturity);
                    var deltaK = BlackFunctions.BlackDelta(fwd, absK, 0, maturity, vol2, cp);
                    return deltaK - Abs(deltaStrike);
                });

                var solvedStrike = Math.Solvers.Brent.BrentsMethodSolve(testFunc, 0.000000001, 10 * fwd, 1e-8);

                var interpForSolvedStrike = InterpolatorFactory.GetInterpolator(ExpiriesDouble,
                    _interpolators.Select(x => x.Interpolate(solvedStrike)).ToArray(),
                    TimeInterpolatorType);
                vol = interpForSolvedStrike.Interpolate(maturity);
            }

            if (_allowCaching)
                _deltaVolCache[key] = vol;
            return vol;
        }

        // Root-finds the absolute strike whose Black delta matches 'deltaStrike'
        // (signed; negative is put delta), searching (~0, 50*fwd].
        private double GetAbsStrikeForDelta(double fwd, double deltaStrike, double maturity)
        {
            var cp = deltaStrike < 0 ? OptionType.Put : OptionType.Call;

            Func<double, double> testFunc = (absK =>
            {
                var interpForStrike = InterpolatorFactory.GetInterpolator(ExpiriesDouble,
                    ExpiriesDouble.Select(e => GetVolForAbsoluteStrike(absK, e, fwd)).ToArray(),
                    TimeInterpolatorType);
                var vol2 = interpForStrike.Interpolate(maturity);
                var deltaK = BlackFunctions.BlackDelta(fwd, absK, 0, maturity, vol2, cp);
                return deltaK - deltaStrike;
            });

            var solvedStrike = Math.Solvers.Brent.BrentsMethodSolve(testFunc, 0.000000001, 50 * fwd, 1e-8);
            return solvedStrike;
        }

        // Inverse of GetAbsStrikeForDelta: finds the delta strike whose implied
        // absolute strike matches 'strike'. Solves in negative-delta space.
        private double GetDeltaStrikeForAbs(double fwd, double strike, double maturity)
        {
            // NOTE(review): 'cp' is computed but never used inside testFunc below
            // (AbsoluteStrikefromDeltaKAnalytic takes no call/put flag) — looks
            // like a leftover; confirm before removing.
            var cp = strike < 0 ? OptionType.Put : OptionType.Call;

            Func<double, double> testFunc = (deltaK =>
            {
                var interpForStrike = InterpolatorFactory.GetInterpolator(ExpiriesDouble,
                    ExpiriesDouble.Select(e => GetVolForDeltaStrike(deltaK, e, fwd)).ToArray(),
                    TimeInterpolatorType);
                var vol2 = interpForStrike.Interpolate(maturity);
                var absK = BlackFunctions.AbsoluteStrikefromDeltaKAnalytic(fwd, deltaK, 0, maturity, vol2);
                return absK - strike;
            });

            var solvedStrike = -Math.Solvers.Brent.BrentsMethodSolve(testFunc, -0.99999999999, -0.00000000001, 1e-8);

            if (solvedStrike == 0.00000000001 || solvedStrike == 0.99999999999) //out of bounds
            {
                // pick whichever bound lands closer to the forward
                var upperK = testFunc(-0.00000000001);
                var lowerK = testFunc(-0.99999999999);
                if (Abs(upperK - fwd) < Abs(lowerK - fwd))
                    solvedStrike = 0.00000000001;
                else
                    solvedStrike = 0.99999999999;
            }

            return solvedStrike;
        }

        public double GetVolForDeltaStrike(double strike, DateTime expiry, double forward) =>
            GetVolForDeltaStrike(strike, TimeBasis.CalculateYearFraction(OriginDate, expiry), forward);

        /// <summary>
        /// One bumped copy of the surface per pillar up to (just past) the last
        /// sensitivity date, keyed by pillar label — for ATM vega ladders.
        /// </summary>
        public Dictionary<string, IVolSurface> GetATMVegaScenarios(double bumpSize, DateTime? LastSensitivityDate)
        {
            var o = new Dictionary<string, IVolSurface>();

            var lastBumpIx = Expiries.Length;
            if (LastSensitivityDate.HasValue)
            {
                var ix = Array.BinarySearch(Expiries, LastSensitivityDate.Value);
                ix = (ix < 0) ? ~ix : ix;
                // include one pillar beyond the last sensitivity date
                ix += 2;
                lastBumpIx = Min(ix, lastBumpIx); //cap at last pillar
            }

            for (var i=0;i< lastBumpIx; i++)
            {
                // shallow clone is fine: only row i is replaced, the rest are shared
                var volsBumped = (double[][])Volatilities.Clone();
                volsBumped[i] = volsBumped[i].Select(x => x + bumpSize).ToArray();
                o.Add(PillarLabels[i], new GridVolSurface(OriginDate, Strikes, Expiries, volsBumped,
                    StrikeType, StrikeInterpolatorType, TimeInterpolatorType, TimeBasis, PillarLabels)
                {
                    Currency = Currency,
                    AssetId = AssetId
                });
            }

            return o;
        }

        // Maps a pillar label back to its expiry; falls back to OriginDate when
        // the label is unknown.
        public DateTime PillarDatesForLabel(string label)
        {
            var labelIx = Array.IndexOf(PillarLabels, label);
            if (labelIx == -1)
                return OriginDate;
            return Expiries[labelIx];
        }

        public double GetForwardATMVol(DateTime startDate, DateTime endDate) =>
            GetForwardATMVol(TimeBasis.CalculateYearFraction(OriginDate, startDate), TimeBasis.CalculateYearFraction(OriginDate, endDate));

        /// <summary>
        /// Forward ATM (50-delta) vol between two year fractions, computed from
        /// the difference in total variance. Only supported for delta surfaces.
        /// </summary>
        public double GetForwardATMVol(double start, double end)
        {
            if (start > end)
                throw new Exception("Start must be strictly less than end");

            if (StrikeType==StrikeType.ForwardDelta)
            {
                if (start == end)
                    return GetVolForDeltaStrike(0.5,start,1.0);

                // total variance at each end: vol^2 * t
                var vStart = GetVolForDeltaStrike(0.5, start, 1.0);
                vStart *= vStart * start;
                var vEnd = GetVolForDeltaStrike(0.5, end, 1.0);
                vEnd *= vEnd * end;

                var vDiff = vEnd - vStart;
                if (vDiff < 0)
                    // RollSurface sets _suppressVarianceErrors to tolerate this
                    if (_suppressVarianceErrors)
                        vDiff = 0.0;
                    else
                        throw new Exception("Negative forward variance detected");

                return Sqrt(vDiff / (end - start));
            }

            throw new Exception("Only Forward-Delta type supported for fwd vol calcs");
        }

        internal bool _suppressVarianceErrors;

        /// <summary>
        /// Rolls the surface to a new origin: drops expired pillars and rescales
        /// each remaining row by (new forward ATM / old forward ATM).
        /// </summary>
        public virtual IVolSurface RollSurface(DateTime newOrigin)
        {
            _suppressVarianceErrors = true;

            var newMaturities = Expiries.Where(x => x > newOrigin).ToArray();
            var newVols = new double[newMaturities.Length][];

            var newATMs = newMaturities.Select(m => GetForwardATMVol(newOrigin, m)).ToArray();
            var oldATMs = newMaturities.Select(m => GetForwardATMVol(OriginDate, m)).ToArray();

            var numDropped = Expiries.Length - newMaturities.Length;
            for (var i=0;i<newMaturities.Length;i++)
            {
                newVols[i] = new double[Strikes.Length];
                for (var j = 0; j < Strikes.Length; j++)
                    newVols[i][j] = Volatilities[i+ numDropped][j] / oldATMs[i] * newATMs[i];
            }

            return new GridVolSurface(newOrigin, Strikes, newMaturities, newVols,
                StrikeType, StrikeInterpolatorType, TimeInterpolatorType, TimeBasis, PillarLabels)
            {
                AssetId = AssetId,
                Currency = Currency,
                Name = Name,
            };
        }

        /// <summary>
        /// dVol/dStrike at an absolute strike. For delta surfaces the derivative
        /// in delta space is mapped to strike space via Black gamma (dDelta/dK).
        /// </summary>
        public double Dvdk(double strike, DateTime expiry, double fwd)
        {
            if (StrikeType == StrikeType.ForwardDelta)
            {
                var t = TimeBasis.CalculateYearFraction(OriginDate, expiry);
                var pillarIx = Array.BinarySearch(Expiries, expiry);
                // NOTE(review): 'pillarIx > 0' means an exact match on the FIRST
                // pillar (index 0) takes the rebuild path instead of reusing
                // _interpolators[0] — result is the same surface, just recomputed;
                // possibly intended to be '>= 0'. Confirm.
                var interpForMaturity = pillarIx > 0 ?
                    _interpolators[pillarIx] :
                    InterpolatorFactory.GetInterpolator(Strikes, Strikes.Select(k => GetVolForDeltaStrike(k, expiry, fwd)).ToArray(), StrikeInterpolatorType);
                var deltaK = GetDeltaStrikeForAbs(fwd, strike, t);
                var vol = GetVolForAbsoluteStrike(strike, expiry, fwd);
                var gamma = BlackFunctions.BlackGamma(fwd, strike, 0.0, t, vol);
                return interpForMaturity.FirstDerivative(deltaK) * gamma;
            }
            else
            {
                var interpForMaturity = InterpolatorFactory.GetInterpolator(Strikes,
                    Strikes.Select(k => GetVolForAbsoluteStrike(k, expiry, fwd)).ToArray(),
                    StrikeInterpolatorType);
                return interpForMaturity.FirstDerivative(strike);
            }
        }

        /// <summary>
        /// Risk-neutral CDF at a strike: digital put PV plus the smile correction
        /// vega * dVol/dK.
        /// </summary>
        public double CDF(DateTime expiry, double fwd, double strike)
        {
            var t = TimeBasis.CalculateYearFraction(OriginDate, expiry);
            var vol = GetVolForAbsoluteStrike(strike, expiry, fwd);
            // NOTE(review): 'nu', 'd1' and 'd2' below are computed but unused in
            // the analytic branch that follows — likely leftovers from the
            // finite-difference version kept commented out at the bottom.
            var nu = vol * Sqrt(t);
            (var d1, var d2) = BlackFunctions.D1d2(fwd, strike, t, vol);
            var vega = BlackFunctions.BlackVega(fwd, strike, 0.0, t, vol) / 0.01;
            var digi = BlackFunctions.BlackDigitalPV(fwd, strike, 0.0, t, vol, OptionType.P);
            var dvdk = Dvdk(strike, expiry, fwd);
            return digi + vega * dvdk;

            //var dk = fwd * 1e-10;
            //var volU = GetVolForAbsoluteStrike(strike + dk, expiry, fwd);
            //var volD = GetVolForAbsoluteStrike(strike - dk, expiry, fwd);
            //var pU = BlackFunctions.BlackPV(fwd, strike + dk, 0.0, t, volU, OptionType.P);
            //var pD = BlackFunctions.BlackPV(fwd, strike - dk, 0.0, t, volD, OptionType.P);
            //var dPdK = (pU - pD) / (2.0 * dk);
            //return dPdK;
        }

        /// <summary>
        /// Inverts CDF with Brent's method, first widening the bracket (up to 10
        /// doublings/halvings each way) until it straddles the target quantile.
        /// </summary>
        public double InverseCDF(DateTime expiry, double fwd, double p)
        {
            var t = TimeBasis.CalculateYearFraction(OriginDate, expiry);
            var targetFunc = new Func<double, double>(k => p - CDF(expiry, fwd, k));

            // initial bracket from the extreme-delta strikes
            var minK = GetAbsStrikeForDelta(fwd, -1e-10, t) / 2.0;
            var maxK = GetAbsStrikeForDelta(fwd, -(1.0 - 1e-10), t) * 10.0;

            var breakCount = 0;
            while (targetFunc(minK) < 0)
            {
                maxK = minK;
                minK /= 10.0;
                breakCount++;
                if (breakCount == 10)
                    return minK;
            }
            breakCount = 0;
            while (targetFunc(maxK) > 0)
            {
                minK = maxK;
                maxK *= 2.0;
                breakCount++;
                if (breakCount == 10)
                    return maxK;
            }

            //var b = Math.Solvers.Newton1D.MethodSolve2(targetFunc, fwd, 1e-8, 1000, fwd * 0.000000001);
            var b = Math.Solvers.Brent.BrentsMethodSolve(targetFunc, minK, maxK, 1e-10);
            if (b == minK || b == maxK)
                throw new Exception("Solution outside of solving bounds");
            return b;
        }

        // Serialises the surface for transport/persistence.
        public TO_GridVolSurface GetTransportObject() => new()
        {
            AssetId = AssetId,
            Name = Name,
            OriginDate = OriginDate,
            Currency = Currency.Ccy,
            Expiries = Expiries,
            FlatDeltaPoint = FlatDeltaPoint,
            FlatDeltaSmileInExtreme = FlatDeltaSmileInExtreme,
            OverrideSpotLag = OverrideSpotLag.ToString(),
            PillarLabels = PillarLabels,
            StrikeInterpolatorType = StrikeInterpolatorType,
            StrikeType = StrikeType,
            TimeBasis = TimeBasis,
            TimeInterpolatorType = TimeInterpolatorType,
            Strikes = Strikes,
            Volatilities = new MultiDimArray<double>(Volatilities)
        };
    }
}
/*
 * Location Intelligence APIs
 *
 * Incorporate our extensive geodata into everyday applications, business processes and workflows.
 *
 * OpenAPI spec version: 8.5.0
 *
 * Generated by: https://github.com/swagger-api/swagger-codegen.git
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

using System;
using System.Linq;
using System.IO;
using System.Text;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Runtime.Serialization;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;

namespace pb.locationIntelligence.Model
{
    /// <summary>
    /// AHJmailingAddress - mailing address of an Authority Having Jurisdiction,
    /// a plain data-transfer object with value equality over all fields.
    /// </summary>
    [DataContract]
    public partial class AHJmailingAddress : IEquatable<AHJmailingAddress>
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="AHJmailingAddress" /> class.
        /// All parts of the address are optional and default to null.
        /// </summary>
        /// <param name="FormattedAddress">FormattedAddress.</param>
        /// <param name="MainAddressLine">MainAddressLine.</param>
        /// <param name="AddressLastLine">AddressLastLine.</param>
        /// <param name="PlaceName">PlaceName.</param>
        /// <param name="AreaName1">AreaName1.</param>
        /// <param name="AreaName2">AreaName2.</param>
        /// <param name="AreaName3">AreaName3.</param>
        /// <param name="AreaName4">AreaName4.</param>
        /// <param name="PostCode">PostCode.</param>
        /// <param name="PostCodeExt">PostCodeExt.</param>
        /// <param name="Country">Country.</param>
        /// <param name="AddressNumber">AddressNumber.</param>
        /// <param name="StreetName">StreetName.</param>
        /// <param name="UnitType">UnitType.</param>
        /// <param name="UnitValue">UnitValue.</param>
        public AHJmailingAddress(string FormattedAddress = null, string MainAddressLine = null,
            string AddressLastLine = null, string PlaceName = null, string AreaName1 = null,
            string AreaName2 = null, string AreaName3 = null, string AreaName4 = null,
            string PostCode = null, string PostCodeExt = null, string Country = null,
            string AddressNumber = null, string StreetName = null, string UnitType = null,
            string UnitValue = null)
        {
            this.FormattedAddress = FormattedAddress;
            this.MainAddressLine = MainAddressLine;
            this.AddressLastLine = AddressLastLine;
            this.PlaceName = PlaceName;
            this.AreaName1 = AreaName1;
            this.AreaName2 = AreaName2;
            this.AreaName3 = AreaName3;
            this.AreaName4 = AreaName4;
            this.PostCode = PostCode;
            this.PostCodeExt = PostCodeExt;
            this.Country = Country;
            this.AddressNumber = AddressNumber;
            this.StreetName = StreetName;
            this.UnitType = UnitType;
            this.UnitValue = UnitValue;
        }

        /// <summary>
        /// Gets or Sets FormattedAddress
        /// </summary>
        [DataMember(Name="formattedAddress", EmitDefaultValue=false)]
        public string FormattedAddress { get; set; }

        /// <summary>
        /// Gets or Sets MainAddressLine
        /// </summary>
        [DataMember(Name="mainAddressLine", EmitDefaultValue=false)]
        public string MainAddressLine { get; set; }

        /// <summary>
        /// Gets or Sets AddressLastLine
        /// </summary>
        [DataMember(Name="addressLastLine", EmitDefaultValue=false)]
        public string AddressLastLine { get; set; }

        /// <summary>
        /// Gets or Sets PlaceName
        /// </summary>
        [DataMember(Name="placeName", EmitDefaultValue=false)]
        public string PlaceName { get; set; }

        /// <summary>
        /// Gets or Sets AreaName1
        /// </summary>
        [DataMember(Name="areaName1", EmitDefaultValue=false)]
        public string AreaName1 { get; set; }

        /// <summary>
        /// Gets or Sets AreaName2
        /// </summary>
        [DataMember(Name="areaName2", EmitDefaultValue=false)]
        public string AreaName2 { get; set; }

        /// <summary>
        /// Gets or Sets AreaName3
        /// </summary>
        [DataMember(Name="areaName3", EmitDefaultValue=false)]
        public string AreaName3 { get; set; }

        /// <summary>
        /// Gets or Sets AreaName4
        /// </summary>
        [DataMember(Name="areaName4", EmitDefaultValue=false)]
        public string AreaName4 { get; set; }

        /// <summary>
        /// Gets or Sets PostCode
        /// </summary>
        [DataMember(Name="postCode", EmitDefaultValue=false)]
        public string PostCode { get; set; }

        /// <summary>
        /// Gets or Sets PostCodeExt
        /// </summary>
        [DataMember(Name="postCodeExt", EmitDefaultValue=false)]
        public string PostCodeExt { get; set; }

        /// <summary>
        /// Gets or Sets Country
        /// </summary>
        [DataMember(Name="country", EmitDefaultValue=false)]
        public string Country { get; set; }

        /// <summary>
        /// Gets or Sets AddressNumber
        /// </summary>
        [DataMember(Name="addressNumber", EmitDefaultValue=false)]
        public string AddressNumber { get; set; }

        /// <summary>
        /// Gets or Sets StreetName
        /// </summary>
        [DataMember(Name="streetName", EmitDefaultValue=false)]
        public string StreetName { get; set; }

        /// <summary>
        /// Gets or Sets UnitType
        /// </summary>
        [DataMember(Name="unitType", EmitDefaultValue=false)]
        public string UnitType { get; set; }

        /// <summary>
        /// Gets or Sets UnitValue
        /// </summary>
        [DataMember(Name="unitValue", EmitDefaultValue=false)]
        public string UnitValue { get; set; }

        // Appends one " Name: value\n" line to the dump built by ToString.
        private static void AppendField(StringBuilder sb, string name, string value)
        {
            sb.Append(" ").Append(name).Append(": ").Append(value).Append("\n");
        }

        /// <summary>
        /// Returns the string presentation of the object
        /// </summary>
        /// <returns>String presentation of the object</returns>
        public override string ToString()
        {
            var sb = new StringBuilder();
            sb.Append("class AHJmailingAddress {\n");
            AppendField(sb, "FormattedAddress", FormattedAddress);
            AppendField(sb, "MainAddressLine", MainAddressLine);
            AppendField(sb, "AddressLastLine", AddressLastLine);
            AppendField(sb, "PlaceName", PlaceName);
            AppendField(sb, "AreaName1", AreaName1);
            AppendField(sb, "AreaName2", AreaName2);
            AppendField(sb, "AreaName3", AreaName3);
            AppendField(sb, "AreaName4", AreaName4);
            AppendField(sb, "PostCode", PostCode);
            AppendField(sb, "PostCodeExt", PostCodeExt);
            AppendField(sb, "Country", Country);
            AppendField(sb, "AddressNumber", AddressNumber);
            AppendField(sb, "StreetName", StreetName);
            AppendField(sb, "UnitType", UnitType);
            AppendField(sb, "UnitValue", UnitValue);
            sb.Append("}\n");
            return sb.ToString();
        }

        /// <summary>
        /// Returns the JSON string presentation of the object
        /// </summary>
        /// <returns>JSON string presentation of the object</returns>
        public string ToJson()
        {
            return JsonConvert.SerializeObject(this, Formatting.Indented);
        }

        /// <summary>
        /// Returns true if objects are equal
        /// </summary>
        /// <param name="obj">Object to be compared</param>
        /// <returns>Boolean</returns>
        public override bool Equals(object obj)
        {
            // 'as' yields null for foreign types, which Equals(other) rejects.
            return this.Equals(obj as AHJmailingAddress);
        }

        /// <summary>
        /// Returns true if AHJmailingAddress instances are equal
        /// </summary>
        /// <param name="other">Instance of AHJmailingAddress to be compared</param>
        /// <returns>Boolean</returns>
        public bool Equals(AHJmailingAddress other)
        {
            if (other == null)
                return false;

            // string.Equals(a, b) is true when both are null or both hold the
            // same ordinal value - exactly the generated
            // (a == b || a != null && a.Equals(b)) pattern, field by field.
            return string.Equals(this.FormattedAddress, other.FormattedAddress)
                && string.Equals(this.MainAddressLine, other.MainAddressLine)
                && string.Equals(this.AddressLastLine, other.AddressLastLine)
                && string.Equals(this.PlaceName, other.PlaceName)
                && string.Equals(this.AreaName1, other.AreaName1)
                && string.Equals(this.AreaName2, other.AreaName2)
                && string.Equals(this.AreaName3, other.AreaName3)
                && string.Equals(this.AreaName4, other.AreaName4)
                && string.Equals(this.PostCode, other.PostCode)
                && string.Equals(this.PostCodeExt, other.PostCodeExt)
                && string.Equals(this.Country, other.Country)
                && string.Equals(this.AddressNumber, other.AddressNumber)
                && string.Equals(this.StreetName, other.StreetName)
                && string.Equals(this.UnitType, other.UnitType)
                && string.Equals(this.UnitValue, other.UnitValue);
        }

        /// <summary>
        /// Gets the hash code
        /// </summary>
        /// <returns>Hash code</returns>
        public override int GetHashCode()
        {
            // credit: http://stackoverflow.com/a/263416/677735
            unchecked // Overflow is fine, just wrap
            {
                var hash = 41;
                // Fold each non-null field in, in declaration order, preserving
                // the generated hash = hash * 59 + field.GetHashCode() sequence.
                foreach (var field in new[]
                {
                    FormattedAddress, MainAddressLine, AddressLastLine, PlaceName,
                    AreaName1, AreaName2, AreaName3, AreaName4, PostCode,
                    PostCodeExt, Country, AddressNumber, StreetName, UnitType,
                    UnitValue
                })
                {
                    if (field != null)
                        hash = hash * 59 + field.GetHashCode();
                }
                return hash;
            }
        }
    }
}
using UnityEngine;
using System.Collections;
using System.Collections.Generic;

namespace Xft
{
    /// <summary>
    /// Builds and maintains the camera-facing quad-strip geometry for a rope
    /// effect: one pair of vertices per active EffectNode, written into a shared
    /// VertexPool segment owned by the EffectLayer.
    /// </summary>
    public class RopeData
    {
        // Active nodes for the current frame, sorted (see RefreshData).
        public List<EffectNode> NodeList = new List<EffectNode>();
        // Slice of the layer's shared vertex pool reserved for this rope.
        public VertexPool.VertexSegment Vertexsegment;
        public EffectLayer Owner;
        public EffectNode dummyNode;//will not be added to the activenode list. so will not be rendered, just to calc uv anim.

        public void Init(EffectLayer owner)
        {
            Owner = owner;
            Vertexsegment = owner.GetVertexPool().GetRopeVertexSeg(owner.MaxENodes);

            // The dummy node carries the layer's affectors so its UV state
            // animates like a real node; it is never rendered itself.
            dummyNode = new EffectNode(0, owner.ClientTransform, false, owner);
            List<Affector> afts = owner.InitAffectors(dummyNode);
            dummyNode.SetAffectorList(afts);
            dummyNode.SetRenderType(4);

            //use infinite life.
            dummyNode.Init(Vector3.zero, 0f, -1f, 0, 1f, 1f, Color.clear, Vector2.zero, Vector2.one);
        }

        // Rebuilds NodeList from the owner's active-node slots (skipping empty
        // slots) and sorts it via EffectNode's IComparable ordering.
        protected void RefreshData()
        {
            NodeList.Clear();

            for (int i = 0; i < Owner.MaxENodes; i++)
            {
                EffectNode node = Owner.ActiveENodes[i];
                if (node == null)
                    continue;
                NodeList.Add(node);
            }

            NodeList.Sort();
        }

        /// <summary>
        /// Per-frame update: refresh the node list, advance the dummy node's UV
        /// animation, then rewrite vertices and indices. A rope needs at least
        /// two nodes to produce any geometry.
        /// </summary>
        public void Update(float deltaTime)
        {
            RefreshData();

            if (NodeList.Count < 2)
                return;

            //just use for calculating uv change.
            dummyNode.Update(deltaTime);

            //force first node to the start point
            //if (Owner.RopeFixStartPos)
            //{
            //    EffectNode firstNode = NodeList[NodeList.Count - 1];
            //    firstNode.CurWorldPos = Owner.ClientTransform.position + Owner.EmitPoint;
            //}

            ClearDeadVerts();
            UpdateVertices();
            UpdateIndices();
        }

        // Collapses a node's two vertices onto the owner transform's position
        // with a fully transparent color, so dead nodes render as nothing.
        protected void ClearNodeVert(EffectNode node)
        {
            int baseIdx = Vertexsegment.VertStart + node.Index * 2;
            VertexPool pool = Vertexsegment.Pool;

            pool.Vertices[baseIdx] = Owner.ClientTransform.position;
            pool.Colors[baseIdx] = Color.clear;

            pool.Vertices[baseIdx + 1] = Owner.ClientTransform.position;
            pool.Colors[baseIdx + 1] = Color.clear;

            pool.VertChanged = true;
            pool.ColorChanged = true;
        }

        // Degenerates the vertices of every pooled (inactive) node and resets
        // this segment's index range before it is rebuilt in UpdateIndices.
        public void ClearDeadVerts()
        {
            for (int i = 0; i < Owner.MaxENodes; i++)
            {
                EffectNode node = Owner.AvailableENodes[i];
                if (node == null)
                    continue;
                ClearNodeVert(node);
            }

            Vertexsegment.ClearIndices();
        }

        /// <summary>
        /// Emits two triangles (one quad) between each consecutive pair of nodes,
        /// walking NodeList from the end (tail) towards the front (head).
        /// </summary>
        public void UpdateIndices()
        {
            int ecount = 0;
            VertexPool pool = Vertexsegment.Pool;

            for (int i = NodeList.Count - 1; i >= 0; i--)
            {
                EffectNode node = NodeList[i];
                EffectNode nextNode = i - 1 >= 0 ? NodeList[i - 1] : null;

                if (nextNode == null)
                    break;

                // each node owns two consecutive vertices in the pool
                int lastBaseIdx = Vertexsegment.VertStart + node.Index * 2;
                int baseIdx = Vertexsegment.VertStart + nextNode.Index * 2;

                int iidx = Vertexsegment.IndexStart + ecount * 6;

                pool.Indices[iidx + 0] = lastBaseIdx;
                pool.Indices[iidx + 1] = lastBaseIdx + 1;
                pool.Indices[iidx + 2] = baseIdx;

                pool.Indices[iidx + 3] = lastBaseIdx + 1;
                pool.Indices[iidx + 4] = baseIdx + 1;
                pool.Indices[iidx + 5] = baseIdx;

                ecount++;
            }

            pool.IndiceChanged = true;
        }

        /// <summary>
        /// Positions each node's vertex pair perpendicular to both the chain
        /// tangent and the camera ray (camera-facing ribbon) and accumulates the
        /// V texture coordinate along the rope's arc length.
        /// </summary>
        public void UpdateVertices()
        {
            float uvSegment = 0f;
            float uvLen = 0f;

            //NOTE: ONLY USE THE DUMMY NODE'S UV CHANGE
            Vector2 LowerLeftUV = dummyNode.LowerLeftUV;
            Vector2 UVDimensions = dummyNode.UVDimensions;

            // change to lower left coord?
            UVDimensions.y = -UVDimensions.y;
            LowerLeftUV.y = 1f - LowerLeftUV.y;

            // Either a fixed texture length, or the rope's current arc length
            // (so the texture does not stretch as the rope grows).
            float totalUVLen = Owner.RopeUVLen;
            if (Owner.RopeFixUVLen)
            {
                float t = 0;
                for (int i = 0; i < NodeList.Count - 1; i++)
                {
                    t += (NodeList[i + 1].GetWorldPos() - NodeList[i].GetWorldPos()).magnitude;
                }
                totalUVLen = t;
            }

            for (int i = NodeList.Count - 1; i >= 0; i--)
            {
                EffectNode node = NodeList[i];
                EffectNode prevNode = i + 1 < NodeList.Count ? NodeList[i + 1] : null;
                EffectNode nextNode = i - 1 >= 0 ? NodeList[i - 1] : null;

                // Tangent: forward difference at the ends, central difference inside.
                Vector3 chainTangent;
                if (nextNode == null)
                {
                    //tail node
                    chainTangent = node.GetWorldPos() - prevNode.GetWorldPos();
                }
                else if (prevNode == null)
                {
                    //head node
                    chainTangent = nextNode.GetWorldPos() - node.GetWorldPos();
                }
                else
                {
                    chainTangent = nextNode.GetWorldPos() - prevNode.GetWorldPos();
                }

                // Half-width vector: perpendicular to the tangent and the eye ray,
                // scaled by the rope width and the node's scale.
                Vector3 eyePos = Owner.MyCamera.transform.position;
                Vector3 vP1ToEye = eyePos - node.GetWorldPos();
                Vector3 vPerpendicular = Vector3.Cross(chainTangent, vP1ToEye);
                vPerpendicular.Normalize();
                vPerpendicular *= (Owner.RopeWidth * node.OriScaleX * 0.5f * node.Scale.x);

                //Debug.DrawRay(node.GetWorldPos(), vPerpendicular, Color.red, 1f);

                Vector3 pos0 = node.GetWorldPos() - vPerpendicular;
                Vector3 pos1 = node.GetWorldPos() + vPerpendicular;

                VertexPool pool = Vertexsegment.Pool;

                // V runs along the rope, proportional to accumulated arc length.
                //if (Owner.StretchType == 0)
                uvSegment = (uvLen / totalUVLen) * Mathf.Abs(UVDimensions.y);
                // else
                //     uvSegment = (uvLen / totalUVLen) * Mathf.Abs(UVDimensions.x);

                Vector2 uvCoord = Vector2.zero;
                int baseIdx = Vertexsegment.VertStart + node.Index * 2;

                pool.Vertices[baseIdx] = pos0;
                pool.Colors[baseIdx] = node.Color;
                //if (Owner.StretchType == 0)
                // {
                uvCoord.x = LowerLeftUV.x + UVDimensions.x;
                uvCoord.y = LowerLeftUV.y - uvSegment;
                // }
                // else
                // {
                //     uvCoord.x = LowerLeftUV.x + uvSegment;
                //     uvCoord.y = LowerLeftUV.y;
                // }
                pool.UVs[baseIdx] = uvCoord;

                //pos1
                pool.Vertices[baseIdx + 1] = pos1;
                pool.Colors[baseIdx + 1] = node.Color;
                //if (Owner.StretchType == 0)
                // {
                uvCoord.x = LowerLeftUV.x;
                uvCoord.y = LowerLeftUV.y - uvSegment;
                // }
                //else
                //{
                //    uvCoord.x = LowerLeftUV.x + uvSegment;
                //    uvCoord.y = LowerLeftUV.y - Mathf.Abs(UVDimensions.y);
                // }
                pool.UVs[baseIdx + 1] = uvCoord;

                // advance arc length towards the next node (or backwards at the head)
                if (nextNode != null)
                    uvLen += (nextNode.GetWorldPos() - node.GetWorldPos()).magnitude;
                else
                    uvLen += (node.GetWorldPos() - prevNode.GetWorldPos()).magnitude;
            }

            Vertexsegment.Pool.UVChanged = true;
            Vertexsegment.Pool.VertChanged = true;
            Vertexsegment.Pool.ColorChanged = true;
        }
    }
}
// Copyright (c) Microsoft Corporation
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
namespace Microsoft.Xbox.Services.Statistics.Manager
{
    using global::System;
    using global::System.Collections.Generic;
    using global::System.Linq;
    using global::System.Threading.Tasks;

    using Microsoft.Xbox.Services.Leaderboard;
    using Microsoft.Xbox.Services.Shared;

    /// <summary>
    /// Singleton manager for per-user stats value documents (SVDs).
    /// Local stat changes are applied to an in-memory document per user and
    /// flushed to the stats service either on request (RequestFlushToService),
    /// by a buffered timer, or by a periodic background poll (RunFlushTimer).
    /// Completed operations are queued as StatEvents and handed to the caller
    /// from DoWork().
    /// </summary>
    public class StatsManager : IStatsManager
    {
        private static readonly object instanceLock = new object();
        // Minimum interval the call-buffer timers enforce between flush requests.
        private static readonly TimeSpan TimePerCall = TimeSpan.FromSeconds(30);
        // Interval of the background poll that flushes any dirty documents.
        private static readonly TimeSpan StatsPollTime = TimeSpan.FromMinutes(5);
        private static IStatsManager instance;

        // Maps XboxUserId -> that user's stats value document.  The dictionary
        // instance itself is also used as the lock object guarding mutation
        // from the async continuations below.
        private readonly Dictionary<string, StatsValueDocument> userDocumentMap;
        // Pending events, drained (copied + cleared) by DoWork(); guarded by
        // locking eventList in AddEvent, but read under userDocumentMap's lock
        // in DoWork — NOTE(review): two different lock objects protect eventList;
        // confirm this is intentional.
        private readonly List<StatEvent> eventList;
        private readonly CallBufferTimer<XboxLiveUser> statTimer;
        private readonly CallBufferTimer<XboxLiveUser> statPriorityTimer;
        private readonly StatsService statsService;
        private readonly LeaderboardService leaderboardService;

        // Throws ArgumentException if the user is null, has no id, or was never
        // added via AddLocalUser.  NOTE(review): "user" is passed as the message
        // argument of ArgumentException, not as paramName — confirm intended.
        private void CheckUserValid(XboxLiveUser user)
        {
            if (user == null || user.XboxUserId == null || !this.userDocumentMap.ContainsKey(user.XboxUserId))
            {
                throw new ArgumentException("user");
            }
        }

        /// <summary>
        /// Lazily-created singleton, using double-checked locking on instanceLock.
        /// Returns a MockStatsManager when XboxLive.UseMockServices is set.
        /// </summary>
        internal static IStatsManager Instance
        {
            get
            {
                if (instance == null)
                {
                    lock (instanceLock)
                    {
                        if (instance == null)
                        {
                            instance = XboxLive.UseMockServices ? new MockStatsManager() : (IStatsManager)new StatsManager();
                        }
                    }
                }

                return instance;
            }
        }

        private StatsManager()
        {
            this.userDocumentMap = new Dictionary<string, StatsValueDocument>();
            this.eventList = new List<StatEvent>();
            // Both timers share the same completion handler; the priority timer
            // only differs in which Fire() call routes to it.
            this.statTimer = new CallBufferTimer<XboxLiveUser>(TimePerCall);
            this.statTimer.Completed += this.TimerCompleteCallback;
            this.statPriorityTimer = new CallBufferTimer<XboxLiveUser>(TimePerCall);
            this.statPriorityTimer.Completed += this.TimerCompleteCallback;
            this.statsService = new StatsService();
            this.leaderboardService = new LeaderboardService();
            // Kick off the self-rescheduling background flush loop.
            RunFlushTimer();
        }

        /// <summary>
        /// Registers a local user: inserts a placeholder document immediately,
        /// then asynchronously fetches the user's SVD from the service and
        /// merges/replaces the placeholder.  Queues a LocalUserAdded event when
        /// the fetch completes (successfully or not).
        /// </summary>
        public void AddLocalUser(XboxLiveUser user)
        {
            if (user == null)
            {
                throw new ArgumentException("user");
            }

            string xboxUserId = user.XboxUserId;
            // NOTE(review): this ContainsKey/Add pair runs without holding the
            // userDocumentMap lock, while the continuation below locks it —
            // confirm AddLocalUser is only ever called from a single thread.
            if (this.userDocumentMap.ContainsKey(xboxUserId))
            {
                throw new ArgumentException("User already in map");
            }

            this.userDocumentMap.Add(xboxUserId, new StatsValueDocument(null));
            this.statsService.GetStatsValueDocument(user).ContinueWith(statsValueDocTask =>
            {
                if (user.IsSignedIn)
                {
                    lock (this.userDocumentMap)
                    {
                        // User may have been removed while the fetch was in flight.
                        if (this.userDocumentMap.ContainsKey(xboxUserId))
                        {
                            StatsValueDocument document;
                            if (statsValueDocTask.IsFaulted) // if there was an error, but the user is signed in, we assume offline sign in
                            {
                                document = this.userDocumentMap[xboxUserId];
                                document.State = StatsValueDocument.StatValueDocumentState.OfflineNotLoaded;
                            }
                            else
                            {
                                document = statsValueDocTask.Result;
                                // Carry any stats set on the placeholder over into
                                // the freshly fetched document.
                                this.userDocumentMap[xboxUserId].MergeStatDocument(document);
                            }

                            // When the document asks to be flushed, push it to the
                            // service as long as the user is still registered.
                            document.FlushEvent += (sender, e) =>
                            {
                                if (this.userDocumentMap.ContainsKey(xboxUserId))
                                {
                                    this.FlushToService(user, document);
                                }
                            };
                            this.userDocumentMap[xboxUserId] = document;
                        }
                    }
                }

                this.AddEvent(new StatEvent(StatEventType.LocalUserAdded, user, statsValueDocTask.Exception, new StatEventArgs()));
            });
        }

        /// <summary>
        /// Unregisters a local user.  Pending changes are applied first; if the
        /// document is dirty it is pushed to the service (falling back to the
        /// offline path on failure) before removal.  Queues a LocalUserRemoved
        /// event in either case.
        /// </summary>
        public void RemoveLocalUser(XboxLiveUser user)
        {
            this.CheckUserValid(user);
            var xboxUserId = user.XboxUserId;
            var svd = this.userDocumentMap[xboxUserId];
            svd.DoWork(); // before removing the user, apply any pending changes for this user.
            if (svd.IsDirty)
            {
                this.statsService.UpdateStatsValueDocument(user, svd).ContinueWith((continuationTask) =>
                {
                    lock (this.userDocumentMap)
                    {
                        if (this.userDocumentMap.ContainsKey(xboxUserId))
                        {
                            if (continuationTask.IsFaulted && this.ShouldWriteOffline(continuationTask.Exception))
                            {
                                this.WriteOffline(user, svd);
                            }

                            this.AddEvent(new StatEvent(StatEventType.LocalUserRemoved, user, continuationTask.Exception, new StatEventArgs()));
                            this.userDocumentMap.Remove(xboxUserId);
                        }
                    }
                });
            }
            else
            {
                this.AddEvent(new StatEvent(StatEventType.LocalUserRemoved, user, null, new StatEventArgs()));
                lock (this.userDocumentMap)
                {
                    this.userDocumentMap.Remove(xboxUserId);
                }
            }
        }

        /// <summary>
        /// Returns the named stat for the user from the in-memory document.
        /// </summary>
        public StatValue GetStat(XboxLiveUser user, string statName)
        {
            this.CheckUserValid(user);
            if (statName == null)
            {
                throw new ArgumentException("statName");
            }

            return this.userDocumentMap[user.XboxUserId].GetStat(statName);
        }

        /// <summary>
        /// Returns the names of all stats currently held for the user.
        /// </summary>
        public List<string> GetStatNames(XboxLiveUser user)
        {
            this.CheckUserValid(user);
            return this.userDocumentMap[user.XboxUserId].GetStatNames();
        }

        /// <summary>
        /// Sets a stat to a floating-point value in the local document only;
        /// nothing is sent to the service until a flush occurs.
        /// </summary>
        public void SetStatAsNumber(XboxLiveUser user, string statName, double value)
        {
            this.CheckUserValid(user);
            if (statName == null)
            {
                throw new ArgumentException("statName");
            }

            this.userDocumentMap[user.XboxUserId].SetStat(statName, value);
        }

        /// <summary>
        /// Sets a stat to a 64-bit integer value in the local document only.
        /// </summary>
        public void SetStatAsInteger(XboxLiveUser user, string statName, Int64 value)
        {
            this.CheckUserValid(user);
            if (statName == null)
            {
                throw new ArgumentException("statName");
            }

            this.userDocumentMap[user.XboxUserId].SetStat(statName, value);
        }

        /// <summary>
        /// Sets a stat to a string value in the local document only.
        /// </summary>
        public void SetStatAsString(XboxLiveUser user, string statName, string value)
        {
            this.CheckUserValid(user);
            if (statName == null)
            {
                throw new ArgumentException("statName");
            }

            this.userDocumentMap[user.XboxUserId].SetStat(statName, value);
        }

        /// <summary>
        /// Deletes a stat from the user's local document.
        /// </summary>
        public void DeleteStat(XboxLiveUser user, string statName)
        {
            this.CheckUserValid(user);
            if (statName == null)
            {
                throw new ArgumentException("statName");
            }

            this.userDocumentMap[user.XboxUserId].DeleteStat(statName);
        }

        /// <summary>
        /// Requests that this user's stats be flushed to the service.  The
        /// request is rate-limited through a call-buffer timer; isHighPriority
        /// selects the priority timer (same TimePerCall interval).
        /// </summary>
        public void RequestFlushToService(XboxLiveUser user, bool isHighPriority = false)
        {
            this.CheckUserValid(user);
            List<XboxLiveUser> userVec = new List<XboxLiveUser>(1)
            {
                user
            };

            if (isHighPriority)
            {
                this.statPriorityTimer.Fire(userVec);
            }
            else
            {
                this.statTimer.Fire(userVec);
            }
        }

        /// <summary>
        /// Pumps all user documents and returns (then clears) the list of
        /// events that completed since the previous call.  Intended to be
        /// called regularly, e.g. once per frame.
        /// </summary>
        public List<StatEvent> DoWork()
        {
            lock (this.userDocumentMap)
            {
                var copyList = this.eventList.ToList();
                foreach (var userContextPair in this.userDocumentMap)
                {
                    userContextPair.Value.DoWork();
                }

                this.eventList.Clear();
                return copyList;
            }
        }

        private bool ShouldWriteOffline(AggregateException exception)
        {
            return false; // offline not implemented yet
        }

        private void WriteOffline(XboxLiveUser user, StatsValueDocument document)
        {
            // TODO: implement
        }

        /// <summary>
        /// Pushes a document to the service.  If the document is not in the
        /// Loaded state, the service copy is fetched and merged first so local
        /// changes are applied on top of the latest server state.
        /// </summary>
        private void FlushToService(XboxLiveUser user, StatsValueDocument document)
        {
            if (user == null)
            {
                // User could have been removed.
                return;
            }

            document.ClearDirtyState();
            if (document.State != StatsValueDocument.StatValueDocumentState.Loaded) // if not loaded, try and get the SVD from the service
            {
                this.statsService.GetStatsValueDocument(user).ContinueWith((continuationTask) =>
                {
                    lock (this.userDocumentMap)
                    {
                        if (this.userDocumentMap.ContainsKey(user.XboxUserId))
                        {
                            if (!continuationTask.IsFaulted)
                            {
                                var updatedSvd = continuationTask.Result;
                                this.userDocumentMap[user.XboxUserId].MergeStatDocument(updatedSvd);
                                UpdateStatsValueDocument(user, updatedSvd);
                            }
                            else
                            {
                                // Fetch failed: push whatever document we have.
                                UpdateStatsValueDocument(user, this.userDocumentMap[user.XboxUserId]);
                            }
                        }
                        else
                        {
                            // log error: User not found in flush_to_service lambda
                        }
                    }
                });
            }
            else
            {
                UpdateStatsValueDocument(user, document);
            }
        }

        /// <summary>
        /// Sends the document to the service and queues a StatUpdateComplete
        /// event on completion.  On failure, falls back to the offline path if
        /// ShouldWriteOffline allows it (currently it never does).
        /// </summary>
        private void UpdateStatsValueDocument(XboxLiveUser user, StatsValueDocument document)
        {
            if (user == null)
            {
                // User could have been removed.
                return;
            }

            this.statsService.UpdateStatsValueDocument(user, document).ContinueWith((continuationTask) =>
            {
                lock (this.userDocumentMap)
                {
                    if (this.userDocumentMap.ContainsKey(user.XboxUserId))
                    {
                        if (continuationTask.IsFaulted)
                        {
                            if (this.ShouldWriteOffline(continuationTask.Exception))
                            {
                                var userSvd = this.userDocumentMap[user.XboxUserId];
                                if (userSvd.State == StatsValueDocument.StatValueDocumentState.Loaded)
                                {
                                    userSvd.State = StatsValueDocument.StatValueDocumentState.OfflineLoaded;
                                }

                                this.WriteOffline(user, userSvd);
                            }
                            else
                            {
                                // log error: Stats manager could not write stats value document
                            }
                        }

                        this.AddEvent(new StatEvent(StatEventType.StatUpdateComplete, user, continuationTask.Exception, new StatEventArgs()));
                    }
                }
            });
        }

        // Appends an event for DoWork() to return later.
        internal void AddEvent(StatEvent statEvent)
        {
            lock (this.eventList)
            {
                this.eventList.Add(statEvent);
            }
        }

        // Self-rescheduling background poll: every StatsPollTime, flush any
        // dirty documents, then schedule the next poll (even if flushing threw).
        private void RunFlushTimer()
        {
            // Setup another refresh for the future.
            Task.Delay(StatsPollTime).ContinueWith(
                delayTask =>
                {
                    try
                    {
                        lock (this.userDocumentMap)
                        {
                            foreach (var statValueDoc in this.userDocumentMap.Values)
                            {
                                if (statValueDoc.IsDirty)
                                {
                                    this.FlushToService(statValueDoc.User, statValueDoc);
                                }
                            }
                        }
                    }
                    finally
                    {
                        this.RunFlushTimer();
                    }
                });
        }

        // Shared completion handler for both call-buffer timers; only the first
        // buffered user is flushed per firing.
        private void TimerCompleteCallback(object caller, CallBufferEventArgs<XboxLiveUser> returnObject)
        {
            if (returnObject.Elements.Count != 0)
            {
                this.RequestFlushToServiceCallback(returnObject.Elements[0]);
            }
        }

        // NOTE(review): reads userDocumentMap without taking its lock, unlike
        // the continuations above — confirm the timer callback cannot race a
        // RemoveLocalUser continuation.
        private void RequestFlushToServiceCallback(XboxLiveUser user)
        {
            StatsValueDocument document;
            if (this.userDocumentMap.TryGetValue(user.XboxUserId, out document) && document.IsDirty)
            {
                document.DoWork();
                this.FlushToService(user, document);
            }
        }

        /// <summary>
        /// Asynchronously fetches a leaderboard and queues a
        /// GetLeaderboardComplete event with the result.
        /// NOTE(review): responseTask.Result is read even when the task is
        /// faulted, which would throw inside the continuation — confirm the
        /// service task never faults, or guard the Result access.
        /// </summary>
        public void GetLeaderboard(XboxLiveUser user, LeaderboardQuery query)
        {
            this.CheckUserValid(user);
            this.leaderboardService.GetLeaderboardAsync(user, query).ContinueWith(responseTask =>
            {
                this.AddEvent(
                    new StatEvent(StatEventType.GetLeaderboardComplete,
                        user,
                        responseTask.Exception,
                        new LeaderboardResultEventArgs(responseTask.Result)
                    ));
            });
        }
    }
}
// Copyright (C) 2014 dot42
//
// Original filename: Android.Graphics.Drawable.Shapes.cs
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// NOTE: This file contains dot42-generated binding stubs for the Android
// android.graphics.drawable.shapes package.  All method bodies are empty
// placeholders (or return default(...)): the [Dot42.DexImport] attributes map
// each member to the real Android implementation at compile/link time.
// Do not add logic here by hand — regenerate the bindings instead.

#pragma warning disable 1717
namespace Android.Graphics.Drawable.Shapes
{
    /// <summary>
    /// <para>Creates an arc shape. The arc shape starts at a specified angle and sweeps clockwise, drawing slices of pie. The arc can be drawn to a Canvas with its own draw() method, but more graphical control is available if you instead pass the ArcShape to a android.graphics.drawable.ShapeDrawable. </para>
    /// </summary>
    /// <java-name>
    /// android/graphics/drawable/shapes/ArcShape
    /// </java-name>
    [Dot42.DexImport("android/graphics/drawable/shapes/ArcShape", AccessFlags = 33)]
    public partial class ArcShape : global::Android.Graphics.Drawable.Shapes.RectShape
 /* scope: __dot42__ */
    {
        /// <summary>
        /// <para>ArcShape constructor.</para><para></para>
        /// </summary>
        [Dot42.DexImport("<init>", "(FF)V", AccessFlags = 1)]
        public ArcShape(float startAngle, float sweepAngle) /* MethodBuilder.Create */
        {
        }

        /// <summary>
        /// <para>Draw this shape into the provided Canvas, with the provided Paint. Before calling this, you must call resize(float,float).</para><para></para>
        /// </summary>
        /// <java-name>
        /// draw
        /// </java-name>
        [Dot42.DexImport("draw", "(Landroid/graphics/Canvas;Landroid/graphics/Paint;)V", AccessFlags = 1)]
        public override void Draw(global::Android.Graphics.Canvas canvas, global::Android.Graphics.Paint paint) /* MethodBuilder.Create */
        {
        }

        // Hidden parameterless constructor added by the binding generator so
        // derived types can be materialized; not part of the public API.
        [global::System.ComponentModel.EditorBrowsable(global::System.ComponentModel.EditorBrowsableState.Never)]
        internal ArcShape() /* TypeBuilder.AddDefaultConstructor */
        {
        }
    }

    /// <summary>
    /// <para>Defines a rectangle shape. The rectangle can be drawn to a Canvas with its own draw() method, but more graphical control is available if you instead pass the RectShape to a android.graphics.drawable.ShapeDrawable. </para>
    /// </summary>
    /// <java-name>
    /// android/graphics/drawable/shapes/RectShape
    /// </java-name>
    [Dot42.DexImport("android/graphics/drawable/shapes/RectShape", AccessFlags = 33)]
    public partial class RectShape : global::Android.Graphics.Drawable.Shapes.Shape
 /* scope: __dot42__ */
    {
        /// <summary>
        /// <para>RectShape constructor. </para>
        /// </summary>
        [Dot42.DexImport("<init>", "()V", AccessFlags = 1)]
        public RectShape() /* MethodBuilder.Create */
        {
        }

        /// <summary>
        /// <para>Draw this shape into the provided Canvas, with the provided Paint. Before calling this, you must call resize(float,float).</para><para></para>
        /// </summary>
        /// <java-name>
        /// draw
        /// </java-name>
        [Dot42.DexImport("draw", "(Landroid/graphics/Canvas;Landroid/graphics/Paint;)V", AccessFlags = 1)]
        public override void Draw(global::Android.Graphics.Canvas canvas, global::Android.Graphics.Paint paint) /* MethodBuilder.Create */
        {
        }

        /// <summary>
        /// <para>Callback method called when resize(float,float) is executed.</para><para></para>
        /// </summary>
        /// <java-name>
        /// onResize
        /// </java-name>
        [Dot42.DexImport("onResize", "(FF)V", AccessFlags = 4)]
        protected internal override void OnResize(float width, float height) /* MethodBuilder.Create */
        {
        }

        /// <summary>
        /// <para>Returns the RectF that defines this rectangle's bounds. </para>
        /// </summary>
        /// <java-name>
        /// rect
        /// </java-name>
        [Dot42.DexImport("rect", "()Landroid/graphics/RectF;", AccessFlags = 20)]
        protected internal global::Android.Graphics.RectF Rect() /* MethodBuilder.Create */
        {
            return default(global::Android.Graphics.RectF);
        }

        // 'new virtual' hides the base Clone() to emulate Java's covariant
        // return type (returns RectShape rather than Shape).
        /// <java-name>
        /// clone
        /// </java-name>
        [Dot42.DexImport("clone", "()Landroid/graphics/drawable/shapes/RectShape;", AccessFlags = 1)]
        public new virtual global::Android.Graphics.Drawable.Shapes.RectShape Clone() /* MethodBuilder.Create */
        {
            return default(global::Android.Graphics.Drawable.Shapes.RectShape);
        }
    }

    /// <summary>
    /// <para>Creates a rounded-corner rectangle. Optionally, an inset (rounded) rectangle can be included (to make a sort of "O" shape). The rounded rectangle can be drawn to a Canvas with its own draw() method, but more graphical control is available if you instead pass the RoundRectShape to a android.graphics.drawable.ShapeDrawable. </para>
    /// </summary>
    /// <java-name>
    /// android/graphics/drawable/shapes/RoundRectShape
    /// </java-name>
    [Dot42.DexImport("android/graphics/drawable/shapes/RoundRectShape", AccessFlags = 33)]
    public partial class RoundRectShape : global::Android.Graphics.Drawable.Shapes.RectShape
 /* scope: __dot42__ */
    {
        /// <summary>
        /// <para>RoundRectShape constructor. Specifies an outer (round)rect and an optional inner (round)rect.</para><para></para>
        /// </summary>
        [Dot42.DexImport("<init>", "([FLandroid/graphics/RectF;[F)V", AccessFlags = 1)]
        public RoundRectShape(float[] outerRadii, global::Android.Graphics.RectF inset, float[] innerRadii) /* MethodBuilder.Create */
        {
        }

        /// <summary>
        /// <para>Draw this shape into the provided Canvas, with the provided Paint. Before calling this, you must call resize(float,float).</para><para></para>
        /// </summary>
        /// <java-name>
        /// draw
        /// </java-name>
        [Dot42.DexImport("draw", "(Landroid/graphics/Canvas;Landroid/graphics/Paint;)V", AccessFlags = 1)]
        public override void Draw(global::Android.Graphics.Canvas canvas, global::Android.Graphics.Paint paint) /* MethodBuilder.Create */
        {
        }

        /// <summary>
        /// <para>Callback method called when resize(float,float) is executed.</para><para></para>
        /// </summary>
        /// <java-name>
        /// onResize
        /// </java-name>
        [Dot42.DexImport("onResize", "(FF)V", AccessFlags = 4)]
        protected internal override void OnResize(float width, float height) /* MethodBuilder.Create */
        {
        }

        // 'new virtual' hides the base Clone() to emulate Java's covariant
        // return type (returns RoundRectShape rather than RectShape).
        /// <java-name>
        /// clone
        /// </java-name>
        [Dot42.DexImport("clone", "()Landroid/graphics/drawable/shapes/RoundRectShape;", AccessFlags = 1)]
        public new virtual global::Android.Graphics.Drawable.Shapes.RoundRectShape Clone() /* MethodBuilder.Create */
        {
            return default(global::Android.Graphics.Drawable.Shapes.RoundRectShape);
        }

        // Hidden parameterless constructor added by the binding generator.
        [global::System.ComponentModel.EditorBrowsable(global::System.ComponentModel.EditorBrowsableState.Never)]
        internal RoundRectShape() /* TypeBuilder.AddDefaultConstructor */
        {
        }
    }

    /// <summary>
    /// <para>Defines a generic graphical "shape." Any Shape can be drawn to a Canvas with its own draw() method, but more graphical control is available if you instead pass it to a android.graphics.drawable.ShapeDrawable. </para>
    /// </summary>
    /// <java-name>
    /// android/graphics/drawable/shapes/Shape
    /// </java-name>
    [Dot42.DexImport("android/graphics/drawable/shapes/Shape", AccessFlags = 1057)]
    public abstract partial class Shape : global::Java.Lang.ICloneable
 /* scope: __dot42__ */
    {
        [Dot42.DexImport("<init>", "()V", AccessFlags = 1)]
        public Shape() /* MethodBuilder.Create */
        {
        }

        /// <summary>
        /// <para>Returns the width of the Shape. </para>
        /// </summary>
        /// <java-name>
        /// getWidth
        /// </java-name>
        [Dot42.DexImport("getWidth", "()F", AccessFlags = 17)]
        public float GetWidth() /* MethodBuilder.Create */
        {
            return default(float);
        }

        /// <summary>
        /// <para>Returns the height of the Shape. </para>
        /// </summary>
        /// <java-name>
        /// getHeight
        /// </java-name>
        [Dot42.DexImport("getHeight", "()F", AccessFlags = 17)]
        public float GetHeight() /* MethodBuilder.Create */
        {
            return default(float);
        }

        /// <summary>
        /// <para>Draw this shape into the provided Canvas, with the provided Paint. Before calling this, you must call resize(float,float).</para><para></para>
        /// </summary>
        /// <java-name>
        /// draw
        /// </java-name>
        [Dot42.DexImport("draw", "(Landroid/graphics/Canvas;Landroid/graphics/Paint;)V", AccessFlags = 1025)]
        public abstract void Draw(global::Android.Graphics.Canvas canvas, global::Android.Graphics.Paint paint) /* MethodBuilder.Create */ ;

        /// <summary>
        /// <para>Resizes the dimensions of this shape. Must be called before draw(Canvas,Paint).</para><para></para>
        /// </summary>
        /// <java-name>
        /// resize
        /// </java-name>
        [Dot42.DexImport("resize", "(FF)V", AccessFlags = 17)]
        public void Resize(float width, float height) /* MethodBuilder.Create */
        {
        }

        /// <summary>
        /// <para>Checks whether the Shape is opaque. Default impl returns true. Override if your subclass can be opaque.</para><para></para>
        /// </summary>
        /// <returns>
        /// <para>true if any part of the drawable is <b>not</b> opaque. </para>
        /// </returns>
        /// <java-name>
        /// hasAlpha
        /// </java-name>
        [Dot42.DexImport("hasAlpha", "()Z", AccessFlags = 1)]
        public virtual bool HasAlpha() /* MethodBuilder.Create */
        {
            return default(bool);
        }

        /// <summary>
        /// <para>Callback method called when resize(float,float) is executed.</para><para></para>
        /// </summary>
        /// <java-name>
        /// onResize
        /// </java-name>
        [Dot42.DexImport("onResize", "(FF)V", AccessFlags = 4)]
        protected internal virtual void OnResize(float width, float height) /* MethodBuilder.Create */
        {
        }

        /// <java-name>
        /// clone
        /// </java-name>
        [Dot42.DexImport("clone", "()Landroid/graphics/drawable/shapes/Shape;", AccessFlags = 1)]
        public virtual global::Android.Graphics.Drawable.Shapes.Shape Clone() /* MethodBuilder.Create */
        {
            return default(global::Android.Graphics.Drawable.Shapes.Shape);
        }

        /// <summary>
        /// <para>Returns the width of the Shape. </para>
        /// </summary>
        /// <java-name>
        /// getWidth
        /// </java-name>
        public float Width
        {
            [Dot42.DexImport("getWidth", "()F", AccessFlags = 17)]
            get
            {
                return GetWidth();
            }
        }

        /// <summary>
        /// <para>Returns the height of the Shape. </para>
        /// </summary>
        /// <java-name>
        /// getHeight
        /// </java-name>
        public float Height
        {
            [Dot42.DexImport("getHeight", "()F", AccessFlags = 17)]
            get
            {
                return GetHeight();
            }
        }
    }

    /// <summary>
    /// <para>Creates geometric paths, utilizing the android.graphics.Path class. The path can be drawn to a Canvas with its own draw() method, but more graphical control is available if you instead pass the PathShape to a android.graphics.drawable.ShapeDrawable. </para>
    /// </summary>
    /// <java-name>
    /// android/graphics/drawable/shapes/PathShape
    /// </java-name>
    [Dot42.DexImport("android/graphics/drawable/shapes/PathShape", AccessFlags = 33)]
    public partial class PathShape : global::Android.Graphics.Drawable.Shapes.Shape
 /* scope: __dot42__ */
    {
        /// <summary>
        /// <para>PathShape constructor.</para><para></para>
        /// </summary>
        [Dot42.DexImport("<init>", "(Landroid/graphics/Path;FF)V", AccessFlags = 1)]
        public PathShape(global::Android.Graphics.Path path, float stdWidth, float stdHeight) /* MethodBuilder.Create */
        {
        }

        /// <summary>
        /// <para>Draw this shape into the provided Canvas, with the provided Paint. Before calling this, you must call resize(float,float).</para><para></para>
        /// </summary>
        /// <java-name>
        /// draw
        /// </java-name>
        [Dot42.DexImport("draw", "(Landroid/graphics/Canvas;Landroid/graphics/Paint;)V", AccessFlags = 1)]
        public override void Draw(global::Android.Graphics.Canvas canvas, global::Android.Graphics.Paint paint) /* MethodBuilder.Create */
        {
        }

        /// <summary>
        /// <para>Callback method called when resize(float,float) is executed.</para><para></para>
        /// </summary>
        /// <java-name>
        /// onResize
        /// </java-name>
        [Dot42.DexImport("onResize", "(FF)V", AccessFlags = 4)]
        protected internal override void OnResize(float width, float height) /* MethodBuilder.Create */
        {
        }

        // 'new virtual' hides the base Clone() to emulate Java's covariant
        // return type (returns PathShape rather than Shape).
        /// <java-name>
        /// clone
        /// </java-name>
        [Dot42.DexImport("clone", "()Landroid/graphics/drawable/shapes/PathShape;", AccessFlags = 1)]
        public new virtual global::Android.Graphics.Drawable.Shapes.PathShape Clone() /* MethodBuilder.Create */
        {
            return default(global::Android.Graphics.Drawable.Shapes.PathShape);
        }

        // Hidden parameterless constructor added by the binding generator.
        [global::System.ComponentModel.EditorBrowsable(global::System.ComponentModel.EditorBrowsableState.Never)]
        internal PathShape() /* TypeBuilder.AddDefaultConstructor */
        {
        }
    }

    /// <summary>
    /// <para>Defines an oval shape. The oval can be drawn to a Canvas with its own draw() method, but more graphical control is available if you instead pass the OvalShape to a android.graphics.drawable.ShapeDrawable. </para>
    /// </summary>
    /// <java-name>
    /// android/graphics/drawable/shapes/OvalShape
    /// </java-name>
    [Dot42.DexImport("android/graphics/drawable/shapes/OvalShape", AccessFlags = 33)]
    public partial class OvalShape : global::Android.Graphics.Drawable.Shapes.RectShape
 /* scope: __dot42__ */
    {
        /// <summary>
        /// <para>OvalShape constructor. </para>
        /// </summary>
        [Dot42.DexImport("<init>", "()V", AccessFlags = 1)]
        public OvalShape() /* MethodBuilder.Create */
        {
        }

        /// <summary>
        /// <para>Draw this shape into the provided Canvas, with the provided Paint. Before calling this, you must call resize(float,float).</para><para></para>
        /// </summary>
        /// <java-name>
        /// draw
        /// </java-name>
        [Dot42.DexImport("draw", "(Landroid/graphics/Canvas;Landroid/graphics/Paint;)V", AccessFlags = 1)]
        public override void Draw(global::Android.Graphics.Canvas canvas, global::Android.Graphics.Paint paint) /* MethodBuilder.Create */
        {
        }
    }
}
/******************************************************************** The Multiverse Platform is made available under the MIT License. Copyright (c) 2012 The Multiverse Foundation Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. *********************************************************************/ #region Using directives using System; using System.Threading; using System.Collections.Generic; using System.Text; using System.Diagnostics; using System.Net; using System.Net.Sockets; using log4net; // using SortedDictionary<K, V> = Multiverse.Utility.SortedDictionary<K, V>; #endregion namespace Multiverse.Network.Rdp { public class ConnectionCallback { public AsyncCallback pfnCallback; public object state; public RdpPacket packet; } /// <summary> /// Exception that is thrown when we try to send a packet /// that is too large for this connection. 
/// </summary> public class RdpFragmentationException : Exception { int dataLength; int dataCapacity; public RdpFragmentationException(string msg, int dataLength, int dataCapacity) : base(msg) { this.dataLength = dataLength; this.dataCapacity = dataCapacity; } public int DataCapacity { get { return dataCapacity; } } public int DataLength { get { return dataLength; } } } public enum ConnectionState { /// <summary> /// The CLOSED state exists when no connection exists and there /// is no connection record allocated. /// </summary> Closed, /// <summary> /// The LISTEN state is entered after a passive Open request is /// processed. A connection record is allocated and RDP waits /// for an active request to establish a connection from a /// remote site. /// </summary> Listen, /// <summary> /// The SYN-SENT state is entered after processing an active /// Open request. A connection record is allocated, an initial /// sequence number is generated, and a SYN segment is sent to /// the remote site. RDP then waits in the SYN-SENT state for /// acknowledgement of its Open request. /// </summary> SynSent, /// <summary> /// The SYN-RCVD state may be reached from either the LISTEN /// state or from the SYN-SENT state. SYN-RCVD is reached from /// the LISTEN state when a SYN segment requesting a connection /// is received from a remote host. In reply, the local RDP /// generates an initial sequence number for its side of the /// connection, and then sends the sequence number and an /// acknowledgement of the SYN segment to the remote site. It /// then waits for an acknowledgement. /// /// The SYN-RCVD state is reached from the SYN-SENT state when a /// SYN segment is received from the remote host without an /// accompanying acknowledgement of the SYN segment sent to that /// remote host by the local RDP. This situation is caused by /// simultaneous attempts to open a connection, with the SYN /// segments passing each other in transit. 
The action is to /// repeat the SYN segment with the same sequence number, but /// now including an ACK of the remote host's SYN segment to /// indicate acceptance of the Open request. /// </summary> SynRcvd, /// <summary> /// The OPEN state exists when a connection has been established /// by the successful exchange of state information between the /// two sides of the connection. Each side has exchanged and /// received such data as initial sequence number, maximum /// segment size, and maximum number of unacknowledged segments /// that may be outstanding. In the Open state data may be sent /// between the two parties of the connection. /// </summary> Open, /// <summary> /// The CLOSE-WAIT state is entered from either a Close request /// or from the receipt of an RST segment from the remote site. /// RDP has sent an RST segment and is waiting a delay period /// for activity on the connection to complete. /// </summary> CloseWait } /// <summary> /// This class contains a modified form of RDP. It is implemented on /// top of UDP, so it does not itself include port numbers or checksums. /// /// The ack behavior has been modified so that for retransmitted /// packets, the ack number is updated to the latest (though eacks are /// not updated). /// /// The behavior in the close_wait state has also been changed to send /// a reset response to any packets received while in close wait (same /// behavior as for the closed state). /// </summary> public class RdpConnection { // Create a logger for use in this class private static readonly log4net.ILog log = log4net.LogManager.GetLogger(typeof(RdpConnection)); ConnectionState state = ConnectionState.Closed; object stateLock = new object(); const double CloseWaitTimeout = 30 * 1000; // 30 seconds const double RetransmissionTimeout = 1 * 1000; // 1 second #region Rdp Fields /// <summary> /// The sequence number of the next segment that is to be sent. 
/// </summary> int sndNxt; /// <summary> /// The sequence number of the oldest unacknowledged segment. /// </summary> int sndUna; /// <summary> /// The maximum number of outstanding (unacknowledged) segments /// that can be sent. The sender should not send more than this /// number of segments without getting an acknowledgement. /// </summary> int sndMax; /// <summary> /// The initial send sequence number. This is the sequence /// number that was sent in the SYN segment. /// </summary> int sndIss; /// <summary> /// The sequence number of the last segment received correctly /// and in sequence. /// </summary> int rcvCur; /// <summary> /// The maximum number of segments that can be buffered for this /// connection. /// </summary> int rcvMax; /// <summary> /// The initial receive sequence number. This is the sequence /// number of the SYN segment that established this connection. /// </summary> int rcvIrs; /// <summary> /// The array of sequence numbers of segments that have been /// received and acknowledged out of sequence. /// </summary> List<int> rcvdSeqNos = new List<int>(); /// <summary> /// The largest possible segment (in octets) that can legally be /// sent. This variable is specified by the foreign host in the /// SYN segment during connection establishment. /// </summary> int sbufMax; /// <summary> /// The largest possible segment (in octets) that can be /// received. This variable is specified by the user when the /// connection is opened. The variable is sent to the foreign /// host in the SYN segment. /// </summary> int rbufMax; /// <summary> /// The sequence number of the segment currently being /// processed. /// </summary> /// segSeq => inPacket.SeqNumber /// <summary> /// The acknowledgement sequence number in the segment currently /// being processed. 
/// </summary> /// segAck => inPacket.AckNumber /// <summary> /// The maximum number of outstanding segments the receiver is /// willing to hold, as specified in the SYN segment that /// established the connection. /// </summary> /// segMax => inPacket.MaxSegments /// <summary> /// The maximum segment size (in octets) accepted by the foreign /// host on a connection, as specified in the SYN segment that /// established the connection. /// </summary> /// segBmax => inPacket.MaxSegmentSize #endregion // Udp Fields UdpClient udpConn; IPEndPoint remoteEP; // Time when we should leave close wait, and be released DateTime closeWaitTime; RdpConnectionManager connManager; bool valid; // flag set after we have fallen into the closed state bool passiveOpen; bool outOfOrderAllowed; private int packetsSentCount = 0; private int packetsReceivedCount = 0; private int bytesSentCount = 0; private int bytesReceivedCount = 0; static long totalBytesSentCount = 0; static long totalBytesReceivedCount = 0; private int startTick = Environment.TickCount; Dictionary<int, DateTime> retransmissionTimer = new Dictionary<int, DateTime>(); Dictionary<int, RdpPacket> unacknowledgedPackets = new Dictionary<int, RdpPacket>(); SortedList<int, RdpPacket> outOfOrderPackets = new SortedList<int, RdpPacket>(); List<RdpPacket> availablePackets = new List<RdpPacket>(); SortedList<int, RdpPacket> availableOutOfOrderPackets = new SortedList<int, RdpPacket>(); /// <summary> /// Constructor - should only be called by the connection manager object /// </summary> public RdpConnection(RdpConnectionManager connManager, bool passive, UdpClient udpConn, IPEndPoint remoteEP, int rcvMax, int rbufMax, bool sequenced) { this.connManager = connManager; this.udpConn = udpConn; this.remoteEP = remoteEP; this.rcvMax = rcvMax; this.rbufMax = rbufMax; outOfOrderAllowed = !sequenced; valid = true; sndIss = 0; // TODO: replace with a random number? 
sndNxt = sndIss + 1; sndUna = sndIss; InternalOpen(passive); } private int[] EakArray { get { try { Monitor.Enter(this); int eakLen = outOfOrderPackets.Count; int[] eakArray = new int[eakLen]; outOfOrderPackets.Keys.CopyTo(eakArray, 0); return eakArray; } finally { Monitor.Exit(this); } } } /// <summary> /// This is the version of EakArray that should actually be used, /// since it limits the number of eaks to the number that will fit /// in the rdp packet. /// </summary> private int[] AbridgedEakArray { get { int[] eakArray = EakArray; if (eakArray.Length > RdpPacket.MaxEaks) { int[] abridgedArray = new int[RdpPacket.MaxEaks]; Array.Copy(eakArray, 0, abridgedArray, 0, abridgedArray.Length); eakArray = abridgedArray; } return eakArray; } } /// <summary> /// Cleanup our retransmissionTimer and unacknowledgedPackets /// </summary> private void ClearQueues() { retransmissionTimer.Clear(); unacknowledgedPackets.Clear(); } /// <summary> /// Process the packet, adding it to either the out of order list /// or the available list as appropriate. We already have the lock. /// This will also update rcvCur to be appropriate. 
/// </summary>
/// <param name="packet"></param>
private void HandleDataPacket(RdpPacket packet)
{
    // Stash the packet by sequence number; a duplicate simply overwrites.
    outOfOrderPackets[packet.SeqNumber] = packet;
    if (outOfOrderAllowed)
        availableOutOfOrderPackets[packet.SeqNumber] = packet;
    // Walk the held packets in sequence order, promoting every packet
    // that is now contiguous with rcvCur onto the in-order queue.
    int[] sortedSequence = new int[outOfOrderPackets.Keys.Count];
    outOfOrderPackets.Keys.CopyTo(sortedSequence, 0);
    foreach (int segSeq in sortedSequence)
    {
        if (segSeq == rcvCur + 1)
        {
            RdpPacket currentPacket = outOfOrderPackets[segSeq];
            log.DebugFormat("Queued packet {0} : {1}", currentPacket.SeqNumber, segSeq);
            availablePackets.Add(currentPacket);
            if (outOfOrderAllowed)
                availableOutOfOrderPackets.Remove(segSeq);
            rcvCur = segSeq;
            outOfOrderPackets.Remove(segSeq);
        }
    }
    // Wake any thread blocked in Receive().
    Monitor.PulseAll(this);
}

/// <summary>
/// Version of Open that bypasses the lock (used internally)
/// Generally with these methods, we already hold the lock, but
/// in this case, we don't need to hold the lock, since we are
/// still in the constructor.
/// </summary>
/// <param name="passiveOpen">true for a passive (listening) open</param>
private void InternalOpen(bool passiveOpen)
{
    if (state != ConnectionState.Closed)
        throw new Exception("Error - connection already open");
    this.passiveOpen = passiveOpen;
    State = ConnectionState.Listen;
    // Create a connection record
    if (!passiveOpen)
    {
        // An active open sends the SYN immediately and moves to SynSent.
        /// Send <SEQ=SND.ISS><MAX=SND.MAX><MAXBUF=RMAX.BUF><SYN>
        RdpPacket packet = new RdpPacket(0, RdpPacket.OpenLength);
        packet.SeqNumber = sndIss;
        packet.Syn = true;
        packet.MaxSegments = (short)rcvMax;
        packet.MaxSegmentSize = (short)rbufMax;
        packet.Sequenced = !outOfOrderAllowed;
        SendPacket(packet);
        State = ConnectionState.SynSent;
    }
}

/// <summary>
/// Method to send the packet. If this is a client, we will pass in
/// null as the remoteEP, since the UdpClient object will have
/// called connect. We should already hold the lock on the connection.
/// </summary>
/// <param name="packet"></param>
private void SendPacket(RdpPacket packet)
{
    // count the bytes of outgoing packet
    bytesSentCount += packet.PacketLength;
    totalBytesSentCount += packet.PacketLength;
    packetsSentCount++;
    // Only data-bearing packets are tracked for retransmission.
    if (packet.HasData)
    {
        const int IpHeaderLength = 20;
        const int UdpHeaderLength = 8;
        // If the packet is too large, throw
        if (packet.PacketLength + IpHeaderLength + UdpHeaderLength > sbufMax)
            throw new RdpFragmentationException("packet size {0} is too large for connection with max of {1}", packet.DataLength, sbufMax);
        // If we have already sent as many packets as we can, throw
        // NOTE(review): this compares against rcvMax (our receive window)
        // rather than sndMax (the peer's advertised window) - confirm intent.
        if (unacknowledgedPackets.Count > rcvMax)
            throw new Exception("maximum unacknowledged packets exceeds limit");
        DateTime now = DateTime.Now;
        unacknowledgedPackets[packet.SeqNumber] = packet;
        retransmissionTimer[packet.SeqNumber] = now.AddMilliseconds(RetransmissionTimeout);
        log.DebugFormat("SendPacket: {0} - packet {1}", now, packet);
    }
    if (packet.Eak)
        log.DebugFormat("sending packet with eak set: {0}", packet);
    udpConn.Send(packet.PacketData, packet.PacketData.Length, remoteEP);
}

/// <summary>
/// Copies the caller's bytes into a fresh packet's data area and sends it.
/// </summary>
/// <param name="data">payload bytes to transmit</param>
public void Send(byte[] data)
{
    if (!valid)
        throw new Exception("Called Send on closed connection");
    RdpPacket packet = new RdpPacket(data.Length);
    Array.Copy(data, 0, packet.PacketData, packet.DataOffset, data.Length);
    Send(packet);
}

/// <summary>
/// Takes a packet with nothing but the data portion filled,
/// and does what is needed to send it out.
/// </summary>
/// <param name="packet"></param>
public void Send(RdpPacket packet)
{
    if (!valid)
        throw new Exception("Called Send on closed connection");
    try
    {
        Monitor.Enter(this);
        InternalSend(packet);
    }
    finally
    {
        Monitor.Exit(this);
    }
}

/// <summary>
/// Internal version of the send method. For this version, we already hold the lock.
/// </summary>
/// <param name="packet"></param>
private void InternalSend(RdpPacket packet)
{
    switch (state)
    {
        case ConnectionState.Open:
            // Refuse to exceed the peer's advertised send window.
            if (sndNxt >= sndUna + sndMax)
                throw new Exception("Error - insufficient resources to send data");
            /// Send <ACK=RCV.CUR><SEQ=SND.NXT><ACK><Data>;
            packet.AckNumber = rcvCur;
            packet.SeqNumber = sndNxt;
            packet.Ack = true;
            SendPacket(packet);
            sndNxt = sndNxt + 1;
            break;
        case ConnectionState.Listen:
        case ConnectionState.SynRcvd:
        case ConnectionState.SynSent:
        case ConnectionState.Closed:
        case ConnectionState.CloseWait:
            throw new Exception("Error - connection not open");
    }
}

/// <summary>
/// Receive a packet (blocks, and throws an exception if the connection is not open)
/// </summary>
/// <param name="remoteEP">set to the peer's endpoint on return</param>
/// <returns>null if there are no packets ready</returns>
public byte[] Receive(ref IPEndPoint remoteEP)
{
    ConnectionState connState = this.State;
    switch (connState)
    {
        case ConnectionState.Open:
            byte[] rv = null;
            try
            {
                Monitor.Enter(this);
                // Block until a packet is deliverable; PulseAll in
                // HandleDataPacket wakes us when one arrives.
                while (true)
                {
                    remoteEP = this.RemoteEndPoint;
                    if (availablePackets.Count > 0)
                    {
                        log.DebugFormat("Receive called for packet {0}", availablePackets[0].SeqNumber);
                        rv = availablePackets[0].Data;
                        availablePackets.RemoveAt(0);
                        return rv;
                    }
                    else if (outOfOrderAllowed && availableOutOfOrderPackets.Count > 0)
                    {
                        // Non-sequenced connections may deliver ahead of rcvCur.
                        rv = availableOutOfOrderPackets.Values[0].Data;
                        availableOutOfOrderPackets.RemoveAt(0);
                        return rv;
                    }
                    Monitor.Wait(this);
                }
            }
            finally
            {
                Monitor.Exit(this);
            }
        case ConnectionState.Listen:
        case ConnectionState.SynRcvd:
        case ConnectionState.SynSent:
            // Not yet established - nothing can be ready.
            return null;
        case ConnectionState.Closed:
        case ConnectionState.CloseWait:
            throw new Exception("Error - connection not open");
    }
    return null;
}

/// <summary>
/// Close the connection: takes the lock, then delegates to InternalClose.
/// </summary>
public void Close()
{
    try
    {
        Monitor.Enter(this);
        InternalClose();
    }
    finally
    {
        Monitor.Exit(this);
    }
}

/// <summary>
/// Internal version of close. For this version, we already hold the lock.
/// </summary>
private void InternalClose()
{
    int ticks = Environment.TickCount - startTick;
    log.InfoFormat("Sent {0} bytes, Received {1} bytes in {2} seconds", bytesSentCount, bytesReceivedCount, ticks / 1000);
    switch (state)
    {
        case ConnectionState.Open:
            {
                /// Send <SEQ=SND.NXT><RST>;
                RdpPacket packet = new RdpPacket(0);
                packet.SeqNumber = sndNxt;
                packet.Rst = true;
                SendPacket(packet);
            }
            State = ConnectionState.CloseWait;
            // TODO: Start TIMWAIT Timer
            break;
        case ConnectionState.Listen:
            State = ConnectionState.Closed;
            break;
        case ConnectionState.SynRcvd:
        case ConnectionState.SynSent:
            {
                /// Send <SEQ=SND.NXT><RST>
                RdpPacket packet = new RdpPacket(0);
                packet.SeqNumber = sndNxt;
                packet.Rst = true;
                SendPacket(packet);
            }
            State = ConnectionState.Closed;
            break;
        case ConnectionState.CloseWait:
            throw new Exception("Error - Connection closing");
        case ConnectionState.Closed:
            throw new Exception("Error - Connection not open");
    }
}

/// <summary>
/// Method to handle segment arrival
/// </summary>
/// <param name="inPacket">segment just read off the wire</param>
/// <param name="remoteEP">endpoint the segment came from</param>
public void OnSegmentArrival(RdpPacket inPacket, IPEndPoint remoteEP)
{
    try
    {
        Monitor.Enter(this);
        InternalOnSegmentArrival(inPacket, remoteEP);
    }
    finally
    {
        Monitor.Exit(this);
    }
}

/// <summary>
/// Internal method to handle segment arrival. For this version, we already hold the lock.
/// </summary>
/// <param name="inPacket"></param>
/// <param name="remoteEP"></param>
private void InternalOnSegmentArrival(RdpPacket inPacket, IPEndPoint remoteEP)
{
    DateTime now = DateTime.Now;
    log.DebugFormat("OnSegmentArrival: {0} - packet {1}", now, inPacket);
    // count received bytes
    bytesReceivedCount += inPacket.PacketLength;
    totalBytesReceivedCount += inPacket.PacketLength;
    packetsReceivedCount++;
    // Segment-arrival state machine, per connection state.
    switch (state)
    {
        case ConnectionState.Closed:
        case ConnectionState.CloseWait:
            if (inPacket.Rst)
                return;
            else if (inPacket.Ack || inPacket.Nul)
            {
                /// Send <SEQ=SEG.ACK + 1><RST>
                RdpPacket packet = new RdpPacket(0);
                packet.SeqNumber = inPacket.AckNumber + 1;
                packet.Rst = true;
                SendPacket(packet);
            }
            else
            {
                /// Send <SEQ=0><RST><ACK=SEG.SEQ><ACK>
                RdpPacket packet = new RdpPacket(0);
                packet.SeqNumber = 0;
                packet.AckNumber = inPacket.SeqNumber;
                packet.Rst = true;
                packet.Ack = true;
                SendPacket(packet);
            }
            break;
        case ConnectionState.Listen:
            if (inPacket.Rst)
                return;
            if (inPacket.Ack || inPacket.Nul)
            {
                /// Send <SEQ=SEG.ACK + 1><RST>
                RdpPacket packet = new RdpPacket(0);
                packet.SeqNumber = inPacket.AckNumber + 1;
                packet.Rst = true;
                SendPacket(packet);
                return;
            }
            if (inPacket.Syn)
            {
                // Record the peer's initial sequence number and limits,
                // then answer the SYN with our own SYN+ACK.
                rcvCur = inPacket.SeqNumber;
                rcvIrs = inPacket.SeqNumber;
                sndMax = inPacket.MaxSegments;
                sbufMax = inPacket.MaxSegmentSize;
                /// Send <SEQ=SND.ISS><ACK=RCV.CUR><MAX=RCV.MAX><BUFMAX=RBUF.MAX>
                /// <ACK><SYN>
                RdpPacket packet = new RdpPacket(0, RdpPacket.OpenLength);
                packet.SeqNumber = sndIss;
                packet.AckNumber = rcvCur;
                packet.Ack = true;
                packet.Syn = true;
                packet.MaxSegments = (short)rcvMax;
                packet.MaxSegmentSize = (short)rbufMax;
                packet.Sequenced = inPacket.Sequenced;
                SendPacket(packet);
                State = ConnectionState.SynRcvd;
                return;
            }
            log.Warn("Shouldn't have gotten here");
            break;
        case ConnectionState.SynSent:
            if (inPacket.Rst)
            {
                if (inPacket.Ack)
                {
                    State = ConnectionState.Closed;
                    log.Warn("Connection Refused");
                    // TODO: deallocate connection
                }
                return;
            }
            if (inPacket.Syn)
            {
                rcvCur = inPacket.SeqNumber;
                rcvIrs = inPacket.SeqNumber;
                sndMax = inPacket.MaxSegments;
                sbufMax = inPacket.MaxSegmentSize;
                if (inPacket.Ack)
                {
                    // SYN+ACK completes the active open.
                    sndUna = inPacket.AckNumber + 1; // per rfc 1151
                    State = ConnectionState.Open;
                    /// Send <SEQ=SND.NXT><ACK=RCV.CUR><ACK>
                    RdpPacket packet = new RdpPacket(0);
                    packet.SeqNumber = sndNxt;
                    packet.AckNumber = rcvCur;
                    packet.Ack = true;
                    SendPacket(packet);
                }
                else
                {
                    // Simultaneous open: both sides sent SYN.
                    State = ConnectionState.SynRcvd;
                    /// Send <SEQ=SND.ISS><ACK=RCV.CUR><MAX=RCV.MAX><BUFMAX=RBUF.MAX>
                    /// <SYN><ACK>
                    RdpPacket packet = new RdpPacket(0, RdpPacket.OpenLength);
                    packet.SeqNumber = sndIss;
                    packet.AckNumber = rcvCur;
                    packet.Ack = true;
                    packet.Syn = true;
                    packet.MaxSegments = (short)rcvMax;
                    packet.MaxSegmentSize = (short)rbufMax;
                    packet.Sequenced = inPacket.Sequenced;
                    SendPacket(packet);
                }
                return;
            }
            if (inPacket.Ack)
            {
                if (!inPacket.Rst && inPacket.AckNumber != sndIss)
                {
                    // ACK for something we never sent - reset.
                    /// Send <SEQ=SEG.ACK + 1><RST>
                    RdpPacket packet = new RdpPacket(0);
                    packet.SeqNumber = inPacket.AckNumber + 1;
                    packet.Rst = true;
                    SendPacket(packet);
                    State = ConnectionState.Closed;
                    log.Warn("Connection Reset (by invalid ACK)");
                    // TODO: deallocate connection
                    return;
                }
            }
            if (inPacket.Nul)
            {
                log.Warn("Shouldn't have gotten here");
                break;
            }
            log.Error("Shouldn't have gotten here");
            break;
        case ConnectionState.SynRcvd:
            // Segment outside the acceptable window: re-ack and drop.
            if (rcvIrs >= inPacket.SeqNumber || inPacket.SeqNumber > (rcvCur + rcvMax * 2))
            {
                /// Send <SEQ=SND.NXT><ACK=RCV.CUR><ACK>
                RdpPacket packet = new RdpPacket(0);
                packet.SeqNumber = sndNxt;
                packet.AckNumber = rcvCur;
                packet.Ack = true;
                SendPacket(packet);
                return;
            }
            if (inPacket.Rst)
            {
                if (passiveOpen)
                    State = ConnectionState.Listen;
                else
                {
                    State = ConnectionState.Closed;
                    throw new Exception("Connection Refused");
                }
                return;
            }
            if (inPacket.Syn)
            {
                /// Send <SEQ=SEG.ACK + 1><RST>
                RdpPacket packet = new RdpPacket(0);
                packet.SeqNumber = inPacket.AckNumber + 1;
                packet.Rst = true;
                SendPacket(packet);
                State = ConnectionState.Closed;
                log.Warn("Connection Reset (by SYN)");
                return;
            }
            if (inPacket.Eak)
            {
                // EAK is not legal before the connection is fully open.
                /// Send <SEQ=SEG.ACK + 1><RST>
                RdpPacket packet = new RdpPacket(0);
                packet.SeqNumber = inPacket.AckNumber + 1;
                packet.Rst = true;
                SendPacket(packet);
                return;
            }
            if (inPacket.Ack)
            {
                if (inPacket.AckNumber == sndIss)
                    State = ConnectionState.Open;
                else
                {
                    /// Send <SEQ=SEG.ACK + 1><RST>
                    RdpPacket packet = new RdpPacket(0);
                    packet.SeqNumber = inPacket.AckNumber + 1;
                    packet.Rst = true;
                    SendPacket(packet);
                    return;
                }
            }
            else
                return;
            if (inPacket.HasData || inPacket.Nul)
            {
                HandleDataPacket(inPacket);
                /// Send <SEQ=SND.NXT><ACK=RCV.CUR><ACK><EACK><RCVDSEQNO1>
                /// ...<RCVDSEQNOn>
                int[] eakArray = AbridgedEakArray;
                RdpPacket packet = new RdpPacket(0, eakArray.Length * 4);
                packet.SeqNumber = sndNxt;
                packet.AckNumber = rcvCur;
                packet.Ack = true;
                if (eakArray.Length > 0)
                {
                    packet.Eak = true;
                    packet.EakEntries = eakArray;
                }
                SendPacket(packet);
            }
            break;
        case ConnectionState.Open:
            if (inPacket.Rst)
            {
                State = ConnectionState.CloseWait;
                log.Warn("Connection Reset");
                return;
            }
            // Duplicate or out-of-window segment: re-ack and drop.
            if (rcvCur >= inPacket.SeqNumber || inPacket.SeqNumber > (rcvCur + rcvMax * 2))
            {
                /// Send <SEQ=SND.NXT><ACK=RCV.CUR><ACK>
                RdpPacket packet = new RdpPacket(0);
                packet.SeqNumber = sndNxt;
                packet.AckNumber = rcvCur;
                packet.Ack = true;
                SendPacket(packet);
                log.Debug("Acking packet that was already received");
                return;
            }
#if STRICT_SPEC
            if (inPacket.Nul)
            {
                rcvCur = inPacket.SeqNumber;
                /// Send <SEQ=SND.NXT><ACK=RCV.CUR><ACK>
                RdpPacket packet = new RdpPacket(0);
                packet.SeqNumber = sndNxt;
                packet.AckNumber = rcvCur;
                packet.Ack = true;
                SendPacket(packet);
                Logger.Log(1, "Got Nul packet");
                return;
            }
#endif
            if (inPacket.Syn)
            {
                /// Send <SEQ=SEG.ACK + 1><RST>
                RdpPacket packet = new RdpPacket(0);
                packet.SeqNumber = inPacket.AckNumber + 1;
                packet.Rst = true;
                SendPacket(packet);
                State = ConnectionState.Closed;
                log.Warn("Connection Reset (by SYN)");
                // TODO: deallocate connection
                return;
            }
            if (inPacket.Ack)
            {
                if (sndUna <= inPacket.AckNumber && inPacket.AckNumber < sndNxt)
                {
                    sndUna = inPacket.AckNumber + 1; // per rfc 1151
                    // A cumulative ack releases everything up to SEG.ACK.
                    List<int> removeList = new List<int>();
                    int segAck = inPacket.AckNumber;
                    foreach (int segSeq in unacknowledgedPackets.Keys)
                        if (segSeq <= segAck)
                            removeList.Add(segSeq);
                    foreach (int segSeq in removeList)
                        unacknowledgedPackets.Remove(segSeq);
                }
            }
            if (inPacket.Eak)
            {
                // Extended acks release individually-received segments.
                int[] eakEntries = inPacket.EakEntries;
                log.DebugFormat("Received eack packet: {0}", inPacket);
                foreach (int segSeq in eakEntries)
                    unacknowledgedPackets.Remove(segSeq);
            }
#if STRICT_SPEC
            if (inPacket.HasData)
            {
#else
            if (inPacket.HasData || inPacket.Nul)
            {
#endif
                HandleDataPacket(inPacket);
                /// Send <SEQ=SND.NXT><ACK=RCV.CUR><ACK><EACK><RCVDSEQNO1>
                /// ...<RCVDSEQNOn>
                int[] eakArray = AbridgedEakArray;
                RdpPacket packet = new RdpPacket(0, eakArray.Length * 4);
                packet.SeqNumber = sndNxt;
                packet.AckNumber = rcvCur;
                packet.Ack = true;
                if (eakArray.Length > 0)
                {
                    packet.Eak = true;
                    packet.EakEntries = eakArray;
                }
                SendPacket(packet);
            }
            break;
    }
}

/// <summary>
/// This is called periodically by the connection manager.
/// Check the timers of each of the unacknowledged packets to see if they are due for retransmission.
/// </summary>
/// <param name="now"></param>
public void OnRetransmissionTick(DateTime now)
{
    List<int> retransmitList = new List<int>();
    try
    {
        Monitor.Enter(this);
        foreach (KeyValuePair<int, DateTime> pair in retransmissionTimer)
            if (now > pair.Value)
                retransmitList.Add(pair.Key);
        retransmitList.Sort(); // this should generally perform better
        foreach (int segSeq in retransmitList)
        {
            RdpPacket packet = null;
            if (unacknowledgedPackets.ContainsKey(segSeq))
                packet = unacknowledgedPackets[segSeq];
            // Remove these from the unacknowledged packets and retransmit timer,
            // since we will call SendPacket and handle these again.
            unacknowledgedPackets.Remove(segSeq);
            retransmissionTimer.Remove(segSeq);
            if (packet == null)
                continue;
            // Update the ack numbers on the packets
            // this is an optimization to prevent unnecessary
            // retransmits from our peer.
            if (packet.Ack)
                packet.AckNumber = rcvCur;
            SendPacket(packet);
        }
    }
    finally
    {
        Monitor.Exit(this);
    }
}

/// <summary>
/// This is called by the connection manager when the close wait timer has expired.
/// </summary>
public void OnCloseWaitTimeout()
{
    try
    {
        Monitor.Enter(this);
        State = ConnectionState.Closed;
    }
    finally
    {
        Monitor.Exit(this);
    }
}

/// <summary>
/// Wait until the connection shifts into the given state
/// </summary>
/// <param name="waitForState"></param>
public bool WaitForState(ConnectionState waitForState)
{
    return WaitForState(waitForState, -1);
}

/// <summary>
/// Wait until the connection shifts into the given state
/// </summary>
/// <param name="waitForState">state that we are waiting for</param>
/// <param name="millisecondsTimeout">number of milliseconds to wait, or -1 to wait indefinitely</param>
/// <returns>true if we transitioned to the given state, or false if we timed out</returns>
public bool WaitForState(ConnectionState waitForState, int millisecondsTimeout)
{
    try
    {
        int untilMillis = Environment.TickCount + millisecondsTimeout;
        Monitor.Enter(stateLock);
        while (true)
        {
            if (state == waitForState)
                return true;
            else if (IsClosed)
                throw new Exception("Connection closed");
            if (millisecondsTimeout >= 0)
            {
                // Recompute the remaining time on every wakeup.
                int timeout = untilMillis - Environment.TickCount;
                if (timeout <= 0)
                    return false;
                Monitor.Wait(stateLock, timeout);
            }
            else
            {
                Monitor.Wait(stateLock);
            }
        }
    }
    finally
    {
        Monitor.Exit(stateLock);
    }
}

#region Properties

/// <summary>
/// True once the connection has entered Closed or CloseWait.
/// </summary>
public bool IsClosed
{
    get
    {
        return ((state == ConnectionState.Closed) || (state == ConnectionState.CloseWait));
    }
}

/// <summary>
/// Connection state, guarded by stateLock; the setter also runs the
/// side effects of entering CloseWait or Closed.
/// </summary>
private ConnectionState State
{
    get
    {
        try
        {
            Monitor.Enter(stateLock);
            return state;
        }
        finally
        {
            Monitor.Exit(stateLock);
        }
    }
    // For all the set methods, we should already have a lock on the connection.
set { try { Monitor.Enter(stateLock); if ((state != ConnectionState.CloseWait) && (value == ConnectionState.CloseWait)) { DateTime now = DateTime.Now; closeWaitTime = now.AddMilliseconds(CloseWaitTimeout); connManager.ReleaseConnection(this); ClearQueues(); } if ((state != ConnectionState.Closed) && (value == ConnectionState.Closed)) { connManager.CloseConnection(this); ClearQueues(); } state = value; Monitor.PulseAll(stateLock); } finally { Monitor.Exit(stateLock); } } } public DateTime CloseWaitTime { get { return closeWaitTime; } } public ConnectionState ConnectionState { get { return State; } } public int UnackedCount { get { try { Monitor.Enter(this); return unacknowledgedPackets.Count; } finally { Monitor.Exit(this); } } } public int AvailableCount { get { try { Monitor.Enter(this); return availablePackets.Count + availableOutOfOrderPackets.Count; } finally { Monitor.Exit(this); } } } public int MaxReceiveSegment { get { return rbufMax; } } public int MaxSendSegment { get { return sndMax; } } public IPEndPoint RemoteEndPoint { get { return remoteEP; } } public int MaxSegments { get { return rcvMax; } } public long PacketsSentCounter { get { return packetsSentCount; } } public long PacketsReceivedCounter { get { return packetsReceivedCount; } } public long BytesSentCounter { get { return bytesSentCount; } } public long BytesReceivedCounter { get { return bytesReceivedCount; } } public static long TotalBytesSentCounter { get { return totalBytesSentCount; } } public static long TotalBytesReceivedCounter { get { return totalBytesReceivedCount; } } #endregion // Properties } }
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Reflection;

namespace WebApiAngularJsAzureUploader.Areas.HelpPage
{
    /// <summary>
    /// This class will create an object of a given type and populate it with sample data.
    /// </summary>
    public class ObjectGenerator
    {
        // Number of elements generated for any collection-like type.
        internal const int DefaultCollectionSize = 2;
        private readonly SimpleTypeObjectGenerator SimpleObjectGenerator = new SimpleTypeObjectGenerator();

        /// <summary>
        /// Generates an object for a given type. The type needs to be public, have a public default constructor and settable public properties/fields. Currently it supports the following types:
        /// Simple types: <see cref="int"/>, <see cref="string"/>, <see cref="Enum"/>, <see cref="DateTime"/>, <see cref="Uri"/>, etc.
        /// Complex types: POCO types.
        /// Nullables: <see cref="Nullable{T}"/>.
        /// Arrays: arrays of simple types or complex types.
        /// Key value pairs: <see cref="KeyValuePair{TKey,TValue}"/>
        /// Tuples: <see cref="Tuple{T1}"/>, <see cref="Tuple{T1,T2}"/>, etc
        /// Dictionaries: <see cref="IDictionary{TKey,TValue}"/> or anything deriving from <see cref="IDictionary{TKey,TValue}"/>.
        /// Collections: <see cref="IList{T}"/>, <see cref="IEnumerable{T}"/>, <see cref="ICollection{T}"/>, <see cref="IList"/>, <see cref="IEnumerable"/>, <see cref="ICollection"/> or anything deriving from <see cref="ICollection{T}"/> or <see cref="IList"/>.
        /// Queryables: <see cref="IQueryable"/>, <see cref="IQueryable{T}"/>.
        /// </summary>
        /// <param name="type">The type.</param>
        /// <returns>An object of the given type.</returns>
        public object GenerateObject(Type type)
        {
            return GenerateObject(type, new Dictionary<Type, object>());
        }

        // Core recursive generator; createdObjectReferences tracks instances
        // already built so circular object graphs terminate.
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Here we just want to return null if anything goes wrong.")]
        private object GenerateObject(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            try
            {
                if (SimpleTypeObjectGenerator.CanGenerateObject(type))
                {
                    return SimpleObjectGenerator.GenerateObject(type);
                }

                if (type.IsArray)
                {
                    return GenerateArray(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type.IsGenericType)
                {
                    return GenerateGenericType(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type == typeof(IDictionary))
                {
                    return GenerateDictionary(typeof(Hashtable), DefaultCollectionSize, createdObjectReferences);
                }

                if (typeof(IDictionary).IsAssignableFrom(type))
                {
                    return GenerateDictionary(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type == typeof(IList) || type == typeof(IEnumerable) || type == typeof(ICollection))
                {
                    return GenerateCollection(typeof(ArrayList), DefaultCollectionSize, createdObjectReferences);
                }

                if (typeof(IList).IsAssignableFrom(type))
                {
                    return GenerateCollection(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type == typeof(IQueryable))
                {
                    return GenerateQueryable(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type.IsEnum)
                {
                    return GenerateEnum(type);
                }

                if (type.IsPublic || type.IsNestedPublic)
                {
                    return GenerateComplexObject(type, createdObjectReferences);
                }
            }
            catch
            {
                // Returns null if anything fails
                return null;
            }

            return null;
        }

        // Dispatches a constructed generic type to the matching generator.
        private static object GenerateGenericType(Type type, int collectionSize, Dictionary<Type, object> createdObjectReferences)
        {
            Type genericTypeDefinition = type.GetGenericTypeDefinition();
            if (genericTypeDefinition == typeof(Nullable<>))
            {
                return GenerateNullable(type, createdObjectReferences);
            }

            if (genericTypeDefinition == typeof(KeyValuePair<,>))
            {
                return GenerateKeyValuePair(type, createdObjectReferences);
            }

            if (IsTuple(genericTypeDefinition))
            {
                return GenerateTuple(type, createdObjectReferences);
            }

            Type[] genericArguments = type.GetGenericArguments();
            if (genericArguments.Length == 1)
            {
                if (genericTypeDefinition == typeof(IList<>) ||
                    genericTypeDefinition == typeof(IEnumerable<>) ||
                    genericTypeDefinition == typeof(ICollection<>))
                {
                    Type collectionType = typeof(List<>).MakeGenericType(genericArguments);
                    return GenerateCollection(collectionType, collectionSize, createdObjectReferences);
                }

                if (genericTypeDefinition == typeof(IQueryable<>))
                {
                    return GenerateQueryable(type, collectionSize, createdObjectReferences);
                }

                Type closedCollectionType = typeof(ICollection<>).MakeGenericType(genericArguments[0]);
                if (closedCollectionType.IsAssignableFrom(type))
                {
                    return GenerateCollection(type, collectionSize, createdObjectReferences);
                }
            }

            if (genericArguments.Length == 2)
            {
                if (genericTypeDefinition == typeof(IDictionary<,>))
                {
                    Type dictionaryType = typeof(Dictionary<,>).MakeGenericType(genericArguments);
                    return GenerateDictionary(dictionaryType, collectionSize, createdObjectReferences);
                }

                Type closedDictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments[0], genericArguments[1]);
                if (closedDictionaryType.IsAssignableFrom(type))
                {
                    return GenerateDictionary(type, collectionSize, createdObjectReferences);
                }
            }

            if (type.IsPublic || type.IsNestedPublic)
            {
                return GenerateComplexObject(type, createdObjectReferences);
            }

            return null;
        }

        // Builds a Tuple<> by generating each type argument in turn;
        // null when no component at all could be generated.
        private static object GenerateTuple(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            Type[] genericArgs = type.GetGenericArguments();
            object[] parameterValues = new object[genericArgs.Length];
            bool failedToCreateTuple = true;
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < genericArgs.Length; i++)
            {
                parameterValues[i] = objectGenerator.GenerateObject(genericArgs[i], createdObjectReferences);
                failedToCreateTuple &= parameterValues[i] == null;
            }
            if (failedToCreateTuple)
            {
                return null;
            }
            object result = Activator.CreateInstance(type, parameterValues);
            return result;
        }

        // True for any of the Tuple<...> generic definitions (arity 1-8).
        private static bool IsTuple(Type genericTypeDefinition)
        {
            return genericTypeDefinition == typeof(Tuple<>) ||
                genericTypeDefinition == typeof(Tuple<,>) ||
                genericTypeDefinition == typeof(Tuple<,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,,,,>);
        }

        private static object GenerateKeyValuePair(Type keyValuePairType, Dictionary<Type, object> createdObjectReferences)
        {
            Type[] genericArgs = keyValuePairType.GetGenericArguments();
            Type typeK = genericArgs[0];
            Type typeV = genericArgs[1];
            ObjectGenerator objectGenerator = new ObjectGenerator();
            object keyObject = objectGenerator.GenerateObject(typeK, createdObjectReferences);
            object valueObject = objectGenerator.GenerateObject(typeV, createdObjectReferences);
            if (keyObject == null && valueObject == null)
            {
                // Failed to create key and values
                return null;
            }
            object result = Activator.CreateInstance(keyValuePairType, keyObject, valueObject);
            return result;
        }

        private static object GenerateArray(Type arrayType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            Type type = arrayType.GetElementType();
            Array result = Array.CreateInstance(type, size);
            bool areAllElementsNull = true;
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object element = objectGenerator.GenerateObject(type, createdObjectReferences);
                result.SetValue(element, i);
                areAllElementsNull &= element == null;
            }

            if (areAllElementsNull)
            {
                return null;
            }

            return result;
        }

        // Fills a dictionary via its reflected Add/Contains(Key) methods so it
        // works for both generic and non-generic dictionary types.
        private static object GenerateDictionary(Type dictionaryType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            Type typeK = typeof(object);
            Type typeV = typeof(object);
            if (dictionaryType.IsGenericType)
            {
                Type[] genericArgs = dictionaryType.GetGenericArguments();
                typeK = genericArgs[0];
                typeV = genericArgs[1];
            }

            object result = Activator.CreateInstance(dictionaryType);
            MethodInfo addMethod = dictionaryType.GetMethod("Add") ?? dictionaryType.GetMethod("TryAdd");
            MethodInfo containsMethod = dictionaryType.GetMethod("Contains") ?? dictionaryType.GetMethod("ContainsKey");
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object newKey = objectGenerator.GenerateObject(typeK, createdObjectReferences);
                if (newKey == null)
                {
                    // Cannot generate a valid key
                    return null;
                }

                bool containsKey = (bool)containsMethod.Invoke(result, new object[] { newKey });
                if (!containsKey)
                {
                    object newValue = objectGenerator.GenerateObject(typeV, createdObjectReferences);
                    addMethod.Invoke(result, new object[] { newKey, newValue });
                }
            }

            return result;
        }

        // Returns the first declared enum member, or null for an empty enum.
        private static object GenerateEnum(Type enumType)
        {
            Array possibleValues = Enum.GetValues(enumType);
            if (possibleValues.Length > 0)
            {
                return possibleValues.GetValue(0);
            }
            return null;
        }

        private static object GenerateQueryable(Type queryableType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            bool isGeneric = queryableType.IsGenericType;
            object list;
            if (isGeneric)
            {
                Type listType = typeof(List<>).MakeGenericType(queryableType.GetGenericArguments());
                list = GenerateCollection(listType, size, createdObjectReferences);
            }
            else
            {
                list = GenerateArray(typeof(object[]), size, createdObjectReferences);
            }
            if (list == null)
            {
                return null;
            }
            if (isGeneric)
            {
                Type argumentType = typeof(IEnumerable<>).MakeGenericType(queryableType.GetGenericArguments());
                MethodInfo asQueryableMethod = typeof(Queryable).GetMethod("AsQueryable", new[] { argumentType });
                return asQueryableMethod.Invoke(null, new[] { list });
            }

            return Queryable.AsQueryable((IEnumerable)list);
        }

        private static object GenerateCollection(Type collectionType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            Type type = collectionType.IsGenericType ?
                collectionType.GetGenericArguments()[0] :
                typeof(object);
            object result = Activator.CreateInstance(collectionType);
            MethodInfo addMethod = collectionType.GetMethod("Add");
            bool areAllElementsNull = true;
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object element = objectGenerator.GenerateObject(type, createdObjectReferences);
                addMethod.Invoke(result, new object[] { element });
                areAllElementsNull &= element == null;
            }

            if (areAllElementsNull)
            {
                return null;
            }

            return result;
        }

        private static object GenerateNullable(Type nullableType, Dictionary<Type, object> createdObjectReferences)
        {
            Type type = nullableType.GetGenericArguments()[0];
            ObjectGenerator objectGenerator = new ObjectGenerator();
            return objectGenerator.GenerateObject(type, createdObjectReferences);
        }

        private static object GenerateComplexObject(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            object result = null;

            if (createdObjectReferences.TryGetValue(type, out result))
            {
                // The object has been created already, just return it. This will handle the circular reference case.
                return result;
            }

            if (type.IsValueType)
            {
                result = Activator.CreateInstance(type);
            }
            else
            {
                ConstructorInfo defaultCtor = type.GetConstructor(Type.EmptyTypes);
                if (defaultCtor == null)
                {
                    // Cannot instantiate the type because it doesn't have a default constructor
                    return null;
                }

                result = defaultCtor.Invoke(new object[0]);
            }

            // Register before recursing so circular references resolve to this instance.
            createdObjectReferences.Add(type, result);
            SetPublicProperties(type, result, createdObjectReferences);
            SetPublicFields(type, result, createdObjectReferences);
            return result;
        }

        private static void SetPublicProperties(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
        {
            PropertyInfo[] properties = type.GetProperties(BindingFlags.Public | BindingFlags.Instance);
            ObjectGenerator objectGenerator = new ObjectGenerator();
            foreach (PropertyInfo property in properties)
            {
                if (property.CanWrite)
                {
                    object propertyValue = objectGenerator.GenerateObject(property.PropertyType, createdObjectReferences);
                    property.SetValue(obj, propertyValue, null);
                }
            }
        }

        private static void SetPublicFields(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
        {
            FieldInfo[] fields = type.GetFields(BindingFlags.Public | BindingFlags.Instance);
            ObjectGenerator objectGenerator = new ObjectGenerator();
            foreach (FieldInfo field in fields)
            {
                object fieldValue = objectGenerator.GenerateObject(field.FieldType, createdObjectReferences);
                field.SetValue(obj, fieldValue);
            }
        }

        // Produces sample values for primitive/simple types; _index advances
        // per generated value so successive samples differ.
        private class SimpleTypeObjectGenerator
        {
            private long _index = 0;
            private static readonly Dictionary<Type, Func<long, object>> DefaultGenerators = InitializeGenerators();

            [SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity", Justification = "These are simple type factories and cannot be split up.")]
            private static Dictionary<Type, Func<long, object>> InitializeGenerators()
            {
                return new Dictionary<Type, Func<long, object>>
                {
                    { typeof(Boolean), index => true },
                    { typeof(Byte), index => (Byte)64 },
                    { typeof(Char), index => (Char)65 },
                    { typeof(DateTime), index => DateTime.Now },
                    { typeof(DateTimeOffset), index => new DateTimeOffset(DateTime.Now) },
                    { typeof(DBNull), index => DBNull.Value },
                    { typeof(Decimal), index => (Decimal)index },
                    { typeof(Double), index => (Double)(index + 0.1) },
                    { typeof(Guid), index => Guid.NewGuid() },
                    { typeof(Int16), index => (Int16)(index % Int16.MaxValue) },
                    { typeof(Int32), index => (Int32)(index % Int32.MaxValue) },
                    { typeof(Int64), index => (Int64)index },
                    { typeof(Object), index => new object() },
                    { typeof(SByte), index => (SByte)64 },
                    { typeof(Single), index => (Single)(index + 0.1) },
                    {
                        typeof(String), index =>
                        {
                            return String.Format(CultureInfo.CurrentCulture, "sample string {0}", index);
                        }
                    },
                    {
                        typeof(TimeSpan), index =>
                        {
                            return TimeSpan.FromTicks(1234567);
                        }
                    },
                    { typeof(UInt16), index => (UInt16)(index % UInt16.MaxValue) },
                    { typeof(UInt32), index => (UInt32)(index % UInt32.MaxValue) },
                    { typeof(UInt64), index => (UInt64)index },
                    {
                        typeof(Uri), index =>
                        {
                            return new Uri(String.Format(CultureInfo.CurrentCulture, "http://webapihelppage{0}.com", index));
                        }
                    },
                };
            }

            public static bool CanGenerateObject(Type type)
            {
                return DefaultGenerators.ContainsKey(type);
            }

            public object GenerateObject(Type type)
            {
                return DefaultGenerators[type](++_index);
            }
        }
    }
}
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Xml.Linq;
using SIL.WritingSystems.Migration;

namespace SIL.WritingSystems
{
	/// <summary>
	/// A folder-based, LDML writing system repository.
	/// </summary>
	public class LdmlInFolderWritingSystemRepository : LdmlInFolderWritingSystemRepository<WritingSystemDefinition>
	{
		/// <summary>
		/// Initializes a repository at <paramref name="basePath"/> with no custom data mappers.
		/// </summary>
		public static LdmlInFolderWritingSystemRepository Initialize(string basePath)
		{
			return Initialize(basePath, Enumerable.Empty<ICustomDataMapper<WritingSystemDefinition>>());
		}

		/// <summary>
		/// Returns an instance of an ldml in folder writing system repository.
		/// Runs migration first, then reports any migration and load problems together.
		/// </summary>
		/// <param name="basePath">base location of the global writing system repository</param>
		/// <param name="customDataMappers">The custom data mappers.</param>
		/// <param name="globalRepository">The global repository.</param>
		/// <param name="migrationHandler">Callback if during the initialization any writing system ids are changed</param>
		/// <param name="loadProblemHandler">Callback if during the initialization any writing systems cannot be loaded</param>
		/// <returns></returns>
		public static LdmlInFolderWritingSystemRepository Initialize(
			string basePath,
			IEnumerable<ICustomDataMapper<WritingSystemDefinition>> customDataMappers,
			GlobalWritingSystemRepository globalRepository = null,
			Action<int, IEnumerable<LdmlMigrationInfo>> migrationHandler = null,
			Action<IEnumerable<WritingSystemRepositoryProblem>> loadProblemHandler = null
		)
		{
			// Migrate on-disk LDML files before the repository reads them.
			var migrator = new LdmlInFolderWritingSystemRepositoryMigrator(basePath, migrationHandler);
			migrator.Migrate();
			var instance = new LdmlInFolderWritingSystemRepository(basePath, customDataMappers, globalRepository);
			migrator.ResetRemovedProperties(instance);

			// Call the loadProblemHandler with both migration problems and load problems
			var loadProblems = new List<WritingSystemRepositoryProblem>();
			loadProblems.AddRange(migrator.MigrationProblems);
			loadProblems.AddRange(instance.LoadProblems);
			if (loadProblems.Count > 0 && loadProblemHandler != null)
			{
				loadProblemHandler(loadProblems);
			}

			return instance;
		}

		protected internal LdmlInFolderWritingSystemRepository(string basePath, GlobalWritingSystemRepository<WritingSystemDefinition> globalRepository = null)
			: base(basePath, globalRepository)
		{
		}

		protected internal LdmlInFolderWritingSystemRepository(string basePath, IEnumerable<ICustomDataMapper<WritingSystemDefinition>> customDataMappers,
			GlobalWritingSystemRepository globalRepository = null)
			: base(basePath, customDataMappers, globalRepository)
		{
		}

		// Factory for writing system definitions backed by this folder repository.
		protected override IWritingSystemFactory<WritingSystemDefinition> CreateWritingSystemFactory()
		{
			return new LdmlInFolderWritingSystemFactory(this);
		}
	}

	/// <summary>
	/// A folder-based, LDML writing system repository. Each writing system is persisted as
	/// "&lt;languageTag&gt;.ldml" inside <see cref="PathToWritingSystems"/>; removed systems are
	/// moved to a "trash" subfolder and id changes are recorded in "idchangelog.xml".
	/// </summary>
	public abstract class LdmlInFolderWritingSystemRepository<T> : LocalWritingSystemRepositoryBase<T> where T : WritingSystemDefinition
	{
		private const string Extension = ".ldml";
		private string _path;
		private IEnumerable<T> _systemWritingSystemProvider;
		private readonly WritingSystemChangeLog _changeLog;
		private readonly IList<WritingSystemRepositoryProblem> _loadProblems = new List<WritingSystemRepositoryProblem>();
		private readonly ICustomDataMapper<T>[] _customDataMappers;
		private readonly GlobalWritingSystemRepository<T> _globalRepository;

		protected internal LdmlInFolderWritingSystemRepository(string basePath, GlobalWritingSystemRepository<T> globalRepository = null)
			: this(basePath, Enumerable.Empty<ICustomDataMapper<T>>(), globalRepository)
		{
		}

		protected internal LdmlInFolderWritingSystemRepository(string basePath, IEnumerable<ICustomDataMapper<T>> customDataMappers,
			GlobalWritingSystemRepository<T> globalRepository = null)
			: base(globalRepository)
		{
			_customDataMappers = customDataMappers.ToArray();
			_globalRepository = globalRepository;
			// Setting the property creates the folder (if needed) and loads all definitions.
			PathToWritingSystems = basePath;
			_changeLog = new WritingSystemChangeLog(new WritingSystemChangeLogDataMapper(Path.Combine(PathToWritingSystems, "idchangelog.xml")));
			ReadGlobalWritingSystemsToIgnore();
		}

		/// <summary>
		/// Gets the load problems.
		/// </summary>
		public IList<WritingSystemRepositoryProblem> LoadProblems
		{
			get { return _loadProblems; }
		}

		public new GlobalWritingSystemRepository<T> GlobalWritingSystemRepository
		{
			get { return _globalRepository; }
		}

		/// <summary>
		/// Gets or sets the path to the writing systems folder. Setting the path creates the
		/// folder when missing (its parent must already exist) and reloads all definitions.
		/// </summary>
		public string PathToWritingSystems
		{
			get { return _path; }
			set
			{
				_path = value;
				if (!Directory.Exists(_path))
				{
					string parent = Directory.GetParent(_path).FullName;
					if (!Directory.Exists(parent))
					{
						throw new ApplicationException(
							"The writing system repository cannot be created because its parent folder, " + parent + ", does not exist.");
					}
					Directory.CreateDirectory(_path);
				}
				LoadAllDefinitions();
			}
		}

		public IEnumerable<ICustomDataMapper<T>> CustomDataMappers
		{
			get { return _customDataMappers; }
		}

		///<summary>
		/// Returns the full path to the underlying store for this writing system.
		///</summary>
		public string GetFilePathFromLanguageTag(string langTag)
		{
			return Path.Combine(PathToWritingSystems, GetFileNameFromLanguageTag(langTag));
		}

		/// <summary>
		/// Gets the file name from the specified identifier.
		/// </summary>
		protected static string GetFileNameFromLanguageTag(string langTag)
		{
			return langTag + Extension;
		}

		/// <summary>
		/// Loads all writing system definitions from "*.ldml" files in the repository folder,
		/// resetting prior load problems and changed-id tracking first.
		/// </summary>
		protected void LoadAllDefinitions()
		{
			_loadProblems.Clear();
			ChangedIds.Clear();
			Clear();
			foreach (string filePath in Directory.GetFiles(_path, "*.ldml"))
				LoadDefinition(filePath);
			LoadChangedIdsFromExistingWritingSystems();
		}

		/// <summary>
		/// Loads one definition from <paramref name="filePath"/>. Failures never throw;
		/// each problem is recorded in <see cref="LoadProblems"/> instead.
		/// </summary>
		protected virtual void LoadDefinition(string filePath)
		{
			T wsFromFile;
			try
			{
				wsFromFile = WritingSystemFactory.Create();
				var ldmlDataMapper = new LdmlDataMapper(WritingSystemFactory);
				if (File.Exists(filePath))
				{
					ldmlDataMapper.Read(filePath, wsFromFile);
					foreach (ICustomDataMapper<T> customDataMapper in _customDataMappers)
						customDataMapper.Read(wsFromFile);
					// The repository id is the file name (without ".ldml"), not the tag in the file.
					wsFromFile.Id = Path.GetFileNameWithoutExtension(filePath);
				}
			}
			catch (Exception e)
			{
				// Add the exception to our list of problems and continue loading
				var problem = new WritingSystemRepositoryProblem
				{
					Consequence = WritingSystemRepositoryProblem.ConsequenceType.WSWillNotBeAvailable,
					Exception = e,
					FilePath = filePath
				};
				_loadProblems.Add(problem);
				return;
			}

			if (!StringComparer.InvariantCultureIgnoreCase.Equals(wsFromFile.Id, wsFromFile.LanguageTag))
			{
				// Add the exception to our list of problems and continue loading
				var problem = new WritingSystemRepositoryProblem
				{
					Consequence = WritingSystemRepositoryProblem.ConsequenceType.WSWillNotBeAvailable,
					Exception = new ApplicationException(
						String.Format(
							"The writing system file {0} seems to be named inconsistently. It contains the IETF language tag: '{1}'. The name should have been made consistent with its content upon migration of the writing systems.",
							filePath, wsFromFile.LanguageTag)),
					FilePath = filePath
				};
				_loadProblems.Add(problem);
			}
			try
			{
				Set(wsFromFile);
			}
			catch (Exception e)
			{
				// Add the exception to our list of problems and continue loading
				var problem = new WritingSystemRepositoryProblem
				{
					Consequence = WritingSystemRepositoryProblem.ConsequenceType.WSWillNotBeAvailable,
					Exception = e,
					FilePath = filePath
				};
				_loadProblems.Add(problem);
			}
		}

		// True when a trashed copy of the definition file exists for this identifier.
		private bool HaveMatchingDefinitionInTrash(string identifier)
		{
			string path = PathToWritingSystemTrash();
			path = Path.Combine(path, GetFileNameFromLanguageTag(identifier));
			return File.Exists(path);
		}

		// Adds OS-provided writing systems that are neither loaded already nor in the trash.
		private void AddActiveOSLanguages()
		{
			foreach (T ws in _systemWritingSystemProvider)
			{
				if (null == FindAlreadyLoadedWritingSystem(ws.LanguageTag))
				{
					if (!HaveMatchingDefinitionInTrash(ws.LanguageTag))
					{
						Set(ws);
					}
				}
			}
		}

		/// <summary>
		/// Provides writing systems from a repository that comes, for example, with the OS
		/// </summary>
		public IEnumerable<T> SystemWritingSystemProvider
		{
			get { return _systemWritingSystemProvider; }
			set
			{
				if (_systemWritingSystemProvider != value)
				{
					_systemWritingSystemProvider = value;
					AddActiveOSLanguages();
				}
			}
		}

		private T FindAlreadyLoadedWritingSystem(string wsID)
		{
			return AllWritingSystems.FirstOrDefault(ws => ws.LanguageTag == wsID);
		}

		/// <summary>
		/// Saves a writing system definition to its LDML file, preserving unknown LDML content
		/// from the previous file and logging the change (or addition) to the change log.
		/// </summary>
		protected internal virtual void SaveDefinition(T ws)
		{
			Set(ws);
			string writingSystemFilePath = GetFilePathFromLanguageTag(ws.LanguageTag);
			if (!File.Exists(writingSystemFilePath) && !string.IsNullOrEmpty(ws.Template))
			{
				// this is a new writing system that was generated from a template, so copy the template over before saving
				File.Copy(ws.Template, writingSystemFilePath);
				ws.Template = null;
			}

			if (!ws.IsChanged && File.Exists(writingSystemFilePath))
				return; // no need to save (better to preserve the modified date)

			ws.DateModified = DateTime.UtcNow;
			MemoryStream oldData = null;
			if (File.Exists(writingSystemFilePath))
			{
				// load old data to preserve stuff in LDML that we don't use, but don't throw up an error if it fails
				try
				{
					oldData = new MemoryStream(File.ReadAllBytes(writingSystemFilePath), false);
				}
				catch {}
				// What to do? Assume that the UI has already checked for existing, asked, and allowed the overwrite.
				File.Delete(writingSystemFilePath); //!!! Should this be moved to trash?
			}
			var ldmlDataMapper = new LdmlDataMapper(WritingSystemFactory);
			ldmlDataMapper.Write(writingSystemFilePath, ws, oldData);
			foreach (ICustomDataMapper<T> customDataMapper in _customDataMappers)
				customDataMapper.Write(ws);
			ws.AcceptChanges();

			if (ChangedIds.Any(p => p.Value == ws.Id))
			{
				// log this id change to the writing system change log
				KeyValuePair<string, string> pair = ChangedIds.First(p => p.Value == ws.Id);
				_changeLog.LogChange(pair.Key, pair.Value);
			}
			else
			{
				// log this addition
				_changeLog.LogAdd(ws.Id);
			}
		}

		public override void Conflate(string wsToConflate, string wsToConflateWith)
		{
			// conflation involves deleting the old writing system. That deletion should not appear
			// in the log, which is what the "_conflating" is used for
			base.Conflate(wsToConflate, wsToConflateWith);
			_changeLog.LogConflate(wsToConflate, wsToConflateWith);
		}

		public override void Remove(string id)
		{
			base.Remove(id);
			_changeLog.LogDelete(id);
		}

		/// <summary>
		/// Removes a definition by moving its file to the trash folder, so an
		/// auto-provided (OS) writing system is not re-added on the next load.
		/// </summary>
		protected override void RemoveDefinition(T ws)
		{
			int wsIgnoreCount = WritingSystemsToIgnore.Count;
			//we really need to get it in the trash, else, if was auto-provided,
			//it'll keep coming back!
			if (!File.Exists(GetFilePathFromLanguageTag(ws.LanguageTag)))
				SaveDefinition(ws);

			if (File.Exists(GetFilePathFromLanguageTag(ws.LanguageTag)))
			{
				Directory.CreateDirectory(PathToWritingSystemTrash());
				string destination = Path.Combine(PathToWritingSystemTrash(), GetFileNameFromLanguageTag(ws.LanguageTag));
				//clear out any old one already in the trash
				if (File.Exists(destination))
					File.Delete(destination);
				File.Move(GetFilePathFromLanguageTag(ws.LanguageTag), destination);
			}
			base.RemoveDefinition(ws);
			foreach (ICustomDataMapper<T> customDataMapper in _customDataMappers)
				customDataMapper.Remove(ws.LanguageTag);
			if (wsIgnoreCount != WritingSystemsToIgnore.Count)
				WriteGlobalWritingSystemsToIgnore();
		}

		private string PathToWritingSystemTrash()
		{
			return Path.Combine(_path, "trash");
		}

		/// <summary>
		/// Return true if it will be possible (absent someone changing permissions while we aren't looking)
		/// to save changes to the specified writing system.
		/// </summary>
		public override bool CanSave(T ws)
		{
			string filePath = GetFilePathFromLanguageTag(ws.LanguageTag);
			if (File.Exists(filePath))
			{
				try
				{
					using (FileStream stream = File.Open(filePath, FileMode.Open))
						stream.Close(); // don't really want to change anything
				}
				catch (UnauthorizedAccessException)
				{
					return false;
				}
			}
			else if (Directory.Exists(PathToWritingSystems))
			{
				try
				{
					// See whether we're allowed to create the file (but if so, get rid of it).
					// Pathologically we might have create but not delete permission...if so,
					// we'll create an empty file and report we can't save. I don't see how to
					// do better.
					using (FileStream stream = File.Create(filePath))
						stream.Close();
					File.Delete(filePath);
				}
				catch (UnauthorizedAccessException)
				{
					return false;
				}
			}
			else
			{
				try
				{
					Directory.CreateDirectory(PathToWritingSystems);
					// Don't try to clean it up again. This is a vanishingly rare case,
					// I don't think it's even possible to create a writing system store without
					// the directory existing.
				}
				catch (UnauthorizedAccessException)
				{
					return false;
				}
			}
			return true;
		}

		public override void Save()
		{
			int wsIgnoreCount = WritingSystemsToIgnore.Count;
			//delete anything we're going to delete first, to prevent losing
			//a WS we want by having it deleted by an old WS we don't want
			//(but which has the same identifier)
			foreach (string id in AllWritingSystems.Where(ws => ws.MarkedForDeletion).Select(ws => ws.Id).ToArray())
				Remove(id);

			// make a copy and then go through that list - SaveDefinition calls Set which
			// may delete and then insert the same writing system - which would change WritingSystemDefinitions
			// and not be allowed in a foreach loop
			foreach (T ws in AllWritingSystems.Where(CanSet).ToArray())
			{
				SaveDefinition(ws);
				OnChangeNotifySharedStore(ws);
			}

			LoadChangedIdsFromExistingWritingSystems();
			if (wsIgnoreCount != WritingSystemsToIgnore.Count)
				WriteGlobalWritingSystemsToIgnore();
			base.Save();
		}

		public override void Set(T ws)
		{
			if (ws == null)
			{
				throw new ArgumentNullException("ws");
			}
			string oldStoreId = ws.Id;
			base.Set(ws);
			//Renaming the file here is a bit ugly as the content has not yet been updated. Thus there
			//may be a mismatch between the filename and the contained rfc5646 tag. Doing it here however
			//helps us avoid having to deal with situations where a writing system id is changed to be
			//identical with the old id of another writing system. This could otherwise lead to dataloss.
			//The inconsistency is resolved on Save()
			if (oldStoreId != ws.Id && File.Exists(GetFilePathFromLanguageTag(oldStoreId)))
				File.Move(GetFilePathFromLanguageTag(oldStoreId), GetFilePathFromLanguageTag(ws.Id));
		}

		public override bool WritingSystemIdHasChanged(string id)
		{
			return _changeLog.HasChangeFor(id);
		}

		public override string WritingSystemIdHasChangedTo(string id)
		{
			// If the id is still in use it has not changed; otherwise consult the change log.
			return AllWritingSystems.Any(ws => ws.LanguageTag.Equals(id)) ? id : _changeLog.GetChangeFor(id);
		}

		protected override void LastChecked(string identifier, DateTime dateModified)
		{
			base.LastChecked(identifier, dateModified);
			WriteGlobalWritingSystemsToIgnore();
		}

		// Persists WritingSystemsToIgnore to "WritingSystemsToIgnore.xml" (only when a global
		// repository is in use); deletes the file when the set is empty.
		private void WriteGlobalWritingSystemsToIgnore()
		{
			if (_globalRepository == null)
				return;

			string path = Path.Combine(PathToWritingSystems, "WritingSystemsToIgnore.xml");
			if (WritingSystemsToIgnore.Count == 0)
			{
				if (File.Exists(path))
					File.Delete(path);
			}
			else
			{
				var doc = new XDocument(new XDeclaration("1.0", "utf-8", "yes"),
					new XElement("WritingSystems",
						WritingSystemsToIgnore.Select(ignoredWs => new XElement("WritingSystem",
							new XAttribute("id", ignoredWs.Key),
							new XAttribute("dateModified", ignoredWs.Value.ToString("s"))))));
				doc.Save(path);
			}
		}

		// Restores WritingSystemsToIgnore from "WritingSystemsToIgnore.xml"; dates are stored
		// in sortable ("s") format and parsed as UTC.
		private void ReadGlobalWritingSystemsToIgnore()
		{
			string path = Path.Combine(PathToWritingSystems, "WritingSystemsToIgnore.xml");
			if (_globalRepository == null || !File.Exists(path))
				return;

			XElement wssElem = XElement.Load(path);
			foreach (XElement wsElem in wssElem.Elements("WritingSystem"))
			{
				DateTime dateModified = DateTime.ParseExact((string) wsElem.Attribute("dateModified"), "s", null,
					DateTimeStyles.AdjustToUniversal);
				WritingSystemsToIgnore[(string) wsElem.Attribute("id")] = dateModified;
			}
		}

		public override IEnumerable<T> CheckForNewerGlobalWritingSystems()
		{
			foreach (T ws in base.CheckForNewerGlobalWritingSystems())
			{
				// load local settings using custom data mappers, so these settings won't be lost if these writing systems are used to
				// replace the existing local writing systems
				foreach (ICustomDataMapper<T> customDataMapper in _customDataMappers)
					customDataMapper.Read(ws);
				yield return ws;
			}
		}
	}
}
using Microsoft.VisualStudio.Services.Agent.Util;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
using Xunit;
using System;

namespace Microsoft.VisualStudio.Services.Agent.Tests
{
    /// <summary>
    /// Verifies that every StringUtil.Loc(...) resource key used in the agent source code
    /// exists in Misc/layoutbin/en-US/strings.json, that the file is pretty-printed
    /// (sorted keys), and that the file contains no unused keys.
    /// </summary>
    public sealed class LocStringsL0
    {
        // Resource keys are restricted to word characters so they can be reliably
        // extracted from source lines by simple string scanning.
        private static readonly Regex ValidKeyRegex = new Regex("^[_a-zA-Z0-9]+$");

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", "Common")]
        public void IsNotMissingCommonLocStrings()
        {
            ValidateLocStrings(new TestHostContext(this), project: "Microsoft.VisualStudio.Services.Agent");
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", "Agent")]
        public void IsNotMissingListenerLocStrings()
        {
            ValidateLocStrings(new TestHostContext(this), project: "Agent.Listener");
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", "Worker")]
        public void IsNotMissingWorkerLocStrings()
        {
            ValidateLocStrings(new TestHostContext(this), project: "Agent.Worker");
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", "LocString")]
        public void IsLocStringsPrettyPrint()
        {
            // Load the strings.
            string stringsFile = Path.Combine(TestUtil.GetSrcPath(), "Misc", "layoutbin", "en-US", "strings.json");
            Assert.True(File.Exists(stringsFile), $"File does not exist: {stringsFile}");
            var resourceDictionary = IOUtil.LoadObject<Dictionary<string, object>>(stringsFile);

            // Sort the dictionary by key.
            Dictionary<string, object> sortedResourceDictionary = new Dictionary<string, object>();
            foreach (var res in resourceDictionary.OrderBy(r => r.Key))
            {
                sortedResourceDictionary[res.Key] = res.Value;
            }

            // Print the sorted copy to a sibling file and compare byte-for-byte.
            string prettyStringsFile = Path.Combine(TestUtil.GetSrcPath(), "Misc", "layoutbin", "en-US", "strings.json.pretty");
            IOUtil.SaveObject(sortedResourceDictionary, prettyStringsFile);
            Assert.True(string.Equals(File.ReadAllText(stringsFile), File.ReadAllText(prettyStringsFile)), $"Original string.json file: {stringsFile} is not pretty printed, replace it with: {prettyStringsFile}");

            // Delete the scratch file on success (kept on failure so it can be copied over).
            File.Delete(prettyStringsFile);
        }

        [Fact]
        [Trait("Level", "L0")]
        [Trait("Category", "LocString")]
        public void FindExtraLocStrings()
        {
            // Load the strings.
            string stringsFile = Path.Combine(TestUtil.GetSrcPath(), "Misc", "layoutbin", "en-US", "strings.json");
            Assert.True(File.Exists(stringsFile), $"File does not exist: {stringsFile}");
            var resourceDictionary = IOUtil.LoadObject<Dictionary<string, object>>(stringsFile);

            // Find all loc string keys in source files.
            //
            // Note, narrow the search to each project folder only. Otherwise intermittent errors occur
            // when recursively searching due to parallel tests are deleting temp folders (DirectoryNotFoundException).
            var keys = new List<string>();
            string[] sourceFiles = Directory.GetFiles(TestUtil.GetProjectPath("Microsoft.VisualStudio.Services.Agent"), "*.cs", SearchOption.AllDirectories)
                .Concat(Directory.GetFiles(TestUtil.GetProjectPath("Agent.Listener"), "*.cs", SearchOption.AllDirectories))
                .Concat(Directory.GetFiles(TestUtil.GetProjectPath("Agent.Worker"), "*.cs", SearchOption.AllDirectories))
                .Concat(Directory.GetFiles(TestUtil.GetProjectPath("Agent.Plugins"), "*.cs", SearchOption.AllDirectories))
                .Concat(Directory.GetFiles(TestUtil.GetProjectPath("Agent.Sdk"), "*.cs", SearchOption.AllDirectories))
                .ToArray();
            foreach (string sourceFile in sourceFiles)
            {
                // Skip files in the obj directory.
                if (sourceFile.Contains(StringUtil.Format("{0}obj{0}", Path.DirectorySeparatorChar)))
                {
                    continue;
                }

                foreach (string line in File.ReadAllLines(sourceFile))
                {
                    // This test only cares about the keys; bad lines are ignored here
                    // (they are validated by ValidateLocStrings).
                    ExtractLocKeys(sourceFile, line, keys, badLines: null);
                }
            }

            // Find extra loc strings (keys in the resource file that no source line uses).
            List<string> extraKeys = resourceDictionary.Keys.Where(x => !keys.Contains(x)).ToList();
            Assert.True(extraKeys.Count == 0, $"Please save company's money by removing extra loc strings:{Environment.NewLine}{string.Join(Environment.NewLine, extraKeys)}");
        }

        // Scans a single source line for "StringUtil.Loc(" calls and collects the quoted
        // resource keys into <paramref name="keys"/>. When the pattern is found but a valid
        // key cannot be extracted, the line is recorded in <paramref name="badLines"/>
        // (when provided).
        private static void ExtractLocKeys(string sourceFile, string line, List<string> keys, List<BadLineInfo> badLines)
        {
            const string Pattern = "StringUtil.Loc(";
            int searchIndex = 0;
            int patternIndex;
            while (searchIndex < line.Length &&
                (patternIndex = line.IndexOf(Pattern, searchIndex)) >= 0)
            {
                // Bump the search index in preparation for the next iteration within the same line.
                searchIndex = patternIndex + Pattern.Length;

                // Extract the resource key.
                int keyStartIndex = patternIndex + Pattern.Length;
                int keyEndIndex;
                if (keyStartIndex + 2 < line.Length &&  // Key should start with a ", be followed by at least
                    line[keyStartIndex] == '"' &&       // one character, and end with a ".
                    (keyEndIndex = line.IndexOf('"', keyStartIndex + 1)) > 0)
                {
                    // Remove the first and last double quotes.
                    keyStartIndex++;
                    keyEndIndex--;
                    string key = line.Substring(
                        startIndex: keyStartIndex,
                        length: keyEndIndex - keyStartIndex + 1);
                    if (ValidKeyRegex.IsMatch(key))
                    {
                        // A valid key was extracted.
                        keys.Add(key);
                        continue;
                    }
                }

                // Something went wrong. The pattern was found, but the resource key could not be determined.
                badLines?.Add(new BadLineInfo { File = sourceFile, Line = line });
            }
        }

        // Asserts that every StringUtil.Loc key used in the given project exists in
        // strings.json and that every Loc call site has an extractable key.
        private void ValidateLocStrings(TestHostContext hc, string project)
        {
            using (hc)
            {
                Tracing trace = hc.GetTrace();
                var keys = new List<string>();
                var badLines = new List<BadLineInfo>();

                // Search for source files within the project.
                trace.Verbose("Searching source files:");
                string[] sourceFiles = Directory.GetFiles(
                    TestUtil.GetProjectPath(project),
                    "*.cs",
                    SearchOption.AllDirectories);
                foreach (string sourceFile in sourceFiles)
                {
                    // Skip files in the obj directory.
                    if (sourceFile.Contains(StringUtil.Format("{0}obj{0}", Path.DirectorySeparatorChar)))
                    {
                        continue;
                    }

                    trace.Verbose($"  {sourceFile}");
                    foreach (string line in File.ReadAllLines(sourceFile))
                    {
                        ExtractLocKeys(sourceFile, line, keys, badLines);
                    }
                }

                // Load the strings.
                string stringsFile = Path.Combine(TestUtil.GetSrcPath(), "Misc", "layoutbin", "en-US", "strings.json");
                Assert.True(File.Exists(stringsFile), $"File does not exist: {stringsFile}");
                var resourceDictionary = IOUtil.LoadObject<Dictionary<string, object>>(stringsFile);

                // Find missing keys.
                string[] missingKeys = keys
                    .Where(x => !resourceDictionary.ContainsKey(x))
                    .OrderBy(x => x)
                    .ToArray();
                if (missingKeys.Length > 0)
                {
                    trace.Error("One or more resource keys missing from resources file:");
                    foreach (string missingKey in missingKeys)
                    {
                        trace.Error($"  {missingKey}");
                    }
                }

                // Validate whether resource keys couldn't be interpreted.
                if (badLines.Count > 0)
                {
                    trace.Error("Bad lines detected. Unable to interpret resource key(s).");
                    IEnumerable<IGrouping<string, BadLineInfo>> badLineGroupings = badLines
                        .GroupBy(x => x.File)
                        .OrderBy(x => x.Key)
                        .ToArray();
                    foreach (IGrouping<string, BadLineInfo> badLineGrouping in badLineGroupings)
                    {
                        trace.Error($"File: {badLineGrouping.First().File}");
                        foreach (BadLineInfo badLine in badLineGrouping)
                        {
                            trace.Error($"  Line: {badLine.Line}");
                        }
                    }
                }

                Assert.True(missingKeys.Length == 0, $"One or more resource keys missing from resources files. Consult the trace log: {hc.TraceFileName}");
                Assert.True(badLines.Count == 0, $"Unable to determine one or more resource keys. Consult the trace log: {hc.TraceFileName}");
            }
        }

        // A Loc call site whose resource key could not be extracted.
        private sealed class BadLineInfo
        {
            public string File { get; set; }
            public string Line { get; set; }
        }
    }
}
//
// Copyright (c) Microsoft and contributors.  All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//

// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.

using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Azure;
using Microsoft.WindowsAzure.Management.Storage.Models;

namespace Microsoft.WindowsAzure.Management.Storage
{
    /// <summary>
    /// The Service Management API includes operations for managing the storage
    /// accounts beneath your subscription.  (see
    /// http://msdn.microsoft.com/en-us/library/windowsazure/ee460790.aspx for
    /// more information)
    /// </summary>
    public partial interface IStorageAccountOperations
    {
        /// <summary>
        /// Abort storage account migration api validates and aborts the given
        /// storage account for IaaS Classic to ARM migration.
        /// </summary>
        /// <param name='storageAccountName'>
        /// Name of storage account to be migrated.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// The response body contains the status of the specified asynchronous
        /// operation, indicating whether it has succeeded, is in progress, or
        /// has failed. Note that this status is distinct from the HTTP status
        /// code returned for the Get Operation Status operation itself.  If
        /// the asynchronous operation succeeded, the response body includes
        /// the HTTP status code for the successful request.  If the
        /// asynchronous operation failed, the response body includes the HTTP
        /// status code for the failed request and error information regarding
        /// the failure.
        /// </returns>
        Task<OperationStatusResponse> AbortMigrationAsync(string storageAccountName, CancellationToken cancellationToken);

        /// <summary>
        /// Abort storage account migration api validates and aborts the given
        /// storage account for IaaS Classic to ARM migration.
        /// </summary>
        /// <param name='storageAccountName'>
        /// Name of storage account to be migrated.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// A standard service response including an HTTP status code and
        /// request ID.
        /// </returns>
        Task<AzureOperationResponse> BeginAbortMigrationAsync(string storageAccountName, CancellationToken cancellationToken);

        /// <summary>
        /// Commit storage account migration api validates and commits the
        /// given storage account for IaaS Classic to ARM migration.
        /// </summary>
        /// <param name='storageAccountName'>
        /// Name of storage account to be migrated.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// A standard service response including an HTTP status code and
        /// request ID.
        /// </returns>
        Task<AzureOperationResponse> BeginCommitMigrationAsync(string storageAccountName, CancellationToken cancellationToken);

        /// <summary>
        /// The Begin Creating Storage Account operation creates a new storage
        /// account in Azure.  (see
        /// http://msdn.microsoft.com/en-us/library/windowsazure/hh264518.aspx
        /// for more information)
        /// </summary>
        /// <param name='parameters'>
        /// Parameters supplied to the Begin Creating Storage Account operation.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// A standard service response including an HTTP status code and
        /// request ID.
        /// </returns>
        Task<AzureOperationResponse> BeginCreatingAsync(StorageAccountCreateParameters parameters, CancellationToken cancellationToken);

        /// <summary>
        /// Prepare storage account migration api validates and prepares the
        /// given storage account for IaaS Classic to ARM migration.
        /// </summary>
        /// <param name='storageAccountName'>
        /// Name of storage account to be migrated.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// A standard service response including an HTTP status code and
        /// request ID.
        /// </returns>
        Task<AzureOperationResponse> BeginPrepareMigrationAsync(string storageAccountName, CancellationToken cancellationToken);

        /// <summary>
        /// The Check Name Availability operation checks if a storage account
        /// name is available for use in Azure.  (see
        /// http://msdn.microsoft.com/en-us/library/windowsazure/jj154125.aspx
        /// for more information)
        /// </summary>
        /// <param name='accountName'>
        /// The desired storage account name to check for availability.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// The response to a storage account check name availability request.
        /// </returns>
        Task<CheckNameAvailabilityResponse> CheckNameAvailabilityAsync(string accountName, CancellationToken cancellationToken);

        /// <summary>
        /// Commit storage account migration api validates and commits the
        /// given storage account for IaaS Classic to ARM migration.
        /// </summary>
        /// <param name='storageAccountName'>
        /// Name of storage account to be migrated.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// The response body contains the status of the specified asynchronous
        /// operation, indicating whether it has succeeded, is in progress, or
        /// has failed. Note that this status is distinct from the HTTP status
        /// code returned for the Get Operation Status operation itself.  If
        /// the asynchronous operation succeeded, the response body includes
        /// the HTTP status code for the successful request.  If the
        /// asynchronous operation failed, the response body includes the HTTP
        /// status code for the failed request and error information regarding
        /// the failure.
        /// </returns>
        Task<OperationStatusResponse> CommitMigrationAsync(string storageAccountName, CancellationToken cancellationToken);

        /// <summary>
        /// The Create Storage Account operation creates a new storage account
        /// in Azure.  (see
        /// http://msdn.microsoft.com/en-us/library/windowsazure/hh264518.aspx
        /// for more information)
        /// </summary>
        /// <param name='parameters'>
        /// Parameters supplied to the Create Storage Account operation.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// The response body contains the status of the specified asynchronous
        /// operation, indicating whether it has succeeded, is in progress, or
        /// has failed. Note that this status is distinct from the HTTP status
        /// code returned for the Get Operation Status operation itself.  If
        /// the asynchronous operation succeeded, the response body includes
        /// the HTTP status code for the successful request.  If the
        /// asynchronous operation failed, the response body includes the HTTP
        /// status code for the failed request and error information regarding
        /// the failure.
        /// </returns>
        Task<OperationStatusResponse> CreateAsync(StorageAccountCreateParameters parameters, CancellationToken cancellationToken);

        /// <summary>
        /// The Delete Storage Account operation deletes the specified storage
        /// account from Azure.  (see
        /// http://msdn.microsoft.com/en-us/library/windowsazure/hh264517.aspx
        /// for more information)
        /// </summary>
        /// <param name='accountName'>
        /// The name of the storage account to be deleted.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// A standard service response including an HTTP status code and
        /// request ID.
        /// </returns>
        Task<AzureOperationResponse> DeleteAsync(string accountName, CancellationToken cancellationToken);

        /// <summary>
        /// The Get Storage Account Properties operation returns system
        /// properties for the specified storage account.  (see
        /// http://msdn.microsoft.com/en-us/library/windowsazure/ee460802.aspx
        /// for more information)
        /// </summary>
        /// <param name='accountName'>
        /// Name of the storage account to get properties for.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// The Get Storage Account Properties operation response.
        /// </returns>
        Task<StorageAccountGetResponse> GetAsync(string accountName, CancellationToken cancellationToken);

        /// <summary>
        /// The Get Storage Keys operation returns the primary and secondary
        /// access keys for the specified storage account.  (see
        /// http://msdn.microsoft.com/en-us/library/windowsazure/ee460785.aspx
        /// for more information)
        /// </summary>
        /// <param name='accountName'>
        /// The name of the desired storage account.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// The primary and secondary access keys for a storage account.
        /// </returns>
        Task<StorageAccountGetKeysResponse> GetKeysAsync(string accountName, CancellationToken cancellationToken);

        /// <summary>
        /// The List Storage Accounts operation lists the storage accounts
        /// available under the current subscription.  (see
        /// http://msdn.microsoft.com/en-us/library/windowsazure/ee460787.aspx
        /// for more information)
        /// </summary>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// The List Storage Accounts operation response.
        /// </returns>
        Task<StorageAccountListResponse> ListAsync(CancellationToken cancellationToken);

        /// <summary>
        /// Prepare storage account migration api validates and prepares the
        /// given storage account for IaaS Classic to ARM migration.
        /// </summary>
        /// <param name='storageAccountName'>
        /// Name of storage account to be migrated.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// The response body contains the status of the specified asynchronous
        /// operation, indicating whether it has succeeded, is in progress, or
        /// has failed. Note that this status is distinct from the HTTP status
        /// code returned for the Get Operation Status operation itself.  If
        /// the asynchronous operation succeeded, the response body includes
        /// the HTTP status code for the successful request.  If the
        /// asynchronous operation failed, the response body includes the HTTP
        /// status code for the failed request and error information regarding
        /// the failure.
        /// </returns>
        Task<OperationStatusResponse> PrepareMigrationAsync(string storageAccountName, CancellationToken cancellationToken);

        /// <summary>
        /// The Regenerate Keys operation regenerates the primary or secondary
        /// access key for the specified storage account.  (see
        /// http://msdn.microsoft.com/en-us/library/windowsazure/ee460795.aspx
        /// for more information)
        /// </summary>
        /// <param name='parameters'>
        /// Parameters supplied to the Regenerate Keys operation.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// The primary and secondary access keys for a storage account.
        /// </returns>
        Task<StorageAccountRegenerateKeysResponse> RegenerateKeysAsync(StorageAccountRegenerateKeysParameters parameters, CancellationToken cancellationToken);

        /// <summary>
        /// The Update Storage Account operation updates the label and the
        /// description, and enables or disables the geo-replication status
        /// for a storage account in Azure.  (see
        /// http://msdn.microsoft.com/en-us/library/windowsazure/hh264516.aspx
        /// for more information)
        /// </summary>
        /// <param name='accountName'>
        /// Name of the storage account to update.
        /// </param>
        /// <param name='parameters'>
        /// Parameters supplied to the Update Storage Account operation.
        /// </param>
        /// <param name='cancellationToken'>
        /// Cancellation token.
        /// </param>
        /// <returns>
        /// A standard service response including an HTTP status code and
        /// request ID.
        /// </returns>
        Task<AzureOperationResponse> UpdateAsync(string accountName, StorageAccountUpdateParameters parameters, CancellationToken cancellationToken);
    }
}
using System;
using BitManipulator;
using Obscur.Core.Cryptography.Support;
using PerfCopy;

namespace Obscur.Core.Cryptography.Ciphers.Stream.Primitives
{
    /// <summary>
    ///     HC-256 is a software-efficient stream cipher created by Hongjun Wu. It
    ///     generates keystream from a 256-bit secret key and a 256-bit initialization
    ///     vector. See http://www.ecrypt.eu.org/stream/p3ciphers/hc/hc256_p3.pdf
    /// </summary>
    /// <remarks>
    ///     Its brother, HC-128, is a third phase candidate in the eStream contest.
    ///     The algorithm is patent-free. No attacks are known as of today (April 2007).
    ///     See http://www.ecrypt.eu.org/stream/hcp3.html
    /// </remarks>
    public class Hc256Engine : StreamCipherEngine
    {
        // HC-256 state: two tables of 1024 32-bit words each.
        private uint[] _p = new uint[1024];
        private uint[] _q = new uint[1024];
        // Step counter; kept in the range [0, 2048) (masked with 0x7FF in Step()).
        // Values < 1024 update table P, values >= 1024 update table Q.
        private uint _cnt;

        public Hc256Engine()
            : base(StreamCipher.Hc256)
        {
        }

        /// <summary>
        ///     Advances the cipher state by one step and returns one 32-bit
        ///     keystream word. Updates one entry of P (when _cnt &lt; 1024) or
        ///     Q (otherwise), then derives the output word from the other table.
        /// </summary>
        private uint Step()
        {
            // Index of the table entry updated this step.
            uint j = _cnt & 0x3FF;
            uint ret;
            if (_cnt < 1024) {
                uint x = _p[(j - 3 & 0x3FF)];
                uint y = _p[(j - 1023 & 0x3FF)];
                _p[j] += _p[(j - 10 & 0x3FF)]
                    + (x.RotateRight_NoChecks(10) ^ y.RotateRight_NoChecks(23))
                    + _q[((x ^ y) & 0x3FF)];
                x = _p[(j - 12 & 0x3FF)];
                // Output word: sum of four Q lookups (one per byte of x) XORed with P[j].
                ret = (_q[x & 0xFF] + _q[((x >> 8) & 0xFF) + 256]
                    + _q[((x >> 16) & 0xFF) + 512] + _q[((x >> 24) & 0xFF) + 768])
                    ^ _p[j];
            } else {
                // Mirror of the branch above with the roles of P and Q swapped.
                uint x = _q[(j - 3 & 0x3FF)];
                uint y = _q[(j - 1023 & 0x3FF)];
                _q[j] += _q[(j - 10 & 0x3FF)]
                    + (x.RotateRight_NoChecks(10) ^ y.RotateRight_NoChecks(23))
                    + _p[((x ^ y) & 0x3FF)];
                x = _q[(j - 12 & 0x3FF)];
                ret = (_p[x & 0xFF] + _p[((x >> 8) & 0xFF) + 256]
                    + _p[((x >> 16) & 0xFF) + 512] + _p[((x >> 24) & 0xFF) + 768])
                    ^ _q[j];
            }
            // Counter wraps modulo 2048 so both tables are visited alternately.
            _cnt = _cnt + 1 & 0x7FF;
            return ret;
        }

        /// <summary>
        ///     Expands Key and Nonce into the P and Q tables and runs the cipher
        ///     for 4096 warm-up steps whose output is discarded.
        /// </summary>
        private void Init()
        {
            if (Key.Length != 32) {
                // Pad/duplicate shorter keys up to 32 bytes.
                // NOTE(review): the second copy writes Key.Length bytes at offset 16,
                // so a key of 17..31 bytes would overflow k — presumably key length is
                // validated upstream to 16 or 32 bytes; confirm.
                byte[] k = new byte[32];
                Array.Copy(Key, 0, k, 0, Key.Length);
                Array.Copy(Key, 0, k, 16, Key.Length);
                Key = k;
            }

            if (Nonce.Length < 32) {
                // Extend a short nonce to 32 bytes by repeating its own prefix.
                byte[] newIV = new byte[32];
                Array.Copy(Nonce, 0, newIV, 0, Nonce.Length);
                Array.Copy(Nonce, 0, newIV, Nonce.Length, newIV.Length - Nonce.Length);
                Nonce = newIV;
            }

            _cnt = 0;
            uint[] w = new uint[2560];

            // Load key into w[0..7] and nonce into w[8..15], little-endian.
            for (int i = 0; i < 32; i++) {
                w[i >> 2] |= ((uint)Key[i] << (8 * (i & 0x3)));
            }
            for (int i = 0; i < 32; i++) {
                w[(i >> 2) + 8] |= ((uint)Nonce[i] << (8 * (i & 0x3)));
            }

            // Expand w[16..2559] with the HC-256 message-schedule-style recurrence.
            for (uint i = 16; i < 2560; i++) {
                uint x = w[i - 2];
                uint y = w[i - 15];
                w[i] = (x.RotateRight_NoChecks(17) ^ x.RotateRight_NoChecks(19) ^ (x >> 10))
                    + w[i - 7]
                    + (y.RotateRight_NoChecks(7) ^ y.RotateRight_NoChecks(18) ^ (y >> 3))
                    + w[i - 16]
                    + i;
            }

            // P = w[512..1535], Q = w[1536..2559].
            w.DeepCopy_NoChecks(512, _p, 0, 1024);
            w.DeepCopy_NoChecks(1536, _q, 0, 1024);

            // Discard the first 4096 keystream words (cipher warm-up).
            for (int i = 0; i < 4096; i++) {
                Step();
            }

            _cnt = 0;
        }

        /// <inheritdoc/>
        public override string AlgorithmName
        {
            get { return "HC-256"; }
        }

        /// <inheritdoc/>
        public override int StateSize
        {
            get { return 32; }
        }

        /// <inheritdoc />
        protected override void InitState()
        {
            Init();
            IsInitialised = true;
        }

        // Buffer holding the current 4-byte keystream word; idx is the next
        // unconsumed byte within it (0 means the buffer is exhausted/fresh).
        private byte[] buf = new byte[4];
        private int idx;

        /// <summary>
        ///     Returns the next single keystream byte, stepping the cipher once
        ///     every fourth call.
        /// </summary>
        private byte GetByte()
        {
            if (idx == 0) {
                Pack.UInt32_To_LE(Step(), buf);
            }
            byte ret = buf[idx];
            idx = (idx + 1) & 3;
            return ret;
        }

        /// <summary>
        ///     XORs <paramref name="len"/> bytes of <paramref name="input"/> with
        ///     keystream into <paramref name="output"/>: leftover buffered keystream
        ///     first, then whole 4-byte words, then a trailing partial word.
        /// </summary>
        /// <param name="input">Source buffer.</param>
        /// <param name="inOff">Offset into <paramref name="input"/>.</param>
        /// <param name="len">Number of bytes to process.</param>
        /// <param name="output">Destination buffer.</param>
        /// <param name="outOff">Offset into <paramref name="output"/>.</param>
        protected internal override void ProcessBytesInternal(
            byte[] input, int inOff, int len, byte[] output, int outOff)
        {
            // Process leftover keystream
            // NOTE(review): this loop does not check len > 0 while draining the
            // leftover word; it assumes len covers any buffered bytes — confirm
            // against the base class's call contract.
            for (; idx != 0; idx = (idx + 1) & 3) {
                output[outOff++] = (byte)(input[inOff++] ^ buf[idx]);
                len--;
            }

            int remainder;
            int blocks = Math.DivRem(len, 4, out remainder);

#if INCLUDE_UNSAFE
            // Fast path: XOR whole 32-bit words via pointers, inlining Step().
            // NOTE(review): uses RotateRight(...) here but RotateRight_NoChecks(...)
            // in Step(); also performs unaligned uint* access — assumed equivalent
            // and acceptable on target platforms; confirm.
            unsafe {
                fixed (byte* inPtr = input) {
                    fixed (byte* outPtr = output) {
                        uint* inUintPtr = (uint*)(inPtr + inOff);
                        uint* outUintPtr = (uint*)(outPtr + outOff);
                        for (int i = 0; i < blocks; i++) {
                            uint j = _cnt & 0x3FF;
                            uint ret;
                            if (_cnt < 1024) {
                                uint x = _p[(j - 3 & 0x3FF)];
                                uint y = _p[(j - 1023 & 0x3FF)];
                                _p[j] += _p[(j - 10 & 0x3FF)]
                                    + (x.RotateRight(10) ^ y.RotateRight(23))
                                    + _q[((x ^ y) & 0x3FF)];
                                x = _p[(j - 12 & 0x3FF)];
                                ret = (_q[x & 0xFF] + _q[((x >> 8) & 0xFF) + 256]
                                    + _q[((x >> 16) & 0xFF) + 512] + _q[((x >> 24) & 0xFF) + 768])
                                    ^ _p[j];
                            } else {
                                uint x = _q[(j - 3 & 0x3FF)];
                                uint y = _q[(j - 1023 & 0x3FF)];
                                _q[j] += _q[(j - 10 & 0x3FF)]
                                    + (x.RotateRight(10) ^ y.RotateRight(23))
                                    + _p[((x ^ y) & 0x3FF)];
                                x = _q[(j - 12 & 0x3FF)];
                                ret = (_p[x & 0xFF] + _p[((x >> 8) & 0xFF) + 256]
                                    + _p[((x >> 16) & 0xFF) + 512] + _p[((x >> 24) & 0xFF) + 768])
                                    ^ _q[j];
                            }
                            _cnt = _cnt + 1 & 0x7FF;
                            outUintPtr[i] = inUintPtr[i] ^ ret;
                        }
                    }
                }
            }
            inOff += 4 * blocks;
            outOff += 4 * blocks;
#else
            // Portable path: one Step() per 4-byte block.
            for (int i = 0; i < blocks; i++) {
                Pack.UInt32_To_LE(Step(), buf);
                output[outOff + 0] = (byte)(input[inOff + 0] ^ buf[0]);
                output[outOff + 1] = (byte)(input[inOff + 1] ^ buf[1]);
                output[outOff + 2] = (byte)(input[inOff + 2] ^ buf[2]);
                output[outOff + 3] = (byte)(input[inOff + 3] ^ buf[3]);
                inOff += 4;
                outOff += 4;
            }
#endif

            // Process remainder input (insufficient width for a full step)
            for (int i = 0; i < remainder; i++) {
                if (idx == 0) {
                    Pack.UInt32_To_LE(Step(), buf);
                }
                output[outOff++] = (byte)(input[inOff++] ^ buf[idx]);
                idx = (idx + 1) & 3;
            }
        }

        /// <inheritdoc/>
        public override void Reset()
        {
            idx = 0;
            Init();
        }

        /// <inheritdoc/>
        public override byte ReturnByte(byte input)
        {
            return (byte)(input ^ GetByte());
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Diagnostics;
using System.Dynamic.Utils;
using System.Reflection;
using System.Reflection.Emit;
using static System.Linq.Expressions.CachedReflectionInfo;

namespace System.Linq.Expressions.Compiler
{
    // This partial contains IL emission for binary expressions:
    // operator dispatch, user-defined operator methods, and lifting over
    // nullable operand types.
    internal partial class LambdaCompiler
    {
        /// <summary>Emits a binary expression with default (no-tail-call) flags.</summary>
        private void EmitBinaryExpression(Expression expr)
        {
            EmitBinaryExpression(expr, CompilationFlags.EmitAsNoTail);
        }

        /// <summary>
        ///     Emits IL for a binary expression: dispatches to a user-defined
        ///     operator method if present, special-cases null (in)equality on
        ///     nullable operands, otherwise emits both operands and the operator.
        /// </summary>
        private void EmitBinaryExpression(Expression expr, CompilationFlags flags)
        {
            BinaryExpression b = (BinaryExpression)expr;

            // Short-circuiting and coalescing nodes are handled elsewhere.
            Debug.Assert(b.NodeType != ExpressionType.AndAlso && b.NodeType != ExpressionType.OrElse && b.NodeType != ExpressionType.Coalesce);

            if (b.Method != null)
            {
                EmitBinaryMethod(b, flags);
                return;
            }

            // For EQ and NE, if there is a user-specified method, use it.
            // Otherwise implement the C# semantics that allow equality
            // comparisons on non-primitive nullable structs that don't
            // overload "=="
            if ((b.NodeType == ExpressionType.Equal || b.NodeType == ExpressionType.NotEqual) && (b.Type == typeof(bool) || b.Type == typeof(bool?)))
            {
                // If we have x==null, x!=null, null==x or null!=x where x is
                // nullable but not null, then generate a call to x.HasValue.
                Debug.Assert(!b.IsLiftedToNull || b.Type == typeof(bool?));
                if (ConstantCheck.IsNull(b.Left) && !ConstantCheck.IsNull(b.Right) && b.Right.Type.IsNullableType())
                {
                    EmitNullEquality(b.NodeType, b.Right, b.IsLiftedToNull);
                    return;
                }

                if (ConstantCheck.IsNull(b.Right) && !ConstantCheck.IsNull(b.Left) && b.Left.Type.IsNullableType())
                {
                    EmitNullEquality(b.NodeType, b.Left, b.IsLiftedToNull);
                    return;
                }

                // For EQ and NE, we can avoid some conversions if we're
                // ultimately just comparing two managed pointers.
                EmitExpression(GetEqualityOperand(b.Left));
                EmitExpression(GetEqualityOperand(b.Right));
            }
            else
            {
                // Otherwise generate it normally
                EmitExpression(b.Left);
                EmitExpression(b.Right);
            }

            EmitBinaryOperator(b.NodeType, b.Left.Type, b.Right.Type, b.Type, b.IsLiftedToNull);
        }

        /// <summary>
        ///     Emits the comparison of a non-null nullable expression against the
        ///     null literal (x == null / x != null).
        /// </summary>
        private void EmitNullEquality(ExpressionType op, Expression e, bool isLiftedToNull)
        {
            Debug.Assert(e.Type.IsNullableType());
            Debug.Assert(op == ExpressionType.Equal || op == ExpressionType.NotEqual);
            // If we are lifted to null then just evaluate the expression for its side effects, discard,
            // and generate null. If we are not lifted to null then generate a call to HasValue.
            if (isLiftedToNull)
            {
                EmitExpressionAsVoid(e);
                _ilg.EmitDefault(typeof(bool?), this);
            }
            else
            {
                EmitAddress(e, e.Type);
                _ilg.EmitHasValue(e.Type);
                if (op == ExpressionType.Equal)
                {
                    // HasValue yields true for non-null; equality against null
                    // is its negation.
                    _ilg.Emit(OpCodes.Ldc_I4_0);
                    _ilg.Emit(OpCodes.Ceq);
                }
            }
        }

        /// <summary>
        ///     Emits a binary expression backed by a user-defined operator method,
        ///     building a lifted wrapper over non-nullable parameters when needed.
        /// </summary>
        private void EmitBinaryMethod(BinaryExpression b, CompilationFlags flags)
        {
            if (b.IsLifted)
            {
                // Rewrite as a call on the unwrapped (non-nullable) operand types
                // and let EmitLift handle the null checks.
                ParameterExpression p1 = Expression.Variable(b.Left.Type.GetNonNullableType(), name: null);
                ParameterExpression p2 = Expression.Variable(b.Right.Type.GetNonNullableType(), name: null);
                MethodCallExpression mc = Expression.Call(null, b.Method, p1, p2);

                Type resultType;
                if (b.IsLiftedToNull)
                {
                    resultType = mc.Type.GetNullableType();
                }
                else
                {
                    switch (b.NodeType)
                    {
                        case ExpressionType.Equal:
                        case ExpressionType.NotEqual:
                        case ExpressionType.LessThan:
                        case ExpressionType.LessThanOrEqual:
                        case ExpressionType.GreaterThan:
                        case ExpressionType.GreaterThanOrEqual:
                            // Non-lifted comparisons must produce a plain bool.
                            if (mc.Type != typeof(bool))
                            {
                                throw Error.ArgumentMustBeBoolean(nameof(b));
                            }
                            resultType = typeof(bool);
                            break;
                        default:
                            resultType = mc.Type.GetNullableType();
                            break;
                    }
                }

                var variables = new ParameterExpression[] { p1, p2 };
                var arguments = new Expression[] { b.Left, b.Right };
                ValidateLift(variables, arguments);
                EmitLift(b.NodeType, resultType, mc, variables, arguments);
            }
            else
            {
                EmitMethodCallExpression(Expression.Call(null, b.Method, b.Left, b.Right), flags);
            }
        }

        /// <summary>
        ///     Emits the operator itself for operands already on the stack,
        ///     choosing the lifted or unlifted implementation.
        /// </summary>
        private void EmitBinaryOperator(ExpressionType op, Type leftType, Type rightType, Type resultType, bool liftedToNull)
        {
            bool leftIsNullable = leftType.IsNullableType();
            bool rightIsNullable = rightType.IsNullableType();

            switch (op)
            {
                case ExpressionType.ArrayIndex:
                    if (rightType != typeof(int))
                    {
                        throw ContractUtils.Unreachable;
                    }

                    EmitGetArrayElement(leftType);
                    return;
                case ExpressionType.Coalesce:
                    throw Error.UnexpectedCoalesceOperator();
            }

            if (leftIsNullable || rightIsNullable)
            {
                EmitLiftedBinaryOp(op, leftType, rightType, resultType, liftedToNull);
            }
            else
            {
                EmitUnliftedBinaryOp(op, leftType, rightType);
            }
        }

        /// <summary>
        ///     Emits the IL opcode(s) for a binary operator on non-nullable
        ///     operands already on the stack. Arithmetic results on small int
        ///     types are converted back to the operand type at the end.
        /// </summary>
        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity")]
        private void EmitUnliftedBinaryOp(ExpressionType op, Type leftType, Type rightType)
        {
            Debug.Assert(!leftType.IsNullableType());
            Debug.Assert(!rightType.IsNullableType());

            if (op == ExpressionType.Equal || op == ExpressionType.NotEqual)
            {
                EmitUnliftedEquality(op, leftType);
                return;
            }

            if (!leftType.IsPrimitive)
            {
                throw Error.OperatorNotImplementedForType(op, leftType);
            }

            switch (op)
            {
                case ExpressionType.Add:
                    _ilg.Emit(OpCodes.Add);
                    break;
                case ExpressionType.AddChecked:
                    // Checked float add is the same as unchecked (no overflow).
                    if (leftType.IsFloatingPoint())
                    {
                        _ilg.Emit(OpCodes.Add);
                    }
                    else if (leftType.IsUnsigned())
                    {
                        _ilg.Emit(OpCodes.Add_Ovf_Un);
                    }
                    else
                    {
                        _ilg.Emit(OpCodes.Add_Ovf);
                    }
                    break;
                case ExpressionType.Subtract:
                    _ilg.Emit(OpCodes.Sub);
                    break;
                case ExpressionType.SubtractChecked:
                    if (leftType.IsFloatingPoint())
                    {
                        _ilg.Emit(OpCodes.Sub);
                    }
                    else if (leftType.IsUnsigned())
                    {
                        _ilg.Emit(OpCodes.Sub_Ovf_Un);

                        // Guaranteed to fit within result type: no conversion
                        return;
                    }
                    else
                    {
                        _ilg.Emit(OpCodes.Sub_Ovf);
                    }
                    break;
                case ExpressionType.Multiply:
                    _ilg.Emit(OpCodes.Mul);
                    break;
                case ExpressionType.MultiplyChecked:
                    if (leftType.IsFloatingPoint())
                    {
                        _ilg.Emit(OpCodes.Mul);
                    }
                    else if (leftType.IsUnsigned())
                    {
                        _ilg.Emit(OpCodes.Mul_Ovf_Un);
                    }
                    else
                    {
                        _ilg.Emit(OpCodes.Mul_Ovf);
                    }
                    break;
                case ExpressionType.Divide:
                    _ilg.Emit(leftType.IsUnsigned() ? OpCodes.Div_Un : OpCodes.Div);
                    break;
                case ExpressionType.Modulo:
                    _ilg.Emit(leftType.IsUnsigned() ? OpCodes.Rem_Un : OpCodes.Rem);

                    // Guaranteed to fit within result type: no conversion
                    return;
                case ExpressionType.And:
                case ExpressionType.AndAlso:
                    _ilg.Emit(OpCodes.And);

                    // Not an arithmetic operation: no conversion
                    return;
                case ExpressionType.Or:
                case ExpressionType.OrElse:
                    _ilg.Emit(OpCodes.Or);

                    // Not an arithmetic operation: no conversion
                    return;
                case ExpressionType.LessThan:
                    _ilg.Emit(leftType.IsUnsigned() ? OpCodes.Clt_Un : OpCodes.Clt);

                    // Not an arithmetic operation: no conversion
                    return;
                case ExpressionType.LessThanOrEqual:
                    // x <= y is emitted as !(x > y); unordered (NaN) comparison
                    // requires the _Un opcode for floats.
                    _ilg.Emit(leftType.IsUnsigned() || leftType.IsFloatingPoint() ? OpCodes.Cgt_Un : OpCodes.Cgt);
                    _ilg.Emit(OpCodes.Ldc_I4_0);
                    _ilg.Emit(OpCodes.Ceq);

                    // Not an arithmetic operation: no conversion
                    return;
                case ExpressionType.GreaterThan:
                    _ilg.Emit(leftType.IsUnsigned() ? OpCodes.Cgt_Un : OpCodes.Cgt);

                    // Not an arithmetic operation: no conversion
                    return;
                case ExpressionType.GreaterThanOrEqual:
                    // x >= y is emitted as !(x < y).
                    _ilg.Emit(leftType.IsUnsigned() || leftType.IsFloatingPoint() ? OpCodes.Clt_Un : OpCodes.Clt);
                    _ilg.Emit(OpCodes.Ldc_I4_0);
                    _ilg.Emit(OpCodes.Ceq);

                    // Not an arithmetic operation: no conversion
                    return;
                case ExpressionType.ExclusiveOr:
                    _ilg.Emit(OpCodes.Xor);

                    // Not an arithmetic operation: no conversion
                    return;
                case ExpressionType.LeftShift:
                    if (rightType != typeof(int))
                    {
                        throw ContractUtils.Unreachable;
                    }

                    EmitShiftMask(leftType);
                    _ilg.Emit(OpCodes.Shl);
                    break;
                case ExpressionType.RightShift:
                    if (rightType != typeof(int))
                    {
                        throw ContractUtils.Unreachable;
                    }

                    EmitShiftMask(leftType);
                    _ilg.Emit(leftType.IsUnsigned() ? OpCodes.Shr_Un : OpCodes.Shr);

                    // Guaranteed to fit within result type: no conversion
                    return;
                default:
                    throw Error.UnhandledBinary(op, nameof(op));
            }

            EmitConvertArithmeticResult(op, leftType);
        }

        // Shift operations have undefined behavior if the shift amount exceeds
        // the number of bits in the value operand. See CLI III.3.58 and C# 7.9
        // for the bit mask used below.
        private void EmitShiftMask(Type leftType)
        {
            int mask = leftType.IsInteger64() ? 0x3F : 0x1F;
            _ilg.EmitPrimitive(mask);
            _ilg.Emit(OpCodes.And);
        }

        // Binary/unary operations on 8 and 16 bit operand types will leave a
        // 32-bit value on the stack, because that's how IL works. For these
        // cases, we need to cast it back to the resultType, possibly using a
        // checked conversion if the original operator was convert
        private void EmitConvertArithmeticResult(ExpressionType op, Type resultType)
        {
            Debug.Assert(!resultType.IsNullableType());

            switch (resultType.GetTypeCode())
            {
                case TypeCode.Byte:
                    _ilg.Emit(IsChecked(op) ? OpCodes.Conv_Ovf_U1 : OpCodes.Conv_U1);
                    break;
                case TypeCode.SByte:
                    _ilg.Emit(IsChecked(op) ? OpCodes.Conv_Ovf_I1 : OpCodes.Conv_I1);
                    break;
                case TypeCode.UInt16:
                    _ilg.Emit(IsChecked(op) ? OpCodes.Conv_Ovf_U2 : OpCodes.Conv_U2);
                    break;
                case TypeCode.Int16:
                    _ilg.Emit(IsChecked(op) ? OpCodes.Conv_Ovf_I2 : OpCodes.Conv_I2);
                    break;
            }
        }

        /// <summary>
        ///     Emits ceq-based equality for two non-nullable operands on the
        ///     stack; NotEqual is ceq followed by logical negation.
        /// </summary>
        private void EmitUnliftedEquality(ExpressionType op, Type type)
        {
            Debug.Assert(op == ExpressionType.Equal || op == ExpressionType.NotEqual);
            if (!type.IsPrimitive && type.IsValueType && !type.IsEnum)
            {
                throw Error.OperatorNotImplementedForType(op, type);
            }

            _ilg.Emit(OpCodes.Ceq);
            if (op == ExpressionType.NotEqual)
            {
                _ilg.Emit(OpCodes.Ldc_I4_0);
                _ilg.Emit(OpCodes.Ceq);
            }
        }

        /// <summary>
        ///     Emits a binary operator where at least one operand type is
        ///     nullable, dispatching to the appropriate lifted strategy.
        /// </summary>
        [System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity")]
        private void EmitLiftedBinaryOp(ExpressionType op, Type leftType, Type rightType, Type resultType, bool liftedToNull)
        {
            Debug.Assert(leftType.IsNullableType() || rightType.IsNullableType());
            switch (op)
            {
                case ExpressionType.And:
                    // bool? And/Or have special three-valued-logic semantics.
                    if (leftType == typeof(bool?))
                    {
                        EmitLiftedBooleanAnd();
                    }
                    else
                    {
                        EmitLiftedBinaryArithmetic(op, leftType, rightType, resultType);
                    }
                    break;
                case ExpressionType.Or:
                    if (leftType == typeof(bool?))
                    {
                        EmitLiftedBooleanOr();
                    }
                    else
                    {
                        EmitLiftedBinaryArithmetic(op, leftType, rightType, resultType);
                    }
                    break;
                case ExpressionType.ExclusiveOr:
                case ExpressionType.Add:
                case ExpressionType.AddChecked:
                case ExpressionType.Subtract:
                case ExpressionType.SubtractChecked:
                case ExpressionType.Multiply:
                case ExpressionType.MultiplyChecked:
                case ExpressionType.Divide:
                case ExpressionType.Modulo:
                case ExpressionType.LeftShift:
                case ExpressionType.RightShift:
                    EmitLiftedBinaryArithmetic(op, leftType, rightType, resultType);
                    break;
                case ExpressionType.LessThan:
                case ExpressionType.LessThanOrEqual:
                case ExpressionType.GreaterThan:
                case ExpressionType.GreaterThanOrEqual:
                case ExpressionType.Equal:
                case ExpressionType.NotEqual:
                    Debug.Assert(leftType == rightType);
                    if (liftedToNull)
                    {
                        Debug.Assert(resultType == typeof(bool?));
                        EmitLiftedToNullRelational(op, leftType);
                    }
                    else
                    {
                        Debug.Assert(resultType == typeof(bool));
                        EmitLiftedRelational(op, leftType);
                    }
                    break;
                case ExpressionType.AndAlso:
                case ExpressionType.OrElse:
                default:
                    throw ContractUtils.Unreachable;
            }
        }

        private void EmitLiftedRelational(ExpressionType op, Type type)
        {
            // Equal is (left.GetValueOrDefault() == right.GetValueOrDefault()) & (left.HasValue == right.HasValue)
            // NotEqual is !((left.GetValueOrDefault() == right.GetValueOrDefault()) & (left.HasValue == right.HasValue))
            // Others are (left.GetValueOrDefault() op right.GetValueOrDefault()) & (left.HasValue & right.HasValue)

            bool invert = op == ExpressionType.NotEqual;
            if (invert)
            {
                // NotEqual is computed as the negation of Equal at the end.
                op = ExpressionType.Equal;
            }

            LocalBuilder locLeft = GetLocal(type);
            LocalBuilder locRight = GetLocal(type);

            _ilg.Emit(OpCodes.Stloc, locRight);
            _ilg.Emit(OpCodes.Stloc, locLeft);
            _ilg.Emit(OpCodes.Ldloca, locLeft);
            _ilg.EmitGetValueOrDefault(type);
            _ilg.Emit(OpCodes.Ldloca, locRight);
            _ilg.EmitGetValueOrDefault(type);
            Type unnullable = type.GetNonNullableType();
            EmitUnliftedBinaryOp(op, unnullable, unnullable);
            _ilg.Emit(OpCodes.Ldloca, locLeft);
            _ilg.EmitHasValue(type);
            _ilg.Emit(OpCodes.Ldloca, locRight);
            _ilg.EmitHasValue(type);
            FreeLocal(locLeft);
            FreeLocal(locRight);
            // For Equal: HasValue flags must match (ceq); otherwise both must
            // be present (and).
            _ilg.Emit(op == ExpressionType.Equal ? OpCodes.Ceq : OpCodes.And);
            _ilg.Emit(OpCodes.And);
            if (invert)
            {
                _ilg.Emit(OpCodes.Ldc_I4_0);
                _ilg.Emit(OpCodes.Ceq);
            }
        }

        private void EmitLiftedToNullRelational(ExpressionType op, Type type)
        {
            // (left.HasValue & right.HasValue) ? left.GetValueOrDefault() op right.GetValueOrDefault() : default(bool?)
            Label notNull = _ilg.DefineLabel();
            Label end = _ilg.DefineLabel();
            LocalBuilder locLeft = GetLocal(type);
            LocalBuilder locRight = GetLocal(type);

            _ilg.Emit(OpCodes.Stloc, locRight);
            _ilg.Emit(OpCodes.Stloc, locLeft);
            _ilg.Emit(OpCodes.Ldloca, locLeft);
            _ilg.EmitHasValue(type);
            _ilg.Emit(OpCodes.Ldloca, locRight);
            _ilg.EmitHasValue(type);
            _ilg.Emit(OpCodes.And);
            _ilg.Emit(OpCodes.Brtrue_S, notNull);
            // Either operand null: the lifted result is null (default bool?).
            _ilg.EmitDefault(typeof(bool?), this);
            _ilg.Emit(OpCodes.Br_S, end);
            _ilg.MarkLabel(notNull);
            _ilg.Emit(OpCodes.Ldloca, locLeft);
            _ilg.EmitGetValueOrDefault(type);
            _ilg.Emit(OpCodes.Ldloca, locRight);
            _ilg.EmitGetValueOrDefault(type);
            FreeLocal(locLeft);
            FreeLocal(locRight);
            Type unnullable = type.GetNonNullableType();
            EmitUnliftedBinaryOp(op, unnullable, unnullable);
            // Wrap the bool comparison result back into a bool?.
            _ilg.Emit(OpCodes.Newobj, Nullable_Boolean_Ctor);
            _ilg.MarkLabel(end);
        }

        /// <summary>
        ///     Emits lifted arithmetic: if all nullable operands have values,
        ///     performs the unlifted op on the unwrapped values and wraps the
        ///     result; otherwise produces a null (default) result.
        /// </summary>
        private void EmitLiftedBinaryArithmetic(ExpressionType op, Type leftType, Type rightType, Type resultType)
        {
            bool leftIsNullable = leftType.IsNullableType();
            bool rightIsNullable = rightType.IsNullableType();

            Debug.Assert(leftIsNullable || rightIsNullable);

            Label labIfNull = _ilg.DefineLabel();
            Label labEnd = _ilg.DefineLabel();
            LocalBuilder locLeft = GetLocal(leftType);
            LocalBuilder locRight = GetLocal(rightType);
            LocalBuilder locResult = GetLocal(resultType);

            // store values (reverse order since they are already on the stack)
            _ilg.Emit(OpCodes.Stloc, locRight);
            _ilg.Emit(OpCodes.Stloc, locLeft);

            // test for null
            // don't use short circuiting
            if (leftIsNullable)
            {
                _ilg.Emit(OpCodes.Ldloca, locLeft);
                _ilg.EmitHasValue(leftType);
            }

            if (rightIsNullable)
            {
                _ilg.Emit(OpCodes.Ldloca, locRight);
                _ilg.EmitHasValue(rightType);
                if (leftIsNullable)
                {
                    _ilg.Emit(OpCodes.And);
                }
            }

            _ilg.Emit(OpCodes.Brfalse_S, labIfNull);

            // do op on values
            if (leftIsNullable)
            {
                _ilg.Emit(OpCodes.Ldloca, locLeft);
                _ilg.EmitGetValueOrDefault(leftType);
            }
            else
            {
                _ilg.Emit(OpCodes.Ldloc, locLeft);
            }

            if (rightIsNullable)
            {
                _ilg.Emit(OpCodes.Ldloca, locRight);
                _ilg.EmitGetValueOrDefault(rightType);
            }
            else
            {
                _ilg.Emit(OpCodes.Ldloc, locRight);
            }

            //RELEASING locLeft locRight
            FreeLocal(locLeft);
            FreeLocal(locRight);

            EmitBinaryOperator(op, leftType.GetNonNullableType(), rightType.GetNonNullableType(), resultType.GetNonNullableType(), liftedToNull: false);

            // construct result type
            ConstructorInfo ci = resultType.GetConstructor(new Type[] { resultType.GetNonNullableType() });
            _ilg.Emit(OpCodes.Newobj, ci);
            _ilg.Emit(OpCodes.Stloc, locResult);
            _ilg.Emit(OpCodes.Br_S, labEnd);

            // if null then create a default one
            _ilg.MarkLabel(labIfNull);
            _ilg.Emit(OpCodes.Ldloca, locResult);
            _ilg.Emit(OpCodes.Initobj, resultType);

            _ilg.MarkLabel(labEnd);

            _ilg.Emit(OpCodes.Ldloc, locResult);

            //RELEASING locResult
            FreeLocal(locResult);
        }

        /// <summary>
        ///     Emits three-valued-logic AND for bool? operands:
        ///     false &amp; anything = false, null when either side is null
        ///     and the other is not false, true only when both are true.
        /// </summary>
        private void EmitLiftedBooleanAnd()
        {
            Type type = typeof(bool?);
            Label labComputeRight = _ilg.DefineLabel();
            Label labReturnFalse = _ilg.DefineLabel();
            Label labReturnNull = _ilg.DefineLabel();
            Label labReturnValue = _ilg.DefineLabel();
            Label labExit = _ilg.DefineLabel();

            // store values (reverse order since they are already on the stack)
            LocalBuilder locLeft = GetLocal(type);
            LocalBuilder locRight = GetLocal(type);
            _ilg.Emit(OpCodes.Stloc, locRight);
            _ilg.Emit(OpCodes.Stloc, locLeft);

            // compute left
            _ilg.Emit(OpCodes.Ldloca, locLeft);
            _ilg.EmitHasValue(type);
            _ilg.Emit(OpCodes.Brfalse, labComputeRight);
            _ilg.Emit(OpCodes.Ldloca, locLeft);
            _ilg.EmitGetValueOrDefault(type);
            _ilg.Emit(OpCodes.Brfalse, labReturnFalse);

            // compute right
            _ilg.MarkLabel(labComputeRight);
            _ilg.Emit(OpCodes.Ldloca, locRight);
            _ilg.EmitHasValue(type);
            _ilg.Emit(OpCodes.Brfalse_S, labReturnNull);
            _ilg.Emit(OpCodes.Ldloca, locRight);

            //RELEASING locRight
            FreeLocal(locRight);

            _ilg.EmitGetValueOrDefault(type);
            _ilg.Emit(OpCodes.Brfalse_S, labReturnFalse);

            // check left for null again
            _ilg.Emit(OpCodes.Ldloca, locLeft);
            _ilg.EmitHasValue(type);
            _ilg.Emit(OpCodes.Brfalse, labReturnNull);

            // return true
            _ilg.Emit(OpCodes.Ldc_I4_1);
            _ilg.Emit(OpCodes.Br_S, labReturnValue);

            // return false
            _ilg.MarkLabel(labReturnFalse);
            _ilg.Emit(OpCodes.Ldc_I4_0);
            _ilg.Emit(OpCodes.Br_S, labReturnValue);

            _ilg.MarkLabel(labReturnValue);
            ConstructorInfo ci = type.GetConstructor(ArrayOfType_Bool);
            _ilg.Emit(OpCodes.Newobj, ci);
            _ilg.Emit(OpCodes.Stloc, locLeft);
            _ilg.Emit(OpCodes.Br, labExit);

            // return null
            _ilg.MarkLabel(labReturnNull);
            _ilg.Emit(OpCodes.Ldloca, locLeft);
            _ilg.Emit(OpCodes.Initobj, type);

            _ilg.MarkLabel(labExit);
            _ilg.Emit(OpCodes.Ldloc, locLeft);

            //RELEASING locLeft
            FreeLocal(locLeft);
        }

        /// <summary>
        ///     Emits three-valued-logic OR for bool? operands:
        ///     true | anything = true, null when either side is null and the
        ///     other is not true, false only when both are false.
        /// </summary>
        private void EmitLiftedBooleanOr()
        {
            Type type = typeof(bool?);
            Label labComputeRight = _ilg.DefineLabel();
            Label labReturnTrue = _ilg.DefineLabel();
            Label labReturnNull = _ilg.DefineLabel();
            Label labReturnValue = _ilg.DefineLabel();
            Label labExit = _ilg.DefineLabel();

            // store values (reverse order since they are already on the stack)
            LocalBuilder locLeft = GetLocal(type);
            LocalBuilder locRight = GetLocal(type);
            _ilg.Emit(OpCodes.Stloc, locRight);
            _ilg.Emit(OpCodes.Stloc, locLeft);

            // compute left
            _ilg.Emit(OpCodes.Ldloca, locLeft);
            _ilg.EmitHasValue(type);
            _ilg.Emit(OpCodes.Brfalse, labComputeRight);
            _ilg.Emit(OpCodes.Ldloca, locLeft);
            _ilg.EmitGetValueOrDefault(type);
            _ilg.Emit(OpCodes.Brtrue, labReturnTrue);

            // compute right
            _ilg.MarkLabel(labComputeRight);
            _ilg.Emit(OpCodes.Ldloca, locRight);
            _ilg.EmitHasValue(type);
            _ilg.Emit(OpCodes.Brfalse_S, labReturnNull);
            _ilg.Emit(OpCodes.Ldloca, locRight);

            //RELEASING locRight
            FreeLocal(locRight);

            _ilg.EmitGetValueOrDefault(type);
            _ilg.Emit(OpCodes.Brtrue_S, labReturnTrue);

            // check left for null again
            _ilg.Emit(OpCodes.Ldloca, locLeft);
            _ilg.EmitHasValue(type);
            _ilg.Emit(OpCodes.Brfalse, labReturnNull);

            // return false
            _ilg.Emit(OpCodes.Ldc_I4_0);
            _ilg.Emit(OpCodes.Br_S, labReturnValue);

            // return true
            _ilg.MarkLabel(labReturnTrue);
            _ilg.Emit(OpCodes.Ldc_I4_1);
            _ilg.Emit(OpCodes.Br_S, labReturnValue);

            _ilg.MarkLabel(labReturnValue);
            ConstructorInfo ci = type.GetConstructor(ArrayOfType_Bool);
            _ilg.Emit(OpCodes.Newobj, ci);
            _ilg.Emit(OpCodes.Stloc, locLeft);
            _ilg.Emit(OpCodes.Br, labExit);

            // return null
            _ilg.MarkLabel(labReturnNull);
            _ilg.Emit(OpCodes.Ldloca, locLeft);
            _ilg.Emit(OpCodes.Initobj, type);

            _ilg.MarkLabel(labExit);
            _ilg.Emit(OpCodes.Ldloc, locLeft);

            //RELEASING locLeft
            FreeLocal(locLeft);
        }
    }
}
using System;
using System.Collections;
using System.IO;

using Org.BouncyCastle.Asn1.X509;
using Org.BouncyCastle.Crypto.Agreement;
using Org.BouncyCastle.Crypto.Generators;
using Org.BouncyCastle.Crypto.Parameters;
using Org.BouncyCastle.Math;
using Org.BouncyCastle.Security;
using Org.BouncyCastle.Utilities;

namespace Org.BouncyCastle.Crypto.Tls
{
    /// <summary>
    ///     ECDH key exchange (see RFC 4492). Client-side implementation covering
    ///     the ECDHE_RSA, ECDHE_ECDSA, ECDH_RSA and ECDH_ECDSA key-exchange
    ///     algorithms.
    /// </summary>
    internal class TlsECDHKeyExchange
        : TlsKeyExchange
    {
        protected TlsClientContext context;
        protected KeyExchangeAlgorithm keyExchange;
        // Non-null for the ephemeral (ECDHE_*) variants; null for fixed ECDH.
        protected TlsSigner tlsSigner;

        protected AsymmetricKeyParameter serverPublicKey;
        protected ECPublicKeyParameters ecAgreeServerPublicKey;
        protected TlsAgreementCredentials agreementCredentials;
        // Ephemeral client private key generated in GenerateEphemeralClientKeyExchange.
        protected ECPrivateKeyParameters ecAgreeClientPrivateKey = null;

        internal TlsECDHKeyExchange(TlsClientContext context, KeyExchangeAlgorithm keyExchange)
        {
            // The signer determines how (and whether) the server's key-exchange
            // parameters are signature-verified.
            switch (keyExchange)
            {
                case KeyExchangeAlgorithm.ECDHE_RSA:
                    this.tlsSigner = new TlsRsaSigner();
                    break;
                case KeyExchangeAlgorithm.ECDHE_ECDSA:
                    this.tlsSigner = new TlsECDsaSigner();
                    break;
                case KeyExchangeAlgorithm.ECDH_RSA:
                case KeyExchangeAlgorithm.ECDH_ECDSA:
                    this.tlsSigner = null;
                    break;
                default:
                    throw new ArgumentException("unsupported key exchange algorithm", "keyExchange");
            }

            this.context = context;
            this.keyExchange = keyExchange;
        }

        /// <summary>
        ///     A server certificate is mandatory for all ECDH variants handled
        ///     here, so skipping it is a protocol violation.
        /// </summary>
        public virtual void SkipServerCertificate()
        {
            throw new TlsFatalAlert(AlertDescription.unexpected_message);
        }

        /// <summary>
        ///     Extracts and validates the server's public key from the first
        ///     certificate, checking key usage appropriate to the variant
        ///     (KeyAgreement for fixed ECDH, DigitalSignature for ECDHE).
        /// </summary>
        public virtual void ProcessServerCertificate(Certificate serverCertificate)
        {
            X509CertificateStructure x509Cert = serverCertificate.certs[0];
            SubjectPublicKeyInfo keyInfo = x509Cert.SubjectPublicKeyInfo;

            try
            {
                this.serverPublicKey = PublicKeyFactory.CreateKey(keyInfo);
            }
            catch (Exception)
            {
                throw new TlsFatalAlert(AlertDescription.unsupported_certificate);
            }

            if (tlsSigner == null)
            {
                // Fixed ECDH: the certificate's key IS the agreement key.
                try
                {
                    this.ecAgreeServerPublicKey = ValidateECPublicKey((ECPublicKeyParameters)this.serverPublicKey);
                }
                catch (InvalidCastException)
                {
                    throw new TlsFatalAlert(AlertDescription.certificate_unknown);
                }

                TlsUtilities.ValidateKeyUsage(x509Cert, KeyUsage.KeyAgreement);
            }
            else
            {
                // Ephemeral ECDH: the certificate's key signs the ServerKeyExchange.
                if (!tlsSigner.IsValidPublicKey(this.serverPublicKey))
                {
                    throw new TlsFatalAlert(AlertDescription.certificate_unknown);
                }

                TlsUtilities.ValidateKeyUsage(x509Cert, KeyUsage.DigitalSignature);
            }

            // TODO
            /*
            * Perform various checks per RFC2246 7.4.2: "Unless otherwise specified, the
            * signing algorithm for the certificate must be the same as the algorithm for the
            * certificate key."
            */
        }

        public virtual void SkipServerKeyExchange()
        {
            // do nothing
        }

        // NOTE(review): ProcessServerKeyExchange always rejects here, even for
        // the ECDHE variants which normally carry a ServerKeyExchange message —
        // presumably a subclass handles the ephemeral case; confirm.
        public virtual void ProcessServerKeyExchange(Stream input)
        {
            throw new TlsFatalAlert(AlertDescription.unexpected_message);
        }

        public virtual void ValidateCertificateRequest(CertificateRequest certificateRequest)
        {
            /*
            * RFC 4492 3. [...] The ECDSA_fixed_ECDH and RSA_fixed_ECDH mechanisms are usable
            * with ECDH_ECDSA and ECDH_RSA. Their use with ECDHE_ECDSA and ECDHE_RSA is
            * prohibited because the use of a long-term ECDH client key would jeopardize the
            * forward secrecy property of these algorithms.
            */
            ClientCertificateType[] types = certificateRequest.CertificateTypes;
            foreach (ClientCertificateType type in types)
            {
                switch (type)
                {
                    case ClientCertificateType.rsa_sign:
                    case ClientCertificateType.dss_sign:
                    case ClientCertificateType.ecdsa_sign:
                    case ClientCertificateType.rsa_fixed_ecdh:
                    case ClientCertificateType.ecdsa_fixed_ecdh:
                        break;
                    default:
                        throw new TlsFatalAlert(AlertDescription.illegal_parameter);
                }
            }
        }

        public virtual void SkipClientCredentials()
        {
            this.agreementCredentials = null;
        }

        /// <summary>
        ///     Records agreement credentials (fixed-ECDH client auth) if supplied;
        ///     signer credentials are accepted but need no state here.
        /// </summary>
        public virtual void ProcessClientCredentials(TlsCredentials clientCredentials)
        {
            if (clientCredentials is TlsAgreementCredentials)
            {
                // TODO Validate client cert has matching parameters (see 'AreOnSameCurve')?

                this.agreementCredentials = (TlsAgreementCredentials)clientCredentials;
            }
            else if (clientCredentials is TlsSignerCredentials)
            {
                // OK
            }
            else
            {
                throw new TlsFatalAlert(AlertDescription.internal_error);
            }
        }

        /// <summary>
        ///     Writes the ClientKeyExchange body: empty when the client's
        ///     certificate already fixes its ECDH key, otherwise a freshly
        ///     generated ephemeral public point.
        /// </summary>
        public virtual void GenerateClientKeyExchange(Stream output)
        {
            if (agreementCredentials != null)
            {
                // Fixed ECDH via client certificate: zero-length body
                // (24-bit length prefix of 0).
                TlsUtilities.WriteUint24(0, output);
            }
            else
            {
                GenerateEphemeralClientKeyExchange(ecAgreeServerPublicKey.Parameters, output);
            }
        }

        /// <summary>
        ///     Computes the ECDH shared secret against the server's public key,
        ///     using either the certificate-bound credentials or the ephemeral
        ///     private key generated earlier.
        /// </summary>
        public virtual byte[] GeneratePremasterSecret()
        {
            if (agreementCredentials != null)
            {
                return agreementCredentials.GenerateAgreement(ecAgreeServerPublicKey);
            }

            return CalculateECDHBasicAgreement(ecAgreeServerPublicKey, ecAgreeClientPrivateKey);
        }

        protected virtual bool AreOnSameCurve(ECDomainParameters a, ECDomainParameters b)
        {
            // TODO Move to ECDomainParameters.Equals() or other utility method?
            return a.Curve.Equals(b.Curve) && a.G.Equals(b.G) && a.N.Equals(b.N) && a.H.Equals(b.H);
        }

        /// <summary>Encodes an EC public key as an uncompressed point.</summary>
        protected virtual byte[] ExternalizeKey(ECPublicKeyParameters keyParameters)
        {
            // TODO Add support for compressed encoding and SPF extension
            /*
            * RFC 4492 5.7. ...an elliptic curve point in uncompressed or compressed format.
            * Here, the format MUST conform to what the server has requested through a
            * Supported Point Formats Extension if this extension was used, and MUST be
            * uncompressed if this extension was not used.
            */
            return keyParameters.Q.GetEncoded();
        }

        /// <summary>Generates an EC key pair on the given curve using the context's RNG.</summary>
        protected virtual AsymmetricCipherKeyPair GenerateECKeyPair(ECDomainParameters ecParams)
        {
            ECKeyPairGenerator keyPairGenerator = new ECKeyPairGenerator();
            ECKeyGenerationParameters keyGenerationParameters = new ECKeyGenerationParameters(ecParams,
                context.SecureRandom);
            keyPairGenerator.Init(keyGenerationParameters);
            return keyPairGenerator.GenerateKeyPair();
        }

        /// <summary>
        ///     Generates an ephemeral key pair, keeps the private half for the
        ///     premaster-secret computation, and writes the encoded public point
        ///     (24-bit outer length, then opaque&lt;1..2^8-1&gt; point data).
        /// </summary>
        protected virtual void GenerateEphemeralClientKeyExchange(ECDomainParameters ecParams, Stream output)
        {
            AsymmetricCipherKeyPair ecAgreeClientKeyPair = GenerateECKeyPair(ecParams);
            this.ecAgreeClientPrivateKey = (ECPrivateKeyParameters)ecAgreeClientKeyPair.Private;

            byte[] keData = ExternalizeKey((ECPublicKeyParameters)ecAgreeClientKeyPair.Public);
            // +1 accounts for the opaque8 length byte written by WriteOpaque8.
            TlsUtilities.WriteUint24(keData.Length + 1, output);
            TlsUtilities.WriteOpaque8(keData, output);
        }

        /// <summary>
        ///     Raw ECDH: returns the agreed field element as an unsigned
        ///     big-endian byte array.
        /// </summary>
        protected virtual byte[] CalculateECDHBasicAgreement(ECPublicKeyParameters publicKey,
            ECPrivateKeyParameters privateKey)
        {
            ECDHBasicAgreement basicAgreement = new ECDHBasicAgreement();
            basicAgreement.Init(privateKey);
            BigInteger agreement = basicAgreement.CalculateAgreement(publicKey);
            return BigIntegers.AsUnsignedByteArray(agreement);
        }

        // NOTE(review): no point/curve validation is performed yet — the key is
        // returned as-is; callers must not assume the point has been checked.
        protected virtual ECPublicKeyParameters ValidateECPublicKey(ECPublicKeyParameters key)
        {
            // TODO Check RFC 4492 for validation
            return key;
        }
    }
}
/******************************************************************** The Multiverse Platform is made available under the MIT License. Copyright (c) 2012 The Multiverse Foundation Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. *********************************************************************/ #region license /* DirectShowLib - Provide access to DirectShow interfaces via .NET Copyright (C) 2006 http://sourceforge.net/projects/directshownet/ This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. 
You should have received a copy of the GNU Lesser General Public License
along with this library; if not, write to the Free Software Foundation, Inc.,
51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
#endregion

using System;
using System.Drawing;
using System.Runtime.InteropServices;

namespace DirectShowLib
{
    #region Declarations

    /// <summary>
    /// From DDCOLOR_* defines
    /// </summary>
    [Flags]
    public enum DDColor
    {
        None = 0x00000000,
        Brightness = 0x00000001,
        Contrast = 0x00000002,
        Hue = 0x00000004,
        Saturation = 0x00000008,
        Sharpness = 0x00000010,
        Gamma = 0x00000020,
        ColorEnable = 0x00000040
    }

    /// <summary>
    /// From DDCOLORCONTROL
    /// </summary>
    // Sequential layout: the managed field order mirrors the native
    // DDCOLORCONTROL structure — do not reorder fields.
    [StructLayout(LayoutKind.Sequential)]
    public class DDColorControl
    {
        public int dwSize;          // native structure size field
        public DDColor dwFlags;     // which of the fields below are valid
        public int lBrightness;
        public int lContrast;
        public int lHue;
        public int lSaturation;
        public int lSharpness;
        public int lGamma;
        public int lColorEnable;
        public int dwReserved1;
    }

    /// <summary>
    /// Aspect-ratio handling modes used by the mixer pin configuration.
    /// </summary>
    public enum AspectRatioMode
    {
        Stretched,
        LetterBox,
        Crop,
        StretchedAsPrimary
    }

    #endregion

    #region Interfaces

    /// <summary>
    /// COM interop for IMixerPinConfig2. The inherited IMixerPinConfig members
    /// are re-declared ('new') first so the managed declaration order matches
    /// the native vtable — do not reorder members.
    /// </summary>
    [ComImport, Guid("EBF47182-8764-11d1-9E69-00C04FD7C15B"),
    InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
    public interface IMixerPinConfig2 : IMixerPinConfig
    {
        #region IMixerPinConfig Methods

        [PreserveSig]
        new int SetRelativePosition(
            int dwLeft,
            int dwTop,
            int dwRight,
            int dwBottom);

        [PreserveSig]
        new int GetRelativePosition(
            out int pdwLeft,
            out int pdwTop,
            out int pdwRight,
            out int pdwBottom
            );

        [PreserveSig]
        new int SetZOrder(
            int dwZOrder
            );

        [PreserveSig]
        new int GetZOrder(
            out int pdwZOrder
            );

        [PreserveSig]
        new int SetColorKey(
            [MarshalAs(UnmanagedType.LPStruct)]ColorKey pColorKey
            );

        [PreserveSig]
        new int GetColorKey(
            [Out, MarshalAs(UnmanagedType.LPStruct)] ColorKey pColorKey,
            out int pColor
            );

        [PreserveSig]
        new int SetBlendingParameter(
            int dwBlendingParameter
            );

        [PreserveSig]
        new int GetBlendingParameter(
            out int pdwBlendingParameter
            );

        [PreserveSig]
        new int SetAspectRatioMode(
            AspectRatioMode amAspectRatioMode
            );

        [PreserveSig]
        new int GetAspectRatioMode(
            out AspectRatioMode pamAspectRatioMode
            );

        [PreserveSig]
        new int SetStreamTransparent(
            [In, MarshalAs(UnmanagedType.Bool)] bool bStreamTransparent
            );

        [PreserveSig]
        new int GetStreamTransparent(
            [Out, MarshalAs(UnmanagedType.Bool)] out bool pbStreamTransparent
            );

        #endregion

        // IMixerPinConfig2 additions: overlay surface color controls.
        [PreserveSig]
        int SetOverlaySurfaceColorControls(
            DDColorControl pColorControl
            );

        [PreserveSig]
        int GetOverlaySurfaceColorControls(
            DDColorControl pColorControl
            );
    }

    /// <summary>
    /// COM interop for IMixerPinConfig (overlay mixer pin configuration).
    /// Member order must match the native vtable — do not reorder.
    /// </summary>
    [ComImport, Guid("593CDDE1-0759-11d1-9E69-00C04FD7C15B"),
    InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
    public interface IMixerPinConfig
    {
        [PreserveSig]
        int SetRelativePosition(
            int dwLeft,
            int dwTop,
            int dwRight,
            int dwBottom);

        [PreserveSig]
        int GetRelativePosition(
            out int pdwLeft,
            out int pdwTop,
            out int pdwRight,
            out int pdwBottom
            );

        [PreserveSig]
        int SetZOrder(
            int dwZOrder
            );

        [PreserveSig]
        int GetZOrder(
            out int pdwZOrder
            );

        [PreserveSig]
        int SetColorKey(
            [MarshalAs(UnmanagedType.LPStruct)] ColorKey pColorKey
            );

        [PreserveSig]
        int GetColorKey(
            [Out, MarshalAs(UnmanagedType.LPStruct)] ColorKey pColorKey,
            out int pColor
            );

        [PreserveSig]
        int SetBlendingParameter(
            int dwBlendingParameter
            );

        [PreserveSig]
        int GetBlendingParameter(
            out int pdwBlendingParameter
            );

        [PreserveSig]
        int SetAspectRatioMode(
            AspectRatioMode amAspectRatioMode
            );

        [PreserveSig]
        int GetAspectRatioMode(
            out AspectRatioMode pamAspectRatioMode
            );

        [PreserveSig]
        int SetStreamTransparent(
            [In, MarshalAs(UnmanagedType.Bool)] bool bStreamTransparent
            );

        [PreserveSig]
        int GetStreamTransparent(
            [Out, MarshalAs(UnmanagedType.Bool)] out bool pbStreamTransparent
            );
    }

    #endregion
}
#if !DISABLE_PLAYFABENTITY_API
using System;
using System.Collections.Generic;
using PlayFab.SharedModels;

// NOTE: these are serializable transfer models; field names and declaration
// order are part of the wire/serialized contract — do not rename or reorder.
namespace PlayFab.ProfilesModels
{
    /// <summary>
    /// Effect of a permission statement: grant or refuse the action.
    /// </summary>
    public enum EffectType
    {
        Allow,
        Deny
    }

    /// <summary>
    /// An entity object and its associated meta data.
    /// </summary>
    [Serializable]
    public class EntityDataObject
    {
        /// <summary>
        /// Un-escaped JSON object, if DataAsObject is true.
        /// </summary>
        public object DataObject;
        /// <summary>
        /// Escaped string JSON body of the object, if DataAsObject is default or false.
        /// </summary>
        public string EscapedDataObject;
        /// <summary>
        /// Name of this object.
        /// </summary>
        public string ObjectName;
    }

    /// <summary>
    /// Combined entity type and ID structure which uniquely identifies a single entity.
    /// </summary>
    [Serializable]
    public class EntityKey
    {
        /// <summary>
        /// Unique ID of the entity.
        /// </summary>
        public string Id;
        /// <summary>
        /// Entity type. See https://api.playfab.com/docs/tutorials/entities/entitytypes
        /// </summary>
        public string Type;
    }

    /// <summary>
    /// The chain of owning entities for a profile, from title down to character.
    /// </summary>
    [Serializable]
    public class EntityLineage
    {
        /// <summary>
        /// The Character Id of the associated entity.
        /// </summary>
        public string CharacterId;
        /// <summary>
        /// The Group Id of the associated entity.
        /// </summary>
        public string GroupId;
        /// <summary>
        /// The Master Player Account Id of the associated entity.
        /// </summary>
        public string MasterPlayerAccountId;
        /// <summary>
        /// The Namespace Id of the associated entity.
        /// </summary>
        public string NamespaceId;
        /// <summary>
        /// The Title Id of the associated entity.
        /// </summary>
        public string TitleId;
        /// <summary>
        /// The Title Player Account Id of the associated entity.
        /// </summary>
        public string TitlePlayerAccountId;
    }

    /// <summary>
    /// A single access-policy statement (action, principal, resource, effect).
    /// </summary>
    [Serializable]
    public class EntityPermissionStatement
    {
        /// <summary>
        /// The action this statement affects. May be 'Read', 'Write' or '*' for both read and write.
        /// </summary>
        public string Action;
        /// <summary>
        /// A comment about the statement. Intended solely for bookkeeping and debugging.
        /// </summary>
        public string Comment;
        /// <summary>
        /// Additional conditions to be applied for entity resources.
        /// </summary>
        public object Condition;
        /// <summary>
        /// The effect this statement will have. It may be either Allow or Deny
        /// </summary>
        public EffectType Effect;
        /// <summary>
        /// The principal this statement will affect.
        /// </summary>
        public object Principal;
        /// <summary>
        /// The resource this statement affects. Similar to 'pfrn:data--title![Title ID]/Profile/*'
        /// </summary>
        public string Resource;
    }

    /// <summary>
    /// Full entity profile: identity, lineage, files, objects and permissions.
    /// </summary>
    [Serializable]
    public class EntityProfileBody
    {
        /// <summary>
        /// The creation time of this profile in UTC.
        /// </summary>
        public DateTime Created;
        /// <summary>
        /// The display name of the entity. This field may serve different purposes for different entity types. i.e.: for a title
        /// player account it could represent the display name of the player, whereas on a character it could be character's name.
        /// </summary>
        public string DisplayName;
        /// <summary>
        /// The entity id and type.
        /// </summary>
        public EntityKey Entity;
        /// <summary>
        /// The chain of responsibility for this entity. Deprecated — use Lineage.
        /// </summary>
        public string EntityChain;
        /// <summary>
        /// The files on this profile.
        /// </summary>
        public Dictionary<string,EntityProfileFileMetadata> Files;
        /// <summary>
        /// The language on this profile.
        /// </summary>
        public string Language;
        /// <summary>
        /// The lineage of this profile.
        /// </summary>
        public EntityLineage Lineage;
        /// <summary>
        /// The objects on this profile.
        /// </summary>
        public Dictionary<string,EntityDataObject> Objects;
        /// <summary>
        /// The permissions that govern access to this entity profile and its properties. Only includes permissions set on this
        /// profile, not global statements from titles and namespaces.
        /// </summary>
        public List<EntityPermissionStatement> Permissions;
        /// <summary>
        /// The version number of the profile in persistent storage at the time of the read. Used for optional optimistic
        /// concurrency during update.
        /// </summary>
        public int VersionNumber;
    }

    /// <summary>
    /// An entity file's meta data. To get a download URL call File/GetFiles API.
    /// </summary>
    [Serializable]
    public class EntityProfileFileMetadata
    {
        /// <summary>
        /// Checksum value for the file
        /// </summary>
        public string Checksum;
        /// <summary>
        /// Name of the file
        /// </summary>
        public string FileName;
        /// <summary>
        /// Last UTC time the file was modified
        /// </summary>
        public DateTime LastModified;
        /// <summary>
        /// Storage service's reported byte count
        /// </summary>
        public int Size;
    }

    /// <summary>
    /// Given an entity type and entity identifier will retrieve the profile from the entity store. If the profile being
    /// retrieved is the caller's, then the read operation is consistent, if not it is an inconsistent read. An inconsistent
    /// read means that we do not guarantee all committed writes have occurred before reading the profile, allowing for a stale
    /// read. If consistency is important the Version Number on the result can be used to compare which version of the profile
    /// any reader has.
    /// </summary>
    [Serializable]
    public class GetEntityProfileRequest : PlayFabRequestCommon
    {
        /// <summary>
        /// Determines whether the objects will be returned as an escaped JSON string or as a un-escaped JSON object. Default is
        /// JSON string.
        /// </summary>
        public bool? DataAsObject;
        /// <summary>
        /// The entity to perform this action on.
        /// </summary>
        public EntityKey Entity;
    }

    [Serializable]
    public class GetEntityProfileResponse : PlayFabResultCommon
    {
        /// <summary>
        /// Entity profile
        /// </summary>
        public EntityProfileBody Profile;
    }

    /// <summary>
    /// Given a set of entity types and entity identifiers will retrieve all readable profiles properties for the caller.
    /// Profiles that the caller is not allowed to read will silently not be included in the results.
    /// </summary>
    [Serializable]
    public class GetEntityProfilesRequest : PlayFabRequestCommon
    {
        /// <summary>
        /// Determines whether the objects will be returned as an escaped JSON string or as a un-escaped JSON object. Default is
        /// JSON string.
        /// </summary>
        public bool? DataAsObject;
        /// <summary>
        /// Entity keys of the profiles to load. Must be between 1 and 25
        /// </summary>
        public List<EntityKey> Entities;
    }

    [Serializable]
    public class GetEntityProfilesResponse : PlayFabResultCommon
    {
        /// <summary>
        /// Entity profiles
        /// </summary>
        public List<EntityProfileBody> Profiles;
    }

    /// <summary>
    /// Retrieves the title access policy that is used before the profile's policy is inspected during a request. If never
    /// customized this will return the default starter policy built by PlayFab.
    /// </summary>
    [Serializable]
    public class GetGlobalPolicyRequest : PlayFabRequestCommon
    {
    }

    [Serializable]
    public class GetGlobalPolicyResponse : PlayFabResultCommon
    {
        /// <summary>
        /// The permissions that govern access to all entities under this title or namespace.
        /// </summary>
        public List<EntityPermissionStatement> Permissions;
    }

    /// <summary>
    /// Kind of change applied to a profile property by an update operation.
    /// </summary>
    public enum OperationTypes
    {
        Created,
        Updated,
        Deleted,
        None
    }

    /// <summary>
    /// This will set the access policy statements on the given entity profile. This is not additive, any existing statements
    /// will be replaced with the statements in this request.
    /// </summary>
    [Serializable]
    public class SetEntityProfilePolicyRequest : PlayFabRequestCommon
    {
        /// <summary>
        /// The entity to perform this action on.
        /// </summary>
        public EntityKey Entity;
        /// <summary>
        /// The statements to include in the access policy.
        /// </summary>
        public List<EntityPermissionStatement> Statements;
    }

    [Serializable]
    public class SetEntityProfilePolicyResponse : PlayFabResultCommon
    {
        /// <summary>
        /// The permissions that govern access to this entity profile and its properties. Only includes permissions set on this
        /// profile, not global statements from titles and namespaces.
        /// </summary>
        public List<EntityPermissionStatement> Permissions;
    }

    /// <summary>
    /// Updates the title access policy that is used before the profile's policy is inspected during a request. Policies are
    /// compiled and cached for several minutes so an update here may not be reflected in behavior for a short time.
    /// </summary>
    [Serializable]
    public class SetGlobalPolicyRequest : PlayFabRequestCommon
    {
        /// <summary>
        /// The permissions that govern access to all entities under this title or namespace.
        /// </summary>
        public List<EntityPermissionStatement> Permissions;
    }

    [Serializable]
    public class SetGlobalPolicyResponse : PlayFabResultCommon
    {
    }

    /// <summary>
    /// Given an entity profile, will update its language to the one passed in if the profile's version is at least the one
    /// passed in.
    /// </summary>
    [Serializable]
    public class SetProfileLanguageRequest : PlayFabRequestCommon
    {
        /// <summary>
        /// The entity to perform this action on.
        /// </summary>
        public EntityKey Entity;
        /// <summary>
        /// The expected version of a profile to perform this update on
        /// </summary>
        public int ExpectedVersion;
        /// <summary>
        /// The language to set on the given entity. Deletes the profile's language if passed in a null string.
        /// </summary>
        public string Language;
    }

    [Serializable]
    public class SetProfileLanguageResponse : PlayFabResultCommon
    {
        /// <summary>
        /// The type of operation that occurred on the profile's language
        /// </summary>
        public OperationTypes? OperationResult;
        /// <summary>
        /// The updated version of the profile after the language update
        /// </summary>
        public int? VersionNumber;
    }
}
#endif
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.

using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Http.Features;
using Microsoft.AspNetCore.Testing;
using Xunit;

namespace Microsoft.AspNetCore.Server.HttpSys
{
    /// <summary>
    /// End-to-end tests for HTTP.sys response-body semantics (chunking,
    /// Content-Length enforcement, OnStarting callbacks). Each test spins up a
    /// real server via Utilities.CreateHttpServer and issues a client request.
    /// NOTE(review): several tests coordinate server and client via
    /// TaskCompletionSource — the statement order inside the app delegates is
    /// part of the behavior under test.
    /// </summary>
    public class ResponseBodyTests
    {
        // StartAsync must lock headers and fire OnStarting before any body write.
        [ConditionalFact]
        public async Task ResponseBody_StartAsync_LocksHeadersAndTriggersOnStarting()
        {
            using (Utilities.CreateHttpServer(out var address, async httpContext =>
            {
                var startingTcs = new TaskCompletionSource<int>(TaskCreationOptions.RunContinuationsAsynchronously);
                httpContext.Response.OnStarting(() => { startingTcs.SetResult(0); return Task.CompletedTask; });
                await httpContext.Response.StartAsync();
                Assert.True(httpContext.Response.HasStarted);
                Assert.True(httpContext.Response.Headers.IsReadOnly);
                await startingTcs.Task.DefaultTimeout();
                await httpContext.Response.WriteAsync("Hello World");
            }))
            {
                var response = await SendRequestAsync(address);
                Assert.Equal(200, (int)response.StatusCode);
                Assert.Equal(new Version(1, 1), response.Version);
                IEnumerable<string> ignored;
                // No Content-Length was set, so the response must be chunked.
                Assert.False(response.Content.Headers.TryGetValues("content-length", out ignored), "Content-Length");
                Assert.True(response.Headers.TransferEncodingChunked.HasValue, "Chunked");
                Assert.Equal("Hello World", await response.Content.ReadAsStringAsync());
            }
        }

        // CompleteAsync with no body must still fire OnStarting and lock headers.
        [ConditionalFact]
        public async Task ResponseBody_CompleteAsync_TriggersOnStartingAndLocksHeaders()
        {
            var responseReceived = new TaskCompletionSource<int>(TaskCreationOptions.RunContinuationsAsynchronously);
            using (Utilities.CreateHttpServer(out var address, async httpContext =>
            {
                var startingTcs = new TaskCompletionSource<int>(TaskCreationOptions.RunContinuationsAsynchronously);
                httpContext.Response.OnStarting(() => { startingTcs.SetResult(0); return Task.CompletedTask; });
                await httpContext.Response.CompleteAsync();
                Assert.True(httpContext.Response.HasStarted);
                Assert.True(httpContext.Response.Headers.IsReadOnly);
                await startingTcs.Task.DefaultTimeout();
                // Hold the app delegate open until the client has read the response.
                await responseReceived.Task.DefaultTimeout();
            }))
            {
                var response = await SendRequestAsync(address);
                Assert.Equal(200, (int)response.StatusCode);
                Assert.Equal(new Version(1, 1), response.Version);
                Assert.Equal(0, response.Content.Headers.ContentLength);
                responseReceived.SetResult(0);
            }
        }

        // CompleteAsync must flush data previously advanced into the pipe.
        [ConditionalFact]
        public async Task ResponseBody_CompleteAsync_FlushesThePipe()
        {
            var responseReceived = new TaskCompletionSource<int>(TaskCreationOptions.RunContinuationsAsynchronously);
            using (Utilities.CreateHttpServer(out var address, async httpContext =>
            {
                var writer = httpContext.Response.BodyWriter;
                var memory = writer.GetMemory();
                writer.Advance(memory.Length);
                await httpContext.Response.CompleteAsync();
                await responseReceived.Task.DefaultTimeout();
            }))
            {
                var response = await SendRequestAsync(address);
                Assert.Equal(200, (int)response.StatusCode);
                Assert.Equal(new Version(1, 1), response.Version);
                Assert.True(0 < (await response.Content.ReadAsByteArrayAsync()).Length);
                responseReceived.SetResult(0);
            }
        }

        // Data advanced into the BodyWriter must be flushed when the app returns.
        [ConditionalFact]
        public async Task ResponseBody_PipeAdapter_AutomaticallyFlushed()
        {
            using (Utilities.CreateHttpServer(out var address, httpContext =>
            {
                var writer = httpContext.Response.BodyWriter;
                var memory = writer.GetMemory();
                writer.Advance(memory.Length);
                return Task.CompletedTask;
            }))
            {
                var response = await SendRequestAsync(address);
                Assert.Equal(200, (int)response.StatusCode);
                Assert.Equal(new Version(1, 1), response.Version);
                Assert.True(0 < (await response.Content.ReadAsByteArrayAsync()).Length);
            }
        }

        // Writing with no explicit headers defaults to chunked transfer encoding.
        [ConditionalFact]
        public async Task ResponseBody_WriteNoHeaders_SetsChunked()
        {
            string address;
            using (Utilities.CreateHttpServer(out address, httpContext =>
            {
                httpContext.Features.Get<IHttpBodyControlFeature>().AllowSynchronousIO = true;
                httpContext.Response.Body.Write(new byte[10], 0, 10);
                return httpContext.Response.Body.WriteAsync(new byte[10], 0, 10);
            }))
            {
                var response = await SendRequestAsync(address);
                Assert.Equal(200, (int)response.StatusCode);
                Assert.Equal(new Version(1, 1), response.Version);
                IEnumerable<string> ignored;
                Assert.False(response.Content.Headers.TryGetValues("content-length", out ignored), "Content-Length");
                Assert.True(response.Headers.TransferEncodingChunked.HasValue, "Chunked");
                Assert.Equal(new byte[20], await response.Content.ReadAsByteArrayAsync());
            }
        }

        // An explicit Flush before completion must also produce a chunked response.
        [ConditionalFact]
        public async Task ResponseBody_WriteNoHeadersAndFlush_DefaultsToChunked()
        {
            string address;
            using (Utilities.CreateHttpServer(out address, async httpContext =>
            {
                httpContext.Features.Get<IHttpBodyControlFeature>().AllowSynchronousIO = true;
                httpContext.Response.Body.Write(new byte[10], 0, 10);
                await httpContext.Response.Body.WriteAsync(new byte[10], 0, 10);
                await httpContext.Response.Body.FlushAsync();
            }))
            {
                var response = await SendRequestAsync(address);
                Assert.Equal(200, (int)response.StatusCode);
                Assert.Equal(new Version(1, 1), response.Version);
                IEnumerable<string> ignored;
                Assert.False(response.Content.Headers.TryGetValues("content-length", out ignored), "Content-Length");
                Assert.True(response.Headers.TransferEncodingChunked.Value, "Chunked");
                Assert.Equal(new byte[20], await response.Content.ReadAsByteArrayAsync());
            }
        }

        // When the app sets Transfer-Encoding itself (mixed case on purpose),
        // the server passes its pre-chunked body through untouched.
        [ConditionalFact]
        public async Task ResponseBody_WriteChunked_ManuallyChunked()
        {
            string address;
            using (Utilities.CreateHttpServer(out address, async httpContext =>
            {
                httpContext.Response.Headers["transfeR-Encoding"] = "CHunked";
                Stream stream = httpContext.Response.Body;
                var responseBytes = Encoding.ASCII.GetBytes("10\r\nManually Chunked\r\n0\r\n\r\n");
                await stream.WriteAsync(responseBytes, 0, responseBytes.Length);
            }))
            {
                var response = await SendRequestAsync(address);
                Assert.Equal(200, (int)response.StatusCode);
                Assert.Equal(new Version(1, 1), response.Version);
                IEnumerable<string> ignored;
                Assert.False(response.Content.Headers.TryGetValues("content-length", out ignored), "Content-Length");
                Assert.True(response.Headers.TransferEncodingChunked.Value, "Chunked");
                Assert.Equal("Manually Chunked", await response.Content.ReadAsStringAsync());
            }
        }

        // An app-set Content-Length (with stray whitespace) is honored verbatim
        // across sync, APM and async write paths.
        [ConditionalFact]
        public async Task ResponseBody_WriteContentLength_PassedThrough()
        {
            string address;
            using (Utilities.CreateHttpServer(out address, async httpContext =>
            {
                httpContext.Features.Get<IHttpBodyControlFeature>().AllowSynchronousIO = true;
                httpContext.Response.Headers["Content-lenGth"] = " 30 ";
                Stream stream = httpContext.Response.Body;
                stream.EndWrite(stream.BeginWrite(new byte[10], 0, 10, null, null));
                stream.Write(new byte[10], 0, 10);
                await stream.WriteAsync(new byte[10], 0, 10);
            }))
            {
                var response = await SendRequestAsync(address);
                Assert.Equal(200, (int)response.StatusCode);
                Assert.Equal(new Version(1, 1), response.Version);
                IEnumerable<string> contentLength;
                Assert.True(response.Content.Headers.TryGetValues("content-length", out contentLength), "Content-Length");
                Assert.Equal("30", contentLength.First());
                Assert.Null(response.Headers.TransferEncodingChunked);
                Assert.Equal(new byte[30], await response.Content.ReadAsByteArrayAsync());
            }
        }

        // Declaring a Content-Length and writing nothing must fail the request.
        [ConditionalFact]
        public async Task ResponseBody_WriteContentLengthNoneWritten_Throws()
        {
            string address;
            using (Utilities.CreateHttpServer(out address, httpContext =>
            {
                httpContext.Response.Headers["Content-lenGth"] = " 20 ";
                return Task.FromResult(0);
            }))
            {
                await Assert.ThrowsAsync<HttpRequestException>(() => SendRequestAsync(address));
            }
        }

        // Writing fewer bytes than the declared Content-Length must fail the request.
        [ConditionalFact]
        public async Task ResponseBody_WriteContentLengthNotEnoughWritten_Throws()
        {
            string address;
            using (Utilities.CreateHttpServer(out address, httpContext =>
            {
                httpContext.Response.Headers["Content-lenGth"] = " 20 ";
                return httpContext.Response.Body.WriteAsync(new byte[5], 0, 5);
            }))
            {
                await Assert.ThrowsAsync<HttpRequestException>(async () => await SendRequestAsync(address));
            }
        }

        // A write that would exceed the declared Content-Length throws server-side.
        [ConditionalFact]
        public async Task ResponseBody_WriteContentLengthTooMuchWritten_Throws()
        {
            var completed = false;
            string address;
            using (Utilities.CreateHttpServer(out address, async httpContext =>
            {
                httpContext.Response.Headers["Content-lenGth"] = " 10 ";
                await httpContext.Response.Body.WriteAsync(new byte[5], 0, 5);
                await Assert.ThrowsAsync<InvalidOperationException>(() => httpContext.Response.Body.WriteAsync(new byte[6], 0, 6));
                completed = true;
            }))
            {
                await Assert.ThrowsAsync<HttpRequestException>(() => SendRequestAsync(address));
                Assert.True(completed);
            }
        }

        // Once the full Content-Length has been sent, an extra write throws in the
        // app, but the client still receives the complete 10-byte response.
        [ConditionalFact]
        public async Task ResponseBody_WriteContentLengthExtraWritten_Throws()
        {
            var requestThrew = new TaskCompletionSource<bool>(TaskCreationOptions.RunContinuationsAsynchronously);
            using (Utilities.CreateHttpServer(out var address, httpContext =>
            {
                try
                {
                    httpContext.Features.Get<IHttpBodyControlFeature>().AllowSynchronousIO = true;
                    httpContext.Response.Headers["Content-lenGth"] = " 10 ";
                    httpContext.Response.Body.Write(new byte[10], 0, 10);
                    httpContext.Response.Body.Write(new byte[9], 0, 9);
                    requestThrew.SetResult(false);
                }
                catch (Exception)
                {
                    requestThrew.SetResult(true);
                }
                return Task.FromResult(0);
            }))
            {
                // The full response is received.
                HttpResponseMessage response = await SendRequestAsync(address);
                Assert.Equal(200, (int)response.StatusCode);
                Assert.Equal(new Version(1, 1), response.Version);
                IEnumerable<string> contentLength;
                Assert.True(response.Content.Headers.TryGetValues("content-length", out contentLength), "Content-Length");
                Assert.Equal("10", contentLength.First());
                Assert.Null(response.Headers.TransferEncodingChunked);
                Assert.Equal(new byte[10], await response.Content.ReadAsByteArrayAsync());

                Assert.True(await requestThrew.Task.WaitAsync(TimeSpan.FromSeconds(10)));
            }
        }

        // Synchronous Write must invoke the OnStarting callback.
        [ConditionalFact]
        public async Task ResponseBody_Write_TriggersOnStarting()
        {
            var onStartingCalled = false;
            string address;
            using (Utilities.CreateHttpServer(out address, httpContext =>
            {
                httpContext.Features.Get<IHttpBodyControlFeature>().AllowSynchronousIO = true;
                httpContext.Response.OnStarting(state =>
                {
                    onStartingCalled = true;
                    Assert.Same(state, httpContext);
                    return Task.FromResult(0);
                }, httpContext);
                httpContext.Response.Body.Write(new byte[10], 0, 10);
                return Task.FromResult(0);
            }))
            {
                var response = await SendRequestAsync(address);
                Assert.Equal(200, (int)response.StatusCode);
                Assert.Equal(new Version(1, 1), response.Version);
                Assert.True(onStartingCalled);
                IEnumerable<string> ignored;
                Assert.False(response.Content.Headers.TryGetValues("content-length", out ignored), "Content-Length");
                Assert.True(response.Headers.TransferEncodingChunked.HasValue, "Chunked");
                Assert.Equal(new byte[10], await response.Content.ReadAsByteArrayAsync());
            }
        }

        // The APM BeginWrite/EndWrite path must invoke the OnStarting callback.
        [ConditionalFact]
        public async Task ResponseBody_BeginWrite_TriggersOnStarting()
        {
            var onStartingCalled = false;
            string address;
            using (Utilities.CreateHttpServer(out address, httpContext =>
            {
                httpContext.Response.OnStarting(state =>
                {
                    onStartingCalled = true;
                    Assert.Same(state, httpContext);
                    return Task.FromResult(0);
                }, httpContext);
                httpContext.Response.Body.EndWrite(httpContext.Response.Body.BeginWrite(new byte[10], 0, 10, null, null));
                return Task.FromResult(0);
            }))
            {
                var response = await SendRequestAsync(address);
                Assert.Equal(200, (int)response.StatusCode);
                Assert.Equal(new Version(1, 1), response.Version);
                Assert.True(onStartingCalled);
                IEnumerable<string> ignored;
                Assert.False(response.Content.Headers.TryGetValues("content-length", out ignored), "Content-Length");
                Assert.True(response.Headers.TransferEncodingChunked.HasValue, "Chunked");
                Assert.Equal(new byte[10], await response.Content.ReadAsByteArrayAsync());
            }
        }

        // WriteAsync must invoke the OnStarting callback.
        [ConditionalFact]
        public async Task ResponseBody_WriteAsync_TriggersOnStarting()
        {
            var onStartingCalled = false;
            string address;
            using (Utilities.CreateHttpServer(out address, httpContext =>
            {
                httpContext.Response.OnStarting(state =>
                {
                    onStartingCalled = true;
                    Assert.Same(state, httpContext);
                    return Task.FromResult(0);
                }, httpContext);
                return httpContext.Response.Body.WriteAsync(new byte[10], 0, 10);
            }))
            {
                var response = await SendRequestAsync(address);
                Assert.Equal(200, (int)response.StatusCode);
                Assert.Equal(new Version(1, 1), response.Version);
                Assert.True(onStartingCalled);
                IEnumerable<string> ignored;
                Assert.False(response.Content.Headers.TryGetValues("content-length", out ignored), "Content-Length");
                Assert.True(response.Headers.TransferEncodingChunked.HasValue, "Chunked");
                Assert.Equal(new byte[10], await response.Content.ReadAsByteArrayAsync());
            }
        }

        // Helper: issue a plain GET to the server started by the test.
        private async Task<HttpResponseMessage> SendRequestAsync(string uri)
        {
            using (HttpClient client = new HttpClient())
            {
                return await client.GetAsync(uri);
            }
        }
    }
}
using System;
using System.Collections;
using System.Collections.Generic;
using System.Runtime.Serialization;
using NUnit.Framework;
using ServiceStack.Text.Tests.Support;

namespace ServiceStack.Text.Tests
{
    /// <summary>
    /// Tests that the ServiceStack serializers honor DataContract attributes
    /// ([DataMember], [IgnoreDataMember], [CollectionDataContract]) across
    /// JSON, JSV and XML. The attribute placement on the nested DTO classes is
    /// itself the subject under test — do not alter it.
    /// </summary>
    [TestFixture]
    public class DataContractTests
        : TestBase
    {
        // Serializing a [DataContract] DTO must emit only [DataMember] fields.
        [Test]
        public void Only_Serializes_DataMember_fields_for_DataContracts()
        {
            var dto = new ResponseStatus
            {
                ErrorCode = "ErrorCode",
                Message = "Message",
                StackTrace = "StackTrace",
                Errors = new List<ResponseError>(),
            };
            Serialize(dto);
        }

        // DTO with one serialized and one explicitly ignored member.
        public class RequestWithIgnoredMembers
        {
            public string Name { get; set; }

            [IgnoreDataMember]
            public string Comment { get; set; }
        }

        // Round-trips a DTO and asserts the ignored member is dropped.
        private void DoIgnoreMemberTest(Func<RequestWithIgnoredMembers, string> serialize,
                                        Func<string, RequestWithIgnoredMembers> deserialize)
        {
            var dto = new RequestWithIgnoredMembers()
            {
                Name = "John",
                Comment = "Some Comment"
            };

            var clone = deserialize(serialize(dto));

            Assert.AreEqual(dto.Name, clone.Name);
            Assert.IsNull(clone.Comment);
        }

        [Test]
        public void JsonSerializerHonorsIgnoreMemberAttribute()
        {
            DoIgnoreMemberTest(r => JsonSerializer.SerializeToString(r),
                               s => JsonSerializer.DeserializeFromString<RequestWithIgnoredMembers>(s));
        }

        [Test]
        public void JsvSerializerHonorsIgnoreMemberAttribute()
        {
            DoIgnoreMemberTest(r => TypeSerializer.SerializeToString(r),
                               s => TypeSerializer.DeserializeFromString<RequestWithIgnoredMembers>(s));
        }

        [Test]
        public void XmlSerializerHonorsIgnoreMemberAttribute()
        {
            DoIgnoreMemberTest(r => XmlSerializer.SerializeToString(r),
                               s => XmlSerializer.DeserializeFromString<RequestWithIgnoredMembers>(s));
        }

        [DataContract]
        public class EmptyDataContract
        {
        }

        [Test]
        public void Can_Serialize_Empty_DataContract()
        {
            var dto = new EmptyDataContract();
            Serialize(dto);
        }

        // Collection DTO backed by a list that starts with one element.
        [CollectionDataContract]
        public class MyCollection : ICollection<MyType>
        {
            List<MyType> _internal = new List<MyType>{ new MyType()};

            public IEnumerator<MyType> GetEnumerator()
            {
                return _internal.GetEnumerator();
            }

            IEnumerator IEnumerable.GetEnumerator()
            {
                return _internal.GetEnumerator();
            }

            public void Add(MyType item)
            {
                _internal.Add(item);
            }

            public void Clear()
            {
                _internal.Clear();
            }

            public bool Contains(MyType item)
            {
                return _internal.Contains(item);
            }

            public void CopyTo(MyType[] array, int arrayIndex)
            {
                _internal.CopyTo(array, arrayIndex);
            }

            public bool Remove(MyType item)
            {
                return _internal.Remove(item);
            }

            public int Count
            {
                get { return _internal.Count; }
            }

            public bool IsReadOnly
            {
                get { return false; }
            }
        }

        [DataContract]
        public class MyType
        {
        }

        [Test]
        public void Can_Serialize_MyCollection()
        {
            var dto = new MyCollection();
            Serialize(dto);
        }

        // NOTE: intentionally has no [DataMember] on its properties.
        [DataContract]
        public class PersonRecord
        {
            public int Id { get; set; }
            public string Name { get; set; }
        }

        [Test] //https://github.com/ServiceStack/ServiceStack.Text/issues/46
        public void Replicate_serialization_bug()
        {
            var p = new PersonRecord { Id = 27, Name = "John" };

            // Fails at this point, with a "Cannot access a closed Stream." exception.
            // Am I doing something wrong?
            string output = XmlSerializer.SerializeToString(p);

            Console.WriteLine(output);
        }

        // DTO whose list member is renamed on the wire via DataMember.Name.
        [DataContract]
        public class ClassOne
        {
            [DataMember]
            public int Id { get; set; }

            [DataMember(Name = "listClassTwo")]
            public List<ClassTwo> List { get; set; }

            public ClassOne()
            {
                List = new List<ClassTwo>();
            }
        }

        [DataContract]
        public class ClassTwo
        {
            [DataMember]
            public string Name { get; set; }
        }

        // Deserialization must honor the DataMember(Name = ...) alias.
        [Test]
        public void deserialize_from_string_with_the_dataMember_name()
        {
            const string jsonList = "{\"Id\":1,\"listClassTwo\":[{\"Name\":\"Name One\"},{\"Name\":\"Name Two\"}]}";

            var classOne = JsonSerializer.DeserializeFromString<ClassOne>(jsonList);

            Assert.AreEqual(1, classOne.Id);
            Assert.AreEqual(2, classOne.List.Count);
        }

        [Test]
        public void serialize()
        {
            var classOne= new ClassOne
            {
                Id = 1,
                List = new List<ClassTwo> { new ClassTwo { Name = "Name One" }, new ClassTwo { Name = "Name Two" } }
            };

            Console.WriteLine(JsonSerializer.SerializeToString(classOne));
        }
    }
}
using System.Collections.Generic;
using System.Linq;
using Semver;
using UnityEngine;
using UnityEngine.Experimental.UIElements;

namespace UnityEditor.PackageManager.UI
{
#if !UNITY_2018_3_OR_NEWER
    // Pre-2018.3 UXML factory; 2018.3+ uses the nested UxmlFactory below instead.
    internal class PackageDetailsFactory : UxmlFactory<PackageDetails>
    {
        protected override PackageDetails DoCreate(IUxmlAttributes bag, CreationContext cc)
        {
            return new PackageDetails();
        }
    }
#endif

    /// <summary>
    /// Right-hand detail pane of the Package Manager window: shows the selected
    /// package's metadata, a version dropdown, and the Install/Remove/Update
    /// buttons, and drives add/remove operations on the selected package.
    /// </summary>
    internal class PackageDetails : VisualElement
    {
#if UNITY_2018_3_OR_NEWER
        internal new class UxmlFactory : UxmlFactory<PackageDetails> { }
#endif
        // Root of the visual tree loaded from PackageDetails.uxml.
        private readonly VisualElement root;
        // Package currently shown in the pane (null when nothing is selected).
        private Package package;
        private const string emptyDescriptionClass = "empty";
        // Backing list for VersionPopup; mutated in place by ResetVersionItems.
        private List<VersionItem> VersionItems;
        internal PopupField<VersionItem> VersionPopup;
        private PackageInfo DisplayPackage;

        // Version currently chosen in the dropdown (null for the EmptyVersion
        // placeholder). The ternary is redundant but preserved as-is.
        private PackageInfo SelectedPackage { get { return VersionPopup.value.Version != null ? VersionPopup.value.Version : null; } }

        // Index into PackageActionVerbs / PackageActionInProgressVerbs below;
        // the three arrays must stay in the same order.
        internal enum PackageAction
        {
            Add,
            Remove,
            Update,
            Downgrade,
            Enable,
            Disable,
            UpToDate,
            Current,
            Local,
            Git,
            Embedded
        }

        // Placeholder item shown when a package has no versions at all.
        private static readonly VersionItem EmptyVersion = new VersionItem {Version = null};

        internal static readonly string[] PackageActionVerbs =
        {
            "Install",
            "Remove",
            "Update to",
            "Update to",
            "Enable",
            "Disable",
            "Up to date",
            "Current",
            "Local",
            "Git",
            "Embedded"
        };

        internal static readonly string[] PackageActionInProgressVerbs =
        {
            "Installing",
            "Removing",
            "Updating to",
            "Updating to",
            "Enabling",
            "Disabling",
            "Up to date",
            "Current",
            "Local",
            "Git",
            "Embedded"
        };

        /// <summary>
        /// Loads the UXML template, wires up all button click handlers and the
        /// version dropdown, and hides the pane until a package is selected.
        /// </summary>
        public PackageDetails()
        {
            // Hidden until SetPackage is called with a selection.
            UIUtils.SetElementDisplay(this, false);

            // NOTE(review): this is the project-local Resources helper, not
            // UnityEngine.Resources (which is fully qualified where used below).
            root = Resources.GetTemplate("PackageDetails.uxml");
            Add(root);

            // Let registered extensions inject their own UI into the pane.
            foreach (var extension in PackageManagerExtensions.Extensions)
                CustomContainer.Add(extension.CreateExtensionUI());

            root.StretchToParentSize();

            SetUpdateVisibility(false);
            RemoveButton.visible = false;
            UpdateBuiltIn.visible = false;

            UpdateButton.clickable.clicked += UpdateClick;
            UpdateBuiltIn.clickable.clicked += UpdateClick;
            RemoveButton.clickable.clicked += RemoveClick;
            ViewDocButton.clickable.clicked += ViewDocClick;
            ViewChangelogButton.clickable.clicked += ViewChangelogClick;
            ViewLicenses.clickable.clicked += ViewLicensesClick;

            // Start with only the placeholder entry; real versions are filled
            // in by ResetVersionItems.
            VersionItems = new List<VersionItem> {EmptyVersion};
            VersionPopup = new PopupField<VersionItem>(VersionItems, 0);
            VersionPopup.SetLabelCallback(VersionSelectionSetLabel);
            VersionPopup.AddToClassList("popup");
            VersionPopup.OnValueChanged(VersionSelectionChanged);
            if (VersionItems.Count == 1)
                VersionPopup.SetEnabled(false);
            UpdateDropdownContainer.Add(VersionPopup);
            VersionPopup.StretchToParentSize();

            // Fix button on dark skin but overlapping edge pixel perfectly
            if (EditorGUIUtility.isProSkin)
            {
                VersionPopup.style.positionLeft = -1;
                UpdateDropdownContainer.style.sliceLeft = 4;
            }
        }

        // Label callback for the version dropdown.
        private string VersionSelectionSetLabel(VersionItem item)
        {
            return item.Label;
        }

        // Re-evaluate the add/update button whenever the dropdown changes.
        private void VersionSelectionChanged(ChangeEvent<VersionItem> e)
        {
            RefreshAddButton();
        }

        // Shows/hides the whole update row (button + dropdown).
        private void SetUpdateVisibility(bool value)
        {
            if (UpdateContainer != null)
                UIUtils.SetElementDisplay(UpdateContainer, value);
        }

        /// <summary>
        /// Populates every label, tag and button in the pane from
        /// <paramref name="packageInfo"/>; clears the pane when either the
        /// package or the info is null. Also surfaces any pending error.
        /// </summary>
        internal void SetDisplayPackage(PackageInfo packageInfo)
        {
            DisplayPackage = packageInfo;

            var detailVisible = true;
            Error error = null;

            if (package == null || DisplayPackage == null)
            {
                // Nothing selected: hide everything and tell extensions.
                detailVisible = false;
                UIUtils.SetElementDisplay(DocumentationContainer, false);
                UIUtils.SetElementDisplay(CustomContainer, false);
                UIUtils.SetElementDisplay(UpdateBuiltIn, false);

                foreach (var extension in PackageManagerExtensions.Extensions)
                    extension.OnPackageSelectionChange(null);
            }
            else
            {
                SetUpdateVisibility(true);
                UIUtils.SetElementDisplay(ViewDocButton, true);
                RemoveButton.visible = true;

                if (string.IsNullOrEmpty(DisplayPackage.Description))
                {
                    DetailDesc.text = "There is no description for this package.";
                    DetailDesc.AddToClassList(emptyDescriptionClass);
                }
                else
                {
                    DetailDesc.text = DisplayPackage.Description;
                    DetailDesc.RemoveFromClassList(emptyDescriptionClass);
                }

                root.Q<Label>("detailTitle").text = DisplayPackage.DisplayName;
                DetailVersion.text = "Version " + DisplayPackage.VersionWithoutTag;

                // "Verified" tag: suppressed for in-development and preview
                // packages; otherwise labeled with the current editor version.
                if (DisplayPackage.IsInDevelopment || DisplayPackage.HasVersionTag(PackageTag.preview))
                    UIUtils.SetElementDisplay(GetTag(PackageTag.verified), false);
                else
                {
                    // major.minor of the running editor, e.g. "2018.2".
                    var unityVersionParts = Application.unityVersion.Split('.');
                    var unityVersion = string.Format("{0}.{1}", unityVersionParts[0], unityVersionParts[1]);
                    VerifyLabel.text = unityVersion + " verified";
                    UIUtils.SetElementDisplay(GetTag(PackageTag.verified), DisplayPackage.IsVerified);
                }

                UIUtils.SetElementDisplay(GetTag(PackageTag.inDevelopment), DisplayPackage.IsInDevelopment);
                UIUtils.SetElementDisplay(GetTag(PackageTag.local), DisplayPackage.IsLocal);
                UIUtils.SetElementDisplay(GetTag(PackageTag.preview), DisplayPackage.IsPreview);

                UIUtils.SetElementDisplay(DocumentationContainer, DisplayPackage.Origin != PackageSource.BuiltIn);
                UIUtils.SetElementDisplay(ChangelogContainer, DisplayPackage.HasChangelog(DisplayPackage));

                root.Q<Label>("detailName").text = DisplayPackage.Name;
                root.Q<ScrollView>("detailView").scrollOffset = new Vector2(0, 0);

                // Built-in modules show a module blurb instead of description/version.
                DetailModuleReference.text = "";
                var isBuiltIn = DisplayPackage.IsBuiltIn;
                if (isBuiltIn)
                    DetailModuleReference.text = DisplayPackage.BuiltInDescription;

                DetailAuthor.text = "";
                if (!string.IsNullOrEmpty(DisplayPackage.Author))
                    DetailAuthor.text = string.Format("Author: {0}", DisplayPackage.Author);

                UIUtils.SetElementDisplay(DetailDesc, !isBuiltIn);
                UIUtils.SetElementDisplay(DetailVersion, !isBuiltIn);
                UIUtils.SetElementDisplayNonEmpty(DetailModuleReference);
                UIUtils.SetElementDisplayNonEmpty(DetailAuthor);

                if (DisplayPackage.Errors.Count > 0)
                    error = DisplayPackage.Errors.First();

                RefreshAddButton();
                RefreshRemoveButton();

                UIUtils.SetElementDisplay(CustomContainer, true);

                // NOTE(review): subscribed on every call; presumably SetPackage's
                // ResetEvents() clears previous handlers - confirm no duplicate
                // subscription when SetDisplayPackage is called repeatedly.
                package.AddSignal.OnOperation += OnAddOperation;
                package.RemoveSignal.OnOperation += OnRemoveOperation;

                foreach (var extension in PackageManagerExtensions.Extensions)
                    extension.OnPackageSelectionChange(DisplayPackage.Info);
            }

            // Set visibility
            root.Q<VisualElement>("detail").visible = detailVisible;

            // Fall back to any error recorded for this package in the collection.
            if (null == error)
                error = PackageCollection.Instance.GetPackageError(package);

            if (error != null)
                SetError(error);
            else
                DetailError.ClearError();
        }

        /// <summary>
        /// Rebuilds the version dropdown: key versions (latest, verified, latest
        /// patch, current) first, then every version under an "All Versions/"
        /// submenu. Selects LatestUpdate by default.
        /// </summary>
        private void ResetVersionItems(PackageInfo displayPackage)
        {
            VersionItems.Clear();
            VersionPopup.SetEnabled(true);

            if (displayPackage == null)
                return;

            //
            // Get key versions -- Latest, Verified, LatestPatch, Current.
            var keyVersions = new List<PackageInfo>();
            if (package.LatestRelease != null) keyVersions.Add(package.LatestRelease);
            if (package.Current != null) keyVersions.Add(package.Current);
            if (package.Verified != null && package.Verified != package.Current) keyVersions.Add(package.Verified);
            if (package.LatestPatch != null && package.IsAfterCurrentVersion(package.LatestPatch)) keyVersions.Add(package.LatestPatch);
            if (package.Current == null && package.LatestRelease == null && package.Latest != null) keyVersions.Add(package.Latest);
            if (Package.ShouldProposeLatestVersions && package.Latest != package.LatestRelease && package.Latest != null) keyVersions.Add(package.Latest);
            keyVersions.Add(package.LatestUpdate);        // Make sure LatestUpdate is always in the list.

            // NOTE(review): the lambda parameter shadows the 'package' field,
            // and keyVersions may contain duplicates (LatestUpdate is added
            // unconditionally) - confirm duplicates are acceptable here.
            foreach (var version in keyVersions.OrderBy(package => package.Version).Reverse())
            {
                var item = new VersionItem {Version = version};
                VersionItems.Add(item);

                if (version == package.LatestUpdate)
                    VersionPopup.value = item;
            }

            //
            // Add all versions
            foreach (var version in package.Versions.Reverse())
            {
                var item = new VersionItem {Version = version};
                item.MenuName = "All Versions/";
                VersionItems.Add(item);
            }

            if (VersionItems.Count == 0)
            {
                // No versions at all: show the disabled placeholder.
                VersionItems.Add(EmptyVersion);
                VersionPopup.value = EmptyVersion;
                VersionPopup.SetEnabled(false);
            }
        }

        /// <summary>
        /// Switches the pane to a new package: unhooks operation handlers from
        /// the previous package, then rebuilds the version list and the display.
        /// </summary>
        public void SetPackage(Package package)
        {
            if (this.package != null)
            {
                // Detach from any in-flight add operation on the old package.
                if (this.package.AddSignal.Operation != null)
                {
                    this.package.AddSignal.Operation.OnOperationError -= OnAddOperationError;
                    this.package.AddSignal.Operation.OnOperationSuccess -= OnAddOperationSuccess;
                }
                this.package.AddSignal.ResetEvents();

                // Same for any in-flight remove operation.
                if (this.package.RemoveSignal.Operation != null)
                {
                    this.package.RemoveSignal.Operation.OnOperationSuccess -= OnRemoveOperationSuccess;
                    this.package.RemoveSignal.Operation.OnOperationError -= OnRemoveOperationError;
                }
                this.package.RemoveSignal.ResetEvents();
            }

            UIUtils.SetElementDisplay(this, true);

            this.package = package;
            var displayPackage = package != null ? package.VersionToDisplay : null;
            ResetVersionItems(displayPackage);
            SetDisplayPackage(displayPackage);
        }

        // Shows the error banner and wires its close button to clear the
        // package's recorded errors and refresh the list.
        private void SetError(Error error)
        {
            DetailError.AdjustSize(DetailView.verticalScroller.visible);
            DetailError.SetError(error);
            DetailError.OnCloseError = () =>
            {
                PackageCollection.Instance.RemovePackageErrors(package);
                PackageCollection.Instance.UpdatePackageCollection();
            };
        }

        // Called when an add (install/update) operation starts.
        private void OnAddOperation(IAddOperation operation)
        {
            operation.OnOperationError += OnAddOperationError;
            operation.OnOperationSuccess += OnAddOperationSuccess;
        }

        private void OnAddOperationError(Error error)
        {
            // Detach and clear the finished operation before reporting.
            if (package != null && package.AddSignal.Operation != null)
            {
                package.AddSignal.Operation.OnOperationSuccess -= OnAddOperationSuccess;
                package.AddSignal.Operation.OnOperationError -= OnAddOperationError;
                package.AddSignal.Operation = null;
            }

            PackageCollection.Instance.AddPackageError(package, error);

            SetError(error);

            if (package != null)
                ResetVersionItems(package.VersionToDisplay);

            PackageCollection.Instance.UpdatePackageCollection();
        }

        private void OnAddOperationSuccess(PackageInfo packageInfo)
        {
            // Detach and clear the finished operation, then notify extensions.
            if (package != null && package.AddSignal.Operation != null)
            {
                package.AddSignal.Operation.OnOperationSuccess -= OnAddOperationSuccess;
                package.AddSignal.Operation.OnOperationError -= OnAddOperationError;
                package.AddSignal.Operation = null;
            }

            foreach (var extension in PackageManagerExtensions.Extensions)
                extension.OnPackageAddedOrUpdated(packageInfo.Info);
        }

        private void OnRemoveOperation(IRemoveOperation operation)
        {
            // Make sure we are not already registered
            operation.OnOperationError -= OnRemoveOperationError;
            operation.OnOperationSuccess -= OnRemoveOperationSuccess;
            operation.OnOperationError += OnRemoveOperationError;
            operation.OnOperationSuccess += OnRemoveOperationSuccess;
        }

        private void OnRemoveOperationError(Error error)
        {
            // Detach and clear the finished operation before reporting.
            if (package != null && package.RemoveSignal.Operation != null)
            {
                package.RemoveSignal.Operation.OnOperationSuccess -= OnRemoveOperationSuccess;
                package.RemoveSignal.Operation.OnOperationError -= OnRemoveOperationError;
                package.RemoveSignal.Operation = null;
            }

            PackageCollection.Instance.AddPackageError(package, error);

            SetError(error);
            PackageCollection.Instance.UpdatePackageCollection();
        }

        private void OnRemoveOperationSuccess(PackageInfo packageInfo)
        {
            // Detach and clear the finished operation, then notify extensions.
            if (package != null && package.RemoveSignal.Operation != null)
            {
                package.RemoveSignal.Operation.OnOperationSuccess -= OnRemoveOperationSuccess;
                package.RemoveSignal.Operation.OnOperationError -= OnRemoveOperationError;
                package.RemoveSignal.Operation = null;
            }

            foreach (var extension in PackageManagerExtensions.Extensions)
                extension.OnPackageRemoved(packageInfo.Info);
        }

        /// <summary>
        /// Decides which action the add/update button represents (Install,
        /// Update to, Downgrade, Enable, ...) and its enabled/visible state,
        /// based on the selected version, any in-flight operations, and
        /// whether the editor is compiling.
        /// </summary>
        private void RefreshAddButton()
        {
            // In-development packages cannot be installed/updated from here.
            if (package.Current != null && package.Current.IsInDevelopment)
            {
                UIUtils.SetElementDisplay(UpdateBuiltIn, false);
                UIUtils.SetElementDisplay(UpdateCombo, false);
                UIUtils.SetElementDisplay(UpdateButton, false);
                return;
            }

            var targetVersion = SelectedPackage;
            if (targetVersion == null)
                return;

            var enableButton = !Package.AddRemoveOperationInProgress;
            var enableVersionButton = true;

            var action = PackageAction.Update;
            var inprogress = false;

            var isBuiltIn = package.IsBuiltIn;

            // NOTE(review): 'version' is never assigned, so GetButtonText never
            // appends a version number to the button label - confirm intended.
            SemVersion version = null;

            if (package.AddSignal.Operation != null)
            {
                // An add operation is already running for this package.
                if (isBuiltIn)
                {
                    action = PackageAction.Enable;
                    inprogress = true;
                    enableButton = false;
                }
                else
                {
                    var addOperationVersion = package.AddSignal.Operation.PackageInfo.Version;
                    if (package.Current == null)
                    {
                        action = PackageAction.Add;
                        inprogress = true;
                    }
                    else
                    {
                        action = addOperationVersion.CompareByPrecedence(package.Current.Version) >= 0
                            ? PackageAction.Update : PackageAction.Downgrade;
                        inprogress = true;
                    }

                    enableButton = false;
                    enableVersionButton = false;
                }
            }
            else
            {
                if (package.Current != null)
                {
                    // Installed
                    if (package.Current.IsVersionLocked)
                    {
                        // Embedded/Git packages cannot change version from here.
                        if (package.Current.Origin == PackageSource.Embedded)
                            action = PackageAction.Embedded;
                        else if (package.Current.Origin == PackageSource.Git)
                            action = PackageAction.Git;

                        enableButton = false;
                        enableVersionButton = false;
                    }
                    else
                    {
                        if (targetVersion.IsCurrent)
                        {
                            if (targetVersion == package.LatestUpdate)
                                action = PackageAction.UpToDate;
                            else
                                action = PackageAction.Current;

                            enableButton = false;
                        }
                        else
                        {
                            action = targetVersion.Version.CompareByPrecedence(package.Current.Version) >= 0
                                ? PackageAction.Update : PackageAction.Downgrade;
                        }
                    }
                }
                else
                {
                    // Not Installed
                    if (package.Versions.Any())
                    {
                        if (isBuiltIn)
                            action = PackageAction.Enable;
                        else
                            action = PackageAction.Add;
                    }
                }
            }

            if (package.RemoveSignal.Operation != null)
                enableButton = false;

            // Disable while compiling and poll until compilation finishes.
            if (EditorApplication.isCompiling)
            {
                enableButton = false;
                enableVersionButton = false;

                EditorApplication.update -= CheckCompilationStatus;
                EditorApplication.update += CheckCompilationStatus;
            }

            var button = isBuiltIn ? UpdateBuiltIn : UpdateButton;
            button.SetEnabled(enableButton);
            VersionPopup.SetEnabled(enableVersionButton);
            button.text = GetButtonText(action, inprogress, version);

            var visibleFlag = !(package.Current != null && package.Current.IsVersionLocked);
            UIUtils.SetElementDisplay(UpdateBuiltIn, isBuiltIn && visibleFlag);
            UIUtils.SetElementDisplay(UpdateCombo, !isBuiltIn && visibleFlag);
            UIUtils.SetElementDisplay(UpdateButton, !isBuiltIn && visibleFlag);
        }

        /// <summary>
        /// Shows/enables the Remove (or Disable, for built-ins) button for the
        /// currently installed version, if it may be removed.
        /// </summary>
        private void RefreshRemoveButton()
        {
            var visibleFlag = false;

            var current = package.Current;

            // Show only if there is a current package installed
            if (current != null)
            {
                visibleFlag = current.CanBeRemoved && !package.IsPackageManagerUI;

                var action = current.IsBuiltIn ? PackageAction.Disable : PackageAction.Remove;
                var inprogress = package.RemoveSignal.Operation != null;

                var enableButton = visibleFlag && !EditorApplication.isCompiling && !inprogress && !Package.AddRemoveOperationInProgress;

                // Poll until compilation finishes, then refresh both buttons.
                if (EditorApplication.isCompiling)
                {
                    EditorApplication.update -= CheckCompilationStatus;
                    EditorApplication.update += CheckCompilationStatus;
                }

                RemoveButton.SetEnabled(enableButton);
                RemoveButton.text = GetButtonText(action, inprogress);
            }

            UIUtils.SetElementDisplay(RemoveButton, visibleFlag);
        }

        // Editor-update callback: waits for compilation to end, refreshes the
        // buttons once, then unregisters itself.
        private void CheckCompilationStatus()
        {
            if (EditorApplication.isCompiling)
                return;

            RefreshAddButton();
            RefreshRemoveButton();
            EditorApplication.update -= CheckCompilationStatus;
        }

        // Builds the button label from the verb tables; appends the version
        // only when one is supplied.
        private static string GetButtonText(PackageAction action, bool inProgress = false, SemVersion version = null)
        {
            return version == null ?
                string.Format("{0}", inProgress ? PackageActionInProgressVerbs[(int) action] : PackageActionVerbs[(int) action]) :
                string.Format("{0} {1}", inProgress ? PackageActionInProgressVerbs[(int) action] : PackageActionVerbs[(int) action], version);
        }

        /// <summary>
        /// Handler for the Install/Update button. Updating the Package Manager
        /// UI itself requires closing this window first, so that path defers
        /// the add to CloseAndUpdate.
        /// </summary>
        private void UpdateClick()
        {
            if (package.IsPackageManagerUI)
            {
                // Don't allow updating of the UI if there are build errors, as for now, that will prevent the UI from reloading properly.
                if (EditorUtility.scriptCompilationFailed)
                {
                    EditorUtility.DisplayDialog("Unity Package Manager", "The Package Manager UI cannot be updated while there are script compilation errors in your project. Please fix the errors and try again.", "Ok");
                    return;
                }

                if (!EditorUtility.DisplayDialog("Unity Package Manager", "Updating this package will close the Package Manager window. You will have to re-open it after the update is done. Do you want to continue?", "Yes", "No"))
                    return;

                // Drop any pending add operation before self-updating.
                if (package.AddSignal.Operation != null)
                {
                    package.AddSignal.Operation.OnOperationSuccess -= OnAddOperationSuccess;
                    package.AddSignal.Operation.OnOperationError -= OnAddOperationError;
                    package.AddSignal.ResetEvents();
                    package.AddSignal.Operation = null;
                }

                DetailError.ClearError();
                EditorApplication.update += CloseAndUpdate;

                return;
            }

            DetailError.ClearError();
            package.Add(SelectedPackage);
            RefreshAddButton();
            RefreshRemoveButton();
        }

        // Deferred self-update path: start the add, then close the window.
        private void CloseAndUpdate()
        {
            EditorApplication.update -= CloseAndUpdate;

            package.Add(SelectedPackage);

            var windows = UnityEngine.Resources.FindObjectsOfTypeAll<PackageManagerWindow>();
            if (windows.Length > 0)
            {
                windows[0].Close();
            }
        }

        // Handler for the Remove/Disable button.
        private void RemoveClick()
        {
            DetailError.ClearError();
            package.Remove();
            RefreshRemoveButton();
            RefreshAddButton();
        }

        private void ViewDocClick()
        {
            Application.OpenURL(DisplayPackage.GetDocumentationUrl());
        }

        private void ViewChangelogClick()
        {
            Application.OpenURL(DisplayPackage.GetChangelogUrl());
        }

        private void ViewLicensesClick()
        {
            Application.OpenURL(DisplayPackage.GetLicensesUrl());
        }

        // Typed accessors into the UXML tree, looked up by element name on
        // every access.
        private Label DetailDesc { get { return root.Q<Label>("detailDesc"); } }
        internal Button UpdateButton { get { return root.Q<Button>("update"); } }
        private Button RemoveButton { get { return root.Q<Button>("remove"); } }
        private Button ViewDocButton { get { return root.Q<Button>("viewDocumentation"); } }
        private VisualElement DocumentationContainer { get { return root.Q<VisualElement>("documentationContainer"); } }
        private Button ViewChangelogButton { get { return root.Q<Button>("viewChangelog"); } }
        private VisualElement ChangelogContainer { get { return root.Q<VisualElement>("changeLogContainer"); } }
        private VisualElement ViewLicensesContainer { get { return root.Q<VisualElement>("viewLicensesContainer"); } }
        private Button ViewLicenses { get { return root.Q<Button>("viewLicenses"); } }
        private VisualElement UpdateContainer { get { return root.Q<VisualElement>("updateContainer"); } }
        private Alert DetailError { get { return root.Q<Alert>("detailError"); } }
        private ScrollView DetailView { get { return root.Q<ScrollView>("detailView"); } }
        private Label DetailModuleReference { get { return root.Q<Label>("detailModuleReference"); } }
        private Label DetailVersion { get { return root.Q<Label>("detailVersion"); }}
        private Label DetailAuthor { get { return root.Q<Label>("detailAuthor"); }}
        private Label VerifyLabel { get { return root.Q<Label>("tagVerify"); } }
        private VisualElement CustomContainer { get { return root.Q<VisualElement>("detailCustomContainer"); } }
        internal VisualElement GetTag(PackageTag tag) {return root.Q<VisualElement>("tag-" + tag); }
        private VisualElement UpdateDropdownContainer { get { return root.Q<VisualElement>("updateDropdownContainer"); } }
        internal VisualElement UpdateCombo { get { return root.Q<VisualElement>("updateCombo"); } }
        internal Button UpdateBuiltIn { get { return root.Q<Button>("updateBuiltIn"); } }
    }
}
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Reflection;

namespace StacksOfWax.WebApiTemplate.Areas.HelpPage
{
    /// <summary>
    /// This class will create an object of a given type and populate it with sample data.
    /// </summary>
    public class ObjectGenerator
    {
        internal const int DefaultCollectionSize = 2;
        private readonly SimpleTypeObjectGenerator _simpleGenerator = new SimpleTypeObjectGenerator();

        /// <summary>
        /// Generates an object for a given type. The type needs to be public, have a public default constructor and settable public properties/fields. Currently it supports the following types:
        /// Simple types: <see cref="int"/>, <see cref="string"/>, <see cref="Enum"/>, <see cref="DateTime"/>, <see cref="Uri"/>, etc.
        /// Complex types: POCO types.
        /// Nullables: <see cref="Nullable{T}"/>.
        /// Arrays: arrays of simple types or complex types.
        /// Key value pairs: <see cref="KeyValuePair{TKey,TValue}"/>
        /// Tuples: <see cref="Tuple{T1}"/>, <see cref="Tuple{T1,T2}"/>, etc
        /// Dictionaries: <see cref="IDictionary{TKey,TValue}"/> or anything deriving from <see cref="IDictionary{TKey,TValue}"/>.
        /// Collections: <see cref="IList{T}"/>, <see cref="IEnumerable{T}"/>, <see cref="ICollection{T}"/>, <see cref="IList"/>, <see cref="IEnumerable"/>, <see cref="ICollection"/> or anything deriving from <see cref="ICollection{T}"/> or <see cref="IList"/>.
        /// Queryables: <see cref="IQueryable"/>, <see cref="IQueryable{T}"/>.
        /// </summary>
        /// <param name="type">The type.</param>
        /// <returns>An object of the given type.</returns>
        public object GenerateObject(Type type)
        {
            return GenerateObject(type, new Dictionary<Type, object>());
        }

        // Dispatches to the appropriate generator based on the shape of the
        // type. The order of checks matters: generic types are handled before
        // the non-generic IDictionary/IList fallbacks.
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Here we just want to return null if anything goes wrong.")]
        private object GenerateObject(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            try
            {
                if (SimpleTypeObjectGenerator.CanGenerateObject(type))
                {
                    return _simpleGenerator.GenerateObject(type);
                }

                if (type.IsArray)
                {
                    return GenerateArray(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type.IsGenericType)
                {
                    return GenerateGenericType(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type == typeof(IDictionary))
                {
                    return GenerateDictionary(typeof(Hashtable), DefaultCollectionSize, createdObjectReferences);
                }

                if (typeof(IDictionary).IsAssignableFrom(type))
                {
                    return GenerateDictionary(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type == typeof(IList) || type == typeof(IEnumerable) || type == typeof(ICollection))
                {
                    return GenerateCollection(typeof(ArrayList), DefaultCollectionSize, createdObjectReferences);
                }

                if (typeof(IList).IsAssignableFrom(type))
                {
                    return GenerateCollection(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type == typeof(IQueryable))
                {
                    return GenerateQueryable(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type.IsEnum)
                {
                    return GenerateEnum(type);
                }

                if (type.IsPublic || type.IsNestedPublic)
                {
                    return GenerateComplexObject(type, createdObjectReferences);
                }
            }
            catch
            {
                // Returns null if anything fails
                return null;
            }

            return null;
        }

        // Handles all generic types: nullables, key/value pairs, tuples,
        // common collection interfaces, dictionaries, and finally plain
        // generic POCOs.
        private static object GenerateGenericType(Type type, int collectionSize, Dictionary<Type, object> createdObjectReferences)
        {
            Type genericTypeDefinition = type.GetGenericTypeDefinition();
            if (genericTypeDefinition == typeof(Nullable<>))
            {
                return GenerateNullable(type, createdObjectReferences);
            }

            if (genericTypeDefinition == typeof(KeyValuePair<,>))
            {
                return GenerateKeyValuePair(type, createdObjectReferences);
            }

            if (IsTuple(genericTypeDefinition))
            {
                return GenerateTuple(type, createdObjectReferences);
            }

            Type[] typeArguments = type.GetGenericArguments();
            if (typeArguments.Length == 1)
            {
                if (genericTypeDefinition == typeof(IList<>) ||
                    genericTypeDefinition == typeof(IEnumerable<>) ||
                    genericTypeDefinition == typeof(ICollection<>))
                {
                    // Materialize the interface with a concrete List<T>.
                    Type concreteListType = typeof(List<>).MakeGenericType(typeArguments);
                    return GenerateCollection(concreteListType, collectionSize, createdObjectReferences);
                }

                if (genericTypeDefinition == typeof(IQueryable<>))
                {
                    return GenerateQueryable(type, collectionSize, createdObjectReferences);
                }

                Type closedCollectionType = typeof(ICollection<>).MakeGenericType(typeArguments[0]);
                if (closedCollectionType.IsAssignableFrom(type))
                {
                    return GenerateCollection(type, collectionSize, createdObjectReferences);
                }
            }

            if (typeArguments.Length == 2)
            {
                if (genericTypeDefinition == typeof(IDictionary<,>))
                {
                    // Materialize the interface with a concrete Dictionary<K,V>.
                    Type concreteDictionaryType = typeof(Dictionary<,>).MakeGenericType(typeArguments);
                    return GenerateDictionary(concreteDictionaryType, collectionSize, createdObjectReferences);
                }

                Type closedDictionaryType = typeof(IDictionary<,>).MakeGenericType(typeArguments[0], typeArguments[1]);
                if (closedDictionaryType.IsAssignableFrom(type))
                {
                    return GenerateDictionary(type, collectionSize, createdObjectReferences);
                }
            }

            if (type.IsPublic || type.IsNestedPublic)
            {
                return GenerateComplexObject(type, createdObjectReferences);
            }

            return null;
        }

        // Builds a Tuple<...> by generating each element; null if every
        // element failed to generate.
        private static object GenerateTuple(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            Type[] elementTypes = type.GetGenericArguments();
            object[] elementValues = new object[elementTypes.Length];
            bool allElementsNull = true;
            ObjectGenerator generator = new ObjectGenerator();
            for (int i = 0; i < elementTypes.Length; i++)
            {
                elementValues[i] = generator.GenerateObject(elementTypes[i], createdObjectReferences);
                allElementsNull &= elementValues[i] == null;
            }

            if (allElementsNull)
            {
                return null;
            }

            return Activator.CreateInstance(type, elementValues);
        }

        // True for the Tuple<...> family (1 through 8 type arguments).
        private static bool IsTuple(Type genericTypeDefinition)
        {
            return genericTypeDefinition == typeof(Tuple<>) ||
                   genericTypeDefinition == typeof(Tuple<,>) ||
                   genericTypeDefinition == typeof(Tuple<,,>) ||
                   genericTypeDefinition == typeof(Tuple<,,,>) ||
                   genericTypeDefinition == typeof(Tuple<,,,,>) ||
                   genericTypeDefinition == typeof(Tuple<,,,,,>) ||
                   genericTypeDefinition == typeof(Tuple<,,,,,,>) ||
                   genericTypeDefinition == typeof(Tuple<,,,,,,,>);
        }

        // Builds a KeyValuePair<K,V>; null if both the key and value failed.
        private static object GenerateKeyValuePair(Type keyValuePairType, Dictionary<Type, object> createdObjectReferences)
        {
            Type[] pairTypes = keyValuePairType.GetGenericArguments();
            Type keyType = pairTypes[0];
            Type valueType = pairTypes[1];
            ObjectGenerator generator = new ObjectGenerator();
            object sampleKey = generator.GenerateObject(keyType, createdObjectReferences);
            object sampleValue = generator.GenerateObject(valueType, createdObjectReferences);
            if (sampleKey == null && sampleValue == null)
            {
                // Failed to create key and values
                return null;
            }

            return Activator.CreateInstance(keyValuePairType, sampleKey, sampleValue);
        }

        // Builds an array of the requested size; null if every element came
        // back null.
        private static object GenerateArray(Type arrayType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            Type elementType = arrayType.GetElementType();
            Array sampleArray = Array.CreateInstance(elementType, size);
            bool allElementsNull = true;
            ObjectGenerator generator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object sampleElement = generator.GenerateObject(elementType, createdObjectReferences);
                sampleArray.SetValue(sampleElement, i);
                allElementsNull &= sampleElement == null;
            }

            if (allElementsNull)
            {
                return null;
            }

            return sampleArray;
        }

        // Fills a dictionary-like type via its Add/TryAdd and
        // Contains/ContainsKey methods discovered by reflection.
        private static object GenerateDictionary(Type dictionaryType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            Type keyType = typeof(object);
            Type valueType = typeof(object);
            if (dictionaryType.IsGenericType)
            {
                Type[] pairTypes = dictionaryType.GetGenericArguments();
                keyType = pairTypes[0];
                valueType = pairTypes[1];
            }

            object sampleDictionary = Activator.CreateInstance(dictionaryType);
            MethodInfo addMethod = dictionaryType.GetMethod("Add") ?? dictionaryType.GetMethod("TryAdd");
            MethodInfo containsMethod = dictionaryType.GetMethod("Contains") ?? dictionaryType.GetMethod("ContainsKey");
            ObjectGenerator generator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object sampleKey = generator.GenerateObject(keyType, createdObjectReferences);
                if (sampleKey == null)
                {
                    // Cannot generate a valid key
                    return null;
                }

                // Skip duplicate keys rather than throwing from Add.
                bool alreadyPresent = (bool)containsMethod.Invoke(sampleDictionary, new object[] { sampleKey });
                if (!alreadyPresent)
                {
                    object sampleValue = generator.GenerateObject(valueType, createdObjectReferences);
                    addMethod.Invoke(sampleDictionary, new object[] { sampleKey, sampleValue });
                }
            }

            return sampleDictionary;
        }

        // Picks the first declared enum value; null for an empty enum.
        private static object GenerateEnum(Type enumType)
        {
            Array possibleValues = Enum.GetValues(enumType);
            if (possibleValues.Length > 0)
            {
                return possibleValues.GetValue(0);
            }

            return null;
        }

        // Builds an IQueryable (generic or not) by generating a backing
        // collection and wrapping it with Queryable.AsQueryable.
        private static object GenerateQueryable(Type queryableType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            bool isGeneric = queryableType.IsGenericType;
            object backingCollection;
            if (isGeneric)
            {
                Type listType = typeof(List<>).MakeGenericType(queryableType.GetGenericArguments());
                backingCollection = GenerateCollection(listType, size, createdObjectReferences);
            }
            else
            {
                backingCollection = GenerateArray(typeof(object[]), size, createdObjectReferences);
            }

            if (backingCollection == null)
            {
                return null;
            }

            if (isGeneric)
            {
                // Invoke the generic AsQueryable(IEnumerable<T>) overload.
                Type enumerableType = typeof(IEnumerable<>).MakeGenericType(queryableType.GetGenericArguments());
                MethodInfo asQueryableMethod = typeof(Queryable).GetMethod("AsQueryable", new[] { enumerableType });
                return asQueryableMethod.Invoke(null, new[] { backingCollection });
            }

            return Queryable.AsQueryable((IEnumerable)backingCollection);
        }

        // Fills any collection type exposing an Add method; null if every
        // generated element was null.
        private static object GenerateCollection(Type collectionType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            Type elementType = collectionType.IsGenericType
                ? collectionType.GetGenericArguments()[0]
                : typeof(object);
            object sampleCollection = Activator.CreateInstance(collectionType);
            MethodInfo addMethod = collectionType.GetMethod("Add");
            bool allElementsNull = true;
            ObjectGenerator generator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object sampleElement = generator.GenerateObject(elementType, createdObjectReferences);
                addMethod.Invoke(sampleCollection, new object[] { sampleElement });
                allElementsNull &= sampleElement == null;
            }

            if (allElementsNull)
            {
                return null;
            }

            return sampleCollection;
        }

        // Generates the underlying value; boxing makes it a valid Nullable<T>.
        private static object GenerateNullable(Type nullableType, Dictionary<Type, object> createdObjectReferences)
        {
            Type underlyingType = nullableType.GetGenericArguments()[0];
            ObjectGenerator generator = new ObjectGenerator();
            return generator.GenerateObject(underlyingType, createdObjectReferences);
        }

        // Instantiates a POCO and fills its settable public properties and
        // fields, tracking created instances to break circular references.
        private static object GenerateComplexObject(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            object sampleObject = null;
            if (createdObjectReferences.TryGetValue(type, out sampleObject))
            {
                // The object has been created already, just return it. This will handle the circular reference case.
                return sampleObject;
            }

            if (type.IsValueType)
            {
                sampleObject = Activator.CreateInstance(type);
            }
            else
            {
                ConstructorInfo defaultCtor = type.GetConstructor(Type.EmptyTypes);
                if (defaultCtor == null)
                {
                    // Cannot instantiate the type because it doesn't have a default constructor
                    return null;
                }

                sampleObject = defaultCtor.Invoke(new object[0]);
            }

            // Register before populating so self-referencing members reuse it.
            createdObjectReferences.Add(type, sampleObject);
            SetPublicProperties(type, sampleObject, createdObjectReferences);
            SetPublicFields(type, sampleObject, createdObjectReferences);
            return sampleObject;
        }

        // Assigns generated values to every writable public instance property.
        private static void SetPublicProperties(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
        {
            PropertyInfo[] properties = type.GetProperties(BindingFlags.Public | BindingFlags.Instance);
            ObjectGenerator generator = new ObjectGenerator();
            foreach (PropertyInfo property in properties)
            {
                if (property.CanWrite)
                {
                    object propertyValue = generator.GenerateObject(property.PropertyType, createdObjectReferences);
                    property.SetValue(obj, propertyValue, null);
                }
            }
        }

        // Assigns generated values to every public instance field.
        private static void SetPublicFields(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
        {
            FieldInfo[] fields = type.GetFields(BindingFlags.Public | BindingFlags.Instance);
            ObjectGenerator generator = new ObjectGenerator();
            foreach (FieldInfo field in fields)
            {
                object fieldValue = generator.GenerateObject(field.FieldType, createdObjectReferences);
                field.SetValue(obj, fieldValue);
            }
        }

        // Produces sample values for primitive/simple framework types. Each
        // instance carries a counter so successive samples differ.
        private class SimpleTypeObjectGenerator
        {
            private long _index = 0;
            private static readonly Dictionary<Type, Func<long, object>> DefaultGenerators = InitializeGenerators();

            [SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity", Justification = "These are simple type factories and cannot be split up.")]
            private static Dictionary<Type, Func<long, object>> InitializeGenerators()
            {
                return new Dictionary<Type, Func<long, object>>
                {
                    { typeof(Boolean), index => true },
                    { typeof(Byte), index => (Byte)64 },
                    { typeof(Char), index => (Char)65 },
                    { typeof(DateTime), index => DateTime.Now },
                    { typeof(DateTimeOffset), index => new DateTimeOffset(DateTime.Now) },
                    { typeof(DBNull), index => DBNull.Value },
                    { typeof(Decimal), index => (Decimal)index },
                    { typeof(Double), index => (Double)(index + 0.1) },
                    { typeof(Guid), index => Guid.NewGuid() },
                    { typeof(Int16), index => (Int16)(index % Int16.MaxValue) },
                    { typeof(Int32), index => (Int32)(index % Int32.MaxValue) },
                    { typeof(Int64), index => (Int64)index },
                    { typeof(Object), index => new object() },
                    { typeof(SByte), index => (SByte)64 },
                    { typeof(Single), index => (Single)(index + 0.1) },
                    { typeof(String), index => String.Format(CultureInfo.CurrentCulture, "sample string {0}", index) },
                    { typeof(TimeSpan), index => TimeSpan.FromTicks(1234567) },
                    { typeof(UInt16), index => (UInt16)(index % UInt16.MaxValue) },
                    { typeof(UInt32), index => (UInt32)(index % UInt32.MaxValue) },
                    { typeof(UInt64), index => (UInt64)index },
                    { typeof(Uri), index => new Uri(String.Format(CultureInfo.CurrentCulture, "http://webapihelppage{0}.com", index)) },
                };
            }

            public static bool CanGenerateObject(Type type)
            {
                return DefaultGenerators.ContainsKey(type);
            }

            public object GenerateObject(Type type)
            {
                return DefaultGenerators[type](++_index);
            }
        }
    }
}
// // ImageHandler.cs // // Author: // Lluis Sanchez <lluis@xamarin.com> // // Copyright (c) 2011 Xamarin Inc // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
using System;
using System.Linq;
using Xwt.Backends;
using MonoMac.AppKit;
using MonoMac.Foundation;
using MonoMac.ObjCRuntime;
using System.Drawing;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using Xwt.Drawing;
using System.IO;
using MonoMac.CoreGraphics;

namespace Xwt.Mac
{
	// Mac (MonoMac/AppKit) backend for Xwt image handling, built on NSImage.
	public class ImageHandler: ImageBackendHandler
	{
		// Cached Objective-C selector/class handles for the raw objc_msgSend
		// calls performed in LoadStockIcon.
		static readonly IntPtr sel_alloc = new Selector ("alloc").Handle;
		static readonly IntPtr sel_release = new Selector ("release").Handle;
		static readonly IntPtr sel_initWithIconRef = new Selector ("initWithIconRef:").Handle;
		static readonly IntPtr cls_NSImage = new Class (typeof (NSImage)).Handle;

		// Cache of already-resolved stock icons, keyed by stock icon id.
		static Dictionary<string, NSImage> stockIcons = new Dictionary<string, NSImage> ();

		public override object LoadFromStream (Stream stream)
		{
			using (NSData data = NSData.FromStream (stream)) {
				return new NSImage (data);
			}
		}

		public override object LoadFromFile (string file)
		{
			return new NSImage (file);
		}

		// Merges the representations of several images into a single NSImage;
		// AppKit picks the best representation for the target resolution.
		public override object CreateMultiResolutionImage (IEnumerable<object> images)
		{
			NSImage res = new NSImage ();
			foreach (NSImage img in images)
				res.AddRepresentations (img.Representations ());
			return res;
		}

		public override object CreateMultiSizeIcon (IEnumerable<object> images)
		{
			// Single image: no merging needed, return it as-is.
			if (images.Count () == 1)
				return images.First ();
			NSImage res = new NSImage ();
			foreach (NSImage img in images)
				res.AddRepresentations (img.Representations ());
			return res;
		}

		public override object CreateCustomDrawn (ImageDrawCallback drawCallback)
		{
			return new CustomImage (ApplicationContext, drawCallback);
		}

		public override Xwt.Drawing.Image GetStockIcon (string id)
		{
			NSImage img;
			if (!stockIcons.TryGetValue (id, out img)) {
				img = LoadStockIcon (id);
				stockIcons [id] = img;
			}
			return ApplicationContext.Toolkit.WrapImage (img);
		}

		// Round-trips the image through a TIFF representation, then re-encodes it
		// in the requested file type and copies the bytes to the target stream.
		public override void SaveToStream (object backend, System.IO.Stream stream, ImageFileType fileType)
		{
			NSImage img = backend as NSImage;
			if (img == null)
				throw new NotSupportedException ();
			var imageData = img.AsTiff ();
			var imageRep = (NSBitmapImageRep) NSBitmapImageRep.ImageRepFromData (imageData);
			var props = new NSDictionary ();
			imageData = imageRep.RepresentationUsingTypeProperties (fileType.ToMacFileType (), props);
			using (var s = imageData.AsStream ()) {
				s.CopyTo (stream);
			}
		}

		public override bool IsBitmap (object handle)
		{
			NSImage img = handle as NSImage;
			return img != null && img.Representations ().OfType<NSBitmapImageRep> ().Any ();
		}

		// Renders a CustomImage into an offscreen bitmap context, or wraps an
		// existing NSImage in a bitmap representation.
		public override object ConvertToBitmap (object handle, double width, double height, double scaleFactor, ImageFormat format)
		{
			int pixelWidth = (int)(width * scaleFactor);
			int pixelHeight = (int)(height * scaleFactor);

			if (handle is CustomImage) {
				var flags = CGBitmapFlags.ByteOrderDefault;
				int bytesPerRow;
				switch (format) {
				case ImageFormat.ARGB32:
					bytesPerRow = pixelWidth * 4;
					flags |= CGBitmapFlags.PremultipliedFirst;
					break;

				case ImageFormat.RGB24:
					bytesPerRow = pixelWidth * 3;
					flags |= CGBitmapFlags.None;
					break;

				default:
					throw new NotImplementedException ("ImageFormat: " + format.ToString ());
				}
				var bmp = new CGBitmapContext (IntPtr.Zero, pixelWidth, pixelHeight, 8, bytesPerRow, Util.DeviceRGBColorSpace, flags);
				// Flip vertically: CG's origin is bottom-left, drawing is done top-left.
				bmp.TranslateCTM (0, pixelHeight);
				bmp.ScaleCTM ((float)scaleFactor, (float)-scaleFactor);

				var ctx = new CGContextBackend {
					Context = bmp,
					Size = new SizeF ((float)width, (float)height),
					InverseViewTransform = bmp.GetCTM ().Invert (),
					ScaleFactor = scaleFactor
				};

				var ci = (CustomImage)handle;
				ci.DrawInContext (ctx);

				var img = new NSImage (((CGBitmapContext)bmp).ToImage (), new SizeF (pixelWidth, pixelHeight));
				var imageData = img.AsTiff ();
				var imageRep = (NSBitmapImageRep)NSBitmapImageRep.ImageRepFromData (imageData);
				var im = new NSImage ();
				im.AddRepresentation (imageRep);
				im.Size = new SizeF ((float)width, (float)height);
				bmp.Dispose ();
				return im;
			}
			else {
				NSImage img = (NSImage)handle;
				NSBitmapImageRep bitmap = img.Representations ().OfType<NSBitmapImageRep> ().FirstOrDefault ();
				if (bitmap == null) {
					// No bitmap representation yet: synthesize one via TIFF.
					var imageData = img.AsTiff ();
					var imageRep = (NSBitmapImageRep)NSBitmapImageRep.ImageRepFromData (imageData);
					var im = new NSImage ();
					im.AddRepresentation (imageRep);
					im.Size = new SizeF ((float)width, (float)height);
					return im;
				}
				return handle;
			}
		}

		public override Xwt.Drawing.Color GetBitmapPixel (object handle, int x, int y)
		{
			NSImage img = (NSImage)handle;
			NSBitmapImageRep bitmap = img.Representations ().OfType<NSBitmapImageRep> ().FirstOrDefault ();
			if (bitmap != null)
				return bitmap.ColorAt (x, y).ToXwtColor ();
			else
				throw new InvalidOperationException ("Not a bitmnap image");
		}

		public override void SetBitmapPixel (object handle, int x, int y, Xwt.Drawing.Color color)
		{
			NSImage img = (NSImage)handle;
			NSBitmapImageRep bitmap = img.Representations ().OfType<NSBitmapImageRep> ().FirstOrDefault ();
			if (bitmap != null)
				bitmap.SetColorAt (color.ToNSColor (), x, y);
			else
				throw new InvalidOperationException ("Not a bitmnap image");
		}

		// A zero Size is how this backend marks icons that carry multiple sizes.
		public override bool HasMultipleSizes (object handle)
		{
			NSImage img = (NSImage)handle;
			return img.Size.Width == 0 && img.Size.Height == 0;
		}

		public override Size GetSize (object handle)
		{
			NSImage img = (NSImage)handle;
			NSBitmapImageRep bitmap = img.Representations ().OfType<NSBitmapImageRep> ().FirstOrDefault ();
			if (bitmap != null)
				return new Size (bitmap.PixelsWide, bitmap.PixelsHigh);
			else
				return new Size ((int)img.Size.Width, (int)img.Size.Height);
		}

		public override object CopyBitmap (object handle)
		{
			return ((NSImage)handle).Copy ();
		}

		public override void CopyBitmapArea (object backend, int srcX, int srcY, int width, int height, object dest, int destX, int destY)
		{
			throw new NotImplementedException ();
		}

		public override object CropBitmap (object backend, int srcX, int srcY, int width, int height)
		{
			NSImage img = (NSImage)backend;
			NSBitmapImageRep bitmap = img.Representations ().OfType<NSBitmapImageRep> ().FirstOrDefault ();
			if (bitmap != null) {
				RectangleF empty = RectangleF.Empty;
				var cgi = bitmap.AsCGImage (ref empty, null, null).WithImageInRect (new RectangleF (srcX, srcY, width, height));
				NSImage res = new NSImage (cgi, new SizeF (width, height));
				cgi.Dispose ();
				return res;
			}
			else
				throw new InvalidOperationException ("Not a bitmnap image");
		}

		// Loads an image from an embedded resource of this assembly.
		static NSImage FromResource (string res)
		{
			var stream = typeof(ImageHandler).Assembly.GetManifestResourceStream (res);
			using (stream)
			using (NSData data = NSData.FromStream (stream)) {
				return new NSImage (data);
			}
		}

		static NSImage NSImageFromResource (string id)
		{
			return (NSImage) Toolkit.GetBackend (Xwt.Drawing.Image.FromResource (typeof(ImageHandler), id));
		}

		// Resolves a stock icon id either to a bundled resource or to a system
		// icon obtained through the Carbon GetIconRef API.
		static NSImage LoadStockIcon (string id)
		{
			switch (id) {
			case StockIconId.ZoomIn:
				return NSImageFromResource ("zoom-in.png");
			case StockIconId.ZoomOut:
				return NSImageFromResource ("zoom-out.png");
			}

			NSImage image = null;
			IntPtr iconRef;
			var type = Util.ToIconType (id);
			if (type != 0 && GetIconRef (-32768/*kOnSystemDisk*/, 1835098995/*kSystemIconsCreator*/, type, out iconRef) == 0) {
				try {
					image = new NSImage (Messaging.IntPtr_objc_msgSend_IntPtr (Messaging.IntPtr_objc_msgSend (cls_NSImage, sel_alloc), sel_initWithIconRef, iconRef));
					// NSImage (IntPtr) ctor retains, but since it is the sole owner, we don't want that
					Messaging.void_objc_msgSend (image.Handle, sel_release);
				} finally {
					ReleaseIconRef (iconRef);
				}
			}

			return image;
		}

		[DllImport ("/System/Library/Frameworks/CoreServices.framework/Frameworks/LaunchServices.framework/LaunchServices")]
		static extern int GetIconRef (short vRefNum, int creator, int iconType, out IntPtr iconRef);

		[DllImport ("/System/Library/Frameworks/CoreServices.framework/Frameworks/LaunchServices.framework/LaunchServices")]
		static extern int ReleaseIconRef (IntPtr iconRef);
	}

	// NSImage subclass whose pixels are produced on demand by a user-supplied
	// draw callback (via NSCustomImageRep).
	public class CustomImage: NSImage
	{
		ImageDrawCallback drawCallback;
		ApplicationContext actx;
		NSCustomImageRep imgRep;

		public CustomImage (ApplicationContext actx, ImageDrawCallback drawCallback)
		{
			this.actx = actx;
			this.drawCallback = drawCallback;
			// AppKit will invoke the "drawIt:" selector on this object whenever
			// the representation needs to render.
			imgRep = new NSCustomImageRep (new Selector ("drawIt:"), this);
			AddRepresentation (imgRep);
		}

		[Export ("drawIt:")]
		public void DrawIt (NSObject ob)
		{
			CGContext ctx = NSGraphicsContext.CurrentContext.GraphicsPort;
			if (!NSGraphicsContext.CurrentContext.IsFlipped) {
				// Custom drawing is done using flipped order, so if the target surface is not flipped, we need to flip it
				ctx.TranslateCTM (0, Size.Height);
				ctx.ScaleCTM (1, -1);
			}
			DrawInContext (ctx);
		}

		internal void DrawInContext (CGContext ctx)
		{
			var backend = new CGContextBackend {
				Context = ctx,
				InverseViewTransform = ctx.GetCTM ().Invert ()
			};
			DrawInContext (backend);
		}

		internal void DrawInContext (CGContextBackend ctx)
		{
			// Prefer the context's own size; fall back to the image size.
			var s = ctx.Size != SizeF.Empty ? ctx.Size : Size;
			actx.InvokeUserCode (delegate {
				drawCallback (ctx, new Rectangle (0, 0, s.Width, s.Height));
			});
		}

		public override CGImage AsCGImage (ref RectangleF proposedDestRect, NSGraphicsContext referenceContext, NSDictionary hints)
		{
			return base.AsCGImage (ref proposedDestRect, referenceContext, hints);
		}

		// Shallow clone: shares the same context and draw callback.
		public CustomImage Clone ()
		{
			return new CustomImage (actx, drawCallback);
		}
	}
}
// ******************************************************************************************************** // Product Name: DotSpatial.Positioning.dll // Description: A library for managing GPS connections. // ******************************************************************************************************** // The contents of this file are subject to the MIT License (MIT) // you may not use this file except in compliance with the License. You may obtain a copy of the License at // http://dotspatial.codeplex.com/license // // Software distributed under the License is distributed on an "AS IS" basis, WITHOUT WARRANTY OF // ANY KIND, either expressed or implied. See the License for the specific language governing rights and // limitations under the License. // // The Original Code is from http://geoframework.codeplex.com/ version 2.0 // // The Initial Developer of this original code is Jon Pearson. Submitted Oct. 21, 2010 by Ben Tombs (tidyup) // // Contributor(s): (Open source contributors should list themselves and their modifications here). // ------------------------------------------------------------------------------------------------------- // | Developer | Date | Comments // |--------------------------|------------|-------------------------------------------------------------- // | Tidyup (Ben Tombs) | 10/21/2010 | Original copy submitted from modified GeoFrameworks 2.0 // | Shade1974 (Ted Dunsford) | 10/21/2010 | Added file headers reviewed formatting with resharper. // ******************************************************************************************************** #if !PocketPC || DesignTime using System.ComponentModel; #endif namespace DotSpatial.Positioning { #if !PocketPC || DesignTime /// <summary> /// Represents a collection of interpolated coordinates using realistic acceleration and deceleration. 
/// </summary>
/// <remarks><para>This class is used by several controls in the DotSpatial.Positioning namespace to give
/// them a more realistic behavior. This class will interpolate coordinates between a
/// given start and end point according to an interpolation technique, and return them
/// as an array. Then, controls and other elements can be moved smoothly by applying
/// the calculated values.</para>
/// <para>NOTE(review): earlier documentation claimed instances are thread safe via
/// internal synchronization, but no synchronization is visible in this class —
/// confirm before sharing an instance across threads.</para></remarks>
[TypeConverter(typeof(ExpandableObjectConverter))]
#endif
public sealed class Interpolator2D
{
    /// <summary>
    /// Start position of the interpolated series.
    /// </summary>
    private Position _minimum = Position.Empty;

    /// <summary>
    /// End position of the interpolated series.
    /// </summary>
    private Position _maximum = Position.Empty;

    /// <summary>
    /// Number of points in the series.
    /// </summary>
    private int _count = 1;

    /// <summary>
    /// Technique used to calculate intermediate points.
    /// </summary>
    private InterpolationMethod _interpolationMethod = InterpolationMethod.Linear;

    /// <summary>
    /// Calculated positions; (re)allocated by <see cref="Recalculate"/>.
    /// </summary>
    private Position[] _values;

    /// <summary>
    /// Interpolator producing the longitude (X) component of each point.
    /// </summary>
    private Interpolator _xValues;

    /// <summary>
    /// Interpolator producing the latitude (Y) component of each point.
    /// </summary>
    private Interpolator _yValues;

    /// <summary>
    /// Creates a new instance.
    /// </summary>
    public Interpolator2D()
    {
    }

    /// <summary>
    /// Creates a new instance using the specified start and end points.
    /// </summary>
    /// <param name="minimum">The minimum.</param>
    /// <param name="maximum">The maximum.</param>
    /// <param name="count">The count.</param>
    /// <remarks>This constructor provides a way to define the bounds of the interpolator,
    /// as well as its number of points. A higher level of points yield a smoother
    /// result but take longer to iterate through.</remarks>
    ////[CLSCompliant(false)]
    public Interpolator2D(Position minimum, Position maximum, int count)
    {
        Count = count;
        _minimum = minimum;
        _maximum = maximum;
        Recalculate();
    }

    /// <summary>
    /// Initializes a new instance of the <see cref="Interpolator2D"/> class.
    /// </summary>
    /// <param name="count">The count.</param>
    /// <param name="mode">The mode.</param>
    public Interpolator2D(int count, InterpolationMethod mode)
    {
        _count = count;
        _interpolationMethod = mode;
        Recalculate();
    }

    /// <summary>
    /// Creates a new instance using the specified end points, count, and interpolation technique.
    /// </summary>
    /// <param name="minimum">The minimum.</param>
    /// <param name="maximum">The maximum.</param>
    /// <param name="count">The count.</param>
    /// <param name="mode">The mode.</param>
    ////[CLSCompliant(false)]
    public Interpolator2D(Position minimum, Position maximum, int count, InterpolationMethod mode)
        : this(minimum, maximum, count)
    {
        _interpolationMethod = mode;
        Recalculate();
    }

    /// <summary>
    /// Returns the starting point of the series.
    /// </summary>
    /// <value>The minimum.</value>
    /// <remarks>Interpolated values are calculated between this point and the end point
    /// stored in the <see cref="Maximum"></see> property. Changing this property causes
    /// the series to be recalculated.</remarks>
    ////[CLSCompliant(false)]
    public Position Minimum
    {
        get { return _minimum; }
        set
        {
            if (_minimum.Equals(value)) return;
            _minimum = value;
            Recalculate();
        }
    }

    /// <summary>
    /// Returns the ending point of the series.
    /// </summary>
    /// <value>The maximum.</value>
    /// <remarks>Interpolated values are calculated between this point and the start point
    /// stored in the <see cref="Minimum"></see> property. Changing this property causes
    /// the series to be recalculated.</remarks>
    ////[CLSCompliant(false)]
    public Position Maximum
    {
        get { return _maximum; }
        set
        {
            if (_maximum.Equals(value)) return;
            _maximum = value;
            Recalculate();
        }
    }

    /// <summary>
    /// Returns a Position object from the interpolated series.
    /// </summary>
    public Position this[int index]
    {
        get { return _values[index]; }
    }

    /// <summary>
    /// Returns the number of calculated positions in the series.
    /// </summary>
    /// <value>The count.</value>
    public int Count
    {
        get { return _count; }
        set
        {
            if (_count == value) return;
            _count = value;

            // The value array is (re)allocated inside Recalculate so that every
            // code path that changes _count stays consistent.
            Recalculate();
        }
    }

    /// <summary>
    /// Indicates the interpolation technique used to calculate intermediate points.
    /// </summary>
    /// <value>The interpolation method.</value>
    /// <remarks>This property controls the acceleration and deceleration techniques
    /// used when calculating intermediate points. Changing this property causes the
    /// series to be recalculated.</remarks>
    public InterpolationMethod InterpolationMethod
    {
        get { return _interpolationMethod; }
        set
        {
            if (_interpolationMethod == value) return;
            _interpolationMethod = value;
            Recalculate();
        }
    }

    // Recalculates all values according to the specified mode
    /// <summary>
    /// Recalculates this instance.
    /// </summary>
    private void Recalculate()
    {
        // FIX: _values was previously allocated only in the Count property
        // setter. The Interpolator2D(int, InterpolationMethod) constructor and
        // the default constructor assign _count directly (and the setter
        // early-returns when the value is unchanged, e.g. count == 1), so this
        // method used to dereference a null array and throw
        // NullReferenceException. Allocate or resize the array here instead,
        // which covers every path that reaches this point.
        if (_values == null || _values.Length != _count)
        {
            _values = new Position[_count];
        }

        // Reinitialize X values
        _xValues = new Interpolator(_minimum.Longitude.DecimalDegrees, _maximum.Longitude.DecimalDegrees, Count);

        // Reinitialize Y values
        _yValues = new Interpolator(_minimum.Latitude.DecimalDegrees, _maximum.Latitude.DecimalDegrees, Count);

        // Convert the arrays into a MapRoute
        for (int iteration = 0; iteration < Count; iteration++)
        {
            // Add a new Position to the value collection
            _values[iteration] = new Position(new Latitude(_yValues[iteration]), new Longitude(_xValues[iteration]));
        }
    }

    /// <summary>
    /// Swaps this instance.
    /// </summary>
    public void Swap()
    {
        Position temp = _minimum;
        _minimum = _maximum;
        _maximum = temp;
        Recalculate();
    }
}
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
// .NET SIMD to solve Burgers' equation
//
// Benchmark based on
// http://taumuon-jabuka.blogspot.co.uk/2014/10/net-saimd-to-solve-burgers-equation.html

using Microsoft.Xunit.Performance;
using System;
using System.Linq;
using System.Numerics;
using System.Runtime.CompilerServices;

[assembly: OptimizeForBenchmarks]
[assembly: MeasureInstructionsRetired]

// Benchmark comparing four implementations of an explicit finite-difference
// solver for Burgers' equation: a baseline, two scalar optimizations, and a
// Vector<double> (SIMD) variant. Results of all variants are cross-validated
// against the baseline in Main.
public class Burgers
{
    // Closed-form (analytical) solution of Burgers' equation; used to build the
    // initial condition sampled on the grid.
    private static double BurgersAnalytical(double t, double x, double nu)
    {
        return -2 * nu * (-(-8 * t + 2 * x) * Math.Exp(-Math.Pow((-4 * t + x), 2) / (4 * nu * (t + 1))) / (4 * nu * (t + 1)) - (-8 * t + 2 * x - 12.5663706143592) * Math.Exp(-Math.Pow(-4 * t + x - 6.28318530717959, 2) / (4 * nu * (t + 1))) / (4 * nu * (t + 1))) / (Math.Exp(-Math.Pow(-4 * t + x - 6.28318530717959, 2) / (4 * nu * (t + 1))) + Math.Exp(-Math.Pow(-4 * t + x, 2) / (4 * nu * (t + 1)))) + 4;
    }

    // Evenly spaced samples starting at 'first'.
    // NOTE(review): divides by num rather than (num - 1), so 'last' itself is
    // NOT included in the returned samples — confirm this is intentional before
    // changing it, since all benchmark variants share this exact grid.
    private static double[] linspace(double first, double last, int num)
    {
        var step = (last - first) / (double)num;
        return Enumerable.Range(0, num).Select(v => (v * step) + first).ToArray();
    }

    // Samples the analytical solution at time t over the grid x.
    private static double[] GetAnalytical(double[] x, double t, double nu)
    {
        double[] u = new double[x.Length];

        for (int i = 0; i < x.Length; ++i)
        {
            u[i] = BurgersAnalytical(t, x[i], nu);
        }

        return u;
    }

    // Baseline: allocates and copies a fresh 'un' array every time step.
    // Interior points use a central difference; the two boundary points wrap
    // around (periodic boundary conditions).
    private static double[] GetCalculated0(int nt, int nx, double dx, double dt, double nu, double[] initial)
    {
        double[] u = new double[nx];
        Array.Copy(initial, u, u.Length);

        for (int tStep = 0; tStep < nt; tStep++)
        {
            double[] un = new double[nx];
            Array.Copy(u, un, u.Length);

            for (int i = 1; i < nx - 1; i++)
            {
                u[i] = un[i] - un[i] * dt / dx * (un[i] - un[i - 1]) + Math.Pow(nu * dt / dx, 2.0) * (un[i + 1] - 2 * un[i] + un[i - 1]);
            }

            u[0] = un[0] - un[0] * dt / dx * (un[0] - un[nx - 1]) + Math.Pow(nu * dt / dx, 2.0) * (un[1] - 2 * un[0] + un[nx - 1]);
            u[nx - 1] = un[nx - 1] - un[nx - 1] * dt / dx * (un[nx - 1] - un[nx - 2]) + Math.Pow(nu * dt / dx, 2.0) * (un[0] - 2 * un[nx - 1] + un[nx - 2]);
        }

        return u;
    }

    // Reduce new array allocation and copying, ping-pong between them
    private static double[] GetCalculated1(int nt, int nx, double dx, double dt, double nu, double[] initial)
    {
        double[] u = new double[nx];
        double[] un = new double[nx];
        Array.Copy(initial, un, un.Length);

        for (int tStep = 0; tStep < nt; tStep++)
        {
            for (int i = 1; i < nx - 1; i++)
            {
                u[i] = un[i] - un[i] * dt / dx * (un[i] - un[i - 1]) + Math.Pow(nu * dt / dx, 2.0) * (un[i + 1] - 2 * un[i] + un[i - 1]);
            }

            u[0] = un[0] - un[0] * dt / dx * (un[0] - un[nx - 1]) + Math.Pow(nu * dt / dx, 2.0) * (un[1] - 2 * un[0] + un[nx - 1]);
            u[nx - 1] = un[nx - 1] - un[nx - 1] * dt / dx * (un[nx - 1] - un[nx - 2]) + Math.Pow(nu * dt / dx, 2.0) * (un[0] - 2 * un[nx - 1] + un[nx - 2]);

            // Swap buffers instead of reallocating/copying each step.
            double[] swap = u;
            u = un;
            un = swap;
        }

        return un;
    }

    // Pull calculation of (nu * dt / dx)^2 out into a variable
    private static double[] GetCalculated2(int nt, int nx, double dx, double dt, double nu, double[] initial)
    {
        double[] u = new double[nx];
        double[] un = new double[nx];
        Array.Copy(initial, un, un.Length);

        double factor = Math.Pow(nu * dt / dx, 2.0);

        for (int tStep = 0; tStep < nt; tStep++)
        {
            for (int i = 1; i < nx - 1; i++)
            {
                u[i] = un[i] - un[i] * dt / dx * (un[i] - un[i - 1]) + factor * (un[i + 1] - 2 * un[i] + un[i - 1]);
            }

            u[0] = un[0] - un[0] * dt / dx * (un[0] - un[nx - 1]) + factor * (un[1] - 2 * un[0] + un[nx - 1]);
            u[nx - 1] = un[nx - 1] - un[nx - 1] * dt / dx * (un[nx - 1] - un[nx - 2]) + factor * (un[0] - 2 * un[nx - 1] + un[nx - 2]);

            double[] swap = u;
            u = un;
            un = swap;
        }

        return un;
    }

    // SIMD
    // Arrays are padded up to a multiple of Vector<double>.Count so the last
    // vector load/store stays in bounds; boundary points are handled in scalar.
    private static double[] GetCalculated3(int nt, int nx, double dx, double dt, double nu, double[] initial)
    {
        var nx2 = nx + (Vector<double>.Count - (nx % Vector<double>.Count));
        double[] u = new double[nx2];
        double[] un = new double[nx2];
        Array.Copy(initial, un, initial.Length);

        double factor = Math.Pow(nu * dt / dx, 2.0);

        for (int tStep = 0; tStep < nt; tStep++)
        {
            for (int i = 1; i < nx2 - Vector<double>.Count + 1; i += Vector<double>.Count)
            {
                // Load the point and its two neighbours as unaligned vectors.
                var vectorIn0 = new Vector<double>(un, i);
                var vectorInPrev = new Vector<double>(un, i - 1);
                var vectorInNext = new Vector<double>(un, i + 1);

                var vectorOut = vectorIn0 - vectorIn0 * (dt / dx) * (vectorIn0 - vectorInPrev) + factor * (vectorInNext - 2.0 * vectorIn0 + vectorInPrev);
                vectorOut.CopyTo(u, i);
            }

            u[0] = un[0] - un[0] * dt / dx * (un[0] - un[nx - 1]) + factor * (un[1] - 2 * un[0] + un[nx - 1]);
            u[nx - 1] = un[nx - 1] - un[nx - 1] * dt / dx * (un[nx - 1] - un[nx - 2]) + factor * (un[0] - 2 * un[nx - 1] + un[nx - 2]);

            double[] swap = u;
            u = un;
            un = swap;
        }

        return un;
    }

    // Runs every variant once, prints timings, and validates variants 1-3
    // against the baseline at every 33rd grid point (tolerance 1e-4).
    // Returns 100 on success, -1 on validation failure.
    public static int Main()
    {
        if (!Vector.IsHardwareAccelerated)
        {
            Console.WriteLine("Not hardware accelerated!");
        }
        else
        {
            Console.WriteLine("Vector<double>.Length: " + Vector<double>.Count);
        }

        int nx = 10001;
#if DEBUG
        int nt = 10;
#else
        int nt = 10000;
#endif
        double dx = 2.0 * Math.PI / (nx - 1.0);
        double nu = 0.07;
        double dt = dx * nu;

        double[] x = linspace(0.0, 2.0 * Math.PI, nx);
        double[] initial = GetAnalytical(x, 0.0, nu);

        // Warmup
        GetCalculated0(1, nx, dx, dt, nu, initial);
        GetCalculated1(1, nx, dx, dt, nu, initial);
        GetCalculated2(1, nx, dx, dt, nu, initial);
        GetCalculated3(1, nx, dx, dt, nu, initial);

        double[][] results = new double[4][];
        var stopwatch = new System.Diagnostics.Stopwatch();

        stopwatch.Start();
        results[0] = GetCalculated0(nt, nx, dx, dt, nu, initial);
        stopwatch.Stop();
        Console.WriteLine("Baseline: " + stopwatch.ElapsedMilliseconds);
        stopwatch.Reset();

        stopwatch.Start();
        results[1] = GetCalculated1(nt, nx, dx, dt, nu, initial);
        stopwatch.Stop();
        Console.WriteLine("Reduce copy: " + stopwatch.ElapsedMilliseconds);
        stopwatch.Reset();

        stopwatch.Start();
        results[2] = GetCalculated2(nt, nx, dx, dt, nu, initial);
        stopwatch.Stop();
        Console.WriteLine("CSE of Math.Pow: " + stopwatch.ElapsedMilliseconds);
        stopwatch.Reset();

        stopwatch.Start();
        results[3] = GetCalculated3(nt, nx, dx, dt, nu, initial);
        stopwatch.Stop();
        Console.WriteLine("SIMD: " + stopwatch.ElapsedMilliseconds);
        stopwatch.Reset();

        for (int i = 0; i < x.Length; i += 33)
        {
            double expected = results[0][i];
            for (int j = 1; j < results.Length; j++)
            {
                bool valid = Math.Abs(expected - results[j][i]) < 1e-4;
                if (!valid)
                {
                    Console.WriteLine("Failed to validate");
                    return -1;
                }
            }
        }

        return 100;
    }

    // Sink used to keep benchmark results observable so the JIT cannot
    // eliminate the computation as dead code.
    static volatile object VolatileObject;

    [MethodImpl(MethodImplOptions.NoInlining)]
    static void Escape(object obj)
    {
        VolatileObject = obj;
    }

    [Benchmark]
    public static void Test0()
    {
        int nx = 10001;
        int nt = 10000;
        double dx = 2.0 * Math.PI / (nx - 1.0);
        double nu = 0.07;
        double dt = dx * nu;

        double[] x = linspace(0.0, 2.0 * Math.PI, nx);
        double[] initial = GetAnalytical(x, 0.0, nu);

        foreach (var iteration in Benchmark.Iterations)
        {
            using (iteration.StartMeasurement())
            {
                double[] results = GetCalculated0(nt, nx, dx, dt, nu, initial);
                Escape(results);
            }
        }
    }

    [Benchmark]
    public static void Test1()
    {
        int nx = 10001;
        int nt = 10000;
        double dx = 2.0 * Math.PI / (nx - 1.0);
        double nu = 0.07;
        double dt = dx * nu;

        double[] x = linspace(0.0, 2.0 * Math.PI, nx);
        double[] initial = GetAnalytical(x, 0.0, nu);

        foreach (var iteration in Benchmark.Iterations)
        {
            using (iteration.StartMeasurement())
            {
                double[] results = GetCalculated1(nt, nx, dx, dt, nu, initial);
                Escape(results);
            }
        }
    }

    [Benchmark]
    public static void Test2()
    {
        int nx = 10001;
        int nt = 10000;
        double dx = 2.0 * Math.PI / (nx - 1.0);
        double nu = 0.07;
        double dt = dx * nu;

        double[] x = linspace(0.0, 2.0 * Math.PI, nx);
        double[] initial = GetAnalytical(x, 0.0, nu);

        foreach (var iteration in Benchmark.Iterations)
        {
            using (iteration.StartMeasurement())
            {
                double[] results = GetCalculated2(nt, nx, dx, dt, nu, initial);
                Escape(results);
            }
        }
    }

    [Benchmark]
    public static void Test3()
    {
        // Make SIMD version work a bit harder....
        int nx = 10001;
        int nt = 2 * 10000;
        double dx = 2.0 * Math.PI / (nx - 1.0);
        double nu = 0.07;
        double dt = dx * nu;

        double[] x = linspace(0.0, 2.0 * Math.PI, nx);
        double[] initial = GetAnalytical(x, 0.0, nu);

        foreach (var iteration in Benchmark.Iterations)
        {
            using (iteration.StartMeasurement())
            {
                double[] results = GetCalculated3(nt, nx, dx, dt, nu, initial);
                Escape(results);
            }
        }
    }
}
using System;
using ChainUtils.BouncyCastle.Crypto.Digests;
using ChainUtils.BouncyCastle.Crypto.Parameters;
using ChainUtils.BouncyCastle.Security;

namespace ChainUtils.BouncyCastle.Crypto.Encodings
{
    /**
    * Optimal Asymmetric Encryption Padding (OAEP) - see PKCS 1 V 2.
    */
    public class OaepEncoding
        : IAsymmetricBlockCipher
    {
        // Digest of the (optional) encoding parameters / label, computed once in the ctor.
        private byte[] defHash;
        // Digest used to hash the encoding parameters.
        private IDigest hash;
        // Digest used by the MGF1 mask generation function (may differ from 'hash').
        private IDigest mgf1Hash;
        // Underlying raw asymmetric cipher (e.g. RSA engine).
        private IAsymmetricBlockCipher engine;
        // Source of the random seed for encryption.
        private SecureRandom random;
        // True when initialized for encryption, false for decryption.
        private bool forEncryption;

        public OaepEncoding(
            IAsymmetricBlockCipher cipher)
            : this(cipher, new Sha1Digest(), null)
        {
        }

        public OaepEncoding(
            IAsymmetricBlockCipher cipher,
            IDigest hash)
            : this(cipher, hash, null)
        {
        }

        public OaepEncoding(
            IAsymmetricBlockCipher cipher,
            IDigest hash,
            byte[] encodingParams)
            : this(cipher, hash, hash, encodingParams)
        {
        }

        public OaepEncoding(
            IAsymmetricBlockCipher cipher,
            IDigest hash,
            IDigest mgf1Hash,
            byte[] encodingParams)
        {
            engine = cipher;
            this.hash = hash;
            this.mgf1Hash = mgf1Hash;
            defHash = new byte[hash.GetDigestSize()];

            // Hash the label (empty when no encoding parameters are supplied).
            if (encodingParams != null)
            {
                hash.BlockUpdate(encodingParams, 0, encodingParams.Length);
            }

            hash.DoFinal(defHash, 0);
        }

        public IAsymmetricBlockCipher GetUnderlyingCipher()
        {
            return engine;
        }

        public string AlgorithmName
        {
            get { return engine.AlgorithmName + "/OAEPPadding"; }
        }

        public void Init(
            bool forEncryption,
            ICipherParameters param)
        {
            if (param is ParametersWithRandom)
            {
                var rParam = (ParametersWithRandom)param;
                random = rParam.Random;
            }
            else
            {
                random = new SecureRandom();
            }

            engine.Init(forEncryption, param);
            this.forEncryption = forEncryption;
        }

        // For encryption the plaintext block must leave room for the padding
        // overhead (1 sentinel byte + two hash-length fields).
        public int GetInputBlockSize()
        {
            var baseBlockSize = engine.GetInputBlockSize();

            if (forEncryption)
            {
                return baseBlockSize - 1 - 2 * defHash.Length;
            }
            else
            {
                return baseBlockSize;
            }
        }

        // Mirror of GetInputBlockSize: decryption output is smaller by the overhead.
        public int GetOutputBlockSize()
        {
            var baseBlockSize = engine.GetOutputBlockSize();

            if (forEncryption)
            {
                return baseBlockSize;
            }
            else
            {
                return baseBlockSize - 1 - 2 * defHash.Length;
            }
        }

        public byte[] ProcessBlock(
            byte[] inBytes,
            int inOff,
            int inLen)
        {
            if (forEncryption)
            {
                return EncodeBlock(inBytes, inOff, inLen);
            }
            else
            {
                return DecodeBlock(inBytes, inOff, inLen);
            }
        }

        // OAEP encode (pad) and encrypt one block.
        private byte[] EncodeBlock(
            byte[] inBytes,
            int inOff,
            int inLen)
        {
            var block = new byte[GetInputBlockSize() + 1 + 2 * defHash.Length];

            //
            // copy in the message
            //
            Array.Copy(inBytes, inOff, block, block.Length - inLen, inLen);

            //
            // add sentinel
            //
            block[block.Length - inLen - 1] = 0x01;

            //
            // as the block is already zeroed - there's no need to add PS (the >= 0 pad of 0)
            //

            //
            // add the hash of the encoding params.
            //
            Array.Copy(defHash, 0, block, defHash.Length, defHash.Length);

            //
            // generate the seed.
            //
            var seed = random.GenerateSeed(defHash.Length);

            //
            // mask the message block.
            //
            var mask = maskGeneratorFunction1(seed, 0, seed.Length, block.Length - defHash.Length);

            for (var i = defHash.Length; i != block.Length; i++)
            {
                block[i] ^= mask[i - defHash.Length];
            }

            //
            // add in the seed
            //
            Array.Copy(seed, 0, block, 0, defHash.Length);

            //
            // mask the seed.
            //
            mask = maskGeneratorFunction1(
                block, defHash.Length, block.Length - defHash.Length, defHash.Length);

            for (var i = 0; i != defHash.Length; i++)
            {
                block[i] ^= mask[i];
            }

            return engine.ProcessBlock(block, 0, block.Length);
        }

        /**
        * @exception InvalidCipherTextException if the decrypted block turns out to
        * be badly formatted.
        */
        private byte[] DecodeBlock(
            byte[] inBytes,
            int inOff,
            int inLen)
        {
            var data = engine.ProcessBlock(inBytes, inOff, inLen);
            byte[] block;

            //
            // as we may have zeros in our leading bytes for the block we produced
            // on encryption, we need to make sure our decrypted block comes back
            // the same size.
            //
            if (data.Length < engine.GetOutputBlockSize())
            {
                block = new byte[engine.GetOutputBlockSize()];

                Array.Copy(data, 0, block, block.Length - data.Length, data.Length);
            }
            else
            {
                block = data;
            }

            if (block.Length < (2 * defHash.Length) + 1)
            {
                throw new InvalidCipherTextException("data too short");
            }

            //
            // unmask the seed.
            //
            var mask = maskGeneratorFunction1(
                block, defHash.Length, block.Length - defHash.Length, defHash.Length);

            for (var i = 0; i != defHash.Length; i++)
            {
                block[i] ^= mask[i];
            }

            //
            // unmask the message block.
            //
            mask = maskGeneratorFunction1(block, 0, defHash.Length, block.Length - defHash.Length);

            for (var i = defHash.Length; i != block.Length; i++)
            {
                block[i] ^= mask[i - defHash.Length];
            }

            //
            // check the hash of the encoding params.
            // long check to try to avoid this being a source of a timing attack.
            //
            {
                var diff = 0;
                for (var i = 0; i < defHash.Length; ++i)
                {
                    diff |= (byte)(defHash[i] ^ block[defHash.Length + i]);
                }

                if (diff != 0)
                    throw new InvalidCipherTextException("data hash wrong");
            }

            //
            // find the data block
            //
            int start;
            for (start = 2 * defHash.Length; start != block.Length; start++)
            {
                if (block[start] != 0)
                {
                    break;
                }
            }

            if (start >= (block.Length - 1) || block[start] != 1)
            {
                throw new InvalidCipherTextException("data start wrong " + start);
            }

            start++;

            //
            // extract the data block
            //
            var output = new byte[block.Length - start];

            Array.Copy(block, start, output, 0, output.Length);

            return output;
        }

        /**
        * int to octet string (big-endian, 4 bytes), as used by MGF1.
        */
        private void ItoOSP(
            int i,
            byte[] sp)
        {
            sp[0] = (byte)((uint)i >> 24);
            sp[1] = (byte)((uint)i >> 16);
            sp[2] = (byte)((uint)i >> 8);
            sp[3] = (byte)((uint)i >> 0);
        }

        /**
        * mask generator function, as described in PKCS1v2.
*/ private byte[] maskGeneratorFunction1( byte[] Z, int zOff, int zLen, int length) { var mask = new byte[length]; var hashBuf = new byte[mgf1Hash.GetDigestSize()]; var C = new byte[4]; var counter = 0; hash.Reset(); do { ItoOSP(counter, C); mgf1Hash.BlockUpdate(Z, zOff, zLen); mgf1Hash.BlockUpdate(C, 0, C.Length); mgf1Hash.DoFinal(hashBuf, 0); Array.Copy(hashBuf, 0, mask, counter * hashBuf.Length, hashBuf.Length); } while (++counter < (length / hashBuf.Length)); if ((counter * hashBuf.Length) < length) { ItoOSP(counter, C); mgf1Hash.BlockUpdate(Z, zOff, zLen); mgf1Hash.BlockUpdate(C, 0, C.Length); mgf1Hash.DoFinal(hashBuf, 0); Array.Copy(hashBuf, 0, mask, counter * hashBuf.Length, mask.Length - (counter * hashBuf.Length)); } return mask; } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
using System.Collections.Generic;
using ParquetSharp.Example.Data;
using ParquetSharp.Example.Data.Simple;
using ParquetSharp.Hadoop;
using ParquetSharp.Hadoop.Example;
using ParquetSharp.Hadoop.Util;
using ParquetSharp.Schema;

namespace ParquetHadoopTests.Filter2.RecordLevel
{
    /// <summary>
    /// Test helper that writes/reads a small "phone book" data set to/from a
    /// Parquet file using the group (record) API.
    /// </summary>
    public class PhoneBookWriter
    {
        private const string schemaString =
            "message user {\n" +
            "  required int64 id;\n" +
            "  optional binary name (UTF8);\n" +
            "  optional group location {\n" +
            "    optional double lon;\n" +
            "    optional double lat;\n" +
            "  }\n" +
            "  optional group phoneNumbers {\n" +
            "    repeated group phone {\n" +
            "      required int64 number;\n" +
            "      optional binary kind (UTF8);\n" +
            "    }\n" +
            "  }\n" +
            "}\n";

        private static readonly MessageType schema = MessageTypeParser.parseMessageType(schemaString);

        /// <summary>
        /// Immutable lon/lat pair with value equality. Either coordinate may be null.
        /// </summary>
        public class Location
        {
            private readonly double? lon;
            private readonly double? lat;

            public Location(double? lon, double? lat)
            {
                this.lon = lon;
                this.lat = lat;
            }

            public double? getLon()
            {
                return lon;
            }

            public double? getLat()
            {
                return lat;
            }

            public override bool Equals(object o)
            {
                if (object.ReferenceEquals(this, o)) return true;
                // BUGFIX: the original cast (Location)o without a type check,
                // throwing InvalidCastException for non-Location arguments.
                Location location = o as Location;
                if (location == null) return false;

                // Nullable<double>.Equals handles null-vs-null and null-vs-value.
                return lat.Equals(location.lat) && lon.Equals(location.lon);
            }

            public override int GetHashCode()
            {
                int result = lon != null ? lon.GetHashCode() : 0;
                result = 31 * result + (lat != null ? lat.GetHashCode() : 0);
                return result;
            }
        }

        /// <summary>
        /// Immutable phone number (+ optional kind) with value equality.
        /// </summary>
        public class PhoneNumber
        {
            private readonly long number;
            private readonly string kind;

            public PhoneNumber(long number, string kind)
            {
                this.number = number;
                this.kind = kind;
            }

            public long getNumber()
            {
                return number;
            }

            public string getKind()
            {
                return kind;
            }

            public override bool Equals(object o)
            {
                if (this == o) return true;
                if (o == null || GetType() != o.GetType()) return false;

                PhoneNumber that = (PhoneNumber)o;

                if (number != that.number) return false;
                if (kind != null ? !kind.Equals(that.kind) : that.kind != null) return false;

                return true;
            }

            public override int GetHashCode()
            {
                int result = (int)(number ^ (number >> 32));
                result = 31 * result + (kind != null ? kind.GetHashCode() : 0);
                return result;
            }
        }

        /// <summary>
        /// A phone-book entry: id, optional name, optional phone numbers and location.
        /// </summary>
        public class User
        {
            private readonly long id;
            private readonly string name;
            private readonly List<PhoneNumber> phoneNumbers;
            private readonly Location location;

            public User(long id, string name, List<PhoneNumber> phoneNumbers, Location location)
            {
                this.id = id;
                this.name = name;
                this.phoneNumbers = phoneNumbers;
                this.location = location;
            }

            public long getId()
            {
                return id;
            }

            public string getName()
            {
                return name;
            }

            public List<PhoneNumber> getPhoneNumbers()
            {
                return phoneNumbers;
            }

            public Location getLocation()
            {
                return location;
            }

            // BUGFIX: this was a lowercase 'equals' that did NOT override
            // object.Equals even though GetHashCode below IS overridden, breaking
            // the Equals/GetHashCode contract (e.g. dictionary/set lookups).
            public override bool Equals(object o)
            {
                if (this == o) return true;
                if (o == null || GetType() != o.GetType()) return false;

                User user = (User)o;

                if (id != user.id) return false;
                if (location != null ? !location.Equals(user.location) : user.location != null) return false;
                if (name != null ? !name.Equals(user.name) : user.name != null) return false;
                if (phoneNumbers != null ? !phoneNumbers.Equals(user.phoneNumbers) : user.phoneNumbers != null) return false;

                return true;
            }

            public override int GetHashCode()
            {
                int result = (int)(id ^ (id >> 32));
                result = 31 * result + (name != null ? name.GetHashCode() : 0);
                result = 31 * result + (phoneNumbers != null ? phoneNumbers.GetHashCode() : 0);
                result = 31 * result + (location != null ? location.GetHashCode() : 0);
                return result;
            }
        }

        /// <summary>
        /// Converts a <see cref="User"/> to a Parquet group matching <c>schema</c>.
        /// Optional fields are only appended when non-null.
        /// </summary>
        public static SimpleGroup groupFromUser(User user)
        {
            SimpleGroup root = new SimpleGroup(schema);
            root.append("id", user.getId());

            if (user.getName() != null)
            {
                root.append("name", user.getName());
            }

            if (user.getPhoneNumbers() != null)
            {
                Group phoneNumbers = root.addGroup("phoneNumbers");
                foreach (PhoneNumber number in user.getPhoneNumbers())
                {
                    Group phone = phoneNumbers.addGroup("phone");
                    phone.append("number", number.getNumber());

                    if (number.getKind() != null)
                    {
                        phone.append("kind", number.getKind());
                    }
                }
            }

            if (user.getLocation() != null)
            {
                Group location = root.addGroup("location");
                if (user.getLocation().getLon() != null)
                {
                    location.append("lon", user.getLocation().getLon());
                }
                if (user.getLocation().getLat() != null)
                {
                    location.append("lat", user.getLocation().getLat());
                }
            }
            return root;
        }

        /// <summary>
        /// Writes the users to a fresh temp file and returns it.
        /// The temp file is deleted first so the Parquet writer can create it.
        /// </summary>
        public static File writeToFile(List<User> users)
        {
            File f = File.createTempFile("phonebook", ".parquet");
            f.deleteOnExit();
            if (!f.delete())
            {
                throw new IOException("couldn't delete tmp file" + f);
            }

            writeToFile(f, users);

            return f;
        }

        /// <summary>
        /// Writes the users to the given file using the group write support.
        /// </summary>
        public static void writeToFile(File f, List<User> users)
        {
            Configuration conf = new Configuration();
            GroupWriteSupport.setSchema(schema, conf);

            ParquetWriter<Group> writer = new ParquetWriter<Group>(new Path(f.getAbsolutePath()), conf, new GroupWriteSupport());
            try
            {
                foreach (User u in users)
                {
                    writer.write(groupFromUser(u));
                }
            }
            finally
            {
                // BUGFIX: close was previously skipped when write threw,
                // leaking the file handle and potentially a truncated file.
                writer.close();
            }
        }

        /// <summary>
        /// Reads every group from the file that passes the given filter.
        /// </summary>
        public static List<Group> readFile(File f, Filter filter)
        {
            Configuration conf = new Configuration();
            GroupWriteSupport.setSchema(schema, conf);

            ParquetReader<Group> reader = ParquetReader<Group>.builder(new GroupReadSupport(), new Path(f.getAbsolutePath()))
                .withConf(conf)
                .withFilter(filter)
                .build();
            try
            {
                Group current;
                List<Group> users = new List<Group>();

                current = reader.read();
                while (current != null)
                {
                    users.Add(current);
                    current = reader.read();
                }

                return users;
            }
            finally
            {
                // BUGFIX: the reader was never closed.
                reader.close();
            }
        }

        public static void Main(string[] args)
        {
            File f = new File(args[0]);
            writeToFile(f, TestRecordLevelFilters.makeUsers());
        }
    }
}
// Copyright (c) Microsoft.  All Rights Reserved.  Licensed under the Apache License, Version 2.0.  See License.txt in the project root for license information.

using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Threading;
using Microsoft.CodeAnalysis.CodeGeneration;
using Microsoft.CodeAnalysis.Editing;
using Microsoft.CodeAnalysis.ImplementType;
using Microsoft.CodeAnalysis.LanguageServices;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Microsoft.CodeAnalysis.Shared.Utilities;

namespace Microsoft.CodeAnalysis.ImplementInterface
{
    internal abstract partial class AbstractImplementInterfaceService
    {
        internal partial class ImplementInterfaceCodeAction
        {
            /// <summary>
            /// Generates the property symbol used to implement <paramref name="property"/>
            /// from the interface, including its get/set accessors, with parameter names
            /// made unique and compiler-only attributes stripped.
            /// </summary>
            private ISymbol GenerateProperty(
                Compilation compilation,
                IPropertySymbol property,
                Accessibility accessibility,
                DeclarationModifiers modifiers,
                bool generateAbstractly,
                bool useExplicitInterfaceSymbol,
                string memberName,
                ImplementTypePropertyGenerationBehavior propertyGenerationBehavior,
                CancellationToken cancellationToken)
            {
                // NOTE: the original fetched a SyntaxGenerator here but never used it;
                // the unused local has been removed.
                var attributesToRemove = AttributesToRemove(compilation);

                var getAccessor = GenerateGetAccessor(
                    compilation, property, accessibility, generateAbstractly, useExplicitInterfaceSymbol,
                    propertyGenerationBehavior, attributesToRemove, cancellationToken);

                var setAccessor = GenerateSetAccessor(
                    compilation, property, accessibility, generateAbstractly, useExplicitInterfaceSymbol,
                    propertyGenerationBehavior, attributesToRemove, cancellationToken);

                var syntaxFacts = Document.GetLanguageService<ISyntaxFactsService>();
                // Indexer parameter names may collide after renaming; make them unique.
                var parameterNames = NameGenerator.EnsureUniqueness(
                    property.Parameters.Select(p => p.Name).ToList(), isCaseSensitive: syntaxFacts.IsCaseSensitive);

                var updatedProperty = property.RenameParameters(parameterNames);

                updatedProperty = updatedProperty.RemoveAttributeFromParameters(attributesToRemove);

                // TODO(cyrusn): Delegate through throughMember if it's non-null.
                return CodeGenerationSymbolFactory.CreatePropertySymbol(
                    updatedProperty,
                    accessibility: accessibility,
                    modifiers: modifiers,
                    explicitInterfaceSymbol: useExplicitInterfaceSymbol ? property : null,
                    name: memberName,
                    getMethod: getAccessor,
                    setMethod: setAccessor);
            }

            /// <summary>
            /// Lists compiler attributes that we want to remove.
            /// The TupleElementNames attribute is compiler generated (it is used for naming tuple element names).
            /// We never want to place it in source code.
            /// Same thing for the Dynamic attribute.
            /// </summary>
            private INamedTypeSymbol[] AttributesToRemove(Compilation compilation)
            {
                return new[]
                {
                    compilation.ComAliasNameAttributeType(),
                    compilation.TupleElementNamesAttributeType(),
                    compilation.DynamicAttributeType()
                };
            }

            /// <summary>
            /// Generates the set accessor for the implementing property, or null when
            /// the interface property has no setter.
            /// </summary>
            private IMethodSymbol GenerateSetAccessor(
                Compilation compilation,
                IPropertySymbol property,
                Accessibility accessibility,
                bool generateAbstractly,
                bool useExplicitInterfaceSymbol,
                ImplementTypePropertyGenerationBehavior propertyGenerationBehavior,
                INamedTypeSymbol[] attributesToRemove,
                CancellationToken cancellationToken)
            {
                if (property.SetMethod == null)
                {
                    return null;
                }

                if (property.GetMethod == null)
                {
                    // Can't have an auto-prop with just a setter.
                    propertyGenerationBehavior = ImplementTypePropertyGenerationBehavior.PreferThrowingProperties;
                }

                var setMethod = property.SetMethod.RemoveInaccessibleAttributesAndAttributesOfTypes(
                    this.State.ClassOrStructType,
                    attributesToRemove);

                return CodeGenerationSymbolFactory.CreateAccessorSymbol(
                    setMethod,
                    attributes: default(ImmutableArray<AttributeData>),
                    accessibility: accessibility,
                    explicitInterfaceSymbol: useExplicitInterfaceSymbol ? property.SetMethod : null,
                    statements: GetSetAccessorStatements(
                        compilation, property, generateAbstractly, propertyGenerationBehavior, cancellationToken));
            }

            /// <summary>
            /// Generates the get accessor for the implementing property, or null when
            /// the interface property has no getter.
            /// </summary>
            private IMethodSymbol GenerateGetAccessor(
                Compilation compilation,
                IPropertySymbol property,
                Accessibility accessibility,
                bool generateAbstractly,
                bool useExplicitInterfaceSymbol,
                ImplementTypePropertyGenerationBehavior propertyGenerationBehavior,
                INamedTypeSymbol[] attributesToRemove,
                CancellationToken cancellationToken)
            {
                if (property.GetMethod == null)
                {
                    return null;
                }

                var getMethod = property.GetMethod.RemoveInaccessibleAttributesAndAttributesOfTypes(
                    this.State.ClassOrStructType,
                    attributesToRemove);

                return CodeGenerationSymbolFactory.CreateAccessorSymbol(
                    getMethod,
                    attributes: default(ImmutableArray<AttributeData>),
                    accessibility: accessibility,
                    explicitInterfaceSymbol: useExplicitInterfaceSymbol ? property.GetMethod : null,
                    statements: GetGetAccessorStatements(
                        compilation, property, generateAbstractly, propertyGenerationBehavior, cancellationToken));
            }

            /// <summary>
            /// Builds the statements for the setter body: empty for abstract members,
            /// a delegation through <c>ThroughMember</c> when one is present, otherwise
            /// either an auto-prop (no statements) or a throwing body per the option.
            /// </summary>
            private ImmutableArray<SyntaxNode> GetSetAccessorStatements(
                Compilation compilation,
                IPropertySymbol property,
                bool generateAbstractly,
                ImplementTypePropertyGenerationBehavior propertyGenerationBehavior,
                CancellationToken cancellationToken)
            {
                if (generateAbstractly)
                {
                    return default(ImmutableArray<SyntaxNode>);
                }

                var factory = this.Document.GetLanguageService<SyntaxGenerator>();
                if (ThroughMember != null)
                {
                    var throughExpression = CreateThroughExpression(factory);
                    SyntaxNode expression;

                    if (property.IsIndexer)
                    {
                        expression = throughExpression;
                    }
                    else
                    {
                        expression = factory.MemberAccessExpression(
                            throughExpression, factory.IdentifierName(property.Name));
                    }

                    if (property.Parameters.Length > 0)
                    {
                        var arguments = factory.CreateArguments(property.Parameters.As<IParameterSymbol>());
                        expression = factory.ElementAccessExpression(expression, arguments);
                    }

                    expression = factory.AssignmentStatement(expression, factory.IdentifierName("value"));

                    return ImmutableArray.Create(factory.ExpressionStatement(expression));
                }

                return propertyGenerationBehavior == ImplementTypePropertyGenerationBehavior.PreferAutoProperties
                    ? default(ImmutableArray<SyntaxNode>)
                    : factory.CreateThrowNotImplementedStatementBlock(compilation);
            }

            /// <summary>
            /// Builds the statements for the getter body; mirrors
            /// <see cref="GetSetAccessorStatements"/> but returns the accessed value.
            /// </summary>
            private ImmutableArray<SyntaxNode> GetGetAccessorStatements(
                Compilation compilation,
                IPropertySymbol property,
                bool generateAbstractly,
                ImplementTypePropertyGenerationBehavior propertyGenerationBehavior,
                CancellationToken cancellationToken)
            {
                if (generateAbstractly)
                {
                    return default(ImmutableArray<SyntaxNode>);
                }

                var factory = this.Document.GetLanguageService<SyntaxGenerator>();
                if (ThroughMember != null)
                {
                    var throughExpression = CreateThroughExpression(factory);
                    SyntaxNode expression;

                    if (property.IsIndexer)
                    {
                        expression = throughExpression;
                    }
                    else
                    {
                        expression = factory.MemberAccessExpression(
                            throughExpression, factory.IdentifierName(property.Name));
                    }

                    if (property.Parameters.Length > 0)
                    {
                        var arguments = factory.CreateArguments(property.Parameters.As<IParameterSymbol>());
                        expression = factory.ElementAccessExpression(expression, arguments);
                    }

                    return ImmutableArray.Create(factory.ReturnStatement(expression));
                }

                return propertyGenerationBehavior == ImplementTypePropertyGenerationBehavior.PreferAutoProperties
                    ? default(ImmutableArray<SyntaxNode>)
                    : factory.CreateThrowNotImplementedStatementBlock(compilation);
            }
        }
    }
}
using System;
using System.Collections.Generic;
using System.Linq;

namespace Avalonia.Styling
{
    /// <summary>
    /// Extension methods for <see cref="Selector"/>.
    /// </summary>
    public static class Selectors
    {
        /// <summary>
        /// Returns a selector which matches a previous selector's child.
        /// </summary>
        /// <param name="previous">The previous selector.</param>
        /// <returns>The selector.</returns>
        public static Selector Child(this Selector previous) => new ChildSelector(previous);

        /// <summary>
        /// Returns a selector which matches a control's style class.
        /// </summary>
        /// <param name="previous">The previous selector.</param>
        /// <param name="name">The name of the style class.</param>
        /// <returns>The selector.</returns>
        public static Selector Class(this Selector previous, string name)
        {
            Contract.Requires<ArgumentNullException>(name != null);
            Contract.Requires<ArgumentException>(!string.IsNullOrWhiteSpace(name));

            // If the chain already ends in a type/name/class selector, fold the
            // class into it rather than growing the chain.
            if (previous is TypeNameAndClassSelector typeSelector)
            {
                typeSelector.Classes.Add(name);
                return typeSelector;
            }

            return TypeNameAndClassSelector.ForClass(previous, name);
        }

        /// <summary>
        /// Returns a selector which matches a descendant of a previous selector.
        /// </summary>
        /// <param name="previous">The previous selector.</param>
        /// <returns>The selector.</returns>
        public static Selector Descendant(this Selector previous) => new DescendantSelector(previous);

        /// <summary>
        /// Returns a selector which matches a type or a derived type.
        /// </summary>
        /// <param name="previous">The previous selector.</param>
        /// <param name="type">The type.</param>
        /// <returns>The selector.</returns>
        public static Selector Is(this Selector previous, Type type)
        {
            Contract.Requires<ArgumentNullException>(type != null);

            return TypeNameAndClassSelector.Is(previous, type);
        }

        /// <summary>
        /// Returns a selector which matches a type or a derived type.
        /// </summary>
        /// <typeparam name="T">The type.</typeparam>
        /// <param name="previous">The previous selector.</param>
        /// <returns>The selector.</returns>
        public static Selector Is<T>(this Selector previous) where T : IStyleable => previous.Is(typeof(T));

        /// <summary>
        /// Returns a selector which matches a control's Name.
        /// </summary>
        /// <param name="previous">The previous selector.</param>
        /// <param name="name">The name.</param>
        /// <returns>The selector.</returns>
        public static Selector Name(this Selector previous, string name)
        {
            Contract.Requires<ArgumentNullException>(name != null);
            Contract.Requires<ArgumentException>(!string.IsNullOrWhiteSpace(name));

            // As with Class, merge the name into an existing type/name/class selector.
            if (previous is TypeNameAndClassSelector typeSelector)
            {
                typeSelector.Name = name;
                return typeSelector;
            }

            return TypeNameAndClassSelector.ForName(previous, name);
        }

        /// <summary>
        /// Returns a selector which inverts the results of selector argument.
        /// </summary>
        /// <param name="previous">The previous selector.</param>
        /// <param name="argument">The selector to be not-ed.</param>
        /// <returns>The selector.</returns>
        public static Selector Not(this Selector previous, Func<Selector, Selector> argument) =>
            new NotSelector(previous, argument(null));

        /// <summary>
        /// Returns a selector which inverts the results of selector argument.
        /// </summary>
        /// <param name="previous">The previous selector.</param>
        /// <param name="argument">The selector to be not-ed.</param>
        /// <returns>The selector.</returns>
        public static Selector Not(this Selector previous, Selector argument) =>
            new NotSelector(previous, argument);

        /// <inheritdoc cref="NthChildSelector"/>
        /// <inheritdoc cref="NthChildSelector(Selector?, int, int)"/>
        /// <returns>The selector.</returns>
        public static Selector NthChild(this Selector previous, int step, int offset) =>
            new NthChildSelector(previous, step, offset);

        /// <inheritdoc cref="NthLastChildSelector"/>
        /// <inheritdoc cref="NthLastChildSelector(Selector?, int, int)"/>
        /// <returns>The selector.</returns>
        public static Selector NthLastChild(this Selector previous, int step, int offset) =>
            new NthLastChildSelector(previous, step, offset);

        /// <summary>
        /// Returns a selector which matches a type.
        /// </summary>
        /// <param name="previous">The previous selector.</param>
        /// <param name="type">The type.</param>
        /// <returns>The selector.</returns>
        public static Selector OfType(this Selector previous, Type type)
        {
            Contract.Requires<ArgumentNullException>(type != null);

            return TypeNameAndClassSelector.OfType(previous, type);
        }

        /// <summary>
        /// Returns a selector which matches a type.
        /// </summary>
        /// <typeparam name="T">The type.</typeparam>
        /// <param name="previous">The previous selector.</param>
        /// <returns>The selector.</returns>
        public static Selector OfType<T>(this Selector previous) where T : IStyleable =>
            previous.OfType(typeof(T));

        /// <summary>
        /// Returns a selector which ORs selectors.
        /// </summary>
        /// <param name="selectors">The selectors to be OR'd.</param>
        /// <returns>The selector.</returns>
        public static Selector Or(params Selector[] selectors) => new OrSelector(selectors);

        /// <summary>
        /// Returns a selector which ORs selectors.
        /// </summary>
        /// <param name="selectors">The selectors to be OR'd.</param>
        /// <returns>The selector.</returns>
        public static Selector Or(IReadOnlyList<Selector> selectors) => new OrSelector(selectors);

        /// <summary>
        /// Returns a selector which matches a control with the specified property value.
        /// </summary>
        /// <typeparam name="T">The property type.</typeparam>
        /// <param name="previous">The previous selector.</param>
        /// <param name="property">The property.</param>
        /// <param name="value">The property value.</param>
        /// <returns>The selector.</returns>
        public static Selector PropertyEquals<T>(this Selector previous, AvaloniaProperty<T> property, object value)
        {
            Contract.Requires<ArgumentNullException>(property != null);

            return new PropertyEqualsSelector(previous, property, value);
        }

        /// <summary>
        /// Returns a selector which matches a control with the specified property value.
        /// </summary>
        /// <param name="previous">The previous selector.</param>
        /// <param name="property">The property.</param>
        /// <param name="value">The property value.</param>
        /// <returns>The selector.</returns>
        public static Selector PropertyEquals(this Selector previous, AvaloniaProperty property, object value)
        {
            Contract.Requires<ArgumentNullException>(property != null);

            return new PropertyEqualsSelector(previous, property, value);
        }

        /// <summary>
        /// Returns a selector which enters a lookless control's template.
        /// </summary>
        /// <param name="previous">The previous selector.</param>
        /// <returns>The selector.</returns>
        public static Selector Template(this Selector previous) => new TemplateSelector(previous);
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System;
using System.Threading;
using System.Collections;
using System.Collections.Generic;
using Microsoft.Build.Framework;
using Microsoft.Build.BuildEngine.Shared;

namespace Microsoft.Build.BuildEngine
{
    /// <summary>
    /// This class provides a multiple-writer, single-reader queue. This queue can be written to
    /// by multiple threads at a time, but it is designed to be only read by a single thread.
    /// The way it works is as follows: we have two queues, one for reading from, and one for
    /// writing to. The writing queue is protected by a lock so that multiple threads can write to
    /// it. When a reading thread wants to read all the queued items, we swap the writing queue
    /// for another (empty) one. The writing queue then becomes the reading queue, and the empty
    /// queue becomes the new writing queue. This allows the reader to safely read from the swapped
    /// out queue without regard to new items being written to the other queue.
    /// </summary>
    /// <typeparam name="T"></typeparam>
    internal sealed class DualQueue<T>
    {
        #region Constructors

        /// <summary>
        /// Default constructor. Both backing queues start empty; queueEmptyEvent is
        /// allocated lazily (see <see cref="QueueEmptyEvent"/>) because most callers
        /// never wait on it.
        /// </summary>
        internal DualQueue()
        {
            this.queueReadyEvent = new ManualResetEvent(false /* event is reset initially */);
            this.queueEmptyEvent = null;
            this.queueLock = new object();
            this.backingQueueA = new Queue<T>();
            this.backingQueueB = new Queue<T>();
            // backingQueueA starts out as the posting (writing) queue.
            this.queue = this.backingQueueA;
        }

        #endregion

        #region Properties

        /// <summary>
        /// Event indicating that there are items in the queue
        /// </summary>
        internal WaitHandle QueueReadyEvent
        {
            get
            {
                return this.queueReadyEvent;
            }
        }

        /// <summary>
        /// Event indicating that the queue is empty
        /// </summary>
        internal WaitHandle QueueEmptyEvent
        {
            get
            {
                // Lazily allocate the queue empty event; the lock guarantees only
                // one instance is ever created even under concurrent first access.
                lock (queueLock)
                {
                    if (this.queueEmptyEvent == null)
                    {
                        this.queueEmptyEvent = new ManualResetEvent(false /* event is reset initially */);
                    }
                }

                return this.queueEmptyEvent;
            }
        }

        /// <summary>
        /// Primarily used for testing to get the count of items posted to the queue.
        /// NOTE(review): read without the lock, so the value is only approximate
        /// while writers are active — acceptable for its testing use.
        /// </summary>
        /// <returns></returns>
        internal int Count
        {
            get
            {
                // Sum both as the number of items is the sum of items in both queues
                Queue<T> readingQueue = backingQueueB;

                // figure out the current reading queue
                if (queue == backingQueueB)
                {
                    readingQueue = backingQueueA;
                }

                return readingQueue.Count + writingQueueCount;
            }
        }

        /// <summary>
        /// The count of items in the writing queue. Used to decide if the queue is backing up
        /// </summary>
        /// <returns></returns>
        internal int WritingQueueCount
        {
            get
            {
                return writingQueueCount;
            }
        }

        #endregion

        #region Methods

        /// <summary>
        /// Adds the given item to the queue.
        /// </summary>
        /// <param name="item"></param>
        internal void Enqueue(T item)
        {
            lock (queueLock)
            {
                // queue the event
                queue.Enqueue(item);
                writingQueueCount++;

                // if the queue transitions from empty to non-empty reset the queue empty event and raise queue ready event
                if ( writingQueueCount == 1)
                {
                    // raise the event saying queue contains data
                    queueReadyEvent.Set();

                    // reset queue empty
                    if (queueEmptyEvent != null)
                    {
                        queueEmptyEvent.Reset();
                    }
                }
            }
        }

        /// <summary>
        /// Adds the given items to the queue.
        /// </summary>
        /// <param name="items"></param>
        internal void EnqueueArray(T[] items)
        {
            lock (queueLock)
            {
                // queue the event
                foreach (T item in items)
                {
                    queue.Enqueue(item);
                }
                writingQueueCount += items.Length;

                // if the queue transitions from empty to non-empty reset the queue empty event
                // (count == items.Length means the writing queue was empty before this call)
                if (writingQueueCount == items.Length)
                {
                    // raise the event saying queue contains data
                    queueReadyEvent.Set();

                    // reset queue empty
                    if (queueEmptyEvent != null)
                    {
                        queueEmptyEvent.Reset();
                    }
                }
            }
        }

        /// <summary>
        /// Clear the contents of the queue
        /// </summary>
        internal void Clear()
        {
            lock (queueLock)
            {
                backingQueueA.Clear();
                backingQueueB.Clear();
                writingQueueCount = 0;

                // reset queue ready event because the queue is now empty
                queueReadyEvent.Reset();

                // raise queue empty event because the queue is now empty
                if (queueEmptyEvent != null)
                {
                    queueEmptyEvent.Set();
                }
            }
        }

        /// <summary>
        /// Gets an item off the queue.
        /// </summary>
        /// <returns>The top item off the queue, or null if queue is empty.</returns>
        internal T Dequeue()
        {
            return GetTopItem(true);
        }

        /// <summary>
        /// Get a pointer to the top item without dequeueing it
        /// </summary>
        /// <returns>The top item off the queue, or null if queue is empty.</returns>
        internal T Peek()
        {
            return GetTopItem(false);
        }

        /// <summary>
        /// Finds the top item in the queue. If passed in argument is true the top value is dequeued.
        /// Only safe to call from the single reader thread.
        /// </summary>
        /// <returns>The top item off the queue, or null if queue is empty.</returns>
        private T GetTopItem(bool dequeue)
        {
            Queue<T> readingQueue = GetReadingQueue();
            T item = default(T);
            if (readingQueue.Count > 0)
            {
                item = dequeue ? readingQueue.Dequeue() : readingQueue.Peek();
            }

            // if the reading queue is now empty
            if (readingQueue.Count == 0)
            {
                // wait until the current writer (if any) is done with the posting queue
                lock (queueLock)
                {
                    // confirm both queues are now empty -- this check is important because
                    // a writer may have added to the queue while we were waiting for a lock
                    if ((backingQueueA.Count == 0) && (backingQueueB.Count == 0))
                    {
                        // signal there are no more items to read
                        queueReadyEvent.Reset();

                        if (queueEmptyEvent != null)
                        {
                            queueEmptyEvent.Set();
                        }
                    }
                }
            }

            return item;
        }

        /// <summary>
        /// Returns one of the two behind-the-scenes queues that is not being
        /// used for posting into.
        /// </summary>
        /// <returns>The queue to read from.</returns>
        private Queue<T> GetReadingQueue()
        {
            Queue<T> readingQueue = backingQueueB;

            // figure out the current reading queue
            if (queue == backingQueueB)
            {
                readingQueue = backingQueueA;
            }

            // if the current reading queue is non-empty, return it; otherwise, if
            // the current posting queue is non-empty, swap it for the reading queue
            // and return it instead; if both are empty, just return the current reading
            // queue -- this logic allows us to lock only when strictly necessary
            if (readingQueue.Count == 0)
            {
                lock (queueLock)
                {
                    if (queue.Count > 0)
                    {
                        // swap: the posting queue becomes the reading queue and vice versa
                        Queue<T> postingQueue = queue;
                        queue = readingQueue;
                        readingQueue = postingQueue;
                        writingQueueCount = 0;
                    }
                }
            }

            return readingQueue;
        }

        /// <summary>
        /// Primarily used for unit tests to verify an item is in one of the internal queues
        /// </summary>
        /// <param name="item">Items to check for in the two internal queues</param>
        /// <returns></returns>
        internal bool Contains(T item)
        {
            // The dual queue in general contains an item if the item exists
            // in one or even both of the backing queues
            return backingQueueA.Contains(item) || backingQueueB.Contains(item);
        }

        #endregion

        #region Data

        /// <summary>
        /// This event is set when the queue contains items to read.
        /// </summary>
        private ManualResetEvent queueReadyEvent;

        /// <summary>
        /// This event is set when the queue is empty
        /// </summary>
        private ManualResetEvent queueEmptyEvent;

        /// <summary>
        /// This object protects the posting queue.
        /// </summary>
        private object queueLock;

        /// <summary>
        /// This queue reference serves as the "posting queue". This queue reference
        /// points to one of the two queues that are swapped behind the scenes.
        /// </summary>
        private Queue<T> queue;

        /// <summary>
        /// One of the two behind-the-scenes queues that are swapped.
        /// </summary>
        private Queue<T> backingQueueA;

        /// <summary>
        /// One of the two behind-the-scenes queues that are swapped.
        /// </summary>
        private Queue<T> backingQueueB;

        /// <summary>
        /// Count of the current writer queue - we only own the reader queue in Count so we have to keep
        /// the count for the writer queue separately.
        /// </summary>
        private int writingQueueCount;

        #endregion
    }
}
// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.

using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using osu.Framework.Allocation;
using osu.Framework.Audio;
using osu.Framework.Audio.Sample;
using osu.Framework.Bindables;
using osu.Framework.Graphics.OpenGL.Textures;
using osu.Framework.Graphics.Shaders;
using osu.Framework.Graphics.Textures;
using osu.Framework.IO.Stores;
using osu.Framework.Platform;
using osu.Game.Rulesets.Configuration;

namespace osu.Game.Rulesets.UI
{
    /// <summary>
    /// A dependency container providing ruleset-local resources (textures, samples, shaders,
    /// config) that fall back to the game-wide stores when a resource is not present in the
    /// ruleset's own resource store.
    /// </summary>
    public class DrawableRulesetDependencies : DependencyContainer, IDisposable
    {
        /// <summary>
        /// The texture store to be used for the ruleset.
        /// </summary>
        public TextureStore TextureStore { get; }

        /// <summary>
        /// The sample store to be used for the ruleset.
        /// </summary>
        /// <remarks>
        /// This is the local sample store pointing to the ruleset sample resources,
        /// the cached sample store (<see cref="FallbackSampleStore"/>) retrieves from
        /// this store and falls back to the parent store if this store doesn't have the requested sample.
        /// </remarks>
        public ISampleStore SampleStore { get; }

        /// <summary>
        /// The shader manager to be used for the ruleset.
        /// </summary>
        public ShaderManager ShaderManager { get; }

        /// <summary>
        /// The ruleset config manager. Set to null on disposal.
        /// </summary>
        public IRulesetConfigManager RulesetConfigManager { get; private set; }

        /// <summary>
        /// Creates the ruleset-local stores from <paramref name="ruleset"/>'s resource store
        /// (when one exists) and caches fallback-wrapped versions of them into this container.
        /// </summary>
        /// <param name="ruleset">The ruleset whose resources should be exposed.</param>
        /// <param name="parent">The parent dependency container providing the game-wide stores.</param>
        public DrawableRulesetDependencies(Ruleset ruleset, IReadOnlyDependencyContainer parent)
            : base(parent)
        {
            var resources = ruleset.CreateResourceStore();

            if (resources != null)
            {
                // Each store is first created against the ruleset resources, then re-assigned to a
                // fallback wrapper combining it with the parent store before being cached.
                TextureStore = new TextureStore(parent.Get<GameHost>().CreateTextureLoaderStore(new NamespacedResourceStore<byte[]>(resources, @"Textures")));
                CacheAs(TextureStore = new FallbackTextureStore(TextureStore, parent.Get<TextureStore>()));

                SampleStore = parent.Get<AudioManager>().GetSampleStore(new NamespacedResourceStore<byte[]>(resources, @"Samples"));
                SampleStore.PlaybackConcurrency = OsuGameBase.SAMPLE_CONCURRENCY;
                CacheAs(SampleStore = new FallbackSampleStore(SampleStore, parent.Get<ISampleStore>()));

                ShaderManager = new ShaderManager(new NamespacedResourceStore<byte[]>(resources, @"Shaders"));
                CacheAs(ShaderManager = new FallbackShaderManager(ShaderManager, parent.Get<ShaderManager>()));
            }

            RulesetConfigManager = parent.Get<IRulesetConfigCache>().GetConfigFor(ruleset);
            if (RulesetConfigManager != null)
                Cache(RulesetConfigManager);
        }

        #region Disposal

        ~DrawableRulesetDependencies()
        {
            // required to potentially clean up sample store from audio hierarchy.
            Dispose(false);
        }

        public void Dispose()
        {
            Dispose(true);
            GC.SuppressFinalize(this);
        }

        private bool isDisposed;

        // Made virtual: this class is unsealed and declares a finalizer, so subclasses must be
        // able to participate in the standard dispose pattern (CA1063). Previously non-virtual,
        // which made correct disposal impossible to extend.
        protected virtual void Dispose(bool disposing)
        {
            if (isDisposed)
                return;

            isDisposed = true;

            SampleStore?.Dispose();
            TextureStore?.Dispose();
            ShaderManager?.Dispose();
            RulesetConfigManager = null;
        }

        #endregion

        /// <summary>
        /// A sample store which adds a fallback source and prevents disposal of the fallback source.
        /// </summary>
        private class FallbackSampleStore : ISampleStore
        {
            private readonly ISampleStore primary;
            private readonly ISampleStore fallback;

            public FallbackSampleStore(ISampleStore primary, ISampleStore fallback)
            {
                this.primary = primary;
                this.fallback = fallback;
            }

            public Sample Get(string name) => primary.Get(name) ?? fallback.Get(name);

            // NOTE(review): `??` on the Task (not its result) — the fallback is only consulted when
            // the primary returns a null Task, matching the original behaviour.
            public Task<Sample> GetAsync(string name, CancellationToken cancellationToken = default) => primary.GetAsync(name, cancellationToken) ?? fallback.GetAsync(name, cancellationToken);

            public Stream GetStream(string name) => primary.GetStream(name) ?? fallback.GetStream(name);

            public IEnumerable<string> GetAvailableResources() => throw new NotSupportedException();

            public void AddAdjustment(AdjustableProperty type, IBindable<double> adjustBindable) => throw new NotSupportedException();

            public void RemoveAdjustment(AdjustableProperty type, IBindable<double> adjustBindable) => throw new NotSupportedException();

            public void RemoveAllAdjustments(AdjustableProperty type) => throw new NotSupportedException();

            public void BindAdjustments(IAggregateAudioAdjustment component) => throw new NotImplementedException();

            public void UnbindAdjustments(IAggregateAudioAdjustment component) => throw new NotImplementedException();

            public BindableNumber<double> Volume => throw new NotSupportedException();

            public BindableNumber<double> Balance => throw new NotSupportedException();

            public BindableNumber<double> Frequency => throw new NotSupportedException();

            public BindableNumber<double> Tempo => throw new NotSupportedException();

            public IBindable<double> AggregateVolume => throw new NotSupportedException();

            public IBindable<double> AggregateBalance => throw new NotSupportedException();

            public IBindable<double> AggregateFrequency => throw new NotSupportedException();

            public IBindable<double> AggregateTempo => throw new NotSupportedException();

            public int PlaybackConcurrency
            {
                get => throw new NotSupportedException();
                set => throw new NotSupportedException();
            }

            public void Dispose()
            {
                // Only the primary (ruleset-local) store is owned; the fallback is game-wide.
                primary?.Dispose();
            }
        }

        /// <summary>
        /// A texture store which adds a fallback source and prevents disposal of the fallback source.
        /// </summary>
        private class FallbackTextureStore : TextureStore
        {
            private readonly TextureStore primary;
            private readonly TextureStore fallback;

            public FallbackTextureStore(TextureStore primary, TextureStore fallback)
            {
                this.primary = primary;
                this.fallback = fallback;
            }

            public override Texture Get(string name, WrapMode wrapModeS, WrapMode wrapModeT)
                => primary.Get(name, wrapModeS, wrapModeT) ?? fallback.Get(name, wrapModeS, wrapModeT);

            protected override void Dispose(bool disposing)
            {
                base.Dispose(disposing);
                // Only the primary (ruleset-local) store is owned; the fallback is game-wide.
                primary?.Dispose();
            }
        }

        /// <summary>
        /// A shader manager which adds a fallback source and prevents disposal of the fallback source.
        /// </summary>
        private class FallbackShaderManager : ShaderManager
        {
            private readonly ShaderManager primary;
            private readonly ShaderManager fallback;

            public FallbackShaderManager(ShaderManager primary, ShaderManager fallback)
                : base(new ResourceStore<byte[]>())
            {
                this.primary = primary;
                this.fallback = fallback;
            }

            public override byte[] LoadRaw(string name) => primary.LoadRaw(name) ?? fallback.LoadRaw(name);

            protected override void Dispose(bool disposing)
            {
                base.Dispose(disposing);
                // Only the primary (ruleset-local) manager is owned; the fallback is game-wide.
                primary?.Dispose();
            }
        }
    }
}
using System;
using System.Runtime.InteropServices;
using System.Linq;

namespace SteamNative
{
    /// <summary>
    /// Generated thin wrapper over the native ISteamMatchmaking interface.
    /// Every method forwards directly to the platform-specific implementation selected
    /// in the constructor; no extra validation or logic is added at this layer.
    /// The inline /*type*/ comments record the native parameter types.
    /// </summary>
    internal unsafe class SteamMatchmaking : IDisposable
    {
        //
        // Holds a platform specific implementation
        //
        internal Platform.Interface platform;
        internal Facepunch.Steamworks.BaseSteamworks steamworks;

        //
        // Constructor decides which implementation to use based on current platform
        //
        internal SteamMatchmaking( Facepunch.Steamworks.BaseSteamworks steamworks, IntPtr pointer )
        {
            this.steamworks = steamworks;

            // Pick the correct calling-convention shim for the running OS/bitness.
            // If none matches, platform stays null and IsValid reports false.
            if ( Platform.IsWindows64 ) platform = new Platform.Win64( pointer );
            else if ( Platform.IsWindows32 ) platform = new Platform.Win32( pointer );
            else if ( Platform.IsLinux32 ) platform = new Platform.Linux32( pointer );
            else if ( Platform.IsLinux64 ) platform = new Platform.Linux64( pointer );
            else if ( Platform.IsOsx ) platform = new Platform.Mac( pointer );
        }

        //
        // Class is invalid if we don't have a valid implementation
        //
        public bool IsValid{ get{ return platform != null && platform.IsValid; } }

        //
        // When shutting down clear all the internals to avoid accidental use
        //
        public virtual void Dispose()
        {
            if ( platform != null )
            {
                platform.Dispose();
                platform = null;
            }
        }

        // int
        public int AddFavoriteGame( AppId_t nAppID /*AppId_t*/, uint nIP /*uint32*/, ushort nConnPort /*uint16*/, ushort nQueryPort /*uint16*/, uint unFlags /*uint32*/, uint rTime32LastPlayedOnServer /*uint32*/ )
        {
            return platform.ISteamMatchmaking_AddFavoriteGame( nAppID.Value, nIP, nConnPort, nQueryPort, unFlags, rTime32LastPlayedOnServer );
        }

        // void
        public void AddRequestLobbyListCompatibleMembersFilter( CSteamID steamIDLobby /*class CSteamID*/ )
        {
            platform.ISteamMatchmaking_AddRequestLobbyListCompatibleMembersFilter( steamIDLobby.Value );
        }

        // void
        public void AddRequestLobbyListDistanceFilter( LobbyDistanceFilter eLobbyDistanceFilter /*ELobbyDistanceFilter*/ )
        {
            platform.ISteamMatchmaking_AddRequestLobbyListDistanceFilter( eLobbyDistanceFilter );
        }

        // void
        public void AddRequestLobbyListFilterSlotsAvailable( int nSlotsAvailable /*int*/ )
        {
            platform.ISteamMatchmaking_AddRequestLobbyListFilterSlotsAvailable( nSlotsAvailable );
        }

        // void
        public void AddRequestLobbyListNearValueFilter( string pchKeyToMatch /*const char **/, int nValueToBeCloseTo /*int*/ )
        {
            platform.ISteamMatchmaking_AddRequestLobbyListNearValueFilter( pchKeyToMatch, nValueToBeCloseTo );
        }

        // void
        public void AddRequestLobbyListNumericalFilter( string pchKeyToMatch /*const char **/, int nValueToMatch /*int*/, LobbyComparison eComparisonType /*ELobbyComparison*/ )
        {
            platform.ISteamMatchmaking_AddRequestLobbyListNumericalFilter( pchKeyToMatch, nValueToMatch, eComparisonType );
        }

        // void
        public void AddRequestLobbyListResultCountFilter( int cMaxResults /*int*/ )
        {
            platform.ISteamMatchmaking_AddRequestLobbyListResultCountFilter( cMaxResults );
        }

        // void
        public void AddRequestLobbyListStringFilter( string pchKeyToMatch /*const char **/, string pchValueToMatch /*const char **/, LobbyComparison eComparisonType /*ELobbyComparison*/ )
        {
            platform.ISteamMatchmaking_AddRequestLobbyListStringFilter( pchKeyToMatch, pchValueToMatch, eComparisonType );
        }

        // SteamAPICall_t
        // Returns null when no callback is supplied; otherwise wires the async Steam call
        // result to CallbackFunction and returns its handle.
        public CallbackHandle CreateLobby( LobbyType eLobbyType /*ELobbyType*/, int cMaxMembers /*int*/, Action<LobbyCreated_t, bool> CallbackFunction = null /*Action<LobbyCreated_t, bool>*/ )
        {
            SteamAPICall_t callback = 0;
            callback = platform.ISteamMatchmaking_CreateLobby( eLobbyType, cMaxMembers );

            if ( CallbackFunction == null ) return null;

            return LobbyCreated_t.CallResult( steamworks, callback, CallbackFunction );
        }

        // bool
        public bool DeleteLobbyData( CSteamID steamIDLobby /*class CSteamID*/, string pchKey /*const char **/ )
        {
            return platform.ISteamMatchmaking_DeleteLobbyData( steamIDLobby.Value, pchKey );
        }

        // bool
        public bool GetFavoriteGame( int iGame /*int*/, ref AppId_t pnAppID /*AppId_t **/, out uint pnIP /*uint32 **/, out ushort pnConnPort /*uint16 **/, out ushort pnQueryPort /*uint16 **/, out uint punFlags /*uint32 **/, out uint pRTime32LastPlayedOnServer /*uint32 **/ )
        {
            return platform.ISteamMatchmaking_GetFavoriteGame( iGame, ref pnAppID.Value, out pnIP, out pnConnPort, out pnQueryPort, out punFlags, out pRTime32LastPlayedOnServer );
        }

        // int
        public int GetFavoriteGameCount()
        {
            return platform.ISteamMatchmaking_GetFavoriteGameCount();
        }

        // ulong
        public ulong GetLobbyByIndex( int iLobby /*int*/ )
        {
            return platform.ISteamMatchmaking_GetLobbyByIndex( iLobby );
        }

        // int
        public int GetLobbyChatEntry( CSteamID steamIDLobby /*class CSteamID*/, int iChatID /*int*/, out CSteamID pSteamIDUser /*class CSteamID **/, IntPtr pvData /*void **/, int cubData /*int*/, out ChatEntryType peChatEntryType /*EChatEntryType **/ )
        {
            return platform.ISteamMatchmaking_GetLobbyChatEntry( steamIDLobby.Value, iChatID, out pSteamIDUser.Value, (IntPtr) pvData, cubData, out peChatEntryType );
        }

        // string
        // with: Detect_StringReturn
        // Marshals the native const char* return into a managed string.
        public string GetLobbyData( CSteamID steamIDLobby /*class CSteamID*/, string pchKey /*const char **/ )
        {
            IntPtr string_pointer;
            string_pointer = platform.ISteamMatchmaking_GetLobbyData( steamIDLobby.Value, pchKey );
            return Marshal.PtrToStringAnsi( string_pointer );
        }

        // bool
        // with: Detect_StringFetch False
        // with: Detect_StringFetch False
        // Fetches key/value into pooled StringBuilders (4096-char buffers) and only
        // copies them out when the native call reports success.
        public bool GetLobbyDataByIndex( CSteamID steamIDLobby /*class CSteamID*/, int iLobbyData /*int*/, out string pchKey /*char **/, out string pchValue /*char **/ )
        {
            bool bSuccess = default( bool );
            pchKey = string.Empty;
            System.Text.StringBuilder pchKey_sb = Helpers.TakeStringBuilder();
            int cchKeyBufferSize = 4096;
            pchValue = string.Empty;
            System.Text.StringBuilder pchValue_sb = Helpers.TakeStringBuilder();
            int cchValueBufferSize = 4096;
            bSuccess = platform.ISteamMatchmaking_GetLobbyDataByIndex( steamIDLobby.Value, iLobbyData, pchKey_sb, cchKeyBufferSize, pchValue_sb, cchValueBufferSize );
            if ( !bSuccess ) return bSuccess;
            pchValue = pchValue_sb.ToString();
            // Generated code re-checks bSuccess between each copy-out; redundant but harmless.
            if ( !bSuccess ) return bSuccess;
            pchKey = pchKey_sb.ToString();
            return bSuccess;
        }

        // int
        public int GetLobbyDataCount( CSteamID steamIDLobby /*class CSteamID*/ )
        {
            return platform.ISteamMatchmaking_GetLobbyDataCount( steamIDLobby.Value );
        }

        // bool
        public bool GetLobbyGameServer( CSteamID steamIDLobby /*class CSteamID*/, out uint punGameServerIP /*uint32 **/, out ushort punGameServerPort /*uint16 **/, out CSteamID psteamIDGameServer /*class CSteamID **/ )
        {
            return platform.ISteamMatchmaking_GetLobbyGameServer( steamIDLobby.Value, out punGameServerIP, out punGameServerPort, out psteamIDGameServer.Value );
        }

        // ulong
        public ulong GetLobbyMemberByIndex( CSteamID steamIDLobby /*class CSteamID*/, int iMember /*int*/ )
        {
            return platform.ISteamMatchmaking_GetLobbyMemberByIndex( steamIDLobby.Value, iMember );
        }

        // string
        // with: Detect_StringReturn
        public string GetLobbyMemberData( CSteamID steamIDLobby /*class CSteamID*/, CSteamID steamIDUser /*class CSteamID*/, string pchKey /*const char **/ )
        {
            IntPtr string_pointer;
            string_pointer = platform.ISteamMatchmaking_GetLobbyMemberData( steamIDLobby.Value, steamIDUser.Value, pchKey );
            return Marshal.PtrToStringAnsi( string_pointer );
        }

        // int
        public int GetLobbyMemberLimit( CSteamID steamIDLobby /*class CSteamID*/ )
        {
            return platform.ISteamMatchmaking_GetLobbyMemberLimit( steamIDLobby.Value );
        }

        // ulong
        public ulong GetLobbyOwner( CSteamID steamIDLobby /*class CSteamID*/ )
        {
            return platform.ISteamMatchmaking_GetLobbyOwner( steamIDLobby.Value );
        }

        // int
        public int GetNumLobbyMembers( CSteamID steamIDLobby /*class CSteamID*/ )
        {
            return platform.ISteamMatchmaking_GetNumLobbyMembers( steamIDLobby.Value );
        }

        // bool
        public bool InviteUserToLobby( CSteamID steamIDLobby /*class CSteamID*/, CSteamID steamIDInvitee /*class CSteamID*/ )
        {
            return platform.ISteamMatchmaking_InviteUserToLobby( steamIDLobby.Value, steamIDInvitee.Value );
        }

        // SteamAPICall_t
        public CallbackHandle JoinLobby( CSteamID steamIDLobby /*class CSteamID*/, Action<LobbyEnter_t, bool> CallbackFunction = null /*Action<LobbyEnter_t, bool>*/ )
        {
            SteamAPICall_t callback = 0;
            callback = platform.ISteamMatchmaking_JoinLobby( steamIDLobby.Value );

            if ( CallbackFunction == null ) return null;

            return LobbyEnter_t.CallResult( steamworks, callback, CallbackFunction );
        }

        // void
        public void LeaveLobby( CSteamID steamIDLobby /*class CSteamID*/ )
        {
            platform.ISteamMatchmaking_LeaveLobby( steamIDLobby.Value );
        }

        // bool
        public bool RemoveFavoriteGame( AppId_t nAppID /*AppId_t*/, uint nIP /*uint32*/, ushort nConnPort /*uint16*/, ushort nQueryPort /*uint16*/, uint unFlags /*uint32*/ )
        {
            return platform.ISteamMatchmaking_RemoveFavoriteGame( nAppID.Value, nIP, nConnPort, nQueryPort, unFlags );
        }

        // bool
        public bool RequestLobbyData( CSteamID steamIDLobby /*class CSteamID*/ )
        {
            return platform.ISteamMatchmaking_RequestLobbyData( steamIDLobby.Value );
        }

        // SteamAPICall_t
        public CallbackHandle RequestLobbyList( Action<LobbyMatchList_t, bool> CallbackFunction = null /*Action<LobbyMatchList_t, bool>*/ )
        {
            SteamAPICall_t callback = 0;
            callback = platform.ISteamMatchmaking_RequestLobbyList();

            if ( CallbackFunction == null ) return null;

            return LobbyMatchList_t.CallResult( steamworks, callback, CallbackFunction );
        }

        // bool
        public bool SendLobbyChatMsg( CSteamID steamIDLobby /*class CSteamID*/, IntPtr pvMsgBody /*const void **/, int cubMsgBody /*int*/ )
        {
            return platform.ISteamMatchmaking_SendLobbyChatMsg( steamIDLobby.Value, (IntPtr) pvMsgBody, cubMsgBody );
        }

        // bool
        public bool SetLinkedLobby( CSteamID steamIDLobby /*class CSteamID*/, CSteamID steamIDLobbyDependent /*class CSteamID*/ )
        {
            return platform.ISteamMatchmaking_SetLinkedLobby( steamIDLobby.Value, steamIDLobbyDependent.Value );
        }

        // bool
        public bool SetLobbyData( CSteamID steamIDLobby /*class CSteamID*/, string pchKey /*const char **/, string pchValue /*const char **/ )
        {
            return platform.ISteamMatchmaking_SetLobbyData( steamIDLobby.Value, pchKey, pchValue );
        }

        // void
        public void SetLobbyGameServer( CSteamID steamIDLobby /*class CSteamID*/, uint unGameServerIP /*uint32*/, ushort unGameServerPort /*uint16*/, CSteamID steamIDGameServer /*class CSteamID*/ )
        {
            platform.ISteamMatchmaking_SetLobbyGameServer( steamIDLobby.Value, unGameServerIP, unGameServerPort, steamIDGameServer.Value );
        }

        // bool
        public bool SetLobbyJoinable( CSteamID steamIDLobby /*class CSteamID*/, bool bLobbyJoinable /*bool*/ )
        {
            return platform.ISteamMatchmaking_SetLobbyJoinable( steamIDLobby.Value, bLobbyJoinable );
        }

        // void
        public void SetLobbyMemberData( CSteamID steamIDLobby /*class CSteamID*/, string pchKey /*const char **/, string pchValue /*const char **/ )
        {
            platform.ISteamMatchmaking_SetLobbyMemberData( steamIDLobby.Value, pchKey, pchValue );
        }

        // bool
        public bool SetLobbyMemberLimit( CSteamID steamIDLobby /*class CSteamID*/, int cMaxMembers /*int*/ )
        {
            return platform.ISteamMatchmaking_SetLobbyMemberLimit( steamIDLobby.Value, cMaxMembers );
        }

        // bool
        public bool SetLobbyOwner( CSteamID steamIDLobby /*class CSteamID*/, CSteamID steamIDNewOwner /*class CSteamID*/ )
        {
            return platform.ISteamMatchmaking_SetLobbyOwner( steamIDLobby.Value, steamIDNewOwner.Value );
        }

        // bool
        public bool SetLobbyType( CSteamID steamIDLobby /*class CSteamID*/, LobbyType eLobbyType /*ELobbyType*/ )
        {
            return platform.ISteamMatchmaking_SetLobbyType( steamIDLobby.Value, eLobbyType );
        }
    }
}
using System;
using System.Collections.Generic;

namespace ClosedXML.Excel
{
    /// <summary>
    /// Built-in pivot table visual themes, mirroring Excel's PivotStyleDark/Light/Medium 1-28 names.
    /// Member order follows the generated (alphabetical) listing, not numeric order.
    /// </summary>
    public enum XLPivotTableTheme
    {
        PivotStyleDark1,
        PivotStyleDark10,
        PivotStyleDark11,
        PivotStyleDark12,
        PivotStyleDark13,
        PivotStyleDark14,
        PivotStyleDark15,
        PivotStyleDark16,
        PivotStyleDark17,
        PivotStyleDark18,
        PivotStyleDark19,
        PivotStyleDark2,
        PivotStyleDark20,
        PivotStyleDark21,
        PivotStyleDark22,
        PivotStyleDark23,
        PivotStyleDark24,
        PivotStyleDark25,
        PivotStyleDark26,
        PivotStyleDark27,
        PivotStyleDark28,
        PivotStyleDark3,
        PivotStyleDark4,
        PivotStyleDark5,
        PivotStyleDark6,
        PivotStyleDark7,
        PivotStyleDark8,
        PivotStyleDark9,
        PivotStyleLight1,
        PivotStyleLight10,
        PivotStyleLight11,
        PivotStyleLight12,
        PivotStyleLight13,
        PivotStyleLight14,
        PivotStyleLight15,
        PivotStyleLight16,
        PivotStyleLight17,
        PivotStyleLight18,
        PivotStyleLight19,
        PivotStyleLight2,
        PivotStyleLight20,
        PivotStyleLight21,
        PivotStyleLight22,
        PivotStyleLight23,
        PivotStyleLight24,
        PivotStyleLight25,
        PivotStyleLight26,
        PivotStyleLight27,
        PivotStyleLight28,
        PivotStyleLight3,
        PivotStyleLight4,
        PivotStyleLight5,
        PivotStyleLight6,
        PivotStyleLight7,
        PivotStyleLight8,
        PivotStyleLight9,
        PivotStyleMedium1,
        PivotStyleMedium10,
        PivotStyleMedium11,
        PivotStyleMedium12,
        PivotStyleMedium13,
        PivotStyleMedium14,
        PivotStyleMedium15,
        PivotStyleMedium16,
        PivotStyleMedium17,
        PivotStyleMedium18,
        PivotStyleMedium19,
        PivotStyleMedium2,
        PivotStyleMedium20,
        PivotStyleMedium21,
        PivotStyleMedium22,
        PivotStyleMedium23,
        PivotStyleMedium24,
        PivotStyleMedium25,
        PivotStyleMedium26,
        PivotStyleMedium27,
        PivotStyleMedium28,
        PivotStyleMedium3,
        PivotStyleMedium4,
        PivotStyleMedium5,
        PivotStyleMedium6,
        PivotStyleMedium7,
        PivotStyleMedium8,
        PivotStyleMedium9
    }

    /// <summary>Sort order applied to pivot field items.</summary>
    public enum XLPivotSortType
    {
        Default = 0,
        Ascending = 1,
        Descending = 2
    }

    /// <summary>Where subtotal rows are rendered relative to their group, if at all.</summary>
    public enum XLPivotSubtotals
    {
        DoNotShow,
        AtTop,
        AtBottom
    }

    /// <summary>Layout order of multiple report-filter fields in the filter area.</summary>
    public enum XLFilterAreaOrder
    {
        DownThenOver,
        OverThenDown
    }

    /// <summary>How many unique items per field are retained in the pivot cache.</summary>
    public enum XLItemsToRetain
    {
        Automatic,
        None,
        Max
    }

    /// <summary>Kind of data source backing the pivot table.</summary>
    public enum XLPivotTableSourceType
    {
        Range,
        Table
    }

    /// <summary>
    /// A pivot table on a worksheet: its fields (filters, row/column labels, values),
    /// data source, and the many boolean display/print/save options Excel exposes.
    /// Trailing comments on members note the corresponding OOXML attribute name.
    /// The fluent Set* methods return this instance for chaining.
    /// </summary>
    public interface IXLPivotTable
    {
        XLPivotTableTheme Theme { get; set; }

        IXLPivotFields Fields { get; }
        IXLPivotFields ReportFilters { get; }
        IXLPivotFields ColumnLabels { get; }
        IXLPivotFields RowLabels { get; }
        IXLPivotValues Values { get; }

        String Name { get; set; }
        String Title { get; set; }
        String Description { get; set; }

        String ColumnHeaderCaption { get; set; }
        String RowHeaderCaption { get; set; }

        IXLCell TargetCell { get; set; }

        IXLRange SourceRange { get; set; }
        IXLTable SourceTable { get; set; }
        XLPivotTableSourceType SourceType { get; }

        IEnumerable<String> SourceRangeFieldsAvailable { get; }

        Boolean MergeAndCenterWithLabels { get; set; } // MergeItem
        Int32 RowLabelIndent { get; set; } // Indent
        XLFilterAreaOrder FilterAreaOrder { get; set; } // PageOverThenDown
        Int32 FilterFieldsPageWrap { get; set; } // PageWrap
        String ErrorValueReplacement { get; set; } // ErrorCaption
        String EmptyCellReplacement { get; set; } // MissingCaption
        Boolean AutofitColumns { get; set; } //UseAutoFormatting
        Boolean PreserveCellFormatting { get; set; } // PreserveFormatting

        Boolean ShowGrandTotalsRows { get; set; } // RowGrandTotals
        Boolean ShowGrandTotalsColumns { get; set; } // ColumnGrandTotals
        Boolean FilteredItemsInSubtotals { get; set; } // Subtotal filtered page items
        Boolean AllowMultipleFilters { get; set; } // MultipleFieldFilters
        Boolean UseCustomListsForSorting { get; set; } // CustomListSort

        Boolean ShowExpandCollapseButtons { get; set; }
        Boolean ShowContextualTooltips { get; set; }
        Boolean ShowPropertiesInTooltips { get; set; }
        Boolean DisplayCaptionsAndDropdowns { get; set; }
        Boolean ClassicPivotTableLayout { get; set; }
        Boolean ShowValuesRow { get; set; }
        Boolean ShowEmptyItemsOnRows { get; set; }
        Boolean ShowEmptyItemsOnColumns { get; set; }
        Boolean DisplayItemLabels { get; set; }
        Boolean SortFieldsAtoZ { get; set; }

        Boolean PrintExpandCollapsedButtons { get; set; }
        Boolean RepeatRowLabels { get; set; }
        Boolean PrintTitles { get; set; }

        Boolean SaveSourceData { get; set; }
        Boolean EnableShowDetails { get; set; }
        Boolean RefreshDataOnOpen { get; set; }
        XLItemsToRetain ItemsToRetainPerField { get; set; }
        Boolean EnableCellEditing { get; set; }

        IXLPivotTable SetName(String value);
        IXLPivotTable SetTitle(String value);
        IXLPivotTable SetDescription(String value);

        IXLPivotTable SetMergeAndCenterWithLabels(); IXLPivotTable SetMergeAndCenterWithLabels(Boolean value);
        IXLPivotTable SetRowLabelIndent(Int32 value);
        IXLPivotTable SetFilterAreaOrder(XLFilterAreaOrder value);
        IXLPivotTable SetFilterFieldsPageWrap(Int32 value);
        IXLPivotTable SetErrorValueReplacement(String value);
        IXLPivotTable SetEmptyCellReplacement(String value);
        IXLPivotTable SetAutofitColumns(); IXLPivotTable SetAutofitColumns(Boolean value);
        IXLPivotTable SetPreserveCellFormatting(); IXLPivotTable SetPreserveCellFormatting(Boolean value);

        IXLPivotTable SetShowGrandTotalsRows(); IXLPivotTable SetShowGrandTotalsRows(Boolean value);
        IXLPivotTable SetShowGrandTotalsColumns(); IXLPivotTable SetShowGrandTotalsColumns(Boolean value);
        IXLPivotTable SetFilteredItemsInSubtotals(); IXLPivotTable SetFilteredItemsInSubtotals(Boolean value);
        IXLPivotTable SetAllowMultipleFilters(); IXLPivotTable SetAllowMultipleFilters(Boolean value);
        IXLPivotTable SetUseCustomListsForSorting(); IXLPivotTable SetUseCustomListsForSorting(Boolean value);

        IXLPivotTable SetShowExpandCollapseButtons(); IXLPivotTable SetShowExpandCollapseButtons(Boolean value);
        IXLPivotTable SetShowContextualTooltips(); IXLPivotTable SetShowContextualTooltips(Boolean value);
        IXLPivotTable SetShowPropertiesInTooltips(); IXLPivotTable SetShowPropertiesInTooltips(Boolean value);
        IXLPivotTable SetDisplayCaptionsAndDropdowns(); IXLPivotTable SetDisplayCaptionsAndDropdowns(Boolean value);
        IXLPivotTable SetClassicPivotTableLayout(); IXLPivotTable SetClassicPivotTableLayout(Boolean value);
        IXLPivotTable SetShowValuesRow(); IXLPivotTable SetShowValuesRow(Boolean value);
        IXLPivotTable SetShowEmptyItemsOnRows(); IXLPivotTable SetShowEmptyItemsOnRows(Boolean value);
        IXLPivotTable SetShowEmptyItemsOnColumns(); IXLPivotTable SetShowEmptyItemsOnColumns(Boolean value);
        IXLPivotTable SetDisplayItemLabels(); IXLPivotTable SetDisplayItemLabels(Boolean value);
        IXLPivotTable SetSortFieldsAtoZ(); IXLPivotTable SetSortFieldsAtoZ(Boolean value);

        IXLPivotTable SetPrintExpandCollapsedButtons(); IXLPivotTable SetPrintExpandCollapsedButtons(Boolean value);
        IXLPivotTable SetRepeatRowLabels(); IXLPivotTable SetRepeatRowLabels(Boolean value);
        IXLPivotTable SetPrintTitles(); IXLPivotTable SetPrintTitles(Boolean value);

        IXLPivotTable SetSaveSourceData(); IXLPivotTable SetSaveSourceData(Boolean value);
        IXLPivotTable SetEnableShowDetails(); IXLPivotTable SetEnableShowDetails(Boolean value);
        IXLPivotTable SetRefreshDataOnOpen(); IXLPivotTable SetRefreshDataOnOpen(Boolean value);
        IXLPivotTable SetItemsToRetainPerField(XLItemsToRetain value);
        IXLPivotTable SetEnableCellEditing(); IXLPivotTable SetEnableCellEditing(Boolean value);

        IXLPivotTable SetColumnHeaderCaption(String value);
        IXLPivotTable SetRowHeaderCaption(String value);

        Boolean ShowRowHeaders { get; set; }
        Boolean ShowColumnHeaders { get; set; }
        Boolean ShowRowStripes { get; set; }
        Boolean ShowColumnStripes { get; set; }
        XLPivotSubtotals Subtotals { get; set; }
        XLPivotLayout Layout { set; }
        Boolean InsertBlankLines { set; }

        IXLPivotTable SetShowRowHeaders(); IXLPivotTable SetShowRowHeaders(Boolean value);
        IXLPivotTable SetShowColumnHeaders(); IXLPivotTable SetShowColumnHeaders(Boolean value);
        IXLPivotTable SetShowRowStripes(); IXLPivotTable SetShowRowStripes(Boolean value);
        IXLPivotTable SetShowColumnStripes(); IXLPivotTable SetShowColumnStripes(Boolean value);
        IXLPivotTable SetSubtotals(XLPivotSubtotals value);
        IXLPivotTable SetLayout(XLPivotLayout value);
        IXLPivotTable SetInsertBlankLines(); IXLPivotTable SetInsertBlankLines(Boolean value);

        IXLWorksheet Worksheet { get; }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Buffers;
using System.Diagnostics;
using System.Threading;

namespace System.Text
{
    /// <summary>
    /// Base class for the policy that decides what to do when an encoder meets a character
    /// it cannot encode. Provides lazily-created shared instances of the two standard
    /// policies (replacement with "?" and throwing).
    /// </summary>
    public abstract class EncoderFallback
    {
        private static EncoderFallback s_replacementFallback; // Default fallback, uses no best fit & "?"
        private static EncoderFallback s_exceptionFallback;

        // Get each of our generic fallbacks.
        // Lazily initialized via CompareExchange so concurrent first access is safe
        // and all callers observe the same instance.

        public static EncoderFallback ReplacementFallback
        {
            get
            {
                if (s_replacementFallback == null)
                    Interlocked.CompareExchange<EncoderFallback>(ref s_replacementFallback, new EncoderReplacementFallback(), null);

                return s_replacementFallback;
            }
        }


        public static EncoderFallback ExceptionFallback
        {
            get
            {
                if (s_exceptionFallback == null)
                    Interlocked.CompareExchange<EncoderFallback>(ref s_exceptionFallback, new EncoderExceptionFallback(), null);

                return s_exceptionFallback;
            }
        }

        // Fallback
        //
        // Return the appropriate unicode string alternative to the character that need to fall back.
        // Most implementations will be:
        //      return new MyCustomEncoderFallbackBuffer(this);

        public abstract EncoderFallbackBuffer CreateFallbackBuffer();

        // Maximum number of characters that this instance of this fallback could return
        public abstract int MaxCharCount { get; }
    }


    /// <summary>
    /// Per-operation buffer that produces replacement characters for input the encoder
    /// could not encode. The internal members drive the fallback machinery used by the
    /// built-in encodings, including recursion protection (a fallback producing
    /// unencodable output is capped at iMaxRecursion before throwing).
    /// </summary>
    public abstract class EncoderFallbackBuffer
    {
        // Most implementations will probably need an implementation-specific constructor

        // Public methods that cannot be overridden that let us do our fallback thing
        // These wrap the internal methods so that we can check for people doing stuff that is incorrect

        public abstract bool Fallback(char charUnknown, int index);

        public abstract bool Fallback(char charUnknownHigh, char charUnknownLow, int index);

        // Get next character
        public abstract char GetNextChar();

        // Back up a character
        public abstract bool MovePrevious();

        // How many chars left in this fallback?
        public abstract int Remaining { get; }

        // Not sure if this should be public or not.
        // Clear the buffer by draining it; (char)0 is the sentinel for "empty".
        public virtual void Reset()
        {
            while (GetNextChar() != (char)0) ;
        }

        // Internal items to help us figure out what we're doing as far as error messages, etc.
        // These help us with our performance and messages internally
        internal unsafe char* charStart;
        internal unsafe char* charEnd;
        internal EncoderNLS encoder; // TODO: MAKE ME PRIVATE
        internal bool setEncoder;
        internal bool bUsedEncoder;
        internal bool bFallingBack = false;
        internal int iRecursionCount = 0;
        private const int iMaxRecursion = 250;
        private Encoding encoding;
        private int originalCharCount;

        // Internal Reset
        // For example, what if someone fails a conversion and wants to reset one of our fallback buffers?
        internal unsafe void InternalReset()
        {
            charStart = null;
            bFallingBack = false;
            iRecursionCount = 0;
            Reset();
        }

        // Set the above values
        // This can't be part of the constructor because EncoderFallbacks would have to know how to implement these.
        internal unsafe void InternalInitialize(char* charStart, char* charEnd, EncoderNLS encoder, bool setEncoder)
        {
            this.charStart = charStart;
            this.charEnd = charEnd;
            this.encoder = encoder;
            this.setEncoder = setEncoder;
            this.bUsedEncoder = false;
            this.bFallingBack = false;
            this.iRecursionCount = 0;
        }

        internal static EncoderFallbackBuffer CreateAndInitialize(Encoding encoding, EncoderNLS encoder, int originalCharCount)
        {
            // The original char count is only used for keeping track of what 'index' value needs
            // to be passed to the abstract Fallback method. The index value is calculated by subtracting
            // 'chars.Length' (where chars is expected to be the entire remaining input buffer)
            // from the 'originalCharCount' value specified here.

            EncoderFallbackBuffer fallbackBuffer = (encoder is null) ? encoding.EncoderFallback.CreateFallbackBuffer() : encoder.FallbackBuffer;

            fallbackBuffer.encoding = encoding;
            fallbackBuffer.encoder = encoder;
            fallbackBuffer.originalCharCount = originalCharCount;

            return fallbackBuffer;
        }

        // Wraps GetNextChar, tracking whether a fallback is in progress and resetting
        // the recursion counter once the buffer is drained.
        internal char InternalGetNextChar()
        {
            char ch = GetNextChar();
            bFallingBack = (ch != 0);
            if (ch == 0) iRecursionCount = 0;
            return ch;
        }

        private bool InternalFallback(ReadOnlySpan<char> chars, out int charsConsumed)
        {
            Debug.Assert(!chars.IsEmpty, "Caller shouldn't invoke this if there's no data to fall back.");

            // First, try falling back a single BMP character or a standalone low surrogate.
            // If the first char is a high surrogate, we'll try to combine it with the next
            // char in the input sequence.

            char firstChar = chars[0];
            char secondChar = default;

            // NOTE(review): the IsEmpty re-check below is redundant given the assert above;
            // kept verbatim (it re-reads chars[0] and peeks chars[1] when present).
            if (!chars.IsEmpty)
            {
                firstChar = chars[0];

                if (1 < (uint)chars.Length)
                {
                    secondChar = chars[1];
                }
            }

            // Ask the subclassed type to initiate fallback logic.

            int index = originalCharCount - chars.Length;

            if (!char.IsSurrogatePair(firstChar, secondChar))
            {
                // This code path is also used when 'firstChar' is a standalone surrogate or
                // if it's a high surrogate at the end of the input buffer.

                charsConsumed = 1;
                return Fallback(firstChar, index);
            }
            else
            {
                charsConsumed = 2;
                return Fallback(firstChar, secondChar, index);
            }
        }

        internal int InternalFallbackGetByteCount(ReadOnlySpan<char> chars, out int charsConsumed)
        {
            int bytesWritten = 0;

            if (InternalFallback(chars, out charsConsumed))
            {
                // There's data in the fallback buffer - pull it out now.

                bytesWritten = DrainRemainingDataForGetByteCount();
            }

            return bytesWritten;
        }

        internal bool TryInternalFallbackGetBytes(ReadOnlySpan<char> chars, Span<byte> bytes, out int charsConsumed, out int bytesWritten)
        {
            if (InternalFallback(chars, out charsConsumed))
            {
                // There's data in the fallback buffer - pull it out now.

                return TryDrainRemainingDataForGetBytes(bytes, out bytesWritten);
            }
            else
            {
                // There's no data in the fallback buffer.

                bytesWritten = 0;
                return true; // true = didn't run out of space in destination buffer
            }
        }

        internal bool TryDrainRemainingDataForGetBytes(Span<byte> bytes, out int bytesWritten)
        {
            int originalBytesLength = bytes.Length;

            Rune thisRune;
            while ((thisRune = GetNextRune()).Value != 0)
            {
                switch (encoding.EncodeRune(thisRune, bytes, out int bytesWrittenJustNow))
                {
                    case OperationStatus.Done:

                        bytes = bytes.Slice(bytesWrittenJustNow);
                        continue;

                    case OperationStatus.DestinationTooSmall:

                        // Since we're not consuming the Rune we just read, back up as many chars as necessary
                        // to undo the read we just performed, then report to our caller that we ran out of space.

                        for (int i = 0; i < thisRune.Utf16SequenceLength; i++)
                        {
                            MovePrevious();
                        }

                        bytesWritten = originalBytesLength - bytes.Length;
                        return false; // ran out of destination buffer

                    case OperationStatus.InvalidData:

                        // We can't fallback the fallback. We can't make forward progress, so report to our caller
                        // that something went terribly wrong. The error message contains the fallback char that
                        // couldn't be converted. (Ideally we'd provide the first char that originally triggered
                        // the fallback, but it's complicated to keep this state around, and a fallback producing
                        // invalid data should be a very rare occurrence.)

                        ThrowLastCharRecursive(thisRune.Value);
                        break; // will never be hit; call above throws

                    default:

                        Debug.Fail("Unexpected return value.");
                        break;
                }
            }

            bytesWritten = originalBytesLength - bytes.Length;
            return true; // finished successfully
        }

        internal int DrainRemainingDataForGetByteCount()
        {
            int totalByteCount = 0;

            Rune thisRune;
            while ((thisRune = GetNextRune()).Value != 0)
            {
                if (!encoding.TryGetByteCount(thisRune, out int byteCountThisIteration))
                {
                    // We can't fallback the fallback. We can't make forward progress, so report to our caller
                    // that something went terribly wrong. The error message contains the fallback char that
                    // couldn't be converted. (Ideally we'd provide the first char that originally triggered
                    // the fallback, but it's complicated to keep this state around, and a fallback producing
                    // invalid data should be a very rare occurrence.)

                    ThrowLastCharRecursive(thisRune.Value);
                }

                Debug.Assert(byteCountThisIteration >= 0, "Encoding shouldn't have returned a negative byte count.");

                // We need to check for overflow while tallying the fallback byte count.

                totalByteCount += byteCountThisIteration;
                if (totalByteCount < 0)
                {
                    InternalReset();
                    Encoding.ThrowConversionOverflow();
                }
            }

            return totalByteCount;
        }

        // Reads one scalar value from the buffer: either a single BMP char, or a surrogate
        // pair consumed via two GetNextChar calls. Throws if the chars don't form a valid scalar.
        private Rune GetNextRune()
        {
            char firstChar = GetNextChar();
            if (Rune.TryCreate(firstChar, out Rune value) || Rune.TryCreate(firstChar, GetNextChar(), out value))
            {
                return value;
            }

            throw new ArgumentException(SR.Argument_InvalidCharSequenceNoIndex);
        }

        // Fallback the current character using the remaining buffer and encoder if necessary
        // This can only be called by our encodings (other have to use the public fallback methods), so
        // we can use our EncoderNLS here too.
        // setEncoder is true if we're calling from a GetBytes method, false if we're calling from a GetByteCount
        //
        // Note that this could also change the contents of this.encoder, which is the same
        // object that the caller is using, so the caller could mess up the encoder for us
        // if they aren't careful.
        internal unsafe virtual bool InternalFallback(char ch, ref char* chars)
        {
            // Shouldn't have null charStart
            Debug.Assert(charStart != null,
                "[EncoderFallback.InternalFallbackBuffer]Fallback buffer is not initialized");

            // Get our index, remember chars was preincremented to point at next char, so have to -1
            int index = (int)(chars - charStart) - 1;

            // See if it was a high surrogate
            if (char.IsHighSurrogate(ch))
            {
                // See if there's a low surrogate to go with it
                if (chars >= this.charEnd)
                {
                    // Nothing left in input buffer
                    // No input, return 0 if mustflush is false
                    if (this.encoder != null && !this.encoder.MustFlush)
                    {
                        // Done, nothing to fallback — stash the dangling high surrogate on the
                        // encoder so the next call can complete the pair.
                        if (this.setEncoder)
                        {
                            bUsedEncoder = true;
                            this.encoder._charLeftOver = ch;
                        }
                        bFallingBack = false;
                        return false;
                    }
                }
                else
                {
                    // Might have a low surrogate
                    char cNext = *chars;
                    if (char.IsLowSurrogate(cNext))
                    {
                        // If already falling back then fail
                        if (bFallingBack && iRecursionCount++ > iMaxRecursion)
                            ThrowLastCharRecursive(char.ConvertToUtf32(ch, cNext));

                        // Next is a surrogate, add it as surrogate pair, and increment chars
                        chars++;
                        bFallingBack = Fallback(ch, cNext, index);
                        return bFallingBack;
                    }
                    // Next isn't a low surrogate, just fallback the high surrogate
                }
            }

            // If already falling back then fail
            if (bFallingBack && iRecursionCount++ > iMaxRecursion)
                ThrowLastCharRecursive((int)ch);

            // Fall back our char
            bFallingBack = Fallback(ch, index);

            return bFallingBack;
        }

        // private helper methods
        internal void ThrowLastCharRecursive(int charRecursive)
        {
            // Throw it, using our complete character
            throw new ArgumentException(
                SR.Format(SR.Argument_RecursiveFallback, charRecursive), "chars");
        }
    }
}
#region License, Terms and Author(s)
//
// Mannex - Extension methods for .NET
// Copyright (c) 2009 Atif Aziz. All rights reserved.
//
//  Author(s):
//
//      Atif Aziz, http://www.raboof.com
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#endregion

namespace Mannex
{
    #region Imports

    using System;
    using System.Collections.Generic;
    using System.Diagnostics;
    using System.Text;
    using System.Text.RegularExpressions;
    using IO;

    #endregion

    /// <summary>
    /// Extension methods for <see cref="string"/>.
    /// </summary>

    static partial class StringExtensions
    {
        /// <summary>
        /// Masks an empty string with a given mask such that the result
        /// is never an empty string. If the input string is null or
        /// empty then it is masked, otherwise the original is returned.
        /// </summary>
        /// <remarks>
        /// Use this method to guarantee that you never get an empty
        /// string. Bear in mind, however, that if the mask itself is an
        /// empty string then this method could yield an empty string!
        /// </remarks>

        [DebuggerStepThrough]
        public static string MaskEmpty(this string str, string mask)
        {
            return !string.IsNullOrEmpty(str) ? str : mask;
        }

        /// <summary>
        /// Returns a section of a string from a given starting point on.
        /// </summary>
        /// <remarks>
        /// If <paramref name="start"/> is negative, it is treated as
        /// <c>length</c> + <paramref name="start" /> where <c>length</c>
        /// is the length of the string. If <paramref name="start"/>
        /// is greater or equal to the length of the string then
        /// no characters are copied to the new string.
        /// </remarks>

        [DebuggerStepThrough]
        public static string Slice(this string str, int start)
        {
            return Slice(str, start, null);
        }

        /// <summary>
        /// Returns a section of a string.
        /// </summary>
        /// <remarks>
        /// This method copies up to, but not including, the element
        /// indicated by <paramref name="end"/>. If <paramref name="start"/>
        /// is negative, it is treated as <c>length</c> + <paramref name="start" />
        /// where <c>length</c> is the length of the string. If
        /// <paramref name="end"/> is negative, it is treated as <c>length</c> +
        /// <paramref name="end"/> where <c>length</c> is the length of the
        /// string. If <paramref name="end"/> occurs before <paramref name="start"/>,
        /// no characters are copied to the new string.
        /// </remarks>

        [DebuggerStepThrough]
        public static string Slice(this string str, int start, int? end)
        {
            if (str == null) throw new ArgumentNullException("str");
            // A null end means "through to the end of the string".
            return SliceImpl(str, start, end ?? str.Length);
        }

        // Core of Slice: resolves negative offsets and clamps both bounds into
        // [0, length] before taking the substring.
        private static string SliceImpl(this string str, int start, int end)
        {
            if (str == null) throw new ArgumentNullException("str");
            var length = str.Length;

            // Negative start counts back from the end of the string.
            if (start < 0)
            {
                start = length + start;
                if (start < 0)
                    start = 0;
            }
            else
            {
                if (start > length)
                    start = length;
            }

            // Negative end likewise counts back from the end of the string.
            if (end < 0)
            {
                end = length + end;
                if (end < 0)
                    end = 0;
            }
            else
            {
                if (end > length)
                    end = length;
            }

            var sliceLength = end - start;

            // An end at or before start yields the empty string, never an exception.
            return sliceLength > 0 ? str.Substring(start, sliceLength) : string.Empty;
        }

        /// <summary>
        /// Embeds string into <paramref name="target"/>, using {0}
        /// within <paramref name="target"/> as the point of embedding.
        /// </summary>

        public static string Embed(this string str, string target)
        {
            if (str == null) throw new ArgumentNullException("str");
            if (target == null) throw new ArgumentNullException("target");
            // str becomes argument {0} of the target format string.
            return string.Format(target, str);
        }

        /// <summary>
        /// Wraps string between two other string where the first
        /// indicates the left side and the second indicates the
        /// right side.
        /// </summary>

        public static string Wrap(this string str, string lhs, string rhs)
        {
            if (str == null) throw new ArgumentNullException("str");
            return lhs + str + rhs;
        }

        /// <summary>
        /// Enquotes string with <paramref name="quote"/>, escaping occurrences
        /// of <paramref name="quote"/> itself with <paramref name="escape"/>.
        /// </summary>

        public static string Quote(this string str, string quote, string escape)
        {
            if (str == null) throw new ArgumentNullException("str");
            StringBuilder sb = null;
            var start = 0;
            int index;
            while ((index = str.IndexOf(quote, start)) >= 0)
            {
                // Lazily allocate the builder only once the first embedded quote
                // is found; quote-free inputs take the cheap Wrap path below.
                if (sb == null)
                    sb = new StringBuilder(str.Length + 10).Append(quote);
                sb.Append(str, start, index - start);
                sb.Append(escape);
                start = index + quote.Length;
            }
            return sb != null
                 ? sb.Append(str, start, str.Length - start).Append(quote).ToString()
                 : str.Wrap(quote, quote);
        }

        /// <summary>
        /// Format string using <paramref name="args"/> as sources for
        /// replacements and a function, <paramref name="binder"/>, that
        /// determines how to bind and resolve replacement tokens.
        /// </summary>

        public static string FormatWith(this string format,
            Func<string, object[], IFormatProvider, string> binder, params object[] args)
        {
            return format.FormatWith(null, binder, args);
        }

        /// <summary>
        /// Format string using <paramref name="args"/> as sources for
        /// replacements and a function, <paramref name="binder"/>, that
        /// determines how to bind and resolve replacement tokens. In
        /// addition, <paramref name="provider"/> is used for cultural
        /// formatting.
        /// </summary>

        public static string FormatWith(this string format,
            IFormatProvider provider, Func<string, object[], IFormatProvider, string> binder,
            params object[] args)
        {
            if (format == null) throw new ArgumentNullException("format");
            if (binder == null) throw new ArgumentNullException("binder");
            Debug.Assert(binder != null); // NOTE(review): redundant after the check above

            var result = new StringBuilder(format.Length * 2);
            var token = new StringBuilder();

            var e = format.GetEnumerator();
            while (e.MoveNext())
            {
                var ch = e.Current;

                if (ch == '{')
                {
                    // Scan the replacement token up to the closing brace.
                    while (true)
                    {
                        if (!e.MoveNext())
                            throw new FormatException();

                        ch = e.Current;
                        if (ch == '}')
                        {
                            // Empty token ("{}") is malformed.
                            if (token.Length == 0)
                                throw new FormatException();

                            result.Append(binder(token.ToString(), args, provider));
                            token.Length = 0;
                            break;
                        }
                        if (ch == '{')
                        {
                            // "{{" escapes a literal opening brace.
                            result.Append(ch);
                            break;
                        }
                        token.Append(ch);
                    }
                }
                else if (ch == '}')
                {
                    // A lone "}" is malformed; "}}" escapes a literal closing brace.
                    if (!e.MoveNext() || e.Current != '}')
                        throw new FormatException();
                    result.Append('}');
                }
                else
                {
                    result.Append(ch);
                }
            }

            return result.ToString();
        }

        /// <summary>
        /// Splits a string into a pair using a specified character to
        /// separate the two.
        /// </summary>
        /// <remarks>
        /// Neither half in the resulting pair is ever <c>null</c>.
        /// </remarks>

        public static T Split<T>(this string str, char separator, Func<string, string, T> resultFunc)
        {
            if (str == null) throw new ArgumentNullException("str");
            if (resultFunc == null) throw new ArgumentNullException("resultFunc");
            return SplitRemoving(str, str.IndexOf(separator), 1, resultFunc);
        }

        /// <summary>
        /// Splits a string into three parts using a specified character
        /// to separate the three.
        /// </summary>
        /// <remarks>
        /// None of the resulting parts is ever <c>null</c>.
/// </remarks> public static T Split<T>(this string str, char separator, Func<string, string, string, T> resultFunc) { if (str == null) throw new ArgumentNullException("str"); if (resultFunc == null) throw new ArgumentNullException("resultFunc"); return str.Split(separator, (a, rest) => rest.Split(separator, (b, c) => resultFunc(a, b, c))); } /// <summary> /// Splits a string into four parts using any of a specified set of /// characters to separate the four. /// </summary> /// <remarks> /// None of the resulting parts is ever <c>null</c>. /// </remarks> public static T Split<T>(this string str, char separator, Func<string, string, string, string, T> resultFunc) { if (str == null) throw new ArgumentNullException("str"); if (resultFunc == null) throw new ArgumentNullException("resultFunc"); return str.Split(separator, (a, b, rest) => rest.Split(separator, (c, d) => resultFunc(a, b, c, d))); } /// <summary> /// Splits a string into a pair using any of a specified set of /// characters to separate the two. /// </summary> /// <remarks> /// Neither half in the resulting pair is ever <c>null</c>. /// </remarks> public static T Split<T>(this string str, char[] separators, Func<string, string, T> resultFunc) { if (str == null) throw new ArgumentNullException("str"); if (resultFunc == null) throw new ArgumentNullException("resultFunc"); return separators == null || separators.Length == 0 ? resultFunc(str, string.Empty) : SplitRemoving(str, str.IndexOfAny(separators), 1, resultFunc); } /// <summary> /// Splits a string into three parts using any of a specified set of /// characters to separate the three. /// </summary> /// <remarks> /// None of the resulting parts is ever <c>null</c>. 
/// </remarks> public static T Split<T>(this string str, char[] separators, Func<string, string, string, T> resultFunc) { if (str == null) throw new ArgumentNullException("str"); if (resultFunc == null) throw new ArgumentNullException("resultFunc"); return str.Split(separators, (a, rest) => rest.Split(separators, (b, c) => resultFunc(a, b, c))); } /// <summary> /// Splits a string into four parts using any of a specified set of /// characters to separate the four. /// </summary> /// <remarks> /// None of the resulting parts is ever <c>null</c>. /// </remarks> public static T Split<T>(this string str, char[] separators, Func<string, string, string, string, T> resultFunc) { if (str == null) throw new ArgumentNullException("str"); if (resultFunc == null) throw new ArgumentNullException("resultFunc"); return str.Split(separators, (a, b, rest) => rest.Split(separators, (c, d) => resultFunc(a, b, c, d))); } /// <summary> /// Splits a string into a pair by removing a portion of the string. /// </summary> /// <remarks> /// Neither half in the resulting pair is ever <c>null</c>. /// </remarks> private static T SplitRemoving<T>(string str, int index, int count, Func<string, string, T> resultFunc) { Debug.Assert(str != null); Debug.Assert(count > 0); Debug.Assert(resultFunc != null); var a = index < 0 ? str : str.Substring(0, index); var b = index < 0 || index + 1 >= str.Length ? string.Empty : str.Substring(index + count); return resultFunc(a, b); } /// <summary> /// Splits string into lines where a line is terminated /// by CR and LF, or just CR or just LF. /// </summary> /// <remarks> /// This method uses deferred exection. 
        /// </remarks>

        public static IEnumerable<string> SplitIntoLines(this string str)
        {
            if (str == null) throw new ArgumentNullException("str");
            return SplitIntoLinesImpl(str);
        }

        // Separate iterator so the null-argument check in SplitIntoLines fires
        // eagerly rather than on first enumeration.
        private static IEnumerable<string> SplitIntoLinesImpl(string str)
        {
            using (var reader = str.Read())
            foreach (var line in reader.ReadLines())
                yield return line;
        }

        /// <summary>
        /// Collapses all sequences of white space (as defined by Unicode
        /// and identified by <see cref="char.IsWhiteSpace(char)"/>) to a
        /// single space and trims all leading and trailing white space.
        /// </summary>

        public static string NormalizeWhiteSpace(this string str)
        {
            if (str == null) throw new ArgumentNullException("str");
            return Regex.Replace(str, @"\s+", " ").Trim();
        }

        /// <summary>
        /// Retrieves left, middle and right substrings from this instance
        /// given the character position and length of the middle substring.
        /// </summary>
        /// <returns>
        /// Returns a zero-base, single-dimension, array of three elements
        /// containing the left, middle and right substrings, respectively.
        /// </returns>
        /// <remarks>
        /// This function never returns <c>null</c> for any of the
        /// substrings. For example, even when <paramref name="index"/> is
        /// zero, the first substring will be an empty string, but not null.
        /// </remarks>

        public static string[] Substrings(this string str, int index, int length)
        {
            return Substrings(str, index, length, (left, mid, right) => new[] { left, mid, right });
        }

        /// <summary>
        /// Retrieves left, middle and right substrings from this instance
        /// given the character position and length of the middle substring.
        /// An additional parameter specifies a function that is used to
        /// project the final result.
        /// </summary>
        /// <remarks>
        /// This function never supplies <c>null</c> for any of the
        /// substrings. For example, even when <paramref name="index"/> is
        /// zero, the first substring will be an empty string, but not
        /// <c>null</c>.
        /// </remarks>

        public static T Substrings<T>(this string str, int index, int length, Func<string, string, string, T> resultor)
        {
            if (str == null) throw new ArgumentNullException("str");
            if (resultor == null) throw new ArgumentNullException("resultor");
            // Substring itself validates index/length and throws
            // ArgumentOutOfRangeException for out-of-bounds requests.
            return resultor(str.Substring(0, index), str.Substring(index, length), str.Substring(index + length));
        }
    }
}

namespace Mannex.IO
{
    #region Imports

    using System;
    using System.Collections.Generic;
    using System.IO;
    using System.Linq;

    #endregion

    /// <summary>
    /// Extension methods for <see cref="TextReader"/>.
    /// </summary>

    static partial class TextReaderExtensions
    {
        /// <summary>
        /// Reads all lines from reader using deferred semantics.
        /// </summary>

        public static IEnumerable<string> ReadLines(this TextReader reader)
        {
            if (reader == null) throw new ArgumentNullException("reader");
            return ReadLinesImpl(reader);
        }

        // Separate iterator so the null-argument check in ReadLines fires
        // eagerly rather than on first enumeration.
        private static IEnumerable<string> ReadLinesImpl(this TextReader reader)
        {
            for (var line = reader.ReadLine(); line != null; line = reader.ReadLine())
                yield return line;
        }

        // Concat derived from StackOverflow answer[1] by Rex Morgan[2].
        //
        // [1] http://stackoverflow.com/a/2925722/6682
        // [2] http://www.rexmorgan.net/

        /// <summary>
        /// Returns a new <see cref="TextReader"/> that represents the
        /// concatenated content of one or more supplied
        /// <see cref="TextReader"/> objects.
        /// </summary>
        /// <remarks>
        /// If any of the <see cref="TextReader"/> objects is <c>null</c>
        /// then it is treated as being empty; no exception is thrown.
        /// </remarks>

        public static TextReader Concat(this TextReader first, IEnumerable<TextReader> others)
        {
            if (first == null) throw new ArgumentNullException("first");
            if (others == null) throw new ArgumentNullException("others");
            return Concat(first, others.ToArray());
        }

        /// <summary>
        /// Returns a new <see cref="TextReader"/> that represents the
        /// concatenated content of one or more supplied
        /// <see cref="TextReader"/> objects.
        /// </summary>
        /// <remarks>
        /// If any of the <see cref="TextReader"/> objects is <c>null</c>
        /// then it is treated as being empty; no exception is thrown.
        /// </remarks>

        public static TextReader Concat(this TextReader first, params TextReader[] others)
        {
            if (first == null) throw new ArgumentNullException("first");
            if (others == null) throw new ArgumentNullException("others");
            return new ChainedTextReader(new[] { first }.Concat(others));
        }

        // Presents a sequence of readers as one logical reader. The current
        // reader is always _readers[0]; when it is exhausted the array is
        // rotated left so the next reader takes its place. A trailing null
        // sentinel marks end-of-sequence (GetReader returning null => EOF).
        sealed class ChainedTextReader : TextReader
        {
            private TextReader[] _readers; // null once disposed/closed

            public ChainedTextReader(IEnumerable<TextReader> readers)
            {
                if (readers == null) throw new ArgumentNullException("readers");
                // Null entries are mapped to TextReader.Null so reads are safe.
                _readers = readers.Select(r => r ?? Null)
                                  /*sentinel */ .Concat(new TextReader[] { null })
                                  .ToArray();
            }

            // Returns the current reader, or null when all are exhausted.
            private TextReader GetReader()
            {
                if (_readers == null) throw new ObjectDisposedException(null);
                return _readers[0];
            }

            public override int Peek()
            {
                var reader = GetReader();
                return reader == null ? -1 : reader.Peek();
            }

            public override int Read()
            {
                while (true)
                {
                    var reader = GetReader();
                    if (reader == null)
                        return -1;
                    var ch = reader.Read();
                    if (ch >= 0)
                        return ch;
                    // Current reader exhausted; move on to the next one.
                    _readers.Rotate();
                }
            }

            public override int Read(char[] buffer, int index, int count)
            {
                while (true)
                {
                    var reader = GetReader();
                    if (reader == null)
                        return 0;
                    var read = reader.Read(buffer, index, count);
                    if (read > 0)
                        return read;
                    // Current reader exhausted; move on to the next one.
                    _readers.Rotate();
                }
            }

            public override void Close()
            {
                OnDisposeOrClose(r => r.Close());
            }

            protected override void Dispose(bool disposing)
            {
                base.Dispose(disposing);
                OnDisposeOrClose(r => r.Dispose());
            }

            // Applies action to every underlying reader (skipping the null
            // sentinel) exactly once, then marks this instance disposed.
            void OnDisposeOrClose(Action<TextReader> action)
            {
                if (_readers == null)
                    return;
                foreach (var reader in _readers.Where(reader => reader != null))
                    action(reader);
                _readers = null;
            }
        }
    }
}

namespace Mannex
{
    #region Imports

    using System;
    using System.Text;

    #endregion

    /// <summary>
    /// Extension methods for <see cref="Array"/> sub-types.
    /// </summary>

    static partial class ArrayExtensions
    {
        /// <summary>
        /// Formats the entire byte array in hexadecimal and returns the
        /// resulting string.
        /// </summary>

        public static string ToHex(this byte[] buffer)
        {
            return ToHex(buffer, 0, buffer.Length);
        }

        /// <summary>
        /// Formats a section of a byte array in hexadecimal and returns
        /// the resulting string.
        /// </summary>

        public static string ToHex(this byte[] buffer, int index, int count)
        {
            return ToHex(buffer, index, count, null).ToString();
        }

        /// <summary>
        /// Formats bytes in hexadecimal format, appending to the
        /// supplied <see cref="StringBuilder"/> (a new one is created
        /// when <c>null</c> is supplied).
        /// </summary>

        public static StringBuilder ToHex(this byte[] buffer, int index, int count, StringBuilder sb)
        {
            if (buffer == null) throw new ArgumentNullException("buffer");
            if (index < 0 || index > buffer.Length) throw new ArgumentOutOfRangeException("index");
            // NOTE(review): a negative count is not rejected explicitly here;
            // it simply produces no output (or StringBuilder's own ctor check
            // fires when sb is null) — confirm whether an explicit guard is wanted.
            if (index + count > buffer.Length) throw new ArgumentOutOfRangeException("count");

            if (sb == null)
                sb = new StringBuilder(count * 2);

            for (var i = index; i < index + count; i++)
            {
                const string hexdigits = "0123456789abcdef";
                var b = buffer[i];
                // Two lowercase hex digits per byte: high nibble then low nibble.
                sb.Append(hexdigits[b/16]);
                sb.Append(hexdigits[b%16]);
            }

            return sb;
        }

        /// <summary>
        /// Rotates the elements of the array (in-place) such that all
        /// elements are shifted left by one position, with the exception of
        /// the first element which assumes the last position in the array.
        /// </summary>

        public static void Rotate<T>(this T[] array)
        {
            if (array == null) throw new ArgumentNullException("array");
            if (array.Length == 0) return;
            var first = array[0];
            // Shift everything left by one, then park the old head at the tail.
            Array.Copy(array, 1, array, 0, array.Length - 1);
            array[array.Length - 1] = first;
        }
    }
}

namespace Mannex.IO
{
    #region Imports

    using System;
    using System.Collections.Generic;
    using System.Diagnostics;
    using System.IO;
    using System.Linq;
    using System.Text.RegularExpressions;

    #endregion

    /// <summary>
    /// Extension methods for <see cref="string"/>.
    /// </summary>

    static partial class StringExtensions
    {
        // Invalid file-name characters and a regex character class matching them.
        private static readonly char[] _badFileNameChars;
        private static readonly string _badFileNameCharsPattern;
        // Invalid path characters and a regex character class matching them.
        private static readonly char[] _badPathChars;
        private static readonly string _badPathCharsPattern;

        static StringExtensions()
        {
            _badFileNameChars = Path.GetInvalidFileNameChars();
            _badFileNameCharsPattern = Patternize(_badFileNameChars);
            _badPathChars = Path.GetInvalidPathChars();
            _badPathCharsPattern = Patternize(_badPathChars);
        }

        // Builds a regex character class ("[...]") from the given characters,
        // escaping each so it is matched literally.
        private static string Patternize(IEnumerable<char> chars)
        {
            Debug.Assert(chars != null);
            return "["
                 + string.Join(string.Empty, chars.Select(ch => Regex.Escape(ch.ToString())).ToArray())
                 + "]";
        }

        /// <summary>
        /// Makes the content of the string safe for use as a file name
        /// by replacing all invalid characters, those returned by
        /// <see cref="Path.GetInvalidFileNameChars"/>, with an underscore.
        /// </summary>
        /// <remarks>
        /// This method is not guaranteed to replace the complete set of
        /// characters that are invalid in file and directory names.
        /// The full set of invalid characters can vary by file system.
        /// </remarks>

        public static string ToFileNameSafe(this string str)
        {
            return ToFileNameSafe(str, null);
        }

        /// <summary>
        /// Makes the content of the string safe for use as a file name
        /// by replacing all invalid characters, those returned by
        /// <see cref="Path.GetInvalidFileNameChars"/>, with
        /// <paramref name="replacement"/>.
        /// </summary>
        /// <remarks>
        /// <para>
        /// The <paramref name="replacement"/> string itself cannot
        /// carry any invalid file name characters. If
        /// <paramref name="replacement"/> is <c>null</c> or empty
        /// then it assumes the value of an underscore.</para>
        /// <para>
        /// This method is not guaranteed to replace the complete set of
        /// characters that are invalid in file and directory names.
        /// The full set of invalid characters can vary by file system.
        /// </para>
        /// </remarks>

        public static string ToFileNameSafe(this string str, string replacement)
        {
            return SanitizePathComponent(str,
                (replacement ?? string.Empty).MaskEmpty("_"),
                _badFileNameChars, _badFileNameCharsPattern);
        }

        /// <summary>
        /// Makes the content of the string safe for use as a file name
        /// by replacing all invalid characters, those returned by
        /// <see cref="Path.GetInvalidPathChars"/>, with an underscore.
        /// </summary>
        /// <remarks>
        /// This method is not guaranteed to replace the complete set of
        /// characters that are invalid in file and directory names.
        /// The full set of invalid characters can vary by file system.
        /// </remarks>

        public static string ToPathNameSafe(this string str)
        {
            return ToPathNameSafe(str, null);
        }

        /// <summary>
        /// Makes the content of the string safe for use as a file name
        /// by replacing all invalid characters, those returned by
        /// <see cref="Path.GetInvalidPathChars"/>, with
        /// <paramref name="replacement"/>.
        /// </summary>
        /// <remarks>
        /// The <paramref name="replacement"/> string itself cannot
        /// carry any invalid file name characters. If
        /// <paramref name="replacement"/> is <c>null</c> or empty
        /// then it assumes the value of an underscore.
        /// <para>
        /// This method is not guaranteed to replace the complete set of
        /// characters that are invalid in file and directory names.
        /// The full set of invalid characters can vary by file system.
        /// </para>
        /// </remarks>

        public static string ToPathNameSafe(this string str, string replacement)
        {
            return SanitizePathComponent(str,
                (replacement ?? string.Empty).MaskEmpty("_"),
                _badPathChars, _badPathCharsPattern);
        }

        // Shared worker: validates arguments, then replaces every character
        // matched by badPattern with the (already defaulted) replacement.
        private static string SanitizePathComponent(string str, string replacement, char[] badChars, string badPattern)
        {
            Debug.Assert(replacement != null);

            if (str == null) throw new ArgumentNullException("str");
            if (str.Length == 0) throw new ArgumentException(null, "str");
            // The replacement must not itself reintroduce invalid characters.
            if (replacement.IndexOfAny(badChars) >= 0) throw new ArgumentException(null, "replacement");

            return Regex.Replace(str, badPattern, replacement);
        }

        /// <summary>
        /// Returns a <see cref="TextReader"/> for reading string.
        /// </summary>

        public static TextReader Read(this string str)
        {
            if (str == null) throw new ArgumentNullException("str");
            return new StringReader(str);
        }
    }
}
// ==++==
//
//   Copyright (c) Microsoft Corporation.  All rights reserved.
//
// ==--==
/*============================================================
**
** Class:  BinaryWriter
**
** <OWNER>gpaperin</OWNER>
**
** Purpose: Provides a way to write primitives types in
** binary from a Stream, while also supporting writing Strings
** in a particular encoding.
**
**
===========================================================*/
using System;
using System.Runtime;
using System.Runtime.Serialization;
using System.Text;
using System.Diagnostics.Contracts;

namespace System.IO {
    // This abstract base class represents a writer that can write
    // primitives to an arbitrary stream. A subclass can override methods to
    // give unique encodings.
    //
    [Serializable]
    [System.Runtime.InteropServices.ComVisible(true)]
    public class BinaryWriter : IDisposable
    {
        // A writer backed by Stream.Null: accepts all writes, discards everything.
        public static readonly BinaryWriter Null = new BinaryWriter();

        protected Stream OutStream;
        private byte[] _buffer;    // temp space for writing primitives to.
        private Encoding _encoding;
        private Encoder _encoder;

        [OptionalField]  // New in .NET FX 4.5.  False is the right default value.
        private bool _leaveOpen;

        // This field should never have been serialized and has not been used since before v2.0.
        // However, this type is serializable, and we need to keep the field name around when deserializing.
        // Also, we'll make .NET FX 4.5 not break if it's missing.
#pragma warning disable 169
        [OptionalField]
        private char[] _tmpOneCharBuffer;
#pragma warning restore 169

        // Perf optimization stuff
        private byte[] _largeByteBuffer;  // temp space for writing chars.
        private int _maxChars;   // max # of chars we can put in _largeByteBuffer
        // Size should be around the max number of chars/string * Encoding's max bytes/char
        private const int LargeByteBufferSize = 256;

        // Protected default constructor that sets the output stream
        // to a null stream (a bit bucket).
        protected BinaryWriter()
        {
            OutStream = Stream.Null;
            _buffer = new byte[16];
            // UTF-8, no BOM, throw on invalid characters.
            _encoding = new UTF8Encoding(false, true);
            _encoder = _encoding.GetEncoder();
        }

        public BinaryWriter(Stream output) : this(output, new UTF8Encoding(false, true), false)
        {
        }

        public BinaryWriter(Stream output, Encoding encoding) : this(output, encoding, false)
        {
        }

        public BinaryWriter(Stream output, Encoding encoding, bool leaveOpen)
        {
            if (output==null)
                throw new ArgumentNullException("output");
            if (encoding==null)
                throw new ArgumentNullException("encoding");
            if (!output.CanWrite)
                throw new ArgumentException(Environment.GetResourceString("Argument_StreamNotWritable"));
            Contract.EndContractBlock();

            OutStream = output;
            _buffer = new byte[16];
            _encoding = encoding;
            _encoder = _encoding.GetEncoder();
            _leaveOpen = leaveOpen;
        }

        // Closes this writer and releases any system resources associated with the
        // writer. Following a call to Close, any operations on the writer
        // may raise exceptions.
        public virtual void Close()
        {
            Dispose(true);
        }

        protected virtual void Dispose(bool disposing)
        {
            if (disposing) {
                // When the caller asked to keep the stream open, still flush
                // buffered data; otherwise close (which also flushes).
                if (_leaveOpen)
                    OutStream.Flush();
                else
                    OutStream.Close();
            }
        }

        public void Dispose()
        {
            Dispose(true);
        }

        /*
         * Returns the stream associate with the writer. It flushes all pending
         * writes before returning. All subclasses should override Flush to
         * ensure that all buffered data is sent to the stream.
         */
        public virtual Stream BaseStream {
            get {
                Flush();
                return OutStream;
            }
        }

        // Clears all buffers for this writer and causes any buffered data to be
        // written to the underlying device.
        public virtual void Flush()
        {
            OutStream.Flush();
        }

        public virtual long Seek(int offset, SeekOrigin origin)
        {
            return OutStream.Seek(offset, origin);
        }

        // Writes a boolean to this stream. A single byte is written to the stream
        // with the value 0 representing false or the value 1 representing true.
        //
        public virtual void Write(bool value) {
            _buffer[0] = (byte) (value ? 1 : 0);
            OutStream.Write(_buffer, 0, 1);
        }

        // Writes a byte to this stream. The current position of the stream is
        // advanced by one.
        //
        public virtual void Write(byte value)
        {
            OutStream.WriteByte(value);
        }

        // Writes a signed byte to this stream. The current position of the stream
        // is advanced by one.
        //
        [CLSCompliant(false)]
        public virtual void Write(sbyte value)
        {
            OutStream.WriteByte((byte) value);
        }

        // Writes a byte array to this stream.
        //
        // This default implementation calls the Write(Object, int, int)
        // method to write the byte array.
        //
        public virtual void Write(byte[] buffer) {
            if (buffer == null)
                throw new ArgumentNullException("buffer");
            Contract.EndContractBlock();
            OutStream.Write(buffer, 0, buffer.Length);
        }

        // Writes a section of a byte array to this stream.
        //
        // This default implementation calls the Write(Object, int, int)
        // method to write the byte array.
        //
        public virtual void Write(byte[] buffer, int index, int count) {
            OutStream.Write(buffer, index, count);
        }

        // Writes a character to this stream. The current position of the stream is
        // advanced by two.
        // Note this method cannot handle surrogates properly in UTF-8.
        //
        [System.Security.SecuritySafeCritical]  // auto-generated
        public unsafe virtual void Write(char ch) {
            if (Char.IsSurrogate(ch))
                throw new ArgumentException(Environment.GetResourceString("Arg_SurrogatesNotAllowedAsSingleChar"));
            Contract.EndContractBlock();

            Contract.Assert(_encoding.GetMaxByteCount(1) <= 16, "_encoding.GetMaxByteCount(1) <= 16)");
            int numBytes = 0;
            fixed(byte * pBytes = _buffer) {
                numBytes = _encoder.GetBytes(&ch, 1, pBytes, 16, true);
            }
            OutStream.Write(_buffer, 0, numBytes);
        }

        // Writes a character array to this stream.
        //
        // This default implementation calls the Write(Object, int, int)
        // method to write the character array.
        //
        public virtual void Write(char[] chars)
        {
            if (chars == null)
                throw new ArgumentNullException("chars");
            Contract.EndContractBlock();

            byte[] bytes = _encoding.GetBytes(chars, 0, chars.Length);
            OutStream.Write(bytes, 0, bytes.Length);
        }

        // Writes a section of a character array to this stream.
        //
        // This default implementation calls the Write(Object, int, int)
        // method to write the character array.
        //
        public virtual void Write(char[] chars, int index, int count)
        {
            byte[] bytes = _encoding.GetBytes(chars, index, count);
            OutStream.Write(bytes, 0, bytes.Length);
        }

        // Writes a double to this stream. The current position of the stream is
        // advanced by eight.
        //
        [System.Security.SecuritySafeCritical]  // auto-generated
        public unsafe virtual void Write(double value)
        {
            // Reinterpret the double's bits as a ulong and emit little-endian.
            ulong TmpValue = *(ulong *)&value;
            _buffer[0] = (byte) TmpValue;
            _buffer[1] = (byte) (TmpValue >> 8);
            _buffer[2] = (byte) (TmpValue >> 16);
            _buffer[3] = (byte) (TmpValue >> 24);
            _buffer[4] = (byte) (TmpValue >> 32);
            _buffer[5] = (byte) (TmpValue >> 40);
            _buffer[6] = (byte) (TmpValue >> 48);
            _buffer[7] = (byte) (TmpValue >> 56);
            OutStream.Write(_buffer, 0, 8);
        }

        public virtual void Write(decimal value)
        {
            Decimal.GetBytes(value,_buffer);
            OutStream.Write(_buffer, 0, 16);
        }

        // Writes a two-byte signed integer to this stream. The current position of
        // the stream is advanced by two.
        //
        public virtual void Write(short value)
        {
            _buffer[0] = (byte) value;
            _buffer[1] = (byte) (value >> 8);
            OutStream.Write(_buffer, 0, 2);
        }

        // Writes a two-byte unsigned integer to this stream. The current position
        // of the stream is advanced by two.
        //
        [CLSCompliant(false)]
        public virtual void Write(ushort value)
        {
            _buffer[0] = (byte) value;
            _buffer[1] = (byte) (value >> 8);
            OutStream.Write(_buffer, 0, 2);
        }

        // Writes a four-byte signed integer to this stream. The current position
        // of the stream is advanced by four.
        //
        public virtual void Write(int value)
        {
            _buffer[0] = (byte) value;
            _buffer[1] = (byte) (value >> 8);
            _buffer[2] = (byte) (value >> 16);
            _buffer[3] = (byte) (value >> 24);
            OutStream.Write(_buffer, 0, 4);
        }

        // Writes a four-byte unsigned integer to this stream. The current position
        // of the stream is advanced by four.
        //
        [CLSCompliant(false)]
        public virtual void Write(uint value)
        {
            _buffer[0] = (byte) value;
            _buffer[1] = (byte) (value >> 8);
            _buffer[2] = (byte) (value >> 16);
            _buffer[3] = (byte) (value >> 24);
            OutStream.Write(_buffer, 0, 4);
        }

        // Writes an eight-byte signed integer to this stream. The current position
        // of the stream is advanced by eight.
        //
        public virtual void Write(long value)
        {
            _buffer[0] = (byte) value;
            _buffer[1] = (byte) (value >> 8);
            _buffer[2] = (byte) (value >> 16);
            _buffer[3] = (byte) (value >> 24);
            _buffer[4] = (byte) (value >> 32);
            _buffer[5] = (byte) (value >> 40);
            _buffer[6] = (byte) (value >> 48);
            _buffer[7] = (byte) (value >> 56);
            OutStream.Write(_buffer, 0, 8);
        }

        // Writes an eight-byte unsigned integer to this stream. The current
        // position of the stream is advanced by eight.
        //
        [CLSCompliant(false)]
        public virtual void Write(ulong value)
        {
            _buffer[0] = (byte) value;
            _buffer[1] = (byte) (value >> 8);
            _buffer[2] = (byte) (value >> 16);
            _buffer[3] = (byte) (value >> 24);
            _buffer[4] = (byte) (value >> 32);
            _buffer[5] = (byte) (value >> 40);
            _buffer[6] = (byte) (value >> 48);
            _buffer[7] = (byte) (value >> 56);
            OutStream.Write(_buffer, 0, 8);
        }

        // Writes a float to this stream. The current position of the stream is
        // advanced by four.
//
[System.Security.SecuritySafeCritical]  // auto-generated
public unsafe virtual void Write(float value)
{
    // Reinterpret the IEEE-754 bits and write them little-endian.
    uint TmpValue = *(uint *)&value;
    _buffer[0] = (byte) TmpValue;
    _buffer[1] = (byte) (TmpValue >> 8);
    _buffer[2] = (byte) (TmpValue >> 16);
    _buffer[3] = (byte) (TmpValue >> 24);
    OutStream.Write(_buffer, 0, 4);
}

// Writes a length-prefixed string to this stream in the BinaryWriter's
// current Encoding. This method first writes the byte length of the
// encoded string as a 7-bit encoded unsigned integer (see
// Write7BitEncodedInt below), and then writes that many bytes to the
// stream.
//
[System.Security.SecuritySafeCritical]  // auto-generated
public unsafe virtual void Write(String value)
{
    if (value==null)
        throw new ArgumentNullException("value");
    Contract.EndContractBlock();

    int len = _encoding.GetByteCount(value);
    Write7BitEncodedInt(len);

    // Lazily allocate the shared scratch buffer on first use; _maxChars is
    // the number of chars guaranteed to encode into one buffer-full.
    if (_largeByteBuffer == null) {
        _largeByteBuffer = new byte[LargeByteBufferSize];
        _maxChars = LargeByteBufferSize / _encoding.GetMaxByteCount(1);
    }

    if (len <= LargeByteBufferSize) {
        // Fast path: the whole string fits in the scratch buffer.
        //Contract.Assert(len == _encoding.GetBytes(chars, 0, chars.Length, _largeByteBuffer, 0), "encoding's GetByteCount & GetBytes gave different answers!  encoding type: "+_encoding.GetType().Name);
        _encoding.GetBytes(value, 0, value.Length, _largeByteBuffer, 0);
        OutStream.Write(_largeByteBuffer, 0, len);
    }
    else {
        // Aggressively try to not allocate memory in this loop for
        // runtime performance reasons.  Use an Encoder to write out
        // the string correctly (handling surrogates crossing buffer
        // boundaries properly).
        int charStart = 0;
        int numLeft = value.Length;
#if _DEBUG
        int totalBytes = 0;
#endif
        while (numLeft > 0) {
            // Figure out how many chars to process this round.
            int charCount = (numLeft > _maxChars) ? _maxChars : numLeft;
            int byteLen;
            // The stateful _encoder carries pending surrogate halves across
            // iterations; the final flush happens when charCount == numLeft.
            fixed(char* pChars = value) {
                fixed(byte* pBytes = _largeByteBuffer) {
                    byteLen = _encoder.GetBytes(pChars + charStart, charCount, pBytes, LargeByteBufferSize, charCount == numLeft);
                }
            }
#if _DEBUG
            totalBytes += byteLen;
            Contract.Assert (totalBytes <= len && byteLen <= LargeByteBufferSize, "BinaryWriter::Write(String) - More bytes encoded than expected!");
#endif
            OutStream.Write(_largeByteBuffer, 0, byteLen);
            charStart += charCount;
            numLeft -= charCount;
        }
#if _DEBUG
        Contract.Assert(totalBytes == len, "BinaryWriter::Write(String) - Didn't write out all the bytes!");
#endif
    }
}

// Writes an int in the compressed 7-bits-per-byte format used by the
// string length prefix above; least-significant group first.
protected void Write7BitEncodedInt(int value) {
    // Write out an int 7 bits at a time.  The high bit of the byte,
    // when on, tells reader to continue reading more bytes.
    uint v = (uint) value;   // support negative numbers
    while (v >= 0x80) {
        Write((byte) (v | 0x80));
        v >>= 7;
    }
    Write((byte)v);
}
}
}
namespace KabMan.Client
{
    // Designer-generated half of the frmBlechDetail partial class
    // (the event handlers referenced below live in the other half).
    partial class frmBlechDetail
    {
        /// <summary>
        /// Required designer variable.
        /// </summary>
        private System.ComponentModel.IContainer components = null;

        /// <summary>
        /// Clean up any resources being used.
        /// </summary>
        /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
        protected override void Dispose(bool disposing)
        {
            if (disposing && (components != null))
            {
                components.Dispose();
            }
            base.Dispose(disposing);
        }

        #region Windows Form Designer generated code

        /// <summary>
        /// Required method for Designer support - do not modify
        /// the contents of this method with the code editor.
        /// </summary>
        private void InitializeComponent()
        {
            this.components = new System.ComponentModel.Container();
            DevExpress.XtraEditors.DXErrorProvider.ConditionValidationRule conditionValidationRule1 = new DevExpress.XtraEditors.DXErrorProvider.ConditionValidationRule();
            DevExpress.XtraEditors.DXErrorProvider.ConditionValidationRule conditionValidationRule2 = new DevExpress.XtraEditors.DXErrorProvider.ConditionValidationRule();
            DevExpress.XtraEditors.DXErrorProvider.ConditionValidationRule conditionValidationRule3 = new DevExpress.XtraEditors.DXErrorProvider.ConditionValidationRule();
            DevExpress.XtraEditors.DXErrorProvider.ConditionValidationRule conditionValidationRule4 = new DevExpress.XtraEditors.DXErrorProvider.ConditionValidationRule();
            DevExpress.XtraEditors.DXErrorProvider.ConditionValidationRule conditionValidationRule5 = new DevExpress.XtraEditors.DXErrorProvider.ConditionValidationRule();
            this.layoutControl1 = new DevExpress.XtraLayout.LayoutControl();
            this.btnCancel = new DevExpress.XtraEditors.SimpleButton();
            this.btnSave = new DevExpress.XtraEditors.SimpleButton();
            this.LkpLocation = new DevExpress.XtraEditors.LookUpEdit();
            this.lkpDataCenter = new DevExpress.XtraEditors.LookUpEdit();
            this.lkpSan = new DevExpress.XtraEditors.LookUpEdit();
            this.lkpBlechType = new DevExpress.XtraEditors.LookUpEdit();
            this.LkpObjectType = new DevExpress.XtraEditors.LookUpEdit();
            this.layoutControlGroup1 = new DevExpress.XtraLayout.LayoutControlGroup();
            this.layoutControlItem2 = new DevExpress.XtraLayout.LayoutControlItem();
            this.layoutControlItem3 = new DevExpress.XtraLayout.LayoutControlItem();
            this.layoutControlItem5 = new DevExpress.XtraLayout.LayoutControlItem();
            this.emptySpaceItem1 = new DevExpress.XtraLayout.EmptySpaceItem();
            this.layoutControlItem7 = new DevExpress.XtraLayout.LayoutControlItem();
            this.layoutControlItem8 = new DevExpress.XtraLayout.LayoutControlItem();
            this.layoutControlItem1 = new DevExpress.XtraLayout.LayoutControlItem();
            this.layoutControlItem4 = new DevExpress.XtraLayout.LayoutControlItem();
            this.dxValidationProvider1 = new DevExpress.XtraEditors.DXErrorProvider.DXValidationProvider(this.components);
            ((System.ComponentModel.ISupportInitialize)(this.layoutControl1)).BeginInit();
            this.layoutControl1.SuspendLayout();
            ((System.ComponentModel.ISupportInitialize)(this.LkpLocation.Properties)).BeginInit();
            ((System.ComponentModel.ISupportInitialize)(this.lkpDataCenter.Properties)).BeginInit();
            ((System.ComponentModel.ISupportInitialize)(this.lkpSan.Properties)).BeginInit();
            ((System.ComponentModel.ISupportInitialize)(this.lkpBlechType.Properties)).BeginInit();
            ((System.ComponentModel.ISupportInitialize)(this.LkpObjectType.Properties)).BeginInit();
            ((System.ComponentModel.ISupportInitialize)(this.layoutControlGroup1)).BeginInit();
            ((System.ComponentModel.ISupportInitialize)(this.layoutControlItem2)).BeginInit();
            ((System.ComponentModel.ISupportInitialize)(this.layoutControlItem3)).BeginInit();
            ((System.ComponentModel.ISupportInitialize)(this.layoutControlItem5)).BeginInit();
            ((System.ComponentModel.ISupportInitialize)(this.emptySpaceItem1)).BeginInit();
            ((System.ComponentModel.ISupportInitialize)(this.layoutControlItem7)).BeginInit();
            ((System.ComponentModel.ISupportInitialize)(this.layoutControlItem8)).BeginInit();
            ((System.ComponentModel.ISupportInitialize)(this.layoutControlItem1)).BeginInit();
            ((System.ComponentModel.ISupportInitialize)(this.layoutControlItem4)).BeginInit();
            ((System.ComponentModel.ISupportInitialize)(this.dxValidationProvider1)).BeginInit();
            this.SuspendLayout();
            // 
            // layoutControl1
            // 
            this.layoutControl1.AllowCustomizationMenu = false;
            this.layoutControl1.Appearance.DisabledLayoutGroupCaption.ForeColor = System.Drawing.SystemColors.GrayText;
            this.layoutControl1.Appearance.DisabledLayoutGroupCaption.Options.UseForeColor = true;
            this.layoutControl1.Appearance.DisabledLayoutItem.ForeColor = System.Drawing.SystemColors.GrayText;
            this.layoutControl1.Appearance.DisabledLayoutItem.Options.UseForeColor = true;
            this.layoutControl1.Controls.Add(this.btnCancel);
            this.layoutControl1.Controls.Add(this.btnSave);
            this.layoutControl1.Controls.Add(this.LkpLocation);
            this.layoutControl1.Controls.Add(this.lkpDataCenter);
            this.layoutControl1.Controls.Add(this.lkpSan);
            this.layoutControl1.Controls.Add(this.lkpBlechType);
            this.layoutControl1.Controls.Add(this.LkpObjectType);
            this.layoutControl1.Dock = System.Windows.Forms.DockStyle.Fill;
            this.layoutControl1.Location = new System.Drawing.Point(0, 0);
            this.layoutControl1.Name = "layoutControl1";
            this.layoutControl1.Root = this.layoutControlGroup1;
            this.layoutControl1.Size = new System.Drawing.Size(277, 193);
            this.layoutControl1.TabIndex = 0;
            this.layoutControl1.Text = "layoutControl1";
            // 
            // btnCancel
            // 
            this.btnCancel.Appearance.BackColor = System.Drawing.SystemColors.ButtonFace;
            this.btnCancel.Appearance.BackColor2 = System.Drawing.SystemColors.ButtonHighlight;
            this.btnCancel.Appearance.BorderColor = System.Drawing.Color.DimGray;
            this.btnCancel.Appearance.Options.UseBackColor = true;
            this.btnCancel.Appearance.Options.UseBorderColor = true;
            this.btnCancel.Appearance.Options.UseForeColor = true;
            this.btnCancel.Location = new System.Drawing.Point(214, 162);
            this.btnCancel.Name = "btnCancel";
            this.btnCancel.Size = new System.Drawing.Size(57, 22);
            this.btnCancel.StyleController = this.layoutControl1;
            this.btnCancel.TabIndex = 1;
            this.btnCancel.Text = "Cancel";
            this.btnCancel.Click += new System.EventHandler(this.btnCancel_Click);
            // 
            // btnSave
            // 
            this.btnSave.Appearance.BackColor = System.Drawing.SystemColors.ButtonFace;
            this.btnSave.Appearance.BackColor2 = System.Drawing.SystemColors.ButtonHighlight;
            this.btnSave.Appearance.BorderColor = System.Drawing.Color.DimGray;
            this.btnSave.Appearance.Options.UseBackColor = true;
            this.btnSave.Appearance.Options.UseBorderColor = true;
            this.btnSave.Appearance.Options.UseForeColor = true;
            this.btnSave.Location = new System.Drawing.Point(144, 162);
            this.btnSave.Name = "btnSave";
            this.btnSave.Size = new System.Drawing.Size(59, 22);
            this.btnSave.StyleController = this.layoutControl1;
            this.btnSave.TabIndex = 1;
            this.btnSave.Text = "Save";
            this.btnSave.Click += new System.EventHandler(this.btnSave_Click);
            // 
            // LkpLocation
            // 
            this.LkpLocation.Location = new System.Drawing.Point(99, 38);
            this.LkpLocation.Name = "LkpLocation";
            this.LkpLocation.Properties.Buttons.AddRange(new DevExpress.XtraEditors.Controls.EditorButton[] {
            new DevExpress.XtraEditors.Controls.EditorButton(DevExpress.XtraEditors.Controls.ButtonPredefines.Combo)});
            this.LkpLocation.Properties.Columns.AddRange(new DevExpress.XtraEditors.Controls.LookUpColumnInfo[] {
            new DevExpress.XtraEditors.Controls.LookUpColumnInfo("FullLocationName", "Location Name", 20, DevExpress.Utils.FormatType.None, "", true, DevExpress.Utils.HorzAlignment.Default, DevExpress.Data.ColumnSortOrder.None)});
            this.LkpLocation.Properties.NullText = "";
            this.LkpLocation.Size = new System.Drawing.Size(172, 20);
            this.LkpLocation.StyleController = this.layoutControl1;
            this.LkpLocation.TabIndex = 1;
            conditionValidationRule1.ConditionOperator = DevExpress.XtraEditors.DXErrorProvider.ConditionOperator.IsNotBlank;
            conditionValidationRule1.ErrorText = "This value is not valid";
            conditionValidationRule1.ErrorType = DevExpress.XtraEditors.DXErrorProvider.ErrorType.Warning;
            this.dxValidationProvider1.SetValidationRule(this.LkpLocation, conditionValidationRule1);
            this.LkpLocation.EditValueChanged += new System.EventHandler(this.lkpLocation_EditValueChanged);
            this.LkpLocation.EditValueChanging += new DevExpress.XtraEditors.Controls.ChangingEventHandler(this.lkpLocation_EditValueChanging);
            // 
            // lkpDataCenter
            // 
            this.lkpDataCenter.Enabled = false;
            this.lkpDataCenter.Location = new System.Drawing.Point(99, 69);
            this.lkpDataCenter.Name = "lkpDataCenter";
            this.lkpDataCenter.Properties.Buttons.AddRange(new DevExpress.XtraEditors.Controls.EditorButton[] {
            new DevExpress.XtraEditors.Controls.EditorButton(DevExpress.XtraEditors.Controls.ButtonPredefines.Combo)});
            this.lkpDataCenter.Properties.Columns.AddRange(new DevExpress.XtraEditors.Controls.LookUpColumnInfo[] {
            new DevExpress.XtraEditors.Controls.LookUpColumnInfo("RoomName", "Room", 20, DevExpress.Utils.FormatType.None, "", true, DevExpress.Utils.HorzAlignment.Default, DevExpress.Data.ColumnSortOrder.None)});
            this.lkpDataCenter.Properties.NullText = "";
            this.lkpDataCenter.Size = new System.Drawing.Size(172, 20);
            this.lkpDataCenter.StyleController = this.layoutControl1;
            this.lkpDataCenter.TabIndex = 5;
            conditionValidationRule2.ConditionOperator = DevExpress.XtraEditors.DXErrorProvider.ConditionOperator.IsNotBlank;
            conditionValidationRule2.ErrorText = "This value is not valid";
            conditionValidationRule2.ErrorType = DevExpress.XtraEditors.DXErrorProvider.ErrorType.Warning;
            this.dxValidationProvider1.SetValidationRule(this.lkpDataCenter, conditionValidationRule2);
            this.lkpDataCenter.EditValueChanged += new System.EventHandler(this.lkpDataCenter_EditValueChanged);
            // 
            // lkpSan
            // 
            this.lkpSan.Location = new System.Drawing.Point(99, 100);
            this.lkpSan.Name = "lkpSan";
            this.lkpSan.Properties.Buttons.AddRange(new DevExpress.XtraEditors.Controls.EditorButton[] {
            new DevExpress.XtraEditors.Controls.EditorButton(DevExpress.XtraEditors.Controls.ButtonPredefines.Combo),
            new DevExpress.XtraEditors.Controls.EditorButton(DevExpress.XtraEditors.Controls.ButtonPredefines.Plus, "Add Coordinate")});
            this.lkpSan.Properties.Columns.AddRange(new DevExpress.XtraEditors.Controls.LookUpColumnInfo[] {
            new DevExpress.XtraEditors.Controls.LookUpColumnInfo("Field", "Caption", 20, DevExpress.Utils.FormatType.None, "", true, DevExpress.Utils.HorzAlignment.Default, DevExpress.Data.ColumnSortOrder.None)});
            this.lkpSan.Properties.NullText = "";
            this.lkpSan.Size = new System.Drawing.Size(172, 20);
            this.lkpSan.StyleController = this.layoutControl1;
            this.lkpSan.TabIndex = 1;
            conditionValidationRule3.ConditionOperator = DevExpress.XtraEditors.DXErrorProvider.ConditionOperator.IsNotBlank;
            conditionValidationRule3.ErrorText = "This value is not valid";
            conditionValidationRule3.ErrorType = DevExpress.XtraEditors.DXErrorProvider.ErrorType.Warning;
            this.dxValidationProvider1.SetValidationRule(this.lkpSan, conditionValidationRule3);
            // 
            // lkpBlechType
            // 
            this.lkpBlechType.Location = new System.Drawing.Point(99, 131);
            this.lkpBlechType.Name = "lkpBlechType";
            this.lkpBlechType.Properties.Buttons.AddRange(new DevExpress.XtraEditors.Controls.EditorButton[] {
            new DevExpress.XtraEditors.Controls.EditorButton(DevExpress.XtraEditors.Controls.ButtonPredefines.Combo)});
            this.lkpBlechType.Properties.Columns.AddRange(new DevExpress.XtraEditors.Controls.LookUpColumnInfo[] {
            new DevExpress.XtraEditors.Controls.LookUpColumnInfo("Name", "Name", 20, DevExpress.Utils.FormatType.None, "", true, DevExpress.Utils.HorzAlignment.Default, DevExpress.Data.ColumnSortOrder.None)});
            this.lkpBlechType.Properties.NullText = "";
            this.lkpBlechType.Size = new System.Drawing.Size(172, 20);
            this.lkpBlechType.StyleController = this.layoutControl1;
            this.lkpBlechType.TabIndex = 1;
            conditionValidationRule4.ConditionOperator = DevExpress.XtraEditors.DXErrorProvider.ConditionOperator.IsNotBlank;
            conditionValidationRule4.ErrorText = "This value is not valid";
            conditionValidationRule4.ErrorType = DevExpress.XtraEditors.DXErrorProvider.ErrorType.Warning;
            this.dxValidationProvider1.SetValidationRule(this.lkpBlechType, conditionValidationRule4);
            // 
            // LkpObjectType
            // 
            this.LkpObjectType.Location = new System.Drawing.Point(99, 7);
            this.LkpObjectType.Name = "LkpObjectType";
            this.LkpObjectType.Properties.Buttons.AddRange(new DevExpress.XtraEditors.Controls.EditorButton[] {
            new DevExpress.XtraEditors.Controls.EditorButton(DevExpress.XtraEditors.Controls.ButtonPredefines.Combo)});
            this.LkpObjectType.Properties.Columns.AddRange(new DevExpress.XtraEditors.Controls.LookUpColumnInfo[] {
            new DevExpress.XtraEditors.Controls.LookUpColumnInfo("Name", "Object Name", 20, DevExpress.Utils.FormatType.None, "", true, DevExpress.Utils.HorzAlignment.Default, DevExpress.Data.ColumnSortOrder.None)});
            this.LkpObjectType.Properties.NullText = "";
            this.LkpObjectType.Size = new System.Drawing.Size(172, 20);
            this.LkpObjectType.StyleController = this.layoutControl1;
            this.LkpObjectType.TabIndex = 4;
            conditionValidationRule5.ConditionOperator = DevExpress.XtraEditors.DXErrorProvider.ConditionOperator.IsNotBlank;
            conditionValidationRule5.ErrorText = "This value is not valid";
            conditionValidationRule5.ErrorType = DevExpress.XtraEditors.DXErrorProvider.ErrorType.Warning;
            this.dxValidationProvider1.SetValidationRule(this.LkpObjectType, conditionValidationRule5);
            this.LkpObjectType.EditValueChanging += new DevExpress.XtraEditors.Controls.ChangingEventHandler(this.LkpObjectType_EditValueChanging);
            // 
            // layoutControlGroup1
            // 
            this.layoutControlGroup1.CustomizationFormText = "Root";
            this.layoutControlGroup1.Items.AddRange(new DevExpress.XtraLayout.BaseLayoutItem[] {
            this.layoutControlItem2,
            this.layoutControlItem3,
            this.layoutControlItem5,
            this.emptySpaceItem1,
            this.layoutControlItem7,
            this.layoutControlItem8,
            this.layoutControlItem1,
            this.layoutControlItem4});
            this.layoutControlGroup1.Location = new System.Drawing.Point(0, 0);
            this.layoutControlGroup1.Name = "Root";
            this.layoutControlGroup1.Size = new System.Drawing.Size(277, 193);
            this.layoutControlGroup1.Spacing = new DevExpress.XtraLayout.Utils.Padding(0, 0, 0, 0);
            this.layoutControlGroup1.Text = "Root";
            this.layoutControlGroup1.TextVisible = false;
            // 
            // layoutControlItem2
            // 
            this.layoutControlItem2.Control = this.lkpBlechType;
            this.layoutControlItem2.CustomizationFormText = "Blech Type :";
            this.layoutControlItem2.Location = new System.Drawing.Point(0, 124);
            this.layoutControlItem2.Name = "layoutControlItem2";
            this.layoutControlItem2.Size = new System.Drawing.Size(275, 31);
            this.layoutControlItem2.Text = "Blech Type";
            this.layoutControlItem2.TextLocation = DevExpress.Utils.Locations.Left;
            this.layoutControlItem2.TextSize = new System.Drawing.Size(87, 20);
            // 
            // layoutControlItem3
            // 
            this.layoutControlItem3.Control = this.lkpSan;
            this.layoutControlItem3.CustomizationFormText = "San :";
            this.layoutControlItem3.Location = new System.Drawing.Point(0, 93);
            this.layoutControlItem3.Name = "layoutControlItem3";
            this.layoutControlItem3.Size = new System.Drawing.Size(275, 31);
            this.layoutControlItem3.Text = "San or Coordinate";
            this.layoutControlItem3.TextLocation = DevExpress.Utils.Locations.Left;
            this.layoutControlItem3.TextSize = new System.Drawing.Size(87, 20);
            // 
            // layoutControlItem5
            // 
            this.layoutControlItem5.Control = this.LkpLocation;
            this.layoutControlItem5.CustomizationFormText = "Location :";
            this.layoutControlItem5.Location = new System.Drawing.Point(0, 31);
            this.layoutControlItem5.Name = "layoutControlItem5";
            this.layoutControlItem5.Size = new System.Drawing.Size(275, 31);
            this.layoutControlItem5.Text = "Location";
            this.layoutControlItem5.TextLocation = DevExpress.Utils.Locations.Left;
            this.layoutControlItem5.TextSize = new System.Drawing.Size(87, 20);
            // 
            // emptySpaceItem1
            // 
            this.emptySpaceItem1.CustomizationFormText = "emptySpaceItem1";
            this.emptySpaceItem1.Location = new System.Drawing.Point(0, 155);
            this.emptySpaceItem1.Name = "emptySpaceItem1";
            this.emptySpaceItem1.Size = new System.Drawing.Size(137, 36);
            this.emptySpaceItem1.Text = "emptySpaceItem1";
            this.emptySpaceItem1.TextSize = new System.Drawing.Size(0, 0);
            // 
            // layoutControlItem7
            // 
            this.layoutControlItem7.Control = this.btnSave;
            this.layoutControlItem7.CustomizationFormText = "layoutControlItem7";
            this.layoutControlItem7.Location = new System.Drawing.Point(137, 155);
            this.layoutControlItem7.Name = "layoutControlItem7";
            this.layoutControlItem7.Size = new System.Drawing.Size(70, 36);
            this.layoutControlItem7.Text = "layoutControlItem7";
            this.layoutControlItem7.TextLocation = DevExpress.Utils.Locations.Left;
            this.layoutControlItem7.TextSize = new System.Drawing.Size(0, 0);
            this.layoutControlItem7.TextToControlDistance = 0;
            this.layoutControlItem7.TextVisible = false;
            // 
            // layoutControlItem8
            // 
            this.layoutControlItem8.Control = this.btnCancel;
            this.layoutControlItem8.CustomizationFormText = "layoutControlItem8";
            this.layoutControlItem8.Location = new System.Drawing.Point(207, 155);
            this.layoutControlItem8.Name = "layoutControlItem8";
            this.layoutControlItem8.Size = new System.Drawing.Size(68, 36);
            this.layoutControlItem8.Text = "layoutControlItem8";
            this.layoutControlItem8.TextLocation = DevExpress.Utils.Locations.Left;
            this.layoutControlItem8.TextSize = new System.Drawing.Size(0, 0);
            this.layoutControlItem8.TextToControlDistance = 0;
            this.layoutControlItem8.TextVisible = false;
            // 
            // layoutControlItem1
            // 
            this.layoutControlItem1.Control = this.LkpObjectType;
            this.layoutControlItem1.CustomizationFormText = "Object Type :";
            this.layoutControlItem1.Location = new System.Drawing.Point(0, 0);
            this.layoutControlItem1.Name = "layoutControlItem1";
            this.layoutControlItem1.Size = new System.Drawing.Size(275, 31);
            this.layoutControlItem1.Text = "Object Type";
            this.layoutControlItem1.TextLocation = DevExpress.Utils.Locations.Left;
            this.layoutControlItem1.TextSize = new System.Drawing.Size(87, 20);
            // 
            // layoutControlItem4
            // 
            this.layoutControlItem4.Control = this.lkpDataCenter;
            this.layoutControlItem4.CustomizationFormText = "Room :";
            this.layoutControlItem4.Location = new System.Drawing.Point(0, 62);
            this.layoutControlItem4.Name = "layoutControlItem4";
            this.layoutControlItem4.Size = new System.Drawing.Size(275, 31);
            this.layoutControlItem4.Text = "Data Center";
            this.layoutControlItem4.TextLocation = DevExpress.Utils.Locations.Left;
            this.layoutControlItem4.TextSize = new System.Drawing.Size(87, 20);
            // 
            // frmBlechDetail
            // 
            this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F);
            this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
            this.ClientSize = new System.Drawing.Size(277, 193);
            this.Controls.Add(this.layoutControl1);
            this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedSingle;
            this.KeyPreview = true;
            this.MaximizeBox = false;
            this.MinimizeBox = false;
            this.Name = "frmBlechDetail";
            this.ShowInTaskbar = false;
            this.StartPosition = System.Windows.Forms.FormStartPosition.CenterScreen;
            this.Text = "Blech Detail";
            this.Load += new System.EventHandler(this.frmBlechDetail_Load);
            ((System.ComponentModel.ISupportInitialize)(this.layoutControl1)).EndInit();
            this.layoutControl1.ResumeLayout(false);
            ((System.ComponentModel.ISupportInitialize)(this.LkpLocation.Properties)).EndInit();
            ((System.ComponentModel.ISupportInitialize)(this.lkpDataCenter.Properties)).EndInit();
            ((System.ComponentModel.ISupportInitialize)(this.lkpSan.Properties)).EndInit();
            ((System.ComponentModel.ISupportInitialize)(this.lkpBlechType.Properties)).EndInit();
            ((System.ComponentModel.ISupportInitialize)(this.LkpObjectType.Properties)).EndInit();
            ((System.ComponentModel.ISupportInitialize)(this.layoutControlGroup1)).EndInit();
            ((System.ComponentModel.ISupportInitialize)(this.layoutControlItem2)).EndInit();
            ((System.ComponentModel.ISupportInitialize)(this.layoutControlItem3)).EndInit();
            ((System.ComponentModel.ISupportInitialize)(this.layoutControlItem5)).EndInit();
            ((System.ComponentModel.ISupportInitialize)(this.emptySpaceItem1)).EndInit();
            ((System.ComponentModel.ISupportInitialize)(this.layoutControlItem7)).EndInit();
            ((System.ComponentModel.ISupportInitialize)(this.layoutControlItem8)).EndInit();
            ((System.ComponentModel.ISupportInitialize)(this.layoutControlItem1)).EndInit();
            ((System.ComponentModel.ISupportInitialize)(this.layoutControlItem4)).EndInit();
            ((System.ComponentModel.ISupportInitialize)(this.dxValidationProvider1)).EndInit();
            this.ResumeLayout(false);
        }

        #endregion

        private DevExpress.XtraLayout.LayoutControl layoutControl1;
        private DevExpress.XtraEditors.LookUpEdit lkpBlechType;
        private DevExpress.XtraEditors.LookUpEdit LkpObjectType;
        private DevExpress.XtraLayout.LayoutControlGroup layoutControlGroup1;
        private DevExpress.XtraLayout.LayoutControlItem layoutControlItem1;
        private DevExpress.XtraLayout.LayoutControlItem layoutControlItem2;
        private DevExpress.XtraEditors.LookUpEdit LkpLocation;
        private DevExpress.XtraEditors.LookUpEdit lkpDataCenter;
        private DevExpress.XtraEditors.LookUpEdit lkpSan;
        private DevExpress.XtraLayout.LayoutControlItem layoutControlItem3;
        private DevExpress.XtraLayout.LayoutControlItem layoutControlItem4;
        private DevExpress.XtraLayout.LayoutControlItem layoutControlItem5;
        private DevExpress.XtraEditors.SimpleButton btnCancel;
        private DevExpress.XtraEditors.SimpleButton btnSave;
        private DevExpress.XtraLayout.EmptySpaceItem emptySpaceItem1;
        private DevExpress.XtraLayout.LayoutControlItem layoutControlItem7;
        private DevExpress.XtraLayout.LayoutControlItem layoutControlItem8;
        private DevExpress.XtraEditors.DXErrorProvider.DXValidationProvider dxValidationProvider1;
    }
}
using System;
using System.Collections.Generic;
using System.Linq;
using Android.Animation;
using Android.Views;
using Android.Views.Animations;
using Android.Widget;

namespace Xamarin.Android.ExpandableSelector.Animation
{
    /// <summary>
    /// Coordinates the expand/collapse animations of an ExpandableSelector:
    /// translates the child buttons along the Y axis and resizes the container
    /// view, raising <see cref="AnimationFinished"/> when an animation ends.
    /// </summary>
    public class ExpandableSelectorAnimator
    {
        // Property name animated by ObjectAnimator.OfFloat on each button.
        private const string Y_ANIMATION = "translationY";
        // The container resize runs slightly longer than the button animations.
        private const float CONTAINER_ANIMATION_OFFSET = 1.16f;
        private readonly int _animationDuration;
        private readonly int _collapseInterpolatorId;
        private readonly View _container;
        private readonly int _containerInterpolatorId;
        private readonly int _expandInterpolatorId;

        public ExpandableSelectorAnimator(View container, int animationDuration, int expandInterpolatorId,
            int collapseInterpolatorId, int containerInterpolatorId)
        {
            this._container = container;
            this._animationDuration = animationDuration;
            this._expandInterpolatorId = expandInterpolatorId;
            this._collapseInterpolatorId = collapseInterpolatorId;
            this._containerInterpolatorId = containerInterpolatorId;
            this.IsCollapsed = true;
        }

        /// <summary>
        /// Configures the ExpandableSelectorAnimator to change the first item visibility to View.VISIBLE /
        /// View.INVISIBLE once the collapse/expand animation has been performed.
        /// </summary>
        public bool HideFirstItemOnCollapse { get; set; }

        /// <summary>
        /// The list of buttons used to calculate the animation parameters.
        /// </summary>
        public List<View> Buttons { get; set; }

        public bool IsCollapsed { get; private set; }

        /// <summary>
        /// Returns true if the ExpandableSelector widget is expanded or false if it is collapsed.
        /// </summary>
        public bool IsExpanded
        {
            get { return !this.IsCollapsed; }
        }

        public event EventHandler AnimationFinished;

        /// <summary>
        /// Expands the ExpandableSelector performing a resize animation and at the same time moves the
        /// buttons configured as children to the associated position given the order in the List used
        /// to keep the reference to the buttons. The visibility of the buttons inside the
        /// ExpandableSelector changes to View.VISIBLE before the animation is performed.
        /// </summary>
        public void Expand()
        {
            this.IsCollapsed = false;
            this.ChangeButtonsVisibility(ViewStates.Visible);
            this.ExpandButtons();
            this.ExpandContainer();
        }

        /// <summary>
        /// Collapses the ExpandableSelector performing a resize animation and at the same time moves the
        /// buttons configured as children back to the origin. The visibility of the buttons inside the
        /// ExpandableSelector changes to View.INVISIBLE after the resize animation.
        /// </summary>
        public void Collapse()
        {
            // Fix: the flag was assigned twice in a row in the original code.
            this.IsCollapsed = true;
            this.CollapseButtons();
            this.CollapseContainer();
        }

        /// <summary>
        /// Configures the Button/ImageButton added to the ExpandableSelector to match the initial
        /// configuration needed by the component (bottom / center-horizontal gravity).
        /// </summary>
        /// <param name="button">The child view to initialize.</param>
        public void InitializeButton(View button)
        {
            this.ChangeGravityToBottomCenterHorizontal(button);
        }

        /// <summary>
        /// Returns the component to the initial state without removing configuration related to
        /// animation durations or whether the first item visibility has to be changed.
        /// </summary>
        public void Reset()
        {
            this.Buttons = new List<View>();
            this.IsCollapsed = true;
        }

        // Animates every button from its current Y to its expanded Y slot.
        private void ExpandButtons()
        {
            int numberOfButtons = this.Buttons.Count;
            var animations = new Animator[numberOfButtons];
            for (int i = 0; i < numberOfButtons; i++)
            {
                View button = this.Buttons[i];
                ITimeInterpolator interpolator = this.GetExpandAnimatorInterpolation();
                float toY = this.CalculateExpandedYPosition(i);
                animations[i] = this.CreateAnimatorForButton(interpolator, button, toY);
            }
            this.PlayAnimatorsTogether(animations);
        }

        // Animates every button back to translationY == 0.
        private void CollapseButtons()
        {
            int numberOfButtons = this.Buttons.Count;
            ITimeInterpolator interpolator = this.GetCollapseAnimatorInterpolation();
            var animations = new Animator[numberOfButtons];
            for (int i = 0; i < numberOfButtons; i++)
            {
                View button = this.Buttons[i];
                int toY = 0;
                animations[i] = this.CreateAnimatorForButton(interpolator, button, toY);
            }
            this.PlayAnimatorsTogether(animations);
        }

        // Resizes the container to the summed height of all buttons.
        private void ExpandContainer()
        {
            int toWidth = this._container.Width;
            int toHeight = this.GetSumHeight();
            IInterpolator interpolator = this.GetContainerAnimationInterpolator();
            ResizeAnimation resizeAnimation = this.CreateResizeAnimation(toWidth, interpolator, toHeight);
            this._container.StartAnimation(resizeAnimation);
            if (this._container.Animation != null)
            {
                this._container.Animation.AnimationEnd += delegate
                {
                    this.ChangeButtonsVisibility(ViewStates.Visible);
                    this.OnAnimationFinished();
                };
            }
        }

        // Resizes the container down to the height of the first item.
        private void CollapseContainer()
        {
            int toWidth = this._container.Width;
            float toHeight = this.GetFirstItemHeight();
            IInterpolator interpolator = this.GetContainerAnimationInterpolator();
            ResizeAnimation resizeAnimation = this.CreateResizeAnimation(toWidth, interpolator, toHeight);
            // Fix: the original attached the AnimationEnd handler both before
            // and after StartAnimation, so the buttons were hidden and
            // AnimationFinished raised twice. Subscribe once, after the
            // animation starts, mirroring ExpandContainer.
            this._container.StartAnimation(resizeAnimation);
            if (this._container.Animation != null)
            {
                this._container.Animation.AnimationEnd += delegate
                {
                    this.ChangeButtonsVisibility(ViewStates.Invisible);
                    this.OnAnimationFinished();
                };
            }
        }

        // Builds a translationY animator for a single button.
        private ObjectAnimator CreateAnimatorForButton(ITimeInterpolator interpolator, View button, float toY)
        {
            ObjectAnimator objectAnimator = ObjectAnimator.OfFloat(button, Y_ANIMATION, toY);
            objectAnimator.SetInterpolator(interpolator);
            objectAnimator.SetDuration(this._animationDuration);
            return objectAnimator;
        }

        // Builds the container resize animation.
        // NOTE(review): this animation also raises AnimationFinished on end, in
        // addition to the handlers attached in Expand/CollapseContainer, so the
        // event can fire more than once per resize — confirm whether both are
        // intended before removing either.
        private ResizeAnimation CreateResizeAnimation(float toWidth, IInterpolator interpolator, float toHeight)
        {
            var resizeAnimation = new ResizeAnimation(this._container, toWidth, toHeight)
            {
                Interpolator = interpolator,
                Duration = (long)(this._animationDuration * CONTAINER_ANIMATION_OFFSET)
            };
            resizeAnimation.AnimationEnd += delegate { this.OnAnimationFinished(); };
            return resizeAnimation;
        }

        private void PlayAnimatorsTogether(Animator[] animations)
        {
            var animatorSet = new AnimatorSet();
            animatorSet.PlayTogether(animations);
            animatorSet.Start();
        }

        // Stacks button heights upwards from the bottom of the container.
        // NOTE(review): left/right margins are used as vertical spacing here
        // (same as GetSumHeight) — presumably intentional for this layout;
        // confirm against the layout resources.
        private float CalculateExpandedYPosition(int buttonPosition)
        {
            int numberOfButtons = this.Buttons.Count;
            int y = 0;
            for (int i = numberOfButtons - 1; i > buttonPosition; i--)
            {
                View button = this.Buttons[i];
                y = y - button.Height - this.GetMarginRight(button) - this.GetMarginLeft(button);
            }
            return y;
        }

        // Applies the given visibility to every button except, optionally,
        // the last one (kept visible when HideFirstItemOnCollapse is false).
        private void ChangeButtonsVisibility(ViewStates visibility)
        {
            int lastItem = this.HideFirstItemOnCollapse ? this.Buttons.Count : this.Buttons.Count - 1;
            for (int i = 0; i < lastItem; i++)
            {
                View button = this.Buttons[i];
                button.Visibility = visibility;
            }
        }

        private ITimeInterpolator GetExpandAnimatorInterpolation()
        {
            try
            {
                return AnimationUtils.LoadInterpolator(this._container.Context, this._expandInterpolatorId);
            }
            catch (Exception)
            {
                // Invalid/missing interpolator resource: fall back to the
                // animator's default interpolator.
                return null;
            }
        }

        private ITimeInterpolator GetCollapseAnimatorInterpolation()
        {
            try
            {
                return AnimationUtils.LoadInterpolator(this._container.Context, this._collapseInterpolatorId);
            }
            catch (Exception)
            {
                return null;
            }
        }

        private IInterpolator GetContainerAnimationInterpolator()
        {
            try
            {
                return AnimationUtils.LoadInterpolator(this._container.Context, this._containerInterpolatorId);
            }
            catch (Exception)
            {
                return null;
            }
        }

        // Total height of all buttons including their horizontal margins
        // (see the note on CalculateExpandedYPosition).
        private int GetSumHeight()
        {
            return this.Buttons.Sum(button => button.Height + this.GetMarginRight(button) + this.GetMarginLeft(button));
        }

        private int GetMarginRight(View view)
        {
            var layoutParams = (FrameLayout.LayoutParams)view.LayoutParameters;
            return layoutParams.RightMargin;
        }

        private int GetMarginLeft(View view)
        {
            var layoutParams = (FrameLayout.LayoutParams)view.LayoutParameters;
            return layoutParams.LeftMargin;
        }

        // Height of the first button plus its vertical margins; used as the
        // collapsed container height.
        private float GetFirstItemHeight()
        {
            View firstButton = this.Buttons[0];
            int height = firstButton.Height;
            var layoutParams = (FrameLayout.LayoutParams)firstButton.LayoutParameters;
            int topMargin = layoutParams.TopMargin;
            int bottomMargin = layoutParams.BottomMargin;
            return height + topMargin + bottomMargin;
        }

        private void ChangeGravityToBottomCenterHorizontal(View view)
        {
            var layoutParams = (FrameLayout.LayoutParams)view.LayoutParameters;
            layoutParams.Gravity = GravityFlags.Bottom | GravityFlags.CenterHorizontal;
        }

        private void OnAnimationFinished()
        {
            EventHandler handler = this.AnimationFinished;
            if (handler != null)
            {
                handler(this, EventArgs.Empty);
            }
        }
    }
}
using UnityEngine;
using System.Collections.Generic;
using Finegamedesign.Utils;

namespace Finegamedesign.SmallWorld
{
    /// <summary>
    /// Networked "eat smaller bodies to grow" actor for a Photon (PUN) game.
    /// Reads local input when owned by this client, grows by absorbing smaller
    /// bodies on collision, and plays a particle effect when eaten.
    /// </summary>
    [RequireComponent(typeof(Rigidbody2D), typeof(PhotonView))]
    public sealed class PhotonBody : MonoBehaviour
    {
        // A body may eat another only if its radius is at least this factor larger.
        public static float eatRadiusThreshold = 1.125f;
        public static int playerLayer = 8;
        // Registry of all non-static bodies; entries are added in Setup() and
        // never removed here — NOTE(review): verify another system prunes this list.
        public static List<GameObject> mobiles = new List<GameObject>();
        public bool isBot = false;
        public bool isStatic = false;
        public float particleResetDelay = 6.0f;
        public float startScale = 0.5f;
        public PhotonView photon;
        public Rigidbody2D body;
        public GameObject eatenParticle;
        private GameObject particle;
        private float particleResetTime = -1.0f;

        // AllBufferedViaServer
        // """ Sends the RPC to everyone (including this client) through the server and buffers it for players joining later.
        // This client executes the RPC like any other when it received it from the server. Benefit: The server's order of sending the RPCs is the same on all clients."""
        // https://doc-api.photonengine.com/en/pun/current/group__public_api.html
        // NOTE(review): the comment above describes AllBufferedViaServer, but the
        // code sends PhotonTargets.All (unbuffered) — confirm which is intended.
        public static void RPC(PhotonView owner, string methodName)
        {
            owner.RPC(methodName, PhotonTargets.All);
        }

        // Overload forwarding one Vector3 argument to the remote method.
        public static void RPC(PhotonView owner, string methodName, Vector3 position)
        {
            owner.RPC(methodName, PhotonTargets.All, position);
        }

        public void Awake()
        {
            Setup();
        }

        void OnEnable()
        {
            Setup();
        }

        /// <summary>
        /// Idempotent initialization: registers mobiles, resets scale, caches
        /// components, and lazily instantiates the (initially inactive) eaten-particle.
        /// Called from both Awake and OnEnable, so every step guards against re-entry.
        /// </summary>
        public void Setup()
        {
            if (!isStatic && mobiles.IndexOf(gameObject) <= -1)
            {
                mobiles.Add(gameObject);
            }
            if (!isStatic)
            {
                StartScale();
            }
            body = GetComponent<Rigidbody2D>();
            photon = GetComponent<PhotonView>();
            if (null == particle)
            {
                particle = (GameObject) GameObject.Instantiate(eatenParticle, transform.position, Quaternion.identity);
                particle.SetActive(false);
            }
        }

        /// <summary>Resets the body to its uniform starting scale.</summary>
        public void StartScale()
        {
            Vector3 scale = transform.localScale;
            scale.x = startScale;
            scale.y = startScale;
            scale.z = startScale;
            transform.localScale = scale;
        }

        void FixedUpdate()
        {
            // Hide the eaten-particle once its display window has elapsed.
            // NOTE(review): particleResetTime starts at -1, so this branch also runs
            // every frame before the first OnEaten — harmless but redundant.
            if (null != gameObject && gameObject.activeSelf && particleResetTime <= Time.time)
            {
                particle.SetActive(false);
            }
            // Only the owning client processes local input; bots and statics are driven elsewhere.
            if (!photon.isMine || isBot || isStatic)
            {
                return;
            }
            UpdateMovement();
            UpdateQuit();
        }

        /// <summary>
        /// Maps the raw input axes to a unit velocity (8-way movement).
        /// </summary>
        private void UpdateMovement()
        {
            Vector2 velocity = body.velocity;
            if (Input.GetAxisRaw("Horizontal") < -0.5f)
            {
                velocity.x = -1.0f;
            }
            else if (0.5f < Input.GetAxisRaw("Horizontal"))
            {
                velocity.x = 1.0f;
            }
            if (Input.GetAxisRaw("Vertical") < -0.5f)
            {
                velocity.y = -1.0f;
            }
            else if (0.5f < Input.GetAxisRaw("Vertical"))
            {
                velocity.y = 1.0f;
            }
            body.velocity = velocity.normalized;
        }

        public void MoveToward(Vector3 target)
        {
            Move(target - transform.position);
        }

        public void MoveAwayFrom(Vector3 away)
        {
            Move(transform.position - away);
        }

        /// <summary>
        /// Sets a unit velocity in the given direction (zero when the direction is
        /// negligible) and hides the eaten-particle. Used for bot/AI steering.
        /// </summary>
        public void Move(Vector3 direction)
        {
            Vector2 velocity = body.velocity;
            velocity.x = direction.x;
            velocity.y = direction.y;
            if (0.00001f < velocity.magnitude)
            {
                body.velocity = velocity.normalized;
            }
            else
            {
                body.velocity = Vector2.zero;
            }
            if (null != gameObject && gameObject.activeSelf && null != particle)
            {
                particle.SetActive(false);
            }
        }

        /// <summary>Returns the body to the local origin and stops it.</summary>
        public void ResetPosition()
        {
            transform.localPosition = Vector3.zero;
            body.velocity = Vector2.zero;
        }

        private void UpdateQuit()
        {
            if (Input.GetKeyDown(KeyCode.Escape))
            {
                Application.Quit();
            }
        }

        /// <summary>
        /// On same-layer contact, the larger body eats the smaller one by sending
        /// it an OnEaten RPC (executed on all clients).
        /// </summary>
        void OnCollisionEnter2D(Collision2D other)
        {
            GameObject otherObject = other.gameObject;
            if (null == otherObject)
            {
                return;
            }
            if (gameObject.layer != otherObject.layer)
            {
                return;
            }
            if (MayEatSmaller(otherObject))
            {
                otherObject.GetComponent<PhotonView>().RPC(
                    "OnEaten", PhotonTargets.All);
            }
        }

        /// <summary>
        /// True (and grows this body) when the other body's radius times
        /// eatRadiusThreshold is within this body's radius. Radius is read from
        /// the X scale; growth is applied to X and Y only.
        /// </summary>
        bool MayEatSmaller(GameObject otherObject)
        {
            Vector3 otherScale = otherObject.transform.localScale;
            float otherRadius = Mathf.Abs(otherScale.x);
            Vector3 scale = transform.localScale;
            float radius = Mathf.Abs(scale.x);
            if (otherRadius * eatRadiusThreshold <= radius)
            {
                float gain = Geometry2D.RadiusDifference(radius, otherRadius);
                scale.x += gain;
                scale.y += gain;
                transform.localScale = scale;
                return true;
            }
            return false;
        }

        // Remote entry point: (re)activates this body at the given position.
        [PunRPC]
        void Spawn(Vector3 position)
        {
            Setup();
            if (null != gameObject &&
                !gameObject.activeSelf)
            {
                gameObject.SetActive(true);
            }
            transform.position = position;
        }

        /// <summary>
        /// Remote entry point: plays the eaten-particle at this body's position and
        /// scale, schedules its hide time, and deactivates this body.
        /// </summary>
        [PunRPC]
        public void OnEaten()
        {
            if (null == particle)
            {
                return;
            }
            Vector3 position = transform.position;
            // Pull the particle slightly toward the camera so it renders on top.
            position.z -= 2.0f;
            Vector3 scale = transform.localScale;
            scale.z = scale.x;
            scale.y = scale.x;
            particle.transform.position = position;
            particle.transform.localScale = scale;
            particle.SetActive(true);
            particleResetTime = Time.time + particleResetDelay;
            gameObject.SetActive(false);
        }
    }
}
/*
 * Copyright 2010-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */

/*
 * Do not modify this file. This file is generated from the ds-2015-04-16.normal.json service model.
 * (Reviewer note: generated code — regenerate from the service model instead of hand-editing.)
 */
using System;

using Amazon.Runtime;

namespace Amazon.DirectoryService
{

    /// <summary>
    /// Constants used for properties of type DirectorySize.
    /// </summary>
    public class DirectorySize : ConstantClass
    {

        /// <summary>
        /// Constant Large for DirectorySize
        /// </summary>
        public static readonly DirectorySize Large = new DirectorySize("Large");
        /// <summary>
        /// Constant Small for DirectorySize
        /// </summary>
        public static readonly DirectorySize Small = new DirectorySize("Small");

        /// <summary>
        /// This constant constructor does not need to be called if the constant
        /// you are attempting to use is already defined as a static instance of
        /// this class.
        /// This constructor should be used to construct constants that are not
        /// defined as statics, for instance if attempting to use a feature that is
        /// newer than the current version of the SDK.
        /// </summary>
        public DirectorySize(string value)
            : base(value)
        {
        }

        /// <summary>
        /// Finds the constant for the unique value.
        /// </summary>
        /// <param name="value">The unique value for the constant</param>
        /// <returns>The constant for the unique value</returns>
        public static DirectorySize FindValue(string value)
        {
            return FindValue<DirectorySize>(value);
        }

        /// <summary>
        /// Utility method to convert strings to the constant class.
        /// </summary>
        /// <param name="value">The string value to convert to the constant class.</param>
        /// <returns></returns>
        public static implicit operator DirectorySize(string value)
        {
            return FindValue(value);
        }
    }


    /// <summary>
    /// Constants used for properties of type DirectoryStage.
    /// </summary>
    public class DirectoryStage : ConstantClass
    {

        /// <summary>
        /// Constant Active for DirectoryStage
        /// </summary>
        public static readonly DirectoryStage Active = new DirectoryStage("Active");
        /// <summary>
        /// Constant Created for DirectoryStage
        /// </summary>
        public static readonly DirectoryStage Created = new DirectoryStage("Created");
        /// <summary>
        /// Constant Creating for DirectoryStage
        /// </summary>
        public static readonly DirectoryStage Creating = new DirectoryStage("Creating");
        /// <summary>
        /// Constant Deleted for DirectoryStage
        /// </summary>
        public static readonly DirectoryStage Deleted = new DirectoryStage("Deleted");
        /// <summary>
        /// Constant Deleting for DirectoryStage
        /// </summary>
        public static readonly DirectoryStage Deleting = new DirectoryStage("Deleting");
        /// <summary>
        /// Constant Failed for DirectoryStage
        /// </summary>
        public static readonly DirectoryStage Failed = new DirectoryStage("Failed");
        /// <summary>
        /// Constant Impaired for DirectoryStage
        /// </summary>
        public static readonly DirectoryStage Impaired = new DirectoryStage("Impaired");
        /// <summary>
        /// Constant Inoperable for DirectoryStage
        /// </summary>
        public static readonly DirectoryStage Inoperable = new DirectoryStage("Inoperable");
        /// <summary>
        /// Constant Requested for DirectoryStage
        /// </summary>
        public static readonly DirectoryStage Requested = new DirectoryStage("Requested");
        /// <summary>
        /// Constant RestoreFailed for DirectoryStage
        /// </summary>
        public static readonly DirectoryStage RestoreFailed = new DirectoryStage("RestoreFailed");
        /// <summary>
        /// Constant Restoring for DirectoryStage
        /// </summary>
        public static readonly DirectoryStage Restoring = new DirectoryStage("Restoring");

        /// <summary>
        /// This constant constructor does not need to be called if the constant
        /// you are attempting to use is already defined as a static instance of
        /// this class.
        /// This constructor should be used to construct constants that are not
        /// defined as statics, for instance if attempting to use a feature that is
        /// newer than the current version of the SDK.
        /// </summary>
        public DirectoryStage(string value)
            : base(value)
        {
        }

        /// <summary>
        /// Finds the constant for the unique value.
        /// </summary>
        /// <param name="value">The unique value for the constant</param>
        /// <returns>The constant for the unique value</returns>
        public static DirectoryStage FindValue(string value)
        {
            return FindValue<DirectoryStage>(value);
        }

        /// <summary>
        /// Utility method to convert strings to the constant class.
        /// </summary>
        /// <param name="value">The string value to convert to the constant class.</param>
        /// <returns></returns>
        public static implicit operator DirectoryStage(string value)
        {
            return FindValue(value);
        }
    }


    /// <summary>
    /// Constants used for properties of type DirectoryType.
    /// </summary>
    public class DirectoryType : ConstantClass
    {

        /// <summary>
        /// Constant ADConnector for DirectoryType
        /// </summary>
        public static readonly DirectoryType ADConnector = new DirectoryType("ADConnector");
        /// <summary>
        /// Constant SimpleAD for DirectoryType
        /// </summary>
        public static readonly DirectoryType SimpleAD = new DirectoryType("SimpleAD");

        /// <summary>
        /// This constant constructor does not need to be called if the constant
        /// you are attempting to use is already defined as a static instance of
        /// this class.
        /// This constructor should be used to construct constants that are not
        /// defined as statics, for instance if attempting to use a feature that is
        /// newer than the current version of the SDK.
        /// </summary>
        public DirectoryType(string value)
            : base(value)
        {
        }

        /// <summary>
        /// Finds the constant for the unique value.
        /// </summary>
        /// <param name="value">The unique value for the constant</param>
        /// <returns>The constant for the unique value</returns>
        public static DirectoryType FindValue(string value)
        {
            return FindValue<DirectoryType>(value);
        }

        /// <summary>
        /// Utility method to convert strings to the constant class.
        /// </summary>
        /// <param name="value">The string value to convert to the constant class.</param>
        /// <returns></returns>
        public static implicit operator DirectoryType(string value)
        {
            return FindValue(value);
        }
    }


    /// <summary>
    /// Constants used for properties of type RadiusAuthenticationProtocol.
    /// </summary>
    public class RadiusAuthenticationProtocol : ConstantClass
    {

        /// <summary>
        /// Constant CHAP for RadiusAuthenticationProtocol
        /// </summary>
        public static readonly RadiusAuthenticationProtocol CHAP = new RadiusAuthenticationProtocol("CHAP");
        /// <summary>
        /// Constant MSCHAPv1 for RadiusAuthenticationProtocol
        /// </summary>
        public static readonly RadiusAuthenticationProtocol MSCHAPv1 = new RadiusAuthenticationProtocol("MS-CHAPv1");
        /// <summary>
        /// Constant MSCHAPv2 for RadiusAuthenticationProtocol
        /// </summary>
        public static readonly RadiusAuthenticationProtocol MSCHAPv2 = new RadiusAuthenticationProtocol("MS-CHAPv2");
        /// <summary>
        /// Constant PAP for RadiusAuthenticationProtocol
        /// </summary>
        public static readonly RadiusAuthenticationProtocol PAP = new RadiusAuthenticationProtocol("PAP");

        /// <summary>
        /// This constant constructor does not need to be called if the constant
        /// you are attempting to use is already defined as a static instance of
        /// this class.
        /// This constructor should be used to construct constants that are not
        /// defined as statics, for instance if attempting to use a feature that is
        /// newer than the current version of the SDK.
        /// </summary>
        public RadiusAuthenticationProtocol(string value)
            : base(value)
        {
        }

        /// <summary>
        /// Finds the constant for the unique value.
        /// </summary>
        /// <param name="value">The unique value for the constant</param>
        /// <returns>The constant for the unique value</returns>
        public static RadiusAuthenticationProtocol FindValue(string value)
        {
            return FindValue<RadiusAuthenticationProtocol>(value);
        }

        /// <summary>
        /// Utility method to convert strings to the constant class.
        /// </summary>
        /// <param name="value">The string value to convert to the constant class.</param>
        /// <returns></returns>
        public static implicit operator RadiusAuthenticationProtocol(string value)
        {
            return FindValue(value);
        }
    }


    /// <summary>
    /// Constants used for properties of type RadiusStatus.
    /// </summary>
    public class RadiusStatus : ConstantClass
    {

        /// <summary>
        /// Constant Completed for RadiusStatus
        /// </summary>
        public static readonly RadiusStatus Completed = new RadiusStatus("Completed");
        /// <summary>
        /// Constant Creating for RadiusStatus
        /// </summary>
        public static readonly RadiusStatus Creating = new RadiusStatus("Creating");
        /// <summary>
        /// Constant Failed for RadiusStatus
        /// </summary>
        public static readonly RadiusStatus Failed = new RadiusStatus("Failed");

        /// <summary>
        /// This constant constructor does not need to be called if the constant
        /// you are attempting to use is already defined as a static instance of
        /// this class.
        /// This constructor should be used to construct constants that are not
        /// defined as statics, for instance if attempting to use a feature that is
        /// newer than the current version of the SDK.
        /// </summary>
        public RadiusStatus(string value)
            : base(value)
        {
        }

        /// <summary>
        /// Finds the constant for the unique value.
        /// </summary>
        /// <param name="value">The unique value for the constant</param>
        /// <returns>The constant for the unique value</returns>
        public static RadiusStatus FindValue(string value)
        {
            return FindValue<RadiusStatus>(value);
        }

        /// <summary>
        /// Utility method to convert strings to the constant class.
        /// </summary>
        /// <param name="value">The string value to convert to the constant class.</param>
        /// <returns></returns>
        public static implicit operator RadiusStatus(string value)
        {
            return FindValue(value);
        }
    }


    /// <summary>
    /// Constants used for properties of type SnapshotStatus.
    /// </summary>
    public class SnapshotStatus : ConstantClass
    {

        /// <summary>
        /// Constant Completed for SnapshotStatus
        /// </summary>
        public static readonly SnapshotStatus Completed = new SnapshotStatus("Completed");
        /// <summary>
        /// Constant Creating for SnapshotStatus
        /// </summary>
        public static readonly SnapshotStatus Creating = new SnapshotStatus("Creating");
        /// <summary>
        /// Constant Failed for SnapshotStatus
        /// </summary>
        public static readonly SnapshotStatus Failed = new SnapshotStatus("Failed");

        /// <summary>
        /// This constant constructor does not need to be called if the constant
        /// you are attempting to use is already defined as a static instance of
        /// this class.
        /// This constructor should be used to construct constants that are not
        /// defined as statics, for instance if attempting to use a feature that is
        /// newer than the current version of the SDK.
        /// </summary>
        public SnapshotStatus(string value)
            : base(value)
        {
        }

        /// <summary>
        /// Finds the constant for the unique value.
        /// </summary>
        /// <param name="value">The unique value for the constant</param>
        /// <returns>The constant for the unique value</returns>
        public static SnapshotStatus FindValue(string value)
        {
            return FindValue<SnapshotStatus>(value);
        }

        /// <summary>
        /// Utility method to convert strings to the constant class.
        /// </summary>
        /// <param name="value">The string value to convert to the constant class.</param>
        /// <returns></returns>
        public static implicit operator SnapshotStatus(string value)
        {
            return FindValue(value);
        }
    }


    /// <summary>
    /// Constants used for properties of type SnapshotType.
    /// </summary>
    public class SnapshotType : ConstantClass
    {

        /// <summary>
        /// Constant Auto for SnapshotType
        /// </summary>
        public static readonly SnapshotType Auto = new SnapshotType("Auto");
        /// <summary>
        /// Constant Manual for SnapshotType
        /// </summary>
        public static readonly SnapshotType Manual = new SnapshotType("Manual");

        /// <summary>
        /// This constant constructor does not need to be called if the constant
        /// you are attempting to use is already defined as a static instance of
        /// this class.
        /// This constructor should be used to construct constants that are not
        /// defined as statics, for instance if attempting to use a feature that is
        /// newer than the current version of the SDK.
        /// </summary>
        public SnapshotType(string value)
            : base(value)
        {
        }

        /// <summary>
        /// Finds the constant for the unique value.
        /// </summary>
        /// <param name="value">The unique value for the constant</param>
        /// <returns>The constant for the unique value</returns>
        public static SnapshotType FindValue(string value)
        {
            return FindValue<SnapshotType>(value);
        }

        /// <summary>
        /// Utility method to convert strings to the constant class.
        /// </summary>
        /// <param name="value">The string value to convert to the constant class.</param>
        /// <returns></returns>
        public static implicit operator SnapshotType(string value)
        {
            return FindValue(value);
        }
    }

}
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
using System.IO;
using OpenKh.Bbs;

namespace OpenKh.Tools.EpdEditor
{
    /// <summary>
    /// Editor form for Birth by Sleep Enemy Parameter Data (.epd) files:
    /// loads a file into the UI, and rebuilds an Epd model from the UI on save.
    /// </summary>
    public partial class EPDForm : Form
    {
        public EPDForm()
        {
            InitializeComponent();
        }

        // Model currently bound to the UI.
        Epd epd = new Epd();
        // Stream of the most recently opened .epd file.
        Stream epdFile;

        /// <summary>
        /// Pushes every field of the loaded Epd model into the form controls,
        /// creating one dynamic control per technique/drop/extra parameter.
        /// </summary>
        private void UpdateEPDData()
        {
            // General parameters: one checkbox per status-ailment flag.
            Epd.StatusAilment ailments = Epd.GetStatusAilment(epd);
            StatusAilment_checkbox_01.Checked = ailments.bFly;
            StatusAilment_checkbox_02.Checked = ailments.bSmallDamageReaction;
            StatusAilment_checkbox_03.Checked = ailments.bSmallDamageReactionOnly;
            StatusAilment_checkbox_04.Checked = ailments.bHitback;
            StatusAilment_checkbox_05.Checked = ailments.bPoison;
            StatusAilment_checkbox_06.Checked = ailments.bSlow;
            StatusAilment_checkbox_07.Checked = ailments.bStop;
            StatusAilment_checkbox_08.Checked = ailments.bBind;
            StatusAilment_checkbox_09.Checked = ailments.bFaint;
            StatusAilment_checkbox_10.Checked = ailments.bFreeze;
            StatusAilment_checkbox_11.Checked = ailments.bBurn;
            StatusAilment_checkbox_12.Checked = ailments.bConfuse;
            StatusAilment_checkbox_13.Checked = ailments.bBlind;
            StatusAilment_checkbox_14.Checked = ailments.bDeath;
            StatusAilment_checkbox_15.Checked = ailments.bZeroGravity;
            StatusAilment_checkbox_16.Checked = ailments.bMini;
            StatusAilment_checkbox_17.Checked = ailments.bMagnet;
            StatusAilment_checkbox_18.Checked = ailments.bDegen;
            StatusAilment_checkbox_19.Checked = ailments.bSleep;

            MaxHealthBox.Text = epd.generalParameters.Health.ToString();
            SizeBox.Text = epd.generalParameters.Size.ToString();
            EXPMultiplierBox.Text = epd.generalParameters.ExperienceMultiplier.ToString();
            PhysicalDamageBox.Text = epd.generalParameters.PhysicalDamageMultiplier.ToString();
            FireDamageBox.Text = epd.generalParameters.FireDamageMultiplier.ToString();
            IceDamageBox.Text = epd.generalParameters.IceDamageMultiplier.ToString();
            ThunderDamageBox.Text = epd.generalParameters.ThunderDamageMultiplier.ToString();
            DarknessDamageBox.Text = epd.generalParameters.DarknessDamageMultiplier.ToString();
            SpecialDamageBox.Text = epd.generalParameters.NonElementalDamageMultiplier.ToString();

            // Animation names: one text control per AnimationList entry, in order.
            // Assumes the panel holds at least as many controls as list entries.
            int i = 0;
            foreach (Control con in AnimationLayoutPanel.Controls)
            {
                con.Text = new String(epd.AnimationList[i]);
                i++;
            }

            // Other parameters.
            NumericDamageCeiling.Value = epd.otherParameters.DamageCeiling;
            NumericDamageFloor.Value = epd.otherParameters.DamageFloor;
            NumericWeight.Text = epd.otherParameters.fWeight.ToString();
            Epd.EffectivenessFlag flag = Epd.GetEffectivenessFlag(epd);
            NumericPoison.Value = flag.Poison;
            NumericStop.Value = flag.Stop;
            NumericBind.Value = flag.Bind;
            NumericFaint.Value = flag.Faint;
            NumericBlind.Value = flag.Blind;
            NumericMini.Value = flag.Mini;
            NumericPrizeboxProbability.Value = epd.otherParameters.PrizeBoxProbability;

            // Technique parameters: one TechControl per entry plus a trailing "add" control.
            for (int t = 0; t < epd.techniqueParameters.Count; t++)
            {
                TechControl techCon = new TechControl();
                techCon.TechParamGBox.Text = "Parameter " + (t + 1);
                techCon.TechniquePower.Text = epd.techniqueParameters[t].TechniquePowerCorrection.ToString();
                techCon.NumericTechniqueNumber.Value = epd.techniqueParameters[t].TechniqueNumber;
                // Kinds above 0x38 are out of the combo box range; clamp to index 0.
                techCon.AttackKind.SelectedIndex = (epd.techniqueParameters[t].TechniqueKind > 0x38 ?
                    0 : epd.techniqueParameters[t].TechniqueKind);
                techCon.AttackAttribute.SelectedIndex = epd.techniqueParameters[t].TechniqueAttribute % 8;
                techCon.NumericSuccessRate.Value = epd.techniqueParameters[t].SuccessRate;
                TechniqueLayout.Controls.Add(techCon);
            }
            AddTechParam techParamPlus = new AddTechParam();
            TechniqueLayout.Controls.Add(techParamPlus);

            // Drop parameters: one DropControl per entry plus a trailing "add" control.
            for (int d = 0; d < epd.dropParameters.Count; d++)
            {
                DropControl dropCon = new DropControl();
                dropCon.DropGBox.Text = "Drop Item " + (d + 1);
                dropCon.ItemComboBox.SelectedIndex = (int)epd.dropParameters[d].ItemIndex;
                dropCon.NumericItemCount.Value = epd.dropParameters[d].ItemCount;
                dropCon.NumericItemProbability.Value = epd.dropParameters[d].Probability;
                DroppedLayout.Controls.Add(dropCon);
            }
            AddDropParam dropParamPlus = new AddDropParam();
            DroppedLayout.Controls.Add(dropParamPlus);

            // Extra parameters: one ExtraControl per entry plus a trailing "add" control.
            for (int e = 0; e < epd.extraParameters.Count; e++)
            {
                ExtraControl extraCon = new ExtraControl();
                extraCon.ExtraParamGBox.Text = "Extra Param " + (e + 1);
                extraCon.ParameterName.Text = epd.extraParameters[e].ParameterName;
                extraCon.ParameterValue.Text = epd.extraParameters[e].ParameterValue.ToString();
                ExtraLayout.Controls.Add(extraCon);
            }
            AddExtraParam extraParamPlus = new AddExtraParam();
            ExtraLayout.Controls.Add(extraParamPlus);
        }

        /// <summary>
        /// Rebuilds the Epd model from the current state of the form controls,
        /// preserving the original file header. Counts/offsets are derived from the
        /// dynamic control panels (each holds its entries plus one "add" control).
        /// </summary>
        private void UpdateWriteInfo()
        {
            Epd.Header head = epd.header;
            epd = new Epd();
            epd.header = head;

            // General parameters.
            epd.generalParameters = new Epd.GeneralParameters();
            epd.generalParameters.StatusAilmentsFlag = Epd.GetStatusAilmentFromStates(
                StatusAilment_checkbox_01.Checked, StatusAilment_checkbox_02.Checked,
                StatusAilment_checkbox_03.Checked, StatusAilment_checkbox_04.Checked,
                StatusAilment_checkbox_05.Checked, StatusAilment_checkbox_06.Checked,
                StatusAilment_checkbox_07.Checked, StatusAilment_checkbox_08.Checked,
                StatusAilment_checkbox_09.Checked, StatusAilment_checkbox_10.Checked,
                StatusAilment_checkbox_11.Checked, StatusAilment_checkbox_12.Checked,
                StatusAilment_checkbox_13.Checked, StatusAilment_checkbox_14.Checked,
                StatusAilment_checkbox_15.Checked, StatusAilment_checkbox_16.Checked,
                StatusAilment_checkbox_17.Checked, StatusAilment_checkbox_18.Checked,
                StatusAilment_checkbox_19.Checked);
            epd.generalParameters.Health = float.Parse(MaxHealthBox.Text);
            epd.generalParameters.ExperienceMultiplier = float.Parse(EXPMultiplierBox.Text);
            epd.generalParameters.Size = uint.Parse(SizeBox.Text, System.Globalization.NumberStyles.Integer);
            epd.generalParameters.PhysicalDamageMultiplier = float.Parse(PhysicalDamageBox.Text);
            epd.generalParameters.FireDamageMultiplier = float.Parse(FireDamageBox.Text);
            epd.generalParameters.IceDamageMultiplier = float.Parse(IceDamageBox.Text);
            epd.generalParameters.ThunderDamageMultiplier = float.Parse(ThunderDamageBox.Text);
            epd.generalParameters.DarknessDamageMultiplier = float.Parse(DarknessDamageBox.Text);
            epd.generalParameters.NonElementalDamageMultiplier = float.Parse(SpecialDamageBox.Text);

            // Animation list: 4-char entries, NUL-padded. Copy at most 3 characters
            // so names shorter than 3 characters no longer throw IndexOutOfRange
            // (new char[4] is already zero-filled, so the terminator is implicit).
            epd.AnimationList = new List<char[]>();
            foreach (TextBox txt in AnimationLayoutPanel.Controls.OfType<TextBox>())
            {
                char[] arr = new char[4];
                string text = txt.Text;
                for (int c = 0; c < 3 && c < text.Length; c++)
                {
                    arr[c] = text[c];
                }
                epd.AnimationList.Add(arr);
            }

            // Other parameters. Counts exclude the trailing "add" control of each panel.
            epd.otherParameters = new Epd.OtherParameters();
            epd.otherParameters.DamageCeiling = Decimal.ToUInt16(NumericDamageCeiling.Value);
            epd.otherParameters.DamageFloor = Decimal.ToUInt16(NumericDamageFloor.Value);
            epd.otherParameters.fWeight = float.Parse(NumericWeight.Text);
            epd.otherParameters.EffectivenessFlag = Epd.GetEffectivenessFlagFromStates(
                decimal.ToUInt32(NumericPoison.Value), decimal.ToUInt32(NumericStop.Value),
                decimal.ToUInt32(NumericBind.Value), decimal.ToUInt32(NumericFaint.Value),
                decimal.ToUInt32(NumericBlind.Value), decimal.ToUInt32(NumericMini.Value));
            epd.otherParameters.PrizeBoxProbability = decimal.ToSByte(NumericPrizeboxProbability.Value);
            epd.otherParameters.padding = new byte[3];
            epd.otherParameters.TechniqueParameterCount = (uint)(TechniqueLayout.Controls.Count - 1 < 0 ? 0 : TechniqueLayout.Controls.Count - 1);
            epd.otherParameters.TechniqueParameterOffset = 0xA8;
            epd.otherParameters.DropItemsCount = (uint)(DroppedLayout.Controls.Count - 1 < 0 ? 0 : DroppedLayout.Controls.Count - 1);
            epd.otherParameters.DropItemsOffset = 0xA8 + (epd.otherParameters.TechniqueParameterCount * 8);
            epd.otherParameters.ExtraParametersCount = (uint)(ExtraLayout.Controls.Count - 1 < 0 ? 0 : ExtraLayout.Controls.Count - 1);
            epd.otherParameters.ExtraParametersOffset = 0xA8 + ((epd.otherParameters.TechniqueParameterCount + epd.otherParameters.DropItemsCount) * 8);

            // Technique parameters. OfType<> skips the "add" control directly instead
            // of relying on an InvalidCastException to terminate the loop.
            epd.techniqueParameters = new List<Epd.TechniqueParameters>();
            foreach (TechControl tech in TechniqueLayout.Controls.OfType<TechControl>())
            {
                Epd.TechniqueParameters param = new Epd.TechniqueParameters();
                param.TechniquePowerCorrection = float.Parse(tech.TechniquePower.Text);
                param.TechniqueNumber = decimal.ToByte(tech.NumericTechniqueNumber.Value);
                param.TechniqueKind = (byte)tech.AttackKind.SelectedIndex;
                param.TechniqueAttribute = (byte)tech.AttackAttribute.SelectedIndex;
                param.SuccessRate = decimal.ToByte(tech.NumericSuccessRate.Value);
                epd.techniqueParameters.Add(param);
            }

            // Drop parameters.
            epd.dropParameters = new List<Epd.DropParameters>();
            foreach (DropControl drop in DroppedLayout.Controls.OfType<DropControl>())
            {
                Epd.DropParameters param = new Epd.DropParameters();
                param.ItemIndex = (uint)drop.ItemComboBox.SelectedIndex;
                param.ItemCount = decimal.ToUInt16(drop.NumericItemCount.Value);
                param.Probability = decimal.ToUInt16(drop.NumericItemProbability.Value);
                epd.dropParameters.Add(param);
            }

            // Extra parameters.
            epd.extraParameters = new List<Epd.ExtraParameters>();
            foreach (ExtraControl extra in ExtraLayout.Controls.OfType<ExtraControl>())
            {
                Epd.ExtraParameters param = new Epd.ExtraParameters();
                param.ParameterName = extra.ParameterName.Text;
                param.ParameterValue = float.Parse(extra.ParameterValue.Text);
                epd.extraParameters.Add(param);
            }
        }

        /// <summary>
        /// Prompts for a .epd file, clears the dynamic panels, and loads it into the UI.
        /// </summary>
        private void LoadEPDButton_Click(object sender, EventArgs e)
        {
            OpenFileDialog dialog = new OpenFileDialog();
            dialog.Filter = "Enemy Parameter Data files (*.epd)|*.epd|All files (*.*)|*.*";
            DialogResult result = dialog.ShowDialog();
            if (result == DialogResult.OK)
            {
                TechniqueLayout.Controls.Clear();
                DroppedLayout.Controls.Clear();
                ExtraLayout.Controls.Clear();
                // using guarantees the stream is released even if parsing throws.
                using (epdFile = File.OpenRead(dialog.FileName))
                {
                    FileLoadedLabel.Text = "File currently loaded: " + dialog.FileName;
                    epd = Epd.Read(epdFile);
                    UpdateEPDData();
                    SaveEPDButton.Enabled = true;
                }
            }
        }

        /// <summary>
        /// Prompts for a destination and writes the current UI state as a .epd file.
        /// </summary>
        private void SaveEPDButton_Click(object sender, EventArgs e)
        {
            SaveFileDialog dialog = new SaveFileDialog();
            dialog.Filter = "Enemy Parameter Data files (*.epd)|*.epd|All files (*.*)|*.*";
            DialogResult result = dialog.ShowDialog();
            if (result == DialogResult.OK)
            {
                // File.Create truncates an existing file; File.OpenWrite does not,
                // which previously left stale trailing bytes when the new EPD was
                // smaller than the file being overwritten.
                using (Stream epdOut = File.Create(dialog.FileName))
                {
                    UpdateWriteInfo();
                    Epd.Write(epdOut, epd);
                }
                MessageBox.Show("File saved successfully!", "Information", MessageBoxButtons.OK, MessageBoxIcon.Information);
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Linq;
using System.Text;
using Xunit;

namespace System.Tests
{
    // Unit tests for System.BitConverter: argument validation, endianness
    // detection, and byte-level round-trips for every supported primitive.
    public static partial class BitConverterTests
    {
        [Fact]
        public static unsafe void IsLittleEndian()
        {
            // Reinterpret the low byte of a 16-bit 1: it is 1 exactly on
            // little-endian hardware, which is what the property must report.
            short s = 1;
            Assert.Equal(BitConverter.IsLittleEndian, *((byte*)&s) == 1);
        }

        [Fact]
        public static void ValueArgumentNull()
        {
            // Every To* overload must reject a null input array with the
            // parameter name "value".
            Assert.Throws<ArgumentNullException>("value", () => BitConverter.ToBoolean(null, 0));
            Assert.Throws<ArgumentNullException>("value", () => BitConverter.ToChar(null, 0));
            Assert.Throws<ArgumentNullException>("value", () => BitConverter.ToDouble(null, 0));
            Assert.Throws<ArgumentNullException>("value", () => BitConverter.ToInt16(null, 0));
            Assert.Throws<ArgumentNullException>("value", () => BitConverter.ToInt32(null, 0));
            Assert.Throws<ArgumentNullException>("value", () => BitConverter.ToInt64(null, 0));
            Assert.Throws<ArgumentNullException>("value", () => BitConverter.ToSingle(null, 0));
            Assert.Throws<ArgumentNullException>("value", () => BitConverter.ToUInt16(null, 0));
            Assert.Throws<ArgumentNullException>("value", () => BitConverter.ToUInt32(null, 0));
            Assert.Throws<ArgumentNullException>("value", () => BitConverter.ToUInt64(null, 0));
            Assert.Throws<ArgumentNullException>("value", () => BitConverter.ToString(null));
            Assert.Throws<ArgumentNullException>("value", () => BitConverter.ToString(null, 0));
            Assert.Throws<ArgumentNullException>("value", () => BitConverter.ToString(null, 0, 0));
        }

        [Fact]
        public static void StartIndexBeyondLength()
        {
            // startIndex values of -1, Length and Length + 1 are all outside
            // the array and must raise ArgumentOutOfRangeException.
            Assert.Throws<ArgumentOutOfRangeException>("startIndex", () => BitConverter.ToBoolean(new byte[1], -1));
            Assert.Throws<ArgumentOutOfRangeException>("startIndex", () => BitConverter.ToBoolean(new byte[1], 1));
            Assert.Throws<ArgumentOutOfRangeException>("startIndex", () => BitConverter.ToBoolean(new byte[1], 2));
            Assert.Throws<ArgumentOutOfRangeException>("startIndex", () => BitConverter.ToChar(new byte[2], -1));
            Assert.Throws<ArgumentOutOfRangeException>("startIndex", () => BitConverter.ToChar(new byte[2], 2));
            Assert.Throws<ArgumentOutOfRangeException>("startIndex", () => BitConverter.ToChar(new byte[2], 3));
            Assert.Throws<ArgumentOutOfRangeException>("startIndex", () => BitConverter.ToDouble(new byte[8], -1));
            Assert.Throws<ArgumentOutOfRangeException>("startIndex", () => BitConverter.ToDouble(new byte[8], 8));
            Assert.Throws<ArgumentOutOfRangeException>("startIndex", () => BitConverter.ToDouble(new byte[8], 9));
            Assert.Throws<ArgumentOutOfRangeException>("startIndex", () => BitConverter.ToInt16(new byte[2], -1));
            Assert.Throws<ArgumentOutOfRangeException>("startIndex", () => BitConverter.ToInt16(new byte[2], 2));
            Assert.Throws<ArgumentOutOfRangeException>("startIndex", () => BitConverter.ToInt16(new byte[2], 3));
            Assert.Throws<ArgumentOutOfRangeException>("startIndex", () => BitConverter.ToInt32(new byte[4], -1));
            Assert.Throws<ArgumentOutOfRangeException>("startIndex", () => BitConverter.ToInt32(new byte[4], 4));
            Assert.Throws<ArgumentOutOfRangeException>("startIndex", () => BitConverter.ToInt32(new byte[4], 5));
            Assert.Throws<ArgumentOutOfRangeException>("startIndex", () => BitConverter.ToInt64(new byte[8], -1));
            Assert.Throws<ArgumentOutOfRangeException>("startIndex", () => BitConverter.ToInt64(new byte[8], 8));
            Assert.Throws<ArgumentOutOfRangeException>("startIndex", () => BitConverter.ToInt64(new byte[8], 9));
            Assert.Throws<ArgumentOutOfRangeException>("startIndex", () => BitConverter.ToSingle(new byte[4], -1));
            Assert.Throws<ArgumentOutOfRangeException>("startIndex", () => BitConverter.ToSingle(new byte[4], 4));
            Assert.Throws<ArgumentOutOfRangeException>("startIndex", () => BitConverter.ToSingle(new byte[4], 5));
            Assert.Throws<ArgumentOutOfRangeException>("startIndex", () => BitConverter.ToUInt16(new byte[2], -1));
            Assert.Throws<ArgumentOutOfRangeException>("startIndex", () => BitConverter.ToUInt16(new byte[2], 2));
            Assert.Throws<ArgumentOutOfRangeException>("startIndex", () => BitConverter.ToUInt16(new byte[2], 3));
            Assert.Throws<ArgumentOutOfRangeException>("startIndex", () => BitConverter.ToUInt32(new byte[4], -1));
            Assert.Throws<ArgumentOutOfRangeException>("startIndex", () => BitConverter.ToUInt32(new byte[4], 4));
            Assert.Throws<ArgumentOutOfRangeException>("startIndex", () => BitConverter.ToUInt32(new byte[4], 5));
            Assert.Throws<ArgumentOutOfRangeException>("startIndex", () => BitConverter.ToUInt64(new byte[8], -1));
            Assert.Throws<ArgumentOutOfRangeException>("startIndex", () => BitConverter.ToUInt64(new byte[8], 8));
            Assert.Throws<ArgumentOutOfRangeException>("startIndex", () => BitConverter.ToUInt64(new byte[8], 9));
            Assert.Throws<ArgumentOutOfRangeException>("startIndex", () => BitConverter.ToString(new byte[1], -1));
            Assert.Throws<ArgumentOutOfRangeException>("startIndex", () => BitConverter.ToString(new byte[1], 1));
            Assert.Throws<ArgumentOutOfRangeException>("startIndex", () => BitConverter.ToString(new byte[1], 2));
            Assert.Throws<ArgumentOutOfRangeException>("startIndex", () => BitConverter.ToString(new byte[1], -1, 0));
            Assert.Throws<ArgumentOutOfRangeException>("startIndex", () => BitConverter.ToString(new byte[1], 1, 0));
            Assert.Throws<ArgumentOutOfRangeException>("startIndex", () => BitConverter.ToString(new byte[1], 2, 0));
            // A negative length is validated separately under the "length" name.
            Assert.Throws<ArgumentOutOfRangeException>("length", () => BitConverter.ToString(new byte[1], 0, -1));
        }

        [Fact]
        public static void StartIndexPlusNeededLengthTooLong()
        {
            // startIndex is inside the array but too few bytes remain: the
            // one-byte reader throws ArgumentOutOfRangeException, while the
            // multi-byte readers throw ArgumentException for "value".
            Assert.Throws<ArgumentOutOfRangeException>("startIndex", () => BitConverter.ToBoolean(new byte[0], 0));
            Assert.Throws<ArgumentException>("value", () => BitConverter.ToChar(new byte[2], 1));
            Assert.Throws<ArgumentException>("value", () => BitConverter.ToDouble(new byte[8], 1));
            Assert.Throws<ArgumentException>("value", () => BitConverter.ToInt16(new byte[2], 1));
            Assert.Throws<ArgumentException>("value", () => BitConverter.ToInt32(new byte[4], 1));
            Assert.Throws<ArgumentException>("value", () => BitConverter.ToInt64(new byte[8], 1));
            Assert.Throws<ArgumentException>("value", () => BitConverter.ToSingle(new byte[4], 1));
            Assert.Throws<ArgumentException>("value", () => BitConverter.ToUInt16(new byte[2], 1));
            Assert.Throws<ArgumentException>("value", () => BitConverter.ToUInt32(new byte[4], 1));
            Assert.Throws<ArgumentException>("value", () => BitConverter.ToUInt64(new byte[8], 1));
            Assert.Throws<ArgumentException>("value", () => BitConverter.ToString(new byte[2], 1, 2));
        }

        [Fact]
        public static void DoubleToInt64Bits()
        {
            // Bit-level reinterpretation of a double must be exact and lossless
            // in both directions.
            Double input = 123456.3234;
            Int64 result = BitConverter.DoubleToInt64Bits(input);
            Assert.Equal(4683220267154373240L, result);
            Double roundtripped = BitConverter.Int64BitsToDouble(result);
            Assert.Equal(input, roundtripped);
        }

        [Fact]
        public static void RoundtripBoolean()
        {
            // Booleans marshal to a single byte: 1 for true, 0 for false.
            Byte[] bytes = BitConverter.GetBytes(true);
            Assert.Equal(1, bytes.Length);
            Assert.Equal(1, bytes[0]);
            Assert.True(BitConverter.ToBoolean(bytes, 0));

            bytes = BitConverter.GetBytes(false);
            Assert.Equal(1, bytes.Length);
            Assert.Equal(0, bytes[0]);
            Assert.False(BitConverter.ToBoolean(bytes, 0));
        }

        [Fact]
        public static void RoundtripChar()
        {
            // Expected byte images below are given in little-endian order;
            // VerifyRoundtrip reverses them on big-endian hardware.
            Char input = 'A';
            Byte[] expected = { 0x41, 0 };
            VerifyRoundtrip(BitConverter.GetBytes, BitConverter.ToChar, input, expected);
        }

        [Fact]
        public static void RoundtripDouble()
        {
            Double input = 123456.3234;
            Byte[] expected = { 0x78, 0x7a, 0xa5, 0x2c, 0x05, 0x24, 0xfe, 0x40 };
            VerifyRoundtrip(BitConverter.GetBytes, BitConverter.ToDouble, input, expected);
        }

        [Fact]
        public static void RoundtripSingle()
        {
            Single input = 8392.34f;
            Byte[] expected = { 0x5c, 0x21, 0x03, 0x46 };
            VerifyRoundtrip(BitConverter.GetBytes, BitConverter.ToSingle, input, expected);
        }

        [Fact]
        public static void RoundtripInt16()
        {
            Int16 input = 0x1234;
            Byte[] expected = { 0x34, 0x12 };
            VerifyRoundtrip(BitConverter.GetBytes, BitConverter.ToInt16, input, expected);
        }

        [Fact]
        public static void RoundtripInt32()
        {
            Int32 input = 0x12345678;
            Byte[] expected = { 0x78, 0x56, 0x34, 0x12 };
            VerifyRoundtrip(BitConverter.GetBytes, BitConverter.ToInt32, input, expected);
        }

        [Fact]
        public static void RoundtripInt64()
        {
            Int64 input = 0x0123456789abcdef;
            Byte[] expected = { 0xef, 0xcd, 0xab, 0x89, 0x67, 0x45, 0x23, 0x01 };
            VerifyRoundtrip(BitConverter.GetBytes, BitConverter.ToInt64, input, expected);
        }

        [Fact]
        public static void RoundtripUInt16()
        {
            UInt16 input = 0x1234;
            Byte[] expected = { 0x34, 0x12 };
            VerifyRoundtrip(BitConverter.GetBytes, BitConverter.ToUInt16, input, expected);
        }

        [Fact]
        public static void RoundtripUInt32()
        {
            UInt32 input = 0x12345678;
            Byte[] expected = { 0x78, 0x56, 0x34, 0x12 };
            VerifyRoundtrip(BitConverter.GetBytes, BitConverter.ToUInt32, input, expected);
        }

        [Fact]
        public static void RoundtripUInt64()
        {
            UInt64 input = 0x0123456789abcdef;
            Byte[] expected = { 0xef, 0xcd, 0xab, 0x89, 0x67, 0x45, 0x23, 0x01 };
            VerifyRoundtrip(BitConverter.GetBytes, BitConverter.ToUInt64, input, expected);
        }

        [Fact]
        public static void RoundtripString()
        {
            // ToString renders hyphen-separated upper-case hex pairs, and the
            // empty-input cases return the cached string.Empty instance.
            Byte[] bytes = { 0x12, 0x34, 0x56, 0x78, 0x9a };
            Assert.Equal("12-34-56-78-9A", BitConverter.ToString(bytes));
            Assert.Equal("56-78-9A", BitConverter.ToString(bytes, 2));
            Assert.Equal("56", BitConverter.ToString(bytes, 2, 1));
            Assert.Same(string.Empty, BitConverter.ToString(new byte[0]));
            Assert.Same(string.Empty, BitConverter.ToString(new byte[3], 1, 0));
        }

        [Fact]
        public static void ToString_ByteArray_Long()
        {
            // Exercises the long-input formatting path with a 1024-byte array
            // whose expected output is computed independently with LINQ.
            byte[] bytes = Enumerable.Range(0, 256 * 4).Select(i => unchecked((byte)i)).ToArray();
            string expected = string.Join("-", bytes.Select(b => b.ToString("X2")));
            Assert.Equal(expected, BitConverter.ToString(bytes));
            // Substring(3, len - 6) strips the leading "XX-" and trailing "-XX"
            // to match the (1, Length - 2) slice of the byte array.
            Assert.Equal(expected.Substring(3, expected.Length - 6), BitConverter.ToString(bytes, 1, bytes.Length - 2));
        }

        [Fact]
        public static void ToString_ByteArrayTooLong_Throws()
        {
            byte[] arr;
            try
            {
                // int.MaxValue / 3 + 1 bytes would require a string longer than
                // the maximum length (each byte renders as 3 characters).
                arr = new byte[int.MaxValue / 3 + 1];
            }
            catch (OutOfMemoryException)
            {
                // Exit out of the test if we don't have an enough contiguous memory
                // available to create a big enough array.
                return;
            }

            Assert.Throws<ArgumentOutOfRangeException>("length", () => BitConverter.ToString(arr));
        }

        // Shared round-trip checker: value -> bytes -> value, with expectedBytes
        // supplied in little-endian order (reversed here for big-endian).
        private static void VerifyRoundtrip<TInput>(Func<TInput, Byte[]> getBytes, Func<Byte[], int, TInput> convertBack, TInput input, Byte[] expectedBytes)
        {
            Byte[] bytes = getBytes(input);
            Assert.Equal(expectedBytes.Length, bytes.Length);

            if (!BitConverter.IsLittleEndian)
            {
                Array.Reverse(expectedBytes);
            }

            Assert.Equal(expectedBytes, bytes);
            Assert.Equal(input, convertBack(bytes, 0));

            // Also try unaligned startIndex
            byte[] longerBytes = new byte[bytes.Length + 1];
            longerBytes[0] = 0;
            Array.Copy(bytes, 0, longerBytes, 1, bytes.Length);
            Assert.Equal(input, convertBack(longerBytes, 1));
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Diagnostics;
using System.Runtime;
using System.Runtime.InteropServices;

using Internal.Runtime;
using Internal.Runtime.Augments;
using Internal.Runtime.TypeLoader;

using System.Collections.Generic;
using System.Threading;

using Internal.Metadata.NativeFormat;
using Internal.TypeSystem;

namespace Internal.Runtime.TypeLoader
{
    // Unsafe helpers that reinterpret a RuntimeTypeHandle as a raw EEType*
    // and poke fields of dynamically created EETypes in place. The Set*
    // methods mutate the underlying EEType directly, so they are only valid
    // for types built by this type loader.
    internal static class RuntimeTypeHandleEETypeExtensions
    {
        // A RuntimeTypeHandle is a single pointer-sized value wrapping the
        // EEType*, so the handle's storage can be reinterpreted directly.
        public static unsafe EEType* ToEETypePtr(this RuntimeTypeHandle rtth)
        {
            return (EEType*)(*(IntPtr*)&rtth);
        }

        public static unsafe IntPtr ToIntPtr(this RuntimeTypeHandle rtth)
        {
            return *(IntPtr*)&rtth;
        }

        public static unsafe bool IsDynamicType(this RuntimeTypeHandle rtth)
        {
            return rtth.ToEETypePtr()->IsDynamicType;
        }

        public static unsafe int GetNumVtableSlots(this RuntimeTypeHandle rtth)
        {
            return rtth.ToEETypePtr()->NumVtableSlots;
        }

        public static unsafe IntPtr GetDictionary(this RuntimeTypeHandle rtth)
        {
            return EETypeCreator.GetDictionary(rtth.ToEETypePtr());
        }

        // Stores a generic dictionary pointer directly into the given vtable
        // slot (the vtable begins immediately after the fixed EEType header).
        public static unsafe void SetDictionary(this RuntimeTypeHandle rtth, int dictionarySlot, IntPtr dictionary)
        {
            Debug.Assert(rtth.ToEETypePtr()->IsDynamicType && dictionarySlot < rtth.GetNumVtableSlots());
            *(IntPtr*)((byte*)rtth.ToEETypePtr() + sizeof(EEType) + dictionarySlot * IntPtr.Size) = dictionary;
        }

        public static unsafe void SetInterface(this RuntimeTypeHandle rtth, int interfaceIndex, RuntimeTypeHandle interfaceType)
        {
            rtth.ToEETypePtr()->InterfaceMap[interfaceIndex].InterfaceType = interfaceType.ToEETypePtr();
        }

        public static unsafe void SetGenericDefinition(this RuntimeTypeHandle rtth, RuntimeTypeHandle genericDefinitionHandle)
        {
            rtth.ToEETypePtr()->GenericDefinition = genericDefinitionHandle.ToEETypePtr();
        }

        public static unsafe void SetGenericArgument(this RuntimeTypeHandle rtth, int argumentIndex, RuntimeTypeHandle argumentType)
        {
            rtth.ToEETypePtr()->GenericArguments[argumentIndex].Value = argumentType.ToEETypePtr();
        }

        public static unsafe void SetNullableType(this RuntimeTypeHandle rtth, RuntimeTypeHandle T_typeHandle)
        {
            rtth.ToEETypePtr()->NullableType = T_typeHandle.ToEETypePtr();
        }

        public static unsafe void SetRelatedParameterType(this RuntimeTypeHandle rtth, RuntimeTypeHandle relatedTypeHandle)
        {
            rtth.ToEETypePtr()->RelatedParameterType = relatedTypeHandle.ToEETypePtr();
        }

        public static unsafe void SetParameterizedTypeShape(this RuntimeTypeHandle rtth, uint value)
        {
            rtth.ToEETypePtr()->ParameterizedTypeShape = value;
        }

        public static unsafe void SetBaseType(this RuntimeTypeHandle rtth, RuntimeTypeHandle baseTypeHandle)
        {
            rtth.ToEETypePtr()->BaseType = baseTypeHandle.ToEETypePtr();
        }

        public static unsafe void SetComponentSize(this RuntimeTypeHandle rtth, UInt16 componentSize)
        {
            rtth.ToEETypePtr()->ComponentSize = componentSize;
        }
    }

    // Small raw-memory utilities used by the dynamic EEType builder.
    internal class MemoryHelpers
    {
        // Rounds val up to the next multiple of alignment.
        public static int AlignUp(int val, int alignment)
        {
            Debug.Assert(val >= 0 && alignment >= 0);

            // alignment must be a power of 2 for this implementation to work (need modulo otherwise)
            Debug.Assert(0 == (alignment & (alignment - 1)));
            int result = (val + (alignment - 1)) & ~(alignment - 1);
            Debug.Assert(result >= val);      // check for overflow

            return result;
        }

        // Simple byte-by-byte fill; length is in bytes.
        public static unsafe void Memset(IntPtr destination, int length, byte value)
        {
            byte* pbDest = (byte*)destination.ToPointer();
            while (length > 0)
            {
                *pbDest = value;
                pbDest++;
                length--;
            }
        }

        public static IntPtr AllocateMemory(int cbBytes)
        {
            return InteropExtensions.MemAlloc(new UIntPtr((uint)cbBytes));
        }

        public static void FreeMemory(IntPtr memoryPtrToFree)
        {
            InteropExtensions.MemFree(memoryPtrToFree);
        }
    }

    // Builds EETypes for types constructed dynamically at runtime.
    internal unsafe class EETypeCreator
    {
        private static IntPtr s_emptyGCDesc;

        // Allocates and populates a new dynamic EEType, using pTemplateEEType
        // as the layout template when one exists. The method body continues
        // past this chunk.
        private static void CreateEETypeWorker(EEType* pTemplateEEType, UInt32 hashCodeOfNewType,
            int arity, bool requireVtableSlotMapping, TypeBuilderState state)
{ bool successful = false; IntPtr eeTypePtrPlusGCDesc = IntPtr.Zero; IntPtr dynamicDispatchMapPtr = IntPtr.Zero; DynamicModule* dynamicModulePtr = null; IntPtr gcStaticData = IntPtr.Zero; IntPtr gcStaticsIndirection = IntPtr.Zero; try { Debug.Assert((pTemplateEEType != null) || (state.TypeBeingBuilt as MetadataType != null)); // In some situations involving arrays we can find as a template a dynamically generated type. // In that case, the correct template would be the template used to create the dynamic type in the first // place. if (pTemplateEEType != null && pTemplateEEType->IsDynamicType) { pTemplateEEType = pTemplateEEType->DynamicTemplateType; } ModuleInfo moduleInfo = TypeLoaderEnvironment.GetModuleInfoForType(state.TypeBeingBuilt); dynamicModulePtr = moduleInfo.DynamicModulePtr; Debug.Assert(dynamicModulePtr != null); bool requiresDynamicDispatchMap = requireVtableSlotMapping && (pTemplateEEType != null) && pTemplateEEType->HasDispatchMap; uint valueTypeFieldPaddingEncoded = 0; int baseSize = 0; bool isValueType; bool hasFinalizer; bool isNullable; bool isArray; bool isGeneric; ushort componentSize = 0; ushort flags; ushort runtimeInterfacesLength = 0; bool isGenericEETypeDef = false; bool isAbstractClass; bool isByRefLike; #if EETYPE_TYPE_MANAGER IntPtr typeManager = IntPtr.Zero; #endif if (state.RuntimeInterfaces != null) { runtimeInterfacesLength = checked((ushort)state.RuntimeInterfaces.Length); } if (pTemplateEEType != null) { valueTypeFieldPaddingEncoded = EEType.ComputeValueTypeFieldPaddingFieldValue( pTemplateEEType->ValueTypeFieldPadding, (uint)pTemplateEEType->FieldAlignmentRequirement); baseSize = (int)pTemplateEEType->BaseSize; isValueType = pTemplateEEType->IsValueType; hasFinalizer = pTemplateEEType->IsFinalizable; isNullable = pTemplateEEType->IsNullable; componentSize = pTemplateEEType->ComponentSize; flags = pTemplateEEType->Flags; isArray = pTemplateEEType->IsArray; isGeneric = pTemplateEEType->IsGeneric; isAbstractClass = 
pTemplateEEType->IsAbstract && !pTemplateEEType->IsInterface; isByRefLike = pTemplateEEType->IsByRefLike; #if EETYPE_TYPE_MANAGER typeManager = pTemplateEEType->PointerToTypeManager; #endif Debug.Assert(pTemplateEEType->NumInterfaces == runtimeInterfacesLength); } else if (state.TypeBeingBuilt.IsGenericDefinition) { flags = (ushort)EETypeKind.GenericTypeDefEEType; isValueType = state.TypeBeingBuilt.IsValueType; if (isValueType) flags |= (ushort)EETypeFlags.ValueTypeFlag; if (state.TypeBeingBuilt.IsInterface) flags |= (ushort)EETypeFlags.IsInterfaceFlag; hasFinalizer = false; isArray = false; isNullable = false; isGeneric = false; isGenericEETypeDef = true; isAbstractClass = false; isByRefLike = false; componentSize = checked((ushort)state.TypeBeingBuilt.Instantiation.Length); baseSize = 0; } else { isValueType = state.TypeBeingBuilt.IsValueType; hasFinalizer = state.TypeBeingBuilt.HasFinalizer; isNullable = state.TypeBeingBuilt.GetTypeDefinition().IsNullable; flags = EETypeBuilderHelpers.ComputeFlags(state.TypeBeingBuilt); isArray = false; isGeneric = state.TypeBeingBuilt.HasInstantiation; isAbstractClass = (state.TypeBeingBuilt is MetadataType) && ((MetadataType)state.TypeBeingBuilt).IsAbstract && !state.TypeBeingBuilt.IsInterface; isByRefLike = (state.TypeBeingBuilt is DefType) && ((DefType)state.TypeBeingBuilt).IsByRefLike; if (state.TypeBeingBuilt.HasVariance) { state.GenericVarianceFlags = new int[state.TypeBeingBuilt.Instantiation.Length]; int i = 0; foreach (GenericParameterDesc gpd in state.TypeBeingBuilt.GetTypeDefinition().Instantiation) { state.GenericVarianceFlags[i] = (int)gpd.Variance; i++; } Debug.Assert(i == state.GenericVarianceFlags.Length); } } // TODO! 
Change to if template is Universal or non-Existent if (state.TypeSize.HasValue) { baseSize = state.TypeSize.Value; int baseSizeBeforeAlignment = baseSize; baseSize = MemoryHelpers.AlignUp(baseSize, IntPtr.Size); if (isValueType) { // Compute the valuetype padding size based on size before adding the object type pointer field to the size uint cbValueTypeFieldPadding = (uint)(baseSize - baseSizeBeforeAlignment); // Add Object type pointer field to base size baseSize += IntPtr.Size; valueTypeFieldPaddingEncoded = (uint)EEType.ComputeValueTypeFieldPaddingFieldValue(cbValueTypeFieldPadding, (uint)state.FieldAlignment.Value); } // Minimum base size is 3 pointers, and requires us to bump the size of an empty class type if (baseSize <= IntPtr.Size) { // ValueTypes should already have had their size bumped up by the normal type layout process Debug.Assert(!isValueType); baseSize += IntPtr.Size; } // Add sync block skew baseSize += IntPtr.Size; // Minimum basesize is 3 pointers Debug.Assert(baseSize >= (IntPtr.Size * 3)); } // Optional fields encoding int cbOptionalFieldsSize; OptionalFieldsRuntimeBuilder optionalFields; { optionalFields = new OptionalFieldsRuntimeBuilder(pTemplateEEType != null ? 
pTemplateEEType->OptionalFieldsPtr : null); UInt32 rareFlags = optionalFields.GetFieldValue(EETypeOptionalFieldTag.RareFlags, 0); rareFlags |= (uint)EETypeRareFlags.IsDynamicTypeFlag; // Set the IsDynamicTypeFlag rareFlags &= ~(uint)EETypeRareFlags.NullableTypeViaIATFlag; // Remove the NullableTypeViaIATFlag flag rareFlags &= ~(uint)EETypeRareFlags.HasSealedVTableEntriesFlag;// Remove the HasSealedVTableEntriesFlag // we'll set IsDynamicTypeWithSealedVTableEntriesFlag instead // Set the IsDynamicTypeWithSealedVTableEntriesFlag if needed if (state.NumSealedVTableEntries > 0) rareFlags |= (uint)EETypeRareFlags.IsDynamicTypeWithSealedVTableEntriesFlag; if (requiresDynamicDispatchMap) rareFlags |= (uint)EETypeRareFlags.HasDynamicallyAllocatedDispatchMapFlag; if (state.NonGcDataSize != 0) rareFlags |= (uint)EETypeRareFlags.IsDynamicTypeWithNonGcStatics; if (state.GcDataSize != 0) rareFlags |= (uint)EETypeRareFlags.IsDynamicTypeWithGcStatics; if (state.ThreadDataSize != 0) rareFlags |= (uint)EETypeRareFlags.IsDynamicTypeWithThreadStatics; #if ARM if (state.FieldAlignment == 8) rareFlags |= (uint)EETypeRareFlags.RequiresAlign8Flag; else rareFlags &= ~(uint)EETypeRareFlags.RequiresAlign8Flag; if (state.IsHFA) rareFlags |= (uint)EETypeRareFlags.IsHFAFlag; else rareFlags &= ~(uint)EETypeRareFlags.IsHFAFlag; #endif if (state.HasStaticConstructor) rareFlags |= (uint)EETypeRareFlags.HasCctorFlag; else rareFlags &= ~(uint)EETypeRareFlags.HasCctorFlag; if (isAbstractClass) rareFlags |= (uint)EETypeRareFlags.IsAbstractClassFlag; else rareFlags &= ~(uint)EETypeRareFlags.IsAbstractClassFlag; if (isByRefLike) rareFlags |= (uint)EETypeRareFlags.IsByRefLikeFlag; else rareFlags &= ~(uint)EETypeRareFlags.IsByRefLikeFlag; rareFlags |= (uint)EETypeRareFlags.HasDynamicModuleFlag; optionalFields.SetFieldValue(EETypeOptionalFieldTag.RareFlags, rareFlags); // Dispatch map is fetched either from template type, or from the dynamically allocated DispatchMap field 
optionalFields.ClearField(EETypeOptionalFieldTag.DispatchMap); optionalFields.ClearField(EETypeOptionalFieldTag.ValueTypeFieldPadding); if (valueTypeFieldPaddingEncoded != 0) optionalFields.SetFieldValue(EETypeOptionalFieldTag.ValueTypeFieldPadding, valueTypeFieldPaddingEncoded); // Compute size of optional fields encoding cbOptionalFieldsSize = optionalFields.Encode(); Debug.Assert(cbOptionalFieldsSize > 0); } // Note: The number of vtable slots on the EEType to create is not necessary equal to the number of // vtable slots on the template type for universal generics (see ComputeVTableLayout) ushort numVtableSlots = state.NumVTableSlots; // Compute the EEType size and allocate it EEType* pEEType; { // In order to get the size of the EEType to allocate we need the following information // 1) The number of VTable slots (from the TypeBuilderState) // 2) The number of Interfaces (from the template) // 3) Whether or not there is a finalizer (from the template) // 4) Optional fields size // 5) Whether or not the type is nullable (from the template) // 6) Whether or not the type has sealed virtuals (from the TypeBuilderState) int cbEEType = (int)EEType.GetSizeofEEType( numVtableSlots, runtimeInterfacesLength, hasFinalizer, true, isNullable, state.NumSealedVTableEntries > 0, isGeneric, state.NonGcDataSize != 0, state.GcDataSize != 0, state.ThreadDataSize != 0); // Dynamic types have an extra pointer-sized field that contains a pointer to their template type cbEEType += IntPtr.Size; // Check if we need another pointer sized field for a dynamic DispatchMap cbEEType += (requiresDynamicDispatchMap ? 
IntPtr.Size : 0); // Add another pointer sized field for a DynamicModule cbEEType += IntPtr.Size; int cbGCDesc = GetInstanceGCDescSize(state, pTemplateEEType, isValueType, isArray); int cbGCDescAligned = MemoryHelpers.AlignUp(cbGCDesc, IntPtr.Size); // Allocate enough space for the EEType + gcDescSize eeTypePtrPlusGCDesc = MemoryHelpers.AllocateMemory(cbGCDescAligned + cbEEType + cbOptionalFieldsSize); // Get the EEType pointer, and the template EEType pointer pEEType = (EEType*)(eeTypePtrPlusGCDesc + cbGCDescAligned); state.HalfBakedRuntimeTypeHandle = pEEType->ToRuntimeTypeHandle(); // Set basic EEType fields pEEType->ComponentSize = componentSize; pEEType->Flags = flags; pEEType->BaseSize = (uint)baseSize; pEEType->NumVtableSlots = numVtableSlots; pEEType->NumInterfaces = runtimeInterfacesLength; pEEType->HashCode = hashCodeOfNewType; #if EETYPE_TYPE_MANAGER pEEType->PointerToTypeManager = typeManager; #endif // Write the GCDesc bool isSzArray = isArray ? state.ArrayRank < 1 : false; int arrayRank = isArray ? state.ArrayRank.Value : 0; CreateInstanceGCDesc(state, pTemplateEEType, pEEType, baseSize, cbGCDesc, isValueType, isArray, isSzArray, arrayRank); Debug.Assert(pEEType->HasGCPointers == (cbGCDesc != 0)); #if GENERICS_FORCE_USG if (state.NonUniversalTemplateType != null) { Debug.Assert(state.NonUniversalInstanceGCDescSize == cbGCDesc, "Non-universal instance GCDesc size not matching with universal GCDesc size!"); Debug.Assert(cbGCDesc == 0 || pEEType->HasGCPointers); // The TestGCDescsForEquality helper will compare 2 GCDescs for equality, 4 bytes at a time (GCDesc contents treated as integers), and will read the // GCDesc data in *reverse* order for instance GCDescs (subtracts 4 from the pointer values at each iteration). 
// - For the first GCDesc, we use (pEEType - 4) to point to the first 4-byte integer directly preceeding the EEType // - For the second GCDesc, given that the state.NonUniversalInstanceGCDesc already points to the first byte preceeding the template EEType, we // subtract 3 to point to the first 4-byte integer directly preceeding the template EEtype TestGCDescsForEquality(new IntPtr((byte*)pEEType - 4), state.NonUniversalInstanceGCDesc - 3, cbGCDesc, true); } #endif // Copy the encoded optional fields buffer to the newly allocated memory, and update the OptionalFields field on the EEType // It is important to set the optional fields first on the newly created EEType, because all other 'setters' // will assert that the type is dynamic, just to make sure we are not making any changes to statically compiled types pEEType->OptionalFieldsPtr = (byte*)pEEType + cbEEType; optionalFields.WriteToEEType(pEEType, cbOptionalFieldsSize); #if CORERT pEEType->PointerToTypeManager = PermanentAllocatedMemoryBlobs.GetPointerToIntPtr(moduleInfo.Handle.GetIntPtrUNSAFE()); #endif pEEType->DynamicModule = dynamicModulePtr; // Copy VTable entries from template type int numSlotsFilled = 0; IntPtr* pVtable = (IntPtr*)((byte*)pEEType + sizeof(EEType)); if (pTemplateEEType != null) { IntPtr* pTemplateVtable = (IntPtr*)((byte*)pTemplateEEType + sizeof(EEType)); for (int i = 0; i < pTemplateEEType->NumVtableSlots; i++) { int vtableSlotInDynamicType = requireVtableSlotMapping ? state.VTableSlotsMapping.GetVTableSlotInTargetType(i) : i; if (vtableSlotInDynamicType != -1) { Debug.Assert(vtableSlotInDynamicType < numVtableSlots); IntPtr dictionaryPtrValue; if (requireVtableSlotMapping && state.VTableSlotsMapping.IsDictionarySlot(i, out dictionaryPtrValue)) { // This must be the dictionary pointer value of one of the base types of the // current universal generic type being constructed. 
pVtable[vtableSlotInDynamicType] = dictionaryPtrValue; // Assert that the current template vtable slot is also a NULL value since all // universal generic template types have NULL dictionary slot values in their vtables Debug.Assert(pTemplateVtable[i] == IntPtr.Zero); } else { pVtable[vtableSlotInDynamicType] = pTemplateVtable[i]; } numSlotsFilled++; } } } else if (isGenericEETypeDef) { // If creating a Generic Type Definition Debug.Assert(pEEType->NumVtableSlots == 0); } else { #if SUPPORTS_NATIVE_METADATA_TYPE_LOADING // Dynamically loaded type // Fill the vtable with vtable resolution thunks in all slots except for // the dictionary slots, which should be filled with dictionary pointers if those // dictionaries are already published. TypeDesc nextTypeToExamineForDictionarySlot = state.TypeBeingBuilt; TypeDesc typeWithDictionary; int nextDictionarySlot = GetMostDerivedDictionarySlot(ref nextTypeToExamineForDictionarySlot, out typeWithDictionary); for (int iSlot = pEEType->NumVtableSlots - 1; iSlot >= 0; iSlot--) { bool isDictionary = iSlot == nextDictionarySlot; if (!isDictionary) { pVtable[iSlot] = LazyVTableResolver.GetThunkForSlot(iSlot); } else { if (typeWithDictionary.RetrieveRuntimeTypeHandleIfPossible()) { pVtable[iSlot] = typeWithDictionary.RuntimeTypeHandle.GetDictionary(); } nextDictionarySlot = GetMostDerivedDictionarySlot(ref nextTypeToExamineForDictionarySlot, out typeWithDictionary); } numSlotsFilled++; } #else Environment.FailFast("Template type loader is null, but metadata based type loader is not in use"); #endif } Debug.Assert(numSlotsFilled == numVtableSlots); // Copy Pointer to finalizer method from the template type if (hasFinalizer) { if (pTemplateEEType != null) { pEEType->FinalizerCode = pTemplateEEType->FinalizerCode; } else { #if SUPPORTS_NATIVE_METADATA_TYPE_LOADING pEEType->FinalizerCode = LazyVTableResolver.GetFinalizerThunk(); #else Environment.FailFast("Template type loader is null, but metadata based type loader is not in use"); 
#endif } } } // Copy the sealed vtable entries if they exist on the template type if (state.NumSealedVTableEntries > 0) { state.HalfBakedSealedVTable = MemoryHelpers.AllocateMemory((int)state.NumSealedVTableEntries * IntPtr.Size); UInt32 cbSealedVirtualSlotsTypeOffset = pEEType->GetFieldOffset(EETypeField.ETF_SealedVirtualSlots); *((IntPtr*)((byte*)pEEType + cbSealedVirtualSlotsTypeOffset)) = state.HalfBakedSealedVTable; for (UInt16 i = 0; i < state.NumSealedVTableEntries; i++) { IntPtr value = pTemplateEEType->GetSealedVirtualSlot(i); pEEType->SetSealedVirtualSlot(value, i); } } // Create a new DispatchMap for the type if (requiresDynamicDispatchMap) { DispatchMap* pTemplateDispatchMap = (DispatchMap*)RuntimeAugments.GetDispatchMapForType(pTemplateEEType->ToRuntimeTypeHandle()); dynamicDispatchMapPtr = MemoryHelpers.AllocateMemory(pTemplateDispatchMap->Size); UInt32 cbDynamicDispatchMapOffset = pEEType->GetFieldOffset(EETypeField.ETF_DynamicDispatchMap); *((IntPtr*)((byte*)pEEType + cbDynamicDispatchMapOffset)) = dynamicDispatchMapPtr; DispatchMap* pDynamicDispatchMap = (DispatchMap*)dynamicDispatchMapPtr; pDynamicDispatchMap->NumEntries = pTemplateDispatchMap->NumEntries; for (int i = 0; i < pTemplateDispatchMap->NumEntries; i++) { DispatchMap.DispatchMapEntry* pTemplateEntry = (*pTemplateDispatchMap)[i]; DispatchMap.DispatchMapEntry* pDynamicEntry = (*pDynamicDispatchMap)[i]; pDynamicEntry->_usInterfaceIndex = pTemplateEntry->_usInterfaceIndex; pDynamicEntry->_usInterfaceMethodSlot = pTemplateEntry->_usInterfaceMethodSlot; if (pTemplateEntry->_usImplMethodSlot < pTemplateEEType->NumVtableSlots) { pDynamicEntry->_usImplMethodSlot = (ushort)state.VTableSlotsMapping.GetVTableSlotInTargetType(pTemplateEntry->_usImplMethodSlot); Debug.Assert(pDynamicEntry->_usImplMethodSlot < numVtableSlots); } else { // This is an entry in the sealed vtable. 
We need to adjust the slot number based on the number of vtable slots // in the dynamic EEType pDynamicEntry->_usImplMethodSlot = (ushort)(pTemplateEntry->_usImplMethodSlot - pTemplateEEType->NumVtableSlots + numVtableSlots); Debug.Assert(state.NumSealedVTableEntries > 0 && pDynamicEntry->_usImplMethodSlot >= numVtableSlots && (pDynamicEntry->_usImplMethodSlot - numVtableSlots) < state.NumSealedVTableEntries); } } } if (pTemplateEEType != null) { pEEType->DynamicTemplateType = pTemplateEEType; } else { // Use object as the template type for non-template based EETypes. This will // allow correct Module identification for types. if (state.TypeBeingBuilt.HasVariance) { // TODO! We need to have a variant EEType here if the type has variance, as the // CreateGenericInstanceDescForType requires it. However, this is a ridiculous api surface // When we remove GenericInstanceDescs from the product, get rid of this weird special // case pEEType->DynamicTemplateType = typeof(IEnumerable<int>).TypeHandle.ToEETypePtr(); } else { pEEType->DynamicTemplateType = typeof(object).TypeHandle.ToEETypePtr(); } } int nonGCStaticDataOffset = 0; if (!isArray && !isGenericEETypeDef) { nonGCStaticDataOffset = state.HasStaticConstructor ? -TypeBuilder.ClassConstructorOffset : 0; // create GC desc if (state.GcDataSize != 0 && state.GcStaticDesc == IntPtr.Zero) { if (state.GcStaticEEType != IntPtr.Zero) { // CoreRT Abi uses managed heap-allocated GC statics object obj = RuntimeAugments.NewObject(((EEType*)state.GcStaticEEType)->ToRuntimeTypeHandle()); gcStaticData = RuntimeAugments.RhHandleAlloc(obj, GCHandleType.Normal); // CoreRT references statics through an extra level of indirection (a table in the image). 
gcStaticsIndirection = MemoryHelpers.AllocateMemory(IntPtr.Size); *((IntPtr*)gcStaticsIndirection) = gcStaticData; pEEType->DynamicGcStaticsData = gcStaticsIndirection; } else { int cbStaticGCDesc; state.GcStaticDesc = CreateStaticGCDesc(state.StaticGCLayout, out state.AllocatedStaticGCDesc, out cbStaticGCDesc); #if GENERICS_FORCE_USG TestGCDescsForEquality(state.GcStaticDesc, state.NonUniversalStaticGCDesc, cbStaticGCDesc, false); #endif } } if (state.ThreadDataSize != 0 && state.ThreadStaticDesc == IntPtr.Zero) { int cbThreadStaticGCDesc; state.ThreadStaticDesc = CreateStaticGCDesc(state.ThreadStaticGCLayout, out state.AllocatedThreadStaticGCDesc, out cbThreadStaticGCDesc); #if GENERICS_FORCE_USG TestGCDescsForEquality(state.ThreadStaticDesc, state.NonUniversalThreadStaticGCDesc, cbThreadStaticGCDesc, false); #endif } // If we have a class constructor, our NonGcDataSize MUST be non-zero Debug.Assert(!state.HasStaticConstructor || (state.NonGcDataSize != 0)); } if (isGeneric) { if (!RuntimeAugments.CreateGenericInstanceDescForType(*(RuntimeTypeHandle*)&pEEType, arity, state.NonGcDataSize, nonGCStaticDataOffset, state.GcDataSize, (int)state.ThreadStaticOffset, state.GcStaticDesc, state.ThreadStaticDesc, state.GenericVarianceFlags)) { throw new OutOfMemoryException(); } } else { Debug.Assert(arity == 0 || isGenericEETypeDef); // We don't need to report the non-gc and gc static data regions and allocate them for non-generics, // as we currently place these fields directly into the image if (!isGenericEETypeDef && state.ThreadDataSize != 0) { // Types with thread static fields ALWAYS get a GID. The GID is used to perform GC // and lifetime management of the thread static data. 
However, these GIDs are only used for that // so the specified GcDataSize, etc are 0 if (!RuntimeAugments.CreateGenericInstanceDescForType(*(RuntimeTypeHandle*)&pEEType, 0, 0, 0, 0, (int)state.ThreadStaticOffset, IntPtr.Zero, state.ThreadStaticDesc, null)) { throw new OutOfMemoryException(); } } } if (state.Dictionary != null) state.HalfBakedDictionary = state.Dictionary.Allocate(); Debug.Assert(!state.HalfBakedRuntimeTypeHandle.IsNull()); Debug.Assert((state.NumSealedVTableEntries == 0 && state.HalfBakedSealedVTable == IntPtr.Zero) || (state.NumSealedVTableEntries > 0 && state.HalfBakedSealedVTable != IntPtr.Zero)); Debug.Assert((state.Dictionary == null && state.HalfBakedDictionary == IntPtr.Zero) || (state.Dictionary != null && state.HalfBakedDictionary != IntPtr.Zero)); successful = true; } finally { if (!successful) { if (eeTypePtrPlusGCDesc != IntPtr.Zero) MemoryHelpers.FreeMemory(eeTypePtrPlusGCDesc); if (dynamicDispatchMapPtr != IntPtr.Zero) MemoryHelpers.FreeMemory(dynamicDispatchMapPtr); if (state.HalfBakedSealedVTable != IntPtr.Zero) MemoryHelpers.FreeMemory(state.HalfBakedSealedVTable); if (state.HalfBakedDictionary != IntPtr.Zero) MemoryHelpers.FreeMemory(state.HalfBakedDictionary); if (state.AllocatedStaticGCDesc) MemoryHelpers.FreeMemory(state.GcStaticDesc); if (state.AllocatedThreadStaticGCDesc) MemoryHelpers.FreeMemory(state.ThreadStaticDesc); if (gcStaticData != IntPtr.Zero) RuntimeAugments.RhHandleFree(gcStaticData); if (gcStaticsIndirection != IntPtr.Zero) MemoryHelpers.FreeMemory(gcStaticsIndirection); } } } private static IntPtr CreateStaticGCDesc(LowLevelList<bool> gcBitfield, out bool allocated, out int cbGCDesc) { if (gcBitfield != null) { int series = CreateGCDesc(gcBitfield, 0, false, true, null); if (series > 0) { cbGCDesc = sizeof(int) + series * sizeof(int) * 2; IntPtr result = MemoryHelpers.AllocateMemory(cbGCDesc); CreateGCDesc(gcBitfield, 0, false, true, (void**)result.ToPointer()); allocated = true; return result; } } allocated = 
false;

            // No pointer series in this layout: share a single, lazily-created "empty"
            // GCDesc blob instead of allocating one per type. The CompareExchange makes
            // publication race-safe; the loser frees its duplicate allocation.
            if (s_emptyGCDesc == IntPtr.Zero)
            {
                IntPtr ptr = MemoryHelpers.AllocateMemory(8);
                long* gcdesc = (long*)ptr.ToPointer();
                *gcdesc = 0;
                if (Interlocked.CompareExchange(ref s_emptyGCDesc, ptr, IntPtr.Zero) != IntPtr.Zero)
                    MemoryHelpers.FreeMemory(ptr);
            }

            cbGCDesc = IntPtr.Size;
            return s_emptyGCDesc;
        }

        // Writes the instance GCDesc for the newly built EEType. GCDescs are stored in the
        // bytes immediately PRECEDING the EEType, which is why the series pointers below are
        // computed as negative offsets from pEEType. Also sets pEEType->HasGCPointers.
        //   - Arrays of reference types get a fixed 3-pointer descriptor (see gcDescStart[0..2]).
        //   - Other arrays / explicit bitfields are encoded via CreateArrayGCDesc / CreateGCDesc.
        //   - With no bitfield but a template EEType, the template's GCDesc bytes are copied verbatim.
        private static void CreateInstanceGCDesc(TypeBuilderState state, EEType* pTemplateEEType, EEType* pEEType, int baseSize, int cbGCDesc, bool isValueType, bool isArray, bool isSzArray, int arrayRank)
        {
            var gcBitfield = state.InstanceGCLayout;
            if (isArray)
            {
                if (cbGCDesc != 0)
                {
                    pEEType->HasGCPointers = true;
                    if (state.IsArrayOfReferenceTypes)
                    {
                        IntPtr* gcDescStart = (IntPtr*)((byte*)pEEType - cbGCDesc);
                        gcDescStart[0] = new IntPtr(-baseSize);
                        gcDescStart[1] = new IntPtr(baseSize - sizeof(IntPtr));
                        gcDescStart[2] = new IntPtr(1);
                    }
                    else
                    {
                        CreateArrayGCDesc(gcBitfield, arrayRank, isSzArray, ((void**)pEEType) - 1);
                    }
                }
                else
                {
                    pEEType->HasGCPointers = false;
                }
            }
            else if (gcBitfield != null)
            {
                if (cbGCDesc != 0)
                {
                    pEEType->HasGCPointers = true;
                    CreateGCDesc(gcBitfield, baseSize, isValueType, false, ((void**)pEEType) - 1);
                }
                else
                {
                    pEEType->HasGCPointers = false;
                }
            }
            else if (pTemplateEEType != null)
            {
                // No explicit layout: clone the template's GCDesc bytes (located before the EEType).
                Buffer.MemoryCopy((byte*)pTemplateEEType - cbGCDesc, (byte*)pEEType - cbGCDesc, cbGCDesc, cbGCDesc);
                pEEType->HasGCPointers = pTemplateEEType->HasGCPointers;
            }
            else
            {
                pEEType->HasGCPointers = false;
            }
        }

        // Computes the number of bytes that must be reserved in front of the EEType for its
        // instance GCDesc. Mirrors the encoding choices made by CreateInstanceGCDesc: the
        // CreateArrayGCDesc/CreateGCDesc calls here pass a null gcdesc pointer, so they only
        // COUNT series without writing anything.
        private static unsafe int GetInstanceGCDescSize(TypeBuilderState state, EEType* pTemplateEEType, bool isValueType, bool isArray)
        {
            var gcBitfield = state.InstanceGCLayout;
            if (isArray)
            {
                if (state.IsArrayOfReferenceTypes)
                {
                    // Reference type arrays have a GC desc the size of 3 pointers
                    return 3 * sizeof(IntPtr);
                }
                else
                {
                    int series = 0;
                    if (gcBitfield != null)
                        series = CreateArrayGCDesc(gcBitfield, 1, true, null);
                    return series > 0 ? (series + 2) * IntPtr.Size : 0;
                }
            }
            else if (gcBitfield != null)
            {
                int series = CreateGCDesc(gcBitfield, 0, isValueType, false, null);
                // Each series takes 2 pointers (offset + size), plus 1 pointer for the series count.
                return series > 0 ? (series * 2 + 1) * IntPtr.Size : 0;
            }
            else if (pTemplateEEType != null)
            {
                return RuntimeAugments.GetGCDescSize(pTemplateEEType->ToRuntimeTypeHandle());
            }
            else
            {
                return 0;
            }
        }

        // Encodes (or, when gcdesc == null, merely counts) the GC pointer series of an array
        // element layout described by 'bitfield' (one bool per pointer-sized slot; true = GC ref).
        // The descriptor grows DOWNWARD from 'gcdesc' (hence the decrementing writes), matching
        // the GC's expectation that the GCDesc precedes the EEType.
        // NOTE(review): under WIN64 the series entries are written through an int*, otherwise a
        // short* — presumably matching the GC's per-platform series item size; confirm against
        // the runtime's gcdesc definition before changing.
        private static unsafe int CreateArrayGCDesc(LowLevelList<bool> bitfield, int rank, bool isSzArray, void* gcdesc)
        {
            if (bitfield == null)
                return 0;

            void** baseOffsetPtr = (void**)gcdesc - 1;
#if WIN64
            int* ptr = (int*)baseOffsetPtr - 1;
#else
            short* ptr = (short*)baseOffsetPtr - 1;
#endif

            // Base offset starts past the array header; multi-dimensional arrays additionally
            // carry per-rank bounds before the elements.
            int baseOffset = 2;
            if (!isSzArray)
            {
                baseOffset += 2 * rank / (sizeof(IntPtr) / sizeof(int));
            }

            int numSeries = 0;
            int i = 0;

            bool first = true;
            int last = 0;
            short numPtrs = 0;

            // Walk the bitfield, grouping consecutive 'true' slots into series.
            while (i < bitfield.Count)
            {
                if (bitfield[i])
                {
                    if (first)
                    {
                        // First series: fold its start into the base offset instead of a skip entry.
                        baseOffset += i;
                        first = false;
                    }
                    else if (gcdesc != null)
                    {
                        // Subsequent series: emit (skip-bytes, pointer-count) for the previous run.
                        *ptr-- = (short)((i - last) * IntPtr.Size);
                        *ptr-- = numPtrs;
                    }

                    numSeries++;
                    numPtrs = 0;

                    while ((i < bitfield.Count) && (bitfield[i]))
                    {
                        numPtrs++;
                        i++;
                    }

                    last = i;
                }
                else
                {
                    i++;
                }
            }

            if (gcdesc != null)
            {
                if (numSeries > 0)
                {
                    // Final series entry wraps around to account for the tail of the element.
                    *ptr-- = (short)((bitfield.Count - last + baseOffset - 2) * IntPtr.Size);
                    *ptr-- = numPtrs;

                    // Array GCDescs store the NEGATED series count, then the scaled base offset.
                    *(void**)gcdesc = (void*)-numSeries;
                    *baseOffsetPtr = (void*)(baseOffset * IntPtr.Size);
                }
            }
            return numSeries;
        }

        // Encodes (or, when gcdesc == null, merely counts) the GC pointer series for a
        // non-array layout. Two output formats:
        //   - isStatic: a forward-growing table of (size, offset) int pairs after a leading count;
        //   - instance: a backward-growing table of (offset, size-minus-objectsize) pointer pairs
        //     preceding the EEType, terminated by a positive series count at 'gcdesc'.
        private static unsafe int CreateGCDesc(LowLevelList<bool> bitfield, int size, bool isValueType, bool isStatic, void* gcdesc)
        {
            int offs = 0;
            // if this type is a class we have to account for the gcdesc.
            if (isValueType)
                offs = IntPtr.Size;

            if (bitfield == null)
                return 0;

            void** ptr = (void**)gcdesc - 1;

            int* staticPtr = isStatic ? ((int*)gcdesc + 1) : null;

            int numSeries = 0;
            int i = 0;
            while (i < bitfield.Count)
            {
                if (bitfield[i])
                {
                    numSeries++;
                    int seriesOffset = i * IntPtr.Size + offs;
                    int seriesSize = 0;

                    while ((i < bitfield.Count) && (bitfield[i]))
                    {
                        seriesSize += IntPtr.Size;
                        i++;
                    }

                    if (gcdesc != null)
                    {
                        if (staticPtr != null)
                        {
                            *staticPtr++ = seriesSize;
                            *staticPtr++ = seriesOffset;
                        }
                        else
                        {
                            // Instance format stores series size biased by the object size.
                            seriesSize = seriesSize - size;
                            *ptr-- = (void*)seriesOffset;
                            *ptr-- = (void*)seriesSize;
                        }
                    }
                }
                else
                {
                    i++;
                }
            }

            if (gcdesc != null)
            {
                if (staticPtr != null)
                    *(int*)gcdesc = numSeries;
                else
                    *(void**)gcdesc = (void*)numSeries;
            }

            return numSeries;
        }

        // Debug-only (GENERICS_FORCE_USG builds): byte-compares a dynamically built GCDesc
        // against the statically compiled template's GCDesc, logging every mismatching
        // 4-byte word before asserting. Instance GCDescs are walked downward (they grow
        // backwards from the EEType); static GCDescs are walked upward.
        [Conditional("GENERICS_FORCE_USG")]
        unsafe private static void TestGCDescsForEquality(IntPtr dynamicGCDesc, IntPtr templateGCDesc, int cbGCDesc, bool isInstanceGCDesc)
        {
            if (templateGCDesc == IntPtr.Zero)
                return;

            Debug.Assert(dynamicGCDesc != IntPtr.Zero);
            Debug.Assert(cbGCDesc == MemoryHelpers.AlignUp(cbGCDesc, 4));

            uint* pMem1 = (uint*)dynamicGCDesc.ToPointer();
            uint* pMem2 = (uint*)templateGCDesc.ToPointer();

            bool foundDifferences = false;

            for (int i = 0; i < cbGCDesc; i += 4)
            {
                if (*pMem1 != *pMem2)
                {
                    // Log all the differences before the assert
                    Debug.WriteLine("ERROR: GCDesc comparison failed at byte #" + i.LowLevelToString() + " while comparing " + dynamicGCDesc.LowLevelToString() + " with " + templateGCDesc.LowLevelToString() + ": [" + (*pMem1).LowLevelToString() + "]/[" + (*pMem2).LowLevelToString() + "]");
                    foundDifferences = true;
                }

                if (isInstanceGCDesc)
                {
                    pMem1--;
                    pMem2--;
                }
                else
                {
                    pMem1++;
                    pMem2++;
                }
            }

            Debug.Assert(!foundDifferences);
        }

        // Builds a runtime EEType for a pointer type (T*), using void* as the template,
        // then patches in the pointee's EEType as the related parameter type.
        public static RuntimeTypeHandle CreatePointerEEType(UInt32 hashCodeOfNewType, RuntimeTypeHandle pointeeTypeHandle, TypeDesc pointerType)
        {
            TypeBuilderState state = new TypeBuilderState(pointerType);

            CreateEETypeWorker(typeof(void*).TypeHandle.ToEETypePtr(), hashCodeOfNewType, 0, false, state);
            Debug.Assert(!state.HalfBakedRuntimeTypeHandle.IsNull());

            state.HalfBakedRuntimeTypeHandle.ToEETypePtr()->RelatedParameterType = pointeeTypeHandle.ToEETypePtr();

            return state.HalfBakedRuntimeTypeHandle;
        }

        // Builds a runtime EEType for a byref type (ref T). A pointer template is used and the
        // ParameterizedTypeShape is then rewritten from Pointer to ByRef.
        public static RuntimeTypeHandle CreateByRefEEType(UInt32 hashCodeOfNewType, RuntimeTypeHandle pointeeTypeHandle, TypeDesc byRefType)
        {
            TypeBuilderState state = new TypeBuilderState(byRefType);

            // ByRef and pointer types look similar enough that we can use void* as a template.
            // Ideally this should be typeof(void&) but C# doesn't support that syntax. We adjust for this below.
            CreateEETypeWorker(typeof(void*).TypeHandle.ToEETypePtr(), hashCodeOfNewType, 0, false, state);
            Debug.Assert(!state.HalfBakedRuntimeTypeHandle.IsNull());

            state.HalfBakedRuntimeTypeHandle.ToEETypePtr()->RelatedParameterType = pointeeTypeHandle.ToEETypePtr();

            // We used a pointer as a template. We need to make this a byref.
            Debug.Assert(state.HalfBakedRuntimeTypeHandle.ToEETypePtr()->ParameterizedTypeShape == ParameterizedTypeShapeConstants.Pointer);
            state.HalfBakedRuntimeTypeHandle.ToEETypePtr()->ParameterizedTypeShape = ParameterizedTypeShapeConstants.ByRef;

            return state.HalfBakedRuntimeTypeHandle;
        }

        // General entry point for creating a dynamic EEType for 'type'. Selects an appropriate
        // template EEType (void* for pointers/byrefs, object[,] for MD-arrays and pointer
        // szarrays, the canonical template type otherwise, or none for metadata-loaded types)
        // and decides whether USG vtable-slot remapping is required, then delegates to
        // CreateEETypeWorker.
        public static RuntimeTypeHandle CreateEEType(TypeDesc type, TypeBuilderState state)
        {
            Debug.Assert(type != null && state != null);

            EEType* pTemplateEEType = null;
            bool requireVtableSlotMapping = false;

            if (type is PointerType || type is ByRefType)
            {
                // Parameterized types carry no static data, finalizers, or sealed vtables.
                Debug.Assert(0 == state.NonGcDataSize);
                Debug.Assert(false == state.HasStaticConstructor);
                Debug.Assert(0 == state.GcDataSize);
                Debug.Assert(0 == state.ThreadStaticOffset);
                Debug.Assert(0 == state.NumSealedVTableEntries);
                Debug.Assert(IntPtr.Zero == state.GcStaticDesc);
                Debug.Assert(IntPtr.Zero == state.ThreadStaticDesc);

                // Pointers and ByRefs only differ by the ParameterizedTypeShape value.
                RuntimeTypeHandle templateTypeHandle = typeof(void*).TypeHandle;

                pTemplateEEType = templateTypeHandle.ToEETypePtr();
            }
            else if ((type is MetadataType) && (state.TemplateType == null || !state.TemplateType.RetrieveRuntimeTypeHandleIfPossible()))
            {
                // Metadata-based type loading: no template EEType exists; build from scratch.
                requireVtableSlotMapping = true;
                pTemplateEEType = null;
            }
            else if (type.IsMdArray || (type.IsSzArray && ((ArrayType)type).ElementType.IsPointer))
            {
                // Multidimensional arrays and szarrays of pointers don't implement generic interfaces and
                // we don't need to do much for them in terms of type building. We can pretty much just take
                // the EEType for any of those, massage the bits that matter (GCDesc, element type,
                // component size,...) to be of the right shape and we're done.
                pTemplateEEType = typeof(object[,]).TypeHandle.ToEETypePtr();
                requireVtableSlotMapping = false;
            }
            else
            {
                Debug.Assert(state.TemplateType != null && !state.TemplateType.RuntimeTypeHandle.IsNull());
                // Universal canonical templates have placeholder vtable layouts, so slots must be remapped.
                requireVtableSlotMapping = state.TemplateType.IsCanonicalSubtype(CanonicalFormKind.Universal);
                RuntimeTypeHandle templateTypeHandle = state.TemplateType.RuntimeTypeHandle;
                pTemplateEEType = templateTypeHandle.ToEETypePtr();
            }

            DefType typeAsDefType = type as DefType;
            // Use a checked typecast to 'ushort' for the arity to ensure its value never exceeds 65535 and cause integer
            // overflows later when computing size of memory blocks to allocate for the type and its GenericInstanceDescriptor structures
            int arity = checked((ushort)((typeAsDefType != null && typeAsDefType.HasInstantiation ? typeAsDefType.Instantiation.Length : 0)));

            CreateEETypeWorker(pTemplateEEType, (uint)type.GetHashCode(), arity, requireVtableSlotMapping, state);

            return state.HalfBakedRuntimeTypeHandle;
        }

        // Reads the generic dictionary pointer out of an EEType's vtable. The dictionary
        // occupies the first vtable slot after the base type's slots.
        public static IntPtr GetDictionary(EEType* pEEType)
        {
            // Dictionary slot is the first vtable slot
            EEType* pBaseType = pEEType->BaseType;
            int dictionarySlot = (pBaseType == null ? 0 : pBaseType->NumVtableSlots);
            return *(IntPtr*)((byte*)pEEType + sizeof(EEType) + dictionarySlot * IntPtr.Size);
        }

        // Returns the vtable slot index that holds 'type's generic dictionary, or -1 when the
        // type cannot share normal (non-universal) generic code and therefore has no dictionary slot.
        public static int GetDictionarySlotInVTable(TypeDesc type)
        {
            if (!type.CanShareNormalGenericCode())
                return -1;

            // Dictionary slot is the first slot in the vtable after the base type's vtable entries
            return type.BaseType != null ? type.BaseType.GetOrCreateTypeBuilderState().NumVTableSlots : 0;
        }

        // Walks up the base-type chain starting at 'nextTypeToExamineForDictionarySlot' to find
        // the most derived type that owns a dictionary slot. On success, returns that slot index,
        // sets 'typeWithDictionary', and advances the ref parameter to the next base type so the
        // caller can iterate. Returns -1 (typeWithDictionary == null) when the chain is exhausted.
        private static int GetMostDerivedDictionarySlot(ref TypeDesc nextTypeToExamineForDictionarySlot, out TypeDesc typeWithDictionary)
        {
            while (nextTypeToExamineForDictionarySlot != null)
            {
                if (nextTypeToExamineForDictionarySlot.GetOrCreateTypeBuilderState().HasDictionarySlotInVTable)
                {
                    typeWithDictionary = nextTypeToExamineForDictionarySlot;
                    nextTypeToExamineForDictionarySlot = nextTypeToExamineForDictionarySlot.BaseType;
                    return GetDictionarySlotInVTable(typeWithDictionary);
                }
                nextTypeToExamineForDictionarySlot = nextTypeToExamineForDictionarySlot.BaseType;
            }
            typeWithDictionary = null;
            return -1;
        }
    }
}
//
// SendEmail.cs
//
// Author:
//   Ruben Vermeersch <ruben@savanne.be>
//   Paul Lange <palango@gmx.de>
//   Bengt Thuree <bengt@thuree.com>
//   Stephane Delcroix <stephane@delcroix.org>
//
// Copyright (C) 2006-2010 Novell, Inc.
// Copyright (C) 2009-2010 Ruben Vermeersch
// Copyright (C) 2010 Paul Lange
// Copyright (C) 2006 Bengt Thuree
// Copyright (C) 2007-2009 Stephane Delcroix
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//

using Gtk;
using GLib;
using System;

using FSpot.Core;
using FSpot.Widgets;
using FSpot.Filters;
using FSpot.UI.Dialog;

using Hyena;
using Hyena.Widgets;

using Mono.Unix;

namespace FSpot
{
	// Dialog that resizes a selection of photos and hands them to the user's
	// configured mail client as attachments.
	public class SendEmail : BuilderDialog
	{
		Window parent_window;

#pragma warning disable 0649
		// Widgets bound from mail_dialog.ui by GtkBeans.Builder; assigned via reflection,
		// hence the disabled "never assigned" warning above.
		[GtkBeans.Builder.Object] private Gtk.ScrolledWindow tray_scrolled;
		[GtkBeans.Builder.Object] private Label NumberOfPictures, TotalOriginalSize, ApproxNewSize;
		[GtkBeans.Builder.Object] private RadioButton tiny_size, small_size, medium_size, large_size, x_large_size, original_size;
#pragma warning restore 0649

		// Sum of the selected photos' on-disk sizes, in bytes.
		long Orig_Photo_Size = 0;
		// Measured size ratio (resized/original) for the sample photo; default 0.3.
		double scale_percentage = 0.3;

		// The different sizes we can shrink to foto to. See RadioButton above for labels.
		static int[] sizes = { 0, 320, 480, 640, 800, 1024 };

		// Estimated size relative to original after shrinking down the photo.
		double[] avg_scale_ref = { 0, 0.0186, 0.0348, 0.0532, 0.0826, 0.1234 };

		static int NoOfSizes = sizes.Length;
		// avg_scale_ref rescaled by a measurement on the first photo (see constructor).
		double[] avg_scale = new double [NoOfSizes];
		string tmp_mail_dir; // To temporary keep the resized images
		// Set when any selected photo is not a JPEG; resizing is then disabled.
		bool force_original = false;

		IBrowsableCollection selection;

		// Builds the dialog for 'selection', restores the last-used size preference,
		// and estimates the attachment size by actually resizing the first photo.
		public SendEmail (IBrowsableCollection selection, Window parent_window) : base ("mail_dialog.ui", "mail_dialog")
		{
			this.selection = selection;
			this.parent_window = parent_window;

			// Only JPEGs are resized; any other content type forces "original size".
			foreach (var p in selection.Items) {
				if (FileFactory.NewForUri (p.DefaultVersion.Uri).QueryInfo ("standard::content-type", FileQueryInfoFlags.None, null).ContentType != "image/jpeg")
					force_original = true;
			}

			if (force_original) {
				original_size.Active = true;
				tiny_size.Sensitive = false;
				small_size.Sensitive = false;
				medium_size.Sensitive = false;
				large_size.Sensitive = false;
				x_large_size.Sensitive = false;
			} else
				// Restore the size radio button from the saved preference (index into 'sizes').
				switch (Preferences.Get<int> (Preferences.EXPORT_EMAIL_SIZE)) {
				case 0 : original_size.Active = true; break;
				case 1 : tiny_size.Active = true; break;
				case 2 : small_size.Active = true; break;
				case 3 : medium_size.Active = true; break;
				case 4 : large_size.Active = true; break;
				case 5 : x_large_size.Active = true; break;
				default: break;
				}

			tray_scrolled.Add (new TrayView (selection));

			Modal = false;

			// Calculate total original filesize
			foreach (var photo in selection.Items) {
				try {
					Orig_Photo_Size += FileFactory.NewForUri (photo.DefaultVersion.Uri).QueryInfo ("standard::size", FileQueryInfoFlags.None, null).Size;
				} catch {
					// Best effort: unreadable files simply don't contribute to the estimate.
				}
			}

			for (int k = 0; k < avg_scale_ref.Length; k++)
				avg_scale[k] = avg_scale_ref[k];

			// Calculate approximate size shrinking, use first photo, and shrink to medium size as base.
			var scalephoto = selection [0];
			if (scalephoto != null && !force_original) {
				// Get first photos file size
				long orig_size = FileFactory.NewForUri (scalephoto.DefaultVersion.Uri).QueryInfo ("standard::size", FileQueryInfoFlags.None, null).Size;

				FilterSet filters = new FilterSet ();
				filters.Add (new ResizeFilter ((uint)(sizes [3])));
				long new_size;
				// Resize a real copy to the medium size and measure the result.
				using (FilterRequest request = new FilterRequest (scalephoto.DefaultVersion.Uri)) {
					filters.Convert (request);
					new_size = FileFactory.NewForUri (request.Current).QueryInfo ("standard::size", FileQueryInfoFlags.None, null).Size;
				}

				if (orig_size > 0) {
					// Get the factor (scale) between original and resized medium size.
					scale_percentage = 1 - ( (float) (orig_size - new_size) / orig_size);
					// What is the relation between the estimated medium scale factor, and reality?
					double scale_scale = scale_percentage / avg_scale_ref[3];
					//System.Console.WriteLine ("scale_percentage {0}, ref {1}, relative {2}",
					//	scale_percentage, avg_scale_ref[3], scale_scale );

					// Re-Calculate the proper relation per size
					for (int k = 0; k < avg_scale_ref.Length; k++) {
						avg_scale[k] = avg_scale_ref[k] * scale_scale;
						//	System.Console.WriteLine ("avg_scale[{0}]={1} (was {2})",
						//		k, avg_scale[k], avg_scale_ref[k] );
					}
				}
			}

			NumberOfPictures.Text = selection.Count.ToString();
			TotalOriginalSize.Text = GLib.Format.SizeForDisplay (Orig_Photo_Size);
			UpdateEstimatedSize();

			ShowAll ();

			//LoadHistory ();

			Response += HandleResponse;
		}

		// Maps the active size radio button to a pixel size from 'sizes', persisting
		// the choice to preferences (unless originals are forced).
		private int GetScaleSize()
		{
			// not only convert dialog size to pixel size, but also set preferences se we use same size next time
			int size_number = 0; // default to original size
			if (tiny_size.Active)
				size_number = 1;
			if (small_size.Active)
				size_number = 2;
			if (medium_size.Active)
				size_number = 3;
			if (large_size.Active)
				size_number = 4;
			if (x_large_size.Active)
				size_number = 5;

			if (!force_original)
				Preferences.Set (Preferences.EXPORT_EMAIL_SIZE, size_number);

			return sizes [ size_number ];
		}

		// Returns the index into 'sizes' corresponding to the selected scale size
		// (0 = original when no match is found).
		private int GetScaleIndex ()
		{
			int scale = GetScaleSize();
			for (int k = 0; k < sizes.Length; k++)
				if (sizes[k] == scale)
					return k;
			return 0;
		}

		// Refreshes the "approximate new size" label using the per-size scale estimates.
		private void UpdateEstimatedSize()
		{
				int new_size_index;
				long new_approx_total_size;
				string approxresult;

				new_size_index = GetScaleIndex();
				if (new_size_index == 0)
					new_approx_total_size = Orig_Photo_Size;
				else
					new_approx_total_size = System.Convert.ToInt64(Orig_Photo_Size * avg_scale [new_size_index]);

				approxresult = GLib.Format.SizeForDisplay (new_approx_total_size);
				ApproxNewSize.Text = approxresult;
		}

		// Signal handler for the size radio buttons (wired up from the .ui file).
		public void on_size_toggled (object o, EventArgs args)
		{
			UpdateEstimatedSize();
		}

		// Dialog response handler: on OK, resizes each selected photo into a temp
		// directory (with a cancellable progress dialog) and launches the configured
		// mail client with the results attached. The attachment-argument syntax is
		// chosen per mail client from the GNOME mailto command preference.
		private void HandleResponse (object sender, Gtk.ResponseArgs args)
		{
			int size = 0;
			bool UserCancelled = false;

			// Lets remove the mail "create mail" dialog
			Destroy();

			if (args.ResponseId != Gtk.ResponseType.Ok) {
				return;
			}

			ProgressDialog progress_dialog = null;
			progress_dialog = new ProgressDialog (Catalog.GetString ("Preparing email"),
							      ProgressDialog.CancelButtonType.Stop,
							      selection.Count,
							      parent_window);

			size = GetScaleSize(); // Which size should we scale to. 0 --> Original

			// evaluate mailto command and define attachment args for cli
			System.Text.StringBuilder attach_arg = new System.Text.StringBuilder ();
			switch (Preferences.Get<string> (Preferences.GNOME_MAILTO_COMMAND)) {
				case "thunderbird %s":
				case "mozilla-thunderbird %s":
				case "seamonkey -mail -compose %s":
				case "icedove %s":
					attach_arg.Append(",");
				break;
				case "kmail %s":
					attach_arg.Append(" --attach ");
				break;
				default://evolution falls into default, since it supports mailto uri correctly
					attach_arg.Append("&attach=");
				break;
			}

			// Create a tmp directory.
			tmp_mail_dir = System.IO.Path.GetTempFileName ();	// Create a tmp file
			System.IO.File.Delete (tmp_mail_dir);			// Delete above tmp file
			System.IO.Directory.CreateDirectory (tmp_mail_dir);	// Create a directory with above tmp name

			System.Text.StringBuilder mail_attach = new System.Text.StringBuilder ();

			FilterSet filters = new FilterSet ();
			if (size != 0)
				filters.Add (new ResizeFilter ((uint) size));
			filters.Add (new UniqueNameFilter (new SafeUri (tmp_mail_dir)));

			for (int i = 0; i < selection.Count; i++) {
				var photo = selection [i];
				if ( (photo != null) && (!UserCancelled) ) {

					if (progress_dialog != null)
						UserCancelled = progress_dialog.Update (String.Format
							(Catalog.GetString ("Exporting picture \"{0}\""), photo.Name));

					if (UserCancelled)
						break;

					try {
						// Prepare a tmp_mail file name
						FilterRequest request = new FilterRequest (photo.DefaultVersion.Uri);

						filters.Convert (request);
						request.Preserve(request.Current);

						// The first attachment skips the "," separator for the mozilla-family clients.
						mail_attach.Append(((i == 0 && attach_arg.ToString () == ",") ? "" : attach_arg.ToString()) + request.Current.ToString ());
					} catch (Exception e) {
						Hyena.Log.ErrorFormat ("Error preparing {0}: {1}", selection[i].Name, e.Message);
						HigMessageDialog md = new HigMessageDialog (parent_window,
											    DialogFlags.DestroyWithParent,
											    MessageType.Error,
											    ButtonsType.Close,
											    Catalog.GetString("Error processing image"),
											    String.Format(Catalog.GetString("An error occured while processing \"{0}\": {1}"), selection[i].Name, e.Message));
						md.Run();
						md.Destroy();
						UserCancelled = true;
					}
				}
			} // foreach

			if (progress_dialog != null)
				progress_dialog.Destroy (); // No need to keep this window

			if (!UserCancelled) {
				// Send the mail :)
				string mail_subject = Catalog.GetString("My Photos");
				switch (Preferences.Get<string> (Preferences.GNOME_MAILTO_COMMAND)) {
					// openSuSE
					case "thunderbird %s":
						System.Diagnostics.Process.Start("thunderbird", " -compose \"subject=" + mail_subject + ",attachment='" + mail_attach + "'\"");
					break;
					case "icedove %s":
						System.Diagnostics.Process.Start("icedove", " -compose \"subject=" + mail_subject + ",attachment='" + mail_attach + "'\"");
					break;
					case "mozilla-thunderbird %s":
						System.Diagnostics.Process.Start("mozilla-thunderbird", " -compose \"subject=" + mail_subject + ",attachment='" + mail_attach + "'\"");
					break;
					case "seamonkey -mail -compose %s":
						System.Diagnostics.Process.Start("seamonkey", " -mail -compose \"subject=" + mail_subject + ",attachment='" + mail_attach + "'\"");
					break;
					case "kmail %s":
						System.Diagnostics.Process.Start("kmail", " --composer --subject \"" + mail_subject + "\"" + mail_attach);
					break;
					case "evolution %s": //evo doesn't urldecode the subject
						GtkBeans.Global.ShowUri (Screen, "mailto:?subject=" + mail_subject + mail_attach);
					break;
					default:
						GtkBeans.Global.ShowUri (Screen, "mailto:?subject=" + System.Web.HttpUtility.UrlEncode(mail_subject) + mail_attach);
					break;
				}
			}
		}
	}
}
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.

using System;
using System.Collections.Generic;
using System.Linq;
using System.Security;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.Editor.Implementation.IntelliSense.SignatureHelp.Presentation;
using Microsoft.CodeAnalysis.Editor.UnitTests.Classification;
using Microsoft.CodeAnalysis.Editor.UnitTests.Workspaces;
using Microsoft.CodeAnalysis.Host;
using Microsoft.CodeAnalysis.LanguageServices;
using Microsoft.CodeAnalysis.Options;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Microsoft.CodeAnalysis.Text;
using Roslyn.Test.Utilities;
using Roslyn.Utilities;
using Xunit;

namespace Microsoft.CodeAnalysis.Editor.UnitTests.SignatureHelp
{
    /// <summary>
    /// Base class for signature-help provider tests. A derived class supplies the provider
    /// under test via <see cref="CreateSignatureHelpProvider"/> and reuses the verification
    /// helpers here: markup-driven item checks, trigger-character checks, current-parameter
    /// checks, and EditorBrowsable (metadata/project/same-project reference) scenarios.
    /// </summary>
    public abstract class AbstractSignatureHelpProviderTests<TWorkspaceFixture> : TestBase, IClassFixture<TWorkspaceFixture>
        where TWorkspaceFixture : TestWorkspaceFixture, new()
    {
        // Shared xUnit class fixture owning the test workspace and its documents.
        protected TWorkspaceFixture workspaceFixture;

        // Factory for the provider under test; implemented per language/provider.
        internal abstract ISignatureHelpProvider CreateSignatureHelpProvider();

        protected AbstractSignatureHelpProviderTests(TWorkspaceFixture workspaceFixture)
        {
            this.workspaceFixture = workspaceFixture;
        }

        public override void Dispose()
        {
            // Close the fixture's text view before base teardown; blocking here is
            // acceptable because xUnit disposes synchronously.
            this.workspaceFixture.CloseTextViewAsync().Wait();
            base.Dispose();
        }

        /// <summary>
        /// Verifies that sighelp comes up at the indicated location in markup ($$), with the indicated span [| ... |].
        /// </summary>
        /// <param name="markup">Input markup with $$ denoting the cursor position, and [| ... |]
        /// denoting the expected sighelp span</param>
        /// <param name="expectedOrderedItemsOrNull">The exact expected sighelp items list. If null, this part of the test is ignored.</param>
        /// <param name="usePreviousCharAsTrigger">If true, uses the last character before $$ to trigger sighelp.
        /// If false, invokes sighelp explicitly at the cursor location.</param>
        /// <param name="sourceCodeKind">The sourcecodekind to run this test on. If null, runs on both regular and script sources.</param>
        protected virtual async Task TestAsync(
            string markup,
            IEnumerable<SignatureHelpTestItem> expectedOrderedItemsOrNull = null,
            bool usePreviousCharAsTrigger = false,
            SourceCodeKind? sourceCodeKind = null,
            bool experimental = false)
        {
            if (sourceCodeKind.HasValue)
            {
                await TestSignatureHelpWorkerAsync(markup, sourceCodeKind.Value, experimental, expectedOrderedItemsOrNull, usePreviousCharAsTrigger);
            }
            else
            {
                // No kind specified: run the same scenario against both regular and script code.
                await TestSignatureHelpWorkerAsync(markup, SourceCodeKind.Regular, experimental, expectedOrderedItemsOrNull, usePreviousCharAsTrigger);
                await TestSignatureHelpWorkerAsync(markup, SourceCodeKind.Script, experimental, expectedOrderedItemsOrNull, usePreviousCharAsTrigger);
            }
        }

        // Runs one markup scenario: parses the markup, verifies sighelp against a freshly
        // updated document, then (when possible) repeats against a second document to
        // exercise the speculative-semantic-model path.
        private async Task TestSignatureHelpWorkerAsync(
            string markupWithPositionAndOptSpan,
            SourceCodeKind sourceCodeKind,
            bool experimental,
            IEnumerable<SignatureHelpTestItem> expectedOrderedItemsOrNull = null,
            bool usePreviousCharAsTrigger = false)
        {
            markupWithPositionAndOptSpan = markupWithPositionAndOptSpan.NormalizeLineEndings();

            string code;
            int cursorPosition;
            IList<TextSpan> textSpans;
            TextSpan? textSpan = null;
            MarkupTestFile.GetPositionAndSpans(
                markupWithPositionAndOptSpan,
                out code,
                out cursorPosition,
                out textSpans);
            if (textSpans.Any())
            {
                // Only the first [| ... |] span is treated as the expected applicable span.
                textSpan = textSpans.First();
            }

            var parseOptions = CreateExperimentalParseOptions();

            // regular
            var document1 = await workspaceFixture.UpdateDocumentAsync(code, sourceCodeKind);
            if (experimental)
            {
                document1 = document1.Project.WithParseOptions(parseOptions).GetDocument(document1.Id);
            }

            await TestSignatureHelpWorkerSharedAsync(code, cursorPosition, sourceCodeKind, document1, textSpan, expectedOrderedItemsOrNull, usePreviousCharAsTrigger);

            // speculative semantic model
            if (await CanUseSpeculativeSemanticModelAsync(document1, cursorPosition))
            {
                // cleanBeforeUpdate: false keeps the previous document so the second run can
                // bind speculatively against it.
                var document2 = await workspaceFixture.UpdateDocumentAsync(code, sourceCodeKind, cleanBeforeUpdate: false);
                if (experimental)
                {
                    document2 = document2.Project.WithParseOptions(parseOptions).GetDocument(document2.Id);
                }

                await TestSignatureHelpWorkerSharedAsync(code, cursorPosition, sourceCodeKind, document2, textSpan, expectedOrderedItemsOrNull, usePreviousCharAsTrigger);
            }
        }

        // Language-specific parse options used when a test runs with experimental: true.
        protected abstract ParseOptions CreateExperimentalParseOptions();

        // A speculative semantic model can be used only when the position sits inside a
        // member body (non-empty speculative-binding span).
        private static async Task<bool> CanUseSpeculativeSemanticModelAsync(Document document, int position)
        {
            var service = document.Project.LanguageServices.GetService<ISyntaxFactsService>();
            var node = (await document.GetSyntaxRootAsync()).FindToken(position).Parent;
            return !service.GetMemberBodySpanForSpeculativeBinding(node).IsEmpty;
        }

        /// <summary>
        /// Asserts which characters do and do not trigger the provider. If
        /// <paramref name="sourceCodeKind"/> is null, checks both regular and script kinds.
        /// </summary>
        protected virtual void VerifyTriggerCharacters(char[] expectedTriggerCharacters, char[] unexpectedTriggerCharacters, SourceCodeKind? sourceCodeKind = null)
        {
            if (sourceCodeKind.HasValue)
            {
                VerifyTriggerCharactersWorker(expectedTriggerCharacters, unexpectedTriggerCharacters, sourceCodeKind.Value);
            }
            else
            {
                VerifyTriggerCharactersWorker(expectedTriggerCharacters, unexpectedTriggerCharacters, SourceCodeKind.Regular);
                VerifyTriggerCharactersWorker(expectedTriggerCharacters, unexpectedTriggerCharacters, SourceCodeKind.Script);
            }
        }

        // NOTE(review): sourceCodeKind is accepted but not used here — IsTriggerCharacter
        // does not depend on it; kept for signature symmetry with other workers.
        private void VerifyTriggerCharactersWorker(char[] expectedTriggerCharacters, char[] unexpectedTriggerCharacters, SourceCodeKind sourceCodeKind)
        {
            ISignatureHelpProvider signatureHelpProvider = CreateSignatureHelpProvider();

            foreach (var expectedTriggerCharacter in expectedTriggerCharacters)
            {
                Assert.True(signatureHelpProvider.IsTriggerCharacter(expectedTriggerCharacter), "Expected '" + expectedTriggerCharacter + "' to be a trigger character");
            }

            foreach (var unexpectedTriggerCharacter in unexpectedTriggerCharacters)
            {
                Assert.False(signatureHelpProvider.IsTriggerCharacter(unexpectedTriggerCharacter), "Expected '" + unexpectedTriggerCharacter + "' to NOT be a trigger character");
            }
        }

        /// <summary>
        /// Asserts the name of the parameter reported as current at the $$ position in
        /// <paramref name="markup"/>. Runs on both kinds when <paramref name="sourceCodeKind"/> is null.
        /// </summary>
        protected virtual async Task VerifyCurrentParameterNameAsync(string markup, string expectedParameterName, SourceCodeKind? sourceCodeKind = null)
        {
            if (sourceCodeKind.HasValue)
            {
                await VerifyCurrentParameterNameWorkerAsync(markup, expectedParameterName, sourceCodeKind.Value);
            }
            else
            {
                await VerifyCurrentParameterNameWorkerAsync(markup, expectedParameterName, SourceCodeKind.Regular);
                await VerifyCurrentParameterNameWorkerAsync(markup, expectedParameterName, SourceCodeKind.Script);
            }
        }

        // Wraps the provider's argument tracking (index/count/name) into a SignatureHelpState,
        // or null when the provider produced no items.
        private static async Task<SignatureHelpState> GetArgumentStateAsync(int cursorPosition, Document document, ISignatureHelpProvider signatureHelpProvider, SignatureHelpTriggerInfo triggerInfo)
        {
            var items = await signatureHelpProvider.GetItemsAsync(document, cursorPosition, triggerInfo, CancellationToken.None);
            return items == null ? null : new SignatureHelpState(items.ArgumentIndex, items.ArgumentCount, items.ArgumentName, null);
        }

        private async Task VerifyCurrentParameterNameWorkerAsync(string markup, string expectedParameterName, SourceCodeKind sourceCodeKind)
        {
            string code;
            int cursorPosition;
            MarkupTestFile.GetPosition(markup.NormalizeLineEndings(), out code, out cursorPosition);

            var document = await workspaceFixture.UpdateDocumentAsync(code, sourceCodeKind);

            var signatureHelpProvider = CreateSignatureHelpProvider();
            var triggerInfo = new SignatureHelpTriggerInfo(SignatureHelpTriggerReason.InvokeSignatureHelpCommand);
            // NOTE(review): 'items' is never read — GetArgumentStateAsync below fetches the
            // items again. Candidate for cleanup (remove the local or reuse it).
            var items = await signatureHelpProvider.GetItemsAsync(document, cursorPosition, triggerInfo, CancellationToken.None);
            Assert.Equal(expectedParameterName, (await GetArgumentStateAsync(cursorPosition, document, signatureHelpProvider, triggerInfo)).ArgumentName);
        }

        // Asserts the actual item list matches the expected list pairwise and in order.
        private void CompareAndAssertCollectionsAndCurrentParameter(
            IEnumerable<SignatureHelpTestItem> expectedTestItems, SignatureHelpItems actualSignatureHelpItems, ISignatureHelpProvider signatureHelpProvider, Document document, int cursorPosition)
        {
            Assert.Equal(expectedTestItems.Count(), actualSignatureHelpItems.Items.Count());

            for (int i = 0; i < expectedTestItems.Count(); i++)
            {
                CompareSigHelpItemsAndCurrentPosition(
                    actualSignatureHelpItems,
                    actualSignatureHelpItems.Items.ElementAt(i),
                    expectedTestItems.ElementAt(i),
                    signatureHelpProvider,
                    document,
                    cursorPosition,
                    actualSignatureHelpItems.ApplicableSpan);
            }
        }

        // Compares one actual item against one expected test item: rendered signature text,
        // pretty-printed form, method/parameter documentation, current parameter index, and
        // description, each only when the expectation supplies it (null = don't check).
        private void CompareSigHelpItemsAndCurrentPosition(
            SignatureHelpItems items,
            SignatureHelpItem actualSignatureHelpItem,
            SignatureHelpTestItem expectedTestItem,
            ISignatureHelpProvider signatureHelpProvider,
            Document document,
            int cursorPosition,
            TextSpan applicableSpan)
        {
            // -1 (no selection) unless the expected index is valid for this item's parameters.
            int currentParameterIndex = -1;
            if (expectedTestItem.CurrentParameterIndex != null)
            {
                if (expectedTestItem.CurrentParameterIndex.Value >= 0 && expectedTestItem.CurrentParameterIndex.Value < actualSignatureHelpItem.Parameters.Length)
                {
                    currentParameterIndex = expectedTestItem.CurrentParameterIndex.Value;
                }
            }

            var signature = new Signature(applicableToSpan: null, signatureHelpItem: actualSignatureHelpItem, selectedParameterIndex: currentParameterIndex);

            // We're a match if the signature matches...
            // We're now combining the signature and documentation to make classification work.
            if (!string.IsNullOrEmpty(expectedTestItem.MethodDocumentation))
            {
                Assert.Equal(expectedTestItem.Signature + "\r\n" + expectedTestItem.MethodDocumentation, signature.Content);
            }
            else
            {
                Assert.Equal(expectedTestItem.Signature, signature.Content);
            }

            if (expectedTestItem.PrettyPrintedSignature != null)
            {
                Assert.Equal(expectedTestItem.PrettyPrintedSignature, signature.PrettyPrintedContent);
            }

            if (expectedTestItem.MethodDocumentation != null)
            {
                Assert.Equal(expectedTestItem.MethodDocumentation, actualSignatureHelpItem.DocumentationFactory(CancellationToken.None).GetFullText());
            }

            if (expectedTestItem.ParameterDocumentation != null)
            {
                Assert.Equal(expectedTestItem.ParameterDocumentation, signature.CurrentParameter.Documentation);
            }

            if (expectedTestItem.CurrentParameterIndex != null)
            {
                Assert.Equal(expectedTestItem.CurrentParameterIndex, items.ArgumentIndex);
            }

            if (expectedTestItem.Description != null)
            {
                Assert.Equal(expectedTestItem.Description, ToString(actualSignatureHelpItem.DescriptionParts));
            }
        }

        // Concatenates display parts into a single comparable string.
        private string ToString(IEnumerable<SymbolDisplayPart> list)
        {
            return string.Concat(list.Select(i => i.ToString()));
        }

        /// <summary>
        /// Runs an EditorBrowsable scenario three ways: referenced code consumed as a metadata
        /// reference, as a project reference, and (same-language only) inside the same project.
        /// Both expectation lists are required; pass empty enumerables for "no items".
        /// </summary>
        protected async Task TestSignatureHelpInEditorBrowsableContextsAsync(
            string markup,
            string referencedCode,
            IEnumerable<SignatureHelpTestItem> expectedOrderedItemsMetadataReference,
            IEnumerable<SignatureHelpTestItem> expectedOrderedItemsSameSolution,
            string sourceLanguage,
            string referencedLanguage,
            bool hideAdvancedMembers = false)
        {
            if (expectedOrderedItemsMetadataReference == null || expectedOrderedItemsSameSolution == null)
            {
                AssertEx.Fail("Expected signature help items must be provided for EditorBrowsable tests. If there are no expected items, provide an empty IEnumerable rather than null.");
            }

            await TestSignatureHelpWithMetadataReferenceHelperAsync(markup, referencedCode, expectedOrderedItemsMetadataReference, sourceLanguage, referencedLanguage, hideAdvancedMembers);
            await TestSignatureHelpWithProjectReferenceHelperAsync(markup, referencedCode, expectedOrderedItemsSameSolution, sourceLanguage, referencedLanguage, hideAdvancedMembers);

            // Multi-language projects are not supported.
            if (sourceLanguage == referencedLanguage)
            {
                await TestSignatureHelpInSameProjectHelperAsync(markup, referencedCode, expectedOrderedItemsSameSolution, sourceLanguage, hideAdvancedMembers);
            }
        }

        // Referenced code compiled and consumed as a metadata (binary) reference.
        public Task TestSignatureHelpWithMetadataReferenceHelperAsync(string sourceCode, string referencedCode, IEnumerable<SignatureHelpTestItem> expectedOrderedItems, string sourceLanguage, string referencedLanguage, bool hideAdvancedMembers)
        {
            var xmlString = string.Format(@" <Workspace> <Project Language=""{0}"" CommonReferences=""true""> <Document FilePath=""SourceDocument""> {1} </Document> <MetadataReferenceFromSource Language=""{2}"" CommonReferences=""true""> <Document FilePath=""ReferencedDocument""> {3} </Document> </MetadataReferenceFromSource> </Project> </Workspace>", sourceLanguage, SecurityElement.Escape(sourceCode), referencedLanguage, SecurityElement.Escape(referencedCode));

            return VerifyItemWithReferenceWorkerAsync(xmlString, expectedOrderedItems, hideAdvancedMembers);
        }

        // Referenced code in a second project consumed via a project-to-project reference.
        public async Task TestSignatureHelpWithProjectReferenceHelperAsync(string sourceCode, string referencedCode, IEnumerable<SignatureHelpTestItem> expectedOrderedItems, string sourceLanguage, string referencedLanguage, bool hideAdvancedMembers)
        {
            var xmlString = string.Format(@" <Workspace> <Project Language=""{0}"" CommonReferences=""true""> <ProjectReference>ReferencedProject</ProjectReference> <Document FilePath=""SourceDocument""> {1} </Document> </Project> <Project Language=""{2}"" CommonReferences=""true"" AssemblyName=""ReferencedProject""> <Document FilePath=""ReferencedDocument""> {3} </Document> </Project> </Workspace>", sourceLanguage, SecurityElement.Escape(sourceCode), referencedLanguage, SecurityElement.Escape(referencedCode));

            await VerifyItemWithReferenceWorkerAsync(xmlString, expectedOrderedItems, hideAdvancedMembers);
        }

        // Referenced code as a sibling document in the same project.
        private async Task TestSignatureHelpInSameProjectHelperAsync(string sourceCode, string referencedCode, IEnumerable<SignatureHelpTestItem> expectedOrderedItems, string sourceLanguage, bool hideAdvancedMembers)
        {
            var xmlString = string.Format(@" <Workspace> <Project Language=""{0}"" CommonReferences=""true""> <Document FilePath=""SourceDocument""> {1} </Document> <Document FilePath=""ReferencedDocument""> {2} </Document> </Project> </Workspace>", sourceLanguage, SecurityElement.Escape(sourceCode), SecurityElement.Escape(referencedCode));

            await VerifyItemWithReferenceWorkerAsync(xmlString, expectedOrderedItems, hideAdvancedMembers);
        }

        // Builds a workspace from the given XML, applies the HideAdvancedMembers option, and
        // verifies sighelp at the cursor in "SourceDocument".
        protected async Task VerifyItemWithReferenceWorkerAsync(string xmlString, IEnumerable<SignatureHelpTestItem> expectedOrderedItems, bool hideAdvancedMembers)
        {
            using (var testWorkspace = await TestWorkspaceFactory.CreateWorkspaceAsync(xmlString))
            {
                var optionsService = testWorkspace.Services.GetService<IOptionService>();
                var cursorPosition = testWorkspace.Documents.First(d => d.Name == "SourceDocument").CursorPosition.Value;
                var documentId = testWorkspace.Documents.First(d => d.Name == "SourceDocument").Id;
                var document = testWorkspace.CurrentSolution.GetDocument(documentId);
                var code = (await document.GetTextAsync()).ToString();

                optionsService.SetOptions(optionsService.GetOptions().WithChangedOption(Microsoft.CodeAnalysis.Completion.CompletionOptions.HideAdvancedMembers, document.Project.Language, hideAdvancedMembers));

                IList<TextSpan> textSpans = null;

                var selectedSpans = testWorkspace.Documents.First(d => d.Name == "SourceDocument").SelectedSpans;
                if (selectedSpans.Any())
                {
                    textSpans = selectedSpans;
                }

                TextSpan? textSpan = null;
                if (textSpans != null && textSpans.Any())
                {
                    textSpan = textSpans.First();
                }

                await TestSignatureHelpWorkerSharedAsync(code, cursorPosition, SourceCodeKind.Regular, document, textSpan, expectedOrderedItems);
            }
        }

        // Core verification: invokes the provider (optionally via a typed-char trigger) and
        // checks the applicable span and item list. When expecting no items, a null result
        // is accepted silently.
        private async Task TestSignatureHelpWorkerSharedAsync(
            string code,
            int cursorPosition,
            SourceCodeKind sourceCodeKind,
            Document document,
            TextSpan? textSpan,
            IEnumerable<SignatureHelpTestItem> expectedOrderedItemsOrNull = null,
            bool usePreviousCharAsTrigger = false)
        {
            var signatureHelpProvider = CreateSignatureHelpProvider();
            var triggerInfo = new SignatureHelpTriggerInfo(SignatureHelpTriggerReason.InvokeSignatureHelpCommand);

            if (usePreviousCharAsTrigger)
            {
                triggerInfo = new SignatureHelpTriggerInfo( SignatureHelpTriggerReason.TypeCharCommand, code.ElementAt(cursorPosition - 1));

                // Nothing to verify if the char before $$ isn't a trigger for this provider.
                if (!signatureHelpProvider.IsTriggerCharacter(triggerInfo.TriggerCharacter.Value))
                {
                    return;
                }
            }

            var items = await signatureHelpProvider.GetItemsAsync(document, cursorPosition, triggerInfo, CancellationToken.None);

            // If we're expecting 0 items, then there's no need to compare them
            if ((expectedOrderedItemsOrNull == null || !expectedOrderedItemsOrNull.Any()) && items == null)
            {
                return;
            }

            AssertEx.NotNull(items, "Signature help provider returned null for items. Did you forget $$ in the test or is the test otherwise malformed, e.g. quotes not escaped?");

            // Verify the span
            if (textSpan != null)
            {
                Assert.Equal(textSpan, items.ApplicableSpan);
            }

            if (expectedOrderedItemsOrNull != null)
            {
                CompareAndAssertCollectionsAndCurrentParameter(expectedOrderedItemsOrNull, items, signatureHelpProvider, document, cursorPosition);
            }
        }

        /// <summary>
        /// Like the markup tests above, but builds the workspace against the .NET 4.5
        /// reference set (CommonReferencesNet45) for APIs that need it.
        /// </summary>
        protected async Task TestSignatureHelpWithMscorlib45Async(
            string markup,
            IEnumerable<SignatureHelpTestItem> expectedOrderedItems,
            string sourceLanguage)
        {
            var xmlString = string.Format(@" <Workspace> <Project Language=""{0}"" CommonReferencesNet45=""true""> <Document FilePath=""SourceDocument""> {1} </Document> </Project> </Workspace>", sourceLanguage, SecurityElement.Escape(markup));

            using (var testWorkspace = await TestWorkspaceFactory.CreateWorkspaceAsync(xmlString))
            {
                var cursorPosition = testWorkspace.Documents.Single(d => d.Name == "SourceDocument").CursorPosition.Value;
                var documentId = testWorkspace.Documents.Where(d => d.Name == "SourceDocument").Single().Id;
                var document = testWorkspace.CurrentSolution.GetDocument(documentId);
                var code = (await document.GetTextAsync()).ToString();

                IList<TextSpan> textSpans = null;

                var selectedSpans = testWorkspace.Documents.Single(d => d.Name == "SourceDocument").SelectedSpans;
                if (selectedSpans.Any())
                {
                    textSpans = selectedSpans;
                }

                TextSpan? textSpan = null;
                if (textSpans != null && textSpans.Any())
                {
                    textSpan = textSpans.First();
                }

                await TestSignatureHelpWorkerSharedAsync(code, cursorPosition, SourceCodeKind.Regular, document, textSpan, expectedOrderedItems);
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
/// <summary>
/// System.Array.Initialize()
/// Array.Initialize invokes the parameterless constructor of a value-type element
/// type. int has no parameterless constructor to run, struct A cannot declare one,
/// and reference-type arrays are not touched at all — so every scenario below
/// expects the original element values to survive the call unchanged.
/// </summary>
public class ArrayInitialize
{
    #region Public Methods
    // Runs all scenarios; returns true only if every one passed.
    public bool RunTests()
    {
        bool retVal = true;
        TestLibrary.TestFramework.LogInformation("[Positive]");
        retVal = PosTest1() && retVal;
        retVal = PosTest2() && retVal;
        retVal = PosTest3() && retVal;
        retVal = PosTest4() && retVal;
        return retVal;
    }

    #region Positive Test Cases
    // Initialize on an int[] leaves every element unchanged.
    public bool PosTest1()
    {
        bool retVal = true;
        TestLibrary.TestFramework.BeginScenario("PosTest1: Array of Int32, Initialize ");
        try
        {
            // NOTE(review): -55 appears to be a legacy argument used throughout this
            // test library's Generator calls — confirm against TestLibrary.Generator.
            int length = TestLibrary.Generator.GetInt16(-55);
            int[] i1 = new int[length];
            for (int i = 0; i < length; i++)
            {
                i1[i] = i;
            }
            i1.Initialize();
            // The type of int32 does not have constructors, so the value is not changed
            for (int i = 0; i < length; i++)
            {
                if (i1[i] != i)
                {
                    TestLibrary.TestFramework.LogError("001", "The result is not the value as expected");
                    retVal = false;
                }
            }
        }
        catch (Exception e)
        {
            // "002" keeps the error IDs sequential and unique (was a duplicate "001",
            // unlike the 003/004, 005/006, 007/008 pairs in the other scenarios).
            TestLibrary.TestFramework.LogError("002", "Unexpected exception: " + e);
            retVal = false;
        }
        return retVal;
    }

    // Initialize on an array of a user-defined struct (no parameterless constructor
    // can be declared in C#) leaves every element unchanged.
    public bool PosTest2()
    {
        bool retVal = true;
        TestLibrary.TestFramework.BeginScenario("PosTest2: Try to Initialize a customized structure type");
        try
        {
            int length = TestLibrary.Generator.GetInt16(-55);
            A[] i1 = new A[length];
            for (int i = 0; i < length; i++)
            {
                i1[i] = new A(i);
            }
            i1.Initialize();
            for (int i = 0; i < length; i++)
            {
                if (i1[i].a != i)
                {
                    TestLibrary.TestFramework.LogError("003", "The result is not the value as expected");
                    retVal = false;
                }
            }
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("004", "Unexpected exception: " + e);
            retVal = false;
        }
        return retVal;
    }

    // Initialize on a reference-type (string[]) array is a no-op: references are
    // left as-is.
    public bool PosTest3()
    {
        bool retVal = true;
        TestLibrary.TestFramework.BeginScenario("PosTest3: Initialize a reference-type array ");
        try
        {
            int length = TestLibrary.Generator.GetInt16(-55);
            string[] i1 = new string[length];
            for (int i = 0; i < length; i++)
            {
                i1[i] = i.ToString();
            }
            i1.Initialize();
            // Initialize only affects value-type elements; reference elements keep
            // their original values.
            for (int i = 0; i < length; i++)
            {
                if (i1[i] != i.ToString())
                {
                    TestLibrary.TestFramework.LogError("005", "The result is not the value as expected");
                    retVal = false;
                }
            }
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("006", "Unexpected exception: " + e);
            retVal = false;
        }
        return retVal;
    }

    // Initialize on an array of a user-defined class (B, which does have a
    // parameterless constructor) still leaves the references unchanged.
    public bool PosTest4()
    {
        bool retVal = true;
        TestLibrary.TestFramework.BeginScenario("PosTest4: Try to Initialize a customized class type");
        try
        {
            int length = TestLibrary.Generator.GetInt16(-55);
            B[] i1 = new B[length];
            for (int i = 0; i < length; i++)
            {
                i1[i] = new B(i);
            }
            i1.Initialize();
            for (int i = 0; i < length; i++)
            {
                if (i1[i].b_value != i)
                {
                    TestLibrary.TestFramework.LogError("007", "The result is not the value as expected");
                    retVal = false;
                }
            }
        }
        catch (Exception e)
        {
            TestLibrary.TestFramework.LogError("008", "Unexpected exception: " + e);
            retVal = false;
        }
        return retVal;
    }
    #endregion

    #region Negative Test Cases
    #endregion
    #endregion

    // Test-harness entry point: 100 = pass, 0 = fail (CoreCLR test convention).
    public static int Main()
    {
        ArrayInitialize test = new ArrayInitialize();
        TestLibrary.TestFramework.BeginTestCase("ArrayInitialize");
        if (test.RunTests())
        {
            TestLibrary.TestFramework.EndTestCase();
            TestLibrary.TestFramework.LogInformation("PASS");
            return 100;
        }
        else
        {
            TestLibrary.TestFramework.EndTestCase();
            TestLibrary.TestFramework.LogInformation("FAIL");
            return 0;
        }
    }
}

// Value-type element: cannot declare a parameterless constructor, so
// Array.Initialize leaves instances untouched.
struct A
{
    public A(int a)
    {
        this.a = a;
    }
    public int a;
}

// Reference-type element with both constructors; Array.Initialize never runs them.
class B
{
    public B()
    {
        this.b = 0;
    }
    public B(int b)
    {
        this.b = b;
    }
    // Read-only view of the stored value.
    public int b_value
    {
        get { return this.b; }
    }
    private int b;
}
using System;
using System.Collections.Generic;
using System.Diagnostics;

namespace IxMilia.Dxf.Objects
{
    /// <summary>
    /// DATATABLE object: a row/column grid of typed cells (bool, int, double, string,
    /// point, or handle). Reading is driven by TrySetPair state; writing emits one
    /// column-type code (92) and name (2) per column followed by the cell values.
    /// </summary>
    public partial class DxfDataTable
    {
        // Reader state: both counts must arrive before the backing array is allocated.
        private bool _readRowCount = false;
        private bool _readColumnCount = false;
        private bool _createdTable = false;
        private int _currentColumnCode = 0;
        private int _currentColumn = -1;
        private int _currentRow = -1;
        // Points are accumulated across X/Y/Z pairs before being committed as a cell.
        private DxfPoint _current2DPoint;
        private DxfPoint _current3DPoint;

        // Cell storage, row-major; resized only via SetSize or when both counts are read.
        public object[,] Values { get; private set; } = new object[0, 0];

        public object this[int row, int column]
        {
            get { return Values[row, column]; }
            set { Values[row, column] = value; }
        }

        /// <summary>
        /// Resizes the table, preserving any cells that fit in the new dimensions.
        /// </summary>
        /// <exception cref="ArgumentOutOfRangeException">If either dimension is negative.</exception>
        public void SetSize(int rows, int columns)
        {
            if (rows < 0)
            {
                throw new ArgumentOutOfRangeException(nameof(rows));
            }

            if (columns < 0)
            {
                throw new ArgumentOutOfRangeException(nameof(columns));
            }

            var newValues = new object[rows, columns];
            for (int row = 0; row < Math.Min(rows, RowCount); row++)
            {
                for (int col = 0; col < Math.Min(columns, ColumnCount); col++)
                {
                    newValues[row, col] = Values[row, col];
                }
            }

            Values = newValues;
            RowCount = rows;
            ColumnCount = columns;
        }

        // Emits the object's code pairs; XData is written last per the base contract.
        protected override void AddValuePairs(List<DxfCodePair> pairs, DxfAcadVersion version, bool outputHandles, bool writeXData)
        {
            base.AddValuePairs(pairs, version, outputHandles, writeXData: false);
            pairs.Add(new DxfCodePair(100, "AcDbDataTable"));
            pairs.Add(new DxfCodePair(70, (this.Field)));
            pairs.Add(new DxfCodePair(90, (this.ColumnCount)));
            pairs.Add(new DxfCodePair(91, (this.RowCount)));
            pairs.Add(new DxfCodePair(1, (this.Name)));
            for (int col = 0; col < ColumnCount; col++)
            {
                // Column type is inferred from the first row's value.
                // NOTE(review): with RowCount == 0 this indexes Values[0, col] — confirm
                // whether zero-row tables are possible from callers.
                var columnCode = GetCodeFromColumnType(col);
                pairs.Add(new DxfCodePair(92, columnCode));
                pairs.Add(new DxfCodePair(2, ColumnNames[col]));
                for (int row = 0; row < RowCount; row++)
                {
                    pairs.AddRange(GeneratePairsFromCode(columnCode, Values[row, col]));
                }
            }

            if (writeXData)
            {
                DxfXData.AddValuePairs(XData, pairs, version, outputHandles);
            }
        }

        // Consumes one code pair while reading; returns false for pairs owned by the base.
        internal override bool TrySetPair(DxfCodePair pair)
        {
            switch (pair.Code)
            {
                case 1: this.Name = (pair.StringValue); break;
                case 70: this.Field = (pair.ShortValue); break;
                case 90: this.ColumnCount = (pair.IntegerValue); _readColumnCount = true; break;
                case 91: this.RowCount = (pair.IntegerValue); _readRowCount = true; break;
                // column headers
                case 92: _currentColumnCode = pair.IntegerValue; _currentColumn++; _currentRow = 0; break;
                case 2: this.ColumnNames.Add(pair.StringValue); break;
                // column values
                case 71: SetValue(BoolShort(pair.ShortValue)); break;
                case 93: SetValue(pair.IntegerValue); break;
                case 40: SetValue(pair.DoubleValue); break;
                case 3: SetValue(pair.StringValue); break;
                // 2D point: X starts a point, Y updates it, Z commits the cell.
                case 10: _current2DPoint = new DxfPoint(pair.DoubleValue, 0.0, 0.0); break;
                case 20: _current2DPoint = _current2DPoint.WithUpdatedY(pair.DoubleValue); break;
                case 30:
                    _current2DPoint = _current2DPoint.WithUpdatedZ(pair.DoubleValue);
                    SetValue(_current2DPoint);
                    _current2DPoint = default(DxfPoint);
                    break;
                // 3D point: same accumulation pattern on codes 11/21/31.
                case 11: _current3DPoint = new DxfPoint(pair.DoubleValue, 0.0, 0.0); break;
                case 21: _current3DPoint = _current3DPoint.WithUpdatedY(pair.DoubleValue); break;
                case 31:
                    _current3DPoint = _current3DPoint.WithUpdatedZ(pair.DoubleValue);
                    SetValue(_current3DPoint);
                    _current3DPoint = default(DxfPoint);
                    break;
                case 331:
                case 360:
                case 350:
                case 340:
                case 330:
                    if (_readRowCount || _readColumnCount)
                    {
                        // TODO: differentiate between handle types
                        SetValue(HandleString(pair.StringValue));
                    }
                    else
                    {
                        // still reading AcDbObject values
                        goto default;
                    }

                    break;
                default:
                    return base.TrySetPair(pair);
            }

            // Allocate the cell grid once both dimensions are known.
            if (_readRowCount && _readColumnCount && !_createdTable)
            {
                Values = new object[RowCount, ColumnCount];
                _createdTable = true;
            }

            return true;
        }

        // Stores a parsed cell at the current position and advances down the column.
        // Out-of-range positions (malformed input) are dropped with a debug assert.
        private void SetValue(object value)
        {
            if (_currentRow < 0 || _currentRow >= RowCount)
            {
                Debug.Assert(false, "Row out of range");
                return;
            }

            if (_currentColumn < 0 || _currentColumn >= ColumnCount)
            {
                Debug.Assert(false, "Column out of range");
                return;
            }

            Values[_currentRow, _currentColumn] = value;
            _currentRow++;
        }

        // Maps a column's runtime cell type (taken from row 0) to its DXF value code.
        private int GetCodeFromColumnType(int column)
        {
            var value = Values[0, column];
            if (value == null)
            {
                // Explicit failure instead of a NullReferenceException on GetType() below.
                throw new InvalidOperationException("Cannot determine column type from a null value in row 0, column " + column);
            }

            if (value.GetType() == typeof(bool))
            {
                return 71;
            }
            else if (value.GetType() == typeof(int))
            {
                return 93;
            }
            else if (value.GetType() == typeof(double))
            {
                return 40;
            }
            else if (value.GetType() == typeof(string))
            {
                return 3;
            }
            else if (value.GetType() == typeof(DxfPoint))
            {
                // TODO: how to differentiate between 2D and 3D point?
                return 10;
            }
            else if (value.GetType() == typeof(DxfHandle))
            {
                // TODO: differentiate between handle types
                return 331;
            }
            else
            {
                throw new InvalidOperationException("Unsupported column type: " + value.GetType().Name);
            }
        }

        // Converts one cell back into its code pair(s) for writing.
        private IEnumerable<DxfCodePair> GeneratePairsFromCode(int code, object value)
        {
            var expectedType = DxfCodePair.ExpectedType(code);
            if (expectedType == typeof(bool))
            {
                return new[] { new DxfCodePair(code, BoolShort((bool)value)) };
            }
            else if (expectedType == typeof(int))
            {
                return new[] { new DxfCodePair(code, (int)value) };
            }
            else if (code == 40)
            {
                return new[] { new DxfCodePair(code, (double)value) };
            }
            else if (code == 10 || code == 11)
            {
                var point = (DxfPoint)value;
                return new[]
                {
                    new DxfCodePair(code, point.X),
                    new DxfCodePair(code + 10, point.Y),
                    new DxfCodePair(code + 20, point.Z),
                };
            }
            else if (code >= 330 && expectedType == typeof(string))
            {
                // Handle codes must be checked BEFORE the generic string case: their
                // expected pair type is also string, so the previous ordering made this
                // branch unreachable and handle cells hit "(string)value" above,
                // throwing InvalidCastException on the stored DxfHandle.
                // TODO: differentiate between handle types
                return new[] { new DxfCodePair(code, HandleString((DxfHandle)value)) };
            }
            else if (expectedType == typeof(string))
            {
                return new[] { new DxfCodePair(code, (string)value) };
            }
            else
            {
                throw new ArgumentOutOfRangeException(nameof(code));
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Diagnostics.Contracts; using System.Globalization; using System.Runtime.CompilerServices; namespace System { public partial class String { [Pure] public bool Contains(string value) { return (IndexOf(value, StringComparison.Ordinal) >= 0); } [Pure] public bool Contains(string value, StringComparison comparisonType) { return (IndexOf(value, comparisonType) >= 0); } // Returns the index of the first occurrence of a specified character in the current instance. // The search starts at startIndex and runs thorough the next count characters. // [Pure] public int IndexOf(char value) { return IndexOf(value, 0, this.Length); } [Pure] public int IndexOf(char value, int startIndex) { return IndexOf(value, startIndex, this.Length - startIndex); } [Pure] public unsafe int IndexOf(char value, int startIndex, int count) { if (startIndex < 0 || startIndex > Length) throw new ArgumentOutOfRangeException(nameof(startIndex), SR.ArgumentOutOfRange_Index); if (count < 0 || count > Length - startIndex) throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_Count); fixed (char* pChars = &m_firstChar) { char* pCh = pChars + startIndex; while (count >= 4) { if (*pCh == value) goto ReturnIndex; if (*(pCh + 1) == value) goto ReturnIndex1; if (*(pCh + 2) == value) goto ReturnIndex2; if (*(pCh + 3) == value) goto ReturnIndex3; count -= 4; pCh += 4; } while (count > 0) { if (*pCh == value) goto ReturnIndex; count--; pCh++; } return -1; ReturnIndex3: pCh++; ReturnIndex2: pCh++; ReturnIndex1: pCh++; ReturnIndex: return (int)(pCh - pChars); } } // Returns the index of the first occurrence of any specified character in the current instance. // The search starts at startIndex and runs to startIndex + count -1. 
// [Pure] public int IndexOfAny(char[] anyOf) { return IndexOfAny(anyOf, 0, this.Length); } [Pure] public int IndexOfAny(char[] anyOf, int startIndex) { return IndexOfAny(anyOf, startIndex, this.Length - startIndex); } [Pure] [MethodImplAttribute(MethodImplOptions.InternalCall)] public extern int IndexOfAny(char[] anyOf, int startIndex, int count); // Determines the position within this string of the first occurrence of the specified // string, according to the specified search criteria. The search begins at // the first character of this string, it is case-sensitive and the current culture // comparison is used. // [Pure] public int IndexOf(String value) { return IndexOf(value, StringComparison.CurrentCulture); } // Determines the position within this string of the first occurrence of the specified // string, according to the specified search criteria. The search begins at // startIndex, it is case-sensitive and the current culture comparison is used. // [Pure] public int IndexOf(String value, int startIndex) { return IndexOf(value, startIndex, StringComparison.CurrentCulture); } // Determines the position within this string of the first occurrence of the specified // string, according to the specified search criteria. The search begins at // startIndex, ends at endIndex and the current culture comparison is used. 
// [Pure] public int IndexOf(String value, int startIndex, int count) { if (startIndex < 0 || startIndex > this.Length) { throw new ArgumentOutOfRangeException(nameof(startIndex), SR.ArgumentOutOfRange_Index); } if (count < 0 || count > this.Length - startIndex) { throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_Count); } Contract.EndContractBlock(); return IndexOf(value, startIndex, count, StringComparison.CurrentCulture); } [Pure] public int IndexOf(String value, StringComparison comparisonType) { return IndexOf(value, 0, this.Length, comparisonType); } [Pure] public int IndexOf(String value, int startIndex, StringComparison comparisonType) { return IndexOf(value, startIndex, this.Length - startIndex, comparisonType); } [Pure] public int IndexOf(String value, int startIndex, int count, StringComparison comparisonType) { // Validate inputs if (value == null) throw new ArgumentNullException(nameof(value)); if (startIndex < 0 || startIndex > this.Length) throw new ArgumentOutOfRangeException(nameof(startIndex), SR.ArgumentOutOfRange_Index); if (count < 0 || startIndex > this.Length - count) throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_Count); Contract.EndContractBlock(); switch (comparisonType) { case StringComparison.CurrentCulture: return CultureInfo.CurrentCulture.CompareInfo.IndexOf(this, value, startIndex, count, CompareOptions.None); case StringComparison.CurrentCultureIgnoreCase: return CultureInfo.CurrentCulture.CompareInfo.IndexOf(this, value, startIndex, count, CompareOptions.IgnoreCase); case StringComparison.InvariantCulture: return CultureInfo.InvariantCulture.CompareInfo.IndexOf(this, value, startIndex, count, CompareOptions.None); case StringComparison.InvariantCultureIgnoreCase: return CultureInfo.InvariantCulture.CompareInfo.IndexOf(this, value, startIndex, count, CompareOptions.IgnoreCase); case StringComparison.Ordinal: return CultureInfo.InvariantCulture.CompareInfo.IndexOf(this, value, 
startIndex, count, CompareOptions.Ordinal); case StringComparison.OrdinalIgnoreCase: if (value.IsAscii() && this.IsAscii()) return CultureInfo.InvariantCulture.CompareInfo.IndexOf(this, value, startIndex, count, CompareOptions.IgnoreCase); else return TextInfo.IndexOfStringOrdinalIgnoreCase(this, value, startIndex, count); default: throw new ArgumentException(SR.NotSupported_StringComparison, nameof(comparisonType)); } } // Returns the index of the last occurrence of a specified character in the current instance. // The search starts at startIndex and runs backwards to startIndex - count + 1. // The character at position startIndex is included in the search. startIndex is the larger // index within the string. // [Pure] public int LastIndexOf(char value) { return LastIndexOf(value, this.Length - 1, this.Length); } [Pure] public int LastIndexOf(char value, int startIndex) { return LastIndexOf(value, startIndex, startIndex + 1); } [Pure] public unsafe int LastIndexOf(char value, int startIndex, int count) { if (Length == 0) return -1; if (startIndex < 0 || startIndex >= Length) throw new ArgumentOutOfRangeException(nameof(startIndex), SR.ArgumentOutOfRange_Index); if (count < 0 || count - 1 > startIndex) throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_Count); fixed (char* pChars = &m_firstChar) { char* pCh = pChars + startIndex; //We search [startIndex..EndIndex] while (count >= 4) { if (*pCh == value) goto ReturnIndex; if (*(pCh - 1) == value) goto ReturnIndex1; if (*(pCh - 2) == value) goto ReturnIndex2; if (*(pCh - 3) == value) goto ReturnIndex3; count -= 4; pCh -= 4; } while (count > 0) { if (*pCh == value) goto ReturnIndex; count--; pCh--; } return -1; ReturnIndex3: pCh--; ReturnIndex2: pCh--; ReturnIndex1: pCh--; ReturnIndex: return (int)(pCh - pChars); } } // Returns the index of the last occurrence of any specified character in the current instance. // The search starts at startIndex and runs backwards to startIndex - count + 1. 
// The character at position startIndex is included in the search. startIndex is the larger
// index within the string.
//
//ForceInline ... Jit can't recognize String.get_Length to determine that this is "fluff"
[Pure]
public int LastIndexOfAny(char[] anyOf)
{
    return LastIndexOfAny(anyOf, this.Length - 1, this.Length);
}

[Pure]
public int LastIndexOfAny(char[] anyOf, int startIndex)
{
    return LastIndexOfAny(anyOf, startIndex, startIndex + 1);
}

// Implemented in the runtime (FCall); see the two wrappers above for the
// argument conventions.
[Pure]
[MethodImplAttribute(MethodImplOptions.InternalCall)]
public extern int LastIndexOfAny(char[] anyOf, int startIndex, int count);

// Returns the index of the last occurrence of any character in value in the current instance.
// The search starts at startIndex and runs backwards to startIndex - count + 1.
// The character at position startIndex is included in the search. startIndex is the larger
// index within the string.
//
[Pure]
public int LastIndexOf(String value)
{
    return LastIndexOf(value, this.Length - 1, this.Length, StringComparison.CurrentCulture);
}

[Pure]
public int LastIndexOf(String value, int startIndex)
{
    return LastIndexOf(value, startIndex, startIndex + 1, StringComparison.CurrentCulture);
}

[Pure]
public int LastIndexOf(String value, int startIndex, int count)
{
    if (count < 0)
    {
        throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_Count);
    }
    Contract.EndContractBlock();

    return LastIndexOf(value, startIndex, count, StringComparison.CurrentCulture);
}

[Pure]
public int LastIndexOf(String value, StringComparison comparisonType)
{
    return LastIndexOf(value, this.Length - 1, this.Length, comparisonType);
}

[Pure]
public int LastIndexOf(String value, int startIndex, StringComparison comparisonType)
{
    return LastIndexOf(value, startIndex, startIndex + 1, comparisonType);
}

[Pure]
public int LastIndexOf(String value, int startIndex, int count, StringComparison comparisonType)
{
    if (value == null)
        throw new ArgumentNullException(nameof(value));
    Contract.EndContractBlock();

    // Special case for 0 length input strings
    if (this.Length == 0 && (startIndex == -1 || startIndex == 0))
        return (value.Length == 0) ? 0 : -1;

    // Now after handling empty strings, make sure we're not out of range
    if (startIndex < 0 || startIndex > this.Length)
        throw new ArgumentOutOfRangeException(nameof(startIndex), SR.ArgumentOutOfRange_Index);

    // Make sure that we allow startIndex == this.Length
    if (startIndex == this.Length)
    {
        startIndex--;

        if (count > 0)
            count--;

        // If we are looking for nothing, just return 0
        if (value.Length == 0 && count >= 0 && startIndex - count + 1 >= 0)
            return startIndex;
    }

    // 2nd half of this also catches when startIndex == MAXINT, so MAXINT - 0 + 1 == -1, which is < 0.
    if (count < 0 || startIndex - count + 1 < 0)
        throw new ArgumentOutOfRangeException(nameof(count), SR.ArgumentOutOfRange_Count);

    // Dispatch on the requested comparison semantics (mirrors IndexOf above).
    switch (comparisonType)
    {
        case StringComparison.CurrentCulture:
            return CultureInfo.CurrentCulture.CompareInfo.LastIndexOf(this, value, startIndex, count, CompareOptions.None);

        case StringComparison.CurrentCultureIgnoreCase:
            return CultureInfo.CurrentCulture.CompareInfo.LastIndexOf(this, value, startIndex, count, CompareOptions.IgnoreCase);

        case StringComparison.InvariantCulture:
            return CultureInfo.InvariantCulture.CompareInfo.LastIndexOf(this, value, startIndex, count, CompareOptions.None);

        case StringComparison.InvariantCultureIgnoreCase:
            return CultureInfo.InvariantCulture.CompareInfo.LastIndexOf(this, value, startIndex, count, CompareOptions.IgnoreCase);

        case StringComparison.Ordinal:
            return CultureInfo.InvariantCulture.CompareInfo.LastIndexOf(this, value, startIndex, count, CompareOptions.Ordinal);

        case StringComparison.OrdinalIgnoreCase:
            // ASCII-only operands can use the culture-independent IgnoreCase
            // path; otherwise fall back to the ordinal-ignore-case helper.
            if (value.IsAscii() && this.IsAscii())
                return CultureInfo.InvariantCulture.CompareInfo.LastIndexOf(this, value, startIndex, count, CompareOptions.IgnoreCase);
            else
                return TextInfo.LastIndexOfStringOrdinalIgnoreCase(this, value, startIndex, count);

        default:
            throw new ArgumentException(SR.NotSupported_StringComparison, nameof(comparisonType));
    }
}
}
}
using UnityEngine;
using UnityEngine.Events;
using UnityEngine.UI;
using System.Collections;

namespace UIWidgets {
	/// <summary>
	/// Color picker RGB block.
	/// Hosts three slider + spinner pairs (R, G, B), keeps them synchronized
	/// with the current color, paints gradient backgrounds behind the sliders
	/// and raises OnChangeRGB when the user edits any component.
	/// </summary>
	[AddComponentMenu("UI/UIWidgets/ColorPicker RGB Block")]
	public class ColorPickerRGBBlock : MonoBehaviour {
		[SerializeField]
		Slider rSlider;

		/// <summary>
		/// Gets or sets the Red slider.
		/// </summary>
		/// <value>The Red slider.</value>
		public Slider RSlider {
			get {
				return rSlider;
			}
			set {
				SetRSlider(value);
			}
		}

		[SerializeField]
		Spinner rInput;

		/// <summary>
		/// Gets or sets the Red input.
		/// </summary>
		/// <value>The Red input.</value>
		public Spinner RInput {
			get {
				return rInput;
			}
			set {
				SetRInput(value);
			}
		}

		[SerializeField]
		Image rSliderBackground;

		/// <summary>
		/// Gets or sets the Red slider background.
		/// </summary>
		/// <value>The Red slider background.</value>
		public Image RSliderBackground {
			get {
				return rSliderBackground;
			}
			set {
				rSliderBackground = value;
				UpdateMaterial();
			}
		}

		[SerializeField]
		Slider gSlider;

		/// <summary>
		/// Gets or sets the Green slider.
		/// </summary>
		/// <value>The Green slider.</value>
		public Slider GSlider {
			get {
				return gSlider;
			}
			set {
				SetGSlider(value);
			}
		}

		[SerializeField]
		Spinner gInput;

		/// <summary>
		/// Gets or sets the Green input.
		/// </summary>
		/// <value>The Green input.</value>
		public Spinner GInput {
			get {
				return gInput;
			}
			set {
				SetGInput(value);
			}
		}

		[SerializeField]
		Image gSliderBackground;

		/// <summary>
		/// Gets or sets the Green slider background.
		/// </summary>
		/// <value>The Green slider background.</value>
		public Image GSliderBackground {
			get {
				return gSliderBackground;
			}
			set {
				gSliderBackground = value;
				UpdateMaterial();
			}
		}

		[SerializeField]
		Slider bSlider;

		/// <summary>
		/// Gets or sets the Blue slider.
		/// </summary>
		/// <value>The Blue slider.</value>
		public Slider BSlider {
			get {
				return bSlider;
			}
			set {
				SetBSlider(value);
			}
		}

		[SerializeField]
		Spinner bInput;

		/// <summary>
		/// Gets or sets the Blue input.
		/// </summary>
		/// <value>The Blue input.</value>
		public Spinner BInput {
			get {
				return bInput;
			}
			set {
				SetBInput(value);
			}
		}

		[SerializeField]
		Image bSliderBackground;

		/// <summary>
		/// Gets or sets the Blue slider background.
		/// </summary>
		/// <value>The Blue slider background.</value>
		public Image BSliderBackground {
			get {
				return bSliderBackground;
			}
			set {
				bSliderBackground = value;
				UpdateMaterial();
			}
		}

		[SerializeField]
		Shader defaultShader;

		/// <summary>
		/// Gets or sets the default shader to display gradients for sliders background.
		/// </summary>
		/// <value>The default shader.</value>
		public Shader DefaultShader {
			get {
				return defaultShader;
			}
			set {
				defaultShader = value;
				UpdateMaterial();
			}
		}

		ColorPickerInputMode inputMode;

		/// <summary>
		/// Gets or sets the input mode.
		/// This block is only visible while the picker is in RGB mode.
		/// </summary>
		/// <value>The input mode.</value>
		public ColorPickerInputMode InputMode {
			get {
				return inputMode;
			}
			set {
				inputMode = value;
				gameObject.SetActive(inputMode==ColorPickerInputMode.RGB);
				UpdateView();
			}
		}

		ColorPickerPaletteMode paletteMode;

		/// <summary>
		/// Gets or sets the palette mode.
		/// </summary>
		/// <value>The palette mode.</value>
		public ColorPickerPaletteMode PaletteMode {
			get {
				return paletteMode;
			}
			set {
				paletteMode = value;
			}
		}

		/// <summary>
		/// OnChangeRGB event.
		/// </summary>
		public ColorRGBChangedEvent OnChangeRGB = new ColorRGBChangedEvent();

		/// <summary>
		/// OnChangeHSV event.
		/// </summary>
		public ColorHSVChangedEvent OnChangeHSV = new ColorHSVChangedEvent();

		/// <summary>
		/// OnChangeAlpha event.
		/// </summary>
		public ColorAlphaChangedEvent OnChangeAlpha = new ColorAlphaChangedEvent();

		// Guard so Start() only runs its wiring once.
		bool isStarted;

		/// <summary>
		/// Start this instance.
		/// Re-assigns each serialized reference through its property so the
		/// value-changed listeners are attached and slider materials created.
		/// </summary>
		public virtual void Start()
		{
			if (isStarted)
			{
				return ;
			}
			isStarted = true;

			RSlider = rSlider;
			RInput = rInput;
			RSliderBackground = rSliderBackground;

			GSlider = gSlider;
			GInput = gInput;
			GSliderBackground = gSliderBackground;

			BSlider = bSlider;
			BInput = bInput;
			BSliderBackground = bSliderBackground;
		}

		/// <summary>
		/// This function is called when the object becomes enabled and active.
		/// </summary>
		protected virtual void OnEnable()
		{
			UpdateMaterial();
		}

		/// <summary>
		/// Sets the Red slider.
		/// Detaches the listener from the previous slider (if any) and attaches
		/// it to the new one.
		/// </summary>
		/// <param name="value">Value.</param>
		protected virtual void SetRSlider(Slider value)
		{
			if (rSlider!=null)
			{
				rSlider.onValueChanged.RemoveListener(SliderValueChanged);
			}
			rSlider = value;
			if (rSlider!=null)
			{
				rSlider.onValueChanged.AddListener(SliderValueChanged);
			}
		}

		/// <summary>
		/// Sets the Red input.
		/// </summary>
		/// <param name="value">Value.</param>
		protected virtual void SetRInput(Spinner value)
		{
			if (rInput!=null)
			{
				rInput.onValueChangeInt.RemoveListener(SpinnerValueChanged);
			}
			rInput = value;
			if (rInput!=null)
			{
				rInput.onValueChangeInt.AddListener(SpinnerValueChanged);
			}
		}

		/// <summary>
		/// Sets the Green slider.
		/// </summary>
		/// <param name="value">Value.</param>
		protected virtual void SetGSlider(Slider value)
		{
			if (gSlider!=null)
			{
				gSlider.onValueChanged.RemoveListener(SliderValueChanged);
			}
			gSlider = value;
			if (gSlider!=null)
			{
				gSlider.onValueChanged.AddListener(SliderValueChanged);
			}
		}

		/// <summary>
		/// Sets the Green input.
		/// </summary>
		/// <param name="value">Value.</param>
		protected virtual void SetGInput(Spinner value)
		{
			if (gInput!=null)
			{
				gInput.onValueChangeInt.RemoveListener(SpinnerValueChanged);
			}
			gInput = value;
			if (gInput!=null)
			{
				gInput.onValueChangeInt.AddListener(SpinnerValueChanged);
			}
		}

		/// <summary>
		/// Sets the Blue slider.
		/// </summary>
		/// <param name="value">Value.</param>
		protected virtual void SetBSlider(Slider value)
		{
			if (bSlider!=null)
			{
				bSlider.onValueChanged.RemoveListener(SliderValueChanged);
			}
			bSlider = value;
			if (bSlider!=null)
			{
				bSlider.onValueChanged.AddListener(SliderValueChanged);
			}
		}

		/// <summary>
		/// Sets the Blue input.
		/// </summary>
		/// <param name="value">Value.</param>
		protected virtual void SetBInput(Spinner value)
		{
			if (bInput!=null)
			{
				bInput.onValueChangeInt.RemoveListener(SpinnerValueChanged);
			}
			bInput = value;
			if (bInput!=null)
			{
				bInput.onValueChangeInt.AddListener(SpinnerValueChanged);
			}
		}

		// Spinner edits read the new component value from the spinners,
		// not the sliders (isSlider: false).
		void SpinnerValueChanged(int value)
		{
			ValueChanged(isSlider: false);
		}

		void SliderValueChanged(float value)
		{
			ValueChanged();
		}

		/// <summary>
		/// If in update mode?
		/// Suppresses change notifications while UpdateViewReal() writes values
		/// back into the controls.
		/// </summary>
		protected bool inUpdateMode;

		/// <summary>
		/// Values the changed.
		/// Builds a new color from the current control values (alpha unchanged)
		/// and raises OnChangeRGB.
		/// </summary>
		/// <param name="isSlider">Is slider value changed?</param>
		protected virtual void ValueChanged(bool isSlider=true)
		{
			if (inUpdateMode)
			{
				return ;
			}
			var color = new Color32(
				GetRed(isSlider),
				GetGreen(isSlider),
				GetBlue(isSlider),
				currentColor.a
			);
			OnChangeRGB.Invoke(color);
		}

		/// <summary>
		/// Gets the red.
		/// Prefers the slider, then the spinner, then the cached color.
		/// </summary>
		/// <param name="isSlider">Is slider value changed?</param>
		/// <returns>The red.</returns>
		protected byte GetRed(bool isSlider=true)
		{
			if ((rSlider!=null) && (isSlider))
			{
				return (byte)rSlider.value;
			}
			if (rInput!=null)
			{
				return (byte)rInput.Value;
			}
			return currentColor.r;
		}

		/// <summary>
		/// Gets the green.
		/// </summary>
		/// <param name="isSlider">Is slider value changed?</param>
		/// <returns>The green.</returns>
		protected byte GetGreen(bool isSlider=true)
		{
			if ((gSlider!=null) && (isSlider))
			{
				return (byte)gSlider.value;
			}
			if (gInput!=null)
			{
				return (byte)gInput.Value;
			}
			return currentColor.g;
		}

		/// <summary>
		/// Gets the blue.
		/// </summary>
		/// <param name="isSlider">Is slider value changed?</param>
		/// <returns>The blue.</returns>
		protected byte GetBlue(bool isSlider=true)
		{
			if ((bSlider!=null) && (isSlider))
			{
				return (byte)bSlider.value;
			}
			if (bInput!=null)
			{
				return (byte)bInput.Value;
			}
			return currentColor.b;
		}

		/// <summary>
		/// Current color.
		/// </summary>
		protected Color32 currentColor;

		/// <summary>
		/// Sets the color.
		/// </summary>
		/// <param name="color">Color.</param>
		public void SetColor(Color32 color)
		{
			currentColor = color;
			UpdateView();
		}

		/// <summary>
		/// Sets the color.
		/// </summary>
		/// <param name="color">Color.</param>
		public void SetColor(ColorHSV color)
		{
			currentColor = color;
			UpdateView();
		}

		/// <summary>
		/// Updates the view.
		/// On Unity 5.2-5.4 materials are recreated as well (UpdateMaterial
		/// ends by calling UpdateViewReal); later versions only refresh values.
		/// </summary>
		protected virtual void UpdateView()
		{
			#if UNITY_5_2 || UNITY_5_3 || UNITY_5_4
			UpdateMaterial();
			#else
			UpdateViewReal();
			#endif
		}

		/// <summary>
		/// Updates the view real.
		/// Pushes currentColor into sliders/spinners and repaints the gradient
		/// endpoints of each slider background; notifications are suppressed
		/// via inUpdateMode for the duration.
		/// </summary>
		protected virtual void UpdateViewReal()
		{
			inUpdateMode = true;

			if (rSlider!=null)
			{
				rSlider.value = currentColor.r;
			}
			if (rInput!=null)
			{
				rInput.Value = currentColor.r;
			}
			if (gSlider!=null)
			{
				gSlider.value = currentColor.g;
			}
			if (gInput!=null)
			{
				gInput.Value = currentColor.g;
			}
			if (bSlider!=null)
			{
				bSlider.value = currentColor.b;
			}
			if (bInput!=null)
			{
				bInput.Value = currentColor.b;
			}

			// Each gradient runs from the channel at 0 to the channel at 255,
			// with the other two channels fixed at their current values.
			if (rSliderBackground!=null)
			{
				rSliderBackground.material.SetColor("_ColorLeft", new Color32(0, currentColor.g, currentColor.b, 255));
				rSliderBackground.material.SetColor("_ColorRight", new Color32(255, currentColor.g, currentColor.b, 255));
			}
			if (gSliderBackground!=null)
			{
				gSliderBackground.material.SetColor("_ColorLeft", new Color32(currentColor.r, 0, currentColor.b, 255));
				gSliderBackground.material.SetColor("_ColorRight", new Color32(currentColor.r, 255, currentColor.b, 255));
			}
			if (bSliderBackground!=null)
			{
				bSliderBackground.material.SetColor("_ColorLeft", new Color32(currentColor.r, currentColor.g, 0, 255));
				bSliderBackground.material.SetColor("_ColorRight", new Color32(currentColor.r, currentColor.g, 255, 255));
			}

			inUpdateMode = false;
		}

		/// <summary>
		/// Updates the material.
		/// Creates a fresh material from the default shader for each slider
		/// background, then refreshes the view.
		/// </summary>
		protected virtual void UpdateMaterial()
		{
			if (defaultShader==null)
			{
				return ;
			}
			if (rSliderBackground!=null)
			{
				rSliderBackground.material = new Material(defaultShader);
			}
			if (gSliderBackground!=null)
			{
				gSliderBackground.material = new Material(defaultShader);
			}
			if (bSliderBackground!=null)
			{
				bSliderBackground.material = new Material(defaultShader);
			}

			UpdateViewReal();
		}

		/// <summary>
		/// This function is called when the MonoBehaviour will be destroyed.
		/// NOTE(review): fields are nulled directly, bypassing the Set* methods,
		/// so the value-changed listeners added in Start() are never removed —
		/// confirm this is intentional (the controls are usually destroyed
		/// alongside this component).
		/// </summary>
		protected virtual void OnDestroy()
		{
			rSlider = null;
			rInput = null;
			gSlider = null;
			gInput = null;
			bSlider = null;
			bInput = null;
		}
	}
}
// ****************************************************************
// This is free software licensed under the NUnit license. You
// may obtain a copy of the license as well as information regarding
// copyright ownership at http://nunit.org.
// ****************************************************************

using System;
using System.Collections;
using System.Reflection;
using System.Text.RegularExpressions;
using System.Text;

namespace NUnit.Core.Builders
{
	/// <summary>
	/// Built-in SuiteBuilder for NUnit TestFixture
	/// </summary>
	public class NUnitTestFixtureBuilder : Extensibility.ISuiteBuilder
	{
		#region Instance Fields
		/// <summary>
		/// The NUnitTestFixture being constructed;
		/// </summary>
		private NUnitTestFixture fixture;

		// Shared extension hosts used to build and decorate individual test
		// cases for the fixture under construction.
		private Extensibility.ITestCaseBuilder2 testBuilders = CoreExtensions.Host.TestBuilders;

		private Extensibility.ITestDecorator testDecorators = CoreExtensions.Host.TestDecorators;

		#endregion

		#region ISuiteBuilder Methods
		/// <summary>
		/// Checks to see if the fixture type has the TestFixtureAttribute
		/// </summary>
		/// <param name="type">The fixture type to check</param>
		/// <returns>True if the fixture can be built, false if not</returns>
		public bool CanBuildFrom(Type type)
		{
			// Abstract non-sealed types cannot be fixtures (abstract + sealed
			// is the compiler's encoding of a static class, which is allowed).
			if (type.IsAbstract && !type.IsSealed)
				return false;

			return Reflect.HasAttribute(type, NUnitFramework.TestFixtureAttribute, true)
				|| Reflect.HasMethodWithAttribute(type, NUnitFramework.TestAttribute, true)
				|| Reflect.HasMethodWithAttribute(type, NUnitFramework.TestCaseAttribute, true)
				|| Reflect.HasMethodWithAttribute(type, NUnitFramework.TestCaseSourceAttribute, true)
				|| Reflect.HasMethodWithAttribute(type, NUnitFramework.TheoryAttribute, true);
		}

		/// <summary>
		/// Build a TestSuite from type provided.
		/// </summary>
		/// <param name="type"></param>
		/// <returns></returns>
		public Test BuildFrom(Type type)
		{
			Attribute[] attrs = GetTestFixtureAttributes(type);

#if CLR_2_0 || CLR_4_0
			// Generic fixtures always go through the parameterized path so the
			// type arguments can be supplied per attribute.
			if (type.IsGenericType)
				return BuildMultipleFixtures(type, attrs);
#endif

			switch (attrs.Length)
			{
				case 0:
					return BuildSingleFixture(type, null);
				case 1:
					// A single attribute with constructor arguments still means
					// a parameterized fixture; without arguments it is plain.
					object[] args = (object[])Reflect.GetPropertyValue(attrs[0], "Arguments");
					return args == null || args.Length == 0
						? BuildSingleFixture(type, attrs[0])
						: BuildMultipleFixtures(type, attrs);
				default:
					return BuildMultipleFixtures(type, attrs);
			}
		}
		#endregion

		#region Helper Methods
		// Wraps one fixture per attribute in a ParameterizedFixtureSuite.
		private Test BuildMultipleFixtures(Type type, Attribute[] attrs)
		{
			TestSuite suite = new ParameterizedFixtureSuite(type);

			if (attrs.Length > 0)
			{
				foreach (Attribute attr in attrs)
					suite.Add(BuildSingleFixture(type, attr));
			}
			else
			{
				suite.RunState = RunState.NotRunnable;
				suite.IgnoreReason = "Generic fixture has no type arguments provided";
			}

			return suite;
		}

		// Builds one NUnitTestFixture from the type and an optional
		// TestFixture attribute carrying arguments/categories/ignore state.
		private Test BuildSingleFixture(Type type, Attribute attr)
		{
			object[] arguments = null;
			IList categories = null;

			if (attr != null)
			{
				arguments = (object[])Reflect.GetPropertyValue(attr, "Arguments");
				categories = Reflect.GetPropertyValue(attr, "Categories") as IList;

#if CLR_2_0 || CLR_4_0
				if (type.ContainsGenericParameters)
				{
					// Close the generic type, either from explicit TypeArgs or
					// by deducing them from the constructor arguments.
					Type[] typeArgs = (Type[])Reflect.GetPropertyValue(attr, "TypeArgs");
					if( typeArgs.Length > 0 ||
						TypeHelper.CanDeduceTypeArgsFromArgs(type, arguments, ref typeArgs))
					{
						type = TypeHelper.MakeGenericType(type, typeArgs);
					}
				}
#endif
			}

			this.fixture = new NUnitTestFixture(type, arguments);
			CheckTestFixtureIsValid(fixture);

			NUnitFramework.ApplyCommonAttributes(type, fixture);

			if (categories != null)
				foreach (string category in categories)
					fixture.Categories.Add(category);

			if (fixture.RunState == RunState.Runnable && attr != null)
			{
				object objIgnore = Reflect.GetPropertyValue(attr, "Ignore");
				if (objIgnore != null && (bool)objIgnore == true)
				{
					fixture.RunState = RunState.Ignored;
					fixture.IgnoreReason = (string)Reflect.GetPropertyValue(attr, "IgnoreReason");
				}
			}

			AddTestCases(type);

			return this.fixture;
		}

		/// <summary>
		/// Method to add test cases to the newly constructed fixture.
		/// The default implementation looks at each candidate method
		/// and tries to build a test case from it. It will only need
		/// to be overridden if some other approach, such as reading a
		/// datafile is used to generate test cases.
		/// </summary>
		/// <param name="fixtureType"></param>
		protected virtual void AddTestCases( Type fixtureType )
		{
			IList methods = fixtureType.GetMethods(
				BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance | BindingFlags.Static );

			foreach(MethodInfo method in methods)
			{
				Test test = BuildTestCase(method, this.fixture);

				if(test != null)
				{
					this.fixture.Add( test );
				}
			}
		}

		/// <summary>
		/// Method to create a test case from a MethodInfo and add
		/// it to the fixture being built. It first checks to see if
		/// any global TestCaseBuilder addin wants to build the
		/// test case. If not, it uses the internal builder
		/// collection maintained by this fixture builder. After
		/// building the test case, it applies any decorators
		/// that have been installed.
		///
		/// The default implementation has no test case builders.
		/// Derived classes should add builders to the collection
		/// in their constructor.
		/// </summary>
		/// <param name="method"></param>
		/// <returns></returns>
		private Test BuildTestCase( MethodInfo method, TestSuite suite )
		{
			Test test = testBuilders.BuildFrom( method, suite );

			if ( test != null )
				test = testDecorators.Decorate( test, method );

			return test;
		}

		// Marks the fixture NotRunnable when the type itself is invalid or,
		// for non-static classes, when no default constructor exists and no
		// constructor arguments were supplied.
		private void CheckTestFixtureIsValid(TestFixture fixture)
		{
			Type fixtureType = fixture.FixtureType;
			string reason = null;
			if (!IsValidFixtureType(fixtureType, ref reason))
			{
				fixture.RunState = RunState.NotRunnable;
				fixture.IgnoreReason = reason;
			}
			else if( !IsStaticClass( fixtureType ) )
			{
				// Postpone checking for constructor with arguments till we invoke it
				// since Type.GetConstructor doesn't handle null arguments well.
				if ( fixture.arguments == null || fixture.arguments.Length == 0 )
					if (Reflect.GetConstructor(fixtureType) == null)
					{
						fixture.RunState = RunState.NotRunnable;
						fixture.IgnoreReason = "No suitable constructor was found";
					}
			}
		}

		// abstract + sealed is the IL encoding of a C# static class.
		private static bool IsStaticClass(Type type)
		{
			return type.IsAbstract && type.IsSealed;
		}

		/// <summary>
		/// Check that the fixture type is valid. This method ensures that
		/// the type is not abstract and that there is no more than one of
		/// each setup or teardown method and that their signatures are correct.
		/// </summary>
		/// <param name="fixtureType">The type of the fixture to check</param>
		/// <param name="reason">A message indicating why the fixture is invalid</param>
		/// <returns>True if the fixture is valid, false if not</returns>
		private bool IsValidFixtureType(Type fixtureType, ref string reason)
		{
			//if (fixtureType.IsAbstract && !fixtureType.IsSealed)
			//{
			//    reason = string.Format("{0} is an abstract class", fixtureType.FullName);
			//    return false;
			//}

#if CLR_2_0 || CLR_4_0
			if ( fixtureType.ContainsGenericParameters )
			{
				reason = "Fixture type contains generic parameters. You must either provide " +
					"Type arguments or specify constructor arguments that allow NUnit " +
					"to deduce the Type arguments.";
				return false;
			}
#endif

			return NUnitFramework.CheckSetUpTearDownMethods(fixtureType, NUnitFramework.SetUpAttribute, ref reason)
				&& NUnitFramework.CheckSetUpTearDownMethods(fixtureType, NUnitFramework.TearDownAttribute, ref reason)
				&& NUnitFramework.CheckSetUpTearDownMethods(fixtureType, NUnitFramework.FixtureSetUpAttribute, ref reason)
				&& NUnitFramework.CheckSetUpTearDownMethods(fixtureType, NUnitFramework.FixtureTearDownAttribute, ref reason);
		}

		/// <summary>
		/// Get TestFixtureAttributes following a somewhat obscure
		/// set of rules to eliminate spurious duplication of fixtures.
		/// 1. If there are any attributes with args, they are the only
		/// ones returned and those without args are ignored.
		/// 2. No more than one attribute without args is ever returned.
		/// </summary>
		private static Attribute[] GetTestFixtureAttributes(Type type)
		{
			Attribute[] attrs = Reflect.GetAttributes(type, NUnitFramework.TestFixtureAttribute, true);

			// Just return - no possibility of duplication
			if (attrs.Length <= 1)
				return attrs;

			int withArgs = 0;
			bool[] hasArgs = new bool[attrs.Length];

			// Count and record those attrs with arguments
			for (int i = 0; i < attrs.Length; i++)
			{
				// NOTE(review): "Arguments" is dereferenced without a null
				// check here, unlike BuildFrom above which guards against a
				// null return — confirm Reflect.GetPropertyValue can never
				// yield null for this property on multi-attribute fixtures.
				object[] args = (object[])Reflect.GetPropertyValue(attrs[i], "Arguments");
				object[] typeArgs = (object[])Reflect.GetPropertyValue(attrs[i], "TypeArgs");

				if (args.Length > 0 || typeArgs != null && typeArgs.Length > 0)
				{
					withArgs++;
					hasArgs[i] = true;
				}
			}

			// If all attributes have args, just return them
			if (withArgs == attrs.Length)
				return attrs;

			// If all attributes are without args, just return the first found
			if (withArgs == 0)
				return new Attribute[] { attrs[0] };

			// Some of each type, so extract those with args
			int count = 0;
			Attribute[] result = new Attribute[withArgs];

			for (int i = 0; i < attrs.Length; i++)
				if (hasArgs[i])
					result[count++] = attrs[i];

			return result;
		}
		#endregion
	}
}
/* Copyright (C) 2008-2018 Peter Palotas, Jeffrey Jangli, Alexandr Normuradov
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.IO;
using System.Security;

namespace Alphaleonis.Win32.Filesystem
{
   // Transacted EnumerateFileSystemEntryInfos overloads. Every overload is a
   // thin wrapper that fills in defaults and delegates to
   // EnumerateFileSystemEntryInfosCore<T> (defined elsewhere in this partial
   // class).
   public static partial class Directory
   {
      /// <summary>[AlphaFS] Returns an enumerable collection of file system entries in a specified path.</summary>
      /// <returns>The matching file system entries. The type of the items is determined by the type <typeparamref name="T"/>.</returns>
      /// <exception cref="ArgumentException"/>
      /// <exception cref="ArgumentNullException"/>
      /// <exception cref="DirectoryNotFoundException"/>
      /// <exception cref="IOException"/>
      /// <exception cref="NotSupportedException"/>
      /// <exception cref="UnauthorizedAccessException"/>
      /// <typeparam name="T">The type to return. This may be one of the following types:
      ///    <list type="definition">
      ///    <item>
      ///       <term><see cref="FileSystemEntryInfo"/></term>
      ///       <description>This method will return instances of <see cref="FileSystemEntryInfo"/> instances.</description>
      ///    </item>
      ///    <item>
      ///       <term><see cref="FileSystemInfo"/></term>
      ///       <description>This method will return instances of <see cref="DirectoryInfo"/> and <see cref="FileInfo"/> instances.</description>
      ///    </item>
      ///    <item>
      ///       <term><see cref="string"/></term>
      ///       <description>This method will return the full path of each item.</description>
      ///    </item>
      ///    </list>
      /// </typeparam>
      /// <param name="transaction">The transaction.</param>
      /// <param name="path">The directory to search.</param>
      [SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "Infos")]
      [SecurityCritical]
      public static IEnumerable<T> EnumerateFileSystemEntryInfosTransacted<T>(KernelTransaction transaction, string path)
      {
         return EnumerateFileSystemEntryInfosCore<T>(null, transaction, path, Path.WildcardStarMatchAll, null, null, null, PathFormat.RelativePath);
      }


      /// <summary>[AlphaFS] Returns an enumerable collection of file system entries in a specified path.</summary>
      /// <returns>The matching file system entries. The type of the items is determined by the type <typeparamref name="T"/>.</returns>
      /// <exception cref="ArgumentException"/>
      /// <exception cref="ArgumentNullException"/>
      /// <exception cref="DirectoryNotFoundException"/>
      /// <exception cref="IOException"/>
      /// <exception cref="NotSupportedException"/>
      /// <exception cref="UnauthorizedAccessException"/>
      /// <typeparam name="T">The type to return. This may be one of the following types:
      ///    <list type="definition">
      ///    <item>
      ///       <term><see cref="FileSystemEntryInfo"/></term>
      ///       <description>This method will return instances of <see cref="FileSystemEntryInfo"/> instances.</description>
      ///    </item>
      ///    <item>
      ///       <term><see cref="FileSystemInfo"/></term>
      ///       <description>This method will return instances of <see cref="DirectoryInfo"/> and <see cref="FileInfo"/> instances.</description>
      ///    </item>
      ///    <item>
      ///       <term><see cref="string"/></term>
      ///       <description>This method will return the full path of each item.</description>
      ///    </item>
      ///    </list>
      /// </typeparam>
      /// <param name="transaction">The transaction.</param>
      /// <param name="path">The directory to search.</param>
      /// <param name="pathFormat">Indicates the format of the path parameter(s).</param>
      [SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "Infos")]
      [SecurityCritical]
      public static IEnumerable<T> EnumerateFileSystemEntryInfosTransacted<T>(KernelTransaction transaction, string path, PathFormat pathFormat)
      {
         return EnumerateFileSystemEntryInfosCore<T>(null, transaction, path, Path.WildcardStarMatchAll, null, null, null, pathFormat);
      }


      /// <summary>[AlphaFS] Returns an enumerable collection of file system entries in a specified path.</summary>
      /// <returns>The matching file system entries. The type of the items is determined by the type <typeparamref name="T"/>.</returns>
      /// <exception cref="ArgumentException"/>
      /// <exception cref="ArgumentNullException"/>
      /// <exception cref="DirectoryNotFoundException"/>
      /// <exception cref="IOException"/>
      /// <exception cref="NotSupportedException"/>
      /// <exception cref="UnauthorizedAccessException"/>
      /// <typeparam name="T">The type to return. This may be one of the following types:
      ///    <list type="definition">
      ///    <item>
      ///       <term><see cref="FileSystemEntryInfo"/></term>
      ///       <description>This method will return instances of <see cref="FileSystemEntryInfo"/> instances.</description>
      ///    </item>
      ///    <item>
      ///       <term><see cref="FileSystemInfo"/></term>
      ///       <description>This method will return instances of <see cref="DirectoryInfo"/> and <see cref="FileInfo"/> instances.</description>
      ///    </item>
      ///    <item>
      ///       <term><see cref="string"/></term>
      ///       <description>This method will return the full path of each item.</description>
      ///    </item>
      ///    </list>
      /// </typeparam>
      /// <param name="transaction">The transaction.</param>
      /// <param name="path">The directory to search.</param>
      /// <param name="options"><see cref="DirectoryEnumerationOptions"/> flags that specify how the directory is to be enumerated.</param>
      [SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "Infos")]
      [SecurityCritical]
      public static IEnumerable<T> EnumerateFileSystemEntryInfosTransacted<T>(KernelTransaction transaction, string path, DirectoryEnumerationOptions options)
      {
         return EnumerateFileSystemEntryInfosCore<T>(null, transaction, path, Path.WildcardStarMatchAll, null, options, null, PathFormat.RelativePath);
      }


      /// <summary>[AlphaFS] Returns an enumerable collection of file system entries in a specified path.</summary>
      /// <returns>The matching file system entries. The type of the items is determined by the type <typeparamref name="T"/>.</returns>
      /// <exception cref="ArgumentException"/>
      /// <exception cref="ArgumentNullException"/>
      /// <exception cref="DirectoryNotFoundException"/>
      /// <exception cref="IOException"/>
      /// <exception cref="NotSupportedException"/>
      /// <exception cref="UnauthorizedAccessException"/>
      /// <typeparam name="T">The type to return. This may be one of the following types:
      ///    <list type="definition">
      ///    <item>
      ///       <term><see cref="FileSystemEntryInfo"/></term>
      ///       <description>This method will return instances of <see cref="FileSystemEntryInfo"/> instances.</description>
      ///    </item>
      ///    <item>
      ///       <term><see cref="FileSystemInfo"/></term>
      ///       <description>This method will return instances of <see cref="DirectoryInfo"/> and <see cref="FileInfo"/> instances.</description>
      ///    </item>
      ///    <item>
      ///       <term><see cref="string"/></term>
      ///       <description>This method will return the full path of each item.</description>
      ///    </item>
      ///    </list>
      /// </typeparam>
      /// <param name="transaction">The transaction.</param>
      /// <param name="path">The directory to search.</param>
      /// <param name="options"><see cref="DirectoryEnumerationOptions"/> flags that specify how the directory is to be enumerated.</param>
      /// <param name="pathFormat">Indicates the format of the path parameter(s).</param>
      [SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "Infos")]
      [SecurityCritical]
      public static IEnumerable<T> EnumerateFileSystemEntryInfosTransacted<T>(KernelTransaction transaction, string path, DirectoryEnumerationOptions options, PathFormat pathFormat)
      {
         return EnumerateFileSystemEntryInfosCore<T>(null, transaction, path, Path.WildcardStarMatchAll, null, options, null, pathFormat);
      }


      /// <summary>[AlphaFS] Returns an enumerable collection of file system entries that match a <paramref name="searchPattern"/> in a specified path.</summary>
      /// <returns>The matching file system entries. The type of the items is determined by the type <typeparamref name="T"/>.</returns>
      /// <exception cref="ArgumentException"/>
      /// <exception cref="ArgumentNullException"/>
      /// <exception cref="DirectoryNotFoundException"/>
      /// <exception cref="IOException"/>
      /// <exception cref="NotSupportedException"/>
      /// <exception cref="UnauthorizedAccessException"/>
      /// <typeparam name="T">The type to return. This may be one of the following types:
      ///    <list type="definition">
      ///    <item>
      ///       <term><see cref="FileSystemEntryInfo"/></term>
      ///       <description>This method will return instances of <see cref="FileSystemEntryInfo"/> instances.</description>
      ///    </item>
      ///    <item>
      ///       <term><see cref="FileSystemInfo"/></term>
      ///       <description>This method will return instances of <see cref="DirectoryInfo"/> and <see cref="FileInfo"/> instances.</description>
      ///    </item>
      ///    <item>
      ///       <term><see cref="string"/></term>
      ///       <description>This method will return the full path of each item.</description>
      ///    </item>
      ///    </list>
      /// </typeparam>
      /// <param name="transaction">The transaction.</param>
      /// <param name="path">The directory to search.</param>
      /// <param name="searchPattern">
      ///    The search string to match against the names of directories in <paramref name="path"/>.
      ///    This parameter can contain a combination of valid literal path and wildcard
      ///    (<see cref="Path.WildcardStarMatchAll"/> and <see cref="Path.WildcardQuestion"/>) characters, but does not support regular expressions.
      /// </param>
      [SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "Infos")]
      [SecurityCritical]
      [Obsolete("Argument searchPattern is obsolete. The DirectoryEnumerationFilters argument provides better filter criteria.")]
      public static IEnumerable<T> EnumerateFileSystemEntryInfosTransacted<T>(KernelTransaction transaction, string path, string searchPattern)
      {
         return EnumerateFileSystemEntryInfosCore<T>(null, transaction, path, searchPattern, null, null, null, PathFormat.RelativePath);
      }


      /// <summary>[AlphaFS] Returns an enumerable collection of file system entries that match a <paramref name="searchPattern"/> in a specified path.</summary>
      /// <returns>The matching file system entries.
The type of the items is determined by the type <typeparamref name="T"/>.</returns> /// <exception cref="ArgumentException"/> /// <exception cref="ArgumentNullException"/> /// <exception cref="DirectoryNotFoundException"/> /// <exception cref="IOException"/> /// <exception cref="NotSupportedException"/> /// <exception cref="UnauthorizedAccessException"/> /// <typeparam name="T">The type to return. This may be one of the following types: /// <list type="definition"> /// <item> /// <term><see cref="FileSystemEntryInfo"/></term> /// <description>This method will return instances of <see cref="FileSystemEntryInfo"/> instances.</description> /// </item> /// <item> /// <term><see cref="FileSystemInfo"/></term> /// <description>This method will return instances of <see cref="DirectoryInfo"/> and <see cref="FileInfo"/> instances.</description> /// </item> /// <item> /// <term><see cref="string"/></term> /// <description>This method will return the full path of each item.</description> /// </item> /// </list> /// </typeparam> /// <param name="transaction">The transaction.</param> /// <param name="path">The directory to search.</param> /// <param name="searchPattern"> /// The search string to match against the names of directories in <paramref name="path"/>. /// This parameter can contain a combination of valid literal path and wildcard /// (<see cref="Path.WildcardStarMatchAll"/> and <see cref="Path.WildcardQuestion"/>) characters, but does not support regular expressions. /// </param> /// <param name="pathFormat">Indicates the format of the path parameter(s).</param> [SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "Infos")] [SecurityCritical] [Obsolete("Argument searchPattern is obsolete. 
The DirectoryEnumerationFilters argument provides better filter criteria.")] public static IEnumerable<T> EnumerateFileSystemEntryInfosTransacted<T>(KernelTransaction transaction, string path, string searchPattern, PathFormat pathFormat) { return EnumerateFileSystemEntryInfosCore<T>(null, transaction, path, searchPattern, null, null, null, pathFormat); } /// <summary>[AlphaFS] Returns an enumerable collection of file system entries that match a <paramref name="searchPattern"/> in a specified path using <see cref="DirectoryEnumerationOptions"/>.</summary> /// <returns>The matching file system entries. The type of the items is determined by the type <typeparamref name="T"/>.</returns> /// <exception cref="ArgumentException"/> /// <exception cref="ArgumentNullException"/> /// <exception cref="DirectoryNotFoundException"/> /// <exception cref="IOException"/> /// <exception cref="NotSupportedException"/> /// <exception cref="UnauthorizedAccessException"/> /// <typeparam name="T">The type to return. This may be one of the following types: /// <list type="definition"> /// <item> /// <term><see cref="FileSystemEntryInfo"/></term> /// <description>This method will return instances of <see cref="FileSystemEntryInfo"/> instances.</description> /// </item> /// <item> /// <term><see cref="FileSystemInfo"/></term> /// <description>This method will return instances of <see cref="DirectoryInfo"/> and <see cref="FileInfo"/> instances.</description> /// </item> /// <item> /// <term><see cref="string"/></term> /// <description>This method will return the full path of each item.</description> /// </item> /// </list> /// </typeparam> /// <param name="transaction">The transaction.</param> /// <param name="path">The directory to search.</param> /// <param name="searchPattern"> /// The search string to match against the names of directories in <paramref name="path"/>. 
/// This parameter can contain a combination of valid literal path and wildcard /// (<see cref="Path.WildcardStarMatchAll"/> and <see cref="Path.WildcardQuestion"/>) characters, but does not support regular expressions. /// </param> /// <param name="options"><see cref="DirectoryEnumerationOptions"/> flags that specify how the directory is to be enumerated.</param> [SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "Infos")] [SecurityCritical] [Obsolete("Argument searchPattern is obsolete. The DirectoryEnumerationFilters argument provides better filter criteria.")] public static IEnumerable<T> EnumerateFileSystemEntryInfosTransacted<T>(KernelTransaction transaction, string path, string searchPattern, DirectoryEnumerationOptions options) { return EnumerateFileSystemEntryInfosCore<T>(null, transaction, path, searchPattern, null, options, null, PathFormat.RelativePath); } /// <summary>[AlphaFS] Returns an enumerable collection of file system entries that match a <paramref name="searchPattern"/> in a specified path using <see cref="DirectoryEnumerationOptions"/>.</summary> /// <returns>The matching file system entries. The type of the items is determined by the type <typeparamref name="T"/>.</returns> /// <exception cref="ArgumentException"/> /// <exception cref="ArgumentNullException"/> /// <exception cref="DirectoryNotFoundException"/> /// <exception cref="IOException"/> /// <exception cref="NotSupportedException"/> /// <exception cref="UnauthorizedAccessException"/> /// <typeparam name="T">The type to return. 
This may be one of the following types: /// <list type="definition"> /// <item> /// <term><see cref="FileSystemEntryInfo"/></term> /// <description>This method will return instances of <see cref="FileSystemEntryInfo"/> instances.</description> /// </item> /// <item> /// <term><see cref="FileSystemInfo"/></term> /// <description>This method will return instances of <see cref="DirectoryInfo"/> and <see cref="FileInfo"/> instances.</description> /// </item> /// <item> /// <term><see cref="string"/></term> /// <description>This method will return the full path of each item.</description> /// </item> /// </list> /// </typeparam> /// <param name="transaction">The transaction.</param> /// <param name="path">The directory to search.</param> /// <param name="searchPattern"> /// The search string to match against the names of directories in <paramref name="path"/>. /// This parameter can contain a combination of valid literal path and wildcard /// (<see cref="Path.WildcardStarMatchAll"/> and <see cref="Path.WildcardQuestion"/>) characters, but does not support regular expressions. /// </param> /// <param name="options"><see cref="DirectoryEnumerationOptions"/> flags that specify how the directory is to be enumerated.</param> /// <param name="pathFormat">Indicates the format of the path parameter(s).</param> [SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "Infos")] [SecurityCritical] [Obsolete("Argument searchPattern is obsolete. 
The DirectoryEnumerationFilters argument provides better filter criteria.")] public static IEnumerable<T> EnumerateFileSystemEntryInfosTransacted<T>(KernelTransaction transaction, string path, string searchPattern, DirectoryEnumerationOptions options, PathFormat pathFormat) { return EnumerateFileSystemEntryInfosCore<T>(null, transaction, path, searchPattern, null, options, null, pathFormat); } /// <summary>[AlphaFS] Returns an enumerable collection of file system entries in a specified path.</summary> /// <returns>The matching file system entries. The type of the items is determined by the type <typeparamref name="T"/>.</returns> /// <exception cref="ArgumentException"/> /// <exception cref="ArgumentNullException"/> /// <exception cref="DirectoryNotFoundException"/> /// <exception cref="IOException"/> /// <exception cref="NotSupportedException"/> /// <exception cref="UnauthorizedAccessException"/> /// <typeparam name="T">The type to return. This may be one of the following types: /// <list type="definition"> /// <item> /// <term><see cref="FileSystemEntryInfo"/></term> /// <description>This method will return instances of <see cref="FileSystemEntryInfo"/> instances.</description> /// </item> /// <item> /// <term><see cref="FileSystemInfo"/></term> /// <description>This method will return instances of <see cref="DirectoryInfo"/> and <see cref="FileInfo"/> instances.</description> /// </item> /// <item> /// <term><see cref="string"/></term> /// <description>This method will return the full path of each item.</description> /// </item> /// </list> /// </typeparam> /// <param name="transaction">The transaction.</param> /// <param name="path">The directory to search.</param> /// <param name="filters">The specification of custom filters to be used in the process.</param> [SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "Infos")] [SecurityCritical] public static IEnumerable<T> 
EnumerateFileSystemEntryInfosTransacted<T>(KernelTransaction transaction, string path, DirectoryEnumerationFilters filters) { return EnumerateFileSystemEntryInfosCore<T>(null, transaction, path, Path.WildcardStarMatchAll, null, null, filters, PathFormat.RelativePath); } /// <summary>[AlphaFS] Returns an enumerable collection of file system entries in a specified path.</summary> /// <returns>The matching file system entries. The type of the items is determined by the type <typeparamref name="T"/>.</returns> /// <exception cref="ArgumentException"/> /// <exception cref="ArgumentNullException"/> /// <exception cref="DirectoryNotFoundException"/> /// <exception cref="IOException"/> /// <exception cref="NotSupportedException"/> /// <exception cref="UnauthorizedAccessException"/> /// <typeparam name="T">The type to return. This may be one of the following types: /// <list type="definition"> /// <item> /// <term><see cref="FileSystemEntryInfo"/></term> /// <description>This method will return instances of <see cref="FileSystemEntryInfo"/> instances.</description> /// </item> /// <item> /// <term><see cref="FileSystemInfo"/></term> /// <description>This method will return instances of <see cref="DirectoryInfo"/> and <see cref="FileInfo"/> instances.</description> /// </item> /// <item> /// <term><see cref="string"/></term> /// <description>This method will return the full path of each item.</description> /// </item> /// </list> /// </typeparam> /// <param name="transaction">The transaction.</param> /// <param name="path">The directory to search.</param> /// <param name="filters">The specification of custom filters to be used in the process.</param> /// <param name="pathFormat">Indicates the format of the path parameter(s).</param> [SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "Infos")] [SecurityCritical] public static IEnumerable<T> EnumerateFileSystemEntryInfosTransacted<T>(KernelTransaction transaction, string path, 
DirectoryEnumerationFilters filters, PathFormat pathFormat) { return EnumerateFileSystemEntryInfosCore<T>(null, transaction, path, Path.WildcardStarMatchAll, null, null, filters, pathFormat); } /// <summary>[AlphaFS] Returns an enumerable collection of file system entries in a specified path.</summary> /// <returns>The matching file system entries. The type of the items is determined by the type <typeparamref name="T"/>.</returns> /// <exception cref="ArgumentException"/> /// <exception cref="ArgumentNullException"/> /// <exception cref="DirectoryNotFoundException"/> /// <exception cref="IOException"/> /// <exception cref="NotSupportedException"/> /// <exception cref="UnauthorizedAccessException"/> /// <typeparam name="T">The type to return. This may be one of the following types: /// <list type="definition"> /// <item> /// <term><see cref="FileSystemEntryInfo"/></term> /// <description>This method will return instances of <see cref="FileSystemEntryInfo"/> instances.</description> /// </item> /// <item> /// <term><see cref="FileSystemInfo"/></term> /// <description>This method will return instances of <see cref="DirectoryInfo"/> and <see cref="FileInfo"/> instances.</description> /// </item> /// <item> /// <term><see cref="string"/></term> /// <description>This method will return the full path of each item.</description> /// </item> /// </list> /// </typeparam> /// <param name="transaction">The transaction.</param> /// <param name="path">The directory to search.</param> /// <param name="options"><see cref="DirectoryEnumerationOptions"/> flags that specify how the directory is to be enumerated.</param> /// <param name="filters">The specification of custom filters to be used in the process.</param> [SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "Infos")] [SecurityCritical] public static IEnumerable<T> EnumerateFileSystemEntryInfosTransacted<T>(KernelTransaction transaction, string path, DirectoryEnumerationOptions 
options, DirectoryEnumerationFilters filters) { return EnumerateFileSystemEntryInfosCore<T>(null, transaction, path, Path.WildcardStarMatchAll, null, options, filters, PathFormat.RelativePath); } /// <summary>[AlphaFS] Returns an enumerable collection of file system entries in a specified path.</summary> /// <returns>The matching file system entries. The type of the items is determined by the type <typeparamref name="T"/>.</returns> /// <exception cref="ArgumentException"/> /// <exception cref="ArgumentNullException"/> /// <exception cref="DirectoryNotFoundException"/> /// <exception cref="IOException"/> /// <exception cref="NotSupportedException"/> /// <exception cref="UnauthorizedAccessException"/> /// <typeparam name="T">The type to return. This may be one of the following types: /// <list type="definition"> /// <item> /// <term><see cref="FileSystemEntryInfo"/></term> /// <description>This method will return instances of <see cref="FileSystemEntryInfo"/> instances.</description> /// </item> /// <item> /// <term><see cref="FileSystemInfo"/></term> /// <description>This method will return instances of <see cref="DirectoryInfo"/> and <see cref="FileInfo"/> instances.</description> /// </item> /// <item> /// <term><see cref="string"/></term> /// <description>This method will return the full path of each item.</description> /// </item> /// </list> /// </typeparam> /// <param name="transaction">The transaction.</param> /// <param name="path">The directory to search.</param> /// <param name="options"><see cref="DirectoryEnumerationOptions"/> flags that specify how the directory is to be enumerated.</param> /// <param name="filters">The specification of custom filters to be used in the process.</param> /// <param name="pathFormat">Indicates the format of the path parameter(s).</param> [SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "Infos")] [SecurityCritical] public static IEnumerable<T> 
EnumerateFileSystemEntryInfosTransacted<T>(KernelTransaction transaction, string path, DirectoryEnumerationOptions options, DirectoryEnumerationFilters filters, PathFormat pathFormat) { return EnumerateFileSystemEntryInfosCore<T>(null, transaction, path, Path.WildcardStarMatchAll, null, options, filters, pathFormat); } /// <summary>[AlphaFS] Returns an enumerable collection of file system entries that match a <paramref name="searchPattern"/> in a specified path.</summary> /// <returns>The matching file system entries. The type of the items is determined by the type <typeparamref name="T"/>.</returns> /// <exception cref="ArgumentException"/> /// <exception cref="ArgumentNullException"/> /// <exception cref="DirectoryNotFoundException"/> /// <exception cref="IOException"/> /// <exception cref="NotSupportedException"/> /// <exception cref="UnauthorizedAccessException"/> /// <typeparam name="T">The type to return. This may be one of the following types: /// <list type="definition"> /// <item> /// <term><see cref="FileSystemEntryInfo"/></term> /// <description>This method will return instances of <see cref="FileSystemEntryInfo"/> instances.</description> /// </item> /// <item> /// <term><see cref="FileSystemInfo"/></term> /// <description>This method will return instances of <see cref="DirectoryInfo"/> and <see cref="FileInfo"/> instances.</description> /// </item> /// <item> /// <term><see cref="string"/></term> /// <description>This method will return the full path of each item.</description> /// </item> /// </list> /// </typeparam> /// <param name="transaction">The transaction.</param> /// <param name="path">The directory to search.</param> /// <param name="searchPattern"> /// The search string to match against the names of directories in <paramref name="path"/>. 
/// This parameter can contain a combination of valid literal path and wildcard /// (<see cref="Path.WildcardStarMatchAll"/> and <see cref="Path.WildcardQuestion"/>) characters, but does not support regular expressions. /// </param> /// <param name="filters">The specification of custom filters to be used in the process.</param> [SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "Infos")] [SecurityCritical] [Obsolete("Argument searchPattern is obsolete. The DirectoryEnumerationFilters argument provides better filter criteria.")] public static IEnumerable<T> EnumerateFileSystemEntryInfosTransacted<T>(KernelTransaction transaction, string path, string searchPattern, DirectoryEnumerationFilters filters) { return EnumerateFileSystemEntryInfosCore<T>(null, transaction, path, searchPattern, null, null, filters, PathFormat.RelativePath); } /// <summary>[AlphaFS] Returns an enumerable collection of file system entries that match a <paramref name="searchPattern"/> in a specified path.</summary> /// <returns>The matching file system entries. The type of the items is determined by the type <typeparamref name="T"/>.</returns> /// <exception cref="ArgumentException"/> /// <exception cref="ArgumentNullException"/> /// <exception cref="DirectoryNotFoundException"/> /// <exception cref="IOException"/> /// <exception cref="NotSupportedException"/> /// <exception cref="UnauthorizedAccessException"/> /// <typeparam name="T">The type to return. 
This may be one of the following types: /// <list type="definition"> /// <item> /// <term><see cref="FileSystemEntryInfo"/></term> /// <description>This method will return instances of <see cref="FileSystemEntryInfo"/> instances.</description> /// </item> /// <item> /// <term><see cref="FileSystemInfo"/></term> /// <description>This method will return instances of <see cref="DirectoryInfo"/> and <see cref="FileInfo"/> instances.</description> /// </item> /// <item> /// <term><see cref="string"/></term> /// <description>This method will return the full path of each item.</description> /// </item> /// </list> /// </typeparam> /// <param name="transaction">The transaction.</param> /// <param name="path">The directory to search.</param> /// <param name="searchPattern"> /// The search string to match against the names of directories in <paramref name="path"/>. /// This parameter can contain a combination of valid literal path and wildcard /// (<see cref="Path.WildcardStarMatchAll"/> and <see cref="Path.WildcardQuestion"/>) characters, but does not support regular expressions. /// </param> /// <param name="filters">The specification of custom filters to be used in the process.</param> /// <param name="pathFormat">Indicates the format of the path parameter(s).</param> [SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "Infos")] [SecurityCritical] [Obsolete("Argument searchPattern is obsolete. 
The DirectoryEnumerationFilters argument provides better filter criteria.")] public static IEnumerable<T> EnumerateFileSystemEntryInfosTransacted<T>(KernelTransaction transaction, string path, string searchPattern, DirectoryEnumerationFilters filters, PathFormat pathFormat) { return EnumerateFileSystemEntryInfosCore<T>(null, transaction, path, searchPattern, null, null, filters, pathFormat); } /// <summary>[AlphaFS] Returns an enumerable collection of file system entries that match a <paramref name="searchPattern"/> in a specified path.</summary> /// <returns>The matching file system entries. The type of the items is determined by the type <typeparamref name="T"/>.</returns> /// <exception cref="ArgumentException"/> /// <exception cref="ArgumentNullException"/> /// <exception cref="DirectoryNotFoundException"/> /// <exception cref="IOException"/> /// <exception cref="NotSupportedException"/> /// <exception cref="UnauthorizedAccessException"/> /// <typeparam name="T">The type to return. This may be one of the following types: /// <list type="definition"> /// <item> /// <term><see cref="FileSystemEntryInfo"/></term> /// <description>This method will return instances of <see cref="FileSystemEntryInfo"/> instances.</description> /// </item> /// <item> /// <term><see cref="FileSystemInfo"/></term> /// <description>This method will return instances of <see cref="DirectoryInfo"/> and <see cref="FileInfo"/> instances.</description> /// </item> /// <item> /// <term><see cref="string"/></term> /// <description>This method will return the full path of each item.</description> /// </item> /// </list> /// </typeparam> /// <param name="transaction">The transaction.</param> /// <param name="path">The directory to search.</param> /// <param name="searchPattern"> /// The search string to match against the names of directories in <paramref name="path"/>. 
/// This parameter can contain a combination of valid literal path and wildcard /// (<see cref="Path.WildcardStarMatchAll"/> and <see cref="Path.WildcardQuestion"/>) characters, but does not support regular expressions. /// </param> /// <param name="options"><see cref="DirectoryEnumerationOptions"/> flags that specify how the directory is to be enumerated.</param> /// <param name="filters">The specification of custom filters to be used in the process.</param> [SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "Infos")] [SecurityCritical] [Obsolete("Argument searchPattern is obsolete. The DirectoryEnumerationFilters argument provides better filter criteria.")] public static IEnumerable<T> EnumerateFileSystemEntryInfosTransacted<T>(KernelTransaction transaction, string path, string searchPattern, DirectoryEnumerationOptions options, DirectoryEnumerationFilters filters) { return EnumerateFileSystemEntryInfosCore<T>(null, transaction, path, searchPattern, null, options, filters, PathFormat.RelativePath); } /// <summary>[AlphaFS] Returns an enumerable collection of file system entries that match a <paramref name="searchPattern"/> in a specified path.</summary> /// <returns>The matching file system entries. The type of the items is determined by the type <typeparamref name="T"/>.</returns> /// <exception cref="ArgumentException"/> /// <exception cref="ArgumentNullException"/> /// <exception cref="DirectoryNotFoundException"/> /// <exception cref="IOException"/> /// <exception cref="NotSupportedException"/> /// <exception cref="UnauthorizedAccessException"/> /// <typeparam name="T">The type to return. 
This may be one of the following types: /// <list type="definition"> /// <item> /// <term><see cref="FileSystemEntryInfo"/></term> /// <description>This method will return instances of <see cref="FileSystemEntryInfo"/> instances.</description> /// </item> /// <item> /// <term><see cref="FileSystemInfo"/></term> /// <description>This method will return instances of <see cref="DirectoryInfo"/> and <see cref="FileInfo"/> instances.</description> /// </item> /// <item> /// <term><see cref="string"/></term> /// <description>This method will return the full path of each item.</description> /// </item> /// </list> /// </typeparam> /// <param name="transaction">The transaction.</param> /// <param name="path">The directory to search.</param> /// <param name="searchPattern"> /// The search string to match against the names of directories in <paramref name="path"/>. /// This parameter can contain a combination of valid literal path and wildcard /// (<see cref="Path.WildcardStarMatchAll"/> and <see cref="Path.WildcardQuestion"/>) characters, but does not support regular expressions. /// </param> /// <param name="options"><see cref="DirectoryEnumerationOptions"/> flags that specify how the directory is to be enumerated.</param> /// <param name="filters">The specification of custom filters to be used in the process.</param> /// <param name="pathFormat">Indicates the format of the path parameter(s).</param> [SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", MessageId = "Infos")] [SecurityCritical] [Obsolete("Argument searchPattern is obsolete. 
The DirectoryEnumerationFilters argument provides better filter criteria.")] public static IEnumerable<T> EnumerateFileSystemEntryInfosTransacted<T>(KernelTransaction transaction, string path, string searchPattern, DirectoryEnumerationOptions options, DirectoryEnumerationFilters filters, PathFormat pathFormat) { return EnumerateFileSystemEntryInfosCore<T>(null, transaction, path, searchPattern, null, options, filters, pathFormat); } } }
using System;
using System.Collections.Generic;

namespace AvalonAssets.Fyjelu
{
    /// <summary>
    ///     A simple, stack-based <see cref="IObjectPool{T}" /> implementation.
    /// </summary>
    /// <remarks>
    ///     Given an <see cref="IObjectFactory{T}" />, this class maintains a simple pool of instances.
    ///     A finite number of idle instances is enforced, but when the pool is empty, new instances are created to
    ///     support the new load. Hence this class places no limit on the number of active instances created by the pool.
    /// </remarks>
    /// <typeparam name="T">The type of objects held in this pool.</typeparam>
    public class StackObjectPool<T> : AbstractObjectPool<T> where T : class
    {
        // Default initial capacity of the idle-instance stack.
        private const int DefaultPoolSize = 4;
        // Default upper bound on the number of idle instances retained.
        private const int DefaultMaxPoolSize = 20;

        private readonly IObjectFactory<T> _factory;
        private readonly int _maxPoolSize;
        private readonly Stack<T> _pool;

        /// <summary>
        ///     Creates a new <see cref="StackObjectPool{T}" /> using the specified factory to create new instances,
        ///     limiting the maximum number of idle instances to the default value.
        /// </summary>
        /// <param name="factory">The <see cref="IObjectFactory{T}" /> used to populate the pool.</param>
        public StackObjectPool(IObjectFactory<T> factory)
            : this(factory, DefaultMaxPoolSize, DefaultPoolSize)
        {
        }

        /// <summary>
        ///     Creates a new <see cref="StackObjectPool{T}" /> using the specified factory to create new instances,
        ///     limiting the maximum number of idle instances to <paramref name="maxPoolSize" />.
        /// </summary>
        /// <param name="factory">The <see cref="IObjectFactory{T}" /> used to populate the pool.</param>
        /// <param name="maxPoolSize">Maximum number of idle instances retained by the pool.</param>
        public StackObjectPool(IObjectFactory<T> factory, int maxPoolSize)
            : this(factory, maxPoolSize, DefaultPoolSize)
        {
        }

        /// <summary>
        ///     Creates a new <see cref="StackObjectPool{T}" /> using the specified factory to create new instances.
        /// </summary>
        /// <remarks>
        ///     The pool is not pre-populated. <paramref name="initialPoolSize" /> only determines the initial capacity
        ///     of the underlying container, which can grow beyond this value when
        ///     <paramref name="maxPoolSize" /> &gt; <paramref name="initialPoolSize" />.
        /// </remarks>
        /// <param name="factory">The <see cref="IObjectFactory{T}" /> used to populate the pool.</param>
        /// <param name="maxPoolSize">Maximum number of idle instances retained by the pool.</param>
        /// <param name="initialPoolSize">Initial capacity of the pool.</param>
        /// <exception cref="ArgumentNullException"><paramref name="factory" /> is null.</exception>
        /// <exception cref="ArgumentOutOfRangeException">
        ///     <paramref name="maxPoolSize" /> is not positive, or <paramref name="initialPoolSize" /> is not in the
        ///     range (0, <paramref name="maxPoolSize" />].
        /// </exception>
        public StackObjectPool(IObjectFactory<T> factory, int maxPoolSize, int initialPoolSize)
            : base(PoolExceptionHandleOption.Swallow)
        {
            if (factory == null)
                throw new ArgumentNullException("factory");
            if (maxPoolSize <= 0)
                throw new ArgumentOutOfRangeException("maxPoolSize");
            if (initialPoolSize <= 0 || initialPoolSize > maxPoolSize)
                throw new ArgumentOutOfRangeException("initialPoolSize");

            _factory = factory;
            _pool = new Stack<T>(initialPoolSize);
            _maxPoolSize = maxPoolSize;
        }

        /// <summary>
        ///     Allocates an object from the pool.
        /// </summary>
        /// <remarks>
        ///     <para>
        ///         If idle instances are available on the stack, the top element is popped, activated and returned to
        ///         the client. If activation fails, the instance is destroyed and the next idle instance is tried.
        ///         When no idle instance is available, <see cref="IObjectFactory{T}.Make" /> is invoked to create a
        ///         new instance.
        ///     </para>
        ///     <para>
        ///         All instances are passed through <see cref="IObjectFactory{T}.Activate" /> before being returned.
        ///     </para>
        /// </remarks>
        /// <returns>An instance from the pool.</returns>
        /// <exception cref="InvalidOperationException">The factory returned null from <see cref="IObjectFactory{T}.Make" />.</exception>
        /// <exception cref="Exception">
        ///     When <see cref="IObjectFactory{T}.Make" /> or <see cref="IObjectFactory{T}.Activate" /> throws an
        ///     exception and rethrow is enabled.
        /// </exception>
        public override T Allocate()
        {
            T obj = null;
            while (obj == null)
            {
                if (_pool.Count > 0)
                {
                    // Try to reuse a cached instance from the stack.
                    obj = _pool.Pop();
                    try
                    {
                        _factory.Activate(obj);
                    }
                    catch (Exception activateException)
                    {
                        // Failed activation; check the exception rethrow setting.
                        CheckExceptionRethrow(activateException);
                        try
                        {
                            // Destroy the instance that failed to activate.
                            _factory.Destory(obj);
                        }
                        catch (Exception destoryException)
                        {
                            // Failed to destroy; check the exception rethrow setting.
                            CheckExceptionRethrow(destoryException);
                        }
                        // BUG FIX: this reset previously lived in a finally block, which also
                        // discarded successfully activated instances and drained the whole pool.
                        // Only retry with the next instance after a *failed* activation.
                        obj = null;
                    }
                }
                else
                {
                    // No cached instance available; create a new one.
                    obj = _factory.Make();
                    if (obj == null)
                        throw new InvalidOperationException("Factory failed to create an object.");
                }
            }
            return obj;
        }

        /// <summary>
        ///     Frees an instance to the pool, pushing it on top of the idle instance stack after successful
        ///     deactivation.
        /// </summary>
        /// <param name="obj">An instance previously obtained from <see cref="Allocate" />.</param>
        /// <remarks>
        ///     <para>
        ///         The returning instance is destroyed if deactivation throws an exception, or if the stack is
        ///         already full.
        ///     </para>
        ///     <para>
        ///         Exceptions thrown by <see cref="IObjectFactory{T}.Deactivate" /> or
        ///         <see cref="IObjectFactory{T}.Destory" /> are silently swallowed unless rethrow is enabled.
        ///     </para>
        /// </remarks>
        /// <exception cref="ArgumentNullException"><paramref name="obj" /> is null.</exception>
        /// <exception cref="Exception">
        ///     When <see cref="IObjectFactory{T}.Deactivate" /> or <see cref="IObjectFactory{T}.Destory" /> throws an
        ///     exception and rethrow is enabled.
        /// </exception>
        public override void Free(T obj)
        {
            // Guard: pushing null would poison the pool and crash a later Allocate.
            if (obj == null)
                throw new ArgumentNullException("obj");

            // Destroy instead of caching when the pool is already full.
            var shouldDestroy = _pool.Count >= _maxPoolSize;
            try
            {
                _factory.Deactivate(obj);
            }
            catch (Exception deactivateException)
            {
                // Failed deactivation; check the exception rethrow setting.
                CheckExceptionRethrow(deactivateException);
                shouldDestroy = true; // Destroy objects that fail to deactivate.
            }

            if (shouldDestroy)
            {
                try
                {
                    _factory.Destory(obj);
                }
                catch (Exception destoryException)
                {
                    // Failed to destroy; check the exception rethrow setting.
                    CheckExceptionRethrow(destoryException);
                }
            }
            else
            {
                _pool.Push(obj); // Put back on the idle stack.
            }
        }

        /// <summary>
        ///     Returns the number of instances currently idle in this pool.
        /// </summary>
        /// <remarks>
        ///     This may be considered an approximation of the number of objects that can be borrowed without creating
        ///     any new instances.
        /// </remarks>
        /// <returns>The number of instances currently in this pool.</returns>
        public override int GetCacheSize()
        {
            return _pool.Count;
        }

        /// <summary>
        ///     Clears any objects sitting idle in the pool, releasing any associated resources.
        /// </summary>
        /// <remarks>Idle objects cleared are passed to <see cref="IObjectFactory{T}.Destory" />.</remarks>
        /// <exception cref="Exception">
        ///     When <see cref="IObjectFactory{T}.Destory" /> throws an exception and rethrow is enabled.
        /// </exception>
        public override void Clear()
        {
            while (_pool.Count > 0)
            {
                var obj = _pool.Pop();
                try
                {
                    _factory.Destory(obj);
                }
                catch (Exception destoryException)
                {
                    // Failed to destroy; check the exception rethrow setting.
                    CheckExceptionRethrow(destoryException);
                }
            }
        }
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

// Adapted from fasta C# .NET Core program
// http://benchmarksgame.alioth.debian.org/u64q/program.php?test=fasta&lang=csharpcore&id=1
// aka (as of 2017-09-01) rev 1.1 of https://alioth.debian.org/scm/viewvc.php/benchmarksgame/bench/fasta/fasta.csharp?root=benchmarksgame&view=log
// Best-scoring C# .NET Core version as of 2017-09-01

/* The Computer Language Benchmarks Game
   http://benchmarksgame.alioth.debian.org/

   contributed by Serge Smith
   further optimized (rewrote threading, random generation loop) by Jan de Vaan
   modified by Josh Goldfoot (fasta-repeat buffering)
*/

using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Xunit.Performance;

[assembly: OptimizeForBenchmarks]

namespace BenchmarksGame
{
    /// <summary>
    /// "fasta" benchmark: generates three DNA sequences (one by repeating the ALU
    /// string, two by weighted-random selection of nucleotides) in FASTA format.
    /// Random-sequence generation is pipelined across worker tasks.
    /// </summary>
    public class Fasta_1
    {
        // Characters per output line of sequence data.
        const int LineLength = 60;

        // Parameters of the benchmark's linear congruential generator:
        // s' = (s * IA + IC) % IM (see FillRandom).
        const int IM = 139968;
        const int IA = 3877;
        const int IC = 29573;

        // LCG state; persists across calls so repeated runs continue the stream.
        static int seed = 42;

        /// <summary>
        /// Command-line entry point. Optional first argument is the problem size
        /// (default 1000); output is written to stdout. Returns 100 (benchmark
        /// harness success code).
        /// </summary>
        public static int Main(string[] args)
        {
            int n = args.Length > 0 ? Int32.Parse(args[0]) : 1000;
            Bench(n, true);
            return 100;
        }

        /// <summary>Harness entry point: runs the workload with output discarded.</summary>
        [Benchmark(InnerIterationCount = 4000)]
        public static void RunBench()
        {
            Benchmark.Iterate(() => Bench(5000, false));
        }

        /// <summary>
        /// Runs the full fasta workload for size <paramref name="n"/>. When
        /// <paramref name="verbose"/> is false the output goes to Stream.Null so
        /// only generation cost is measured.
        /// </summary>
        static void Bench(int n, bool verbose)
        {
            // Convert the per-symbol probabilities into cumulative thresholds once,
            // so SelectRandom can do a simple linear scan against the raw LCG value.
            MakeCumulative(IUB);
            MakeCumulative(HomoSapiens);
            using (var s = (verbose ? Console.OpenStandardOutput() : Stream.Null))
            {
                MakeRepeatFasta("ONE", "Homo sapiens alu", Encoding.ASCII.GetBytes(ALU), n * 2, s);
                MakeRandomFasta("TWO", "IUB ambiguity codes", IUB, n * 3, s);
                MakeRandomFasta("THREE", "Homo sapiens frequency", HomoSapiens, n * 5, s);
            }
        }

        /// <summary>
        /// Applies <paramref name="transform"/> to items taken from
        /// <paramref name="queue"/> on up to <paramref name="threadCount"/>
        /// concurrent tasks, yielding results IN TAKE ORDER. A fixed-size ring of
        /// tasks is kept full: as each result is yielded, a new item is taken and
        /// a replacement task started in the same slot. Stops when the queue
        /// completes (TryTake returns false).
        /// </summary>
        public static IEnumerable<R> TransformQueue<T, R>(BlockingCollection<T> queue, Func<T, R> transform, int threadCount)
        {
            var tasks = new Task<R>[threadCount];
            // Prime the ring with up to threadCount in-flight transforms.
            for (int i = 0; i < threadCount; ++i)
            {
                T input;
                if (!queue.TryTake(out input, Timeout.Infinite))
                    break;
                tasks[i] = Task.Run(() => transform(input));
            }

            int pos = 0;
            while (true)
            {
                // A null slot marks the end of the stream (queue was completed).
                if (tasks[pos] == null)
                    break;
                yield return tasks[pos].Result;
                T input;
                // Refill this slot, or mark it null if the queue is drained.
                tasks[pos] = queue.TryTake(out input, Timeout.Infinite) ? Task.Run(() => transform(input)) : null;
                pos = (pos + 1) % threadCount;
            }
        }

        /// <summary>
        /// Writes a FASTA header then n weighted-random nucleotides (60 per line).
        /// A single producer task fills recycled int[] buffers with LCG values
        /// (random generation is inherently sequential because of the shared seed);
        /// consumers convert buffers to nucleotide lines in parallel via
        /// TransformQueue, which preserves order.
        /// </summary>
        static void MakeRandomFasta(string id, string desc, Frequency[] a, int n, Stream s)
        {
            // Bounded to 2 so the producer cannot race far ahead of the consumers.
            var queue = new BlockingCollection<int[]>(2);
            var bufferCount = Environment.ProcessorCount + 4;
            Task.Run(() =>
            {
                var len = LineLength * 40;
                // Reuse a fixed set of buffers; safe because the queue bound plus
                // in-flight task count stays below bufferCount.
                var buffers = Enumerable.Range(0, bufferCount)
                    .Select(i => new int[len]).ToArray();
                var index = 0;
                for (var i = 0; i < n; i += len)
                {
                    // Last chunk may be shorter; allocate an exact-size buffer for it.
                    var buffer = n - i < len ? new int[n - i] : buffers[index++ % buffers.Length];
                    FillRandom(buffer);
                    queue.Add(buffer);
                }
                queue.CompleteAdding();
            });

            byte[] descStr = Encoding.ASCII.GetBytes(">" + id + " " + desc + "\n");
            s.Write(descStr, 0, descStr.Length);
            foreach (var r in TransformQueue(queue, rnd => SelectNucleotides(a, rnd), Environment.ProcessorCount))
            {
                s.Write(r, 0, r.Length);
            }
        }

        /// <summary>
        /// Maps a buffer of raw LCG values to ASCII nucleotide bytes, inserting a
        /// '\n' after every LineLength symbols (and after a final partial line).
        /// </summary>
        private static byte[] SelectNucleotides(Frequency[] a, int[] rnd)
        {
            // Each full line costs LineLength symbols + 1 newline.
            var resLength = (rnd.Length / LineLength) * (LineLength + 1);
            if (rnd.Length % LineLength != 0)
            {
                resLength += rnd.Length % LineLength + 1;
            }
            var buf = new byte[resLength];
            var index = 0;
            for (var i = 0; i < rnd.Length; i += LineLength)
            {
                var len = Math.Min(LineLength, rnd.Length - i);
                for (var j = 0; j < len; ++j)
                    buf[index++] = SelectRandom(a, (int)rnd[i + j]);
                buf[index++] = (byte)'\n';
            }
            return buf;
        }

        /// <summary>
        /// Writes a FASTA header then n characters formed by repeating
        /// <paramref name="alu"/>, wrapped at LineLength columns.
        /// </summary>
        static void MakeRepeatFasta(string id, string desc, byte[] alu, int n, Stream s)
        {
            byte[] descStr = Encoding.ASCII.GetBytes(">" + id + " " + desc + "\n");
            s.Write(descStr, 0, descStr.Length);

            /* JG: fasta_repeat repeats every len(alu) * line-length = 287 * 61 = 17507
               characters. So, calculate this once, then just print that buffer over
               and over. */
            byte[] sequence;
            int sequenceLength;
            using (var unstandardOut = new MemoryStream(alu.Length * (LineLength + 1) + 1))
            {
                // One full period: alu.Length lines of LineLength characters each.
                MakeRepeatFastaBuffer(alu, alu.Length * LineLength, unstandardOut);
                sequenceLength = (int)unstandardOut.Length;
                sequence = new byte[sequenceLength];
                unstandardOut.Seek(0, SeekOrigin.Begin);
                unstandardOut.Read(sequence, 0, sequenceLength);
            }
            // Total bytes including one newline per 60 characters.
            int outputBytes = n + n / 60;
            while (outputBytes >= sequenceLength)
            {
                s.Write(sequence, 0, sequenceLength);
                outputBytes -= sequenceLength;
            }
            if (outputBytes > 0)
            {
                // Partial period at the end, terminated with a newline.
                s.Write(sequence, 0, outputBytes);
                s.WriteByte((byte)'\n');
            }
        }

        /// <summary>
        /// Writes n characters cycling through <paramref name="alu"/>, wrapped at
        /// LineLength columns, flushing through a small scratch buffer.
        /// </summary>
        static void MakeRepeatFastaBuffer(byte[] alu, int n, Stream s)
        {
            var index = 0;
            int m = 0;
            int k = 0;       // current position within alu, wraps at kn
            int kn = alu.Length;
            var buf = new byte[1024];

            while (n > 0)
            {
                m = n < LineLength ? n : LineLength;
                // Flush the scratch buffer when the next line would not fit.
                if (buf.Length - index < m)
                {
                    s.Write(buf, 0, index);
                    index = 0;
                }
                for (int i = 0; i < m; i++)
                {
                    if (k == kn)
                        k = 0;
                    buf[index++] = alu[k];
                    k++;
                }
                buf[index++] = (byte)'\n';
                n -= LineLength;
            }
            if (index != 0)
                s.Write(buf, 0, index);
        }

        /// <summary>
        /// Picks the symbol whose cumulative threshold first exceeds r
        /// (a must already be cumulative; see MakeCumulative).
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        static byte SelectRandom(Frequency[] a, int r)
        {
            for (int i = 0; i < a.Length - 1; i++)
                if (r < a[i].p)
                    return a[i].c;
            return a[a.Length - 1].c;
        }

        /// <summary>Converts per-symbol probabilities into a running cumulative sum, in place.</summary>
        static void MakeCumulative(Frequency[] a)
        {
            double cp = 0;
            for (int i = 0; i < a.Length; i++)
            {
                cp += a[i].p;
                a[i].p = cp;
            }
        }

        // The 287-character human ALU repeat element used for the "repeat" sequence.
        static string ALU =
            "GGCCGGGCGCGGTGGCTCACGCCTGTAATCCCAGCACTTTGG" +
            "GAGGCCGAGGCGGGCGGATCACCTGAGGTCAGGAGTTCGAGA" +
            "CCAGCCTGGCCAACATGGTGAAACCCCGTCTCTACTAAAAAT" +
            "ACAAAAATTAGCCGGGCGTGGTGGCGCGCGCCTGTAATCCCA" +
            "GCTACTCGGGAGGCTGAGGCAGGAGAATCGCTTGAACCCGGG" +
            "AGGCGGAGGTTGCAGTGAGCCGAGATCGCGCCACTGCACTCC" +
            "AGCCTGGGCGACAGAGCGAGACTCCGTCTCAAAAA";

        /// <summary>
        /// A symbol and its selection weight. The constructor pre-scales p by IM so
        /// thresholds are directly comparable to raw LCG outputs (range [0, IM)).
        /// </summary>
        struct Frequency
        {
            public readonly byte c;
            public double p;

            public Frequency(char c, double p)
            {
                this.c = (byte)c;
                this.p = (p * IM);
            }
        }

        // IUB ambiguity-code distribution for sequence TWO.
        static Frequency[] IUB =
        {
            new Frequency ('a', 0.27),
            new Frequency ('c', 0.12),
            new Frequency ('g', 0.12),
            new Frequency ('t', 0.27),
            new Frequency ('B', 0.02),
            new Frequency ('D', 0.02),
            new Frequency ('H', 0.02),
            new Frequency ('K', 0.02),
            new Frequency ('M', 0.02),
            new Frequency ('N', 0.02),
            new Frequency ('R', 0.02),
            new Frequency ('S', 0.02),
            new Frequency ('V', 0.02),
            new Frequency ('W', 0.02),
            new Frequency ('Y', 0.02)
        };

        // Homo sapiens nucleotide distribution for sequence THREE.
        static Frequency[] HomoSapiens =
        {
            new Frequency ('a', 0.3029549426680),
            new Frequency ('c', 0.1979883004921),
            new Frequency ('g', 0.1975473066391),
            new Frequency ('t', 0.3015094502008)
        };

        /// <summary>
        /// Fills <paramref name="result"/> with successive values of the LCG
        /// s' = (s * IA + IC) % IM, then stores the final state back into the
        /// static seed. Only called from the single producer task in
        /// MakeRandomFasta, so the unsynchronized static state is safe there.
        /// </summary>
        private static void FillRandom(int[] result)
        {
            var s = seed;
            for (var i = 0; i < result.Length; i++)
            {
                s = (s * IA + IC) % IM;
                result[i] = s;
            }
            seed = s;
        }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System;
using System.Diagnostics;
using System.Runtime.InteropServices;

// P/Invoke interop definitions mirroring Windows CryptoAPI (wincrypt.h) constants
// and structures. Member names, numeric values and struct layouts intentionally
// match the native declarations exactly — do not rename or reorder fields.
namespace Internal.Cryptography.Pal.Native
{
    // Object-type selector for CryptQueryObject.
    internal enum CertQueryObjectType : int
    {
        CERT_QUERY_OBJECT_FILE = 0x00000001,
        CERT_QUERY_OBJECT_BLOB = 0x00000002,
    }

    // Bit mask of content types CryptQueryObject is allowed to match;
    // each flag is 1 shifted by the corresponding ContentType value.
    [Flags]
    internal enum ExpectedContentTypeFlags : int
    {
        //encoded single certificate
        CERT_QUERY_CONTENT_FLAG_CERT = 1 << ContentType.CERT_QUERY_CONTENT_CERT,

        //encoded single CTL
        CERT_QUERY_CONTENT_FLAG_CTL = 1 << ContentType.CERT_QUERY_CONTENT_CTL,

        //encoded single CRL
        CERT_QUERY_CONTENT_FLAG_CRL = 1 << ContentType.CERT_QUERY_CONTENT_CRL,

        //serialized store
        CERT_QUERY_CONTENT_FLAG_SERIALIZED_STORE = 1 << ContentType.CERT_QUERY_CONTENT_SERIALIZED_STORE,

        //serialized single certificate
        CERT_QUERY_CONTENT_FLAG_SERIALIZED_CERT = 1 << ContentType.CERT_QUERY_CONTENT_SERIALIZED_CERT,

        //serialized single CTL
        CERT_QUERY_CONTENT_FLAG_SERIALIZED_CTL = 1 << ContentType.CERT_QUERY_CONTENT_SERIALIZED_CTL,

        //serialized single CRL
        CERT_QUERY_CONTENT_FLAG_SERIALIZED_CRL = 1 << ContentType.CERT_QUERY_CONTENT_SERIALIZED_CRL,

        //an encoded PKCS#7 signed message
        CERT_QUERY_CONTENT_FLAG_PKCS7_SIGNED = 1 << ContentType.CERT_QUERY_CONTENT_PKCS7_SIGNED,

        //an encoded PKCS#7 message. But it is not a signed message
        CERT_QUERY_CONTENT_FLAG_PKCS7_UNSIGNED = 1 << ContentType.CERT_QUERY_CONTENT_PKCS7_UNSIGNED,

        //the content includes an embedded PKCS7 signed message
        CERT_QUERY_CONTENT_FLAG_PKCS7_SIGNED_EMBED = 1 << ContentType.CERT_QUERY_CONTENT_PKCS7_SIGNED_EMBED,

        //an encoded PKCS#10
        CERT_QUERY_CONTENT_FLAG_PKCS10 = 1 << ContentType.CERT_QUERY_CONTENT_PKCS10,

        //an encoded PFX BLOB
        CERT_QUERY_CONTENT_FLAG_PFX = 1 << ContentType.CERT_QUERY_CONTENT_PFX,

        //an encoded CertificatePair (contains forward and/or reverse cross certs)
        CERT_QUERY_CONTENT_FLAG_CERT_PAIR = 1 << ContentType.CERT_QUERY_CONTENT_CERT_PAIR,

        //an encoded PFX BLOB, and we do want to load it (not included in
        //CERT_QUERY_CONTENT_FLAG_ALL)
        CERT_QUERY_CONTENT_FLAG_PFX_AND_LOAD = 1 << ContentType.CERT_QUERY_CONTENT_PFX_AND_LOAD,

        CERT_QUERY_CONTENT_FLAG_ALL =
            CERT_QUERY_CONTENT_FLAG_CERT |
            CERT_QUERY_CONTENT_FLAG_CTL |
            CERT_QUERY_CONTENT_FLAG_CRL |
            CERT_QUERY_CONTENT_FLAG_SERIALIZED_STORE |
            CERT_QUERY_CONTENT_FLAG_SERIALIZED_CERT |
            CERT_QUERY_CONTENT_FLAG_SERIALIZED_CTL |
            CERT_QUERY_CONTENT_FLAG_SERIALIZED_CRL |
            CERT_QUERY_CONTENT_FLAG_PKCS7_SIGNED |
            CERT_QUERY_CONTENT_FLAG_PKCS7_UNSIGNED |
            CERT_QUERY_CONTENT_FLAG_PKCS7_SIGNED_EMBED |
            CERT_QUERY_CONTENT_FLAG_PKCS10 |
            CERT_QUERY_CONTENT_FLAG_PFX |
            CERT_QUERY_CONTENT_FLAG_CERT_PAIR,
    }

    // Bit mask of encodings CryptQueryObject is allowed to match.
    [Flags]
    internal enum ExpectedFormatTypeFlags : int
    {
        CERT_QUERY_FORMAT_FLAG_BINARY = 1 << FormatType.CERT_QUERY_FORMAT_BINARY,
        CERT_QUERY_FORMAT_FLAG_BASE64_ENCODED = 1 << FormatType.CERT_QUERY_FORMAT_BASE64_ENCODED,
        CERT_QUERY_FORMAT_FLAG_ASN_ASCII_HEX_ENCODED = 1 << FormatType.CERT_QUERY_FORMAT_ASN_ASCII_HEX_ENCODED,

        CERT_QUERY_FORMAT_FLAG_ALL =
            CERT_QUERY_FORMAT_FLAG_BINARY |
            CERT_QUERY_FORMAT_FLAG_BASE64_ENCODED |
            CERT_QUERY_FORMAT_FLAG_ASN_ASCII_HEX_ENCODED,
    }

    internal enum CertEncodingType : int
    {
        PKCS_7_ASN_ENCODING = 0x10000,
        X509_ASN_ENCODING = 0x00001,

        All = PKCS_7_ASN_ENCODING | X509_ASN_ENCODING,
    }

    // Content types reported by CryptQueryObject (pdwContentType).
    internal enum ContentType : int
    {
        //encoded single certificate
        CERT_QUERY_CONTENT_CERT = 1,
        //encoded single CTL
        CERT_QUERY_CONTENT_CTL = 2,
        //encoded single CRL
        CERT_QUERY_CONTENT_CRL = 3,
        //serialized store
        CERT_QUERY_CONTENT_SERIALIZED_STORE = 4,
        //serialized single certificate
        CERT_QUERY_CONTENT_SERIALIZED_CERT = 5,
        //serialized single CTL
        CERT_QUERY_CONTENT_SERIALIZED_CTL = 6,
        //serialized single CRL
        CERT_QUERY_CONTENT_SERIALIZED_CRL = 7,
        //a PKCS#7 signed message
        CERT_QUERY_CONTENT_PKCS7_SIGNED = 8,
        //a PKCS#7 message, such as enveloped message.  But it is not a signed message,
        CERT_QUERY_CONTENT_PKCS7_UNSIGNED = 9,
        //a PKCS7 signed message embedded in a file
        CERT_QUERY_CONTENT_PKCS7_SIGNED_EMBED = 10,
        //an encoded PKCS#10
        CERT_QUERY_CONTENT_PKCS10 = 11,
        //an encoded PFX BLOB
        CERT_QUERY_CONTENT_PFX = 12,
        //an encoded CertificatePair (contains forward and/or reverse cross certs)
        CERT_QUERY_CONTENT_CERT_PAIR = 13,
        //an encoded PFX BLOB, which was loaded to phCertStore
        CERT_QUERY_CONTENT_PFX_AND_LOAD = 14,
    }

    internal enum FormatType : int
    {
        CERT_QUERY_FORMAT_BINARY = 1,
        CERT_QUERY_FORMAT_BASE64_ENCODED = 2,
        CERT_QUERY_FORMAT_ASN_ASCII_HEX_ENCODED = 3,
    }

    // CRYPTOAPI_BLOB has many typedef aliases in the C++ world (CERT_BLOB, DATA_BLOB, etc.) We'll just stick to one name here.
    [StructLayout(LayoutKind.Sequential)]
    internal unsafe struct CRYPTOAPI_BLOB
    {
        public CRYPTOAPI_BLOB(int cbData, byte* pbData)
        {
            this.cbData = cbData;
            this.pbData = pbData;
            return;
        }

        public int cbData;   // byte count of the data pointed to by pbData
        public byte* pbData; // pointer into native memory; lifetime owned by the caller

        // Copies the native buffer into a managed array.
        public byte[] ToByteArray()
        {
            byte[] array = new byte[cbData];
            Marshal.Copy((IntPtr)pbData, array, 0, cbData);
            return array;
        }
    }

    // Property IDs for CertGetCertificateContextProperty / CertSetCertificateContextProperty.
    internal enum CertContextPropId : int
    {
        CERT_KEY_PROV_INFO_PROP_ID = 2,
        CERT_SHA1_HASH_PROP_ID = 3,
        CERT_FRIENDLY_NAME_PROP_ID = 11,
        CERT_ARCHIVED_PROP_ID = 19,
        CERT_KEY_IDENTIFIER_PROP_ID = 20,
        CERT_PUBKEY_ALG_PARA_PROP_ID = 22,
        CERT_DELETE_KEYSET_PROP_ID = 101,
    }

    [Flags]
    internal enum CertSetPropertyFlags : int
    {
        CERT_SET_PROPERTY_INHIBIT_PERSIST_FLAG = 0x40000000,
        None = 0x00000000,
    }

    // Name-type selector for CertGetNameString.
    internal enum CertNameType : int
    {
        CERT_NAME_EMAIL_TYPE = 1,
        CERT_NAME_RDN_TYPE = 2,
        CERT_NAME_ATTR_TYPE = 3,
        CERT_NAME_SIMPLE_DISPLAY_TYPE = 4,
        CERT_NAME_FRIENDLY_DISPLAY_TYPE = 5,
        CERT_NAME_DNS_TYPE = 6,
        CERT_NAME_URL_TYPE = 7,
        CERT_NAME_UPN_TYPE = 8,
    }

    [Flags]
    internal enum CertNameFlags : int
    {
        None = 0x00000000,
        CERT_NAME_ISSUER_FLAG = 0x00000001,
    }

    internal enum CertNameStringType : int
    {
        CERT_X500_NAME_STR = 3,
        CERT_NAME_STR_REVERSE_FLAG = 0x02000000,
    }

    [StructLayout(LayoutKind.Sequential)]
    internal unsafe struct CERT_CONTEXT
    {
        public CertEncodingType dwCertEncodingType;
        public byte* pbCertEncoded;
        public int cbCertEncoded;
        public CERT_INFO* pCertInfo;
        public IntPtr hCertStore;
    }

    [StructLayout(LayoutKind.Sequential)]
    internal unsafe struct CERT_INFO
    {
        public int dwVersion;
        public CRYPTOAPI_BLOB SerialNumber;
        public CRYPT_ALGORITHM_IDENTIFIER SignatureAlgorithm;
        public CRYPTOAPI_BLOB Issuer;
        public FILETIME NotBefore;
        public FILETIME NotAfter;
        public CRYPTOAPI_BLOB Subject;
        public CERT_PUBLIC_KEY_INFO SubjectPublicKeyInfo;
        public CRYPT_BIT_BLOB IssuerUniqueId;
        public CRYPT_BIT_BLOB SubjectUniqueId;
        public int cExtension;
        public CERT_EXTENSION* rgExtension;
    }

    [StructLayout(LayoutKind.Sequential)]
    internal struct CRYPT_ALGORITHM_IDENTIFIER
    {
        public IntPtr pszObjId; // LPSTR (ANSI OID string)
        public CRYPTOAPI_BLOB Parameters;
    }

    [StructLayout(LayoutKind.Sequential)]
    internal struct CERT_PUBLIC_KEY_INFO
    {
        public CRYPT_ALGORITHM_IDENTIFIER Algorithm;
        public CRYPT_BIT_BLOB PublicKey;
    }

    [StructLayout(LayoutKind.Sequential)]
    internal unsafe struct CRYPT_BIT_BLOB
    {
        public int cbData;
        public byte* pbData;
        public int cUnusedBits;

        // Copies the native buffer into a managed array (unused-bit count is ignored).
        public byte[] ToByteArray()
        {
            byte[] array = new byte[cbData];
            Marshal.Copy((IntPtr)pbData, array, 0, cbData);
            return array;
        }
    }

    [StructLayout(LayoutKind.Sequential)]
    internal unsafe struct CERT_EXTENSION
    {
        public IntPtr pszObjId; // LPSTR (ANSI OID string)
        public int fCritical;   // BOOL
        public CRYPTOAPI_BLOB Value;
    }

    // Win32 FILETIME (100ns intervals since 1601-01-01) split into two 32-bit halves.
    [StructLayout(LayoutKind.Sequential)]
    internal struct FILETIME
    {
        private uint ftTimeLow;
        private uint ftTimeHigh;

        // Converts via DateTime.FromFileTime (result is in local time).
        public DateTime ToDateTime()
        {
            long fileTime = (((long)ftTimeHigh) << 32) + ftTimeLow;
            return DateTime.FromFileTime(fileTime);
        }

        public static FILETIME FromDateTime(DateTime dt)
        {
            long fileTime = dt.ToFileTime();

            return new FILETIME()
            {
                ftTimeLow = (uint)fileTime,
                ftTimeHigh = (uint)(fileTime >> 32),
            };
        }
    }

    // Provider selector for CertOpenStore.
    internal enum CertStoreProvider : int
    {
        CERT_STORE_PROV_MEMORY = 2,
        CERT_STORE_PROV_SYSTEM_W = 10,
    }

    [Flags]
    internal enum CertStoreFlags : int
    {
        CERT_STORE_NO_CRYPT_RELEASE_FLAG = 0x00000001,
        CERT_STORE_SET_LOCALIZED_NAME_FLAG = 0x00000002,
        CERT_STORE_DEFER_CLOSE_UNTIL_LAST_FREE_FLAG = 0x00000004,
        CERT_STORE_DELETE_FLAG = 0x00000010,
        CERT_STORE_UNSAFE_PHYSICAL_FLAG = 0x00000020,
        CERT_STORE_SHARE_STORE_FLAG = 0x00000040,
        CERT_STORE_SHARE_CONTEXT_FLAG = 0x00000080,
        CERT_STORE_MANIFOLD_FLAG = 0x00000100,
        CERT_STORE_ENUM_ARCHIVED_FLAG = 0x00000200,
        CERT_STORE_UPDATE_KEYID_FLAG = 0x00000400,
        CERT_STORE_BACKUP_RESTORE_FLAG = 0x00000800,
        CERT_STORE_READONLY_FLAG = 0x00008000,
        CERT_STORE_OPEN_EXISTING_FLAG = 0x00004000,
        CERT_STORE_CREATE_NEW_FLAG = 0x00002000,
        CERT_STORE_MAXIMUM_ALLOWED_FLAG = 0x00001000,

        CERT_SYSTEM_STORE_CURRENT_USER = 0x00010000,
        CERT_SYSTEM_STORE_LOCAL_MACHINE = 0x00020000,

        None = 0x00000000,
    }

    internal enum CertStoreAddDisposition : int
    {
        CERT_STORE_ADD_NEW = 1,
        CERT_STORE_ADD_USE_EXISTING = 2,
        CERT_STORE_ADD_REPLACE_EXISTING = 3,
        CERT_STORE_ADD_ALWAYS = 4,
        CERT_STORE_ADD_REPLACE_EXISTING_INHERIT_PROPERTIES = 5,
        CERT_STORE_ADD_NEWER = 6,
        CERT_STORE_ADD_NEWER_INHERIT_PROPERTIES = 7,
    }

    // Flags for PFXImportCertStore.
    [Flags]
    internal enum PfxCertStoreFlags : int
    {
        CRYPT_EXPORTABLE = 0x00000001,
        CRYPT_USER_PROTECTED = 0x00000002,
        CRYPT_MACHINE_KEYSET = 0x00000020,
        CRYPT_USER_KEYSET = 0x00001000,
        PKCS12_PREFER_CNG_KSP = 0x00000100,
        PKCS12_ALWAYS_CNG_KSP = 0x00000200,
        PKCS12_ALLOW_OVERWRITE_KEY = 0x00004000,
        PKCS12_NO_PERSIST_KEY = 0x00008000,
        PKCS12_INCLUDE_EXTENDED_PROPERTIES = 0x00000010,
        None = 0x00000000,
    }

    // Parameter selector for CryptMsgGetParam.
    internal enum CryptMessageParameterType : int
    {
        CMSG_SIGNER_COUNT_PARAM = 5,
        CMSG_SIGNER_INFO_PARAM = 6,
    }

    [StructLayout(LayoutKind.Sequential)]
    internal struct CMSG_SIGNER_INFO_Partial // This is not the full definition of CMSG_SIGNER_INFO. Only defining the part we use.
    {
        public int dwVersion;
        public CRYPTOAPI_BLOB Issuer;
        public CRYPTOAPI_BLOB SerialNumber;
        //... more fields follow ...
    }

    [Flags]
    internal enum CertFindFlags : int
    {
        None = 0x00000000,
    }

    // Search-criteria selector for CertFindCertificateInStore.
    internal enum CertFindType : int
    {
        CERT_FIND_SUBJECT_CERT = 0x000b0000,
        CERT_FIND_HASH = 0x00010000,
        CERT_FIND_SUBJECT_STR = 0x00080007,
        CERT_FIND_ISSUER_STR = 0x00080004,
        CERT_FIND_EXISTING = 0x000d0000,
        CERT_FIND_ANY = 0x00000000,
    }

    [Flags]
    internal enum PFXExportFlags : int
    {
        REPORT_NO_PRIVATE_KEY = 0x00000001,
        REPORT_NOT_ABLE_TO_EXPORT_PRIVATE_KEY = 0x00000002,
        EXPORT_PRIVATE_KEYS = 0x00000004,
        None = 0x00000000,
    }

    [StructLayout(LayoutKind.Sequential)]
    internal unsafe struct CRYPT_KEY_PROV_INFO
    {
        public char* pwszContainerName;
        public char* pwszProvName;
        public int dwProvType;
        public CryptAcquireContextFlags dwFlags;
        public int cProvParam;
        public IntPtr rgProvParam;
        public int dwKeySpec;
    }

    [Flags]
    internal enum CryptAcquireContextFlags : int
    {
        CRYPT_DELETEKEYSET = 0x00000010,
        CRYPT_MACHINE_KEYSET = 0x00000020,
        None = 0x00000000,
    }

    // Combined type + formatting flags for CertNameToStr / CertStrToName.
    [Flags]
    internal enum CertNameStrTypeAndFlags : int
    {
        CERT_SIMPLE_NAME_STR = 1,
        CERT_OID_NAME_STR = 2,
        CERT_X500_NAME_STR = 3,

        CERT_NAME_STR_SEMICOLON_FLAG = 0x40000000,
        CERT_NAME_STR_NO_PLUS_FLAG = 0x20000000,
        CERT_NAME_STR_NO_QUOTING_FLAG = 0x10000000,
        CERT_NAME_STR_CRLF_FLAG = 0x08000000,
        CERT_NAME_STR_COMMA_FLAG = 0x04000000,
        CERT_NAME_STR_REVERSE_FLAG = 0x02000000,

        CERT_NAME_STR_DISABLE_IE4_UTF8_FLAG = 0x00010000,
        CERT_NAME_STR_ENABLE_T61_UNICODE_FLAG = 0x00020000,
        CERT_NAME_STR_ENABLE_UTF8_UNICODE_FLAG = 0x00040000,
        CERT_NAME_STR_FORCE_UTF8_DIR_STR_FLAG = 0x00080000,
    }

    internal enum FormatObjectType : int
    {
        None = 0,
    }

    [Flags]
    internal enum FormatObjectStringType : int
    {
        CRYPT_FORMAT_STR_MULTI_LINE = 0x00000001,
        CRYPT_FORMAT_STR_NO_HEX = 0x00000010,
        None = 0x00000000,
    }

    internal enum FormatObjectStructType : int
    {
        X509_NAME = 7,
    }

    // ALG_ID constants (CAPI algorithm identifiers).
    internal static class AlgId
    {
        public const int CALG_RSA_KEYX = 0xa400;
        public const int CALG_RSA_SIGN = 0x2400;
        public const int CALG_DSS_SIGN = 0x2200;
        public const int CALG_SHA1 = 0x8004;
    }

    [Flags]
    internal enum CryptDecodeObjectFlags : int
    {
        None = 0x00000000,
    }

    // lpszStructType selectors for CryptDecodeObject / CryptEncodeObject.
    internal enum CryptDecodeObjectStructType : int
    {
        CNG_RSA_PUBLIC_KEY_BLOB = 72,
        X509_DSS_PUBLICKEY = 38,
        X509_DSS_PARAMETERS = 39,
        X509_KEY_USAGE = 14,
        X509_BASIC_CONSTRAINTS = 13,
        X509_BASIC_CONSTRAINTS2 = 15,
        X509_ENHANCED_KEY_USAGE = 36,
        X509_CERT_POLICIES = 16,
        X509_UNICODE_ANY_STRING = 24,
        X509_CERTIFICATE_TEMPLATE = 64,
    }

    [StructLayout(LayoutKind.Sequential)]
    internal struct CTL_USAGE
    {
        public int cUsageIdentifier;
        public IntPtr rgpszUsageIdentifier; // LPSTR*
    }

    [StructLayout(LayoutKind.Sequential)]
    internal struct CERT_USAGE_MATCH
    {
        public CertUsageMatchType dwType;
        public CTL_USAGE Usage;
    }

    internal enum CertUsageMatchType : int
    {
        USAGE_MATCH_TYPE_AND = 0x00000000,
        USAGE_MATCH_TYPE_OR = 0x00000001,
    }

    // Input to CertGetCertificateChain.
    [StructLayout(LayoutKind.Sequential)]
    internal unsafe struct CERT_CHAIN_PARA
    {
        public int cbSize;
        public CERT_USAGE_MATCH RequestedUsage;
        public CERT_USAGE_MATCH RequestedIssuancePolicy;
        public int dwUrlRetrievalTimeout;
        public int fCheckRevocationFreshnessTime;
        public int dwRevocationFreshnessTime;
        public FILETIME* pftCacheResync;
        public int pStrongSignPara;
        public int dwStrongSignFlags;
    }

    [Flags]
    internal enum CertChainFlags : int
    {
        None = 0x00000000,
        CERT_CHAIN_REVOCATION_CHECK_END_CERT = 0x10000000,
        CERT_CHAIN_REVOCATION_CHECK_CHAIN = 0x20000000,
        CERT_CHAIN_REVOCATION_CHECK_CHAIN_EXCLUDE_ROOT = 0x40000000,
        CERT_CHAIN_REVOCATION_CHECK_CACHE_ONLY = unchecked((int)0x80000000),
    }

    internal enum ChainEngine : int
    {
        HCCE_CURRENT_USER = 0x0,
        HCCE_LOCAL_MACHINE = 0x1,
    }

    [StructLayout(LayoutKind.Sequential)]
    internal struct CERT_DSS_PARAMETERS
    {
        public CRYPTOAPI_BLOB p;
        public CRYPTOAPI_BLOB q;
        public CRYPTOAPI_BLOB g;
    }

    internal enum PubKeyMagic : int
    {
        DSS_MAGIC = 0x31535344, // ASCII "DSS1" little-endian
    }

    [StructLayout(LayoutKind.Sequential)]
    internal struct BLOBHEADER
    {
        public byte bType;
        public byte bVersion;
        public short reserved;
        public uint aiKeyAlg;
    };

    [StructLayout(LayoutKind.Sequential)]
    internal unsafe struct CERT_BASIC_CONSTRAINTS_INFO
    {
        public CRYPT_BIT_BLOB SubjectType;
        public int fPathLenConstraint;
        public int dwPathLenConstraint;
        public int cSubtreesConstraint;
        public CRYPTOAPI_BLOB* rgSubtreesConstraint; // PCERT_NAME_BLOB

        // SubjectType.pbData[0] can contain a CERT_CA_SUBJECT_FLAG that when set indicates that the certificate's subject can act as a CA
        public const byte CERT_CA_SUBJECT_FLAG = 0x80;
    };

    [StructLayout(LayoutKind.Sequential)]
    internal struct CERT_BASIC_CONSTRAINTS2_INFO
    {
        public int fCA;
        public int fPathLenConstraint;
        public int dwPathLenConstraint;
    };

    [StructLayout(LayoutKind.Sequential)]
    internal unsafe struct CERT_ENHKEY_USAGE
    {
        public int cUsageIdentifier;
        public IntPtr* rgpszUsageIdentifier; // LPSTR*
    }

    internal enum CertStoreSaveAs : int
    {
        CERT_STORE_SAVE_AS_STORE = 1,
        CERT_STORE_SAVE_AS_PKCS7 = 2,
    }

    internal enum CertStoreSaveTo : int
    {
        CERT_STORE_SAVE_TO_MEMORY = 2,
    }

    [StructLayout(LayoutKind.Sequential)]
    internal struct CERT_POLICY_INFO
    {
        public IntPtr pszPolicyIdentifier;
        public int cPolicyQualifier;
        public IntPtr rgPolicyQualifier;
    }

    [StructLayout(LayoutKind.Sequential)]
    internal unsafe struct CERT_POLICIES_INFO
    {
        public int cPolicyInfo;
        public CERT_POLICY_INFO* rgPolicyInfo;
    }

    [StructLayout(LayoutKind.Sequential)]
    internal struct CERT_NAME_VALUE
    {
        public int dwValueType;
        public CRYPTOAPI_BLOB Value;
    }

    [StructLayout(LayoutKind.Sequential)]
    internal struct CERT_TEMPLATE_EXT
    {
        public IntPtr pszObjId;
        public int dwMajorVersion;
        public int fMinorVersion;   // BOOL: whether dwMinorVersion is present
        public int dwMinorVersion;
    }

    [Flags]
    internal enum CertControlStoreFlags : int
    {
        None = 0x00000000,
    }

    internal enum CertControlStoreType : int
    {
        CERT_STORE_CTRL_AUTO_RESYNC = 4,
    }

    // Error bits in CERT_TRUST_STATUS.dwErrorStatus.
    [Flags]
    internal enum CertTrustErrorStatus : int
    {
        CERT_TRUST_NO_ERROR = 0x00000000,
        CERT_TRUST_IS_NOT_TIME_VALID = 0x00000001,
        CERT_TRUST_IS_NOT_TIME_NESTED = 0x00000002,
        CERT_TRUST_IS_REVOKED = 0x00000004,
        CERT_TRUST_IS_NOT_SIGNATURE_VALID = 0x00000008,
        CERT_TRUST_IS_NOT_VALID_FOR_USAGE = 0x00000010,
        CERT_TRUST_IS_UNTRUSTED_ROOT = 0x00000020,
        CERT_TRUST_REVOCATION_STATUS_UNKNOWN = 0x00000040,
        CERT_TRUST_IS_CYCLIC = 0x00000080,
        CERT_TRUST_INVALID_EXTENSION = 0x00000100,
        CERT_TRUST_INVALID_POLICY_CONSTRAINTS = 0x00000200,
        CERT_TRUST_INVALID_BASIC_CONSTRAINTS = 0x00000400,
        CERT_TRUST_INVALID_NAME_CONSTRAINTS = 0x00000800,
        CERT_TRUST_HAS_NOT_SUPPORTED_NAME_CONSTRAINT = 0x00001000,
        CERT_TRUST_HAS_NOT_DEFINED_NAME_CONSTRAINT = 0x00002000,
        CERT_TRUST_HAS_NOT_PERMITTED_NAME_CONSTRAINT = 0x00004000,
        CERT_TRUST_HAS_EXCLUDED_NAME_CONSTRAINT = 0x00008000,
        CERT_TRUST_IS_OFFLINE_REVOCATION = 0x01000000,
        CERT_TRUST_NO_ISSUANCE_CHAIN_POLICY = 0x02000000,

        // These can be applied to chains only
        CERT_TRUST_IS_PARTIAL_CHAIN = 0x00010000,
        CERT_TRUST_CTL_IS_NOT_TIME_VALID = 0x00020000,
        CERT_TRUST_CTL_IS_NOT_SIGNATURE_VALID = 0x00040000,
        CERT_TRUST_CTL_IS_NOT_VALID_FOR_USAGE = 0x00080000,
    }

    // Informational bits in CERT_TRUST_STATUS.dwInfoStatus.
    [Flags]
    internal enum CertTrustInfoStatus : int
    {
        // These can be applied to certificates only
        CERT_TRUST_HAS_EXACT_MATCH_ISSUER = 0x00000001,
        CERT_TRUST_HAS_KEY_MATCH_ISSUER = 0x00000002,
        CERT_TRUST_HAS_NAME_MATCH_ISSUER = 0x00000004,
        CERT_TRUST_IS_SELF_SIGNED = 0x00000008,

        // These can be applied to certificates and chains
        CERT_TRUST_HAS_PREFERRED_ISSUER = 0x00000100,
        CERT_TRUST_HAS_ISSUANCE_CHAIN_POLICY = 0x00000200,
        CERT_TRUST_HAS_VALID_NAME_CONSTRAINTS = 0x00000400,

        // These can be applied to chains only
        CERT_TRUST_IS_COMPLEX_CHAIN = 0x00010000,
    }

    [StructLayout(LayoutKind.Sequential)]
    internal struct CERT_TRUST_STATUS
    {
        public CertTrustErrorStatus dwErrorStatus;
        public CertTrustInfoStatus dwInfoStatus;
    }

    [StructLayout(LayoutKind.Sequential)]
    internal unsafe struct CERT_CHAIN_ELEMENT
    {
        public int cbSize;
        public CERT_CONTEXT* pCertContext;
        public CERT_TRUST_STATUS TrustStatus;
        public IntPtr pRevocationInfo;
        public IntPtr pIssuanceUsage;
        public IntPtr pApplicationUsage;
        public IntPtr pwszExtendedErrorInfo;
    }

    [StructLayout(LayoutKind.Sequential)]
    internal unsafe struct CERT_SIMPLE_CHAIN
    {
        public int cbSize;
        public CERT_TRUST_STATUS TrustStatus;
        public int cElement;
        public CERT_CHAIN_ELEMENT** rgpElement;
        public IntPtr pTrustListInfo;

        // fHasRevocationFreshnessTime is only set if we are able to retrieve
        // revocation information for all elements checked for revocation.
        // For a CRL its CurrentTime - ThisUpdate.
        //
        // dwRevocationFreshnessTime is the largest time across all elements
        // checked.
        public int fHasRevocationFreshnessTime;
        public int dwRevocationFreshnessTime;   // seconds
    }

    [StructLayout(LayoutKind.Sequential)]
    internal unsafe struct CERT_CHAIN_CONTEXT
    {
        public int cbSize;

        public CERT_TRUST_STATUS TrustStatus;
        public int cChain;
        public CERT_SIMPLE_CHAIN** rgpChain;

        // Following is returned when CERT_CHAIN_RETURN_LOWER_QUALITY_CONTEXTS
        // is set in dwFlags
        public int cLowerQualityChainContext;
        public CERT_CHAIN_CONTEXT** rgpLowerQualityChainContext;

        // fHasRevocationFreshnessTime is only set if we are able to retrieve
        // revocation information for all elements checked for revocation.
        // For a CRL its CurrentTime - ThisUpdate.
        //
        // dwRevocationFreshnessTime is the largest time across all elements
        // checked.
        public int fHasRevocationFreshnessTime;
        public int dwRevocationFreshnessTime;   // seconds

        // Flags passed when created via CertGetCertificateChain
        public int dwCreateFlags;

        // Following is updated with unique Id when the chain context is logged.
        public Guid ChainId;
    }

    [Flags]
    internal enum FormatMessageFlags : int
    {
        FORMAT_MESSAGE_FROM_SYSTEM = 0x00001000,
        FORMAT_MESSAGE_IGNORE_INSERTS = 0x00000200,
    }

    [StructLayout(LayoutKind.Sequential)]
    internal struct CERT_CHAIN_POLICY_PARA
    {
        public int cbSize;
        public int dwFlags;
        public IntPtr pvExtraPolicyPara;
    }

    [StructLayout(LayoutKind.Sequential)]
    internal struct CERT_CHAIN_POLICY_STATUS
    {
        public int cbSize;
        public int dwError;
        public IntPtr lChainIndex;
        public IntPtr lElementIndex;
        public IntPtr pvExtraPolicyStatus;
    }

    internal enum ChainPolicy : int
    {
        // Predefined verify chain policies
        CERT_CHAIN_POLICY_BASE = 1,
    }
}
using NUnit.Framework;
using System;
using FilenameBuddy;
using System.IO;
using Shouldly;

namespace FilenameBuddyTests
{
	/// <summary>
	/// Unit tests for the FilenameBuddy <see cref="Filename"/> class.
	/// All assertions use Shouldly for a single, consistent assertion style.
	/// </summary>
	[TestFixture]
	public class Test
	{
		/// <summary>
		/// Reset the static current directory before every test so tests that
		/// change it (e.g. <see cref="SetCurrentDirectory"/>) cannot leak state.
		/// </summary>
		[SetUp]
		public void Setup()
		{
			Filename.SetCurrentDirectory(Directory.GetCurrentDirectory() + @"\Content\");
		}

		/// <summary>
		/// Get the expected content location used by most tests:
		/// the current working directory plus a "Content" folder, with normalized slashes.
		/// </summary>
		/// <returns>The content location.</returns>
		private string ProgLocation()
		{
			return Filename.ReplaceSlashes(Directory.GetCurrentDirectory() + @"\Content\");
		}

		[Test]
		public void StaticConstructor()
		{
			// the program location should be the current working directory
			Filename.ProgramLocation.ShouldBe(Directory.GetCurrentDirectory() + "\\");
		}

		[Test]
		public void DefaultConstructor()
		{
			// default constructor = no filename
			Filename dude = new Filename();
			string.IsNullOrEmpty(dude.File).ShouldBeTrue();
		}

		[Test]
		public void Constructor()
		{
			// set the filename in the constructor
			Filename dude = new Filename("test");
			dude.File.ShouldBe(ProgLocation() + @"test");
		}

		[Test]
		public void SetFilename()
		{
			// setting File directly stores the value verbatim (no path expansion)
			Filename dude = new Filename();
			dude.File = "test";
			dude.File.ShouldBe("test");
		}

		[Test]
		public void SetAbsFilenameGetRelFilename()
		{
			// an absolute filename under the content folder comes back relative
			Filename dude = new Filename();
			dude.File = ProgLocation() + @"Buttnuts\test.txt";
			dude.GetRelFilename().ShouldBe(@"Buttnuts\test.txt");
		}

		[Test]
		public void SetRelFilename()
		{
			Filename dude = new Filename();
			dude.SetRelFilename("test");
			dude.File.ShouldBe(ProgLocation() + @"test");
		}

		[Test]
		public void SetRelFilename1()
		{
			Filename dude = new Filename();
			dude.SetRelFilename(@"Buttnuts\test.txt");
			dude.File.ShouldBe(ProgLocation() + @"Buttnuts\test.txt");
		}

		[Test]
		public void GetPath()
		{
			Filename dude = new Filename();
			dude.SetRelFilename("test");
			dude.GetPath().ShouldBe(ProgLocation());
		}

		[Test]
		public void GetPathWithExt()
		{
			Filename dude = new Filename();
			dude.SetRelFilename("test.txt");
			dude.GetPath().ShouldBe(ProgLocation());
		}

		[Test]
		public void GetPathWithSub()
		{
			Filename dude = new Filename();
			dude.SetRelFilename("test.txt");
			dude.GetPath().ShouldBe(ProgLocation());
		}

		[Test]
		public void GetRelPath()
		{
			Filename dude = new Filename();
			dude.SetRelFilename(@"Buttnuts\test.txt");
			dude.GetRelPath().ShouldBe(@"Buttnuts\");
		}

		[Test]
		public void GetRelPath1()
		{
			Filename dude = new Filename();
			dude.SetRelFilename(@"Buttnuts\assnuts\test.txt");
			dude.GetRelPath().ShouldBe(@"Buttnuts\assnuts\");
		}

		[Test]
		public void GetFilename()
		{
			Filename dude = new Filename();
			dude.SetRelFilename(@"Content\Buttnuts\assnuts\test.txt");
			dude.GetFile().ShouldBe(@"test.txt");
		}

		[Test]
		public void GetFilename1()
		{
			Filename dude = new Filename();
			dude.SetRelFilename(@"Content\Buttnuts\assnuts\test");
			dude.GetFile().ShouldBe(@"test");
		}

		[Test]
		public void GetFileExt()
		{
			Filename dude = new Filename();
			dude.SetRelFilename(@"Content\Buttnuts\assnuts\test.txt");
			dude.GetFileExt().ShouldBe(@".txt");
		}

		[Test]
		public void GetFileExt1()
		{
			// no extension yields an empty string, not null
			Filename dude = new Filename();
			dude.SetRelFilename(@"Content\Buttnuts\assnuts\test");
			dude.GetFileExt().ShouldBe(@"");
		}

		[Test]
		public void GetFileNoExt()
		{
			Filename dude = new Filename();
			dude.SetRelFilename(@"Content\Buttnuts\assnuts\test.txt");
			dude.GetFileNoExt().ShouldBe(@"test");
		}

		[Test]
		public void GetFileNoExtBreakIt()
		{
			Filename dude = new Filename();
			dude.SetRelFilename(@"Content\Buttnuts\assnuts\test");
			dude.GetFileNoExt().ShouldBe(@"test");
		}

		[Test]
		public void GetPathFileNoExt()
		{
			Filename dude = new Filename();
			string testFile = @"Buttnuts\assnuts\test.txt";
			dude.SetRelFilename(testFile);
			dude.GetPathFileNoExt().ShouldBe(ProgLocation() + @"Buttnuts\assnuts\test");
		}

		[Test]
		public void GetRelPathFileNoExt()
		{
			Filename dude = new Filename();
			string testFile = @"Buttnuts\assnuts\test.txt";
			dude.SetRelFilename(testFile);
			dude.GetRelPathFileNoExt().ShouldBe(@"Buttnuts\assnuts\test");
		}

		[Test]
		public void GetRelFilename()
		{
			Filename dude = new Filename();
			string testFile = @"Buttnuts\assnuts\test.txt";
			dude.SetRelFilename(testFile);
			dude.GetRelFilename().ShouldBe(@"Buttnuts\assnuts\test.txt");
		}

		[Test]
		public void SetCurrentDirectory()
		{
			// relative filenames are resolved against the "Content" portion of the
			// configured current directory (the trailing folder is dropped)
			Filename.SetCurrentDirectory(@"c:assnuts\shitass\Content\poopstains");
			Filename dude = new Filename();
			string testFile = @"Buttnuts\assnuts\test.txt";
			dude.SetRelFilename(testFile);
			dude.File.ShouldBe(Filename.ReplaceSlashes(@"c:assnuts/shitass/Content/Buttnuts/assnuts/test.txt"));
		}

		[Test]
		public void GetRelFilename1()
		{
			Filename dude = new Filename();
			string testFile = @"test.txt";
			dude.SetRelFilename(testFile);
			dude.GetRelFilename().ShouldBe(@"test.txt");
		}

		[Test]
		public void GetRelFilename2()
		{
			Filename dude = new Filename();
			string testFile = @"test.txt";
			dude.SetRelFilename(testFile);
			dude.GetRelFilename().ShouldBe(@"test.txt");
		}

		[Test]
		public void FilenameNoExt()
		{
			Filename dude = new Filename();
			string testFile = @"test.txt";
			dude.SetRelFilename(testFile);
			dude.GetFileNoExt().ShouldBe(@"test");
		}

		[Test]
		public void FilenameNoExt1()
		{
			// a dot in a folder name must not be mistaken for the file extension
			Filename dude = new Filename();
			string testFile = @"windows.xna\test.txt";
			dude.SetRelFilename(testFile);
			dude.GetFileNoExt().ShouldBe(@"test");
		}

		[Test]
		public void GetExtension()
		{
			Filename dude = new Filename();
			string testFile = @"windows.xna\test.longextension";
			dude.SetRelFilename(testFile);
			dude.GetFileExt().ShouldBe(@".longextension");
		}

		[Test]
		public void Comparison()
		{
			var dude1 = new Filename("dude");
			var dude2 = new Filename("dude");
			dude1.Compare(dude2).ShouldBeTrue();
		}

		[Test]
		public void Comparison_false()
		{
			var dude1 = new Filename("dude");
			var dude2 = new Filename("cat");
			dude1.Compare(dude2).ShouldBeFalse();
		}

		[Test]
		public void HasFilename1()
		{
			var dude = new Filename();
			dude.HasFilename.ShouldBeFalse();
		}

		[Test]
		public void HasFilename2()
		{
			var dude = new Filename("dude");
			dude.HasFilename.ShouldBeTrue();
		}

		[Test]
		public void HasFilename3()
		{
			var dude = new Filename();
			dude.File = "dude";
			dude.HasFilename.ShouldBeTrue();
		}

		[Test]
		public void HasFilename4()
		{
			// copy constructor preserves a filename being set
			var dude1 = new Filename("dude");
			var dude2 = new Filename(dude1);
			dude2.HasFilename.ShouldBeTrue();
		}

		[Test]
		public void HasFilename5()
		{
			// copy constructor preserves an empty filename
			var dude1 = new Filename();
			var dude2 = new Filename(dude1);
			dude2.HasFilename.ShouldBeFalse();
		}

		[Test]
		public void HasFilename6()
		{
			var dude1 = new Filename();
			dude1.SetRelFilename("dude");
			dude1.HasFilename.ShouldBeTrue();
		}

		[Test]
		public void SetFromRelativeFilename()
		{
			Filename originalLocation = new Filename();
			string testFile = @"Buttnuts\assnuts\test.txt";
			originalLocation.SetRelFilename(testFile);
			var secondFilename = new Filename(originalLocation, "catpants\\cat.png");
			secondFilename.GetRelFilename().ShouldBe(@"Buttnuts\assnuts\catpants\cat.png");
		}

		[TestCase(@"test1\test.txt", @"test2.txt", @"test1\test2.txt")]
		[TestCase(@"test1\test.txt", @"test3\test2.txt", @"test1\test3\test2.txt")]
		[TestCase(@"test1\test2\test3.txt", @"..\test4\test5.txt", @"test1\test4\test5.txt")]
		public void SetFilenameRelativeToPath(string original, string target, string expectedResult)
		{
			var originalFilename = new Filename(original);
			var targetFilename = new Filename();
			targetFilename.SetFilenameRelativeToPath(originalFilename, target);
			targetFilename.GetRelFilename().ShouldBe(expectedResult);
		}

		[TestCase(@"test1\test.txt", @"test1\test2.txt", @"test2.txt")]
		[TestCase(@"test1\test.txt", @"test1\test3\test2.txt", @"test3\test2.txt")]
		[TestCase(@"test1\test2\test3.txt", @"test1\test4\test5.txt", @"..\test4\test5.txt")]
		public void GetFilenameRelativeToPath(string original, string target, string expectedResult)
		{
			var originalFilename = new Filename(original);
			var targetFilename = new Filename(target);
			targetFilename.GetFilenameRelativeToPath(originalFilename).ShouldBe(expectedResult);
		}

		[Test]
		public void SetFilenameRelativeToPath_fullPath()
		{
			var originalFilename = new Filename(@"test1\test.txt");
			var targetFilename = new Filename();
			targetFilename.SetFilenameRelativeToPath(originalFilename, @"test2.txt");
			var expectedResult = $@"{Filename.ProgramLocation}Content\test1\test2.txt";
			targetFilename.File.ShouldBe(expectedResult);
		}
	}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System;
using System.Collections.Generic;
using System.Globalization;
using System.Runtime.Tests.Common;
using Xunit;

/// <summary>
/// Unit tests for <see cref="byte"/>: construction, comparison, equality,
/// formatting and parsing.
/// </summary>
public static class ByteTests
{
    [Fact]
    public static void TestCtorEmpty()
    {
        // a default-constructed byte is zero
        byte i = new byte();
        Assert.Equal(0, i);
    }

    [Fact]
    public static void TestCtorValue()
    {
        byte i = 41;
        Assert.Equal(41, i);
    }

    [Fact]
    public static void TestMaxValue()
    {
        Assert.Equal(0xFF, byte.MaxValue);
    }

    [Fact]
    public static void TestMinValue()
    {
        Assert.Equal(0, byte.MinValue);
    }

    [Theory]
    [InlineData((byte)234, 0)]
    [InlineData(byte.MinValue, 1)]
    [InlineData((byte)0, 1)]
    [InlineData((byte)45, 1)]
    [InlineData((byte)123, 1)]
    [InlineData((byte)235, -1)]
    [InlineData(byte.MaxValue, -1)]
    public static void TestCompareTo(byte value, int expected)
    {
        // compare 234 against the supplied value; NormalizeCompare clamps the
        // result to -1/0/1 so the theory data can state exact expectations
        byte i = 234;
        int result = CompareHelper.NormalizeCompare(i.CompareTo(value));
        Assert.Equal(expected, result);
    }

    [Theory]
    [InlineData(null, 1)]
    [InlineData((byte)234, 0)]
    [InlineData(byte.MinValue, 1)]
    [InlineData((byte)0, 1)]
    [InlineData((byte)45, 1)]
    [InlineData((byte)123, 1)]
    [InlineData((byte)235, -1)]
    [InlineData(byte.MaxValue, -1)]
    public static void TestCompareToObject(object obj, int expected)
    {
        IComparable comparable = (byte)234;
        int i = CompareHelper.NormalizeCompare(comparable.CompareTo(obj));
        Assert.Equal(expected, i);
    }

    [Fact]
    public static void TestCompareToObjectInvalid()
    {
        IComparable comparable = (byte)234;
        Assert.Throws<ArgumentException>(null, () => comparable.CompareTo("a")); //Obj is not a byte
    }

    [Theory]
    [InlineData((byte)78, true)]
    [InlineData((byte)0, false)]
    public static void TestEqualsObject(object obj, bool expected)
    {
        byte i = 78;
        Assert.Equal(expected, i.Equals(obj));
    }

    [Theory]
    [InlineData((byte)78, true)]
    [InlineData((byte)0, false)]
    public static void TestEquals(byte i2, bool expected)
    {
        byte i = 78;
        Assert.Equal(expected, i.Equals(i2));
    }

    [Fact]
    public static void TestGetHashCode()
    {
        // distinct values should hash differently (and not to zero)
        byte i1 = 123;
        byte i2 = 65;
        Assert.NotEqual(0, i1.GetHashCode());
        Assert.NotEqual(i1.GetHashCode(), i2.GetHashCode());
    }

    [Fact]
    public static void TestToString()
    {
        byte i1 = 63;
        Assert.Equal("63", i1.ToString());
    }

    [Fact]
    public static void TestToStringFormatProvider()
    {
        var numberFormat = new NumberFormatInfo();
        byte i1 = 63;
        Assert.Equal("63", i1.ToString(numberFormat));
    }

    [Fact]
    public static void TestToStringFormat()
    {
        byte i1 = 63;
        Assert.Equal("63", i1.ToString("G"));

        byte i2 = 82;
        Assert.Equal("82", i2.ToString("g"));

        byte i3 = 246;
        Assert.Equal(string.Format("{0:N}", 246.00), i3.ToString("N"));

        byte i4 = 0x24;
        Assert.Equal("24", i4.ToString("x"));
    }

    [Fact]
    public static void TestToStringFormatFormatProvider()
    {
        var numberFormat = new NumberFormatInfo();

        byte i1 = 63;
        Assert.Equal("63", i1.ToString("G", numberFormat));

        byte i2 = 82;
        Assert.Equal("82", i2.ToString("g", numberFormat));

        numberFormat.NegativeSign = "xx"; // setting it to trash to make sure it doesn't show up
        numberFormat.NumberGroupSeparator = "*";
        numberFormat.NumberNegativePattern = 0;
        numberFormat.NumberDecimalSeparator = ".";
        byte i3 = 24;
        Assert.Equal("24.00", i3.ToString("N", numberFormat));
    }

    /// <summary>
    /// Valid inputs for <see cref="TestParse"/>: value, style, format info, expected result.
    /// </summary>
    public static IEnumerable<object[]> ParseValidData()
    {
        NumberFormatInfo defaultFormat = null;
        NumberStyles defaultStyle = NumberStyles.Integer;
        var emptyNfi = new NumberFormatInfo();

        var testNfi = new NumberFormatInfo();
        testNfi.CurrencySymbol = "$";

        yield return new object[] { "0", defaultStyle, defaultFormat, (byte)0 };
        yield return new object[] { "123", defaultStyle, defaultFormat, (byte)123 };
        yield return new object[] { " 123 ", defaultStyle, defaultFormat, (byte)123 };
        yield return new object[] { "255", defaultStyle, defaultFormat, (byte)255 };

        yield return new object[] { "12", NumberStyles.HexNumber, defaultFormat, (byte)0x12 };
        yield return new object[] { "10", NumberStyles.AllowThousands, defaultFormat, (byte)10 };

        yield return new object[] { "123", defaultStyle, emptyNfi, (byte)123 };

        yield return new object[] { "123", NumberStyles.Any, emptyNfi, (byte)123 };
        yield return new object[] { "12", NumberStyles.HexNumber, emptyNfi, (byte)0x12 };
        yield return new object[] { "ab", NumberStyles.HexNumber, emptyNfi, (byte)0xab };
        yield return new object[] { "$100", NumberStyles.Currency, testNfi, (byte)100 };
    }

    /// <summary>
    /// Invalid inputs for <see cref="TestParseInvalid"/>: value, style, format info, expected exception type.
    /// </summary>
    public static IEnumerable<object[]> ParseInvalidData()
    {
        NumberFormatInfo defaultFormat = null;
        NumberStyles defaultStyle = NumberStyles.Integer;
        var emptyNfi = new NumberFormatInfo();

        var testNfi = new NumberFormatInfo();
        testNfi.CurrencySymbol = "$";
        testNfi.NumberDecimalSeparator = ".";

        yield return new object[] { null, defaultStyle, defaultFormat, typeof(ArgumentNullException) };
        yield return new object[] { "", defaultStyle, defaultFormat, typeof(FormatException) };
        yield return new object[] { " ", defaultStyle, defaultFormat, typeof(FormatException) };
        yield return new object[] { "Garbage", defaultStyle, defaultFormat, typeof(FormatException) };
        yield return new object[] { "ab", defaultStyle, defaultFormat, typeof(FormatException) }; // Hex value
        yield return new object[] { "1E23", defaultStyle, defaultFormat, typeof(FormatException) }; // Exponent
        yield return new object[] { "(123)", defaultStyle, defaultFormat, typeof(FormatException) }; // Parentheses
        yield return new object[] { 100.ToString("C0"), defaultStyle, defaultFormat, typeof(FormatException) }; //Currency
        yield return new object[] { 1000.ToString("N0"), defaultStyle, defaultFormat, typeof(FormatException) }; //Thousands
        yield return new object[] { 67.90.ToString("F2"), defaultStyle, defaultFormat, typeof(FormatException) }; //Decimal

        yield return new object[] { "ab", NumberStyles.None, defaultFormat, typeof(FormatException) }; // Negative hex value
        yield return new object[] { " 123 ", NumberStyles.None, defaultFormat, typeof(FormatException) }; // Trailing and leading whitespace

        yield return new object[] { "67.90", defaultStyle, testNfi, typeof(FormatException) }; // Decimal

        yield return new object[] { "-1", defaultStyle, defaultFormat, typeof(OverflowException) }; // < min value
        yield return new object[] { "256", defaultStyle, defaultFormat, typeof(OverflowException) }; // > max value
        yield return new object[] { "(123)", NumberStyles.AllowParentheses, defaultFormat, typeof(OverflowException) }; // Parentheses = negative
    }

    [Theory, MemberData("ParseValidData")]
    public static void TestParse(string value, NumberStyles style, NumberFormatInfo nfi, byte expected)
    {
        byte i;
        //If no style is specified, use the (String) or (String, IFormatProvider) overload
        if (style == NumberStyles.Integer)
        {
            Assert.True(byte.TryParse(value, out i));
            Assert.Equal(expected, i);

            Assert.Equal(expected, byte.Parse(value));

            //If a format provider is specified, but the style is the default, use the (String, IFormatProvider) overload
            if (nfi != null)
            {
                Assert.Equal(expected, byte.Parse(value, nfi));
            }
        }

        // If a format provider isn't specified, test the default one, using a new instance of NumberFormatInfo
        Assert.True(byte.TryParse(value, style, nfi ?? new NumberFormatInfo(), out i));
        Assert.Equal(expected, i);

        //If a format provider isn't specified, test the default one, using the (String, NumberStyles) overload
        if (nfi == null)
        {
            Assert.Equal(expected, byte.Parse(value, style));
        }
        Assert.Equal(expected, byte.Parse(value, style, nfi ?? new NumberFormatInfo()));
    }

    [Theory, MemberData("ParseInvalidData")]
    public static void TestParseInvalid(string value, NumberStyles style, NumberFormatInfo nfi, Type exceptionType)
    {
        byte i;
        //If no style is specified, use the (String) or (String, IFormatProvider) overload
        if (style == NumberStyles.Integer)
        {
            Assert.False(byte.TryParse(value, out i));
            Assert.Equal(default(byte), i);

            Assert.Throws(exceptionType, () => byte.Parse(value));

            //If a format provider is specified, but the style is the default, use the (String, IFormatProvider) overload
            if (nfi != null)
            {
                Assert.Throws(exceptionType, () => byte.Parse(value, nfi));
            }
        }

        // If a format provider isn't specified, test the default one, using a new instance of NumberFormatInfo
        Assert.False(byte.TryParse(value, style, nfi ?? new NumberFormatInfo(), out i));
        Assert.Equal(default(byte), i);

        //If a format provider isn't specified, test the default one, using the (String, NumberStyles) overload
        if (nfi == null)
        {
            Assert.Throws(exceptionType, () => byte.Parse(value, style));
        }
        Assert.Throws(exceptionType, () => byte.Parse(value, style, nfi ?? new NumberFormatInfo()));
    }
}
#region License
// Copyright (c) 2007 James Newton-King
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
#endregion

using System;
using System.Collections.Generic;
using System.Diagnostics;
#if !(NET20 || NET35 || PORTABLE40 || PORTABLE)
using System.Numerics;
#endif
using System.Text;
using System.IO;
using System.Xml;
using System.Globalization;
using Newtonsoft.Json.Utilities;

namespace Newtonsoft.Json
{
    // The kind of typed read the caller requested; corresponds to the reader's
    // Read/ReadAsXxx entry points and steers how string values are materialized.
    internal enum ReadType
    {
        Read,           // plain Read(): no type coercion requested
        ReadAsInt32,
        ReadAsBytes,    // string values are decoded from base64
        ReadAsString,
        ReadAsDecimal,
        ReadAsDateTime,
#if !NET20
        // DateTimeOffset is unavailable on .NET 2.0, so this member is compiled out there.
        ReadAsDateTimeOffset
#endif
    }

    /// <summary>
    /// Represents a reader that provides fast, non-cached, forward-only access to JSON text data.
/// </summary> public class JsonTextReader : JsonReader, IJsonLineInfo { private const char UnicodeReplacementChar = '\uFFFD'; private readonly TextReader _reader; private char[] _chars; private int _charsUsed; private int _charPos; private int _lineStartPos; private int _lineNumber; private bool _isEndOfFile; private StringBuffer _buffer; private StringReference _stringReference; /// <summary> /// Initializes a new instance of the <see cref="JsonReader"/> class with the specified <see cref="TextReader"/>. /// </summary> /// <param name="reader">The <c>TextReader</c> containing the XML data to read.</param> public JsonTextReader(TextReader reader) { if (reader == null) throw new ArgumentNullException("reader"); _reader = reader; _lineNumber = 1; _chars = new char[1025]; } #if DEBUG internal void SetCharBuffer(char[] chars) { _chars = chars; } #endif private StringBuffer GetBuffer() { if (_buffer == null) { _buffer = new StringBuffer(1025); } else { _buffer.Position = 0; } return _buffer; } private void OnNewLine(int pos) { _lineNumber++; _lineStartPos = pos - 1; } private void ParseString(char quote) { _charPos++; ShiftBufferIfNeeded(); ReadStringIntoBuffer(quote); if (_readType == ReadType.ReadAsBytes) { byte[] data; if (_stringReference.Length == 0) { data = new byte[0]; } else { data = Convert.FromBase64CharArray(_stringReference.Chars, _stringReference.StartIndex, _stringReference.Length); } SetToken(JsonToken.Bytes, data); } else if (_readType == ReadType.ReadAsString) { string text = _stringReference.ToString(); SetToken(JsonToken.String, text); _quoteChar = quote; } else { string text = _stringReference.ToString(); if (_dateParseHandling != DateParseHandling.None) { DateParseHandling dateParseHandling; if (_readType == ReadType.ReadAsDateTime) dateParseHandling = DateParseHandling.DateTime; #if !NET20 else if (_readType == ReadType.ReadAsDateTimeOffset) dateParseHandling = DateParseHandling.DateTimeOffset; #endif else dateParseHandling = _dateParseHandling; 
object dt; if (DateTimeUtils.TryParseDateTime(text, dateParseHandling, DateTimeZoneHandling, out dt)) { SetToken(JsonToken.Date, dt); return; } } SetToken(JsonToken.String, text); _quoteChar = quote; } } private static void BlockCopyChars(char[] src, int srcOffset, char[] dst, int dstOffset, int count) { const int charByteCount = 2; Buffer.BlockCopy(src, srcOffset * charByteCount, dst, dstOffset * charByteCount, count * charByteCount); } private void ShiftBufferIfNeeded() { // once in the last 10% of the buffer shift the remainling content to the start to avoid // unnessesarly increasing the buffer size when reading numbers/strings int length = _chars.Length; if (length - _charPos <= length * 0.1) { int count = _charsUsed - _charPos; if (count > 0) BlockCopyChars(_chars, _charPos, _chars, 0, count); _lineStartPos -= _charPos; _charPos = 0; _charsUsed = count; _chars[_charsUsed] = '\0'; } } private int ReadData(bool append) { return ReadData(append, 0); } private int ReadData(bool append, int charsRequired) { if (_isEndOfFile) return 0; // char buffer is full if (_charsUsed + charsRequired >= _chars.Length - 1) { if (append) { // copy to new array either double the size of the current or big enough to fit required content int newArrayLength = Math.Max(_chars.Length * 2, _charsUsed + charsRequired + 1); // increase the size of the buffer char[] dst = new char[newArrayLength]; BlockCopyChars(_chars, 0, dst, 0, _chars.Length); _chars = dst; } else { int remainingCharCount = _charsUsed - _charPos; if (remainingCharCount + charsRequired + 1 >= _chars.Length) { // the remaining count plus the required is bigger than the current buffer size char[] dst = new char[remainingCharCount + charsRequired + 1]; if (remainingCharCount > 0) BlockCopyChars(_chars, _charPos, dst, 0, remainingCharCount); _chars = dst; } else { // copy any remaining data to the beginning of the buffer if needed and reset positions if (remainingCharCount > 0) BlockCopyChars(_chars, _charPos, _chars, 0, 
remainingCharCount); } _lineStartPos -= _charPos; _charPos = 0; _charsUsed = remainingCharCount; } } int attemptCharReadCount = _chars.Length - _charsUsed - 1; int charsRead = _reader.Read(_chars, _charsUsed, attemptCharReadCount); _charsUsed += charsRead; if (charsRead == 0) _isEndOfFile = true; _chars[_charsUsed] = '\0'; return charsRead; } private bool EnsureChars(int relativePosition, bool append) { if (_charPos + relativePosition >= _charsUsed) return ReadChars(relativePosition, append); return true; } private bool ReadChars(int relativePosition, bool append) { if (_isEndOfFile) return false; int charsRequired = _charPos + relativePosition - _charsUsed + 1; int totalCharsRead = 0; // it is possible that the TextReader doesn't return all data at once // repeat read until the required text is returned or the reader is out of content do { int charsRead = ReadData(append, charsRequired - totalCharsRead); // no more content if (charsRead == 0) break; totalCharsRead += charsRead; } while (totalCharsRead < charsRequired); if (totalCharsRead < charsRequired) return false; return true; } /// <summary> /// Reads the next JSON token from the stream. /// </summary> /// <returns> /// true if the next token was read successfully; false if there are no more tokens to read. /// </returns> [DebuggerStepThrough] public override bool Read() { _readType = ReadType.Read; if (!ReadInternal()) { SetToken(JsonToken.None); return false; } return true; } /// <summary> /// Reads the next JSON token from the stream as a <see cref="T:Byte[]"/>. /// </summary> /// <returns> /// A <see cref="T:Byte[]"/> or a null reference if the next JSON token is null. This method will return <c>null</c> at the end of an array. /// </returns> public override byte[] ReadAsBytes() { return ReadAsBytesInternal(); } /// <summary> /// Reads the next JSON token from the stream as a <see cref="Nullable{Decimal}"/>. /// </summary> /// <returns>A <see cref="Nullable{Decimal}"/>. 
This method will return <c>null</c> at the end of an array.</returns> public override decimal? ReadAsDecimal() { return ReadAsDecimalInternal(); } /// <summary> /// Reads the next JSON token from the stream as a <see cref="Nullable{Int32}"/>. /// </summary> /// <returns>A <see cref="Nullable{Int32}"/>. This method will return <c>null</c> at the end of an array.</returns> public override int? ReadAsInt32() { return ReadAsInt32Internal(); } /// <summary> /// Reads the next JSON token from the stream as a <see cref="String"/>. /// </summary> /// <returns>A <see cref="String"/>. This method will return <c>null</c> at the end of an array.</returns> public override string ReadAsString() { return ReadAsStringInternal(); } /// <summary> /// Reads the next JSON token from the stream as a <see cref="Nullable{DateTime}"/>. /// </summary> /// <returns>A <see cref="String"/>. This method will return <c>null</c> at the end of an array.</returns> public override DateTime? ReadAsDateTime() { return ReadAsDateTimeInternal(); } #if !NET20 /// <summary> /// Reads the next JSON token from the stream as a <see cref="Nullable{DateTimeOffset}"/>. /// </summary> /// <returns>A <see cref="DateTimeOffset"/>. This method will return <c>null</c> at the end of an array.</returns> public override DateTimeOffset? 
ReadAsDateTimeOffset() { return ReadAsDateTimeOffsetInternal(); } #endif internal override bool ReadInternal() { while (true) { switch (_currentState) { case State.Start: case State.Property: case State.Array: case State.ArrayStart: case State.Constructor: case State.ConstructorStart: return ParseValue(); case State.Complete: break; case State.Object: case State.ObjectStart: return ParseObject(); case State.PostValue: // returns true if it hits // end of object or array if (ParsePostValue()) return true; break; case State.Finished: if (EnsureChars(0, false)) { EatWhitespace(false); if (_isEndOfFile) { return false; } if (_chars[_charPos] == '/') { ParseComment(); return true; } else { throw JsonReaderException.Create(this, "Additional text encountered after finished reading JSON content: {0}.".FormatWith(CultureInfo.InvariantCulture, _chars[_charPos])); } } return false; case State.Closed: break; case State.Error: break; default: throw JsonReaderException.Create(this, "Unexpected state: {0}.".FormatWith(CultureInfo.InvariantCulture, CurrentState)); } } } private void ReadStringIntoBuffer(char quote) { int charPos = _charPos; int initialPosition = _charPos; int lastWritePosition = _charPos; StringBuffer buffer = null; while (true) { switch (_chars[charPos++]) { case '\0': if (_charsUsed == charPos - 1) { charPos--; if (ReadData(true) == 0) { _charPos = charPos; throw JsonReaderException.Create(this, "Unterminated string. Expected delimiter: {0}.".FormatWith(CultureInfo.InvariantCulture, quote)); } } break; case '\\': _charPos = charPos; if (!EnsureChars(0, true)) { _charPos = charPos; throw JsonReaderException.Create(this, "Unterminated string. 
Expected delimiter: {0}.".FormatWith(CultureInfo.InvariantCulture, quote)); } // start of escape sequence int escapeStartPos = charPos - 1; char currentChar = _chars[charPos]; char writeChar; switch (currentChar) { case 'b': charPos++; writeChar = '\b'; break; case 't': charPos++; writeChar = '\t'; break; case 'n': charPos++; writeChar = '\n'; break; case 'f': charPos++; writeChar = '\f'; break; case 'r': charPos++; writeChar = '\r'; break; case '\\': charPos++; writeChar = '\\'; break; case '"': case '\'': case '/': writeChar = currentChar; charPos++; break; case 'u': charPos++; _charPos = charPos; writeChar = ParseUnicode(); if (StringUtils.IsLowSurrogate(writeChar)) { // low surrogate with no preceding high surrogate; this char is replaced writeChar = UnicodeReplacementChar; } else if (StringUtils.IsHighSurrogate(writeChar)) { bool anotherHighSurrogate; // loop for handling situations where there are multiple consecutive high surrogates do { anotherHighSurrogate = false; // potential start of a surrogate pair if (EnsureChars(2, true) && _chars[_charPos] == '\\' && _chars[_charPos + 1] == 'u') { char highSurrogate = writeChar; _charPos += 2; writeChar = ParseUnicode(); if (StringUtils.IsLowSurrogate(writeChar)) { // a valid surrogate pair! 
} else if (StringUtils.IsHighSurrogate(writeChar)) { // another high surrogate; replace current and start check over highSurrogate = UnicodeReplacementChar; anotherHighSurrogate = true; } else { // high surrogate not followed by low surrogate; original char is replaced highSurrogate = UnicodeReplacementChar; } if (buffer == null) buffer = GetBuffer(); WriteCharToBuffer(buffer, highSurrogate, lastWritePosition, escapeStartPos); lastWritePosition = _charPos; } else { // there are not enough remaining chars for the low surrogate or is not follow by unicode sequence // replace high surrogate and continue on as usual writeChar = UnicodeReplacementChar; } } while (anotherHighSurrogate); } charPos = _charPos; break; default: charPos++; _charPos = charPos; throw JsonReaderException.Create(this, "Bad JSON escape sequence: {0}.".FormatWith(CultureInfo.InvariantCulture, @"\" + currentChar)); } if (buffer == null) buffer = GetBuffer(); WriteCharToBuffer(buffer, writeChar, lastWritePosition, escapeStartPos); lastWritePosition = charPos; break; case StringUtils.CarriageReturn: _charPos = charPos - 1; ProcessCarriageReturn(true); charPos = _charPos; break; case StringUtils.LineFeed: _charPos = charPos - 1; ProcessLineFeed(); charPos = _charPos; break; case '"': case '\'': if (_chars[charPos - 1] == quote) { charPos--; if (initialPosition == lastWritePosition) { _stringReference = new StringReference(_chars, initialPosition, charPos - initialPosition); } else { if (buffer == null) buffer = GetBuffer(); if (charPos > lastWritePosition) buffer.Append(_chars, lastWritePosition, charPos - lastWritePosition); _stringReference = new StringReference(buffer.GetInternalBuffer(), 0, buffer.Position); } charPos++; _charPos = charPos; return; } break; } } } private void WriteCharToBuffer(StringBuffer buffer, char writeChar, int lastWritePosition, int writeToPosition) { if (writeToPosition > lastWritePosition) { buffer.Append(_chars, lastWritePosition, writeToPosition - lastWritePosition); } 
buffer.Append(writeChar); } private char ParseUnicode() { char writeChar; if (EnsureChars(4, true)) { string hexValues = new string(_chars, _charPos, 4); char hexChar = Convert.ToChar(int.Parse(hexValues, NumberStyles.HexNumber, NumberFormatInfo.InvariantInfo)); writeChar = hexChar; _charPos += 4; } else { throw JsonReaderException.Create(this, "Unexpected end while parsing unicode character."); } return writeChar; } private void ReadNumberIntoBuffer() { int charPos = _charPos; while (true) { switch (_chars[charPos++]) { case '\0': if (_charsUsed == charPos - 1) { charPos--; _charPos = charPos; if (ReadData(true) == 0) return; } else { _charPos = charPos - 1; return; } break; case '-': case '+': case 'a': case 'A': case 'b': case 'B': case 'c': case 'C': case 'd': case 'D': case 'e': case 'E': case 'f': case 'F': case 'x': case 'X': case '.': case '0': case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': break; default: _charPos = charPos - 1; return; } } } private void ClearRecentString() { if (_buffer != null) _buffer.Position = 0; _stringReference = new StringReference(); } private bool ParsePostValue() { while (true) { char currentChar = _chars[_charPos]; switch (currentChar) { case '\0': if (_charsUsed == _charPos) { if (ReadData(false) == 0) { _currentState = State.Finished; return false; } } else { _charPos++; } break; case '}': _charPos++; SetToken(JsonToken.EndObject); return true; case ']': _charPos++; SetToken(JsonToken.EndArray); return true; case ')': _charPos++; SetToken(JsonToken.EndConstructor); return true; case '/': ParseComment(); return true; case ',': _charPos++; // finished parsing SetStateBasedOnCurrent(); return false; case ' ': case StringUtils.Tab: // eat _charPos++; break; case StringUtils.CarriageReturn: ProcessCarriageReturn(false); break; case StringUtils.LineFeed: ProcessLineFeed(); break; default: if (char.IsWhiteSpace(currentChar)) { // eat _charPos++; } else { throw 
JsonReaderException.Create(this, "After parsing a value an unexpected character was encountered: {0}.".FormatWith(CultureInfo.InvariantCulture, currentChar)); } break; } } } private bool ParseObject() { while (true) { char currentChar = _chars[_charPos]; switch (currentChar) { case '\0': if (_charsUsed == _charPos) { if (ReadData(false) == 0) return false; } else { _charPos++; } break; case '}': SetToken(JsonToken.EndObject); _charPos++; return true; case '/': ParseComment(); return true; case StringUtils.CarriageReturn: ProcessCarriageReturn(false); break; case StringUtils.LineFeed: ProcessLineFeed(); break; case ' ': case StringUtils.Tab: // eat _charPos++; break; default: if (char.IsWhiteSpace(currentChar)) { // eat _charPos++; } else { return ParseProperty(); } break; } } } private bool ParseProperty() { char firstChar = _chars[_charPos]; char quoteChar; if (firstChar == '"' || firstChar == '\'') { _charPos++; quoteChar = firstChar; ShiftBufferIfNeeded(); ReadStringIntoBuffer(quoteChar); } else if (ValidIdentifierChar(firstChar)) { quoteChar = '\0'; ShiftBufferIfNeeded(); ParseUnquotedProperty(); } else { throw JsonReaderException.Create(this, "Invalid property identifier character: {0}.".FormatWith(CultureInfo.InvariantCulture, _chars[_charPos])); } string propertyName = _stringReference.ToString(); EatWhitespace(false); if (_chars[_charPos] != ':') throw JsonReaderException.Create(this, "Invalid character after parsing property name. 
Expected ':' but got: {0}.".FormatWith(CultureInfo.InvariantCulture, _chars[_charPos])); _charPos++; SetToken(JsonToken.PropertyName, propertyName); _quoteChar = quoteChar; ClearRecentString(); return true; } private bool ValidIdentifierChar(char value) { return (char.IsLetterOrDigit(value) || value == '_' || value == '$'); } private void ParseUnquotedProperty() { int initialPosition = _charPos; // parse unquoted property name until whitespace or colon while (true) { switch (_chars[_charPos]) { case '\0': if (_charsUsed == _charPos) { if (ReadData(true) == 0) throw JsonReaderException.Create(this, "Unexpected end while parsing unquoted property name."); break; } _stringReference = new StringReference(_chars, initialPosition, _charPos - initialPosition); return; default: char currentChar = _chars[_charPos]; if (ValidIdentifierChar(currentChar)) { _charPos++; break; } else if (char.IsWhiteSpace(currentChar) || currentChar == ':') { _stringReference = new StringReference(_chars, initialPosition, _charPos - initialPosition); return; } throw JsonReaderException.Create(this, "Invalid JavaScript property identifier character: {0}.".FormatWith(CultureInfo.InvariantCulture, currentChar)); } } } private bool ParseValue() { while (true) { char currentChar = _chars[_charPos]; switch (currentChar) { case '\0': if (_charsUsed == _charPos) { if (ReadData(false) == 0) return false; } else { _charPos++; } break; case '"': case '\'': ParseString(currentChar); return true; case 't': ParseTrue(); return true; case 'f': ParseFalse(); return true; case 'n': if (EnsureChars(1, true)) { char next = _chars[_charPos + 1]; if (next == 'u') ParseNull(); else if (next == 'e') ParseConstructor(); else throw JsonReaderException.Create(this, "Unexpected character encountered while parsing value: {0}.".FormatWith(CultureInfo.InvariantCulture, _chars[_charPos])); } else { throw JsonReaderException.Create(this, "Unexpected end."); } return true; case 'N': ParseNumberNaN(); return true; case 'I': 
ParseNumberPositiveInfinity(); return true; case '-': if (EnsureChars(1, true) && _chars[_charPos + 1] == 'I') ParseNumberNegativeInfinity(); else ParseNumber(); return true; case '/': ParseComment(); return true; case 'u': ParseUndefined(); return true; case '{': _charPos++; SetToken(JsonToken.StartObject); return true; case '[': _charPos++; SetToken(JsonToken.StartArray); return true; case ']': _charPos++; SetToken(JsonToken.EndArray); return true; case ',': // don't increment position, the next call to read will handle comma // this is done to handle multiple empty comma values SetToken(JsonToken.Undefined); return true; case ')': _charPos++; SetToken(JsonToken.EndConstructor); return true; case StringUtils.CarriageReturn: ProcessCarriageReturn(false); break; case StringUtils.LineFeed: ProcessLineFeed(); break; case ' ': case StringUtils.Tab: // eat _charPos++; break; default: if (char.IsWhiteSpace(currentChar)) { // eat _charPos++; break; } else if (char.IsNumber(currentChar) || currentChar == '-' || currentChar == '.') { ParseNumber(); return true; } else { throw JsonReaderException.Create(this, "Unexpected character encountered while parsing value: {0}.".FormatWith(CultureInfo.InvariantCulture, currentChar)); } } } } private void ProcessLineFeed() { _charPos++; OnNewLine(_charPos); } private void ProcessCarriageReturn(bool append) { _charPos++; if (EnsureChars(1, append) && _chars[_charPos] == StringUtils.LineFeed) _charPos++; OnNewLine(_charPos); } private bool EatWhitespace(bool oneOrMore) { bool finished = false; bool ateWhitespace = false; while (!finished) { char currentChar = _chars[_charPos]; switch (currentChar) { case '\0': if (_charsUsed == _charPos) { if (ReadData(false) == 0) finished = true; } else { _charPos++; } break; case StringUtils.CarriageReturn: ProcessCarriageReturn(false); break; case StringUtils.LineFeed: ProcessLineFeed(); break; default: if (currentChar == ' ' || char.IsWhiteSpace(currentChar)) { ateWhitespace = true; _charPos++; } 
else { finished = true; } break; } } return (!oneOrMore || ateWhitespace); } private void ParseConstructor() { if (MatchValueWithTrailingSeperator("new")) { EatWhitespace(false); int initialPosition = _charPos; int endPosition; while (true) { char currentChar = _chars[_charPos]; if (currentChar == '\0') { if (_charsUsed == _charPos) { if (ReadData(true) == 0) throw JsonReaderException.Create(this, "Unexpected end while parsing constructor."); } else { endPosition = _charPos; _charPos++; break; } } else if (char.IsLetterOrDigit(currentChar)) { _charPos++; } else if (currentChar == StringUtils.CarriageReturn) { endPosition = _charPos; ProcessCarriageReturn(true); break; } else if (currentChar == StringUtils.LineFeed) { endPosition = _charPos; ProcessLineFeed(); break; } else if (char.IsWhiteSpace(currentChar)) { endPosition = _charPos; _charPos++; break; } else if (currentChar == '(') { endPosition = _charPos; break; } else { throw JsonReaderException.Create(this, "Unexpected character while parsing constructor: {0}.".FormatWith(CultureInfo.InvariantCulture, currentChar)); } } _stringReference = new StringReference(_chars, initialPosition, endPosition - initialPosition); string constructorName = _stringReference.ToString(); EatWhitespace(false); if (_chars[_charPos] != '(') throw JsonReaderException.Create(this, "Unexpected character while parsing constructor: {0}.".FormatWith(CultureInfo.InvariantCulture, _chars[_charPos])); _charPos++; ClearRecentString(); SetToken(JsonToken.StartConstructor, constructorName); } else { throw JsonReaderException.Create(this, "Unexpected content while parsing JSON."); } } private void ParseNumber() { ShiftBufferIfNeeded(); char firstChar = _chars[_charPos]; int initialPosition = _charPos; ReadNumberIntoBuffer(); _stringReference = new StringReference(_chars, initialPosition, _charPos - initialPosition); object numberValue; JsonToken numberType; bool singleDigit = (char.IsDigit(firstChar) && _stringReference.Length == 1); bool 
nonBase10 = (firstChar == '0' && _stringReference.Length > 1 && _stringReference.Chars[_stringReference.StartIndex + 1] != '.' && _stringReference.Chars[_stringReference.StartIndex + 1] != 'e' && _stringReference.Chars[_stringReference.StartIndex + 1] != 'E'); if (_readType == ReadType.ReadAsInt32) { if (singleDigit) { // digit char values start at 48 numberValue = firstChar - 48; } else if (nonBase10) { string number = _stringReference.ToString(); // decimal.Parse doesn't support parsing hexadecimal values int integer = number.StartsWith("0x", StringComparison.OrdinalIgnoreCase) ? Convert.ToInt32(number, 16) : Convert.ToInt32(number, 8); numberValue = integer; } else { int value; ParseResult parseResult = ConvertUtils.Int32TryParse(_stringReference.Chars, _stringReference.StartIndex, _stringReference.Length, out value); if (parseResult == ParseResult.Success) numberValue = value; else if (parseResult == ParseResult.Overflow) throw JsonReaderException.Create(this, "JSON integer {0} is too large or small for an Int32.".FormatWith(CultureInfo.InvariantCulture, _stringReference.ToString())); else throw JsonReaderException.Create(this, "Input string '{0}' is not a valid integer.".FormatWith(CultureInfo.InvariantCulture, _stringReference.ToString())); } numberType = JsonToken.Integer; } else if (_readType == ReadType.ReadAsDecimal) { if (singleDigit) { // digit char values start at 48 numberValue = (decimal)firstChar - 48; } else if (nonBase10) { string number = _stringReference.ToString(); // decimal.Parse doesn't support parsing hexadecimal values long integer = number.StartsWith("0x", StringComparison.OrdinalIgnoreCase) ? 
Convert.ToInt64(number, 16) : Convert.ToInt64(number, 8); numberValue = Convert.ToDecimal(integer); } else { string number = _stringReference.ToString(); decimal value; if (decimal.TryParse(number, NumberStyles.Number | NumberStyles.AllowExponent, CultureInfo.InvariantCulture, out value)) numberValue = value; else throw JsonReaderException.Create(this, "Input string '{0}' is not a valid decimal.".FormatWith(CultureInfo.InvariantCulture, _stringReference.ToString())); } numberType = JsonToken.Float; } else { if (singleDigit) { // digit char values start at 48 numberValue = (long)firstChar - 48; numberType = JsonToken.Integer; } else if (nonBase10) { string number = _stringReference.ToString(); numberValue = number.StartsWith("0x", StringComparison.OrdinalIgnoreCase) ? Convert.ToInt64(number, 16) : Convert.ToInt64(number, 8); numberType = JsonToken.Integer; } else { long value; ParseResult parseResult = ConvertUtils.Int64TryParse(_stringReference.Chars, _stringReference.StartIndex, _stringReference.Length, out value); if (parseResult == ParseResult.Success) { numberValue = value; numberType = JsonToken.Integer; } else if (parseResult == ParseResult.Overflow) { #if !(NET20 || NET35 || PORTABLE40 || PORTABLE) string number = _stringReference.ToString(); numberValue = BigInteger.Parse(number, CultureInfo.InvariantCulture); numberType = JsonToken.Integer; #else throw JsonReaderException.Create(this, "JSON integer {0} is too large or small for an Int64.".FormatWith(CultureInfo.InvariantCulture, _stringReference.ToString())); #endif } else { string number = _stringReference.ToString(); if (_floatParseHandling == FloatParseHandling.Decimal) { decimal d; if (decimal.TryParse(number, NumberStyles.Number | NumberStyles.AllowExponent, CultureInfo.InvariantCulture, out d)) numberValue = d; else throw JsonReaderException.Create(this, "Input string '{0}' is not a valid decimal.".FormatWith(CultureInfo.InvariantCulture, number)); } else { double d; if (double.TryParse(number, 
NumberStyles.Float | NumberStyles.AllowThousands, CultureInfo.InvariantCulture, out d)) numberValue = d; else throw JsonReaderException.Create(this, "Input string '{0}' is not a valid number.".FormatWith(CultureInfo.InvariantCulture, number)); } numberType = JsonToken.Float; } } } ClearRecentString(); SetToken(numberType, numberValue); } private void ParseComment() { // should have already parsed / character before reaching this method _charPos++; if (!EnsureChars(1, false) || _chars[_charPos] != '*') throw JsonReaderException.Create(this, "Error parsing comment. Expected: *, got {0}.".FormatWith(CultureInfo.InvariantCulture, _chars[_charPos])); else _charPos++; int initialPosition = _charPos; bool commentFinished = false; while (!commentFinished) { switch (_chars[_charPos]) { case '\0': if (_charsUsed == _charPos) { if (ReadData(true) == 0) throw JsonReaderException.Create(this, "Unexpected end while parsing comment."); } else { _charPos++; } break; case '*': _charPos++; if (EnsureChars(0, true)) { if (_chars[_charPos] == '/') { _stringReference = new StringReference(_chars, initialPosition, _charPos - initialPosition - 1); _charPos++; commentFinished = true; } } break; case StringUtils.CarriageReturn: ProcessCarriageReturn(true); break; case StringUtils.LineFeed: ProcessLineFeed(); break; default: _charPos++; break; } } SetToken(JsonToken.Comment, _stringReference.ToString()); ClearRecentString(); } private bool MatchValue(string value) { if (!EnsureChars(value.Length - 1, true)) return false; for (int i = 0; i < value.Length; i++) { if (_chars[_charPos + i] != value[i]) { return false; } } _charPos += value.Length; return true; } private bool MatchValueWithTrailingSeperator(string value) { // will match value and then move to the next character, checking that it is a seperator character bool match = MatchValue(value); if (!match) return false; if (!EnsureChars(0, false)) return true; return IsSeperator(_chars[_charPos]) || _chars[_charPos] == '\0'; } private 
bool IsSeperator(char c) { switch (c) { case '}': case ']': case ',': return true; case '/': // check next character to see if start of a comment if (!EnsureChars(1, false)) return false; return (_chars[_charPos + 1] == '*'); case ')': if (CurrentState == State.Constructor || CurrentState == State.ConstructorStart) return true; break; case ' ': case StringUtils.Tab: case StringUtils.LineFeed: case StringUtils.CarriageReturn: return true; default: if (char.IsWhiteSpace(c)) return true; break; } return false; } private void ParseTrue() { // check characters equal 'true' // and that it is followed by either a seperator character // or the text ends if (MatchValueWithTrailingSeperator(JsonConvert.True)) { SetToken(JsonToken.Boolean, true); } else { throw JsonReaderException.Create(this, "Error parsing boolean value."); } } private void ParseNull() { if (MatchValueWithTrailingSeperator(JsonConvert.Null)) { SetToken(JsonToken.Null); } else { throw JsonReaderException.Create(this, "Error parsing null value."); } } private void ParseUndefined() { if (MatchValueWithTrailingSeperator(JsonConvert.Undefined)) { SetToken(JsonToken.Undefined); } else { throw JsonReaderException.Create(this, "Error parsing undefined value."); } } private void ParseFalse() { if (MatchValueWithTrailingSeperator(JsonConvert.False)) { SetToken(JsonToken.Boolean, false); } else { throw JsonReaderException.Create(this, "Error parsing boolean value."); } } private void ParseNumberNegativeInfinity() { if (MatchValueWithTrailingSeperator(JsonConvert.NegativeInfinity)) { if (_floatParseHandling == FloatParseHandling.Decimal) throw new JsonReaderException("Cannot read -Infinity as a decimal."); SetToken(JsonToken.Float, double.NegativeInfinity); } else { throw JsonReaderException.Create(this, "Error parsing negative infinity value."); } } private void ParseNumberPositiveInfinity() { if (MatchValueWithTrailingSeperator(JsonConvert.PositiveInfinity)) { if (_floatParseHandling == FloatParseHandling.Decimal) 
throw new JsonReaderException("Cannot read Infinity as a decimal."); SetToken(JsonToken.Float, double.PositiveInfinity); } else { throw JsonReaderException.Create(this, "Error parsing positive infinity value."); } } private void ParseNumberNaN() { if (MatchValueWithTrailingSeperator(JsonConvert.NaN)) { if (_floatParseHandling == FloatParseHandling.Decimal) throw new JsonReaderException("Cannot read NaN as a decimal."); SetToken(JsonToken.Float, double.NaN); } else { throw JsonReaderException.Create(this, "Error parsing NaN value."); } } /// <summary> /// Changes the state to closed. /// </summary> public override void Close() { base.Close(); if (CloseInput && _reader != null) #if !(NETFX_CORE || PORTABLE40 || PORTABLE) _reader.Close(); #else _reader.Dispose(); #endif if (_buffer != null) _buffer.Clear(); } /// <summary> /// Gets a value indicating whether the class can return line information. /// </summary> /// <returns> /// <c>true</c> if LineNumber and LinePosition can be provided; otherwise, <c>false</c>. /// </returns> public bool HasLineInfo() { return true; } /// <summary> /// Gets the current line number. /// </summary> /// <value> /// The current line number or 0 if no line information is available (for example, HasLineInfo returns false). /// </value> public int LineNumber { get { if (CurrentState == State.Start && LinePosition == 0) return 0; return _lineNumber; } } /// <summary> /// Gets the current line position. /// </summary> /// <value> /// The current line position or 0 if no line information is available (for example, HasLineInfo returns false). /// </value> public int LinePosition { get { return _charPos - _lineStartPos; } } } }
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Reflection;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CSharp;
using System;
using CK.Core;

namespace CK.CodeGen
{
    /// <summary>
    /// Encapsulates the Roslyn compiler: parses C# source into syntax trees and,
    /// optionally, emits a compiled assembly from them.
    /// </summary>
    public class CodeGenerator
    {
        readonly Func<ICodeWorkspace> _workspaceFactory;

        /// <summary>
        /// Gets the default option, initialized to produce <see cref="OutputKind.DynamicallyLinkedLibrary"/> output.
        /// </summary>
        public static readonly CSharpCompilationOptions DefaultCompilationOptions = new CSharpCompilationOptions( OutputKind.DynamicallyLinkedLibrary );

        /// <summary>
        /// Initializes a new <see cref="CodeGenerator"/> with options.
        /// </summary>
        /// <param name="workspaceFactory">
        /// Factory for <see cref="ICodeWorkspace"/> implementations.
        /// Must not be null.
        /// </param>
        public CodeGenerator( Func<ICodeWorkspace> workspaceFactory )
        {
            if( workspaceFactory == null ) throw new ArgumentNullException( nameof( workspaceFactory ) );
            _workspaceFactory = workspaceFactory;
            // CSharpCompilationOptions is immutable: reuse the shared default instead of
            // building an identical throw-away copy (keeps ctor and property doc consistent).
            CompilationOptions = DefaultCompilationOptions;
        }

        /// <summary>
        /// Gets or sets the parse options to use.
        /// Default to null: all default applies, the language version is <see cref="LanguageVersion.Default"/>.
        /// </summary>
        public CSharpParseOptions? ParseOptions { get; set; }

        /// <summary>
        /// Gets or sets a <see cref="CSharpCompilationOptions"/>.
        /// When let to null, defaults to the <see cref="DefaultCompilationOptions"/>.
        /// </summary>
        public CSharpCompilationOptions? CompilationOptions { get; set; }

        /// <summary>
        /// Gets or sets whether the assembly that defines the object type is
        /// automatically registered.
        /// Defaults to true.
        /// </summary>
        public bool AutoRegisterRuntimeAssembly { get; set; } = true;

        /// <summary>
        /// Gets a mutable list of <see cref="ICodeGeneratorModule"/>.
        /// Since a code module can maintain an internal state between the calls to <see cref="ICodeGeneratorModule.Rewrite(IReadOnlyList{SyntaxTree})"/>
        /// and <see cref="ICodeGeneratorModule.Inject(ICodeWorkspace)"/>, this list is cleared by each
        /// call to Generate instance methods.
        /// </summary>
        public List<ICodeGeneratorModule> Modules { get; } = new List<ICodeGeneratorModule>();

        /// <summary>
        /// Generates an assembly from a source, a minimal list of required reference assemblies.
        /// </summary>
        /// <param name="sourceCode">The source code. Must be valid C# code.</param>
        /// <param name="assemblyPath">The full final assembly path (including the .dll extension). Can be null if skipCompilation is true.</param>
        /// <param name="someReferences">List of reference assemblies that can be a subset of the actual dependencies.</param>
        /// <param name="skipCompilation">True to skip the compilation. Only the parsing and the source generation is done.</param>
        /// <param name="loader">Optional loader function to load the final emitted assembly.</param>
        /// <returns>Encapsulation of the result.</returns>
        public GenerateResult Generate( string sourceCode, string assemblyPath, IEnumerable<Assembly> someReferences, bool skipCompilation, Func<string, Assembly>? loader = null )
        {
            var w = _workspaceFactory();
            if( !String.IsNullOrWhiteSpace( sourceCode ) ) w.Global.Append( sourceCode );
            foreach( var a in someReferences ) w.DoEnsureAssemblyReference( a );
            return Generate( w, assemblyPath, skipCompilation, loader );
        }

        /// <summary>
        /// Generates an assembly from a source and a minimal list of required reference assemblies.
        /// </summary>
        /// <param name="code">The source code workspace. Must not be null.</param>
        /// <param name="assemblyPath">The full final assembly path (including the .dll extension). Can be null if skipCompilation is true.</param>
        /// <param name="skipCompilation">True to skip the compilation. Only the parsing and the source generation is done.</param>
        /// <param name="loader">Optional loader function to load the final emitted assembly.</param>
        /// <returns>Encapsulation of the result.</returns>
        public GenerateResult Generate( ICodeWorkspace code, string assemblyPath, bool skipCompilation, Func<string, Assembly>? loader = null )
        {
            if( code == null ) throw new ArgumentNullException( nameof( code ) );
            using( var weakLoader = WeakAssemblyNameResolver.TemporaryInstall() )
            {
                var input = GeneratorInput.Create( _workspaceFactory, code, Modules, !skipCompilation && AutoRegisterRuntimeAssembly, ParseOptions );
                // Modules may hold state between Rewrite and Inject: clear them once consumed.
                Modules.Clear();
                if( skipCompilation ) return new GenerateResult( input.Trees );
                // Close the assembly set transitively so Roslyn sees every dependency.
                var collector = new HashSet<Assembly>();
                foreach( var a in input.Assemblies )
                {
                    if( collector.Add( a ) ) Discover( a, collector );
                }
                return Generate( CompilationOptions,
                                 input.Trees,
                                 assemblyPath,
                                 collector.Select( a => MetadataReference.CreateFromFile( new Uri( a.Location ).LocalPath ) ),
                                 loader )
                        .WithLoadFailures( weakLoader.Conflicts );
            }
        }

        // Recursively collects the referenced assemblies of a into collector.
        static void Discover( Assembly a, HashSet<Assembly> collector )
        {
            foreach( var name in a.GetReferencedAssemblies() )
            {
                var dep = Assembly.Load( name );
                if( collector.Add( dep ) ) Discover( dep, collector );
            }
        }

        /// <summary>
        /// Compiles or parses only a single code source.
        /// </summary>
        /// <param name="code">The source code to parse (and optionally compile).</param>
        /// <param name="assemblyPath">The output path or null if only parsing is required.</param>
        /// <param name="references">Optional list of dependent assemblies. Used only if compilation is required. This list will be transitively closed.</param>
        /// <param name="parseOptions">By default, all default applies, the language version is <see cref="LanguageVersion.Default"/>.</param>
        /// <param name="compileOptions">The compilation options. Used only if compilation is required. Defaults to <see cref="DefaultCompilationOptions"/>.</param>
        /// <param name="loader">Optional loader function to load the final emitted assembly. Used only if compilation is required.</param>
        /// <returns>Encapsulation of the result.</returns>
        public static GenerateResult Generate( string code,
                                               string? assemblyPath = null,
                                               IEnumerable<Assembly>? references = null,
                                               CSharpParseOptions? parseOptions = null,
                                               CSharpCompilationOptions? compileOptions = null,
                                               Func<string, Assembly>? loader = null )
        {
            SyntaxTree[] trees = new[] { SyntaxFactory.ParseSyntaxTree( code, parseOptions ) };
            if( String.IsNullOrEmpty( assemblyPath ) )
            {
                // Parsing is enough.
                return new GenerateResult( trees );
            }
            using( var weakLoader = WeakAssemblyNameResolver.TemporaryInstall() )
            {
                var collector = new HashSet<Assembly>();
                // mscorlib/System.Runtime is always required.
                collector.Add( typeof( object ).Assembly );
                if( references != null )
                {
                    foreach( var a in references )
                    {
                        if( collector.Add( a ) ) Discover( a, collector );
                    }
                }
                return Generate( compileOptions,
                                 trees,
                                 assemblyPath,
                                 collector.Select( a => MetadataReference.CreateFromFile( new Uri( a.Location ).LocalPath ) ),
                                 loader )
                        .WithLoadFailures( weakLoader.Conflicts );
            }
        }

        /// <summary>
        /// Generates an assembly from a <see cref="SyntaxTree"/> list and a
        /// list of <see cref="MetadataReference"/> required reference assemblies.
        /// <para>
        /// Caution: this method is not protected by the <see cref="WeakAssemblyNameResolver"/>.
        /// It should be done, if necessary, by the caller.
        /// </para>
        /// </summary>
        /// <param name="compileOptions">Compilation options. When null, <see cref="DefaultCompilationOptions"/> applies.</param>
        /// <param name="trees">The syntax trees.</param>
        /// <param name="assemblyPath">The full final assembly path (including the .dll extension). Must not be null.</param>
        /// <param name="allReferences">Optional list of assemblies' references.</param>
        /// <param name="loader">Optional loader function to load the final emitted assembly.</param>
        /// <returns>Encapsulation of the result.</returns>
        public static GenerateResult Generate( CSharpCompilationOptions? compileOptions,
                                               IReadOnlyList<SyntaxTree> trees,
                                               string assemblyPath,
                                               IEnumerable<MetadataReference>? allReferences = null,
                                               Func<string, Assembly>? loader = null )
        {
            if( assemblyPath == null ) throw new ArgumentNullException( nameof( assemblyPath ) );
            try
            {
                var option = (compileOptions ?? DefaultCompilationOptions)
                                .WithAssemblyIdentityComparer( DesktopAssemblyIdentityComparer.Default );
                CSharpCompilation compilation = CSharpCompilation.Create(
                    Path.GetFileNameWithoutExtension( assemblyPath ),
                    trees,
                    allReferences,
                    option );
                var r = compilation.Emit( assemblyPath );
                if( r.Success && loader != null )
                {
                    try
                    {
                        return new GenerateResult( null, trees, r, loader( assemblyPath ), null, null );
                    }
                    catch( Exception ex )
                    {
                        // Emission succeeded but loading failed: surface the load exception.
                        return new GenerateResult( null, trees, r, null, ex, null );
                    }
                }
                return new GenerateResult( null, trees, r, null, null, null );
            }
            catch( Exception ex )
            {
                return new GenerateResult( ex, Array.Empty<SyntaxTree>(), null, null, null, null );
            }
        }
    }
}
// Copyright (c) 1995-2009 held by the author(s). All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions // are met: // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the names of the Naval Postgraduate School (NPS) // Modeling Virtual Environments and Simulation (MOVES) Institute // (http://www.nps.edu and http://www.MovesInstitute.org) // nor the names of its contributors may be used to endorse or // promote products derived from this software without specific // prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS // FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE // COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, // INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, // BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; // LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER // CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT // LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN // ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE // POSSIBILITY OF SUCH DAMAGE. // // Copyright (c) 2008, MOVES Institute, Naval Postgraduate School. All // rights reserved. 
This work is licensed under the BSD open source license, // available at https://www.movesinstitute.org/licenses/bsd.html // // Author: DMcG // Modified for use with C#: // - Peter Smith (Naval Air Warfare Center - Training Systems Division) // - Zvonko Bostjancic (Blubit d.o.o. - zvonko.bostjancic@blubit.si) using System; using System.Collections.Generic; using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.Globalization; using System.Text; using System.Xml.Serialization; using OpenDis.Core; namespace OpenDis.Dis1998 { /// <summary> /// Used in the UA pdu; ties together an emmitter and a location. This requires manual cleanup; the beam data should not be attached to each emitter system. /// </summary> [Serializable] [XmlRoot] [XmlInclude(typeof(AcousticEmitterSystem))] [XmlInclude(typeof(Vector3Float))] [XmlInclude(typeof(AcousticBeamData))] public partial class AcousticEmitterSystemData { /// <summary> /// Length of emitter system data /// </summary> private byte _emitterSystemDataLength; /// <summary> /// Number of beams /// </summary> private byte _numberOfBeams; /// <summary> /// padding /// </summary> private ushort _pad2; /// <summary> /// This field shall specify the system for a particular UA emitter. /// </summary> private AcousticEmitterSystem _acousticEmitterSystem = new AcousticEmitterSystem(); /// <summary> /// Represents the location wrt the entity /// </summary> private Vector3Float _emitterLocation = new Vector3Float(); /// <summary> /// For each beam in numberOfBeams, an emitter system. This is not right--the beam records need to be at the end of the PDU, rather than attached to each system. /// </summary> private List<AcousticBeamData> _beamRecords = new List<AcousticBeamData>(); /// <summary> /// Initializes a new instance of the <see cref="AcousticEmitterSystemData"/> class. /// </summary> public AcousticEmitterSystemData() { } /// <summary> /// Implements the operator !=. 
/// </summary>
        /// <param name="left">The left operand.</param>
        /// <param name="right">The right operand.</param>
        /// <returns>
        /// <c>true</c> if operands are not equal; otherwise, <c>false</c>.
        /// </returns>
        public static bool operator !=(AcousticEmitterSystemData left, AcousticEmitterSystemData right)
        {
            return !(left == right);
        }

        /// <summary>
        /// Implements the operator ==.
        /// </summary>
        /// <param name="left">The left operand.</param>
        /// <param name="right">The right operand.</param>
        /// <returns>
        /// <c>true</c> if both operands are equal; otherwise, <c>false</c>.
        /// </returns>
        public static bool operator ==(AcousticEmitterSystemData left, AcousticEmitterSystemData right)
        {
            if (object.ReferenceEquals(left, right))
            {
                return true;
            }

            // Cast to object so the null comparison does not recurse back into this operator.
            if (((object)left == null) || ((object)right == null))
            {
                return false;
            }

            return left.Equals(right);
        }

        /// <summary>
        /// Computes the size, in bytes, that this record occupies when marshalled:
        /// the fixed-size header fields plus the marshalled size of each nested record.
        /// </summary>
        /// <returns>The marshalled size in bytes.</returns>
        public virtual int GetMarshalledSize()
        {
            int marshalSize = 0;

            marshalSize += 1;  // this._emitterSystemDataLength
            marshalSize += 1;  // this._numberOfBeams
            marshalSize += 2;  // this._pad2
            marshalSize += this._acousticEmitterSystem.GetMarshalledSize();  // this._acousticEmitterSystem
            marshalSize += this._emitterLocation.GetMarshalledSize();  // this._emitterLocation
            for (int idx = 0; idx < this._beamRecords.Count; idx++)
            {
                AcousticBeamData listElement = (AcousticBeamData)this._beamRecords[idx];
                marshalSize += listElement.GetMarshalledSize();
            }

            return marshalSize;
        }

        /// <summary>
        /// Gets or sets the Length of emitter system data
        /// </summary>
        [XmlElement(Type = typeof(byte), ElementName = "emitterSystemDataLength")]
        public byte EmitterSystemDataLength
        {
            get
            {
                return this._emitterSystemDataLength;
            }

            set
            {
                this._emitterSystemDataLength = value;
            }
        }

        /// <summary>
        /// Gets or sets the Number of beams
        /// </summary>
        /// <remarks>
        /// Note that setting this value will not change the marshalled value. The list whose length this describes is used for that purpose.
        /// The NumberOfBeams marshalled onto the wire is also based on the actual list length rather than this value.
        /// The property is simply here for completeness and should not be used for any computations.
        /// </remarks>
        [XmlElement(Type = typeof(byte), ElementName = "numberOfBeams")]
        public byte NumberOfBeams
        {
            get
            {
                return this._numberOfBeams;
            }

            set
            {
                this._numberOfBeams = value;
            }
        }

        /// <summary>
        /// Gets or sets the padding
        /// </summary>
        [XmlElement(Type = typeof(ushort), ElementName = "pad2")]
        public ushort Pad2
        {
            get
            {
                return this._pad2;
            }

            set
            {
                this._pad2 = value;
            }
        }

        /// <summary>
        /// Gets or sets the This field shall specify the system for a particular UA emitter.
        /// </summary>
        [XmlElement(Type = typeof(AcousticEmitterSystem), ElementName = "acousticEmitterSystem")]
        public AcousticEmitterSystem AcousticEmitterSystem
        {
            get
            {
                return this._acousticEmitterSystem;
            }

            set
            {
                this._acousticEmitterSystem = value;
            }
        }

        /// <summary>
        /// Gets or sets the Represents the location wrt the entity
        /// </summary>
        [XmlElement(Type = typeof(Vector3Float), ElementName = "emitterLocation")]
        public Vector3Float EmitterLocation
        {
            get
            {
                return this._emitterLocation;
            }

            set
            {
                this._emitterLocation = value;
            }
        }

        /// <summary>
        /// Gets the For each beam in numberOfBeams, an emitter system. This is not right--the beam records need to be at the end of the PDU, rather than attached to each system.
        /// </summary>
        [XmlElement(ElementName = "beamRecordsList", Type = typeof(List<AcousticBeamData>))]
        public List<AcousticBeamData> BeamRecords
        {
            get
            {
                return this._beamRecords;
            }
        }

        /// <summary>
        /// Occurs when exception when processing PDU is caught.
        /// </summary>
        public event EventHandler<PduExceptionEventArgs> ExceptionOccured;

        /// <summary>
        /// Called when exception occurs (raises the <see cref="ExceptionOccured"/> event).
        /// </summary>
        /// <param name="e">The exception.</param>
        protected void RaiseExceptionOccured(Exception e)
        {
            // Only raise when event firing is globally enabled and someone is listening.
            if (Pdu.FireExceptionEvents && this.ExceptionOccured != null)
            {
                this.ExceptionOccured(this, new PduExceptionEventArgs(e));
            }
        }

        /// <summary>
        /// Marshal the data to the DataOutputStream. Note: Length needs to be set before calling this method
        /// </summary>
        /// <param name="dos">The DataOutputStream instance to which the PDU is marshaled.</param>
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Due to ignoring errors.")]
        public virtual void Marshal(DataOutputStream dos)
        {
            if (dos != null)
            {
                try
                {
                    dos.WriteUnsignedByte((byte)this._emitterSystemDataLength);
                    // The beam count written to the wire comes from the list length,
                    // not from the _numberOfBeams field (see NumberOfBeams remarks).
                    dos.WriteUnsignedByte((byte)this._beamRecords.Count);
                    dos.WriteUnsignedShort((ushort)this._pad2);
                    this._acousticEmitterSystem.Marshal(dos);
                    this._emitterLocation.Marshal(dos);

                    for (int idx = 0; idx < this._beamRecords.Count; idx++)
                    {
                        AcousticBeamData aAcousticBeamData = (AcousticBeamData)this._beamRecords[idx];
                        aAcousticBeamData.Marshal(dos);
                    }
                }
                catch (Exception e)
                {
                    if (PduBase.TraceExceptions)
                    {
                        Trace.WriteLine(e);
                        Trace.Flush();
                    }

                    this.RaiseExceptionOccured(e);

                    if (PduBase.ThrowExceptions)
                    {
                        // Use a bare rethrow to preserve the original stack trace (CA2200).
                        throw;
                    }
                }
            }
        }

        /// <summary>
        /// Unmarshal (read) this record's fields from the DataInputStream.
        /// </summary>
        /// <param name="dis">The DataInputStream instance from which the PDU is unmarshaled.</param>
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Due to ignoring errors.")]
        public virtual void Unmarshal(DataInputStream dis)
        {
            if (dis != null)
            {
                try
                {
                    this._emitterSystemDataLength = dis.ReadUnsignedByte();
                    this._numberOfBeams = dis.ReadUnsignedByte();
                    this._pad2 = dis.ReadUnsignedShort();
                    this._acousticEmitterSystem.Unmarshal(dis);
                    this._emitterLocation.Unmarshal(dis);

                    // On read, the count field just unmarshalled drives how many
                    // beam records follow; these are appended to the list.
                    for (int idx = 0; idx < this.NumberOfBeams; idx++)
                    {
                        AcousticBeamData anX = new AcousticBeamData();
                        anX.Unmarshal(dis);
                        this._beamRecords.Add(anX);
                    }
                }
                catch (Exception e)
                {
                    if (PduBase.TraceExceptions)
                    {
                        Trace.WriteLine(e);
                        Trace.Flush();
                    }

                    this.RaiseExceptionOccured(e);

                    if (PduBase.ThrowExceptions)
                    {
                        // Use a bare rethrow to preserve the original stack trace (CA2200).
                        throw;
                    }
                }
            }
        }

        /// <summary>
        /// This allows for a quick display of PDU data. The current format is unacceptable and only used for debugging.
        /// This will be modified in the future to provide a better display. Usage:
        /// pdu.GetType().InvokeMember("Reflection", System.Reflection.BindingFlags.InvokeMethod, null, pdu, new object[] { sb });
        /// where pdu is an object representing a single pdu and sb is a StringBuilder.
        /// Note: The supplied Utilities folder contains a method called 'DecodePDU' in the PDUProcessor Class that provides this functionality
        /// </summary>
        /// <param name="sb">The StringBuilder instance to which the PDU is written to.</param>
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Due to ignoring errors.")]
        public virtual void Reflection(StringBuilder sb)
        {
            sb.AppendLine("<AcousticEmitterSystemData>");
            try
            {
                sb.AppendLine("<emitterSystemDataLength type=\"byte\">" + this._emitterSystemDataLength.ToString(CultureInfo.InvariantCulture) + "</emitterSystemDataLength>");
                sb.AppendLine("<beamRecords type=\"byte\">" + this._beamRecords.Count.ToString(CultureInfo.InvariantCulture) + "</beamRecords>");
                sb.AppendLine("<pad2 type=\"ushort\">" + this._pad2.ToString(CultureInfo.InvariantCulture) + "</pad2>");
                sb.AppendLine("<acousticEmitterSystem>");
                this._acousticEmitterSystem.Reflection(sb);
                sb.AppendLine("</acousticEmitterSystem>");
                sb.AppendLine("<emitterLocation>");
                this._emitterLocation.Reflection(sb);
                sb.AppendLine("</emitterLocation>");
                for (int idx = 0; idx < this._beamRecords.Count; idx++)
                {
                    sb.AppendLine("<beamRecords" + idx.ToString(CultureInfo.InvariantCulture) + " type=\"AcousticBeamData\">");
                    AcousticBeamData aAcousticBeamData = (AcousticBeamData)this._beamRecords[idx];
                    aAcousticBeamData.Reflection(sb);
                    sb.AppendLine("</beamRecords" + idx.ToString(CultureInfo.InvariantCulture) + ">");
                }

                sb.AppendLine("</AcousticEmitterSystemData>");
            }
            catch (Exception e)
            {
                if (PduBase.TraceExceptions)
                {
                    Trace.WriteLine(e);
                    Trace.Flush();
                }

                this.RaiseExceptionOccured(e);

                if (PduBase.ThrowExceptions)
                {
                    // Use a bare rethrow to preserve the original stack trace (CA2200).
                    throw;
                }
            }
        }

        /// <summary>
        /// Determines whether the specified <see cref="System.Object"/> is equal to this instance.
        /// </summary>
        /// <param name="obj">The <see cref="System.Object"/> to compare with this instance.</param>
        /// <returns>
        /// <c>true</c> if the specified <see cref="System.Object"/> is equal to this instance; otherwise, <c>false</c>.
        /// </returns>
        public override bool Equals(object obj)
        {
            // The == operator handles nulls and delegates to the typed Equals.
            return this == obj as AcousticEmitterSystemData;
        }

        /// <summary>
        /// Compares for reference AND value equality.
        /// </summary>
        /// <param name="obj">The object to compare with this instance.</param>
        /// <returns>
        /// <c>true</c> if both operands are equal; otherwise, <c>false</c>.
        /// </returns>
        public bool Equals(AcousticEmitterSystemData obj)
        {
            // Guard against a direct null argument; previously this threw
            // NullReferenceException instead of returning false.
            if (object.ReferenceEquals(obj, null))
            {
                return false;
            }

            bool ivarsEqual = true;

            if (obj.GetType() != this.GetType())
            {
                return false;
            }

            if (this._emitterSystemDataLength != obj._emitterSystemDataLength)
            {
                ivarsEqual = false;
            }

            if (this._numberOfBeams != obj._numberOfBeams)
            {
                ivarsEqual = false;
            }

            if (this._pad2 != obj._pad2)
            {
                ivarsEqual = false;
            }

            if (!this._acousticEmitterSystem.Equals(obj._acousticEmitterSystem))
            {
                ivarsEqual = false;
            }

            if (!this._emitterLocation.Equals(obj._emitterLocation))
            {
                ivarsEqual = false;
            }

            if (this._beamRecords.Count != obj._beamRecords.Count)
            {
                ivarsEqual = false;
            }

            if (ivarsEqual)
            {
                // Element-wise comparison only makes sense once the counts match.
                for (int idx = 0; idx < this._beamRecords.Count; idx++)
                {
                    if (!this._beamRecords[idx].Equals(obj._beamRecords[idx]))
                    {
                        ivarsEqual = false;
                    }
                }
            }

            return ivarsEqual;
        }

        /// <summary>
        /// HashCode Helper
        /// </summary>
        /// <param name="hash">The hash value.</param>
        /// <returns>The new hash value.</returns>
        private static int GenerateHash(int hash)
        {
            // NOTE(review): this looks like a mistranscription of the classic
            // "(hash << 5) + hash" mixing step; left unchanged because sibling
            // generated classes use the same helper and hash values must stay
            // consistent across the library. Verify against the code generator.
            hash = hash << (5 + hash);
            return hash;
        }

        /// <summary>
        /// Gets the hash code.
        /// </summary>
        /// <returns>The hash code.</returns>
        public override int GetHashCode()
        {
            int result = 0;

            result = GenerateHash(result) ^ this._emitterSystemDataLength.GetHashCode();
            result = GenerateHash(result) ^ this._numberOfBeams.GetHashCode();
            result = GenerateHash(result) ^ this._pad2.GetHashCode();
            result = GenerateHash(result) ^ this._acousticEmitterSystem.GetHashCode();
            result = GenerateHash(result) ^ this._emitterLocation.GetHashCode();

            if (this._beamRecords.Count > 0)
            {
                for (int idx = 0; idx < this._beamRecords.Count; idx++)
                {
                    result = GenerateHash(result) ^ this._beamRecords[idx].GetHashCode();
                }
            }

            return result;
        }
    }
}
// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Generated code. DO NOT EDIT! namespace Google.Cloud.Compute.V1.Snippets { using Google.Api.Gax; using System; using System.Linq; using System.Threading.Tasks; using lro = Google.LongRunning; /// <summary>Generated snippets.</summary> public sealed class AllGeneratedBackendBucketsClientSnippets { /// <summary>Snippet for AddSignedUrlKey</summary> public void AddSignedUrlKeyRequestObject() { // Snippet: AddSignedUrlKey(AddSignedUrlKeyBackendBucketRequest, CallSettings) // Create client BackendBucketsClient backendBucketsClient = BackendBucketsClient.Create(); // Initialize request argument(s) AddSignedUrlKeyBackendBucketRequest request = new AddSignedUrlKeyBackendBucketRequest { RequestId = "", BackendBucket = "", Project = "", SignedUrlKeyResource = new SignedUrlKey(), }; // Make the request lro::Operation<Operation, Operation> response = backendBucketsClient.AddSignedUrlKey(request); // Poll until the returned long-running operation is complete lro::Operation<Operation, Operation> completedResponse = response.PollUntilCompleted(); // Retrieve the operation result Operation result = completedResponse.Result; // Or get the name of the operation string operationName = response.Name; // This name can be stored, then the long-running operation retrieved later by name lro::Operation<Operation, Operation> retrievedResponse = backendBucketsClient.PollOnceAddSignedUrlKey(operationName); // Check if 
the retrieved long-running operation has completed if (retrievedResponse.IsCompleted) { // If it has completed, then access the result Operation retrievedResult = retrievedResponse.Result; } // End snippet } /// <summary>Snippet for AddSignedUrlKeyAsync</summary> public async Task AddSignedUrlKeyRequestObjectAsync() { // Snippet: AddSignedUrlKeyAsync(AddSignedUrlKeyBackendBucketRequest, CallSettings) // Additional: AddSignedUrlKeyAsync(AddSignedUrlKeyBackendBucketRequest, CancellationToken) // Create client BackendBucketsClient backendBucketsClient = await BackendBucketsClient.CreateAsync(); // Initialize request argument(s) AddSignedUrlKeyBackendBucketRequest request = new AddSignedUrlKeyBackendBucketRequest { RequestId = "", BackendBucket = "", Project = "", SignedUrlKeyResource = new SignedUrlKey(), }; // Make the request lro::Operation<Operation, Operation> response = await backendBucketsClient.AddSignedUrlKeyAsync(request); // Poll until the returned long-running operation is complete lro::Operation<Operation, Operation> completedResponse = await response.PollUntilCompletedAsync(); // Retrieve the operation result Operation result = completedResponse.Result; // Or get the name of the operation string operationName = response.Name; // This name can be stored, then the long-running operation retrieved later by name lro::Operation<Operation, Operation> retrievedResponse = await backendBucketsClient.PollOnceAddSignedUrlKeyAsync(operationName); // Check if the retrieved long-running operation has completed if (retrievedResponse.IsCompleted) { // If it has completed, then access the result Operation retrievedResult = retrievedResponse.Result; } // End snippet } /// <summary>Snippet for AddSignedUrlKey</summary> public void AddSignedUrlKey() { // Snippet: AddSignedUrlKey(string, string, SignedUrlKey, CallSettings) // Create client BackendBucketsClient backendBucketsClient = BackendBucketsClient.Create(); // Initialize request argument(s) string project = ""; string 
backendBucket = ""; SignedUrlKey signedUrlKeyResource = new SignedUrlKey(); // Make the request lro::Operation<Operation, Operation> response = backendBucketsClient.AddSignedUrlKey(project, backendBucket, signedUrlKeyResource); // Poll until the returned long-running operation is complete lro::Operation<Operation, Operation> completedResponse = response.PollUntilCompleted(); // Retrieve the operation result Operation result = completedResponse.Result; // Or get the name of the operation string operationName = response.Name; // This name can be stored, then the long-running operation retrieved later by name lro::Operation<Operation, Operation> retrievedResponse = backendBucketsClient.PollOnceAddSignedUrlKey(operationName); // Check if the retrieved long-running operation has completed if (retrievedResponse.IsCompleted) { // If it has completed, then access the result Operation retrievedResult = retrievedResponse.Result; } // End snippet } /// <summary>Snippet for AddSignedUrlKeyAsync</summary> public async Task AddSignedUrlKeyAsync() { // Snippet: AddSignedUrlKeyAsync(string, string, SignedUrlKey, CallSettings) // Additional: AddSignedUrlKeyAsync(string, string, SignedUrlKey, CancellationToken) // Create client BackendBucketsClient backendBucketsClient = await BackendBucketsClient.CreateAsync(); // Initialize request argument(s) string project = ""; string backendBucket = ""; SignedUrlKey signedUrlKeyResource = new SignedUrlKey(); // Make the request lro::Operation<Operation, Operation> response = await backendBucketsClient.AddSignedUrlKeyAsync(project, backendBucket, signedUrlKeyResource); // Poll until the returned long-running operation is complete lro::Operation<Operation, Operation> completedResponse = await response.PollUntilCompletedAsync(); // Retrieve the operation result Operation result = completedResponse.Result; // Or get the name of the operation string operationName = response.Name; // This name can be stored, then the long-running operation retrieved 
later by name lro::Operation<Operation, Operation> retrievedResponse = await backendBucketsClient.PollOnceAddSignedUrlKeyAsync(operationName); // Check if the retrieved long-running operation has completed if (retrievedResponse.IsCompleted) { // If it has completed, then access the result Operation retrievedResult = retrievedResponse.Result; } // End snippet } /// <summary>Snippet for Delete</summary> public void DeleteRequestObject() { // Snippet: Delete(DeleteBackendBucketRequest, CallSettings) // Create client BackendBucketsClient backendBucketsClient = BackendBucketsClient.Create(); // Initialize request argument(s) DeleteBackendBucketRequest request = new DeleteBackendBucketRequest { RequestId = "", BackendBucket = "", Project = "", }; // Make the request lro::Operation<Operation, Operation> response = backendBucketsClient.Delete(request); // Poll until the returned long-running operation is complete lro::Operation<Operation, Operation> completedResponse = response.PollUntilCompleted(); // Retrieve the operation result Operation result = completedResponse.Result; // Or get the name of the operation string operationName = response.Name; // This name can be stored, then the long-running operation retrieved later by name lro::Operation<Operation, Operation> retrievedResponse = backendBucketsClient.PollOnceDelete(operationName); // Check if the retrieved long-running operation has completed if (retrievedResponse.IsCompleted) { // If it has completed, then access the result Operation retrievedResult = retrievedResponse.Result; } // End snippet } /// <summary>Snippet for DeleteAsync</summary> public async Task DeleteRequestObjectAsync() { // Snippet: DeleteAsync(DeleteBackendBucketRequest, CallSettings) // Additional: DeleteAsync(DeleteBackendBucketRequest, CancellationToken) // Create client BackendBucketsClient backendBucketsClient = await BackendBucketsClient.CreateAsync(); // Initialize request argument(s) DeleteBackendBucketRequest request = new 
DeleteBackendBucketRequest { RequestId = "", BackendBucket = "", Project = "", }; // Make the request lro::Operation<Operation, Operation> response = await backendBucketsClient.DeleteAsync(request); // Poll until the returned long-running operation is complete lro::Operation<Operation, Operation> completedResponse = await response.PollUntilCompletedAsync(); // Retrieve the operation result Operation result = completedResponse.Result; // Or get the name of the operation string operationName = response.Name; // This name can be stored, then the long-running operation retrieved later by name lro::Operation<Operation, Operation> retrievedResponse = await backendBucketsClient.PollOnceDeleteAsync(operationName); // Check if the retrieved long-running operation has completed if (retrievedResponse.IsCompleted) { // If it has completed, then access the result Operation retrievedResult = retrievedResponse.Result; } // End snippet } /// <summary>Snippet for Delete</summary> public void Delete() { // Snippet: Delete(string, string, CallSettings) // Create client BackendBucketsClient backendBucketsClient = BackendBucketsClient.Create(); // Initialize request argument(s) string project = ""; string backendBucket = ""; // Make the request lro::Operation<Operation, Operation> response = backendBucketsClient.Delete(project, backendBucket); // Poll until the returned long-running operation is complete lro::Operation<Operation, Operation> completedResponse = response.PollUntilCompleted(); // Retrieve the operation result Operation result = completedResponse.Result; // Or get the name of the operation string operationName = response.Name; // This name can be stored, then the long-running operation retrieved later by name lro::Operation<Operation, Operation> retrievedResponse = backendBucketsClient.PollOnceDelete(operationName); // Check if the retrieved long-running operation has completed if (retrievedResponse.IsCompleted) { // If it has completed, then access the result Operation 
retrievedResult = retrievedResponse.Result; } // End snippet } /// <summary>Snippet for DeleteAsync</summary> public async Task DeleteAsync() { // Snippet: DeleteAsync(string, string, CallSettings) // Additional: DeleteAsync(string, string, CancellationToken) // Create client BackendBucketsClient backendBucketsClient = await BackendBucketsClient.CreateAsync(); // Initialize request argument(s) string project = ""; string backendBucket = ""; // Make the request lro::Operation<Operation, Operation> response = await backendBucketsClient.DeleteAsync(project, backendBucket); // Poll until the returned long-running operation is complete lro::Operation<Operation, Operation> completedResponse = await response.PollUntilCompletedAsync(); // Retrieve the operation result Operation result = completedResponse.Result; // Or get the name of the operation string operationName = response.Name; // This name can be stored, then the long-running operation retrieved later by name lro::Operation<Operation, Operation> retrievedResponse = await backendBucketsClient.PollOnceDeleteAsync(operationName); // Check if the retrieved long-running operation has completed if (retrievedResponse.IsCompleted) { // If it has completed, then access the result Operation retrievedResult = retrievedResponse.Result; } // End snippet } /// <summary>Snippet for DeleteSignedUrlKey</summary> public void DeleteSignedUrlKeyRequestObject() { // Snippet: DeleteSignedUrlKey(DeleteSignedUrlKeyBackendBucketRequest, CallSettings) // Create client BackendBucketsClient backendBucketsClient = BackendBucketsClient.Create(); // Initialize request argument(s) DeleteSignedUrlKeyBackendBucketRequest request = new DeleteSignedUrlKeyBackendBucketRequest { RequestId = "", BackendBucket = "", Project = "", KeyName = "", }; // Make the request lro::Operation<Operation, Operation> response = backendBucketsClient.DeleteSignedUrlKey(request); // Poll until the returned long-running operation is complete lro::Operation<Operation, 
Operation> completedResponse = response.PollUntilCompleted(); // Retrieve the operation result Operation result = completedResponse.Result; // Or get the name of the operation string operationName = response.Name; // This name can be stored, then the long-running operation retrieved later by name lro::Operation<Operation, Operation> retrievedResponse = backendBucketsClient.PollOnceDeleteSignedUrlKey(operationName); // Check if the retrieved long-running operation has completed if (retrievedResponse.IsCompleted) { // If it has completed, then access the result Operation retrievedResult = retrievedResponse.Result; } // End snippet } /// <summary>Snippet for DeleteSignedUrlKeyAsync</summary> public async Task DeleteSignedUrlKeyRequestObjectAsync() { // Snippet: DeleteSignedUrlKeyAsync(DeleteSignedUrlKeyBackendBucketRequest, CallSettings) // Additional: DeleteSignedUrlKeyAsync(DeleteSignedUrlKeyBackendBucketRequest, CancellationToken) // Create client BackendBucketsClient backendBucketsClient = await BackendBucketsClient.CreateAsync(); // Initialize request argument(s) DeleteSignedUrlKeyBackendBucketRequest request = new DeleteSignedUrlKeyBackendBucketRequest { RequestId = "", BackendBucket = "", Project = "", KeyName = "", }; // Make the request lro::Operation<Operation, Operation> response = await backendBucketsClient.DeleteSignedUrlKeyAsync(request); // Poll until the returned long-running operation is complete lro::Operation<Operation, Operation> completedResponse = await response.PollUntilCompletedAsync(); // Retrieve the operation result Operation result = completedResponse.Result; // Or get the name of the operation string operationName = response.Name; // This name can be stored, then the long-running operation retrieved later by name lro::Operation<Operation, Operation> retrievedResponse = await backendBucketsClient.PollOnceDeleteSignedUrlKeyAsync(operationName); // Check if the retrieved long-running operation has completed if (retrievedResponse.IsCompleted) 
{ // If it has completed, then access the result Operation retrievedResult = retrievedResponse.Result; } // End snippet } /// <summary>Snippet for DeleteSignedUrlKey</summary> public void DeleteSignedUrlKey() { // Snippet: DeleteSignedUrlKey(string, string, string, CallSettings) // Create client BackendBucketsClient backendBucketsClient = BackendBucketsClient.Create(); // Initialize request argument(s) string project = ""; string backendBucket = ""; string keyName = ""; // Make the request lro::Operation<Operation, Operation> response = backendBucketsClient.DeleteSignedUrlKey(project, backendBucket, keyName); // Poll until the returned long-running operation is complete lro::Operation<Operation, Operation> completedResponse = response.PollUntilCompleted(); // Retrieve the operation result Operation result = completedResponse.Result; // Or get the name of the operation string operationName = response.Name; // This name can be stored, then the long-running operation retrieved later by name lro::Operation<Operation, Operation> retrievedResponse = backendBucketsClient.PollOnceDeleteSignedUrlKey(operationName); // Check if the retrieved long-running operation has completed if (retrievedResponse.IsCompleted) { // If it has completed, then access the result Operation retrievedResult = retrievedResponse.Result; } // End snippet } /// <summary>Snippet for DeleteSignedUrlKeyAsync</summary> public async Task DeleteSignedUrlKeyAsync() { // Snippet: DeleteSignedUrlKeyAsync(string, string, string, CallSettings) // Additional: DeleteSignedUrlKeyAsync(string, string, string, CancellationToken) // Create client BackendBucketsClient backendBucketsClient = await BackendBucketsClient.CreateAsync(); // Initialize request argument(s) string project = ""; string backendBucket = ""; string keyName = ""; // Make the request lro::Operation<Operation, Operation> response = await backendBucketsClient.DeleteSignedUrlKeyAsync(project, backendBucket, keyName); // Poll until the returned 
long-running operation is complete lro::Operation<Operation, Operation> completedResponse = await response.PollUntilCompletedAsync(); // Retrieve the operation result Operation result = completedResponse.Result; // Or get the name of the operation string operationName = response.Name; // This name can be stored, then the long-running operation retrieved later by name lro::Operation<Operation, Operation> retrievedResponse = await backendBucketsClient.PollOnceDeleteSignedUrlKeyAsync(operationName); // Check if the retrieved long-running operation has completed if (retrievedResponse.IsCompleted) { // If it has completed, then access the result Operation retrievedResult = retrievedResponse.Result; } // End snippet } /// <summary>Snippet for Get</summary> public void GetRequestObject() { // Snippet: Get(GetBackendBucketRequest, CallSettings) // Create client BackendBucketsClient backendBucketsClient = BackendBucketsClient.Create(); // Initialize request argument(s) GetBackendBucketRequest request = new GetBackendBucketRequest { BackendBucket = "", Project = "", }; // Make the request BackendBucket response = backendBucketsClient.Get(request); // End snippet } /// <summary>Snippet for GetAsync</summary> public async Task GetRequestObjectAsync() { // Snippet: GetAsync(GetBackendBucketRequest, CallSettings) // Additional: GetAsync(GetBackendBucketRequest, CancellationToken) // Create client BackendBucketsClient backendBucketsClient = await BackendBucketsClient.CreateAsync(); // Initialize request argument(s) GetBackendBucketRequest request = new GetBackendBucketRequest { BackendBucket = "", Project = "", }; // Make the request BackendBucket response = await backendBucketsClient.GetAsync(request); // End snippet } /// <summary>Snippet for Get</summary> public void Get() { // Snippet: Get(string, string, CallSettings) // Create client BackendBucketsClient backendBucketsClient = BackendBucketsClient.Create(); // Initialize request argument(s) string project = ""; string 
backendBucket = ""; // Make the request BackendBucket response = backendBucketsClient.Get(project, backendBucket); // End snippet } /// <summary>Snippet for GetAsync</summary> public async Task GetAsync() { // Snippet: GetAsync(string, string, CallSettings) // Additional: GetAsync(string, string, CancellationToken) // Create client BackendBucketsClient backendBucketsClient = await BackendBucketsClient.CreateAsync(); // Initialize request argument(s) string project = ""; string backendBucket = ""; // Make the request BackendBucket response = await backendBucketsClient.GetAsync(project, backendBucket); // End snippet } /// <summary>Snippet for Insert</summary> public void InsertRequestObject() { // Snippet: Insert(InsertBackendBucketRequest, CallSettings) // Create client BackendBucketsClient backendBucketsClient = BackendBucketsClient.Create(); // Initialize request argument(s) InsertBackendBucketRequest request = new InsertBackendBucketRequest { RequestId = "", Project = "", BackendBucketResource = new BackendBucket(), }; // Make the request lro::Operation<Operation, Operation> response = backendBucketsClient.Insert(request); // Poll until the returned long-running operation is complete lro::Operation<Operation, Operation> completedResponse = response.PollUntilCompleted(); // Retrieve the operation result Operation result = completedResponse.Result; // Or get the name of the operation string operationName = response.Name; // This name can be stored, then the long-running operation retrieved later by name lro::Operation<Operation, Operation> retrievedResponse = backendBucketsClient.PollOnceInsert(operationName); // Check if the retrieved long-running operation has completed if (retrievedResponse.IsCompleted) { // If it has completed, then access the result Operation retrievedResult = retrievedResponse.Result; } // End snippet } /// <summary>Snippet for InsertAsync</summary> public async Task InsertRequestObjectAsync() { // Snippet: 
InsertAsync(InsertBackendBucketRequest, CallSettings) // Additional: InsertAsync(InsertBackendBucketRequest, CancellationToken) // Create client BackendBucketsClient backendBucketsClient = await BackendBucketsClient.CreateAsync(); // Initialize request argument(s) InsertBackendBucketRequest request = new InsertBackendBucketRequest { RequestId = "", Project = "", BackendBucketResource = new BackendBucket(), }; // Make the request lro::Operation<Operation, Operation> response = await backendBucketsClient.InsertAsync(request); // Poll until the returned long-running operation is complete lro::Operation<Operation, Operation> completedResponse = await response.PollUntilCompletedAsync(); // Retrieve the operation result Operation result = completedResponse.Result; // Or get the name of the operation string operationName = response.Name; // This name can be stored, then the long-running operation retrieved later by name lro::Operation<Operation, Operation> retrievedResponse = await backendBucketsClient.PollOnceInsertAsync(operationName); // Check if the retrieved long-running operation has completed if (retrievedResponse.IsCompleted) { // If it has completed, then access the result Operation retrievedResult = retrievedResponse.Result; } // End snippet } /// <summary>Snippet for Insert</summary> public void Insert() { // Snippet: Insert(string, BackendBucket, CallSettings) // Create client BackendBucketsClient backendBucketsClient = BackendBucketsClient.Create(); // Initialize request argument(s) string project = ""; BackendBucket backendBucketResource = new BackendBucket(); // Make the request lro::Operation<Operation, Operation> response = backendBucketsClient.Insert(project, backendBucketResource); // Poll until the returned long-running operation is complete lro::Operation<Operation, Operation> completedResponse = response.PollUntilCompleted(); // Retrieve the operation result Operation result = completedResponse.Result; // Or get the name of the operation string 
operationName = response.Name; // This name can be stored, then the long-running operation retrieved later by name lro::Operation<Operation, Operation> retrievedResponse = backendBucketsClient.PollOnceInsert(operationName); // Check if the retrieved long-running operation has completed if (retrievedResponse.IsCompleted) { // If it has completed, then access the result Operation retrievedResult = retrievedResponse.Result; } // End snippet } /// <summary>Snippet for InsertAsync</summary> public async Task InsertAsync() { // Snippet: InsertAsync(string, BackendBucket, CallSettings) // Additional: InsertAsync(string, BackendBucket, CancellationToken) // Create client BackendBucketsClient backendBucketsClient = await BackendBucketsClient.CreateAsync(); // Initialize request argument(s) string project = ""; BackendBucket backendBucketResource = new BackendBucket(); // Make the request lro::Operation<Operation, Operation> response = await backendBucketsClient.InsertAsync(project, backendBucketResource); // Poll until the returned long-running operation is complete lro::Operation<Operation, Operation> completedResponse = await response.PollUntilCompletedAsync(); // Retrieve the operation result Operation result = completedResponse.Result; // Or get the name of the operation string operationName = response.Name; // This name can be stored, then the long-running operation retrieved later by name lro::Operation<Operation, Operation> retrievedResponse = await backendBucketsClient.PollOnceInsertAsync(operationName); // Check if the retrieved long-running operation has completed if (retrievedResponse.IsCompleted) { // If it has completed, then access the result Operation retrievedResult = retrievedResponse.Result; } // End snippet } /// <summary>Snippet for List</summary> public void ListRequestObject() { // Snippet: List(ListBackendBucketsRequest, CallSettings) // Create client BackendBucketsClient backendBucketsClient = BackendBucketsClient.Create(); // Initialize request 
argument(s) ListBackendBucketsRequest request = new ListBackendBucketsRequest { OrderBy = "", Project = "", Filter = "", ReturnPartialSuccess = false, }; // Make the request PagedEnumerable<BackendBucketList, BackendBucket> response = backendBucketsClient.List(request); // Iterate over all response items, lazily performing RPCs as required foreach (BackendBucket item in response) { // Do something with each item Console.WriteLine(item); } // Or iterate over pages (of server-defined size), performing one RPC per page foreach (BackendBucketList page in response.AsRawResponses()) { // Do something with each page of items Console.WriteLine("A page of results:"); foreach (BackendBucket item in page) { // Do something with each item Console.WriteLine(item); } } // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required int pageSize = 10; Page<BackendBucket> singlePage = response.ReadPage(pageSize); // Do something with the page of items Console.WriteLine($"A page of {pageSize} results (unless it's the final page):"); foreach (BackendBucket item in singlePage) { // Do something with each item Console.WriteLine(item); } // Store the pageToken, for when the next page is required. 
string nextPageToken = singlePage.NextPageToken;
            // End snippet
        }

        /// <summary>Snippet for ListAsync</summary>
        public async Task ListRequestObjectAsync()
        {
            // Snippet: ListAsync(ListBackendBucketsRequest, CallSettings)
            // Create client
            BackendBucketsClient backendBucketsClient = await BackendBucketsClient.CreateAsync();
            // Initialize request argument(s)
            ListBackendBucketsRequest request = new ListBackendBucketsRequest
            {
                OrderBy = "",
                Project = "",
                Filter = "",
                ReturnPartialSuccess = false,
            };
            // Make the request
            PagedAsyncEnumerable<BackendBucketList, BackendBucket> response = backendBucketsClient.ListAsync(request);

            // Iterate over all response items, lazily performing RPCs as required
            await response.ForEachAsync((BackendBucket item) =>
            {
                // Do something with each item
                Console.WriteLine(item);
            });

            // Or iterate over pages (of server-defined size), performing one RPC per page
            await response.AsRawResponses().ForEachAsync((BackendBucketList page) =>
            {
                // Do something with each page of items
                Console.WriteLine("A page of results:");
                foreach (BackendBucket item in page)
                {
                    // Do something with each item
                    Console.WriteLine(item);
                }
            });

            // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
            int pageSize = 10;
            Page<BackendBucket> singlePage = await response.ReadPageAsync(pageSize);
            // Do something with the page of items
            Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
            foreach (BackendBucket item in singlePage)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
            // Store the pageToken, for when the next page is required.
            string nextPageToken = singlePage.NextPageToken;
            // End snippet
        }

        /// <summary>Snippet for List</summary>
        public void List()
        {
            // Snippet: List(string, string, int?, CallSettings)
            // Create client
            BackendBucketsClient backendBucketsClient = BackendBucketsClient.Create();
            // Initialize request argument(s)
            string project = "";
            // Make the request
            PagedEnumerable<BackendBucketList, BackendBucket> response = backendBucketsClient.List(project);

            // Iterate over all response items, lazily performing RPCs as required
            foreach (BackendBucket item in response)
            {
                // Do something with each item
                Console.WriteLine(item);
            }

            // Or iterate over pages (of server-defined size), performing one RPC per page
            foreach (BackendBucketList page in response.AsRawResponses())
            {
                // Do something with each page of items
                Console.WriteLine("A page of results:");
                foreach (BackendBucket item in page)
                {
                    // Do something with each item
                    Console.WriteLine(item);
                }
            }

            // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
            int pageSize = 10;
            Page<BackendBucket> singlePage = response.ReadPage(pageSize);
            // Do something with the page of items
            Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
            foreach (BackendBucket item in singlePage)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
            // Store the pageToken, for when the next page is required.
            string nextPageToken = singlePage.NextPageToken;
            // End snippet
        }

        /// <summary>Snippet for ListAsync</summary>
        public async Task ListAsync()
        {
            // Snippet: ListAsync(string, string, int?, CallSettings)
            // Create client
            BackendBucketsClient backendBucketsClient = await BackendBucketsClient.CreateAsync();
            // Initialize request argument(s)
            string project = "";
            // Make the request
            PagedAsyncEnumerable<BackendBucketList, BackendBucket> response = backendBucketsClient.ListAsync(project);

            // Iterate over all response items, lazily performing RPCs as required
            await response.ForEachAsync((BackendBucket item) =>
            {
                // Do something with each item
                Console.WriteLine(item);
            });

            // Or iterate over pages (of server-defined size), performing one RPC per page
            await response.AsRawResponses().ForEachAsync((BackendBucketList page) =>
            {
                // Do something with each page of items
                Console.WriteLine("A page of results:");
                foreach (BackendBucket item in page)
                {
                    // Do something with each item
                    Console.WriteLine(item);
                }
            });

            // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
            int pageSize = 10;
            Page<BackendBucket> singlePage = await response.ReadPageAsync(pageSize);
            // Do something with the page of items
            Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
            foreach (BackendBucket item in singlePage)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
            // Store the pageToken, for when the next page is required.
            string nextPageToken = singlePage.NextPageToken;
            // End snippet
        }

        /// <summary>Snippet for Patch</summary>
        public void PatchRequestObject()
        {
            // Snippet: Patch(PatchBackendBucketRequest, CallSettings)
            // Create client
            BackendBucketsClient backendBucketsClient = BackendBucketsClient.Create();
            // Initialize request argument(s)
            PatchBackendBucketRequest request = new PatchBackendBucketRequest
            {
                RequestId = "",
                BackendBucket = "",
                Project = "",
                BackendBucketResource = new BackendBucket(),
            };
            // Make the request
            lro::Operation<Operation, Operation> response = backendBucketsClient.Patch(request);

            // Poll until the returned long-running operation is complete
            lro::Operation<Operation, Operation> completedResponse = response.PollUntilCompleted();
            // Retrieve the operation result
            Operation result = completedResponse.Result;

            // Or get the name of the operation
            string operationName = response.Name;
            // This name can be stored, then the long-running operation retrieved later by name
            lro::Operation<Operation, Operation> retrievedResponse = backendBucketsClient.PollOncePatch(operationName);
            // Check if the retrieved long-running operation has completed
            if (retrievedResponse.IsCompleted)
            {
                // If it has completed, then access the result
                Operation retrievedResult = retrievedResponse.Result;
            }
            // End snippet
        }

        /// <summary>Snippet for PatchAsync</summary>
        public async Task PatchRequestObjectAsync()
        {
            // Snippet: PatchAsync(PatchBackendBucketRequest, CallSettings)
            // Additional: PatchAsync(PatchBackendBucketRequest, CancellationToken)
            // Create client
            BackendBucketsClient backendBucketsClient = await BackendBucketsClient.CreateAsync();
            // Initialize request argument(s)
            PatchBackendBucketRequest request = new PatchBackendBucketRequest
            {
                RequestId = "",
                BackendBucket = "",
                Project = "",
                BackendBucketResource = new BackendBucket(),
            };
            // Make the request
            lro::Operation<Operation, Operation> response = await backendBucketsClient.PatchAsync(request);

            // Poll until the returned long-running operation is complete
            lro::Operation<Operation, Operation> completedResponse = await response.PollUntilCompletedAsync();
            // Retrieve the operation result
            Operation result = completedResponse.Result;

            // Or get the name of the operation
            string operationName = response.Name;
            // This name can be stored, then the long-running operation retrieved later by name
            lro::Operation<Operation, Operation> retrievedResponse = await backendBucketsClient.PollOncePatchAsync(operationName);
            // Check if the retrieved long-running operation has completed
            if (retrievedResponse.IsCompleted)
            {
                // If it has completed, then access the result
                Operation retrievedResult = retrievedResponse.Result;
            }
            // End snippet
        }

        /// <summary>Snippet for Patch</summary>
        public void Patch()
        {
            // Snippet: Patch(string, string, BackendBucket, CallSettings)
            // Create client
            BackendBucketsClient backendBucketsClient = BackendBucketsClient.Create();
            // Initialize request argument(s)
            string project = "";
            string backendBucket = "";
            BackendBucket backendBucketResource = new BackendBucket();
            // Make the request
            lro::Operation<Operation, Operation> response = backendBucketsClient.Patch(project, backendBucket, backendBucketResource);

            // Poll until the returned long-running operation is complete
            lro::Operation<Operation, Operation> completedResponse = response.PollUntilCompleted();
            // Retrieve the operation result
            Operation result = completedResponse.Result;

            // Or get the name of the operation
            string operationName = response.Name;
            // This name can be stored, then the long-running operation retrieved later by name
            lro::Operation<Operation, Operation> retrievedResponse = backendBucketsClient.PollOncePatch(operationName);
            // Check if the retrieved long-running operation has completed
            if (retrievedResponse.IsCompleted)
            {
                // If it has completed, then access the result
                Operation retrievedResult = retrievedResponse.Result;
            }
            // End snippet
        }

        /// <summary>Snippet for PatchAsync</summary>
        public async Task PatchAsync()
        {
            // Snippet: PatchAsync(string, string, BackendBucket, CallSettings)
            // Additional: PatchAsync(string, string, BackendBucket, CancellationToken)
            // Create client
            BackendBucketsClient backendBucketsClient = await BackendBucketsClient.CreateAsync();
            // Initialize request argument(s)
            string project = "";
            string backendBucket = "";
            BackendBucket backendBucketResource = new BackendBucket();
            // Make the request
            lro::Operation<Operation, Operation> response = await backendBucketsClient.PatchAsync(project, backendBucket, backendBucketResource);

            // Poll until the returned long-running operation is complete
            lro::Operation<Operation, Operation> completedResponse = await response.PollUntilCompletedAsync();
            // Retrieve the operation result
            Operation result = completedResponse.Result;

            // Or get the name of the operation
            string operationName = response.Name;
            // This name can be stored, then the long-running operation retrieved later by name
            lro::Operation<Operation, Operation> retrievedResponse = await backendBucketsClient.PollOncePatchAsync(operationName);
            // Check if the retrieved long-running operation has completed
            if (retrievedResponse.IsCompleted)
            {
                // If it has completed, then access the result
                Operation retrievedResult = retrievedResponse.Result;
            }
            // End snippet
        }

        /// <summary>Snippet for SetEdgeSecurityPolicy</summary>
        public void SetEdgeSecurityPolicyRequestObject()
        {
            // Snippet: SetEdgeSecurityPolicy(SetEdgeSecurityPolicyBackendBucketRequest, CallSettings)
            // Create client
            BackendBucketsClient backendBucketsClient = BackendBucketsClient.Create();
            // Initialize request argument(s)
            SetEdgeSecurityPolicyBackendBucketRequest request = new SetEdgeSecurityPolicyBackendBucketRequest
            {
                RequestId = "",
                BackendBucket = "",
                SecurityPolicyReferenceResource = new SecurityPolicyReference(),
                Project = "",
            };
            // Make the request
            lro::Operation<Operation, Operation> response = backendBucketsClient.SetEdgeSecurityPolicy(request);

            // Poll until the returned long-running operation is complete
            lro::Operation<Operation, Operation> completedResponse = response.PollUntilCompleted();
            // Retrieve the operation result
            Operation result = completedResponse.Result;

            // Or get the name of the operation
            string operationName = response.Name;
            // This name can be stored, then the long-running operation retrieved later by name
            lro::Operation<Operation, Operation> retrievedResponse = backendBucketsClient.PollOnceSetEdgeSecurityPolicy(operationName);
            // Check if the retrieved long-running operation has completed
            if (retrievedResponse.IsCompleted)
            {
                // If it has completed, then access the result
                Operation retrievedResult = retrievedResponse.Result;
            }
            // End snippet
        }

        /// <summary>Snippet for SetEdgeSecurityPolicyAsync</summary>
        public async Task SetEdgeSecurityPolicyRequestObjectAsync()
        {
            // Snippet: SetEdgeSecurityPolicyAsync(SetEdgeSecurityPolicyBackendBucketRequest, CallSettings)
            // Additional: SetEdgeSecurityPolicyAsync(SetEdgeSecurityPolicyBackendBucketRequest, CancellationToken)
            // Create client
            BackendBucketsClient backendBucketsClient = await BackendBucketsClient.CreateAsync();
            // Initialize request argument(s)
            SetEdgeSecurityPolicyBackendBucketRequest request = new SetEdgeSecurityPolicyBackendBucketRequest
            {
                RequestId = "",
                BackendBucket = "",
                SecurityPolicyReferenceResource = new SecurityPolicyReference(),
                Project = "",
            };
            // Make the request
            lro::Operation<Operation, Operation> response = await backendBucketsClient.SetEdgeSecurityPolicyAsync(request);

            // Poll until the returned long-running operation is complete
            lro::Operation<Operation, Operation> completedResponse = await response.PollUntilCompletedAsync();
            // Retrieve the operation result
            Operation result = completedResponse.Result;

            // Or get the name of the operation
            string operationName = response.Name;
            // This name can be stored, then the long-running operation retrieved later by name
            lro::Operation<Operation, Operation> retrievedResponse = await backendBucketsClient.PollOnceSetEdgeSecurityPolicyAsync(operationName);
            // Check if the retrieved long-running operation has completed
            if (retrievedResponse.IsCompleted)
            {
                // If it has completed, then access the result
                Operation retrievedResult = retrievedResponse.Result;
            }
            // End snippet
        }

        /// <summary>Snippet for SetEdgeSecurityPolicy</summary>
        public void SetEdgeSecurityPolicy()
        {
            // Snippet: SetEdgeSecurityPolicy(string, string, SecurityPolicyReference, CallSettings)
            // Create client
            BackendBucketsClient backendBucketsClient = BackendBucketsClient.Create();
            // Initialize request argument(s)
            string project = "";
            string backendBucket = "";
            SecurityPolicyReference securityPolicyReferenceResource = new SecurityPolicyReference();
            // Make the request
            lro::Operation<Operation, Operation> response = backendBucketsClient.SetEdgeSecurityPolicy(project, backendBucket, securityPolicyReferenceResource);

            // Poll until the returned long-running operation is complete
            lro::Operation<Operation, Operation> completedResponse = response.PollUntilCompleted();
            // Retrieve the operation result
            Operation result = completedResponse.Result;

            // Or get the name of the operation
            string operationName = response.Name;
            // This name can be stored, then the long-running operation retrieved later by name
            lro::Operation<Operation, Operation> retrievedResponse = backendBucketsClient.PollOnceSetEdgeSecurityPolicy(operationName);
            // Check if the retrieved long-running operation has completed
            if (retrievedResponse.IsCompleted)
            {
                // If it has completed, then access the result
                Operation retrievedResult = retrievedResponse.Result;
            }
            // End snippet
        }

        /// <summary>Snippet for SetEdgeSecurityPolicyAsync</summary>
        public async Task SetEdgeSecurityPolicyAsync()
        {
            // Snippet: SetEdgeSecurityPolicyAsync(string, string, SecurityPolicyReference, CallSettings)
            // Additional: SetEdgeSecurityPolicyAsync(string, string, SecurityPolicyReference, CancellationToken)
            // Create client
            BackendBucketsClient backendBucketsClient = await BackendBucketsClient.CreateAsync();
            // Initialize request argument(s)
            string project = "";
            string backendBucket = "";
            SecurityPolicyReference securityPolicyReferenceResource = new SecurityPolicyReference();
            // Make the request
            lro::Operation<Operation, Operation> response = await backendBucketsClient.SetEdgeSecurityPolicyAsync(project, backendBucket, securityPolicyReferenceResource);

            // Poll until the returned long-running operation is complete
            lro::Operation<Operation, Operation> completedResponse = await response.PollUntilCompletedAsync();
            // Retrieve the operation result
            Operation result = completedResponse.Result;

            // Or get the name of the operation
            string operationName = response.Name;
            // This name can be stored, then the long-running operation retrieved later by name
            lro::Operation<Operation, Operation> retrievedResponse = await backendBucketsClient.PollOnceSetEdgeSecurityPolicyAsync(operationName);
            // Check if the retrieved long-running operation has completed
            if (retrievedResponse.IsCompleted)
            {
                // If it has completed, then access the result
                Operation retrievedResult = retrievedResponse.Result;
            }
            // End snippet
        }

        /// <summary>Snippet for Update</summary>
        public void UpdateRequestObject()
        {
            // Snippet: Update(UpdateBackendBucketRequest, CallSettings)
            // Create client
            BackendBucketsClient backendBucketsClient = BackendBucketsClient.Create();
            // Initialize request argument(s)
            UpdateBackendBucketRequest request = new UpdateBackendBucketRequest
            {
                RequestId = "",
                BackendBucket = "",
                Project = "",
                BackendBucketResource = new BackendBucket(),
            };
            // Make the request
            lro::Operation<Operation, Operation> response = backendBucketsClient.Update(request);

            // Poll until the returned long-running operation is complete
            lro::Operation<Operation, Operation> completedResponse = response.PollUntilCompleted();
            // Retrieve the operation result
            Operation result = completedResponse.Result;

            // Or get the name of the operation
            string operationName = response.Name;
            // This name can be stored, then the long-running operation retrieved later by name
            lro::Operation<Operation, Operation> retrievedResponse = backendBucketsClient.PollOnceUpdate(operationName);
            // Check if the retrieved long-running operation has completed
            if (retrievedResponse.IsCompleted)
            {
                // If it has completed, then access the result
                Operation retrievedResult = retrievedResponse.Result;
            }
            // End snippet
        }

        /// <summary>Snippet for UpdateAsync</summary>
        public async Task UpdateRequestObjectAsync()
        {
            // Snippet: UpdateAsync(UpdateBackendBucketRequest, CallSettings)
            // Additional: UpdateAsync(UpdateBackendBucketRequest, CancellationToken)
            // Create client
            BackendBucketsClient backendBucketsClient = await BackendBucketsClient.CreateAsync();
            // Initialize request argument(s)
            UpdateBackendBucketRequest request = new UpdateBackendBucketRequest
            {
                RequestId = "",
                BackendBucket = "",
                Project = "",
                BackendBucketResource = new BackendBucket(),
            };
            // Make the request
            lro::Operation<Operation, Operation> response = await backendBucketsClient.UpdateAsync(request);

            // Poll until the returned long-running operation is complete
            lro::Operation<Operation, Operation> completedResponse = await response.PollUntilCompletedAsync();
            // Retrieve the operation result
            Operation result = completedResponse.Result;

            // Or get the name of the operation
            string operationName = response.Name;
            // This name can be stored, then the long-running operation retrieved later by name
            lro::Operation<Operation, Operation> retrievedResponse = await backendBucketsClient.PollOnceUpdateAsync(operationName);
            // Check if the retrieved long-running operation has completed
            if (retrievedResponse.IsCompleted)
            {
                // If it has completed, then access the result
                Operation retrievedResult = retrievedResponse.Result;
            }
            // End snippet
        }

        /// <summary>Snippet for Update</summary>
        public void Update()
        {
            // Snippet: Update(string, string, BackendBucket, CallSettings)
            // Create client
            BackendBucketsClient backendBucketsClient = BackendBucketsClient.Create();
            // Initialize request argument(s)
            string project = "";
            string backendBucket = "";
            BackendBucket backendBucketResource = new BackendBucket();
            // Make the request
            lro::Operation<Operation, Operation> response = backendBucketsClient.Update(project, backendBucket, backendBucketResource);

            // Poll until the returned long-running operation is complete
            lro::Operation<Operation, Operation> completedResponse = response.PollUntilCompleted();
            // Retrieve the operation result
            Operation result = completedResponse.Result;

            // Or get the name of the operation
            string operationName = response.Name;
            // This name can be stored, then the long-running operation retrieved later by name
            lro::Operation<Operation, Operation> retrievedResponse = backendBucketsClient.PollOnceUpdate(operationName);
            // Check if the retrieved long-running operation has completed
            if (retrievedResponse.IsCompleted)
            {
                // If it has completed, then access the result
                Operation retrievedResult = retrievedResponse.Result;
            }
            // End snippet
        }

        /// <summary>Snippet for UpdateAsync</summary>
        public async Task UpdateAsync()
        {
            // Snippet: UpdateAsync(string, string, BackendBucket, CallSettings)
            // Additional: UpdateAsync(string, string, BackendBucket, CancellationToken)
            // Create client
            BackendBucketsClient backendBucketsClient = await BackendBucketsClient.CreateAsync();
            // Initialize request argument(s)
            string project = "";
            string backendBucket = "";
            BackendBucket backendBucketResource = new BackendBucket();
            // Make the request
            lro::Operation<Operation, Operation> response = await backendBucketsClient.UpdateAsync(project, backendBucket, backendBucketResource);

            // Poll until the returned long-running operation is complete
            lro::Operation<Operation, Operation> completedResponse = await response.PollUntilCompletedAsync();
            // Retrieve the operation result
            Operation result = completedResponse.Result;

            // Or get the name of the operation
            string operationName = response.Name;
            // This name can be stored, then the long-running operation retrieved later by name
            lro::Operation<Operation, Operation> retrievedResponse = await backendBucketsClient.PollOnceUpdateAsync(operationName);
            // Check if the retrieved long-running operation has completed
            if (retrievedResponse.IsCompleted)
            {
                // If it has completed, then access the result
                Operation retrievedResult = retrievedResponse.Result;
            }
            // End snippet
        }
    }
}
/*
 * Copyright 2010-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */

/*
 * Do not modify this file. This file is generated from the opsworks-2013-02-18.normal.json service model.
 */

using System;
using System.IO;
using System.Text;
using Microsoft.VisualStudio.TestTools.UnitTesting;

using Amazon.OpsWorks;
using Amazon.OpsWorks.Model;
using Amazon.OpsWorks.Model.Internal.MarshallTransformations;
using Amazon.Runtime.Internal.Transform;
using ServiceClientGenerator;

using AWSSDK_DotNet35.UnitTests.TestTools;

namespace AWSSDK_DotNet35.UnitTests.Marshalling
{
    [TestClass]
    public class OpsWorksMarshallingTests
    {
        static readonly ServiceModel service_model = Utils.LoadServiceModel("opsworks-2013-02-18.normal.json", "opsworks.customizations.json");

        [TestMethod]
        [TestCategory("UnitTest")]
        [TestCategory("Json")]
        [TestCategory("OpsWorks")]
        public void AssignInstanceMarshallTest()
        {
            var request = InstantiateClassGenerator.Execute<AssignInstanceRequest>();

            var marshaller = new AssignInstanceRequestMarshaller();
            var internalRequest = marshaller.Marshall(request);
            var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content);
            Comparer.CompareObjectToJson<AssignInstanceRequest>(request,jsonRequest);
        }

        [TestMethod]
        [TestCategory("UnitTest")]
        [TestCategory("Json")]
        [TestCategory("OpsWorks")]
        public void AssignVolumeMarshallTest()
        {
            var request = InstantiateClassGenerator.Execute<AssignVolumeRequest>();

            var marshaller = new AssignVolumeRequestMarshaller();
            var internalRequest = marshaller.Marshall(request);
            var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content);
            Comparer.CompareObjectToJson<AssignVolumeRequest>(request,jsonRequest);
        }

        [TestMethod]
        [TestCategory("UnitTest")]
        [TestCategory("Json")]
        [TestCategory("OpsWorks")]
        public void AssociateElasticIpMarshallTest()
        {
            var request = InstantiateClassGenerator.Execute<AssociateElasticIpRequest>();

            var marshaller = new AssociateElasticIpRequestMarshaller();
            var internalRequest = marshaller.Marshall(request);
            var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content);
            Comparer.CompareObjectToJson<AssociateElasticIpRequest>(request,jsonRequest);
        }

        [TestMethod]
        [TestCategory("UnitTest")]
        [TestCategory("Json")]
        [TestCategory("OpsWorks")]
        public void AttachElasticLoadBalancerMarshallTest()
        {
            var request = InstantiateClassGenerator.Execute<AttachElasticLoadBalancerRequest>();

            var marshaller = new AttachElasticLoadBalancerRequestMarshaller();
            var internalRequest = marshaller.Marshall(request);
            var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content);
            Comparer.CompareObjectToJson<AttachElasticLoadBalancerRequest>(request,jsonRequest);
        }

        [TestMethod]
        [TestCategory("UnitTest")]
        [TestCategory("Json")]
        [TestCategory("OpsWorks")]
        public void CloneStackMarshallTest()
        {
            var request = InstantiateClassGenerator.Execute<CloneStackRequest>();

            var marshaller = new CloneStackRequestMarshaller();
            var internalRequest = marshaller.Marshall(request);
            var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content);
            Comparer.CompareObjectToJson<CloneStackRequest>(request,jsonRequest);

            var webResponse = new WebResponseData
            {
                Headers = {
                    {"x-amzn-RequestId", Guid.NewGuid().ToString()},
                    {"x-amz-crc32","0"}
                }
            };
            var jsonResponse = new JsonSampleGenerator(service_model, service_model.FindOperation("CloneStack").ResponseStructure).Execute();
            webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(jsonResponse).Length.ToString());
            UnmarshallerContext context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(jsonResponse), false, webResponse);
            var response = CloneStackResponseUnmarshaller.Instance.Unmarshall(context) as CloneStackResponse;
            InstantiateClassGenerator.ValidateObjectFullyInstantiated(response);
        }

        [TestMethod]
        [TestCategory("UnitTest")]
        [TestCategory("Json")]
        [TestCategory("OpsWorks")]
        public void CreateAppMarshallTest()
        {
            var request = InstantiateClassGenerator.Execute<CreateAppRequest>();

            var marshaller = new CreateAppRequestMarshaller();
            var internalRequest = marshaller.Marshall(request);
            var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content);
            Comparer.CompareObjectToJson<CreateAppRequest>(request,jsonRequest);

            var webResponse = new WebResponseData
            {
                Headers = {
                    {"x-amzn-RequestId", Guid.NewGuid().ToString()},
                    {"x-amz-crc32","0"}
                }
            };
            var jsonResponse = new JsonSampleGenerator(service_model, service_model.FindOperation("CreateApp").ResponseStructure).Execute();
            webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(jsonResponse).Length.ToString());
            UnmarshallerContext context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(jsonResponse), false, webResponse);
            var response = CreateAppResponseUnmarshaller.Instance.Unmarshall(context) as CreateAppResponse;
            InstantiateClassGenerator.ValidateObjectFullyInstantiated(response);
        }

        [TestMethod]
        [TestCategory("UnitTest")]
        [TestCategory("Json")]
        [TestCategory("OpsWorks")]
        public void CreateDeploymentMarshallTest()
        {
            var request = InstantiateClassGenerator.Execute<CreateDeploymentRequest>();

            var marshaller = new CreateDeploymentRequestMarshaller();
            var internalRequest = marshaller.Marshall(request);
            var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content);
            Comparer.CompareObjectToJson<CreateDeploymentRequest>(request,jsonRequest);

            var webResponse = new WebResponseData
            {
                Headers = {
                    {"x-amzn-RequestId", Guid.NewGuid().ToString()},
                    {"x-amz-crc32","0"}
                }
            };
            var jsonResponse = new JsonSampleGenerator(service_model, service_model.FindOperation("CreateDeployment").ResponseStructure).Execute();
            webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(jsonResponse).Length.ToString());
            UnmarshallerContext context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(jsonResponse), false, webResponse);
            var response = CreateDeploymentResponseUnmarshaller.Instance.Unmarshall(context) as CreateDeploymentResponse;
            InstantiateClassGenerator.ValidateObjectFullyInstantiated(response);
        }

        [TestMethod]
        [TestCategory("UnitTest")]
        [TestCategory("Json")]
        [TestCategory("OpsWorks")]
        public void CreateInstanceMarshallTest()
        {
            var request = InstantiateClassGenerator.Execute<CreateInstanceRequest>();

            var marshaller = new CreateInstanceRequestMarshaller();
            var internalRequest = marshaller.Marshall(request);
            var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content);
            Comparer.CompareObjectToJson<CreateInstanceRequest>(request,jsonRequest);

            var webResponse = new WebResponseData
            {
                Headers = {
                    {"x-amzn-RequestId", Guid.NewGuid().ToString()},
                    {"x-amz-crc32","0"}
                }
            };
            var jsonResponse = new JsonSampleGenerator(service_model, service_model.FindOperation("CreateInstance").ResponseStructure).Execute();
            webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(jsonResponse).Length.ToString());
            UnmarshallerContext context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(jsonResponse), false, webResponse);
            var response = CreateInstanceResponseUnmarshaller.Instance.Unmarshall(context) as CreateInstanceResponse;
            InstantiateClassGenerator.ValidateObjectFullyInstantiated(response);
        }

        [TestMethod]
        [TestCategory("UnitTest")]
        [TestCategory("Json")]
        [TestCategory("OpsWorks")]
        public void CreateLayerMarshallTest()
        {
            var request = InstantiateClassGenerator.Execute<CreateLayerRequest>();

            var marshaller = new CreateLayerRequestMarshaller();
            var internalRequest = marshaller.Marshall(request);
            var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content);
            Comparer.CompareObjectToJson<CreateLayerRequest>(request,jsonRequest);

            var webResponse = new WebResponseData
            {
                Headers = {
                    {"x-amzn-RequestId", Guid.NewGuid().ToString()},
                    {"x-amz-crc32","0"}
                }
            };
            var jsonResponse = new JsonSampleGenerator(service_model, service_model.FindOperation("CreateLayer").ResponseStructure).Execute();
            webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(jsonResponse).Length.ToString());
            UnmarshallerContext context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(jsonResponse), false, webResponse);
            var response = CreateLayerResponseUnmarshaller.Instance.Unmarshall(context) as CreateLayerResponse;
            InstantiateClassGenerator.ValidateObjectFullyInstantiated(response);
        }

        [TestMethod]
        [TestCategory("UnitTest")]
        [TestCategory("Json")]
        [TestCategory("OpsWorks")]
        public void CreateStackMarshallTest()
        {
            var request = InstantiateClassGenerator.Execute<CreateStackRequest>();

            var marshaller = new CreateStackRequestMarshaller();
            var internalRequest = marshaller.Marshall(request);
            var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content);
            Comparer.CompareObjectToJson<CreateStackRequest>(request,jsonRequest);

            var webResponse = new WebResponseData
            {
                Headers = {
                    {"x-amzn-RequestId", Guid.NewGuid().ToString()},
                    {"x-amz-crc32","0"}
                }
            };
            var jsonResponse = new JsonSampleGenerator(service_model, service_model.FindOperation("CreateStack").ResponseStructure).Execute();
            webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(jsonResponse).Length.ToString());
            UnmarshallerContext context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(jsonResponse), false, webResponse);
            var response = CreateStackResponseUnmarshaller.Instance.Unmarshall(context) as CreateStackResponse;
            InstantiateClassGenerator.ValidateObjectFullyInstantiated(response);
        }

        [TestMethod]
        [TestCategory("UnitTest")]
        [TestCategory("Json")]
        [TestCategory("OpsWorks")]
        public void CreateUserProfileMarshallTest()
        {
            var request = InstantiateClassGenerator.Execute<CreateUserProfileRequest>();

            var marshaller = new CreateUserProfileRequestMarshaller();
            var internalRequest = marshaller.Marshall(request);
            var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content);
            Comparer.CompareObjectToJson<CreateUserProfileRequest>(request,jsonRequest);

            var webResponse = new WebResponseData
            {
                Headers = {
                    {"x-amzn-RequestId", Guid.NewGuid().ToString()},
                    {"x-amz-crc32","0"}
                }
            };
            var jsonResponse = new JsonSampleGenerator(service_model, service_model.FindOperation("CreateUserProfile").ResponseStructure).Execute();
            webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(jsonResponse).Length.ToString());
            UnmarshallerContext context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(jsonResponse), false, webResponse);
            var response = CreateUserProfileResponseUnmarshaller.Instance.Unmarshall(context) as CreateUserProfileResponse;
            InstantiateClassGenerator.ValidateObjectFullyInstantiated(response);
        }

        [TestMethod]
        [TestCategory("UnitTest")]
        [TestCategory("Json")]
        [TestCategory("OpsWorks")]
        public void DeleteAppMarshallTest()
        {
            var request = InstantiateClassGenerator.Execute<DeleteAppRequest>();

            var marshaller = new DeleteAppRequestMarshaller();
            var internalRequest = marshaller.Marshall(request);
            var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content);
            Comparer.CompareObjectToJson<DeleteAppRequest>(request,jsonRequest);
        }

        [TestMethod]
        [TestCategory("UnitTest")]
        [TestCategory("Json")]
        [TestCategory("OpsWorks")]
        public void DeleteInstanceMarshallTest()
        {
            var request = InstantiateClassGenerator.Execute<DeleteInstanceRequest>();

            var marshaller = new DeleteInstanceRequestMarshaller();
            var internalRequest = marshaller.Marshall(request);
            var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content);
            Comparer.CompareObjectToJson<DeleteInstanceRequest>(request,jsonRequest);
        }

        [TestMethod]
        [TestCategory("UnitTest")]
        [TestCategory("Json")]
        [TestCategory("OpsWorks")]
        public
void DeleteLayerMarshallTest() { var request = InstantiateClassGenerator.Execute<DeleteLayerRequest>(); var marshaller = new DeleteLayerRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<DeleteLayerRequest>(request,jsonRequest); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void DeleteStackMarshallTest() { var request = InstantiateClassGenerator.Execute<DeleteStackRequest>(); var marshaller = new DeleteStackRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<DeleteStackRequest>(request,jsonRequest); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void DeleteUserProfileMarshallTest() { var request = InstantiateClassGenerator.Execute<DeleteUserProfileRequest>(); var marshaller = new DeleteUserProfileRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<DeleteUserProfileRequest>(request,jsonRequest); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void DeregisterEcsClusterMarshallTest() { var request = InstantiateClassGenerator.Execute<DeregisterEcsClusterRequest>(); var marshaller = new DeregisterEcsClusterRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<DeregisterEcsClusterRequest>(request,jsonRequest); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void DeregisterElasticIpMarshallTest() { var request = InstantiateClassGenerator.Execute<DeregisterElasticIpRequest>(); var marshaller 
= new DeregisterElasticIpRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<DeregisterElasticIpRequest>(request,jsonRequest); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void DeregisterInstanceMarshallTest() { var request = InstantiateClassGenerator.Execute<DeregisterInstanceRequest>(); var marshaller = new DeregisterInstanceRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<DeregisterInstanceRequest>(request,jsonRequest); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void DeregisterRdsDbInstanceMarshallTest() { var request = InstantiateClassGenerator.Execute<DeregisterRdsDbInstanceRequest>(); var marshaller = new DeregisterRdsDbInstanceRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<DeregisterRdsDbInstanceRequest>(request,jsonRequest); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void DeregisterVolumeMarshallTest() { var request = InstantiateClassGenerator.Execute<DeregisterVolumeRequest>(); var marshaller = new DeregisterVolumeRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<DeregisterVolumeRequest>(request,jsonRequest); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void DescribeAgentVersionsMarshallTest() { var request = InstantiateClassGenerator.Execute<DescribeAgentVersionsRequest>(); var marshaller = new DescribeAgentVersionsRequestMarshaller(); var 
internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<DescribeAgentVersionsRequest>(request,jsonRequest); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var jsonResponse = new JsonSampleGenerator(service_model, service_model.FindOperation("DescribeAgentVersions").ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(jsonResponse).Length.ToString()); UnmarshallerContext context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(jsonResponse), false, webResponse); var response = DescribeAgentVersionsResponseUnmarshaller.Instance.Unmarshall(context) as DescribeAgentVersionsResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void DescribeAppsMarshallTest() { var request = InstantiateClassGenerator.Execute<DescribeAppsRequest>(); var marshaller = new DescribeAppsRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<DescribeAppsRequest>(request,jsonRequest); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var jsonResponse = new JsonSampleGenerator(service_model, service_model.FindOperation("DescribeApps").ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(jsonResponse).Length.ToString()); UnmarshallerContext context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(jsonResponse), false, webResponse); var response = DescribeAppsResponseUnmarshaller.Instance.Unmarshall(context) as DescribeAppsResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } 
// Machine-generated MSTest marshalling round-trip tests for AWS OpsWorks Describe*
// operations (DescribeCommands, DescribeDeployments, DescribeEcsClusters,
// DescribeElasticIps, DescribeElasticLoadBalancers, DescribeInstances, DescribeLayers,
// DescribeLoadBasedAutoScaling, DescribeMyUserProfile). Generated flattened formatting
// is preserved byte-for-byte — regenerate rather than hand-edit. Each test marshalls a
// fully-populated request, compares the JSON body to the request object, then
// unmarshalls a synthesized sample response and validates it is fully instantiated.
// NOTE(review): DescribeMyUserProfileMarshallTest skips the jsonRequest/
// CompareObjectToJson step present in the other tests — presumably because the request
// has no marshallable members, so the generator emits no comparison; confirm against
// the code generator before "fixing".
[TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void DescribeCommandsMarshallTest() { var request = InstantiateClassGenerator.Execute<DescribeCommandsRequest>(); var marshaller = new DescribeCommandsRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<DescribeCommandsRequest>(request,jsonRequest); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var jsonResponse = new JsonSampleGenerator(service_model, service_model.FindOperation("DescribeCommands").ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(jsonResponse).Length.ToString()); UnmarshallerContext context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(jsonResponse), false, webResponse); var response = DescribeCommandsResponseUnmarshaller.Instance.Unmarshall(context) as DescribeCommandsResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void DescribeDeploymentsMarshallTest() { var request = InstantiateClassGenerator.Execute<DescribeDeploymentsRequest>(); var marshaller = new DescribeDeploymentsRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<DescribeDeploymentsRequest>(request,jsonRequest); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var jsonResponse = new JsonSampleGenerator(service_model, service_model.FindOperation("DescribeDeployments").ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(jsonResponse).Length.ToString()); 
UnmarshallerContext context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(jsonResponse), false, webResponse); var response = DescribeDeploymentsResponseUnmarshaller.Instance.Unmarshall(context) as DescribeDeploymentsResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void DescribeEcsClustersMarshallTest() { var request = InstantiateClassGenerator.Execute<DescribeEcsClustersRequest>(); var marshaller = new DescribeEcsClustersRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<DescribeEcsClustersRequest>(request,jsonRequest); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var jsonResponse = new JsonSampleGenerator(service_model, service_model.FindOperation("DescribeEcsClusters").ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(jsonResponse).Length.ToString()); UnmarshallerContext context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(jsonResponse), false, webResponse); var response = DescribeEcsClustersResponseUnmarshaller.Instance.Unmarshall(context) as DescribeEcsClustersResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void DescribeElasticIpsMarshallTest() { var request = InstantiateClassGenerator.Execute<DescribeElasticIpsRequest>(); var marshaller = new DescribeElasticIpsRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<DescribeElasticIpsRequest>(request,jsonRequest); var webResponse = new WebResponseData { Headers = { 
{"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var jsonResponse = new JsonSampleGenerator(service_model, service_model.FindOperation("DescribeElasticIps").ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(jsonResponse).Length.ToString()); UnmarshallerContext context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(jsonResponse), false, webResponse); var response = DescribeElasticIpsResponseUnmarshaller.Instance.Unmarshall(context) as DescribeElasticIpsResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void DescribeElasticLoadBalancersMarshallTest() { var request = InstantiateClassGenerator.Execute<DescribeElasticLoadBalancersRequest>(); var marshaller = new DescribeElasticLoadBalancersRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<DescribeElasticLoadBalancersRequest>(request,jsonRequest); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var jsonResponse = new JsonSampleGenerator(service_model, service_model.FindOperation("DescribeElasticLoadBalancers").ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(jsonResponse).Length.ToString()); UnmarshallerContext context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(jsonResponse), false, webResponse); var response = DescribeElasticLoadBalancersResponseUnmarshaller.Instance.Unmarshall(context) as DescribeElasticLoadBalancersResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void DescribeInstancesMarshallTest() { var request = 
InstantiateClassGenerator.Execute<DescribeInstancesRequest>(); var marshaller = new DescribeInstancesRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<DescribeInstancesRequest>(request,jsonRequest); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var jsonResponse = new JsonSampleGenerator(service_model, service_model.FindOperation("DescribeInstances").ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(jsonResponse).Length.ToString()); UnmarshallerContext context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(jsonResponse), false, webResponse); var response = DescribeInstancesResponseUnmarshaller.Instance.Unmarshall(context) as DescribeInstancesResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void DescribeLayersMarshallTest() { var request = InstantiateClassGenerator.Execute<DescribeLayersRequest>(); var marshaller = new DescribeLayersRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<DescribeLayersRequest>(request,jsonRequest); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var jsonResponse = new JsonSampleGenerator(service_model, service_model.FindOperation("DescribeLayers").ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(jsonResponse).Length.ToString()); UnmarshallerContext context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(jsonResponse), false, webResponse); var response = 
DescribeLayersResponseUnmarshaller.Instance.Unmarshall(context) as DescribeLayersResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void DescribeLoadBasedAutoScalingMarshallTest() { var request = InstantiateClassGenerator.Execute<DescribeLoadBasedAutoScalingRequest>(); var marshaller = new DescribeLoadBasedAutoScalingRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<DescribeLoadBasedAutoScalingRequest>(request,jsonRequest); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var jsonResponse = new JsonSampleGenerator(service_model, service_model.FindOperation("DescribeLoadBasedAutoScaling").ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(jsonResponse).Length.ToString()); UnmarshallerContext context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(jsonResponse), false, webResponse); var response = DescribeLoadBasedAutoScalingResponseUnmarshaller.Instance.Unmarshall(context) as DescribeLoadBasedAutoScalingResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void DescribeMyUserProfileMarshallTest() { var request = InstantiateClassGenerator.Execute<DescribeMyUserProfileRequest>(); var marshaller = new DescribeMyUserProfileRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var jsonResponse = new JsonSampleGenerator(service_model, service_model.FindOperation("DescribeMyUserProfile").ResponseStructure).Execute(); 
// Machine-generated MSTest marshalling round-trip tests for AWS OpsWorks
// (DescribeMyUserProfile tail, DescribePermissions, DescribeRaidArrays,
// DescribeRdsDbInstances, DescribeServiceErrors, DescribeStackProvisioningParameters,
// DescribeStacks, DescribeStackSummary, DescribeTimeBasedAutoScaling). Generated
// flattened formatting preserved byte-for-byte — regenerate rather than hand-edit.
// Each test marshalls a fully-populated request, diffs the JSON body against the
// request object, then unmarshalls a synthesized sample response (with a fabricated
// request-id and zero crc32 header) and validates it is fully instantiated.
webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(jsonResponse).Length.ToString()); UnmarshallerContext context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(jsonResponse), false, webResponse); var response = DescribeMyUserProfileResponseUnmarshaller.Instance.Unmarshall(context) as DescribeMyUserProfileResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void DescribePermissionsMarshallTest() { var request = InstantiateClassGenerator.Execute<DescribePermissionsRequest>(); var marshaller = new DescribePermissionsRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<DescribePermissionsRequest>(request,jsonRequest); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var jsonResponse = new JsonSampleGenerator(service_model, service_model.FindOperation("DescribePermissions").ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(jsonResponse).Length.ToString()); UnmarshallerContext context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(jsonResponse), false, webResponse); var response = DescribePermissionsResponseUnmarshaller.Instance.Unmarshall(context) as DescribePermissionsResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void DescribeRaidArraysMarshallTest() { var request = InstantiateClassGenerator.Execute<DescribeRaidArraysRequest>(); var marshaller = new DescribeRaidArraysRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); 
Comparer.CompareObjectToJson<DescribeRaidArraysRequest>(request,jsonRequest); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var jsonResponse = new JsonSampleGenerator(service_model, service_model.FindOperation("DescribeRaidArrays").ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(jsonResponse).Length.ToString()); UnmarshallerContext context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(jsonResponse), false, webResponse); var response = DescribeRaidArraysResponseUnmarshaller.Instance.Unmarshall(context) as DescribeRaidArraysResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void DescribeRdsDbInstancesMarshallTest() { var request = InstantiateClassGenerator.Execute<DescribeRdsDbInstancesRequest>(); var marshaller = new DescribeRdsDbInstancesRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<DescribeRdsDbInstancesRequest>(request,jsonRequest); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var jsonResponse = new JsonSampleGenerator(service_model, service_model.FindOperation("DescribeRdsDbInstances").ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(jsonResponse).Length.ToString()); UnmarshallerContext context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(jsonResponse), false, webResponse); var response = DescribeRdsDbInstancesResponseUnmarshaller.Instance.Unmarshall(context) as DescribeRdsDbInstancesResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] 
[TestCategory("OpsWorks")] public void DescribeServiceErrorsMarshallTest() { var request = InstantiateClassGenerator.Execute<DescribeServiceErrorsRequest>(); var marshaller = new DescribeServiceErrorsRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<DescribeServiceErrorsRequest>(request,jsonRequest); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var jsonResponse = new JsonSampleGenerator(service_model, service_model.FindOperation("DescribeServiceErrors").ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(jsonResponse).Length.ToString()); UnmarshallerContext context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(jsonResponse), false, webResponse); var response = DescribeServiceErrorsResponseUnmarshaller.Instance.Unmarshall(context) as DescribeServiceErrorsResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void DescribeStackProvisioningParametersMarshallTest() { var request = InstantiateClassGenerator.Execute<DescribeStackProvisioningParametersRequest>(); var marshaller = new DescribeStackProvisioningParametersRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<DescribeStackProvisioningParametersRequest>(request,jsonRequest); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var jsonResponse = new JsonSampleGenerator(service_model, service_model.FindOperation("DescribeStackProvisioningParameters").ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", 
UTF8Encoding.UTF8.GetBytes(jsonResponse).Length.ToString()); UnmarshallerContext context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(jsonResponse), false, webResponse); var response = DescribeStackProvisioningParametersResponseUnmarshaller.Instance.Unmarshall(context) as DescribeStackProvisioningParametersResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void DescribeStacksMarshallTest() { var request = InstantiateClassGenerator.Execute<DescribeStacksRequest>(); var marshaller = new DescribeStacksRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<DescribeStacksRequest>(request,jsonRequest); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var jsonResponse = new JsonSampleGenerator(service_model, service_model.FindOperation("DescribeStacks").ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(jsonResponse).Length.ToString()); UnmarshallerContext context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(jsonResponse), false, webResponse); var response = DescribeStacksResponseUnmarshaller.Instance.Unmarshall(context) as DescribeStacksResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void DescribeStackSummaryMarshallTest() { var request = InstantiateClassGenerator.Execute<DescribeStackSummaryRequest>(); var marshaller = new DescribeStackSummaryRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); 
Comparer.CompareObjectToJson<DescribeStackSummaryRequest>(request,jsonRequest); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var jsonResponse = new JsonSampleGenerator(service_model, service_model.FindOperation("DescribeStackSummary").ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(jsonResponse).Length.ToString()); UnmarshallerContext context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(jsonResponse), false, webResponse); var response = DescribeStackSummaryResponseUnmarshaller.Instance.Unmarshall(context) as DescribeStackSummaryResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void DescribeTimeBasedAutoScalingMarshallTest() { var request = InstantiateClassGenerator.Execute<DescribeTimeBasedAutoScalingRequest>(); var marshaller = new DescribeTimeBasedAutoScalingRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<DescribeTimeBasedAutoScalingRequest>(request,jsonRequest); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var jsonResponse = new JsonSampleGenerator(service_model, service_model.FindOperation("DescribeTimeBasedAutoScaling").ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(jsonResponse).Length.ToString()); UnmarshallerContext context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(jsonResponse), false, webResponse); var response = DescribeTimeBasedAutoScalingResponseUnmarshaller.Instance.Unmarshall(context) as DescribeTimeBasedAutoScalingResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] 
// Machine-generated MSTest marshalling round-trip tests for AWS OpsWorks
// (DescribeUserProfiles, DescribeVolumes, DetachElasticLoadBalancer,
// DisassociateElasticIp, GetHostnameSuggestion, GrantAccess, RebootInstance,
// Register*, SetLoadBasedAutoScaling, SetPermission — the last continues past this
// region). Generated flattened formatting preserved byte-for-byte — regenerate rather
// than hand-edit. Tests for operations without a response structure stop after the
// request/JSON comparison; the rest also unmarshall a synthesized sample response and
// validate it is fully instantiated.
[TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void DescribeUserProfilesMarshallTest() { var request = InstantiateClassGenerator.Execute<DescribeUserProfilesRequest>(); var marshaller = new DescribeUserProfilesRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<DescribeUserProfilesRequest>(request,jsonRequest); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var jsonResponse = new JsonSampleGenerator(service_model, service_model.FindOperation("DescribeUserProfiles").ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(jsonResponse).Length.ToString()); UnmarshallerContext context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(jsonResponse), false, webResponse); var response = DescribeUserProfilesResponseUnmarshaller.Instance.Unmarshall(context) as DescribeUserProfilesResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void DescribeVolumesMarshallTest() { var request = InstantiateClassGenerator.Execute<DescribeVolumesRequest>(); var marshaller = new DescribeVolumesRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<DescribeVolumesRequest>(request,jsonRequest); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var jsonResponse = new JsonSampleGenerator(service_model, service_model.FindOperation("DescribeVolumes").ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(jsonResponse).Length.ToString()); 
UnmarshallerContext context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(jsonResponse), false, webResponse); var response = DescribeVolumesResponseUnmarshaller.Instance.Unmarshall(context) as DescribeVolumesResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void DetachElasticLoadBalancerMarshallTest() { var request = InstantiateClassGenerator.Execute<DetachElasticLoadBalancerRequest>(); var marshaller = new DetachElasticLoadBalancerRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<DetachElasticLoadBalancerRequest>(request,jsonRequest); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void DisassociateElasticIpMarshallTest() { var request = InstantiateClassGenerator.Execute<DisassociateElasticIpRequest>(); var marshaller = new DisassociateElasticIpRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<DisassociateElasticIpRequest>(request,jsonRequest); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void GetHostnameSuggestionMarshallTest() { var request = InstantiateClassGenerator.Execute<GetHostnameSuggestionRequest>(); var marshaller = new GetHostnameSuggestionRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<GetHostnameSuggestionRequest>(request,jsonRequest); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var jsonResponse = new JsonSampleGenerator(service_model, 
service_model.FindOperation("GetHostnameSuggestion").ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(jsonResponse).Length.ToString()); UnmarshallerContext context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(jsonResponse), false, webResponse); var response = GetHostnameSuggestionResponseUnmarshaller.Instance.Unmarshall(context) as GetHostnameSuggestionResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void GrantAccessMarshallTest() { var request = InstantiateClassGenerator.Execute<GrantAccessRequest>(); var marshaller = new GrantAccessRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<GrantAccessRequest>(request,jsonRequest); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var jsonResponse = new JsonSampleGenerator(service_model, service_model.FindOperation("GrantAccess").ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(jsonResponse).Length.ToString()); UnmarshallerContext context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(jsonResponse), false, webResponse); var response = GrantAccessResponseUnmarshaller.Instance.Unmarshall(context) as GrantAccessResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void RebootInstanceMarshallTest() { var request = InstantiateClassGenerator.Execute<RebootInstanceRequest>(); var marshaller = new RebootInstanceRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); 
Comparer.CompareObjectToJson<RebootInstanceRequest>(request,jsonRequest); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void RegisterEcsClusterMarshallTest() { var request = InstantiateClassGenerator.Execute<RegisterEcsClusterRequest>(); var marshaller = new RegisterEcsClusterRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<RegisterEcsClusterRequest>(request,jsonRequest); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var jsonResponse = new JsonSampleGenerator(service_model, service_model.FindOperation("RegisterEcsCluster").ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(jsonResponse).Length.ToString()); UnmarshallerContext context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(jsonResponse), false, webResponse); var response = RegisterEcsClusterResponseUnmarshaller.Instance.Unmarshall(context) as RegisterEcsClusterResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void RegisterElasticIpMarshallTest() { var request = InstantiateClassGenerator.Execute<RegisterElasticIpRequest>(); var marshaller = new RegisterElasticIpRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<RegisterElasticIpRequest>(request,jsonRequest); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var jsonResponse = new JsonSampleGenerator(service_model, service_model.FindOperation("RegisterElasticIp").ResponseStructure).Execute(); 
webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(jsonResponse).Length.ToString()); UnmarshallerContext context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(jsonResponse), false, webResponse); var response = RegisterElasticIpResponseUnmarshaller.Instance.Unmarshall(context) as RegisterElasticIpResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void RegisterInstanceMarshallTest() { var request = InstantiateClassGenerator.Execute<RegisterInstanceRequest>(); var marshaller = new RegisterInstanceRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<RegisterInstanceRequest>(request,jsonRequest); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var jsonResponse = new JsonSampleGenerator(service_model, service_model.FindOperation("RegisterInstance").ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(jsonResponse).Length.ToString()); UnmarshallerContext context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(jsonResponse), false, webResponse); var response = RegisterInstanceResponseUnmarshaller.Instance.Unmarshall(context) as RegisterInstanceResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void RegisterRdsDbInstanceMarshallTest() { var request = InstantiateClassGenerator.Execute<RegisterRdsDbInstanceRequest>(); var marshaller = new RegisterRdsDbInstanceRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); 
Comparer.CompareObjectToJson<RegisterRdsDbInstanceRequest>(request,jsonRequest); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void RegisterVolumeMarshallTest() { var request = InstantiateClassGenerator.Execute<RegisterVolumeRequest>(); var marshaller = new RegisterVolumeRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<RegisterVolumeRequest>(request,jsonRequest); var webResponse = new WebResponseData { Headers = { {"x-amzn-RequestId", Guid.NewGuid().ToString()}, {"x-amz-crc32","0"} } }; var jsonResponse = new JsonSampleGenerator(service_model, service_model.FindOperation("RegisterVolume").ResponseStructure).Execute(); webResponse.Headers.Add("Content-Length", UTF8Encoding.UTF8.GetBytes(jsonResponse).Length.ToString()); UnmarshallerContext context = new JsonUnmarshallerContext(Utils.CreateStreamFromString(jsonResponse), false, webResponse); var response = RegisterVolumeResponseUnmarshaller.Instance.Unmarshall(context) as RegisterVolumeResponse; InstantiateClassGenerator.ValidateObjectFullyInstantiated(response); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void SetLoadBasedAutoScalingMarshallTest() { var request = InstantiateClassGenerator.Execute<SetLoadBasedAutoScalingRequest>(); var marshaller = new SetLoadBasedAutoScalingRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<SetLoadBasedAutoScalingRequest>(request,jsonRequest); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void SetPermissionMarshallTest() { var request = InstantiateClassGenerator.Execute<SetPermissionRequest>(); var marshaller = new SetPermissionRequestMarshaller(); var internalRequest = 
marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<SetPermissionRequest>(request,jsonRequest); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void SetTimeBasedAutoScalingMarshallTest() { var request = InstantiateClassGenerator.Execute<SetTimeBasedAutoScalingRequest>(); var marshaller = new SetTimeBasedAutoScalingRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<SetTimeBasedAutoScalingRequest>(request,jsonRequest); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void StartInstanceMarshallTest() { var request = InstantiateClassGenerator.Execute<StartInstanceRequest>(); var marshaller = new StartInstanceRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<StartInstanceRequest>(request,jsonRequest); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void StartStackMarshallTest() { var request = InstantiateClassGenerator.Execute<StartStackRequest>(); var marshaller = new StartStackRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<StartStackRequest>(request,jsonRequest); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void StopInstanceMarshallTest() { var request = InstantiateClassGenerator.Execute<StopInstanceRequest>(); var marshaller = new StopInstanceRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); 
Comparer.CompareObjectToJson<StopInstanceRequest>(request,jsonRequest); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void StopStackMarshallTest() { var request = InstantiateClassGenerator.Execute<StopStackRequest>(); var marshaller = new StopStackRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<StopStackRequest>(request,jsonRequest); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void UnassignInstanceMarshallTest() { var request = InstantiateClassGenerator.Execute<UnassignInstanceRequest>(); var marshaller = new UnassignInstanceRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<UnassignInstanceRequest>(request,jsonRequest); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void UnassignVolumeMarshallTest() { var request = InstantiateClassGenerator.Execute<UnassignVolumeRequest>(); var marshaller = new UnassignVolumeRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<UnassignVolumeRequest>(request,jsonRequest); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void UpdateAppMarshallTest() { var request = InstantiateClassGenerator.Execute<UpdateAppRequest>(); var marshaller = new UpdateAppRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<UpdateAppRequest>(request,jsonRequest); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void 
UpdateElasticIpMarshallTest() { var request = InstantiateClassGenerator.Execute<UpdateElasticIpRequest>(); var marshaller = new UpdateElasticIpRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<UpdateElasticIpRequest>(request,jsonRequest); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void UpdateInstanceMarshallTest() { var request = InstantiateClassGenerator.Execute<UpdateInstanceRequest>(); var marshaller = new UpdateInstanceRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<UpdateInstanceRequest>(request,jsonRequest); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void UpdateLayerMarshallTest() { var request = InstantiateClassGenerator.Execute<UpdateLayerRequest>(); var marshaller = new UpdateLayerRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<UpdateLayerRequest>(request,jsonRequest); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void UpdateMyUserProfileMarshallTest() { var request = InstantiateClassGenerator.Execute<UpdateMyUserProfileRequest>(); var marshaller = new UpdateMyUserProfileRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<UpdateMyUserProfileRequest>(request,jsonRequest); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void UpdateRdsDbInstanceMarshallTest() { var request = InstantiateClassGenerator.Execute<UpdateRdsDbInstanceRequest>(); var marshaller = new 
UpdateRdsDbInstanceRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<UpdateRdsDbInstanceRequest>(request,jsonRequest); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void UpdateStackMarshallTest() { var request = InstantiateClassGenerator.Execute<UpdateStackRequest>(); var marshaller = new UpdateStackRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<UpdateStackRequest>(request,jsonRequest); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void UpdateUserProfileMarshallTest() { var request = InstantiateClassGenerator.Execute<UpdateUserProfileRequest>(); var marshaller = new UpdateUserProfileRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<UpdateUserProfileRequest>(request,jsonRequest); } [TestMethod] [TestCategory("UnitTest")] [TestCategory("Json")] [TestCategory("OpsWorks")] public void UpdateVolumeMarshallTest() { var request = InstantiateClassGenerator.Execute<UpdateVolumeRequest>(); var marshaller = new UpdateVolumeRequestMarshaller(); var internalRequest = marshaller.Marshall(request); var jsonRequest = UTF8Encoding.UTF8.GetString(internalRequest.Content); Comparer.CompareObjectToJson<UpdateVolumeRequest>(request,jsonRequest); } } }
using System; using CmsEngine.Core.Constants; namespace CmsEngine.Ui.Middleware.SecurityHeaders { /// <summary> /// Exposes methods to build a policy. /// </summary> public class SecurityHeadersBuilder { private readonly SecurityHeadersPolicy _policy = new SecurityHeadersPolicy(); /// <summary> /// The number of seconds in one year /// </summary> public const int OneYearInSeconds = 60 * 60 * 24 * 365; /// <summary> /// Add default headers in accordance with most secure approach /// </summary> public SecurityHeadersBuilder AddDefaultSecurePolicy() { // TODO: Have these settings in a configuration file AddFrameOptionsDeny(); AddXssProtectionBlock(); AddContentTypeOptionsNoSniff(); AddStrictTransportSecurityMaxAge(); RemoveServerHeader(); AddCustomHeader("Referrer-Policy", "strict-origin-when-cross-origin"); AddCustomHeader("Feature-Policy", "geolocation 'none';midi 'none';notifications 'none';push 'none';sync-xhr 'none';" + "microphone 'none';camera 'none';magnetometer 'none';gyroscope 'none';speaker 'self';" + "vibrate 'none';fullscreen 'self';payment 'none';"); AddCustomHeader("Content-Security-Policy", "default-src https: 'unsafe-inline' 'unsafe-eval'; " + "img-src * 'self' data: https: blob:;" + "style-src 'self' 'unsafe-inline' github.githubassets.com www.google.com platform.twitter.com cdn.syndication.twimg.com fonts.googleapis.com;" + "script-src 'self' 'unsafe-inline' 'unsafe-eval' www.gstatic.com gist.github.com *.disqus.com www.googletagmanager.com www.google.com cse.google.com cdn.syndication.twimg.com platform.twitter.com cdn1.developermedia.com cdn2.developermedia.com apis.google.com www.googletagservices.com adservice.google.com securepubads.g.doubleclick.net ajax.aspnetcdn.com *.google-analytics.com"); RemoveHeader("X-Powered-By"); return this; } /// <summary> /// Add X-Frame-Options DENY to all requests. 
/// The page cannot be displayed in a frame, regardless of the site attempting to do so /// </summary> public SecurityHeadersBuilder AddFrameOptionsDeny() { _policy.SetHeaders[FrameOptionsConstants.Header] = FrameOptionsConstants.Deny; return this; } /// <summary> /// Add X-Frame-Options SAMEORIGIN to all requests. /// The page can only be displayed in a frame on the same origin as the page itself. /// </summary> public SecurityHeadersBuilder AddFrameOptionsSameOrigin() { _policy.SetHeaders[FrameOptionsConstants.Header] = FrameOptionsConstants.SameOrigin; return this; } /// <summary> /// Add X-Frame-Options ALLOW-FROM {uri} to all requests, where the uri is provided /// The page can only be displayed in a frame on the specified origin. /// </summary> /// <param name="uri">The uri of the origin in which the page may be displayed in a frame</param> public SecurityHeadersBuilder AddFrameOptionsSameOrigin(string uri) { _policy.SetHeaders[FrameOptionsConstants.Header] = string.Format(FrameOptionsConstants.AllowFromUri, uri); return this; } /// <summary> /// Add X-XSS-Protection 1 to all requests. /// Enables the XSS Protections /// </summary> public SecurityHeadersBuilder AddXssProtectionEnabled() { _policy.SetHeaders[XssProtectionConstants.Header] = XssProtectionConstants.Enabled; return this; } /// <summary> /// Add X-XSS-Protection 0 to all requests. /// Disables the XSS Protections offered by the user-agent. /// </summary> public SecurityHeadersBuilder AddXssProtectionDisabled() { _policy.SetHeaders[XssProtectionConstants.Header] = XssProtectionConstants.Disabled; return this; } /// <summary> /// Add X-XSS-Protection 1; mode=block to all requests. /// Enables XSS protections and instructs the user-agent to block the response in the event that script has been inserted from user input, instead of sanitizing. 
/// </summary> public SecurityHeadersBuilder AddXssProtectionBlock() { _policy.SetHeaders[XssProtectionConstants.Header] = XssProtectionConstants.Block; return this; } /// <summary> /// Add X-XSS-Protection 1; report=http://site.com/report to all requests. /// A partially supported directive that tells the user-agent to report potential XSS attacks to a single URL. Data will be POST'd to the report URL in JSON format. /// </summary> public SecurityHeadersBuilder AddXssProtectionReport(string reportUrl) { _policy.SetHeaders[XssProtectionConstants.Header] = string.Format(XssProtectionConstants.Report, reportUrl); return this; } /// <summary> /// Add Strict-Transport-Security max-age=<see cref="maxAge"/> to all requests. /// Tells the user-agent to cache the domain in the STS list for the number of seconds provided. /// </summary> public SecurityHeadersBuilder AddStrictTransportSecurityMaxAge(int maxAge = OneYearInSeconds) { _policy.SetHeaders[StrictTransportSecurityConstants.Header] = string.Format(StrictTransportSecurityConstants.MaxAge, maxAge); return this; } /// <summary> /// Add Strict-Transport-Security max-age=<see cref="maxAge"/>; includeSubDomains to all requests. /// Tells the user-agent to cache the domain in the STS list for the number of seconds provided and include any sub-domains. /// </summary> public SecurityHeadersBuilder AddStrictTransportSecurityMaxAgeIncludeSubDomains(int maxAge = OneYearInSeconds) { _policy.SetHeaders[StrictTransportSecurityConstants.Header] = string.Format(StrictTransportSecurityConstants.MaxAgeIncludeSubdomains, maxAge); return this; } /// <summary> /// Add Strict-Transport-Security max-age=0 to all requests. 
/// Tells the user-agent to remove, or not cache the host in the STS cache /// </summary> public SecurityHeadersBuilder AddStrictTransportSecurityNoCache() { _policy.SetHeaders[StrictTransportSecurityConstants.Header] = StrictTransportSecurityConstants.NoCache; return this; } /// <summary> /// Add X-Content-Type-Options nosniff to all requests. /// Can be set to protect against MIME type confusion attacks. /// </summary> public SecurityHeadersBuilder AddContentTypeOptionsNoSniff() { _policy.SetHeaders[ContentTypeOptionsConstants.Header] = ContentTypeOptionsConstants.NoSniff; return this; } /// <summary> /// Removes the Server header from all responses /// </summary> public SecurityHeadersBuilder RemoveServerHeader() { _policy.RemoveHeaders.Add(ServerConstants.Header); return this; } /// <summary> /// Adds a custom header to all requests /// </summary> /// <param name="header">The header name</param> /// <param name="value">The value for the header</param> /// <returns></returns> public SecurityHeadersBuilder AddCustomHeader(string header, string value) { if (string.IsNullOrEmpty(header)) { throw new ArgumentNullException(nameof(header)); } _policy.SetHeaders[header] = value; return this; } /// <summary> /// Remove a header from all requests /// </summary> /// <param name="header">The to remove</param> /// <returns></returns> public SecurityHeadersBuilder RemoveHeader(string header) { if (string.IsNullOrEmpty(header)) { throw new ArgumentNullException(nameof(header)); } _policy.RemoveHeaders.Add(header); return this; } /// <summary> /// Builds a new <see cref="SecurityHeadersPolicy"/> using the entries added. /// </summary> /// <returns>The constructed <see cref="SecurityHeadersPolicy"/>.</returns> public SecurityHeadersPolicy Build() { return _policy; } } }
<?cs include:"doctype.cs" ?> <?cs include:"macros.cs" ?> <html devsite> <?cs include:"head_tag.cs" ?> <body class="gc-documentation <?cs if:(reference.gms || reference.gcm) ?>google<?cs /if ?> <?cs if:(guide||develop||training||reference||tools||sdk) ?>develop<?cs elif:design ?>design<?cs elif:distribute ?>distribute<?cs /if ?>" itemscope itemtype="http://schema.org/Article"> <div id="doc-api-level" class="<?cs var:class.since ?>" style="display:none"></div> <a name="top"></a> <?cs include:"header.cs" ?> <div class="col-12" id="doc-col"> <div id="api-info-block"> <?cs # are there inherited members ?> <?cs each:cl=class.inherited ?> <?cs if:subcount(cl.methods) ?> <?cs set:inhmethods = #1 ?> <?cs /if ?> <?cs if:subcount(cl.constants) ?> <?cs set:inhconstants = #1 ?> <?cs /if ?> <?cs if:subcount(cl.fields) ?> <?cs set:inhfields = #1 ?> <?cs /if ?> <?cs if:subcount(cl.attrs) ?> <?cs set:inhattrs = #1 ?> <?cs /if ?> <?cs /each ?> <div class="sum-details-links"> <?cs if:inhattrs || inhconstants || inhfields || inhmethods || (!class.subclasses.hidden && (subcount(class.subclasses.direct) || subcount(class.subclasses.indirect))) ?> Summary: <?cs if:subcount(class.inners) ?> <a href="#nestedclasses">Nested Classes</a> <?cs set:linkcount = #1 ?> <?cs /if ?> <?cs if:subcount(class.attrs) ?> <?cs if:linkcount ?>&#124; <?cs /if ?><a href="#lattrs">XML Attrs</a> <?cs set:linkcount = #1 ?> <?cs /if ?> <?cs if:inhattrs ?> <?cs if:linkcount ?>&#124; <?cs /if ?><a href="#inhattrs">Inherited XML Attrs</a> <?cs set:linkcount = #1 ?> <?cs /if ?> <?cs if:subcount(class.enumConstants) ?> <?cs if:linkcount ?>&#124; <?cs /if ?><a href="#enumconstants">Enums</a> <?cs set:linkcount = #1 ?> <?cs /if ?> <?cs if:subcount(class.constants) ?> <?cs if:linkcount ?>&#124; <?cs /if ?><a href="#constants">Constants</a> <?cs set:linkcount = #1 ?> <?cs /if ?> <?cs if:inhconstants ?> <?cs if:linkcount ?>&#124; <?cs /if ?><a href="#inhconstants">Inherited Constants</a> <?cs set:linkcount = #1 ?> <?cs /if 
?> <?cs if:subcount(class.fields) ?> <?cs if:linkcount ?>&#124; <?cs /if ?><a href="#lfields">Fields</a> <?cs set:linkcount = #1 ?> <?cs /if ?> <?cs if:inhfields ?> <?cs if:linkcount ?>&#124; <?cs /if ?><a href="#inhfields">Inherited Fields</a> <?cs set:linkcount = #1 ?> <?cs /if ?> <?cs if:subcount(class.ctors.public) ?> <?cs if:linkcount ?>&#124; <?cs /if ?><a href="#pubctors">Ctors</a> <?cs set:linkcount = #1 ?> <?cs /if ?> <?cs if:subcount(class.ctors.protected) ?> <?cs if:linkcount ?>&#124; <?cs /if ?><a href="#proctors">Protected Ctors</a> <?cs set:linkcount = #1 ?> <?cs /if ?> <?cs if:subcount(class.methods.public) ?> <?cs if:linkcount ?>&#124; <?cs /if ?><a href="#pubmethods">Methods</a> <?cs set:linkcount = #1 ?> <?cs /if ?> <?cs if:subcount(class.methods.protected) ?> <?cs if:linkcount ?>&#124; <?cs /if ?><a href="#promethods">Protected Methods</a> <?cs set:linkcount = #1 ?> <?cs /if ?> <?cs if:inhmethods ?> <?cs if:linkcount ?>&#124; <?cs /if ?><a href="#inhmethods">Inherited Methods</a> <?cs /if ?> &#124; <a href="#" onclick="return toggleAllClassInherited()" id="toggleAllClassInherited">[Expand All]</a> <?cs /if ?> </div><!-- end sum-details-links --> <div class="api-level"> <?cs call:since_tags(class) ?><?cs if:class.deprecatedsince ?><br>Deprecated since <a href="<?cs var:toroot ?>guide/topics/manifest/uses-sdk-element.html#ApiLevels" >API level <?cs var:class.deprecatedsince ?></a><?cs /if ?> <?cs call:federated_refs(class) ?> </div> </div><!-- end api-info-block --> <?cs # this next line must be exactly like this to be parsed by eclipse ?> <!-- ======== START OF CLASS DATA ======== --> <div id="jd-header"> <?cs var:class.scope ?> <?cs var:class.static ?> <?cs var:class.final ?> <?cs var:class.abstract ?> <?cs var:class.kind ?> <h1 itemprop="name"><?cs var:class.name ?></h1> <?cs set:colspan = subcount(class.inheritance) ?> <?cs each:supr = class.inheritance ?> <?cs if:colspan == 2 ?> extends <?cs call:type_link(supr.short_class) ?><br/> <?cs /if ?> 
<?cs if:last(supr) && subcount(supr.interfaces) ?> implements <?cs each:t=supr.interfaces ?> <?cs call:type_link(t) ?> <?cs /each ?> <?cs /if ?> <?cs set:colspan = colspan-1 ?> <?cs /each ?> </div><!-- end header --> <div id="naMessage"></div> <div id="jd-content" class="api apilevel-<?cs var:class.since ?>"> <table class="jd-inheritance-table"> <?cs set:colspan = subcount(class.inheritance) ?> <?cs each:supr = class.inheritance ?> <tr> <?cs loop:i = 1, (subcount(class.inheritance)-colspan), 1 ?> <td class="jd-inheritance-space">&nbsp;<?cs if:(subcount(class.inheritance)-colspan) == i ?>&nbsp;&nbsp;&#x21b3;<?cs /if ?></td> <?cs /loop ?> <td colspan="<?cs var:colspan ?>" class="jd-inheritance-class-cell"><?cs if:colspan == 1 ?><?cs call:class_name(class.qualifiedType) ?><?cs else ?><?cs call:type_link(supr.class) ?><?cs /if ?></td> </tr> <?cs set:colspan = colspan-1 ?> <?cs /each ?> </table> <?cs # this next line must be exactly like this to be parsed by eclipse ?> <?cs if:subcount(class.subclasses.direct) && !class.subclasses.hidden ?> <table class="jd-sumtable jd-sumtable-subclasses"><tr><td colspan="12" style="border:none;margin:0;padding:0;"> <?cs call:expando_trigger("subclasses-direct", "closed") ?>Known Direct Subclasses <?cs call:expandable_class_list("subclasses-direct", class.subclasses.direct, "list") ?> </td></tr></table> <?cs /if ?> <?cs if:subcount(class.subclasses.indirect) && !class.subclasses.hidden ?> <table class="jd-sumtable jd-sumtable-subclasses"><tr><td colspan="12" style="border:none;margin:0;padding:0;"> <?cs call:expando_trigger("subclasses-indirect", "closed") ?>Known Indirect Subclasses <?cs call:expandable_class_list("subclasses-indirect", class.subclasses.indirect, "list") ?> </td></tr></table> <?cs /if ?> <div class="jd-descr"> <?cs call:deprecated_warning(class) ?> <?cs if:subcount(class.descr) ?> <h2>Class Overview</h2> <p itemprop="articleBody"><?cs call:tag_list(class.descr) ?></p> <?cs /if ?> <?cs call:see_also_tags(class.seeAlso) 
?> </div><!-- jd-descr --> <?cs # summary macros ?> <?cs def:write_method_summary(methods, included) ?> <?cs set:count = #1 ?> <?cs each:method = methods ?> <?cs # The apilevel-N class MUST BE LAST in the sequence of class names ?> <tr class="<?cs if:count % #2 ?>alt-color<?cs /if ?> api apilevel-<?cs var:method.since ?>" > <td class="jd-typecol"><nobr> <?cs var:method.abstract ?> <?cs var:method.default ?> <?cs var:method.static ?> <?cs var:method.final ?> <?cs call:type_link(method.generic) ?> <?cs call:type_link(method.returnType) ?></nobr> </td> <td class="jd-linkcol" width="100%"><nobr> <span class="sympad"><?cs call:cond_link(method.name, toroot, method.href, included) ?></span>(<?cs call:parameter_list(method.params) ?>)</nobr> <?cs if:subcount(method.shortDescr) || subcount(method.deprecated) ?> <div class="jd-descrdiv"><?cs call:short_descr(method) ?></div> <?cs /if ?> </td></tr> <?cs set:count = count + #1 ?> <?cs /each ?> <?cs /def ?> <?cs def:write_field_summary(fields, included) ?> <?cs set:count = #1 ?> <?cs each:field=fields ?> <tr class="<?cs if:count % #2 ?>alt-color<?cs /if ?> api apilevel-<?cs var:field.since ?>" > <td class="jd-typecol"><nobr> <?cs var:field.scope ?> <?cs var:field.static ?> <?cs var:field.final ?> <?cs call:type_link(field.type) ?></nobr></td> <td class="jd-linkcol"><?cs call:cond_link(field.name, toroot, field.href, included) ?></td> <td class="jd-descrcol" width="100%"><?cs call:short_descr(field) ?></td> </tr> <?cs set:count = count + #1 ?> <?cs /each ?> <?cs /def ?> <?cs def:write_constant_summary(fields, included) ?> <?cs set:count = #1 ?> <?cs each:field=fields ?> <tr class="<?cs if:count % #2 ?>alt-color<?cs /if ?> api apilevel-<?cs var:field.since ?>" > <td class="jd-typecol"><?cs call:type_link(field.type) ?></td> <td class="jd-linkcol"><?cs call:cond_link(field.name, toroot, field.href, included) ?></td> <td class="jd-descrcol" width="100%"><?cs call:short_descr(field) ?></td> </tr> <?cs set:count = count + #1 ?> <?cs 
/each ?> <?cs /def ?> <?cs def:write_attr_summary(attrs, included) ?> <?cs set:count = #1 ?> <tr> <td><nobr><em>Attribute Name</em></nobr></td> <td><nobr><em>Related Method</em></nobr></td> <td><nobr><em>Description</em></nobr></td> </tr> <?cs each:attr=attrs ?> <tr class="<?cs if:count % #2 ?>alt-color<?cs /if ?> api apilevel-<?cs var:attr.since ?>" > <td class="jd-linkcol"><?cs if:included ?><a href="<?cs var:toroot ?><?cs var:attr.href ?>"><?cs /if ?><?cs var:attr.name ?><?cs if:included ?></a><?cs /if ?></td> <td class="jd-linkcol"><?cs each:m=attr.methods ?> <?cs call:cond_link(m.name, toroot, m.href, included) ?> <?cs /each ?> </td> <td class="jd-descrcol" width="100%"><?cs call:short_descr(attr) ?>&nbsp;</td> </tr> <?cs set:count = count + #1 ?> <?cs /each ?> <?cs /def ?> <?cs def:write_inners_summary(classes) ?> <?cs set:count = #1 ?> <?cs each:cl=class.inners ?> <tr class="<?cs if:count % #2 ?>alt-color<?cs /if ?> api apilevel-<?cs var:cl.since ?>" > <td class="jd-typecol"><nobr> <?cs var:cl.scope ?> <?cs var:cl.static ?> <?cs var:cl.final ?> <?cs var:cl.abstract ?> <?cs var:cl.kind ?></nobr></td> <td class="jd-linkcol"><?cs call:type_link(cl.type) ?></td> <td class="jd-descrcol" width="100%"><?cs call:short_descr(cl) ?>&nbsp;</td> </tr> <?cs set:count = count + #1 ?> <?cs /each ?> <?cs /def ?> <?cs # end macros ?> <div class="jd-descr"> <?cs # make sure there's a summary view to display ?> <?cs if:subcount(class.inners) || subcount(class.attrs) || inhattrs || subcount(class.enumConstants) || subcount(class.constants) || inhconstants || subcount(class.fields) || inhfields || subcount(class.ctors.public) || subcount(class.ctors.protected) || subcount(class.methods.public) || subcount(class.methods.protected) || inhmethods ?> <h2>Summary</h2> <?cs if:subcount(class.inners) ?> <?cs # this next line must be exactly like this to be parsed by eclipse ?> <!-- ======== NESTED CLASS SUMMARY ======== --> <table id="nestedclasses" class="jd-sumtable"><tr><th 
colspan="12">Nested Classes</th></tr> <?cs call:write_inners_summary(class.inners) ?> <?cs /if ?> <?cs # this next line must be exactly like this to be parsed by eclipse ?> <?cs if:subcount(class.attrs) ?> <!-- =========== FIELD SUMMARY =========== --> <table id="lattrs" class="jd-sumtable"><tr><th colspan="12">XML Attributes</th></tr> <?cs call:write_attr_summary(class.attrs, 1) ?> <?cs /if ?> <?cs # if there are inherited attrs, write the table ?> <?cs if:inhattrs ?> <?cs # this next line must be exactly like this to be parsed by eclipse ?> <!-- =========== FIELD SUMMARY =========== --> <table id="inhattrs" class="jd-sumtable"><tr><th> <a href="#" class="toggle-all" onclick="return toggleAllInherited(this, null)">[Expand]</a> <div style="clear:left;">Inherited XML Attributes</div></th></tr> <?cs each:cl=class.inherited ?> <?cs if:subcount(cl.attrs) ?> <tr class="api apilevel-<?cs var:cl.since ?>" > <td colspan="12"> <?cs call:expando_trigger("inherited-attrs-"+cl.qualified, "closed") ?>From <?cs var:cl.kind ?> <?cs call:cond_link(cl.qualified, toroot, cl.link, cl.included) ?> <div id="inherited-attrs-<?cs var:cl.qualified ?>"> <div id="inherited-attrs-<?cs var:cl.qualified ?>-list" class="jd-inheritedlinks"> </div> <div id="inherited-attrs-<?cs var:cl.qualified ?>-summary" style="display: none;"> <table class="jd-sumtable-expando"> <?cs call:write_attr_summary(cl.attrs, cl.included) ?></table> </div> </div> </td></tr> <?cs /if ?> <?cs /each ?> </table> <?cs /if ?> <?cs if:subcount(class.enumConstants) ?> <?cs # this next line must be exactly like this to be parsed by eclipse ?> <!-- =========== ENUM CONSTANT SUMMARY =========== --> <table id="enumconstants" class="jd-sumtable"><tr><th colspan="12">Enum Values</th></tr> <?cs set:count = #1 ?> <?cs each:field=class.enumConstants ?> <tr class="<?cs if:count % #2 ?>alt-color<?cs /if ?> api apilevel-<?cs var:field.since ?>" > <td class="jd-descrcol"><?cs call:type_link(field.type) ?>&nbsp;</td> <td 
class="jd-linkcol"><?cs call:cond_link(field.name, toroot, field.href, cl.included) ?>&nbsp;</td> <td class="jd-descrcol" width="100%"><?cs call:short_descr(field) ?>&nbsp;</td> </tr> <?cs set:count = count + #1 ?> <?cs /each ?> <?cs /if ?> <?cs if:subcount(class.constants) ?> <?cs # this next line must be exactly like this to be parsed by eclipse ?> <!-- =========== ENUM CONSTANT SUMMARY =========== --> <table id="constants" class="jd-sumtable"><tr><th colspan="12">Constants</th></tr> <?cs call:write_constant_summary(class.constants, 1) ?> </table> <?cs /if ?> <?cs # if there are inherited constants, write the table ?> <?cs if:inhconstants ?> <?cs # this next line must be exactly like this to be parsed by eclipse ?> <!-- =========== ENUM CONSTANT SUMMARY =========== --> <table id="inhconstants" class="jd-sumtable"><tr><th> <a href="#" class="toggle-all" onclick="return toggleAllInherited(this, null)">[Expand]</a> <div style="clear:left;">Inherited Constants</div></th></tr> <?cs each:cl=class.inherited ?> <?cs if:subcount(cl.constants) ?> <tr class="api apilevel-<?cs var:cl.since ?>" > <td colspan="12"> <?cs call:expando_trigger("inherited-constants-"+cl.qualified, "closed") ?>From <?cs var:cl.kind ?> <?cs call:cond_link(cl.qualified, toroot, cl.link, cl.included) ?> <div id="inherited-constants-<?cs var:cl.qualified ?>"> <div id="inherited-constants-<?cs var:cl.qualified ?>-list" class="jd-inheritedlinks"> </div> <div id="inherited-constants-<?cs var:cl.qualified ?>-summary" style="display: none;"> <table class="jd-sumtable-expando"> <?cs call:write_constant_summary(cl.constants, cl.included) ?></table> </div> </div> </td></tr> <?cs /if ?> <?cs /each ?> </table> <?cs /if ?> <?cs if:subcount(class.fields) ?> <?cs # this next line must be exactly like this to be parsed by eclipse ?> <!-- =========== FIELD SUMMARY =========== --> <table id="lfields" class="jd-sumtable"><tr><th colspan="12">Fields</th></tr> <?cs call:write_field_summary(class.fields, 1) ?> </table> 
<?cs /if ?> <?cs # if there are inherited fields, write the table ?> <?cs if:inhfields ?> <?cs # this next line must be exactly like this to be parsed by eclipse ?> <!-- =========== FIELD SUMMARY =========== --> <table id="inhfields" class="jd-sumtable"><tr><th> <a href="#" class="toggle-all" onclick="return toggleAllInherited(this, null)">[Expand]</a> <div style="clear:left;">Inherited Fields</div></th></tr> <?cs each:cl=class.inherited ?> <?cs if:subcount(cl.fields) ?> <tr class="api apilevel-<?cs var:cl.since ?>" > <td colspan="12"> <?cs call:expando_trigger("inherited-fields-"+cl.qualified, "closed") ?>From <?cs var:cl.kind ?> <?cs call:cond_link(cl.qualified, toroot, cl.link, cl.included) ?> <div id="inherited-fields-<?cs var:cl.qualified ?>"> <div id="inherited-fields-<?cs var:cl.qualified ?>-list" class="jd-inheritedlinks"> </div> <div id="inherited-fields-<?cs var:cl.qualified ?>-summary" style="display: none;"> <table class="jd-sumtable-expando"> <?cs call:write_field_summary(cl.fields, cl.included) ?></table> </div> </div> </td></tr> <?cs /if ?> <?cs /each ?> </table> <?cs /if ?> <?cs if:subcount(class.ctors.public) ?> <?cs # this next line must be exactly like this to be parsed by eclipse ?> <!-- ======== CONSTRUCTOR SUMMARY ======== --> <table id="pubctors" class="jd-sumtable"><tr><th colspan="12">Public Constructors</th></tr> <?cs call:write_method_summary(class.ctors.public, 1) ?> </table> <?cs /if ?> <?cs if:subcount(class.ctors.protected) ?> <?cs # this next line must be exactly like this to be parsed by eclipse ?> <!-- ======== CONSTRUCTOR SUMMARY ======== --> <table id="proctors" class="jd-sumtable"><tr><th colspan="12">Protected Constructors</th></tr> <?cs call:write_method_summary(class.ctors.protected, 1) ?> </table> <?cs /if ?> <?cs if:subcount(class.methods.public) ?> <?cs # this next line must be exactly like this to be parsed by eclipse ?> <!-- ========== METHOD SUMMARY =========== --> <table id="pubmethods" class="jd-sumtable"><tr><th 
colspan="12">Public Methods</th></tr> <?cs call:write_method_summary(class.methods.public, 1) ?> </table> <?cs /if ?> <?cs if:subcount(class.methods.protected) ?> <?cs # this next line must be exactly like this to be parsed by eclipse ?> <!-- ========== METHOD SUMMARY =========== --> <table id="promethods" class="jd-sumtable"><tr><th colspan="12">Protected Methods</th></tr> <?cs call:write_method_summary(class.methods.protected, 1) ?> </table> <?cs /if ?> <?cs # if there are inherited methods, write the table ?> <?cs if:inhmethods ?> <?cs # this next line must be exactly like this to be parsed by eclipse ?> <!-- ========== METHOD SUMMARY =========== --> <table id="inhmethods" class="jd-sumtable"><tr><th> <a href="#" class="toggle-all" onclick="return toggleAllInherited(this, null)">[Expand]</a> <div style="clear:left;">Inherited Methods</div></th></tr> <?cs each:cl=class.inherited ?> <?cs if:subcount(cl.methods) ?> <tr class="api apilevel-<?cs var:cl.since ?>" > <td colspan="12"><?cs call:expando_trigger("inherited-methods-"+cl.qualified, "closed") ?> From <?cs var:cl.kind ?> <?cs if:cl.included ?> <a href="<?cs var:toroot ?><?cs var:cl.link ?>"><?cs var:cl.qualified ?></a> <?cs elif:cl.federated ?> <a href="<?cs var:cl.link ?>"><?cs var:cl.qualified ?></a> <?cs else ?> <?cs var:cl.qualified ?> <?cs /if ?> <div id="inherited-methods-<?cs var:cl.qualified ?>"> <div id="inherited-methods-<?cs var:cl.qualified ?>-list" class="jd-inheritedlinks"> </div> <div id="inherited-methods-<?cs var:cl.qualified ?>-summary" style="display: none;"> <table class="jd-sumtable-expando"> <?cs call:write_method_summary(cl.methods, cl.included) ?></table> </div> </div> </td></tr> <?cs /if ?> <?cs /each ?> </table> <?cs /if ?> <?cs /if ?> </div><!-- jd-descr (summary) --> <!-- Details --> <?cs def:write_field_details(fields) ?> <?cs each:field=fields ?> <?cs # this next line must be exactly like this to be parsed by eclipse ?> <?cs # the A tag in the next line must remain where it is, so 
that Eclipse can parse the docs ?> <A NAME="<?cs var:field.anchor ?>"></A> <?cs # The apilevel-N class MUST BE LAST in the sequence of class names ?> <div class="jd-details api apilevel-<?cs var:field.since ?>"> <h4 class="jd-details-title"> <span class="normal"> <?cs var:field.scope ?> <?cs var:field.static ?> <?cs var:field.final ?> <?cs call:type_link(field.type) ?> </span> <?cs var:field.name ?> </h4> <div class="api-level"> <?cs call:since_tags(field) ?> <?cs call:federated_refs(field) ?> </div> <div class="jd-details-descr"> <?cs call:description(field) ?> <?cs if:subcount(field.constantValue) ?> <div class="jd-tagdata"> <span class="jd-tagtitle">Constant Value: </span> <span> <?cs if:field.constantValue.isString ?> <?cs var:field.constantValue.str ?> <?cs else ?> <?cs var:field.constantValue.dec ?> (<?cs var:field.constantValue.hex ?>) <?cs /if ?> </span> </div> <?cs /if ?> </div> </div> <?cs /each ?> <?cs /def ?> <?cs def:write_method_details(methods) ?> <?cs each:method=methods ?> <?cs # the A tag in the next line must remain where it is, so that Eclipse can parse the docs ?> <A NAME="<?cs var:method.anchor ?>"></A> <?cs # The apilevel-N class MUST BE LAST in the sequence of class names ?> <div class="jd-details api apilevel-<?cs var:method.since ?>"> <h4 class="jd-details-title"> <span class="normal"> <?cs var:method.scope ?> <?cs var:method.abstract ?> <?cs var:method.default ?> <?cs var:method.static ?> <?cs var:method.final ?> <?cs call:type_link(method.returnType) ?> </span> <span class="sympad"><?cs var:method.name ?></span> <span class="normal">(<?cs call:parameter_list(method.params) ?>)</span> </h4> <div class="api-level"> <div><?cs call:since_tags(method) ?></div> <?cs call:federated_refs(method) ?> </div> <div class="jd-details-descr"> <?cs call:description(method) ?> </div> </div> <?cs /each ?> <?cs /def ?> <?cs def:write_attr_details(attrs) ?> <?cs each:attr=attrs ?> <?cs # the A tag in the next line must remain where it is, so that Eclipse 
can parse the docs ?> <A NAME="<?cs var:attr.anchor ?>"></A> <div class="jd-details"> <h4 class="jd-details-title"><?cs var:attr.name ?> </h4> <div class="jd-details-descr"> <?cs call:description(attr) ?> <div class="jd-tagdata"> <h5 class="jd-tagtitle">Related Methods</h5> <ul class="nolist"> <?cs each:m=attr.methods ?> <li><a href="<?cs var:toroot ?><?cs var:m.href ?>"><?cs var:m.name ?></a></li> <?cs /each ?> </ul> </div> </div> </div> <?cs /each ?> <?cs /def ?> <!-- XML Attributes --> <?cs if:subcount(class.attrs) ?> <?cs # this next line must be exactly like this to be parsed by eclipse ?> <!-- ========= FIELD DETAIL ======== --> <h2>XML Attributes</h2> <?cs call:write_attr_details(class.attrs) ?> <?cs /if ?> <!-- Enum Values --> <?cs if:subcount(class.enumConstants) ?> <?cs # this next line must be exactly like this to be parsed by eclipse ?> <!-- ========= ENUM CONSTANTS DETAIL ======== --> <h2>Enum Values</h2> <?cs call:write_field_details(class.enumConstants) ?> <?cs /if ?> <!-- Constants --> <?cs if:subcount(class.constants) ?> <?cs # this next line must be exactly like this to be parsed by eclipse ?> <!-- ========= ENUM CONSTANTS DETAIL ======== --> <h2>Constants</h2> <?cs call:write_field_details(class.constants) ?> <?cs /if ?> <!-- Fields --> <?cs if:subcount(class.fields) ?> <?cs # this next line must be exactly like this to be parsed by eclipse ?> <!-- ========= FIELD DETAIL ======== --> <h2>Fields</h2> <?cs call:write_field_details(class.fields) ?> <?cs /if ?> <!-- Public ctors --> <?cs if:subcount(class.ctors.public) ?> <?cs # this next line must be exactly like this to be parsed by eclipse ?> <!-- ========= CONSTRUCTOR DETAIL ======== --> <h2>Public Constructors</h2> <?cs call:write_method_details(class.ctors.public) ?> <?cs /if ?> <?cs # this next line must be exactly like this to be parsed by eclipse ?> <!-- ========= CONSTRUCTOR DETAIL ======== --> <!-- Protected ctors --> <?cs if:subcount(class.ctors.protected) ?> <h2>Protected 
<!-- Public methods -->
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

using System;
using System.Collections.Generic;
using DebuggerApi;
using NUnit.Framework;
using NSubstitute;
using Microsoft.VisualStudio;
using Microsoft.VisualStudio.Debugger.Interop;
using YetiVSI.DebugEngine;
using YetiVSI.Metrics;

namespace YetiVSI.Test.DebugEngine
{
    // Tests for DebugModule (exposed through IDebugModule3). Every collaborator is
    // replaced with an NSubstitute proxy in SetUp, so each test only configures the
    // substitutes it needs and then exercises the module through the interop interface.
    [TestFixture]
    class DebugModuleTests
    {
        // Load order passed to DebugModule.Factory.Create and echoed back by GetInfo.
        const uint _testLoadOrder = 123;

        CancelableTask.Factory _mockCancelableTaskFactory;
        IModuleFileLoader _mockModuleFileLoader;
        IModuleSearchLogHolder _mockModuleSearchLogHolder;
        SbModule _mockModule;
        ActionRecorder _mockActionRecorder;
        IDebugModule3 _debugModule;
        IDebugEngineHandler _mockEngineHandler;
        IGgpDebugProgram _mockDebugProgram;
        ISymbolSettingsProvider _mockSymbolSettingsProvider;

        [SetUp]
        public void SetUp()
        {
            _mockCancelableTaskFactory = Substitute.For<CancelableTask.Factory>();
            _mockModuleFileLoader = Substitute.For<IModuleFileLoader>();
            _mockModuleSearchLogHolder = Substitute.For<IModuleSearchLogHolder>();
            _mockModule = Substitute.For<SbModule>();
            // ActionRecorder is a class substitute; its constructor arguments are unused here.
            _mockActionRecorder = Substitute.For<ActionRecorder>(null, null);
            var mockModuleFileLoadRecorderFactory =
                Substitute.For<ModuleFileLoadMetricsRecorder.Factory>();
            _mockEngineHandler = Substitute.For<IDebugEngineHandler>();
            _mockDebugProgram = Substitute.For<IGgpDebugProgram>();
            _mockSymbolSettingsProvider = Substitute.For<ISymbolSettingsProvider>();

            // Build the object under test through its real factory so the wiring between
            // factory and Create() is exercised as well.
            _debugModule = new DebugModule
                .Factory(_mockCancelableTaskFactory, _mockActionRecorder,
                         mockModuleFileLoadRecorderFactory, _mockSymbolSettingsProvider)
                .Create(_mockModuleFileLoader, _mockModuleSearchLogHolder, _mockModule,
                        _testLoadOrder, _mockEngineHandler, _mockDebugProgram);
        }

        // GetInfo should translate SbModule state (file specs, load address, size,
        // bitness, compile units) into the requested MODULE_INFO fields.
        [Test]
        public void GetInfo()
        {
            ulong testCodeLoadAddress = 456;
            ulong testCodeSize = 789;
            string testSymbolFile = "symbol file";
            string testSymbolDirectory = "c:\\symbol\\dir";
            string testPlatformFile = "platform file";
            string testPlatformDirectory = "/platform/dir";

            var mockPlatformFileSpec = Substitute.For<SbFileSpec>();
            mockPlatformFileSpec.GetFilename().Returns(testPlatformFile);
            mockPlatformFileSpec.GetDirectory().Returns(testPlatformDirectory);

            var mockSymbolFileSpec = Substitute.For<SbFileSpec>();
            mockSymbolFileSpec.GetFilename().Returns(testSymbolFile);
            mockSymbolFileSpec.GetDirectory().Returns(testSymbolDirectory);

            _mockModule.GetPlatformFileSpec().Returns(mockPlatformFileSpec);
            _mockModule.GetSymbolFileSpec().Returns(mockSymbolFileSpec);
            _mockModule.GetCodeLoadAddress().Returns(testCodeLoadAddress);
            _mockModule.GetCodeSize().Returns(testCodeSize);
            _mockModule.Is64Bit().Returns(true);
            _mockModule.HasCompileUnits().Returns(true);

            var flags = enum_MODULE_INFO_FIELDS.MIF_NAME |
                enum_MODULE_INFO_FIELDS.MIF_URL |
                enum_MODULE_INFO_FIELDS.MIF_URLSYMBOLLOCATION |
                enum_MODULE_INFO_FIELDS.MIF_LOADADDRESS |
                enum_MODULE_INFO_FIELDS.MIF_PREFFEREDADDRESS |
                enum_MODULE_INFO_FIELDS.MIF_SIZE |
                enum_MODULE_INFO_FIELDS.MIF_LOADORDER |
                enum_MODULE_INFO_FIELDS.MIF_FLAGS;
            var moduleInfo = new MODULE_INFO[1];

            Assert.Multiple(() =>
            {
                Assert.That(_debugModule.GetInfo(flags, moduleInfo),
                            Is.EqualTo(VSConstants.S_OK));
                Assert.That(moduleInfo[0].dwValidFields, Is.EqualTo(flags));
                // Name/URL come from the platform file spec; the platform path uses '/'.
                Assert.That(moduleInfo[0].m_bstrName, Is.EqualTo(testPlatformFile));
                Assert.That(moduleInfo[0].m_bstrUrl,
                            Is.EqualTo(testPlatformDirectory + "/" + testPlatformFile));
                // Symbol location comes from the symbol file spec; that path uses '\'.
                Assert.That(moduleInfo[0].m_bstrUrlSymbolLocation,
                            Is.EqualTo(testSymbolDirectory + "\\" + testSymbolFile));
                Assert.That(moduleInfo[0].m_addrLoadAddress,
                            Is.EqualTo(testCodeLoadAddress));
                Assert.That(moduleInfo[0].m_dwSize, Is.EqualTo(testCodeSize));
                Assert.That(moduleInfo[0].m_dwLoadOrder, Is.EqualTo(_testLoadOrder));
                // Is64Bit + HasCompileUnits map to the 64BIT and SYMBOLS flags.
                Assert.That(moduleInfo[0].m_dwModuleFlags,
                            Is.EqualTo(enum_MODULE_FLAGS.MODULE_FLAG_64BIT |
                                       enum_MODULE_FLAGS.MODULE_FLAG_SYMBOLS));
            });
        }

        // GetSymbolInfo should surface the module's search log as the verbose search info.
        [Test]
        public void GetSymbolInfo()
        {
            string testSearchLog = @"C:\path\test.debug... File found.";
            _mockModuleSearchLogHolder.GetSearchLog(_mockModule).Returns(testSearchLog);

            var flags = enum_SYMBOL_SEARCH_INFO_FIELDS.SSIF_VERBOSE_SEARCH_INFO;
            var symbolSearchInfo = new MODULE_SYMBOL_SEARCH_INFO[1];

            Assert.Multiple(() =>
            {
                Assert.That(_debugModule.GetSymbolInfo(flags, symbolSearchInfo),
                            Is.EqualTo(VSConstants.S_OK));
                Assert.That(
                    (enum_SYMBOL_SEARCH_INFO_FIELDS)symbolSearchInfo[0].dwValidFields,
                    Is.EqualTo(flags));
                Assert.That(symbolSearchInfo[0].bstrVerboseSearchInfo,
                            Is.EqualTo(testSearchLog));
            });
        }

        // When symbols are not loaded, MIF_DEBUGMESSAGE should carry a fixed hint
        // pointing the user at the 'Symbol Load Information...' dialog.
        [Test]
        public void GetInfoNotifiesIfModuleIsNotLoaded()
        {
            _mockModule.HasSymbolsLoaded().Returns(false);

            enum_MODULE_INFO_FIELDS flags = enum_MODULE_INFO_FIELDS.MIF_DEBUGMESSAGE;
            var moduleInfo = new MODULE_INFO[1];

            int result = _debugModule.GetInfo(flags, moduleInfo);
            string error = "Symbols not loaded. Check 'Symbol Load Information...' for details.";

            Assert.Multiple(() =>
            {
                Assert.That(result, Is.EqualTo(VSConstants.S_OK));
                Assert.That(moduleInfo[0].dwValidFields, Is.EqualTo(flags));
                Assert.That(moduleInfo[0].m_bstrDebugMessage, Is.EqualTo(error));
            });
        }

        // With symbols loaded, no debug message field should be populated at all.
        [Test]
        public void GetInfoDoesNotSetDebugMessageIfModuleIsLoaded()
        {
            _mockModule.HasSymbolsLoaded().Returns(true);

            enum_MODULE_INFO_FIELDS flags = enum_MODULE_INFO_FIELDS.MIF_DEBUGMESSAGE;
            var moduleInfo = new MODULE_INFO[1];

            int result = _debugModule.GetInfo(flags, moduleInfo);

            Assert.Multiple(() =>
            {
                Assert.That(result, Is.EqualTo(VSConstants.S_OK));
                Assert.That(moduleInfo[0].dwValidFields,
                            Is.EqualTo(enum_MODULE_INFO_FIELDS.MIF_NONE));
            });
        }

        // A module on the exclusion list that nevertheless has compile units loaded
        // should not produce an include/exclude warning in the debug message.
        [Test]
        public void GetInfoDoesNotNotifyIfModuleIsExcludedButLoaded()
        {
            var excludedModules = new List<string>() { "excludedModule" };
            bool useIncludeList = false;
            _mockSymbolSettingsProvider.GetInclusionSettings().Returns(
                new SymbolInclusionSettings(useIncludeList, excludedModules,
                                            new List<string>()));
            _mockModule.GetPlatformFileSpec().GetFilename().Returns("excludedModule");
            _mockModule.HasCompileUnits().Returns(true);

            var flags = enum_MODULE_INFO_FIELDS.MIF_DEBUGMESSAGE;
            var moduleInfo = new MODULE_INFO[1];

            int result = _debugModule.GetInfo(flags, moduleInfo);

            Assert.Multiple(() =>
            {
                Assert.That(result, Is.EqualTo(VSConstants.S_OK));
                Assert.That(moduleInfo[0].m_bstrDebugMessage ?? "",
                            Does.Not.Contain("Include"));
                Assert.That(moduleInfo[0].m_bstrDebugMessage ?? "",
                            Does.Not.Contain("Exclude"));
            });
        }

        // With the symbol server disabled, the verbose search info should mention
        // that symbol server support is disabled (checked case-insensitively).
        [Test]
        public void GetSymbolInfoNotifiesIfSymbolServerSupportIsDisabled()
        {
            _mockSymbolSettingsProvider.IsSymbolServerEnabled.Returns(false);

            var flags = enum_SYMBOL_SEARCH_INFO_FIELDS.SSIF_VERBOSE_SEARCH_INFO;
            var symbolSearchInfo = new MODULE_SYMBOL_SEARCH_INFO[1];

            int result = _debugModule.GetSymbolInfo(flags, symbolSearchInfo);

            Assert.Multiple(() =>
            {
                Assert.That(result, Is.EqualTo(VSConstants.S_OK));
                Assert.That(symbolSearchInfo[0].bstrVerboseSearchInfo.ToLower(),
                            Does.Contain("symbol server support"));
                Assert.That(symbolSearchInfo[0].bstrVerboseSearchInfo.ToLower(),
                            Does.Contain("disabled"));
            });
        }

        // LoadSymbols should report completion by sending an IDebugSymbolSearchEvent2
        // to the engine handler for the owning program.
        [Test]
        public void LoadSymbolsSendsEvent()
        {
            var action = Substitute.For<IAction>();
            action.Record(Arg.Any<Func<bool>>()).Returns(true);
            _mockActionRecorder.CreateToolAction(ActionType.DebugModuleLoadSymbols)
                .Returns(action);

            // The cancelable-task factory hands back a task whose Result reports S_OK.
            var task = Substitute.For<ICancelableTask<LoadModuleFilesResult>>();
            task.Result.Returns(
                x => new LoadModuleFilesResult() { ResultCode = VSConstants.S_OK });
            _mockCancelableTaskFactory
                .Create(Arg.Any<string>(), Arg.Any<Func<ICancelable, LoadModuleFilesResult>>())
                .ReturnsForAnyArgs(task);

            _debugModule.LoadSymbols();

            _mockEngineHandler.Received(1).SendEvent(
                Arg.Is<DebugEvent>(e => e is IDebugSymbolSearchEvent2),
                _mockDebugProgram, (IDebugThread2)null);
        }
    }
}
// Copyright 2007-2008 The Apache Software Foundation.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
namespace MassTransit.Tests
{
    using System;
    using System.Collections.Generic;
    using Magnum.DateTimeExtensions;
    using Messages;
    using NUnit.Framework;
    using TestConsumers;
    using TextFixtures;

    // Verifies CurrentMessage semantics (Respond routing and header addresses)
    // using the local/remote loopback bus pair provided by the fixture base class.
    [TestFixture]
    public class MessageContext_Specs :
        LoopbackLocalAndRemoteTestFixture
    {
        // Without an explicit reply address, Respond publishes: both the correlated
        // consumer on the local bus and an unrelated consumer on the remote bus see it.
        [Test]
        public void A_response_should_be_published_if_no_reply_address_is_specified()
        {
            PingMessage ping = new PingMessage();

            TestMessageConsumer<PongMessage> otherConsumer = new TestMessageConsumer<PongMessage>();
            RemoteBus.Subscribe(otherConsumer);

            TestCorrelatedConsumer<PongMessage, Guid> consumer =
                new TestCorrelatedConsumer<PongMessage, Guid>(ping.CorrelationId);
            LocalBus.Subscribe(consumer);

            FutureMessage<PongMessage> pong = new FutureMessage<PongMessage>();

            // The remote bus answers each ping with a correlated pong via Respond.
            RemoteBus.Subscribe<PingMessage>(message =>
                {
                    pong.Set(new PongMessage(message.CorrelationId));
                    CurrentMessage.Respond(pong.Message);
                });

            LocalBus.Publish(ping);

            Assert.IsTrue(pong.IsAvailable(3.Seconds()), "No pong generated");

            consumer.ShouldHaveReceivedMessage(pong.Message, 3.Seconds());
            // Published responses are broadcast, so the unrelated consumer gets it too.
            otherConsumer.ShouldHaveReceivedMessage(pong.Message, 1.Seconds());
        }

        // With SendResponseTo, Respond is sent directly to the reply address only:
        // the remote bus consumer must NOT receive the pong.
        [Test]
        public void A_response_should_be_sent_directly_if_a_reply_address_is_specified()
        {
            PingMessage ping = new PingMessage();

            TestMessageConsumer<PongMessage> otherConsumer = new TestMessageConsumer<PongMessage>();
            RemoteBus.Subscribe(otherConsumer);

            TestCorrelatedConsumer<PongMessage, Guid> consumer =
                new TestCorrelatedConsumer<PongMessage, Guid>(ping.CorrelationId);
            LocalBus.Subscribe(consumer);

            FutureMessage<PongMessage> pong = new FutureMessage<PongMessage>();

            RemoteBus.Subscribe<PingMessage>(message =>
                {
                    pong.Set(new PongMessage(message.CorrelationId));
                    CurrentMessage.Respond(pong.Message);
                });

            LocalBus.Publish(ping, context => context.SendResponseTo(LocalBus));

            Assert.IsTrue(pong.IsAvailable(3.Seconds()), "No pong generated");

            consumer.ShouldHaveReceivedMessage(pong.Message, 3.Seconds());
            otherConsumer.ShouldNotHaveReceivedMessage(pong.Message, 1.Seconds());
        }

        // The destination address header should be the receiving endpoint's URI.
        [Test]
        public void The_destination_address_should_pass()
        {
            FutureMessage<PingMessage> received = new FutureMessage<PingMessage>();

            LocalBus.Subscribe<PingMessage>(message =>
                {
                    Assert.AreEqual(LocalBus.Endpoint.Uri, CurrentMessage.Headers.DestinationAddress);

                    received.Set(message);
                });

            LocalBus.Publish(new PingMessage());

            Assert.IsTrue(received.IsAvailable(5.Seconds()), "No message was received");
        }

        // SendFaultTo should populate the fault address header with that endpoint's URI.
        [Test]
        public void The_fault_address_should_pass()
        {
            FutureMessage<PingMessage> received = new FutureMessage<PingMessage>();

            LocalBus.Subscribe<PingMessage>(message =>
                {
                    Assert.AreEqual(LocalBus.Endpoint.Uri, CurrentMessage.Headers.FaultAddress);

                    received.Set(message);
                });

            LocalBus.Publish(new PingMessage(), context => context.SendFaultTo(LocalBus));

            Assert.IsTrue(received.IsAvailable(5.Seconds()), "No message was received");
        }

        // SendResponseTo should populate the response address header with that endpoint's URI.
        [Test]
        public void The_response_address_should_pass()
        {
            FutureMessage<PingMessage> received = new FutureMessage<PingMessage>();

            LocalBus.Subscribe<PingMessage>(message =>
                {
                    Assert.AreEqual(LocalBus.Endpoint.Uri, CurrentMessage.Headers.ResponseAddress);

                    received.Set(message);
                });

            LocalBus.Publish(new PingMessage(), context => context.SendResponseTo(LocalBus));

            Assert.IsTrue(received.IsAvailable(5.Seconds()), "No message was received");
        }

        // The source address header should be the publishing endpoint's URI.
        [Test]
        public void The_source_address_should_pass()
        {
            FutureMessage<PingMessage> received = new FutureMessage<PingMessage>();

            LocalBus.Subscribe<PingMessage>(message =>
                {
                    Assert.AreEqual(LocalBus.Endpoint.Uri, CurrentMessage.Headers.SourceAddress);

                    received.Set(message);
                });

            LocalBus.Publish(new PingMessage());

            Assert.IsTrue(received.IsAvailable(5.Seconds()), "No message was received");
        }
    }

    // Verifies the IfNoSubscribers publish callback.
    [TestFixture]
    public class When_publishing_a_message_with_no_consumers :
        LoopbackLocalAndRemoteTestFixture
    {
        // IfNoSubscribers should fire (with the message) when nothing is subscribed.
        [Test]
        public void The_method_should_be_called_to_notify_the_caller()
        {
            var ping = new PingMessage();

            bool noConsumers = false;

            LocalBus.Publish(ping, x =>
                {
                    x.IfNoSubscribers<PingMessage>(message =>
                        {
                            Assert.IsInstanceOf<PingMessage>(message);
                            noConsumers = true;
                        });
                });

            Assert.IsTrue(noConsumers, "There should have been no consumers");
        }

        // The callback is per-publish; a later Publish without it must not re-trigger it.
        [Test]
        public void The_method_should_not_carry_over_the_subsequent_calls()
        {
            var ping = new PingMessage();

            int hitCount = 0;

            LocalBus.Publish(ping, x => x.IfNoSubscribers<PingMessage>(message => hitCount++));
            LocalBus.Publish(ping);

            Assert.AreEqual(1, hitCount, "There should have been no consumers");
        }
    }

    // Verifies the ForEachSubscriber publish callback.
    [TestFixture]
    public class When_publishing_a_message_with_an_each_consumer_action_specified :
        LoopbackLocalAndRemoteTestFixture
    {
        // No subscribers -> the per-subscriber action is never invoked.
        [Test]
        public void The_method_should_not_be_called_when_there_are_no_subscribers()
        {
            var ping = new PingMessage();

            List<Uri> consumers = new List<Uri>();

            LocalBus.Publish(ping, x =>
                {
                    x.ForEachSubscriber<PingMessage>((message,consumer) => consumers.Add(consumer.Uri));
                });

            Assert.AreEqual(0, consumers.Count);
        }

        // One subscriber -> invoked once with that endpoint's URI.
        [Test]
        public void The_method_should_be_called_for_each_destination_endpoint()
        {
            LocalBus.Subscribe<PingMessage>(x => { });

            var ping = new PingMessage();

            List<Uri> consumers = new List<Uri>();

            LocalBus.Publish(ping, x =>
                {
                    x.ForEachSubscriber<PingMessage>((message,endpoint) => consumers.Add(endpoint.Uri));
                });

            Assert.AreEqual(1, consumers.Count);
            Assert.AreEqual(LocalBus.Endpoint.Uri, consumers[0]);
        }

        // The callback is per-publish; a later Publish must not reuse it.
        [Test]
        public void The_method_should_not_carry_over_to_the_next_call_context()
        {
            var ping = new PingMessage();

            List<Uri> consumers = new List<Uri>();

            LocalBus.Publish(ping, x =>
                {
                    x.ForEachSubscriber<PingMessage>((message,endpoint) => consumers.Add(endpoint.Uri));
                });

            LocalBus.Subscribe<PingMessage>(x => { });
            LocalBus.Publish(ping);

            Assert.AreEqual(0, consumers.Count);
        }

        // Two subscribers on different buses -> invoked once per destination endpoint.
        [Test]
        public void The_method_should_be_called_for_each_destination_endpoint_when_there_are_multiple()
        {
            LocalBus.Subscribe<PingMessage>(x => { });
            RemoteBus.Subscribe<PingMessage>(x => { });

            var ping = new PingMessage();

            List<Uri> consumers = new List<Uri>();

            LocalBus.Publish(ping, x =>
                {
                    x.ForEachSubscriber<PingMessage>((message,endpoint) => consumers.Add(endpoint.Uri));
                });

            Assert.AreEqual(2, consumers.Count);
            Assert.IsTrue(consumers.Contains(LocalBus.Endpoint.Uri));
            Assert.IsTrue(consumers.Contains(RemoteBus.Endpoint.Uri));
        }
    }
}
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.

using System.Threading;
using System.Threading.Tasks;
using Xunit;

namespace System.ComponentModel.EventBasedAsync.Tests
{
    public class AsyncOperationTests
    {
        // Upper bound, in seconds, for any wait inside these tests.
        private const int SpinTimeoutSeconds = 30;

        [Fact]
        public static void Noop()
        {
            // A trivial operation dispatched through AsyncOperationManager should
            // complete without cancellation and without an error.
            Task.Run(() =>
            {
                var asyncOp = new TestAsyncOperation(op => { });
                asyncOp.Wait();

                Assert.True(asyncOp.Completed);
                Assert.False(asyncOp.Cancelled);
                Assert.Null(asyncOp.Exception);
            }).Wait();
        }

        [Fact]
        public static void ThrowAfterAsyncComplete()
        {
            // After the wrapped operation has finished, every further use of the
            // underlying AsyncOperation must be rejected.
            Task.Run(() =>
            {
                var asyncOp = new TestAsyncOperation(op => { });
                asyncOp.Wait();

                SendOrPostCallback doNothing = state => { };
                Assert.Throws<InvalidOperationException>(
                    () => asyncOp.AsyncOperation.Post(doNothing, null));
                Assert.Throws<InvalidOperationException>(
                    () => asyncOp.AsyncOperation.PostOperationCompleted(doNothing, null));
                Assert.Throws<InvalidOperationException>(
                    () => asyncOp.AsyncOperation.OperationCompleted());
            }).Wait();
        }

        [Fact]
        public static void ThrowAfterSynchronousComplete()
        {
            // Completing an operation synchronously must poison later calls, too.
            Task.Run(() =>
            {
                AsyncOperation operation = AsyncOperationManager.CreateOperation(null);
                operation.OperationCompleted();

                SendOrPostCallback doNothing = state => { };
                Assert.Throws<InvalidOperationException>(
                    () => operation.Post(doNothing, null));
                Assert.Throws<InvalidOperationException>(
                    () => operation.PostOperationCompleted(doNothing, null));
                Assert.Throws<InvalidOperationException>(
                    () => operation.OperationCompleted());
            }).Wait();
        }

        [Fact]
        public static void Cancel()
        {
            // Cancellation must travel all the way through
            // PostOperationCompleted(callback, AsyncCompletedEventArgs).
            Task.Run(() =>
            {
                var cancellationSignal = new ManualResetEventSlim();
                var asyncOp = new TestAsyncOperation(op =>
                {
                    // The worker blocks until Cancel() sets the shared event.
                    Assert.True(cancellationSignal.Wait(TimeSpan.FromSeconds(SpinTimeoutSeconds)));
                }, cancelEvent: cancellationSignal);

                asyncOp.Cancel();
                asyncOp.Wait();

                Assert.True(asyncOp.Completed);
                Assert.True(asyncOp.Cancelled);
                Assert.Null(asyncOp.Exception);
            }).Wait();
        }

        [Fact]
        public static void Throw()
        {
            // An exception thrown by the worker must surface through
            // PostOperationCompleted(callback, AsyncCompletedEventArgs).
            Task.Run(() =>
            {
                var asyncOp = new TestAsyncOperation(op =>
                {
                    throw new TestException("Test throw");
                });

                Assert.Throws<TestException>(() => asyncOp.Wait());
            }).Wait();
        }

        [Fact]
        public static void PostNullDelegate()
        {
            // The xUnit SynchronizationContext (AsyncTestSyncContext) interferes with the
            // context AsyncOperation uses when an exception is thrown — SC.OperationCompleted()
            // is never called — so a null context is installed for the duration of this test.
            SynchronizationContext previousContext = SynchronizationContext.Current;
            try
            {
                SynchronizationContext.SetSynchronizationContext(null);

                // The state argument is deliberately non-null: only the delegate is null here.
                var userState = new object();
                AsyncOperation operation = AsyncOperationManager.CreateOperation(userState);
                Assert.Throws<ArgumentNullException>(() => operation.Post(null, userState));
                Assert.Throws<ArgumentNullException>(() => operation.PostOperationCompleted(null, userState));
            }
            finally
            {
                SynchronizationContext.SetSynchronizationContext(previousContext);
            }
        }

        // Wraps an AsyncOperation: runs the supplied delegate asynchronously and then
        // completes (normally, cancelled, or faulted) via PostOperationCompleted.
        public class TestAsyncOperation
        {
            private readonly object _operationId;
            private readonly Action<TestAsyncOperation> _executeDelegate;
            private readonly ManualResetEventSlim _cancelEvent;
            private readonly ManualResetEventSlim _completeEvent;

            public AsyncOperation AsyncOperation { get; private set; }

            public bool Completed
            {
                get { return _completeEvent.IsSet; }
            }

            public bool Cancelled
            {
                get { return _cancelEvent.IsSet; }
            }

            public Exception Exception { get; private set; }

            public TestAsyncOperation(Action<TestAsyncOperation> executeDelegate,
                ManualResetEventSlim cancelEvent = null)
            {
                // A unique object is used as the user-supplied state so the tests can
                // verify that it is threaded through the AsyncOperation unchanged.
                _operationId = new object();
                AsyncOperation = AsyncOperationManager.CreateOperation(_operationId);
                Assert.Same(_operationId, AsyncOperation.UserSuppliedState);
                Assert.Same(AsyncOperationManager.SynchronizationContext,
                            AsyncOperation.SynchronizationContext);

                _completeEvent = new ManualResetEventSlim(false);
                _cancelEvent = cancelEvent ?? new ManualResetEventSlim(false);

                // Hand the work off to the wrapped synchronization context.
                _executeDelegate = executeDelegate;
                AsyncOperation.Post((SendOrPostCallback)ExecuteWorker, _operationId);
            }

            // Blocks until completion, then rethrows any captured worker exception.
            public void Wait()
            {
                Assert.True(_completeEvent.Wait(TimeSpan.FromSeconds(SpinTimeoutSeconds)));
                if (Exception != null)
                {
                    throw Exception;
                }
            }

            public void Cancel()
            {
                CompleteOperationAsync(cancelled: true);
            }

            private void ExecuteWorker(object operationId)
            {
                Assert.Same(_operationId, operationId);

                Exception failure = null;
                try
                {
                    _executeDelegate(this);
                }
                catch (Exception e)
                {
                    failure = e;
                }
                finally
                {
                    CompleteOperationAsync(exception: failure);
                }
            }

            private void CompleteOperationAsync(Exception exception = null, bool cancelled = false)
            {
                // Only the first completion wins; any later attempt is ignored.
                if (Completed || Cancelled)
                {
                    return;
                }

                AsyncOperation.PostOperationCompleted(
                    (SendOrPostCallback)OnOperationCompleted,
                    new AsyncCompletedEventArgs(exception, cancelled, _operationId));
            }

            private void OnOperationCompleted(object state)
            {
                AsyncCompletedEventArgs e = Assert.IsType<AsyncCompletedEventArgs>(state);
                Assert.Equal(_operationId, e.UserState);
                Exception = e.Error;

                // _cancelEvent is set before _completeEvent so that a waiter released by
                // _completeEvent can never observe a stale Cancelled value.
                if (e.Cancelled)
                {
                    _cancelEvent.Set();
                }

                _completeEvent.Set();
            }
        }
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. using System; using System.Collections.Generic; using System.Text.Json; using Azure.Core; namespace Azure.Security.KeyVault.Certificates { /// <summary> /// A policy which governs the lifecycle a properties of a certificate managed by Azure Key Vault. /// </summary> public class CertificatePolicy : IJsonSerializable, IJsonDeserializable { private const string DefaultSubject = "CN=DefaultPolicy"; private const string DefaultIssuerName = "Self"; private const string KeyTypePropertyName = "kty"; private const string ReuseKeyPropertyName = "reuse_key"; private const string ExportablePropertyName = "exportable"; private const string CurveNamePropertyName = "crv"; private const string KeySizePropertyName = "key_size"; private const string KeyPropsPropertyName = "key_props"; private const string SecretPropsPropertyName = "secret_props"; private const string X509PropsPropertyName = "x509_props"; private const string LifetimeActionsPropertyName = "lifetime_actions"; private const string IssuerPropertyName = "issuer"; private const string AttributesPropertyName = "attributes"; private const string ContentTypePropertyName = "contentType"; private const string SubjectPropertyName = "subject"; private const string SansPropertyName = "sans"; private const string KeyUsagePropertyName = "key_usage"; private const string EkusPropertyName = "ekus"; private const string ValidityMonthsPropertyName = "validity_months"; private const string EnabledPropertyName = "enabled"; private const string CreatedPropertyName = "created"; private const string UpdatedPropertyName = "updated"; private static readonly JsonEncodedText s_keyTypePropertyNameBytes = JsonEncodedText.Encode(KeyTypePropertyName); private static readonly JsonEncodedText s_reuseKeyPropertyNameBytes = JsonEncodedText.Encode(ReuseKeyPropertyName); private static readonly JsonEncodedText s_exportablePropertyNameBytes = 
JsonEncodedText.Encode(ExportablePropertyName); private static readonly JsonEncodedText s_curveNamePropertyNameBytes = JsonEncodedText.Encode(CurveNamePropertyName); private static readonly JsonEncodedText s_keySizePropertyNameBytes = JsonEncodedText.Encode(KeySizePropertyName); private static readonly JsonEncodedText s_lifetimeActionsPropertyNameBytes = JsonEncodedText.Encode(LifetimeActionsPropertyName); private static readonly JsonEncodedText s_issuerPropertyNameBytes = JsonEncodedText.Encode(IssuerPropertyName); private static readonly JsonEncodedText s_attributesPropertyNameBytes = JsonEncodedText.Encode(AttributesPropertyName); private static readonly JsonEncodedText s_keyPropsPropertyNameBytes = JsonEncodedText.Encode(KeyPropsPropertyName); private static readonly JsonEncodedText s_secretPropsPropertyNameBytes = JsonEncodedText.Encode(SecretPropsPropertyName); private static readonly JsonEncodedText s_x509PropsPropertyNameBytes = JsonEncodedText.Encode(X509PropsPropertyName); private static readonly JsonEncodedText s_contentTypePropertyNameBytes = JsonEncodedText.Encode(ContentTypePropertyName); private static readonly JsonEncodedText s_subjectPropertyNameBytes = JsonEncodedText.Encode(SubjectPropertyName); private static readonly JsonEncodedText s_sansPropertyNameBytes = JsonEncodedText.Encode(SansPropertyName); private static readonly JsonEncodedText s_keyUsagePropertyNameBytes = JsonEncodedText.Encode(KeyUsagePropertyName); private static readonly JsonEncodedText s_ekusPropertyNameBytes = JsonEncodedText.Encode(EkusPropertyName); private static readonly JsonEncodedText s_validityMonthsPropertyNameBytes = JsonEncodedText.Encode(ValidityMonthsPropertyName); private static readonly JsonEncodedText s_enabledPropertyNameBytes = JsonEncodedText.Encode(EnabledPropertyName); private IssuerParameters _issuer; /// <summary> /// Initializes a new instance of the <see cref="CertificatePolicy"/> class. 
/// </summary> /// <param name="issuerName">The name of an issuer for the certificate, including values from <see cref="WellKnownIssuerNames"/>.</param> /// <param name="subject">The subject name of the certificate, such as "CN=contoso.com".</param> /// <exception cref="ArgumentException"><paramref name="subject"/> or <paramref name="issuerName"/> is empty.</exception> /// <exception cref="ArgumentNullException"><paramref name="subject"/> or <paramref name="issuerName"/> is null.</exception> public CertificatePolicy(string issuerName, string subject) { Argument.AssertNotNullOrEmpty(issuerName, nameof(issuerName)); Argument.AssertNotNullOrEmpty(subject, nameof(subject)); IssuerName = issuerName; Subject = subject; } /// <summary> /// Initializes a new instance of the <see cref="CertificatePolicy"/> class. /// </summary> /// <param name="issuerName">The name of an issuer for the certificate, including values from <see cref="WellKnownIssuerNames"/>.</param> /// <param name="subjectAlternativeNames">The subject alternative names (SANs) of the certificate.</param> /// <exception cref="ArgumentException"><paramref name="issuerName"/> is empty or <paramref name="subjectAlternativeNames"/> contains empty collection properties.</exception> /// <exception cref="ArgumentNullException"><paramref name="subjectAlternativeNames"/> or <paramref name="issuerName"/> is null.</exception> public CertificatePolicy(string issuerName, SubjectAlternativeNames subjectAlternativeNames) { Argument.AssertNotNullOrEmpty(issuerName, nameof(issuerName)); Argument.AssertNotNull(subjectAlternativeNames, nameof(subjectAlternativeNames)); if (subjectAlternativeNames.IsEmpty) { throw new ArgumentException("Value cannot contain empty collection properties.", nameof(subjectAlternativeNames)); } SubjectAlternativeNames = subjectAlternativeNames; IssuerName = issuerName; } /// <summary> /// Initializes a new instance of the <see cref="CertificatePolicy"/> class. 
/// </summary> /// <param name="issuerName">The name of an issuer for the certificate, including values from <see cref="WellKnownIssuerNames"/>.</param> /// <param name="subject">The subject name of the certificate, such as "CN=contoso.com".</param> /// <param name="subjectAlternativeNames">The subject alternative names (SANs) of the certificate.</param> /// <exception cref="ArgumentException"><paramref name="subject"/> or <paramref name="issuerName"/> is empty, or <paramref name="subjectAlternativeNames"/> contains empty collection properties.</exception> /// <exception cref="ArgumentNullException"><paramref name="subject"/>, <paramref name="issuerName"/>, or <paramref name="subjectAlternativeNames"/> is null.</exception> public CertificatePolicy(string issuerName, string subject, SubjectAlternativeNames subjectAlternativeNames) { Argument.AssertNotNullOrEmpty(issuerName, nameof(issuerName)); Argument.AssertNotNullOrEmpty(subject, nameof(subject)); Argument.AssertNotNull(subjectAlternativeNames, nameof(subjectAlternativeNames)); if (subjectAlternativeNames.IsEmpty) { throw new ArgumentException("Value cannot contain empty collection properties.", nameof(subjectAlternativeNames)); } Subject = subject; IssuerName = issuerName; SubjectAlternativeNames = subjectAlternativeNames; } /// <summary> /// Initializes a new instance of the <see cref="CertificatePolicy"/> class to pass to /// <see cref="CertificateClient.ImportCertificate(ImportCertificateOptions, System.Threading.CancellationToken)"/> or /// <see cref="CertificateClient.ImportCertificateAsync(ImportCertificateOptions, System.Threading.CancellationToken)"/>. /// </summary> /// <remarks> /// <para> /// Use this constructor if, for example, you want to import a PEM-encoded certificate. The <see cref="IssuerName"/> will be /// <see cref="WellKnownIssuerNames.Unknown"/> and the <see cref="Subject"/> and <see cref="SubjectAlternativeNames"/> will /// be parsed from the imported certificate. 
/// <code snippet="Snippet:CertificateClientLiveTests_VerifyImportCertificatePem" language="csharp"> /// byte[] certificateBytes = File.ReadAllBytes(&quot;certificate.pem&quot;); /// /// ImportCertificateOptions options = new ImportCertificateOptions(certificateName, certificateBytes) /// { /// Policy = new CertificatePolicy /// { /// ContentType = CertificateContentType.Pem /// } /// }; /// </code> /// </para> /// <para> /// You must use one of the other constructors to pass an instance to /// <see cref="CertificateClient.StartCreateCertificate(string, CertificatePolicy, bool?, IDictionary{string, string}, System.Threading.CancellationToken)"/> or /// <see cref="CertificateClient.StartCreateCertificateAsync(string, CertificatePolicy, bool?, IDictionary{string, string}, System.Threading.CancellationToken)"/> /// because <see cref="IssuerName"/> and one of <see cref="Subject"/> or <see cref="SubjectAlternativeNames"/> are required. /// </para> /// </remarks> public CertificatePolicy() { } /// <summary> /// Gets a new <see cref="CertificatePolicy"/> suitable for self-signed certificate requests /// with the <see cref="Subject"/> "CN=DefaultPolicy". To change the Subject, create a new instance /// using one of the constructors. /// </summary> public static CertificatePolicy Default => new CertificatePolicy(DefaultIssuerName, DefaultSubject); /// <summary> /// Gets or sets the type of backing key to be generated when issuing new certificates. /// </summary> public CertificateKeyType? KeyType { get; set; } /// <summary> /// Gets or sets a value indicating whether the certificate key should be reused when rotating the certificate. /// </summary> public bool? ReuseKey { get; set; } /// <summary> /// Gets or sets a value indicating whether the certificate key is exportable from the vault or secure certificate store. /// </summary> public bool? Exportable { get; set; } /// <summary> /// Gets or sets the curve which back an Elliptic Curve (EC) key. 
/// </summary> public CertificateKeyCurveName? KeyCurveName { get; set; } /// <summary> /// Gets or sets the size of the RSA key. The value must be a valid RSA key length such as 2048 or 4092. /// </summary> public int? KeySize { get; set; } /// <summary> /// Gets the subject name of a certificate. /// </summary> public string Subject { get; internal set; } /// <summary> /// Gets the subject alternative names (SANs) of a certificate. /// </summary> public SubjectAlternativeNames SubjectAlternativeNames { get; internal set; } /// <summary> /// Gets the name of an issuer for a certificate. /// </summary> public string IssuerName { get => _issuer.IssuerName; internal set => _issuer.IssuerName = value; } /// <summary> /// Gets or sets the <see cref="CertificateContentType"/> of the certificate. /// </summary> /// <remarks> /// Set to <see cref="CertificateContentType.Pkcs12"/> when <see cref="KeyVaultCertificate.Cer"/> contains your raw PKCS#12/PFX bytes, /// or to <see cref="CertificateContentType.Pem"/> when <see cref="KeyVaultCertificate.Cer"/> contains your ASCII PEM-encoded bytes. /// If not specified, <see cref="CertificateContentType.Pkcs12"/> is assumed. /// </remarks> public CertificateContentType? ContentType { get; set; } /// <summary> /// Gets or sets the certificate type of a certificate. /// </summary> public string CertificateType { get => _issuer.CertificateType; set => _issuer.CertificateType = value; } /// <summary> /// Gets or sets a value indicating whether a certificate should be published to the certificate transparency list when created. /// </summary> public bool? CertificateTransparency { get => _issuer.CertificateTransparency; set => _issuer.CertificateTransparency = value; } /// <summary> /// Gets or sets the validity period for a certificate in months. /// </summary> public int? ValidityInMonths { get; set; } /// <summary> /// Gets or sets a value indicating whether the certificate is currently enabled. 
If null, the server default will be used. /// </summary> public bool? Enabled { get; set; } /// <summary> /// Gets a <see cref="DateTimeOffset"/> indicating when the certificate was updated. /// </summary> public DateTimeOffset? UpdatedOn { get; internal set; } /// <summary> /// Gets a <see cref="DateTimeOffset"/> indicating when the certificate was created. /// </summary> public DateTimeOffset? CreatedOn { get; internal set; } /// <summary> /// Gets the allowed usages for the key of the certificate. /// </summary> public IList<CertificateKeyUsage> KeyUsage { get; } = new List<CertificateKeyUsage>(); /// <summary> /// Gets the allowed enhanced key usages (EKUs) of the certificate. /// </summary> public IList<string> EnhancedKeyUsage { get; } = new List<string>(); /// <summary> /// Gets the actions to be executed at specified times in the certificates lifetime. /// Currently, only a single <see cref="LifetimeAction"/> is allowed. /// </summary> public IList<LifetimeAction> LifetimeActions { get; } = new List<LifetimeAction>(); void IJsonDeserializable.ReadProperties(JsonElement json) { foreach (JsonProperty prop in json.EnumerateObject()) { switch (prop.Name) { case KeyPropsPropertyName: ReadKeyProperties(prop.Value); break; case SecretPropsPropertyName: ReadSecretProperties(prop.Value); break; case X509PropsPropertyName: ReadX509CertificateProperties(prop.Value); break; case IssuerPropertyName: _issuer.ReadProperties(prop.Value); break; case AttributesPropertyName: ReadAttributesProperties(prop.Value); break; case LifetimeActionsPropertyName: foreach (JsonElement actionElem in prop.Value.EnumerateArray()) { LifetimeActions.Add(LifetimeAction.FromJsonObject(actionElem)); } break; } } } void IJsonSerializable.WriteProperties(Utf8JsonWriter json) { // Key Props if (KeyType.HasValue || KeyCurveName.HasValue || KeySize.HasValue || ReuseKey.HasValue || Exportable.HasValue) { json.WriteStartObject(s_keyPropsPropertyNameBytes); WriteKeyProperties(json); 
json.WriteEndObject(); } // Secret Props if (ContentType.HasValue) { json.WriteStartObject(s_secretPropsPropertyNameBytes); WriteSecretProperties(json); json.WriteEndObject(); } // X509 Props if (Subject != null || (SubjectAlternativeNames != null && !SubjectAlternativeNames.IsEmpty) || !KeyUsage.IsNullOrEmpty() || !EnhancedKeyUsage.IsNullOrEmpty() || ValidityInMonths.HasValue) { json.WriteStartObject(s_x509PropsPropertyNameBytes); WriteX509CertificateProperties(json); json.WriteEndObject(); } // Issuer Props if (IssuerName != null || CertificateType != null || CertificateTransparency.HasValue) { json.WriteStartObject(s_issuerPropertyNameBytes); _issuer.WriteProperties(json); json.WriteEndObject(); } if (Enabled.HasValue) { json.WriteStartObject(s_attributesPropertyNameBytes); WriteAttributesProperties(json); json.WriteEndObject(); } if (!LifetimeActions.IsNullOrEmpty()) { json.WriteStartArray(s_lifetimeActionsPropertyNameBytes); foreach (LifetimeAction action in LifetimeActions) { if (action != null) { json.WriteStartObject(); ((IJsonSerializable)action).WriteProperties(json); json.WriteEndObject(); } } json.WriteEndArray(); } } private void ReadKeyProperties(JsonElement json) { foreach (JsonProperty prop in json.EnumerateObject()) { switch (prop.Name) { case KeyTypePropertyName: KeyType = prop.Value.GetString(); break; case ReuseKeyPropertyName: ReuseKey = prop.Value.GetBoolean(); break; case ExportablePropertyName: Exportable = prop.Value.GetBoolean(); break; case CurveNamePropertyName: KeyCurveName = prop.Value.GetString(); break; case KeySizePropertyName: KeySize = prop.Value.GetInt32(); break; } } } private void WriteKeyProperties(Utf8JsonWriter json) { if (KeyType.HasValue) { json.WriteString(s_keyTypePropertyNameBytes, KeyType.ToString()); } if (ReuseKey.HasValue) { json.WriteBoolean(s_reuseKeyPropertyNameBytes, ReuseKey.Value); } if (Exportable.HasValue) { json.WriteBoolean(s_exportablePropertyNameBytes, Exportable.Value); } if (KeyCurveName.HasValue) { 
json.WriteString(s_curveNamePropertyNameBytes, KeyCurveName.ToString()); } if (KeySize.HasValue) { json.WriteNumber(s_keySizePropertyNameBytes, KeySize.Value); } } private void ReadSecretProperties(JsonElement json) { if (json.TryGetProperty(ContentTypePropertyName, out JsonElement contentTypeProp)) { ContentType = contentTypeProp.GetString(); } } private void WriteSecretProperties(Utf8JsonWriter json) { if (ContentType.HasValue) { json.WriteString(s_contentTypePropertyNameBytes, ContentType.ToString()); } } private void ReadX509CertificateProperties(JsonElement json) { foreach (JsonProperty prop in json.EnumerateObject()) { switch (prop.Name) { case SubjectPropertyName: Subject = prop.Value.GetString(); break; case SansPropertyName: SubjectAlternativeNames = new SubjectAlternativeNames(); ((IJsonDeserializable)SubjectAlternativeNames).ReadProperties(prop.Value); break; case KeyUsagePropertyName: foreach (JsonElement usageElem in prop.Value.EnumerateArray()) { KeyUsage.Add(usageElem.GetString()); } break; case EkusPropertyName: foreach (JsonElement usageElem in prop.Value.EnumerateArray()) { EnhancedKeyUsage.Add(usageElem.GetString()); } break; case ValidityMonthsPropertyName: ValidityInMonths = prop.Value.GetInt32(); break; } } } private void WriteX509CertificateProperties(Utf8JsonWriter json) { if (Subject != null) { json.WriteString(s_subjectPropertyNameBytes, Subject); } if (SubjectAlternativeNames != null && !SubjectAlternativeNames.IsEmpty) { json.WriteStartObject(s_sansPropertyNameBytes); ((IJsonSerializable)SubjectAlternativeNames).WriteProperties(json); json.WriteEndObject(); } if (!KeyUsage.IsNullOrEmpty()) { json.WriteStartArray(s_keyUsagePropertyNameBytes); foreach (CertificateKeyUsage usage in KeyUsage) { json.WriteStringValue(usage.ToString()); } json.WriteEndArray(); } if (!EnhancedKeyUsage.IsNullOrEmpty()) { json.WriteStartArray(s_ekusPropertyNameBytes); foreach (var usage in EnhancedKeyUsage) { json.WriteStringValue(usage); } json.WriteEndArray(); 
} if (ValidityInMonths.HasValue) { json.WriteNumber(s_validityMonthsPropertyNameBytes, ValidityInMonths.Value); } } private void ReadAttributesProperties(JsonElement json) { foreach (JsonProperty prop in json.EnumerateObject()) { switch (prop.Name) { case EnabledPropertyName: Enabled = prop.Value.GetBoolean(); break; case CreatedPropertyName: CreatedOn = DateTimeOffset.FromUnixTimeSeconds(prop.Value.GetInt64()); break; case UpdatedPropertyName: UpdatedOn = DateTimeOffset.FromUnixTimeSeconds(prop.Value.GetInt64()); break; } } } private void WriteAttributesProperties(Utf8JsonWriter json) { if (Enabled.HasValue) { json.WriteBoolean(s_enabledPropertyNameBytes, Enabled.Value); } } } }
// ***********************************************************************
// Copyright (c) 2009 Charlie Poole, Rob Prouse
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// ***********************************************************************

using System;
using System.Collections;

namespace NUnit.Framework.Constraints
{
    /// <summary>
    /// ConstraintExpression represents a compound constraint in the
    /// process of being constructed from a series of syntactic elements.
    ///
    /// Individual elements are appended to the expression as they are
    /// recognized. When a constraint is appended, it is returned as the
    /// value of the operation so that modifiers may be applied. However,
    /// any partially built expression is attached to the constraint for
    /// later resolution. When an operator is appended, the partial
    /// expression is returned. If it's a self-resolving operator, then
    /// a ResolvableConstraintExpression is returned.
    /// </summary>
    public class ConstraintExpression
    {
        #region Instance Fields

        /// <summary>
        /// The ConstraintBuilder holding the elements recognized so far
        /// </summary>
        protected readonly ConstraintBuilder builder;

        #endregion

        #region Constructors

        /// <summary>
        /// Initializes a new instance of the <see cref="ConstraintExpression"/> class.
        /// </summary>
        public ConstraintExpression() : this(new ConstraintBuilder()) { }

        /// <summary>
        /// Initializes a new instance of the <see cref="ConstraintExpression"/>
        /// class passing in a ConstraintBuilder, which may be pre-populated.
        /// </summary>
        /// <param name="builder">The builder.</param>
        public ConstraintExpression(ConstraintBuilder builder)
        {
            Guard.ArgumentNotNull(builder, nameof(builder));
            this.builder = builder;
        }

        #endregion

        #region ToString()

        /// <summary>
        /// Returns a string representation of the expression as it
        /// currently stands. This should only be used for testing,
        /// since it has the side-effect of resolving the expression.
        /// </summary>
        /// <returns>The resolved expression as a string.</returns>
        public override string ToString()
        {
            return builder.Resolve().ToString();
        }

        #endregion

        #region Append Methods

        /// <summary>
        /// Appends an operator to the expression and returns the
        /// resulting expression itself.
        /// </summary>
        /// <param name="op">The operator to append.</param>
        public ConstraintExpression Append(ConstraintOperator op)
        {
            builder.Append(op);
            return this;
        }

        /// <summary>
        /// Appends a self-resolving operator to the expression and
        /// returns a new ResolvableConstraintExpression.
        /// </summary>
        /// <param name="op">The self-resolving operator to append.</param>
        public ResolvableConstraintExpression Append(SelfResolvingOperator op)
        {
            builder.Append(op);
            return new ResolvableConstraintExpression(builder);
        }

        /// <summary>
        /// Appends a constraint to the expression and returns that
        /// constraint, which is associated with the current state
        /// of the expression being built. Note that the constraint
        /// is not reduced at this time. For example, if there
        /// is a NotOperator on the stack we don't reduce and
        /// return a NotConstraint. The original constraint must
        /// be returned because it may support modifiers that
        /// are yet to be applied.
        /// </summary>
        /// <param name="constraint">The constraint to append.</param>
        public Constraint Append(Constraint constraint)
        {
            builder.Append(constraint);
            return constraint;
        }

        #endregion

        #region Not

        /// <summary>
        /// Returns a ConstraintExpression that negates any
        /// following constraint.
        /// </summary>
        public ConstraintExpression Not
        {
            get { return this.Append(new NotOperator()); }
        }

        /// <summary>
        /// Returns a ConstraintExpression that negates any
        /// following constraint.
        /// </summary>
        public ConstraintExpression No
        {
            get { return this.Append(new NotOperator()); }
        }

        #endregion

        #region All

        /// <summary>
        /// Returns a ConstraintExpression, which will apply
        /// the following constraint to all members of a collection,
        /// succeeding if all of them succeed.
        /// </summary>
        public ConstraintExpression All
        {
            get { return this.Append(new AllOperator()); }
        }

        #endregion

        #region Some

        /// <summary>
        /// Returns a ConstraintExpression, which will apply
        /// the following constraint to all members of a collection,
        /// succeeding if at least one of them succeeds.
        /// </summary>
        public ConstraintExpression Some
        {
            get { return this.Append(new SomeOperator()); }
        }

        #endregion

        #region None

        /// <summary>
        /// Returns a ConstraintExpression, which will apply
        /// the following constraint to all members of a collection,
        /// succeeding if all of them fail.
        /// </summary>
        public ConstraintExpression None
        {
            get { return this.Append(new NoneOperator()); }
        }

        #endregion

        #region Exactly(n)

        /// <summary>
        /// Returns a ConstraintExpression, which will apply
        /// the following constraint to all members of a collection,
        /// succeeding only if a specified number of them succeed.
        /// </summary>
        public ItemsConstraintExpression Exactly(int expectedCount)
        {
            builder.Append(new ExactCountOperator(expectedCount));
            return new ItemsConstraintExpression(builder);
        }

        #endregion

        #region One

        /// <summary>
        /// Returns a <see cref="ItemsConstraintExpression"/>, which will
        /// apply the following constraint to a collection of length one, succeeding
        /// only if exactly one of them succeeds.
        /// </summary>
        public ItemsConstraintExpression One
        {
            get
            {
                builder.Append(new ExactCountOperator(1));
                return new ItemsConstraintExpression(builder);
            }
        }

        #endregion

        #region Property

        /// <summary>
        /// Returns a new PropertyConstraintExpression, which will either
        /// test for the existence of the named property on the object
        /// being tested or apply any following constraint to that property.
        /// </summary>
        /// <param name="name">The name of the property to test.</param>
        public ResolvableConstraintExpression Property(string name)
        {
            return this.Append(new PropOperator(name));
        }

        #endregion

        #region Length

        /// <summary>
        /// Returns a new ConstraintExpression, which will apply the following
        /// constraint to the Length property of the object being tested.
        /// </summary>
        public ResolvableConstraintExpression Length
        {
            get { return Property("Length"); }
        }

        #endregion

        #region Count

        /// <summary>
        /// Returns a new ConstraintExpression, which will apply the following
        /// constraint to the Count property of the object being tested.
        /// </summary>
        public ResolvableConstraintExpression Count
        {
            get { return Property("Count"); }
        }

        #endregion

        #region Message

        /// <summary>
        /// Returns a new ConstraintExpression, which will apply the following
        /// constraint to the Message property of the object being tested.
        /// </summary>
        public ResolvableConstraintExpression Message
        {
            get { return Property("Message"); }
        }

        #endregion

        #region InnerException

        /// <summary>
        /// Returns a new ConstraintExpression, which will apply the following
        /// constraint to the InnerException property of the object being tested.
        /// </summary>
        public ResolvableConstraintExpression InnerException
        {
            get { return Property("InnerException"); }
        }

        #endregion

        #region Attribute

        /// <summary>
        /// Returns a new AttributeConstraint checking for the
        /// presence of a particular attribute on an object.
        /// </summary>
        /// <param name="expectedType">The type of the attribute expected to be present.</param>
        public ResolvableConstraintExpression Attribute(Type expectedType)
        {
            return this.Append(new AttributeOperator(expectedType));
        }

        /// <summary>
        /// Returns a new AttributeConstraint checking for the
        /// presence of a particular attribute on an object.
        /// </summary>
        public ResolvableConstraintExpression Attribute<TExpected>()
        {
            return Attribute(typeof(TExpected));
        }

        #endregion

        #region With

        /// <summary>
        /// With is currently a NOP - reserved for future use.
        /// </summary>
        public ConstraintExpression With
        {
            get { return this.Append(new WithOperator()); }
        }

        #endregion

        #region Matches

        /// <summary>
        /// Returns the constraint provided as an argument - used to allow custom
        /// constraints to easily participate in the syntax.
        /// </summary>
        public Constraint Matches(IResolveConstraint constraint)
        {
            return this.Append((Constraint)constraint.Resolve());
        }

        /// <summary>
        /// Returns the constraint provided as an argument - used to allow custom
        /// constraints to easily participate in the syntax.
        /// </summary>
        public Constraint Matches<TActual>(Predicate<TActual> predicate)
        {
            return this.Append(new PredicateConstraint<TActual>(predicate));
        }

        #endregion

        #region Null

        /// <summary>
        /// Returns a constraint that tests for null
        /// </summary>
        public NullConstraint Null
        {
            get { return (NullConstraint)this.Append(new NullConstraint()); }
        }

        #endregion

        #region True

        /// <summary>
        /// Returns a constraint that tests for True
        /// </summary>
        public TrueConstraint True
        {
            get { return (TrueConstraint)this.Append(new TrueConstraint()); }
        }

        #endregion

        #region False

        /// <summary>
        /// Returns a constraint that tests for False
        /// </summary>
        public FalseConstraint False
        {
            get { return (FalseConstraint)this.Append(new FalseConstraint()); }
        }

        #endregion

        #region Positive

        /// <summary>
        /// Returns a constraint that tests for a positive value
        /// </summary>
        public GreaterThanConstraint Positive
        {
            get { return (GreaterThanConstraint)this.Append(new GreaterThanConstraint(0)); }
        }

        #endregion

        #region Negative

        /// <summary>
        /// Returns a constraint that tests for a negative value
        /// </summary>
        public LessThanConstraint Negative
        {
            get { return (LessThanConstraint)this.Append(new LessThanConstraint(0)); }
        }

        #endregion

        #region Zero

        /// <summary>
        /// Returns a constraint that tests if item is equal to zero
        /// </summary>
        public EqualConstraint Zero
        {
            get { return (EqualConstraint)this.Append(new EqualConstraint(0)); }
        }

        #endregion

        #region NaN

        /// <summary>
        /// Returns a constraint that tests for NaN
        /// </summary>
        public NaNConstraint NaN
        {
            get { return (NaNConstraint)this.Append(new NaNConstraint()); }
        }

        #endregion

        #region Empty

        /// <summary>
        /// Returns a constraint that tests for empty
        /// </summary>
        public EmptyConstraint Empty
        {
            get { return (EmptyConstraint)this.Append(new EmptyConstraint()); }
        }

        #endregion

        #region Unique

        /// <summary>
        /// Returns a constraint that tests whether a collection
        /// contains all unique items.
        /// </summary>
        public UniqueItemsConstraint Unique
        {
            get { return (UniqueItemsConstraint)this.Append(new UniqueItemsConstraint()); }
        }

        #endregion

#if SERIALIZATION
        /// <summary>
        /// Returns a constraint that tests whether an object graph is serializable in binary format.
        /// </summary>
        public BinarySerializableConstraint BinarySerializable
        {
            get { return (BinarySerializableConstraint)this.Append(new BinarySerializableConstraint()); }
        }

        /// <summary>
        /// Returns a constraint that tests whether an object graph is serializable in XML format.
        /// </summary>
        public XmlSerializableConstraint XmlSerializable
        {
            get { return (XmlSerializableConstraint)this.Append(new XmlSerializableConstraint()); }
        }
#endif

        #region EqualTo

        /// <summary>
        /// Returns a constraint that tests two items for equality
        /// </summary>
        /// <param name="expected">The expected value.</param>
        public EqualConstraint EqualTo(object expected)
        {
            return (EqualConstraint)this.Append(new EqualConstraint(expected));
        }

        #endregion

        #region SameAs

        /// <summary>
        /// Returns a constraint that tests that two references are the same object
        /// </summary>
        /// <param name="expected">The expected object reference.</param>
        public SameAsConstraint SameAs(object expected)
        {
            return (SameAsConstraint)this.Append(new SameAsConstraint(expected));
        }

        #endregion

        #region GreaterThan

        /// <summary>
        /// Returns a constraint that tests whether the
        /// actual value is greater than the supplied argument
        /// </summary>
        public GreaterThanConstraint GreaterThan(object expected)
        {
            return (GreaterThanConstraint)this.Append(new GreaterThanConstraint(expected));
        }

        #endregion

        #region GreaterThanOrEqualTo

        /// <summary>
        /// Returns a constraint that tests whether the
        /// actual value is greater than or equal to the supplied argument
        /// </summary>
        public GreaterThanOrEqualConstraint GreaterThanOrEqualTo(object expected)
        {
            return (GreaterThanOrEqualConstraint)this.Append(new GreaterThanOrEqualConstraint(expected));
        }

        /// <summary>
        /// Returns a constraint that tests whether the
        /// actual value is greater than or equal to the supplied argument
        /// </summary>
        public
        GreaterThanOrEqualConstraint AtLeast(object expected)
        {
            return (GreaterThanOrEqualConstraint)this.Append(new GreaterThanOrEqualConstraint(expected));
        }

        #endregion

        #region LessThan

        /// <summary>
        /// Returns a constraint that tests whether the
        /// actual value is less than the supplied argument
        /// </summary>
        public LessThanConstraint LessThan(object expected)
        {
            return (LessThanConstraint)this.Append(new LessThanConstraint(expected));
        }

        #endregion

        #region LessThanOrEqualTo

        /// <summary>
        /// Returns a constraint that tests whether the
        /// actual value is less than or equal to the supplied argument
        /// </summary>
        public LessThanOrEqualConstraint LessThanOrEqualTo(object expected)
        {
            return (LessThanOrEqualConstraint)this.Append(new LessThanOrEqualConstraint(expected));
        }

        /// <summary>
        /// Returns a constraint that tests whether the
        /// actual value is less than or equal to the supplied argument
        /// </summary>
        public LessThanOrEqualConstraint AtMost(object expected)
        {
            return (LessThanOrEqualConstraint)this.Append(new LessThanOrEqualConstraint(expected));
        }

        #endregion

        #region TypeOf

        /// <summary>
        /// Returns a constraint that tests whether the actual
        /// value is of the exact type supplied as an argument.
        /// </summary>
        public ExactTypeConstraint TypeOf(Type expectedType)
        {
            return (ExactTypeConstraint)this.Append(new ExactTypeConstraint(expectedType));
        }

        /// <summary>
        /// Returns a constraint that tests whether the actual
        /// value is of the exact type supplied as an argument.
        /// </summary>
        public ExactTypeConstraint TypeOf<TExpected>()
        {
            return (ExactTypeConstraint)this.Append(new ExactTypeConstraint(typeof(TExpected)));
        }

        #endregion

        #region InstanceOf

        /// <summary>
        /// Returns a constraint that tests whether the actual value
        /// is of the type supplied as an argument or a derived type.
        /// </summary>
        public InstanceOfTypeConstraint InstanceOf(Type expectedType)
        {
            return (InstanceOfTypeConstraint)this.Append(new InstanceOfTypeConstraint(expectedType));
        }

        /// <summary>
        /// Returns a constraint that tests whether the actual value
        /// is of the type supplied as an argument or a derived type.
        /// </summary>
        public InstanceOfTypeConstraint InstanceOf<TExpected>()
        {
            return (InstanceOfTypeConstraint)this.Append(new InstanceOfTypeConstraint(typeof(TExpected)));
        }

        #endregion

        #region AssignableFrom

        /// <summary>
        /// Returns a constraint that tests whether the actual value
        /// is assignable from the type supplied as an argument.
        /// </summary>
        public AssignableFromConstraint AssignableFrom(Type expectedType)
        {
            return (AssignableFromConstraint)this.Append(new AssignableFromConstraint(expectedType));
        }

        /// <summary>
        /// Returns a constraint that tests whether the actual value
        /// is assignable from the type supplied as an argument.
        /// </summary>
        public AssignableFromConstraint AssignableFrom<TExpected>()
        {
            return (AssignableFromConstraint)this.Append(new AssignableFromConstraint(typeof(TExpected)));
        }

        #endregion

        #region AssignableTo

        /// <summary>
        /// Returns a constraint that tests whether the actual value
        /// is assignable from the type supplied as an argument.
        /// </summary>
        public AssignableToConstraint AssignableTo(Type expectedType)
        {
            return (AssignableToConstraint)this.Append(new AssignableToConstraint(expectedType));
        }

        /// <summary>
        /// Returns a constraint that tests whether the actual value
        /// is assignable from the type supplied as an argument.
        /// </summary>
        public AssignableToConstraint AssignableTo<TExpected>()
        {
            return (AssignableToConstraint)this.Append(new AssignableToConstraint(typeof(TExpected)));
        }

        #endregion

        #region EquivalentTo

        /// <summary>
        /// Returns a constraint that tests whether the actual value
        /// is a collection containing the same elements as the
        /// collection supplied as an argument.
        /// </summary>
        public CollectionEquivalentConstraint EquivalentTo(IEnumerable expected)
        {
            return (CollectionEquivalentConstraint)this.Append(new CollectionEquivalentConstraint(expected));
        }

        #endregion

        #region SubsetOf

        /// <summary>
        /// Returns a constraint that tests whether the actual value
        /// is a subset of the collection supplied as an argument.
        /// </summary>
        public CollectionSubsetConstraint SubsetOf(IEnumerable expected)
        {
            return (CollectionSubsetConstraint)this.Append(new CollectionSubsetConstraint(expected));
        }

        #endregion

        #region SupersetOf

        /// <summary>
        /// Returns a constraint that tests whether the actual value
        /// is a superset of the collection supplied as an argument.
        /// </summary>
        public CollectionSupersetConstraint SupersetOf(IEnumerable expected)
        {
            return (CollectionSupersetConstraint)this.Append(new CollectionSupersetConstraint(expected));
        }

        #endregion

        #region Ordered

        /// <summary>
        /// Returns a constraint that tests whether a collection is ordered
        /// </summary>
        public CollectionOrderedConstraint Ordered
        {
            get { return (CollectionOrderedConstraint)this.Append(new CollectionOrderedConstraint()); }
        }

        #endregion

        #region Member

        /// <summary>
        /// Returns a new <see cref="SomeItemsConstraint"/> checking for the
        /// presence of a particular object in the collection.
        /// </summary>
        public SomeItemsConstraint Member(object expected)
        {
            return (SomeItemsConstraint)this.Append(new SomeItemsConstraint(new EqualConstraint(expected)));
        }

        #endregion

        #region Contains

        /// <summary>
        /// Returns a new <see cref="SomeItemsConstraint"/> checking for the
        /// presence of a particular object in the collection.
        /// </summary>
        public SomeItemsConstraint Contains(object expected)
        {
            return (SomeItemsConstraint)this.Append(new SomeItemsConstraint(new EqualConstraint(expected)));
        }

        /// <summary>
        /// Returns a new ContainsConstraint. This constraint
        /// will, in turn, make use of the appropriate second-level
        /// constraint, depending on the type of the actual argument.
        /// This overload is only used if the item sought is a string,
        /// since any other type implies that we are looking for a
        /// collection member.
        /// </summary>
        public ContainsConstraint Contains(string expected)
        {
            return (ContainsConstraint)this.Append(new ContainsConstraint(expected));
        }

        /// <summary>
        /// Returns a new <see cref="SomeItemsConstraint"/> checking for the
        /// presence of a particular object in the collection.
        /// </summary>
        public SomeItemsConstraint Contain(object expected)
        {
            return Contains(expected);
        }

        /// <summary>
        /// Returns a new ContainsConstraint. This constraint
        /// will, in turn, make use of the appropriate second-level
        /// constraint, depending on the type of the actual argument.
        /// This overload is only used if the item sought is a string,
        /// since any other type implies that we are looking for a
        /// collection member.
        /// </summary>
        public ContainsConstraint Contain(string expected)
        {
            return Contains(expected);
        }

        #endregion

        #region DictionaryContains

        /// <summary>
        /// Returns a new DictionaryContainsKeyConstraint checking for the
        /// presence of a particular key in the Dictionary key collection.
        /// </summary>
        /// <param name="expected">The key to be matched in the Dictionary key collection</param>
        public DictionaryContainsKeyConstraint ContainKey(object expected)
        {
            return (DictionaryContainsKeyConstraint)this.Append(new DictionaryContainsKeyConstraint(expected));
        }

        /// <summary>
        /// Returns a new DictionaryContainsValueConstraint checking for the
        /// presence of a particular value in the Dictionary value collection.
        /// </summary>
        /// <param name="expected">The value to be matched in the Dictionary value collection</param>
        public DictionaryContainsValueConstraint ContainValue(object expected)
        {
            return (DictionaryContainsValueConstraint)this.Append(new DictionaryContainsValueConstraint(expected));
        }

        #endregion

        #region StringContaining

        /// <summary>
        /// Returns a constraint that succeeds if the actual
        /// value contains the substring supplied as an argument.
        /// </summary>
        [Obsolete("Deprecated, use Contains")]
        public SubstringConstraint StringContaining(string expected)
        {
            return (SubstringConstraint)this.Append(new SubstringConstraint(expected));
        }

        /// <summary>
        /// Returns a constraint that succeeds if the actual
        /// value contains the substring supplied as an argument.
        /// </summary>
        [Obsolete("Deprecated, use Contains")]
        public SubstringConstraint ContainsSubstring(string expected)
        {
            return (SubstringConstraint)this.Append(new SubstringConstraint(expected));
        }

        #endregion

        #region StartsWith

        /// <summary>
        /// Returns a constraint that succeeds if the actual
        /// value starts with the substring supplied as an argument.
        /// </summary>
        public StartsWithConstraint StartWith(string expected)
        {
            return (StartsWithConstraint)this.Append(new StartsWithConstraint(expected));
        }

        /// <summary>
        /// Returns a constraint that succeeds if the actual
        /// value starts with the substring supplied as an argument.
        /// </summary>
        public StartsWithConstraint StartsWith(string expected)
        {
            return (StartsWithConstraint)this.Append(new StartsWithConstraint(expected));
        }

        /// <summary>
        /// Returns a constraint that succeeds if the actual
        /// value starts with the substring supplied as an argument.
/// </summary> [Obsolete("Deprecated, use Does.StartWith or StartsWith")] public StartsWithConstraint StringStarting(string expected) { return (StartsWithConstraint)this.Append(new StartsWithConstraint(expected)); } #endregion #region EndsWith /// <summary> /// Returns a constraint that succeeds if the actual /// value ends with the substring supplied as an argument. /// </summary> public EndsWithConstraint EndWith(string expected) { return (EndsWithConstraint)this.Append(new EndsWithConstraint(expected)); } /// <summary> /// Returns a constraint that succeeds if the actual /// value ends with the substring supplied as an argument. /// </summary> public EndsWithConstraint EndsWith(string expected) { return (EndsWithConstraint)this.Append(new EndsWithConstraint(expected)); } /// <summary> /// Returns a constraint that succeeds if the actual /// value ends with the substring supplied as an argument. /// </summary> [Obsolete("Deprecated, use Does.EndWith or EndsWith")] public EndsWithConstraint StringEnding(string expected) { return (EndsWithConstraint)this.Append(new EndsWithConstraint(expected)); } #endregion #region Matches /// <summary> /// Returns a constraint that succeeds if the actual /// value matches the regular expression supplied as an argument. /// </summary> public RegexConstraint Match(string pattern) { return (RegexConstraint)this.Append(new RegexConstraint(pattern)); } /// <summary> /// Returns a constraint that succeeds if the actual /// value matches the regular expression supplied as an argument. /// </summary> public RegexConstraint Matches(string pattern) { return (RegexConstraint)this.Append(new RegexConstraint(pattern)); } /// <summary> /// Returns a constraint that succeeds if the actual /// value matches the regular expression supplied as an argument. 
/// </summary> [Obsolete("Deprecated, use Does.Match or Matches")] public RegexConstraint StringMatching(string pattern) { return (RegexConstraint)this.Append(new RegexConstraint(pattern)); } #endregion #region SamePath /// <summary> /// Returns a constraint that tests whether the path provided /// is the same as an expected path after canonicalization. /// </summary> public SamePathConstraint SamePath(string expected) { return (SamePathConstraint)this.Append(new SamePathConstraint(expected)); } #endregion #region SubPath /// <summary> /// Returns a constraint that tests whether the path provided /// is the a subpath of the expected path after canonicalization. /// </summary> public SubPathConstraint SubPathOf(string expected) { return (SubPathConstraint)this.Append(new SubPathConstraint(expected)); } #endregion #region SamePathOrUnder /// <summary> /// Returns a constraint that tests whether the path provided /// is the same path or under an expected path after canonicalization. /// </summary> public SamePathOrUnderConstraint SamePathOrUnder(string expected) { return (SamePathOrUnderConstraint)this.Append(new SamePathOrUnderConstraint(expected)); } #endregion #region InRange /// <summary> /// Returns a constraint that tests whether the actual value falls /// inclusively within a specified range. /// </summary> /// <param name="from">Inclusive beginning of the range.</param> /// <param name="to">Inclusive end of the range.</param> public RangeConstraint InRange(object from, object to) { return (RangeConstraint)this.Append(new RangeConstraint(from, to)); } #endregion #region Exist /// <summary> /// Returns a constraint that succeeds if the value /// is a file or directory and it exists. 
/// </summary> public Constraint Exist { get { return Append(new FileOrDirectoryExistsConstraint()); } } #endregion #region AnyOf /// <summary> /// Returns a constraint that tests if an item is equal to any of parameters /// </summary> /// <param name="expected">Expected values</param> public AnyOfConstraint AnyOf(params object[] expected) { if (expected == null) { expected = new object[] { null }; } return (AnyOfConstraint)this.Append(new AnyOfConstraint(expected)); } #endregion } }
using J2N.Numerics;
using YAF.Lucene.Net.Support;
using System;
using System.Diagnostics;

namespace YAF.Lucene.Net.Util
{
    /*
     * Licensed to the Apache Software Foundation (ASF) under one or more
     * contributor license agreements. See the NOTICE file distributed with
     * this work for additional information regarding copyright ownership.
     * The ASF licenses this file to You under the Apache License, Version 2.0
     * (the "License"); you may not use this file except in compliance with
     * the License. You may obtain a copy of the License at
     *
     *     http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */

    /// <summary>
    /// BitSet of fixed length (<see cref="numBits"/>), backed by accessible (<see cref="GetBits()"/>)
    /// <see cref="T:long[]"/>, accessed with a <see cref="long"/> index. Use it only if you intend to store more
    /// than 2.1B bits, otherwise you should use <see cref="FixedBitSet"/>.
    /// <para/>
    /// NOTE: This was LongBitSet in Lucene
    /// <para/>
    /// @lucene.internal
    /// </summary>
#if FEATURE_SERIALIZABLE
    [Serializable]
#endif
    public sealed class Int64BitSet
    {
        private readonly long[] bits;  // backing storage, 64 bits per word
        private readonly long numBits; // logical size in bits
        private readonly int numWords; // number of words in use

        /// <summary>
        /// If the given <see cref="Int64BitSet"/> is large enough to hold
        /// <paramref name="numBits"/>, returns the given <paramref name="bits"/>, otherwise returns a new
        /// <see cref="Int64BitSet"/> which can hold the requested number of bits.
        ///
        /// <para/>
        /// <b>NOTE:</b> the returned bitset reuses the underlying <see cref="T:long[]"/> of
        /// the given <paramref name="bits"/> if possible. Also, reading <see cref="Length"/> on the
        /// returned bits may return a value greater than <paramref name="numBits"/>.
        /// </summary>
        public static Int64BitSet EnsureCapacity(Int64BitSet bits, long numBits)
        {
            if (numBits < bits.Length)
            {
                return bits;
            }
            else
            {
                int numWords = Bits2words(numBits);
                long[] arr = bits.GetBits();
                if (numWords >= arr.Length)
                {
                    arr = ArrayUtil.Grow(arr, numWords + 1);
                }
                // BUGFIX: widen to long before shifting. "arr.Length << 6" is 32-bit
                // arithmetic and overflows once arr.Length >= 2^25 words (> 2^31 bits),
                // which is exactly the size range this class exists for.
                return new Int64BitSet(arr, (long)arr.Length << 6);
            }
        }

        /// <summary>
        /// Returns the number of 64 bit words it would take to hold <paramref name="numBits"/>. </summary>
        public static int Bits2words(long numBits)
        {
            // unsigned divide by 64, rounding up when there is a remainder
            int numLong = (int)((long)((ulong)numBits >> 6));
            if ((numBits & 63) != 0)
            {
                numLong++;
            }
            return numLong;
        }

        /// <summary>
        /// Creates a bitset large enough to hold <paramref name="numBits"/>, all clear. </summary>
        public Int64BitSet(long numBits)
        {
            this.numBits = numBits;
            bits = new long[Bits2words(numBits)];
            numWords = bits.Length;
        }

        /// <summary>
        /// Wraps an existing <see cref="T:long[]"/> as the backing store for <paramref name="numBits"/> bits. </summary>
        /// <exception cref="ArgumentException">if the array is too small for the requested size</exception>
        public Int64BitSet(long[] storedBits, long numBits)
        {
            this.numWords = Bits2words(numBits);
            if (numWords > storedBits.Length)
            {
                throw new System.ArgumentException("The given long array is too small to hold " + numBits + " bits");
            }
            this.numBits = numBits;
            this.bits = storedBits;
        }

        /// <summary>
        /// Returns the number of bits stored in this bitset. </summary>
        public long Length
        {
            get { return numBits; }
        }

        /// <summary>
        /// Expert: returns the (mutable) backing array. </summary>
        [WritableArray]
        public long[] GetBits()
        {
            return bits;
        }

        /// <summary>
        /// Returns number of set bits.  NOTE: this visits every
        /// long in the backing bits array, and the result is not
        /// internally cached!
        /// </summary>
        public long Cardinality()
        {
            return BitUtil.Pop_Array(bits, 0, bits.Length);
        }

        /// <summary>
        /// Returns <c>true</c> if the bit at <paramref name="index"/> is set. </summary>
        public bool Get(long index)
        {
            Debug.Assert(index >= 0 && index < numBits, "index=" + index);
            int i = (int)(index >> 6); // div 64
            // signed shift will keep a negative index and force an
            // array-index-out-of-bounds-exception, removing the need for an explicit check.
            int bit = (int)(index & 0x3f); // mod 64
            long bitmask = 1L << bit;
            return (bits[i] & bitmask) != 0;
        }

        /// <summary>
        /// Sets the bit at <paramref name="index"/>. </summary>
        public void Set(long index)
        {
            Debug.Assert(index >= 0 && index < numBits, "index=" + index + " numBits=" + numBits);
            int wordNum = (int)(index >> 6); // div 64
            int bit = (int)(index & 0x3f); // mod 64
            long bitmask = 1L << bit;
            bits[wordNum] |= bitmask;
        }

        /// <summary>
        /// Sets the bit at <paramref name="index"/> and returns its previous value. </summary>
        public bool GetAndSet(long index)
        {
            Debug.Assert(index >= 0 && index < numBits);
            int wordNum = (int)(index >> 6); // div 64
            int bit = (int)(index & 0x3f); // mod 64
            long bitmask = 1L << bit;
            bool val = (bits[wordNum] & bitmask) != 0;
            bits[wordNum] |= bitmask;
            return val;
        }

        /// <summary>
        /// Clears the bit at <paramref name="index"/>. </summary>
        public void Clear(long index)
        {
            Debug.Assert(index >= 0 && index < numBits);
            int wordNum = (int)(index >> 6);
            int bit = (int)(index & 0x3f);
            long bitmask = 1L << bit;
            bits[wordNum] &= ~bitmask;
        }

        /// <summary>
        /// Clears the bit at <paramref name="index"/> and returns its previous value. </summary>
        public bool GetAndClear(long index)
        {
            Debug.Assert(index >= 0 && index < numBits);
            int wordNum = (int)(index >> 6); // div 64
            int bit = (int)(index & 0x3f); // mod 64
            long bitmask = 1L << bit;
            bool val = (bits[wordNum] & bitmask) != 0;
            bits[wordNum] &= ~bitmask;
            return val;
        }

        /// <summary>
        /// Returns the index of the first set bit starting at the <paramref name="index"/> specified.
        /// -1 is returned if there are no more set bits.
        /// </summary>
        public long NextSetBit(long index)
        {
            Debug.Assert(index >= 0 && index < numBits);
            int i = (int)(index >> 6);
            int subIndex = (int)(index & 0x3f); // index within the word
            long word = bits[i] >> subIndex; // skip all the bits to the right of index
            if (word != 0)
            {
                return index + word.TrailingZeroCount();
            }
            while (++i < numWords)
            {
                word = bits[i];
                if (word != 0)
                {
                    // BUGFIX: widen i to long before shifting. "(i << 6)" is 32-bit
                    // arithmetic and overflows for word indices >= 2^25, i.e. for bit
                    // indices beyond 2^31 — the very range Int64BitSet is meant for.
                    return ((long)i << 6) + word.TrailingZeroCount();
                }
            }
            return -1;
        }

        /// <summary>
        /// Returns the index of the last set bit before or on the <paramref name="index"/> specified.
        /// -1 is returned if there are no more set bits.
        /// </summary>
        public long PrevSetBit(long index)
        {
            Debug.Assert(index >= 0 && index < numBits, "index=" + index + " numBits=" + numBits);
            int i = (int)(index >> 6);
            int subIndex = (int)(index & 0x3f); // index within the word
            long word = (bits[i] << (63 - subIndex)); // skip all the bits to the left of index
            if (word != 0)
            {
                // BUGFIX: widen i to long before shifting (see NextSetBit).
                return ((long)i << 6) + subIndex - word.LeadingZeroCount(); // See LUCENE-3197
            }
            while (--i >= 0)
            {
                word = bits[i];
                if (word != 0)
                {
                    // BUGFIX: widen i to long before shifting (see NextSetBit).
                    return ((long)i << 6) + 63 - word.LeadingZeroCount();
                }
            }
            return -1;
        }

        /// <summary>
        /// this = this OR other </summary>
        public void Or(Int64BitSet other)
        {
            Debug.Assert(other.numWords <= numWords, "numWords=" + numWords + ", other.numWords=" + other.numWords);
            int pos = Math.Min(numWords, other.numWords);
            while (--pos >= 0)
            {
                bits[pos] |= other.bits[pos];
            }
        }

        /// <summary>
        /// this = this XOR other </summary>
        public void Xor(Int64BitSet other)
        {
            Debug.Assert(other.numWords <= numWords, "numWords=" + numWords + ", other.numWords=" + other.numWords);
            int pos = Math.Min(numWords, other.numWords);
            while (--pos >= 0)
            {
                bits[pos] ^= other.bits[pos];
            }
        }

        /// <summary>
        /// Returns <c>true</c> if the sets have any elements in common </summary>
        public bool Intersects(Int64BitSet other)
        {
            int pos = Math.Min(numWords, other.numWords);
            while (--pos >= 0)
            {
                if ((bits[pos] & other.bits[pos]) != 0)
                {
                    return true;
                }
            }
            return false;
        }

        /// <summary>
        /// this = this AND other </summary>
        public void And(Int64BitSet other)
        {
            int pos = Math.Min(numWords, other.numWords);
            while (--pos >= 0)
            {
                bits[pos] &= other.bits[pos];
            }
            // any words beyond the other set's size AND with implicit zeros
            if (numWords > other.numWords)
            {
                Arrays.Fill(bits, other.numWords, numWords, 0L);
            }
        }

        /// <summary>
        /// this = this AND NOT other </summary>
        public void AndNot(Int64BitSet other)
        {
            int pos = Math.Min(numWords, other.bits.Length);
            while (--pos >= 0)
            {
                bits[pos] &= ~other.bits[pos];
            }
        }

        // NOTE: no .isEmpty() here because that's trappy (ie,
        // typically isEmpty is low cost, but this one wouldn't
        // be)

        /// <summary>
        /// Flips a range of bits
        /// </summary>
        /// <param name="startIndex"> Lower index </param>
        /// <param name="endIndex"> One-past the last bit to flip </param>
        public void Flip(long startIndex, long endIndex)
        {
            Debug.Assert(startIndex >= 0 && startIndex < numBits);
            Debug.Assert(endIndex >= 0 && endIndex <= numBits);
            if (endIndex <= startIndex)
            {
                return;
            }

            int startWord = (int)(startIndex >> 6);
            int endWord = (int)((endIndex - 1) >> 6);

            // C# (like Java) masks shift counts to 6 bits for long operands, so
            // "<< (int)startIndex" is << (startIndex mod 64), and ">> (int)-endIndex"
            // is >> (64 - (endIndex mod 64)) mod 64. When endIndex is a multiple of 64
            // the end mask is all ones, which is correct because endWord is computed
            // from endIndex - 1.
            long startmask = -1L << (int)startIndex;
            long endmask = (long)(unchecked(((ulong)-1L)) >> (int)-endIndex);

            if (startWord == endWord)
            {
                bits[startWord] ^= (startmask & endmask);
                return;
            }

            bits[startWord] ^= startmask;
            for (int i = startWord + 1; i < endWord; i++)
            {
                bits[i] = ~bits[i];
            }
            bits[endWord] ^= endmask;
        }

        /// <summary>
        /// Sets a range of bits
        /// </summary>
        /// <param name="startIndex"> Lower index </param>
        /// <param name="endIndex"> One-past the last bit to set </param>
        public void Set(long startIndex, long endIndex)
        {
            Debug.Assert(startIndex >= 0 && startIndex < numBits);
            Debug.Assert(endIndex >= 0 && endIndex <= numBits);
            if (endIndex <= startIndex)
            {
                return;
            }

            int startWord = (int)(startIndex >> 6);
            int endWord = (int)((endIndex - 1) >> 6);

            // masks rely on the 6-bit masking of shift counts; see Flip for details
            long startmask = -1L << (int)startIndex;
            long endmask = (long)(0xffffffffffffffffUL >> (int)-endIndex);

            if (startWord == endWord)
            {
                bits[startWord] |= (startmask & endmask);
                return;
            }

            bits[startWord] |= startmask;
            Arrays.Fill(bits, startWord + 1, endWord, -1L);
            bits[endWord] |= endmask;
        }

        /// <summary>
        /// Clears a range of bits.
        /// </summary>
        /// <param name="startIndex"> Lower index </param>
        /// <param name="endIndex"> One-past the last bit to clear </param>
        public void Clear(long startIndex, long endIndex)
        {
            Debug.Assert(startIndex >= 0 && startIndex < numBits);
            Debug.Assert(endIndex >= 0 && endIndex <= numBits);
            if (endIndex <= startIndex)
            {
                return;
            }

            int startWord = (int)(startIndex >> 6);
            int endWord = (int)((endIndex - 1) >> 6);

            // Casting long to int discards MSBs, so it is no problem because we are taking mod 64.
            long startmask = (-1L) << (int)startIndex; // -1 << (startIndex mod 64)
            long endmask = (-1L) << (int)endIndex; // -1 << (endIndex mod 64)
            if ((endIndex & 0x3f) == 0)
            {
                // endIndex lands exactly on a word boundary: clear all of endWord
                endmask = 0;
            }

            // invert the start mask since we are clearing
            startmask = ~startmask;

            if (startWord == endWord)
            {
                bits[startWord] &= (startmask | endmask);
                return;
            }

            bits[startWord] &= startmask;
            Arrays.Fill(bits, startWord + 1, endWord, 0L);
            bits[endWord] &= endmask;
        }

        /// <summary>
        /// Returns a deep copy of this bitset. </summary>
        public Int64BitSet Clone()
        {
            long[] bits = new long[this.bits.Length];
            Array.Copy(this.bits, 0, bits, 0, bits.Length);
            return new Int64BitSet(bits, numBits);
        }

        /// <summary>
        /// Returns <c>true</c> if both sets have the same bits set </summary>
        public override bool Equals(object o)
        {
            if (this == o)
            {
                return true;
            }
            if (!(o is Int64BitSet))
            {
                return false;
            }
            Int64BitSet other = (Int64BitSet)o;
            if (numBits != other.Length)
            {
                return false;
            }
            return Arrays.Equals(bits, other.bits);
        }

        public override int GetHashCode()
        {
            long h = 0;
            for (int i = numWords; --i >= 0; )
            {
                h ^= bits[i];
                h = (h << 1) | ((long)((ulong)h >> 63)); // rotate left
            }
            // fold leftmost bits into right and add a constant to prevent
            // empty sets from returning 0, which is too common.
            return (int)((h >> 32) ^ h) + unchecked((int)0x98761234);
        }
    }
}
using System;
using System.Text;

namespace Org.BouncyCastle.Math.EC.Abc
{
    /**
     * A minimal fixed-point number type. A <code>SimpleBigDecimal</code> is an
     * {@link java.math.IBigInteger BigInteger} together with a fixed number of
     * binary digits to the right of the point; that digit count is the
     * <code>scale</code>. Unlike {@link java.math.BigDecimal BigDecimal}, the
     * scale is never adjusted automatically — all operands of an arithmetic
     * operation must share the same scale, and multiplying two values yields a
     * result with doubled scale.
     */
    internal class SimpleBigDecimal
        // : Number
    {
        private readonly IBigInteger bigInt;
        private readonly int scale;

        /**
         * Builds a <code>SimpleBigDecimal</code> whose numerical value equals
         * <code>val</code>, expressed at the requested <code>scale</code>.
         * @param val The value of the <code>SimpleBigDecimal</code> to be created.
         * @param scale The scale of the <code>SimpleBigDecimal</code> to be created.
         * @return The such created <code>SimpleBigDecimal</code>.
         */
        public static SimpleBigDecimal GetInstance(IBigInteger val, int scale)
        {
            return new SimpleBigDecimal(val.ShiftLeft(scale), scale);
        }

        /**
         * Constructor. The constructed value equals
         * <code>bigInt / 2<sup>scale</sup></code>.
         * @param bigInt The raw (pre-shifted) integer value.
         * @param scale The scale; must not be negative.
         */
        public SimpleBigDecimal(IBigInteger bigInt, int scale)
        {
            if (scale < 0)
            {
                throw new ArgumentException("scale may not be negative");
            }
            this.bigInt = bigInt;
            this.scale = scale;
        }

        // copy constructor
        private SimpleBigDecimal(SimpleBigDecimal other)
        {
            bigInt = other.bigInt;
            scale = other.scale;
        }

        // guards against mixing operands of different scales
        private void CheckScale(SimpleBigDecimal b)
        {
            if (scale != b.scale)
            {
                throw new ArgumentException("Only SimpleBigDecimal of same scale allowed in arithmetic operations");
            }
        }

        /** Returns this value re-expressed at <code>newScale</code>. */
        public SimpleBigDecimal AdjustScale(int newScale)
        {
            if (newScale < 0)
            {
                throw new ArgumentException("scale may not be negative");
            }
            return newScale == scale
                ? this
                : new SimpleBigDecimal(bigInt.ShiftLeft(newScale - scale), newScale);
        }

        public SimpleBigDecimal Add(SimpleBigDecimal b)
        {
            CheckScale(b);
            return new SimpleBigDecimal(bigInt.Add(b.bigInt), scale);
        }

        public SimpleBigDecimal Add(IBigInteger b)
        {
            return new SimpleBigDecimal(bigInt.Add(b.ShiftLeft(scale)), scale);
        }

        public SimpleBigDecimal Negate()
        {
            return new SimpleBigDecimal(bigInt.Negate(), scale);
        }

        public SimpleBigDecimal Subtract(SimpleBigDecimal b)
        {
            return Add(b.Negate());
        }

        public SimpleBigDecimal Subtract(IBigInteger b)
        {
            return new SimpleBigDecimal(bigInt.Subtract(b.ShiftLeft(scale)), scale);
        }

        /** Product of two values; note the result carries doubled scale. */
        public SimpleBigDecimal Multiply(SimpleBigDecimal b)
        {
            CheckScale(b);
            return new SimpleBigDecimal(bigInt.Multiply(b.bigInt), scale + scale);
        }

        public SimpleBigDecimal Multiply(IBigInteger b)
        {
            return new SimpleBigDecimal(bigInt.Multiply(b), scale);
        }

        public SimpleBigDecimal Divide(SimpleBigDecimal b)
        {
            CheckScale(b);
            // pre-shift the dividend so the quotient keeps the common scale
            IBigInteger dividend = bigInt.ShiftLeft(scale);
            return new SimpleBigDecimal(dividend.Divide(b.bigInt), scale);
        }

        public SimpleBigDecimal Divide(IBigInteger b)
        {
            return new SimpleBigDecimal(bigInt.Divide(b), scale);
        }

        public SimpleBigDecimal ShiftLeft(int n)
        {
            return new SimpleBigDecimal(bigInt.ShiftLeft(n), scale);
        }

        public int CompareTo(SimpleBigDecimal val)
        {
            CheckScale(val);
            return bigInt.CompareTo(val.bigInt);
        }

        public int CompareTo(IBigInteger val)
        {
            return bigInt.CompareTo(val.ShiftLeft(scale));
        }

        /** Largest integer not greater than this value. */
        public IBigInteger Floor()
        {
            return bigInt.ShiftRight(scale);
        }

        /** Nearest integer (round half up). */
        public IBigInteger Round()
        {
            SimpleBigDecimal half = new SimpleBigDecimal(BigInteger.One, 1);
            return Add(half.AdjustScale(scale)).Floor();
        }

        public int IntValue
        {
            get { return Floor().IntValue; }
        }

        public long LongValue
        {
            get { return Floor().LongValue; }
        }

        public int Scale
        {
            get { return scale; }
        }

        public override string ToString()
        {
            if (scale == 0)
            {
                return bigInt.ToString();
            }

            IBigInteger intPart = Floor();
            IBigInteger fraction = bigInt.Subtract(intPart.ShiftLeft(scale));
            if (bigInt.SignValue < 0)
            {
                fraction = BigInteger.One.ShiftLeft(scale).Subtract(fraction);
            }
            if (intPart.SignValue == -1 && !fraction.Equals(BigInteger.Zero))
            {
                intPart = intPart.Add(BigInteger.One);
            }

            // render the fractional part in binary, left-padded with zeroes to scale digits
            string binary = fraction.ToString(2);
            int pad = scale - binary.Length;
            char[] fractionDigits = new char[scale];
            for (int i = 0; i < pad; i++)
            {
                fractionDigits[i] = '0';
            }
            for (int j = 0; j < binary.Length; j++)
            {
                fractionDigits[pad + j] = binary[j];
            }

            StringBuilder result = new StringBuilder(intPart.ToString());
            result.Append(".");
            result.Append(new string(fractionDigits));
            return result.ToString();
        }

        public override bool Equals(object obj)
        {
            if (ReferenceEquals(this, obj))
            {
                return true;
            }
            SimpleBigDecimal other = obj as SimpleBigDecimal;
            if (other == null)
            {
                return false;
            }
            return bigInt.Equals(other.bigInt) && scale == other.scale;
        }

        public override int GetHashCode()
        {
            return bigInt.GetHashCode() ^ scale;
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

using System;
using NUnit.Framework;
using WhitespaceAnalyzer = Lucene.Net.Analysis.WhitespaceAnalyzer;
using Document = Lucene.Net.Documents.Document;
using Field = Lucene.Net.Documents.Field;
using Index = Lucene.Net.Documents.Field.Index;
using Store = Lucene.Net.Documents.Field.Store;
using IndexReader = Lucene.Net.Index.IndexReader;
using IndexWriter = Lucene.Net.Index.IndexWriter;
using MaxFieldLength = Lucene.Net.Index.IndexWriter.MaxFieldLength;
using Directory = Lucene.Net.Store.Directory;
using RAMDirectory = Lucene.Net.Store.RAMDirectory;
using LuceneTestCase = Lucene.Net.Util.LuceneTestCase;
using _TestUtil = Lucene.Net.Util._TestUtil;

namespace Lucene.Net.Search
{
    /// <summary>
    /// Tests for DocIdSet / FilteredDocIdSet behavior. The nested
    /// "AnonymousClass*" private classes are a mechanical C# port of Java
    /// anonymous inner classes from the original Lucene test, which is why
    /// each carries an InitBlock helper and an Enclosing_Instance property.
    /// </summary>
    [TestFixture]
    public class TestDocIdSet : LuceneTestCase
    {
        // A DocIdSet whose iterator simply yields doc ids 0 .. maxdoc-1.
        private class AnonymousClassDocIdSet_Renamed_Class : DocIdSet
        {
            public AnonymousClassDocIdSet_Renamed_Class(int maxdoc, TestDocIdSet enclosingInstance)
            {
                InitBlock(maxdoc, enclosingInstance);
            }

            // Iterator over the sequential doc ids of the enclosing set.
            private class AnonymousClassDocIdSetIterator : DocIdSetIterator
            {
                public AnonymousClassDocIdSetIterator(int maxdoc, AnonymousClassDocIdSet_Renamed_Class enclosingInstance)
                {
                    InitBlock(maxdoc, enclosingInstance);
                }

                // Port artifact: plays the role of the Java anonymous-class capture.
                private void InitBlock(int maxdoc, AnonymousClassDocIdSet_Renamed_Class enclosingInstance)
                {
                    this.maxdoc = maxdoc;
                    this.enclosingInstance = enclosingInstance;
                }

                private int maxdoc;
                private AnonymousClassDocIdSet_Renamed_Class enclosingInstance;

                public AnonymousClassDocIdSet_Renamed_Class Enclosing_Instance
                {
                    get { return enclosingInstance; }
                }

                // Current position; -1 means iteration has not started yet.
                internal int docid = - 1;

                /** @deprecated use {@link #DocID()} instead. */
                public override int Doc()
                {
                    return docid;
                }

                public override int DocID()
                {
                    return docid;
                }

                /// <deprecated> use {@link #NextDoc()} instead.
                /// </deprecated>
                [Obsolete("use NextDoc() instead. ")]
                public override bool Next()
                {
                    return NextDoc() != NO_MORE_DOCS;
                }

                //@Override
                public override int NextDoc()
                {
                    // advance by one; park on NO_MORE_DOCS once maxdoc is reached
                    docid++;
                    return docid < maxdoc?docid:(docid = NO_MORE_DOCS);
                }

                /// <deprecated> use {@link #Advance(int)} instead.
                /// </deprecated>
                [Obsolete("use Advance(int) instead. ")]
                public override bool SkipTo(int target)
                {
                    return Advance(target) != NO_MORE_DOCS;
                }

                //@Override
                public override int Advance(int target)
                {
                    // naive linear advance; NextDoc() returns NO_MORE_DOCS (> target) at the end
                    while (NextDoc() < target)
                    {
                    }
                    return docid;
                }
            }

            // Port artifact: plays the role of the Java anonymous-class capture.
            private void InitBlock(int maxdoc, TestDocIdSet enclosingInstance)
            {
                this.maxdoc = maxdoc;
                this.enclosingInstance = enclosingInstance;
            }

            private int maxdoc;
            private TestDocIdSet enclosingInstance;

            public TestDocIdSet Enclosing_Instance
            {
                get { return enclosingInstance; }
            }

            // @Override
            public override DocIdSetIterator Iterator()
            {
                return new AnonymousClassDocIdSetIterator(maxdoc, this);
            }
        }

        // A FilteredDocIdSet that keeps only even doc ids from the wrapped set.
        private class AnonymousClassFilteredDocIdSet : FilteredDocIdSet
        {
            private void InitBlock(TestDocIdSet enclosingInstance)
            {
                this.enclosingInstance = enclosingInstance;
            }

            private TestDocIdSet enclosingInstance;

            public TestDocIdSet Enclosing_Instance
            {
                get { return enclosingInstance; }
            }

            internal AnonymousClassFilteredDocIdSet(TestDocIdSet enclosingInstance, Lucene.Net.Search.DocIdSet Param1) : base(Param1)
            {
                InitBlock(enclosingInstance);
            }

            // @Override
            public /*protected internal*/ override bool Match(int docid)
            {
                return docid % 2 == 0; //validate only even docids
            }
        }

        // A Filter whose GetDocIdSet deliberately returns null (see TestNullDocIdSet).
        [Serializable]
        private class AnonymousClassFilter : Filter
        {
            public AnonymousClassFilter(TestDocIdSet enclosingInstance)
            {
                InitBlock(enclosingInstance);
            }

            private void InitBlock(TestDocIdSet enclosingInstance)
            {
                this.enclosingInstance = enclosingInstance;
            }

            private TestDocIdSet enclosingInstance;

            public TestDocIdSet Enclosing_Instance
            {
                get { return enclosingInstance; }
            }

            public override DocIdSet GetDocIdSet(IndexReader reader)
            {
                return null;
            }
        }

        /// <summary>
        /// Wraps a 0..9 DocIdSet in an even-only FilteredDocIdSet, advances past
        /// doc 3, and expects exactly the even docs 4, 6, 8 to remain.
        /// </summary>
        [Test]
        public virtual void TestFilteredDocIdSet()
        {
            int maxdoc = 10;
            DocIdSet innerSet = new AnonymousClassDocIdSet_Renamed_Class(maxdoc, this);
            DocIdSet filteredSet = new AnonymousClassFilteredDocIdSet(this, innerSet);
            DocIdSetIterator iter = filteredSet.Iterator();
            System.Collections.ArrayList list = new System.Collections.ArrayList();
            // start iteration at the first filtered doc >= 3, then exhaust the iterator
            int doc = iter.Advance(3);
            if (doc != DocIdSetIterator.NO_MORE_DOCS)
            {
                list.Add((System.Int32) doc);
                while ((doc = iter.NextDoc()) != DocIdSetIterator.NO_MORE_DOCS)
                {
                    list.Add((System.Int32) doc);
                }
            }
            // copy collected doc ids into an array for comparison
            int[] docs = new int[list.Count];
            int c = 0;
            System.Collections.IEnumerator intIter = list.GetEnumerator();
            while (intIter.MoveNext())
            {
                docs[c++] = ((System.Int32) intIter.Current);
            }
            int[] answer = new int[]{4, 6, 8};
            bool same = SupportClass.CollectionsHelper.Equals(answer, docs);
            if (!same)
            {
                System.Console.Out.WriteLine("answer: " + _TestUtil.ArrayToString(answer));
                System.Console.Out.WriteLine("gotten: " + _TestUtil.ArrayToString(docs));
                Assert.Fail();
            }
        }

        [Test]
        public virtual void TestNullDocIdSet()
        {
            // Tests that if a Filter produces a null DocIdSet, which is given to
            // IndexSearcher, everything works fine. This came up in LUCENE-1754.
            Directory dir = new RAMDirectory();
            IndexWriter writer = new IndexWriter(dir, new WhitespaceAnalyzer(), MaxFieldLength.UNLIMITED);
            Document doc = new Document();
            doc.Add(new Field("c", "val", Field.Store.NO, Field.Index.NOT_ANALYZED_NO_NORMS));
            writer.AddDocument(doc);
            writer.Close();

            // First verify the document is searchable.
            IndexSearcher searcher = new IndexSearcher(dir, true);
            Assert.AreEqual(1, searcher.Search(new MatchAllDocsQuery(), 10).TotalHits);

            // Now search w/ a Filter which returns a null DocIdSet
            Filter f = new AnonymousClassFilter(this);
            Assert.AreEqual(0, searcher.Search(new MatchAllDocsQuery(), f, 10).TotalHits);
            searcher.Close();
        }
    }
}
using System;
using System.Linq;
using System.Text;
using System.Drawing;
using System.Collections.Generic;

using BizHawk.Emulation.Common;
using BizHawk.Emulation.Common.IEmulatorExtensions;
using BizHawk.Client.Common;
using BizHawk.Client.Common.InputAdapterExtensions;
using BizHawk.Bizware.BizwareGL;

namespace BizHawk.Client.EmuHawk
{
	/// <summary>
	/// This is an old abstracted rendering class that the OSD system is using to get its work done.
	/// We should probably just use a GuiRenderer (it was designed to do that) although wrapping it with
	/// more information for OSDRendering could be helpful I suppose
	/// </summary>
	public interface IBlitter
	{
		IBlitterFont GetFontType(string fontType);
		void DrawString(string s, IBlitterFont font, Color color, float x, float y);
		SizeF MeasureString(string s, IBlitterFont font);
		Rectangle ClipBounds { get; set; }
	}

	// A transient on-screen message with an absolute expiry time.
	class UIMessage
	{
		public string Message;
		public DateTime ExpireAt;
	}

	// A persistent piece of on-screen text at an anchored position.
	class UIDisplay
	{
		public string Message;
		public int X;
		public int Y;
		public int Anchor;
		public Color ForeColor;
		public Color BackGround;
	}

	/// <summary>
	/// Draws on-screen-display elements (messages, frame counter, lag counter,
	/// input display, FPS, rerecord count, subtitles) through an <see cref="IBlitter"/>.
	/// </summary>
	public class OSDManager
	{
		public string FPS { get; set; }

		public IBlitterFont MessageFont;

		public void Dispose()
		{
		}

		/// <summary>Fetches the message font from the blitter; call before drawing.</summary>
		public void Begin(IBlitter blitter)
		{
			MessageFont = blitter.GetFontType(nameof(MessageFont));
		}

		public Color FixedMessagesColor
		{
			get { return Color.FromArgb(Global.Config.MessagesColor); }
		}

		public Color FixedAlertMessageColor
		{
			get { return Color.FromArgb(Global.Config.AlertMessageColor); }
		}

		public OSDManager()
		{
		}

		// Anchor values: 0 = top left, 1 = top right, 2 = bottom left, 3 = bottom right.
		// Right-hand anchors measure the string so the text ends x pixels from the right edge.
		private float GetX(IBlitter g, int x, int anchor, string message)
		{
			var size = g.MeasureString(message, MessageFont);
			switch (anchor)
			{
				default:
				case 0: // Top Left
				case 2: // Bottom Left
					return x;
				case 1: // Top Right
				case 3: // Bottom Right
					return g.ClipBounds.Width - x - size.Width;
			}
		}

		// Bottom anchors measure the string so the text ends y pixels from the bottom edge.
		private float GetY(IBlitter g, int y, int anchor, string message)
		{
			var size = g.MeasureString(message, MessageFont);
			switch (anchor)
			{
				default:
				case 0: // Top Left
				case 1: // Top Right
					return y;
				case 2: // Bottom Left
				case 3: // Bottom Right
					return g.ClipBounds.Height - y - size.Height;
			}
		}

		// Formats the frame counter text. The original had separate, byte-identical
		// branches for "recording" and "no movie"; they are collapsed here.
		private string MakeFrameCounter()
		{
			if (Global.MovieSession.Movie.IsFinished)
			{
				return $"{Global.Emulator.Frame}/{Global.MovieSession.Movie.FrameCount} (Finished)";
			}

			if (Global.MovieSession.Movie.IsPlaying)
			{
				return $"{Global.Emulator.Frame}/{Global.MovieSession.Movie.FrameCount}";
			}

			return Global.Emulator.Frame.ToString();
		}

		private List<UIMessage> messages = new List<UIMessage>(5);
		private List<UIDisplay> GUITextList = new List<UIDisplay>();

		/// <summary>Queues a transient OSD message that expires after two seconds.</summary>
		public void AddMessage(string message)
		{
			//RTC_HIJACK : Disable OSD Messages (Add this block)
			if ((bool?)(RTCV.NetCore.AllSpec.CorruptCoreSpec?[RTCV.CorruptCore.RTCSPEC.CORE_EMULATOROSDDISABLED.ToString()]) ?? false)
				return;
			//--------------------------------
			messages.Add(new UIMessage { Message = message, ExpireAt = DateTime.Now + TimeSpan.FromSeconds(2) });
		}

		/// <summary>Adds a persistent GUI text entry; lives until <see cref="ClearGUIText"/>.</summary>
		public void AddGUIText(string message, int x, int y, Color backGround, Color foreColor, int anchor)
		{
			GUITextList.Add(new UIDisplay
			{
				Message = message,
				X = x,
				Y = y,
				BackGround = backGround,
				ForeColor = foreColor,
				Anchor = anchor
			});
		}

		public void ClearGUIText()
		{
			GUITextList.Clear();
		}

		/// <summary>
		/// Draws queued OSD messages (stacked or single, per config) and all persistent GUI text.
		/// </summary>
		public void DrawMessages(IBlitter g)
		{
			if (!Global.Config.DisplayMessages)
			{
				return;
			}

			messages.RemoveAll(m => DateTime.Now > m.ExpireAt);

			if (Global.Config.StackOSDMessages)
			{
				int line = 1;
				for (int i = messages.Count - 1; i >= 0; i--, line++)
				{
					float x = GetX(g, Global.Config.DispMessagex, Global.Config.DispMessageanchor, messages[i].Message);
					float y = GetY(g, Global.Config.DispMessagey, Global.Config.DispMessageanchor, messages[i].Message);

					// Stack downward from a top anchor, upward from a bottom anchor.
					if (Global.Config.DispMessageanchor < 2)
					{
						y += (line - 1) * 18;
					}
					else
					{
						y -= (line - 1) * 18;
					}

					g.DrawString(messages[i].Message, MessageFont, FixedMessagesColor, x, y);
				}
			}
			else if (messages.Any())
			{
				// Non-stacked mode shows only the newest message. (The original also
				// applied a line offset here, but it was always zero.)
				var newest = messages[messages.Count - 1];
				float x = GetX(g, Global.Config.DispMessagex, Global.Config.DispMessageanchor, newest.Message);
				float y = GetY(g, Global.Config.DispMessagey, Global.Config.DispMessageanchor, newest.Message);
				g.DrawString(newest.Message, MessageFont, FixedMessagesColor, x, y);
			}

			foreach (var text in GUITextList)
			{
				try
				{
					float posx = GetX(g, text.X, text.Anchor, text.Message);
					float posy = GetY(g, text.Y, text.Anchor, text.Message);
					g.DrawString(text.Message, MessageFont, text.ForeColor, posx, posy);
				}
				catch (Exception)
				{
					// Best-effort: a failing entry aborts the remaining GUI text for this
					// frame (preserves the original behavior of returning here).
					return;
				}
			}
		}

		public string InputStrMovie()
		{
			var lg = Global.MovieSession.LogGeneratorInstance();
			lg.SetSource(Global.MovieSession.MovieControllerAdapter);
			return lg.GenerateInputDisplay();
		}

		public string InputStrImmediate()
		{
			var lg = Global.MovieSession.LogGeneratorInstance();
			lg.SetSource(Global.AutofireStickyXORAdapter);
			return lg.GenerateInputDisplay();
		}

		/// <summary>Input display for the previous movie frame, or "" when not in an active movie.</summary>
		public string InputPrevious()
		{
			if (Global.MovieSession.Movie.IsActive && !Global.MovieSession.Movie.IsFinished)
			{
				var lg = Global.MovieSession.LogGeneratorInstance();
				var state = Global.MovieSession.Movie.GetInputState(Global.Emulator.Frame - 1);
				if (state != null)
				{
					lg.SetSource(state);
					return lg.GenerateInputDisplay();
				}
			}

			return "";
		}

		/// <summary>Immediate input OR'd with the previous movie frame's input (used as background text).</summary>
		public string InputStrOrAll()
		{
			var m = (Global.MovieSession.Movie.IsActive && !Global.MovieSession.Movie.IsFinished && Global.Emulator.Frame > 0)
				? Global.MovieSession.Movie.GetInputState(Global.Emulator.Frame - 1)
				: Global.MovieSession.MovieControllerInstance();

			var lg = Global.MovieSession.LogGeneratorInstance();
			lg.SetSource(Global.AutofireStickyXORAdapter.Or(m));
			return lg.GenerateInputDisplay();
		}

		public string InputStrSticky()
		{
			var stickyOr = new StickyOrAdapter
			{
				Source = Global.StickyXORAdapter,
				SourceStickyOr = Global.AutofireStickyXORAdapter
			};

			return MakeStringFor(stickyOr);
		}

		private string MakeStringFor(IController controller)
		{
			var lg = Global.MovieSession.LogGeneratorInstance();
			lg.SetSource(controller);
			return lg.GenerateInputDisplay();
		}

		/// <summary>Current input AND'd with the previous frame's input; "" outside an active movie.</summary>
		public string MakeIntersectImmediatePrevious()
		{
			if (Global.MovieSession.Movie.IsActive)
			{
				var m = Global.MovieSession.Movie.IsActive && !Global.MovieSession.Movie.IsFinished
					? Global.MovieSession.Movie.GetInputState(Global.Emulator.Frame - 1)
					: Global.MovieSession.MovieControllerInstance();

				var lg = Global.MovieSession.LogGeneratorInstance();
				lg.SetSource(Global.AutofireStickyXORAdapter.And(m));
				return lg.GenerateInputDisplay();
			}

			return "";
		}

		public string MakeRerecordCount()
		{
			if (Global.MovieSession.Movie.IsActive)
			{
				return Global.MovieSession.Movie.Rerecords.ToString();
			}

			return "";
		}

		private void DrawOsdMessage(IBlitter g, string message, Color color, float x, float y)
		{
			g.DrawString(message, MessageFont, color, x, y);
		}

		/// <summary>
		/// Display all screen info objects like fps, frame counter, lag counter, and input display
		/// </summary>
		public void DrawScreenInfo(IBlitter g)
		{
			if (Global.Config.DisplayFrameCounter && !Global.Game.IsNullInstance)
			{
				string message = MakeFrameCounter();
				float x = GetX(g, Global.Config.DispFrameCx, Global.Config.DispFrameanchor, message);
				float y = GetY(g, Global.Config.DispFrameCy, Global.Config.DispFrameanchor, message);
				DrawOsdMessage(g, message, Color.FromArgb(Global.Config.MessagesColor), x, y);

				// On a lag frame, overdraw the frame number in the alert color.
				if (GlobalWin.MainForm.IsLagFrame)
				{
					DrawOsdMessage(g, Global.Emulator.Frame.ToString(), FixedAlertMessageColor, x, y);
				}
			}

			if (Global.Config.DisplayInput && !Global.Game.IsNullInstance)
			{
				if ((Global.MovieSession.Movie.IsPlaying && !Global.MovieSession.Movie.IsFinished)
					|| (Global.MovieSession.Movie.IsFinished && Global.Emulator.Frame == Global.MovieSession.Movie.InputLogLength)) // Account for the last frame of the movie, the movie state is immediately "Finished" here but we still want to show the input
				{
					var input = InputStrMovie();
					var x = GetX(g, Global.Config.DispInpx, Global.Config.DispInpanchor, input);
					var y = GetY(g, Global.Config.DispInpy, Global.Config.DispInpanchor, input);
					Color c = Color.FromArgb(Global.Config.MovieInput);
					g.DrawString(input, MessageFont, c, x, y);
				}
				else // TODO: message config -- allow setting of "previous", "mixed", and "auto"
				{
					var previousColor = Color.Orange;
					Color immediateColor = Color.FromArgb(Global.Config.MessagesColor);
					var autoColor = Color.Pink;
					var changedColor = Color.PeachPuff;

					// We need some kind of string for calculating position when right-anchoring, or something like that.
					var bgStr = InputStrOrAll();
					var x = GetX(g, Global.Config.DispInpx, Global.Config.DispInpanchor, bgStr);
					var y = GetY(g, Global.Config.DispInpy, Global.Config.DispInpanchor, bgStr);

					// These layers are rendered repeatedly at the same spot, with
					// higher-priority layers overdrawing lower ones:

					// 1) the previous frame's input (only available while in a movie)
					var previousStr = InputPrevious();
					g.DrawString(previousStr, MessageFont, previousColor, x, y);

					// 2) the immediate input -- whatever is held down interactively right
					// this moment even if the game is paused, including autohold/autofire
					var immediate = InputStrImmediate();
					g.DrawString(immediate, MessageFont, immediateColor, x, y);

					// 3) anything pressed because it is sticky (autofire and autohold).
					// Tint only what is actually held right now, so AND the sticky set
					// with the immediate input to avoid painting released buttons pink.
					var autoString = MakeStringFor(Global.StickyXORAdapter.Source.Xor(Global.AutofireStickyXORAdapter).And(Global.AutofireStickyXORAdapter));
					g.DrawString(autoString, MessageFont, autoColor, x, y);

					// 4) recolor everything that's changed from the previous input
					var immediateOverlay = MakeIntersectImmediatePrevious();
					g.DrawString(immediateOverlay, MessageFont, changedColor, x, y);
				}
			}

			if (Global.MovieSession.MultiTrack.IsActive)
			{
				float x = GetX(g, Global.Config.DispMultix, Global.Config.DispMultianchor, Global.MovieSession.MultiTrack.Status);
				float y = GetY(g, Global.Config.DispMultiy, Global.Config.DispMultianchor, Global.MovieSession.MultiTrack.Status);
				DrawOsdMessage(g, Global.MovieSession.MultiTrack.Status, FixedMessagesColor, x, y);
			}

			if (Global.Config.DisplayFPS && FPS != null)
			{
				float x = GetX(g, Global.Config.DispFPSx, Global.Config.DispFPSanchor, FPS);
				float y = GetY(g, Global.Config.DispFPSy, Global.Config.DispFPSanchor, FPS);
				DrawOsdMessage(g, FPS, FixedMessagesColor, x, y);
			}

			if (Global.Config.DisplayLagCounter && Global.Emulator.CanPollInput())
			{
				var counter = Global.Emulator.AsInputPollable().LagCount.ToString();
				var x = GetX(g, Global.Config.DispLagx, Global.Config.DispLaganchor, counter);
				var y = GetY(g, Global.Config.DispLagy, Global.Config.DispLaganchor, counter);
				DrawOsdMessage(g, counter, FixedAlertMessageColor, x, y);
			}

			if (Global.Config.DisplayRerecordCount)
			{
				string rerec = MakeRerecordCount();
				float x = GetX(g, Global.Config.DispRecx, Global.Config.DispRecanchor, rerec);
				float y = GetY(g, Global.Config.DispRecy, Global.Config.DispRecanchor, rerec);
				DrawOsdMessage(g, rerec, FixedMessagesColor, x, y);
			}

			if (Global.ClientControls["Autohold"] || Global.ClientControls["Autofire"])
			{
				var disp = new StringBuilder("Held: ");

				foreach (string sticky in Global.StickyXORAdapter.CurrentStickies)
				{
					disp.Append(sticky).Append(' ');
				}

				foreach (string autoSticky in Global.AutofireStickyXORAdapter.CurrentStickies)
				{
					disp
						.Append("Auto-")
						.Append(autoSticky)
						.Append(' ');
				}

				var message = disp.ToString();

				g.DrawString(
					message,
					MessageFont,
					Color.White,
					GetX(g, Global.Config.DispAutoholdx, Global.Config.DispAutoholdanchor, message),
					GetY(g, Global.Config.DispAutoholdy, Global.Config.DispAutoholdanchor, message));
			}

			if (Global.MovieSession.Movie.IsActive && Global.Config.DisplaySubtitles)
			{
				var subList = Global.MovieSession.Movie.Subtitles.GetSubtitles(Global.Emulator.Frame);

				foreach (var sub in subList)
				{
					DrawOsdMessage(g, sub.Message, Color.FromArgb((int)sub.Color), sub.X, sub.Y);
				}
			}
		}
	}
}
/*
  Copyright (c) Microsoft Corporation. All rights reserved.
  Licensed under the MIT License. See License.txt in the project root for license information.
*/

using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Net;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
using System.Web;
using System.Web.Routing;
using Adxstudio.Xrm.Cms;
using Adxstudio.Xrm.Notes;
using Adxstudio.Xrm.Services.Query;
using Adxstudio.Xrm.Text;
using Adxstudio.Xrm.Web.Handlers;
using Microsoft.Xrm.Portal.Configuration;
using Microsoft.Xrm.Portal.Web.Routing;
using Microsoft.Xrm.Sdk;
using Microsoft.Xrm.Sdk.Messages;
using Microsoft.Xrm.Sdk.Query;

namespace Adxstudio.Xrm.Activity
{
	/// <summary>
	/// Retrieves activitymimeattachment records (email attachments) from CRM and
	/// streams them to HTTP responses.
	/// </summary>
	public class ActivityMimeAttachmentDataAdapter
	{
		// MIME types the portal UI renders inline as images (ordinal, case-sensitive
		// match, same semantics as the original List<string>.Contains).
		private static readonly HashSet<string> ImageMimeTypes =
			new HashSet<string>(StringComparer.Ordinal) { "image/jpeg", "image/gif", "image/png" };

		private readonly IDataAdapterDependencies _dependencies;

		public ActivityMimeAttachmentDataAdapter(IDataAdapterDependencies dependencies)
		{
			_dependencies = dependencies;
		}

		/// <summary>Gets all attachments related to the given activity.</summary>
		/// <param name="regardingId">Id of the activity the attachments belong to.</param>
		public IEnumerable<IAttachment> GetAttachments(Guid regardingId)
		{
			return RetrieveAttachments(regardingId);
		}

		/// <summary>Streams a single attachment to the HTTP response.</summary>
		/// <param name="entity">The activitymimeattachment record.</param>
		/// <param name="webfile">Optional web file whose adx_contentdisposition controls inline vs attachment.</param>
		public void DownloadAttachment(HttpContextBase context, Entity entity, Entity webfile = null)
		{
			var attachment = GetAttachment(entity);

			DownloadFromCRM(context, attachment, webfile);
		}

		private IAttachment GetAttachment(Entity entity)
		{
			// Lazily materialize the attachment so the body is only decoded when needed.
			return new Attachment(() => GetAttachmentFile(entity));
		}

		private void DownloadFromCRM(HttpContextBase context, IAttachment attachment, Entity webfile)
		{
			if (attachment == null)
			{
				context.Response.StatusCode = (int)HttpStatusCode.NotFound;
				return;
			}

			var crmFile = attachment as Attachment;

			// No body (or an empty one) means there is nothing to stream.
			if (crmFile == null || crmFile.AttachmentBody == null || crmFile.AttachmentBody.Length == 0)
			{
				context.Response.StatusCode = (int)HttpStatusCode.NoContent;
				return;
			}

			var data = attachment.AttachmentBody;

			// Authenticated responses must not be cached by shared proxies.
			var defaultCacheability = context.User.Identity.IsAuthenticated
				? HttpCacheability.Private
				: HttpCacheability.Public;

			SetResponseParameters(context.Response, defaultCacheability, attachment.Entity, webfile, data);

			Utility.Write(context.Response, data);
		}

		// Projects an activitymimeattachment record into an Attachment DTO.
		// (The original also took an unused Guid parameter; it has been removed.)
		private IAttachment GetAttachmentFile(Entity activityMimeAttachment)
		{
			// "filesize" is a signed int in CRM; negative values fall back to zero.
			ulong fileSize;
			if (!ulong.TryParse(activityMimeAttachment.GetAttributeValue<int>("filesize").ToString(), out fileSize))
			{
				fileSize = 0;
			}

			var attachmentSize = new FileSize(fileSize);

			Entity attachment = null;
			if (activityMimeAttachment.Attributes.ContainsKey("attachmentid"))
			{
				attachment = new Entity("activitymimeattachment", activityMimeAttachment.GetAttributeValue<EntityReference>("attachmentid").Id);
			}

			var mimeType = activityMimeAttachment.GetAttributeValue<string>("mimetype");

			return new Attachment
			{
				AttachmentContentType = mimeType,
				AttachmentFileName = activityMimeAttachment.GetAttributeValue<string>("filename"),
				AttachmentIsImage = ImageMimeTypes.Contains(mimeType),
				AttachmentSize = attachmentSize,
				AttachmentSizeDisplay = attachmentSize.ToString(),
				AttachmentUrl = attachment == null
					? string.Empty
					: attachment.GetFileAttachmentUrl(_dependencies.GetWebsite()),
				AttachmentBody = GetAttachmentBody(activityMimeAttachment),
				Entity = activityMimeAttachment
			};
		}

		// Decodes the base-64 "body" attribute; null when the attribute is absent.
		private byte[] GetAttachmentBody(Entity attachment)
		{
			if (!attachment.Attributes.ContainsKey("body"))
			{
				return null;
			}

			// Get the string representation of the attachment body.
			var body = attachment.GetAttributeValue<string>("body");

			// Decode into a byte array and return.
			return Convert.FromBase64String(body);
		}

		private static QueryExpression BuildActivityMimeAttachmentsQuery(Guid regardingId)
		{
			// Query activitymimeattachment for all attachments related to the activity.
			var query = new QueryExpression("activitymimeattachment");
			query.ColumnSet.AddColumns("filename", "filesize", "mimetype", "objectid", "attachmentid");
			query.Criteria.AddCondition("objectid", ConditionOperator.Equal, regardingId.ToString());

			return query;
		}

		private IEnumerable<IAttachment> RetrieveAttachments(Guid regardingId)
		{
			var query = BuildActivityMimeAttachmentsQuery(regardingId);

			// Execute the query.
			var serviceContext = _dependencies.GetServiceContext();
			var retrieveMultipleResponse = (RetrieveMultipleResponse)serviceContext.Execute(new RetrieveMultipleRequest { Query = query });

			// Project the response into Attachment objects. Materialize once so callers
			// enumerating the result repeatedly do not re-run the projection.
			return retrieveMultipleResponse.EntityCollection.Entities
				.Select(attachment => GetAttachmentFile(attachment))
				.ToArray();
		}

		private static void SetResponseParameters(HttpResponseBase response, HttpCacheability defaultCacheability, Entity attachment, Entity webfile, ICollection<byte> data)
		{
			response.StatusCode = (int)HttpStatusCode.OK;
			response.ContentType = attachment.GetAttributeValue<string>("mimetype");

			var contentDispositionText = "inline";

			if (webfile != null)
			{
				var contentDispositionOptionSetValue = webfile.GetAttributeValue<OptionSetValue>("adx_contentdisposition");

				if (contentDispositionOptionSetValue != null)
				{
					switch (contentDispositionOptionSetValue.Value)
					{
						case 756150000: // inline
							contentDispositionText = "inline";
							break;
						case 756150001: // attachment
							contentDispositionText = "attachment";
							break;
						default:
							contentDispositionText = "inline";
							break;
					}
				}
			}

			// Never render HTML or opaque binary inline -- force a download instead
			// (mitigates stored-XSS via uploaded HTML attachments).
			if (string.Equals(response.ContentType, "text/html", StringComparison.OrdinalIgnoreCase)
				|| string.Equals(response.ContentType, "application/octet-stream", StringComparison.OrdinalIgnoreCase))
			{
				contentDispositionText = "attachment";
			}

			var contentDisposition = new StringBuilder(contentDispositionText);

			AppendFilenameToContentDisposition(attachment, contentDisposition);

			response.AppendHeader("Content-Disposition", contentDisposition.ToString());
			response.AppendHeader("Content-Length", data.Count.ToString(CultureInfo.InvariantCulture));

			var section = PortalCrmConfigurationManager.GetPortalCrmSection();
			var policy = section.CachePolicy.Annotation;

			Utility.SetResponseCachePolicy(policy, response, defaultCacheability);
		}

		private static void AppendFilenameToContentDisposition(Entity attachment, StringBuilder contentDisposition)
		{
			var filename = attachment.GetAttributeValue<string>("filename");

			if (string.IsNullOrEmpty(filename))
			{
				return;
			}

			// Escape any quotes in the filename. (There should rarely if ever be any, but still.)
			var escaped = filename.Replace(@"""", @"\""");

			// Quote the filename parameter value.
			contentDisposition.AppendFormat(@";filename=""{0}""", escaped);
		}
	}
}
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. /*============================================================ ** ** ** ** Purpose: Provides some basic access to some environment ** functionality. ** ** ============================================================*/ namespace System { using System.Buffers; using System.IO; using System.Security; using System.Resources; using System.Globalization; using System.Collections; using System.Collections.Generic; using System.Text; using System.Configuration.Assemblies; using System.Runtime.InteropServices; using System.Reflection; using System.Diagnostics; using Microsoft.Win32; using System.Runtime.CompilerServices; using System.Threading; using System.Runtime.ConstrainedExecution; using System.Runtime.Versioning; public enum EnvironmentVariableTarget { Process = 0, User = 1, Machine = 2, } internal static partial class Environment { // Assume the following constants include the terminating '\0' - use <, not <= // System environment variables are stored in the registry, and have // a size restriction that is separate from both normal environment // variables and registry value name lengths, according to MSDN. // MSDN doesn't detail whether the name is limited to 1024, or whether // that includes the contents of the environment variable. private const int MaxSystemEnvVariableLength = 1024; private const int MaxUserEnvVariableLength = 255; private const int MaxMachineNameLength = 256; // Looks up the resource string value for key. 
// // if you change this method's signature then you must change the code that calls it // in excep.cpp and probably you will have to visit mscorlib.h to add the new signature // as well as metasig.h to create the new signature type internal static String GetResourceStringLocal(String key) { return SR.GetResourceString(key); } /*==================================TickCount=================================== **Action: Gets the number of ticks since the system was started. **Returns: The number of ticks since the system was started. **Arguments: None **Exceptions: None ==============================================================================*/ public static extern int TickCount { [MethodImplAttribute(MethodImplOptions.InternalCall)] get; } // Terminates this process with the given exit code. [DllImport(JitHelpers.QCall, CharSet = CharSet.Unicode)] internal static extern void _Exit(int exitCode); public static void Exit(int exitCode) { _Exit(exitCode); } public static extern int ExitCode { [MethodImplAttribute(MethodImplOptions.InternalCall)] get; [MethodImplAttribute(MethodImplOptions.InternalCall)] set; } // Note: The CLR's Watson bucketization code looks at the caller of the FCALL method // to assign blame for crashes. Don't mess with this, such as by making it call // another managed helper method, unless you consult with some CLR Watson experts. [MethodImplAttribute(MethodImplOptions.InternalCall)] public static extern void FailFast(String message); // This overload of FailFast will allow you to specify the exception object // whose bucket details *could* be used when undergoing the failfast process. // To be specific: // // 1) When invoked from within a managed EH clause (fault/finally/catch), // if the exception object is preallocated, the runtime will try to find its buckets // and use them. If the exception object is not preallocated, it will use the bucket // details contained in the object (if any). 
// // 2) When invoked from outside the managed EH clauses (fault/finally/catch), // if the exception object is preallocated, the runtime will use the callsite's // IP for bucketing. If the exception object is not preallocated, it will use the bucket // details contained in the object (if any). [MethodImplAttribute(MethodImplOptions.InternalCall)] public static extern void FailFast(String message, Exception exception); #if FEATURE_WIN32_REGISTRY // This is only used by RegistryKey on Windows. public static String ExpandEnvironmentVariables(String name) { if (name == null) throw new ArgumentNullException(nameof(name)); if (name.Length == 0) { return name; } int currentSize = 100; StringBuilder blob = new StringBuilder(currentSize); // A somewhat reasonable default size int size; blob.Length = 0; size = Win32Native.ExpandEnvironmentStrings(name, blob, currentSize); if (size == 0) Marshal.ThrowExceptionForHR(Marshal.GetHRForLastWin32Error()); while (size > currentSize) { currentSize = size; blob.Capacity = currentSize; blob.Length = 0; size = Win32Native.ExpandEnvironmentStrings(name, blob, currentSize); if (size == 0) Marshal.ThrowExceptionForHR(Marshal.GetHRForLastWin32Error()); } return blob.ToString(); } #endif // FEATURE_WIN32_REGISTRY [DllImport(JitHelpers.QCall, CharSet = CharSet.Unicode)] private static extern Int32 GetProcessorCount(); public static int ProcessorCount { get { return GetProcessorCount(); } } /*==============================GetCommandLineArgs============================== **Action: Gets the command line and splits it appropriately to deal with whitespace, ** quotes, and escape characters. **Returns: A string array containing your command line arguments. **Arguments: None **Exceptions: None. ==============================================================================*/ public static String[] GetCommandLineArgs() { /* * There are multiple entry points to a hosted app. 
* The host could use ::ExecuteAssembly() or ::CreateDelegate option * ::ExecuteAssembly() -> In this particular case, the runtime invokes the main method based on the arguments set by the host, and we return those arguments * * ::CreateDelegate() -> In this particular case, the host is asked to create a * delegate based on the appDomain, assembly and methodDesc passed to it. * which the caller uses to invoke the method. In this particular case we do not have * any information on what arguments would be passed to the delegate. * So our best bet is to simply use the commandLine that was used to invoke the process. * in case it is present. */ if (s_CommandLineArgs != null) return (string[])s_CommandLineArgs.Clone(); return GetCommandLineArgsNative(); } [MethodImplAttribute(MethodImplOptions.InternalCall)] private static extern String[] GetCommandLineArgsNative(); private static string[] s_CommandLineArgs = null; private static void SetCommandLineArgs(string[] cmdLineArgs) { s_CommandLineArgs = cmdLineArgs; } private unsafe static char[] GetEnvironmentCharArray() { char[] block = null; RuntimeHelpers.PrepareConstrainedRegions(); char* pStrings = null; try { pStrings = Win32Native.GetEnvironmentStrings(); if (pStrings == null) { throw new OutOfMemoryException(); } // Format for GetEnvironmentStrings is: // [=HiddenVar=value\0]* [Variable=value\0]* \0 // See the description of Environment Blocks in MSDN's // CreateProcess page (null-terminated array of null-terminated strings). // Search for terminating \0\0 (two unicode \0's). 
char* p = pStrings; while (!(*p == '\0' && *(p + 1) == '\0')) p++; int len = (int)(p - pStrings + 1); block = new char[len]; fixed (char* pBlock = block) string.wstrcpy(pBlock, pStrings, len); } finally { if (pStrings != null) Win32Native.FreeEnvironmentStrings(pStrings); } return block; } /*===================================NewLine==================================== **Action: A property which returns the appropriate newline string for the given ** platform. **Returns: \r\n on Win32. **Arguments: None. **Exceptions: None. ==============================================================================*/ public static String NewLine { get { #if PLATFORM_WINDOWS return "\r\n"; #else return "\n"; #endif // PLATFORM_WINDOWS } } /*===================================Version==================================== **Action: Returns the COM+ version struct, describing the build number. **Returns: **Arguments: **Exceptions: ==============================================================================*/ public static Version Version { get { // Previously this represented the File version of mscorlib.dll. Many other libraries in the framework and outside took dependencies on the first three parts of this version // remaining constant throughout 4.x. From 4.0 to 4.5.2 this was fine since the file version only incremented the last part.Starting with 4.6 we switched to a file versioning // scheme that matched the product version. In order to preserve compatibility with existing libraries, this needs to be hard-coded. 
return new Version(4, 0, 30319, 42000); } } #if !FEATURE_PAL private static Lazy<bool> s_IsWindows8OrAbove = new Lazy<bool>(() => { ulong conditionMask = Win32Native.VerSetConditionMask(0, Win32Native.VER_MAJORVERSION, Win32Native.VER_GREATER_EQUAL); conditionMask = Win32Native.VerSetConditionMask(conditionMask, Win32Native.VER_MINORVERSION, Win32Native.VER_GREATER_EQUAL); conditionMask = Win32Native.VerSetConditionMask(conditionMask, Win32Native.VER_SERVICEPACKMAJOR, Win32Native.VER_GREATER_EQUAL); conditionMask = Win32Native.VerSetConditionMask(conditionMask, Win32Native.VER_SERVICEPACKMINOR, Win32Native.VER_GREATER_EQUAL); // Windows 8 version is 6.2 var version = new Win32Native.OSVERSIONINFOEX { MajorVersion = 6, MinorVersion = 2, ServicePackMajor = 0, ServicePackMinor = 0 }; return Win32Native.VerifyVersionInfoW(version, Win32Native.VER_MAJORVERSION | Win32Native.VER_MINORVERSION | Win32Native.VER_SERVICEPACKMAJOR | Win32Native.VER_SERVICEPACKMINOR, conditionMask); }); internal static bool IsWindows8OrAbove => s_IsWindows8OrAbove.Value; #endif #if FEATURE_COMINTEROP // Does the current version of Windows have Windows Runtime suppport? 
private static Lazy<bool> s_IsWinRTSupported = new Lazy<bool>(() => { return WinRTSupported(); }); internal static bool IsWinRTSupported => s_IsWinRTSupported.Value; [DllImport(JitHelpers.QCall, CharSet = CharSet.Unicode)] [return: MarshalAs(UnmanagedType.Bool)] private static extern bool WinRTSupported(); #endif // FEATURE_COMINTEROP /*==================================StackTrace================================== **Action: **Returns: **Arguments: **Exceptions: ==============================================================================*/ public static String StackTrace { [MethodImpl(MethodImplOptions.NoInlining)] // Prevent inlining from affecting where the stacktrace starts get { return Internal.Runtime.Augments.EnvironmentAugments.StackTrace; } } internal static String GetStackTrace(Exception e, bool needFileInfo) { // Note: Setting needFileInfo to true will start up COM and set our // apartment state. Try to not call this when passing "true" // before the EE's ExecuteMainMethod has had a chance to set up the // apartment state. -- StackTrace st; if (e == null) st = new StackTrace(needFileInfo); else st = new StackTrace(e, needFileInfo); // Do no include a trailing newline for backwards compatibility return st.ToString(System.Diagnostics.StackTrace.TraceFormat.Normal); } public static extern bool HasShutdownStarted { [MethodImplAttribute(MethodImplOptions.InternalCall)] get; } internal static bool UserInteractive { get { return true; } } public static int CurrentManagedThreadId { get { return Thread.CurrentThread.ManagedThreadId; } } internal static extern int CurrentProcessorNumber { [MethodImplAttribute(MethodImplOptions.InternalCall)] get; } // The upper bits of t_executionIdCache are the executionId. The lower bits of // the t_executionIdCache are counting down to get it periodically refreshed. 
// TODO: Consider flushing the executionIdCache on Wait operations or similar
// actions that are likely to result in changing the executing core
//
// Layout of t_executionIdCache:
//   bits [31..16] — cached execution (processor) id
//   bits [15..0]  — countdown; when it reaches 0 the id is re-read
[ThreadStatic]
private static int t_executionIdCache;

private const int ExecutionIdCacheShift = 16;
private const int ExecutionIdCacheCountDownMask = (1 << ExecutionIdCacheShift) - 1;
// How many reads of CurrentExecutionId happen between refreshes of the cache.
private const int ExecutionIdRefreshRate = 5000;

// Re-reads the current processor number, repacks the thread-static cache
// (id in the high bits, a fresh countdown in the low bits) and returns the id.
private static int RefreshExecutionId()
{
    int executionId = CurrentProcessorNumber;

    // On Unix, CurrentProcessorNumber is implemented in terms of sched_getcpu, which
    // doesn't exist on all platforms. On those it doesn't exist on, GetCurrentProcessorNumber
    // returns -1. As a fallback in that case and to spread the threads across the buckets
    // by default, we use the current managed thread ID as a proxy.
    if (executionId < 0) executionId = Environment.CurrentManagedThreadId;

    // The countdown must fit entirely inside the low-bit mask.
    Debug.Assert(ExecutionIdRefreshRate <= ExecutionIdCacheCountDownMask);

    // Mask with Int32.MaxValue to ensure the execution Id is not negative
    t_executionIdCache = ((executionId << ExecutionIdCacheShift) & Int32.MaxValue) | ExecutionIdRefreshRate;

    return executionId;
}

// Cached processor number used as a hint for which per-core stack to access. It is periodically
// refreshed to trail the actual thread core affinity.
// Cheap accessor for the cached execution id; decrements the embedded
// countdown and only re-reads the processor number when it hits zero.
internal static int CurrentExecutionId
{
    [MethodImpl(MethodImplOptions.AggressiveInlining)]
    get
    {
        int executionIdCache = t_executionIdCache--;
        if ((executionIdCache & ExecutionIdCacheCountDownMask) == 0)
            return RefreshExecutionId();
        return (executionIdCache >> ExecutionIdCacheShift);
    }
}

// Reads a process-scoped environment variable; returns null when not set.
public static string GetEnvironmentVariable(string variable)
{
    if (variable == null)
    {
        throw new ArgumentNullException(nameof(variable));
    }

    // separated from the EnvironmentVariableTarget overload to help with tree shaking in common case
    return GetEnvironmentVariableCore(variable);
}

// Reads an environment variable from the given target (Process/User/Machine).
internal static string GetEnvironmentVariable(string variable, EnvironmentVariableTarget target)
{
    if (variable == null)
    {
        throw new ArgumentNullException(nameof(variable));
    }

    ValidateTarget(target);

    return GetEnvironmentVariableCore(variable, target);
}

// Sets (or, with a null/empty value, clears) a process-scoped variable.
public static void SetEnvironmentVariable(string variable, string value)
{
    ValidateVariableAndValue(variable, ref value);

    // separated from the EnvironmentVariableTarget overload to help with tree shaking in common case
    SetEnvironmentVariableCore(variable, value);
}

// Sets (or clears) a variable in the given target scope.
internal static void SetEnvironmentVariable(string variable, string value, EnvironmentVariableTarget target)
{
    ValidateVariableAndValue(variable, ref value);
    ValidateTarget(target);

    SetEnvironmentVariableCore(variable, value, target);
}

// Validates the variable name and normalizes an empty value to null
// (null means "delete the variable" further down the call chain).
private static void ValidateVariableAndValue(string variable, ref string value)
{
    if (variable == null)
    {
        throw new ArgumentNullException(nameof(variable));
    }
    if (variable.Length == 0)
    {
        throw new ArgumentException(SR.Argument_StringZeroLength, nameof(variable));
    }
    if (variable[0] == '\0')
    {
        throw new ArgumentException(SR.Argument_StringFirstCharIsZero, nameof(variable));
    }
    if (variable.IndexOf('=') != -1)
    {
        // '=' is the name/value separator in the environment block, so it
        // cannot appear in a variable name.
        throw new ArgumentException(SR.Argument_IllegalEnvVarName, nameof(variable));
    }
    if (string.IsNullOrEmpty(value) || value[0] == '\0')
    {
        // Explicitly null out value if it's empty
        value = null;
    }
}

// Rejects any target outside the three defined enum members.
private static void ValidateTarget(EnvironmentVariableTarget target)
{
    if (target != EnvironmentVariableTarget.Process &&
        target != EnvironmentVariableTarget.Machine &&
        target != EnvironmentVariableTarget.User)
    {
        throw new ArgumentOutOfRangeException(nameof(target), target, SR.Format(SR.Arg_EnumIllegalVal, target));
    }
}

// Process-scope read: tries a small stack buffer first, growing via the
// helper below only when the value doesn't fit.
private static string GetEnvironmentVariableCore(string variable)
{
    Span<char> buffer = stackalloc char[128]; // A somewhat reasonable default size
    return GetEnvironmentVariableCoreHelper(variable, buffer);
}

// Calls the Win32 API with the supplied buffer; on overflow rents a
// correctly-sized pooled array and recurses exactly once more.
private static string GetEnvironmentVariableCoreHelper(string variable, Span<char> buffer)
{
    int requiredSize = Win32Native.GetEnvironmentVariable(variable, buffer);

    if (requiredSize == 0 && Marshal.GetLastWin32Error() == Win32Native.ERROR_ENVVAR_NOT_FOUND)
    {
        // Variable does not exist.
        return null;
    }

    if (requiredSize > buffer.Length)
    {
        char[] chars = ArrayPool<char>.Shared.Rent(requiredSize);
        try
        {
            return GetEnvironmentVariableCoreHelper(variable, chars);
        }
        finally
        {
            ArrayPool<char>.Shared.Return(chars);
        }
    }

    return new string(buffer.Slice(0, requiredSize));
}

// Target-aware read. Without registry support (or inside an AppX container)
// only Process is meaningful; other targets yield null.
private static string GetEnvironmentVariableCore(string variable, EnvironmentVariableTarget target)
{
    if (target == EnvironmentVariableTarget.Process)
        return GetEnvironmentVariableCore(variable);

#if FEATURE_WIN32_REGISTRY
    if (AppDomain.IsAppXModel())
#endif
    {
        return null;
    }
#if FEATURE_WIN32_REGISTRY
    RegistryKey baseKey;
    string keyName;
    if (target == EnvironmentVariableTarget.Machine)
    {
        baseKey = Registry.LocalMachine;
        keyName = @"System\CurrentControlSet\Control\Session Manager\Environment";
    }
    else if (target == EnvironmentVariableTarget.User)
    {
        baseKey = Registry.CurrentUser;
        keyName = "Environment";
    }
    else
    {
        throw new ArgumentException(SR.Format(SR.Arg_EnumIllegalVal, (int)target));
    }

    using (RegistryKey environmentKey = baseKey.OpenSubKey(keyName, writable: false))
    {
        return environmentKey?.GetValue(variable) as string;
    }
#endif
}

// Lazily enumerates the process environment block as key/value pairs.
internal static IEnumerable<KeyValuePair<string, string>> EnumerateEnvironmentVariables()
{
    // Format for GetEnvironmentStrings is:
    // (=HiddenVar=value\0 | Variable=value\0)* \0
    // See the description of Environment Blocks in MSDN's
    // CreateProcess page (null-terminated array of null-terminated strings).
    // Note the =HiddenVar's aren't always at the beginning.

    // Copy strings out, parsing into pairs and inserting into the table.
    // The first few environment variable entries start with an '='.
    // The current working directory of every drive (except for those drives
    // you haven't cd'ed into in your DOS window) are stored in the
    // environment block (as =C:=pwd) and the program's exit code is
    // as well (=ExitCode=00000000).
    char[] block = GetEnvironmentCharArray();
    for (int i = 0; i < block.Length; i++)
    {
        int startKey = i;

        // Skip to key. On some old OS, the environment block can be corrupted.
        // Some will not have '=', so we need to check for '\0'.
        while (block[i] != '=' && block[i] != '\0') i++;
        if (block[i] == '\0') continue;

        // Skip over environment variables starting with '='
        if (i - startKey == 0)
        {
            while (block[i] != 0) i++;
            continue;
        }

        string key = new string(block, startKey, i - startKey);
        i++; // skip over '='

        int startValue = i;
        while (block[i] != 0) i++; // Read to end of this entry
        string value = new string(block, startValue, i - startValue); // skip over 0 handled by for loop's i++

        yield return new KeyValuePair<string, string>(key, value);
    }
}

// Target-aware enumeration; Process avoids the registry path entirely.
internal static IEnumerable<KeyValuePair<string, string>> EnumerateEnvironmentVariables(EnvironmentVariableTarget target)
{
    if (target == EnvironmentVariableTarget.Process)
        return EnumerateEnvironmentVariables();
    return EnumerateEnvironmentVariablesFromRegistry(target);
}

// Enumerates User/Machine variables out of the registry. With no registry
// support this only validates the target and yields nothing.
internal static IEnumerable<KeyValuePair<string, string>> EnumerateEnvironmentVariablesFromRegistry(EnvironmentVariableTarget target)
{
#if FEATURE_WIN32_REGISTRY
    if (AppDomain.IsAppXModel())
#endif
    {
        // Without registry support we have nothing to return
        ValidateTarget(target);
        yield break;
    }
#if FEATURE_WIN32_REGISTRY
    RegistryKey baseKey;
    string keyName;
    if (target == EnvironmentVariableTarget.Machine)
    {
        baseKey = Registry.LocalMachine;
        keyName = @"System\CurrentControlSet\Control\Session Manager\Environment";
    }
    else if (target == EnvironmentVariableTarget.User)
    {
        baseKey = Registry.CurrentUser;
        keyName = @"Environment";
    }
    else
    {
        throw new ArgumentOutOfRangeException(nameof(target), target, SR.Format(SR.Arg_EnumIllegalVal, target));
    }

    using (RegistryKey environmentKey = baseKey.OpenSubKey(keyName, writable: false))
    {
        if (environmentKey != null)
        {
            foreach (string name in environmentKey.GetValueNames())
            {
                string value = environmentKey.GetValue(name, "").ToString();
                yield return new KeyValuePair<string, string>(name, value);
            }
        }
    }
#endif // FEATURE_WIN32_REGISTRY
}

// Process-scope write; null/empty value deletes the variable.
private static void SetEnvironmentVariableCore(string variable, string value)
{
    // explicitly null out value if it is the empty string.
    if (string.IsNullOrEmpty(value) || value[0] == '\0')
        value = null;

    if (!Win32Native.SetEnvironmentVariable(variable, value))
    {
        int errorCode = Marshal.GetLastWin32Error();

        switch (errorCode)
        {
            case Win32Native.ERROR_ENVVAR_NOT_FOUND:
                // Allow user to try to clear an environment variable
                return;
            case Win32Native.ERROR_FILENAME_EXCED_RANGE:
                // The error message from Win32 is "The filename or extension is too long",
                // which is not accurate.
                throw new ArgumentException(SR.Format(SR.Argument_LongEnvVarValue));
            case Win32Native.ERROR_NOT_ENOUGH_MEMORY:
            case Win32Native.ERROR_NO_SYSTEM_RESOURCES:
                throw new OutOfMemoryException(Interop.Kernel32.GetMessage(errorCode));
            default:
                throw new ArgumentException(Interop.Kernel32.GetMessage(errorCode));
        }
    }
}

// Target-aware write: Process goes through the Win32 API above; User/Machine
// write the registry and broadcast WM_SETTINGCHANGE so other apps reload.
private static void SetEnvironmentVariableCore(string variable, string value, EnvironmentVariableTarget target)
{
    if (target == EnvironmentVariableTarget.Process)
    {
        SetEnvironmentVariableCore(variable, value);
        return;
    }

#if FEATURE_WIN32_REGISTRY
    if (AppDomain.IsAppXModel())
#endif
    {
        // other targets ignored
        return;
    }
#if FEATURE_WIN32_REGISTRY
    // explicitly null out value if it is the empty string.
    if (string.IsNullOrEmpty(value) || value[0] == '\0')
        value = null;

    RegistryKey baseKey;
    string keyName;

    if (target == EnvironmentVariableTarget.Machine)
    {
        baseKey = Registry.LocalMachine;
        keyName = @"System\CurrentControlSet\Control\Session Manager\Environment";
    }
    else if (target == EnvironmentVariableTarget.User)
    {
        // User-wide environment variables stored in the registry are limited to 255 chars for the environment variable name.
        const int MaxUserEnvVariableLength = 255;
        if (variable.Length >= MaxUserEnvVariableLength)
        {
            throw new ArgumentException(SR.Argument_LongEnvVarValue, nameof(variable));
        }

        baseKey = Registry.CurrentUser;
        keyName = "Environment";
    }
    else
    {
        throw new ArgumentException(SR.Format(SR.Arg_EnumIllegalVal, (int)target));
    }

    using (RegistryKey environmentKey = baseKey.OpenSubKey(keyName, writable: true))
    {
        if (environmentKey != null)
        {
            if (value == null)
            {
                environmentKey.DeleteValue(variable, throwOnMissingValue: false);
            }
            else
            {
                environmentKey.SetValue(variable, value);
            }
        }
    }

    // send a WM_SETTINGCHANGE message to all windows
    IntPtr r = Interop.User32.SendMessageTimeout(new IntPtr(Interop.User32.HWND_BROADCAST), Interop.User32.WM_SETTINGCHANGE, IntPtr.Zero, "Environment", 0, 1000, IntPtr.Zero);
    Debug.Assert(r != IntPtr.Zero, "SetEnvironmentVariable failed: " + Marshal.GetLastWin32Error());
#endif // FEATURE_WIN32_REGISTRY
}
}
}
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using BTDB.Buffer;
using BTDB.KVDBLayer;
using BTDB.StreamLayer;

namespace BTDB.ChunkCache
{
    // Disk-backed chunk cache keyed by fixed-size (20-byte) keys.
    // Values are appended to "cav" files; the key->location index is
    // periodically persisted to a "chi" hash-index file.
    public class DiskChunkCache : IChunkCache, IDisposable
    {
        readonly IFileCollection _fileCollection;
        readonly int _keySize;                     // always 20 (enforced in ctor)
        readonly long _cacheCapacity;              // user-requested capacity in bytes
        readonly int _sizeLimitOfOneValueFile;     // rollover threshold for a value file
        readonly int _maxValueFileCount;           // compaction keeps at most this many value files
        // In-memory index: key -> (file, offset, length, access rate).
        readonly ConcurrentDictionary<ByteStructs.Key20, CacheValue> _cache = new ConcurrentDictionary<ByteStructs.Key20, CacheValue>(new ByteStructs.Key20EqualityComparer());
        // Metadata for every file in the collection that belongs to this cache.
        readonly ConcurrentDictionary<uint, IFileInfo> _fileInfos = new ConcurrentDictionary<uint, IFileInfo>();
        uint _cacheValueFileId;                    // id of the value file currently appended to
        IFileCollectionFile _cacheValueFile;
        AbstractBufferedWriter _cacheValueWriter;  // also serves as the lock object for appends
        long _fileGeneration;                      // monotonically increasing file generation counter
        Task _compactionTask;
        CancellationTokenSource _compactionCts;
        readonly object _startNewValueFileLocker = new object();

        // File signature; files without this magic are ignored by LoadContent.
        internal static readonly byte[] MagicStartOfFile = { (byte)'B', (byte)'T', (byte)'D', (byte)'B', (byte)'C', (byte)'h', (byte)'u', (byte)'n', (byte)'k', (byte)'C', (byte)'a', (byte)'c', (byte)'h', (byte)'e', (byte)'1' };

        // Location + usage record for one cached chunk.
        struct CacheValue
        {
            internal uint AccessRate;     // incremented on Get, drives compaction eviction
            internal uint FileId;
            internal uint FileOfs;
            internal uint ContentLength;
        }

        // Opens (or re-creates) the cache over the given file collection.
        // Throws for unsupported key sizes and capacities below 1000 bytes.
        public DiskChunkCache(IFileCollection fileCollection, int keySize, long cacheCapacity)
        {
            if (keySize != 20)
                throw new NotSupportedException("Only keySize of 20 (Usefull for SHA1) is supported for now");
            if (cacheCapacity < 1000)
                throw new ArgumentOutOfRangeException(nameof(cacheCapacity), "Minimum for cache capacity is 1kB");
            _fileCollection = fileCollection;
            _keySize = keySize;
            _cacheCapacity = cacheCapacity;
            cacheCapacity = cacheCapacity / 1000 * (980 - keySize); // decrease for size of HashIndex
            if (cacheCapacity / 8 > int.MaxValue)
            {
                _maxValueFileCount = checked((int)(cacheCapacity / int.MaxValue));
                _sizeLimitOfOneValueFile = int.MaxValue;
            }
            else
            {
                _maxValueFileCount = 8;
                _sizeLimitOfOneValueFile = (int)(cacheCapacity / 8);
            }
            try
            {
                LoadContent();
            }
            catch
            {
                // Corrupted index is not fatal — start with an empty cache.
                _cache.Clear();
            }
            if (_cache.Count == 0)
            {
                // Nothing usable was loaded: wipe all known files and reset.
                foreach (var collectionFile in _fileInfos.Keys)
                {
                    _fileCollection.GetFile(collectionFile).Remove();
                }
                _fileInfos.Clear();
                _fileGeneration = 0;
            }
        }

        // Scans the file collection, records per-file metadata, then replays the
        // newest hash-index file (if any) into the in-memory _cache.
        void LoadContent()
        {
            AbstractBufferedReader reader;
            foreach (var collectionFile in _fileCollection.Enumerate())
            {
                reader = collectionFile.GetExclusiveReader();
                if (!reader.CheckMagic(MagicStartOfFile)) continue; // Don't touch alien files
                var fileType = (DiskChunkFileType)reader.ReadUInt8();
                IFileInfo fileInfo;
                switch (fileType)
                {
                    case DiskChunkFileType.HashIndex:
                        fileInfo = new FileHashIndex(reader);
                        break;
                    case DiskChunkFileType.PureValues:
                        fileInfo = new FilePureValues(reader);
                        break;
                    default:
                        fileInfo = UnknownFile.Instance;
                        break;
                }
                if (_fileGeneration < fileInfo.Generation) _fileGeneration = fileInfo.Generation;
                _fileInfos.TryAdd(collectionFile.Index, fileInfo);
            }
            // Replay only the hash index with the highest generation.
            var hashFilePair = _fileInfos.Where(f => f.Value.FileType == DiskChunkFileType.HashIndex).OrderByDescending(
                f => f.Value.Generation).FirstOrDefault();
            if (hashFilePair.Value == null) return;
            reader = _fileCollection.GetFile(hashFilePair.Key).GetExclusiveReader();
            FileHashIndex.SkipHeader(reader);
            if (((FileHashIndex)hashFilePair.Value).KeySize != _keySize) return;
            var keyBuf = ByteBuffer.NewSync(new byte[_keySize]);
            while (true)
            {
                // Records are (FileOfs, FileId, AccessRate, ContentLength, key);
                // a zero FileOfs marks end of file (see StoreHashIndex).
                var cacheValue = new CacheValue();
                cacheValue.FileOfs = reader.ReadVUInt32();
                if (cacheValue.FileOfs == 0) break;
                cacheValue.FileId = reader.ReadVUInt32();
                cacheValue.AccessRate = reader.ReadVUInt32();
                cacheValue.ContentLength = reader.ReadVUInt32();
                reader.ReadBlock(keyBuf);
                _cache.TryAdd(new ByteStructs.Key20(keyBuf), cacheValue);
            }
        }

        // Stores content under key. No-op if the key is already cached.
        // Appends to the current value file, rolling over to a new one when
        // the size limit would be exceeded.
        public void Put(ByteBuffer key, ByteBuffer content)
        {
            if (key.Length != _keySize) throw new ArgumentException("Key has wrong Length not equal to KeySize");
            if (content.Length == 0) throw new ArgumentException("Empty Content cannot be stored");
            var k = new ByteStructs.Key20(key);
            CacheValue cacheValue;
            if (_cache.TryGetValue(k, out cacheValue))
            {
                return;
            }
            cacheValue.AccessRate = 1;
        again:
            var writer = _cacheValueWriter;
            while (writer == null || writer.GetCurrentPosition() + content.Length > _sizeLimitOfOneValueFile)
            {
                StartNewValueFile();
                writer = _cacheValueWriter;
            }
            lock (writer)
            {
                // The writer may have been swapped for a new file between the
                // read above and taking the lock — retry from scratch if so.
                if (writer != _cacheValueWriter) goto again;
                cacheValue.FileId = _cacheValueFileId;
                cacheValue.FileOfs = (uint)_cacheValueWriter.GetCurrentPosition();
                _cacheValueWriter.WriteBlock(content);
            }
            cacheValue.ContentLength = (uint)content.Length;
            _cache.TryAdd(k, cacheValue);
        }

        // Rolls over to a fresh value file and kicks off a background
        // compaction pass. Serialized by _startNewValueFileLocker.
        void StartNewValueFile()
        {
            lock (_startNewValueFileLocker)
            {
                QuickFinishCompaction();
                var fileInfo = new FilePureValues(AllocNewFileGeneration());
                if (_cacheValueWriter != null)
                {
                    lock (_cacheValueWriter)
                    {
                        _cacheValueFile.HardFlush();
                        SetNewValueFile();
                    }
                }
                else
                {
                    SetNewValueFile();
                }
                fileInfo.WriteHeader(_cacheValueWriter);
                _fileInfos.TryAdd(_cacheValueFileId, fileInfo);
                _compactionCts = new CancellationTokenSource();
                _compactionTask = Task.Factory.StartNew(CompactionCore, _compactionCts.Token,
                                                        TaskCreationOptions.LongRunning, TaskScheduler.Default);
            }
        }

        // Creates the next "cav" value file and points the appender at it.
        void SetNewValueFile()
        {
            _cacheValueFile = _fileCollection.AddFile("cav");
            _cacheValueFileId = _cacheValueFile.Index;
            _cacheValueWriter = _cacheValueFile.GetAppenderWriter();
        }

        // (total access rate, file id) pair used when ranking value files.
        internal struct RateFilePair
        {
            internal RateFilePair(ulong accessRate, uint fileId)
            {
                AccessRate = accessRate;
                FileId = fileId;
            }
            internal ulong AccessRate;
            internal uint FileId;
        }

        // Background compaction: ranks value files by summed access rate,
        // drops the least-used ones (keeping only the hottest fifth of the
        // last surviving file), then persists a fresh hash index.
        void CompactionCore()
        {
            var token = _compactionCts.Token;
            var usage = new Dictionary<uint, ulong>();
            var finishedUsageStats = true;
            uint maxAccessRate = 0;
            foreach (var cacheValue in _cache.Values)
            {
                if (token.IsCancellationRequested)
                {
                    // Incomplete stats — remember so we don't delete files that
                    // merely look unused below.
                    finishedUsageStats = false;
                    break;
                }
                ulong accessRateRunningTotal;
                usage.TryGetValue(cacheValue.FileId, out accessRateRunningTotal);
                uint accessRate = cacheValue.AccessRate;
                if (maxAccessRate < accessRate) maxAccessRate = accessRate;
                accessRateRunningTotal += accessRate;
                usage[cacheValue.FileId] = accessRateRunningTotal;
            }
            var usageList = new List<RateFilePair>();
            var fileIdsToRemove = new List<uint>();
            foreach (var fileInfo in _fileInfos)
            {
                if (fileInfo.Value.FileType != DiskChunkFileType.PureValues) continue;
                if (fileInfo.Key == _cacheValueFileId) continue; // never touch the active file
                ulong accessRate;
                if (!usage.TryGetValue(fileInfo.Key, out accessRate) && finishedUsageStats)
                {
                    // No live entry references this file — safe to delete.
                    fileIdsToRemove.Add(fileInfo.Key);
                    continue;
                }
                usageList.Add(new RateFilePair(accessRate, fileInfo.Key));
            }
            // Sort descending by access rate; evict from the tail.
            usageList.Sort((a, b) => a.AccessRate > b.AccessRate ? -1 : a.AccessRate < b.AccessRate ? 1 : 0);
            while (usageList.Count >= _maxValueFileCount)
            {
                var fileId = usageList.Last().FileId;
                if (usageList.Count == _maxValueFileCount)
                    PreserveJustMostOftenUsed(fileId);
                else
                    ClearFileFromCache(fileId);
                fileIdsToRemove.Add(fileId);
                usageList.RemoveAt(usageList.Count - 1);
            }
            FlushCurrentValueFile();
            StoreHashIndex();
            foreach (var fileid in fileIdsToRemove)
            {
                _fileCollection.GetFile(fileid).Remove();
                _fileInfos.TryRemove(fileid);
            }
        }

        // Hard-flushes the active value file under the writer lock, if any.
        void FlushCurrentValueFile()
        {
            var writer = _cacheValueWriter;
            if (writer != null)
                lock (writer)
                {
                    _cacheValueFile.HardFlush();
                }
        }

        // Before deleting file fileId, copies roughly its top 20% most
        // accessed entries into the active value file; all other entries
        // (or all, if the copy cannot proceed) are dropped from the cache.
        void PreserveJustMostOftenUsed(uint fileId)
        {
            var freqencies = new List<uint>();
            foreach (var itemPair in _cache)
            {
                if (itemPair.Value.FileId == fileId)
                {
                    freqencies.Add(itemPair.Value.AccessRate);
                }
            }
            // Threshold = access rate at the 20th-percentile-from-top position.
            var preserveRate = freqencies.OrderByDescending(r => r).Skip(freqencies.Count / 5).FirstOrDefault();
            foreach (var itemPair in _cache)
            {
                if (itemPair.Value.FileId == fileId)
                {
                    if (preserveRate < itemPair.Value.AccessRate)
                    {
                        var cacheValue = itemPair.Value;
                        var content = new byte[cacheValue.ContentLength];
                        _fileCollection.GetFile(cacheValue.FileId).RandomRead(content, 0, (int)cacheValue.ContentLength, cacheValue.FileOfs);
                        var writer = _cacheValueWriter;
                        if (writer == null)
                        {
                            goto remove;
                        }
                        lock (writer)
                        {
                            if (writer != _cacheValueWriter)
                            {
                                goto remove;
                            }
                            if (writer.GetCurrentPosition() + cacheValue.ContentLength > _sizeLimitOfOneValueFile)
                            {
                                goto remove;
                            }
                            cacheValue.FileId = _cacheValueFileId;
                            cacheValue.FileOfs = (uint)_cacheValueWriter.GetCurrentPosition();
                            _cacheValueWriter.WriteBlock(content);
                        }
                        _cache.TryUpdate(itemPair.Key, cacheValue, itemPair.Value);
                        continue;
                    }
                remove:
                    _cache.TryRemove(itemPair.Key);
                }
            }
        }

        // Drops every cache entry that lives in the given file.
        void ClearFileFromCache(uint fileId)
        {
            foreach (var itemPair in _cache)
            {
                if (itemPair.Value.FileId == fileId)
                {
                    _cache.TryRemove(itemPair.Key);
                }
            }
        }

        // Cancels any running compaction and waits for it to finish,
        // swallowing its exceptions (best-effort shutdown).
        void QuickFinishCompaction()
        {
            var compactionCTS = _compactionCts;
            if (compactionCTS != null) compactionCTS.Cancel();
            var task = _compactionTask;
            if (task != null)
            {
                try
                {
                    task.Wait();
                }
                catch
                {
                }
            }
        }

        // Looks up key; returns its content, or an empty buffer on miss or on
        // any internal failure. The returned task is always already completed.
        public Task<ByteBuffer> Get(ByteBuffer key)
        {
            if (key.Length != _keySize) throw new ArgumentException("Key has wrong Length not equal to KeySize");
            var tcs = new TaskCompletionSource<ByteBuffer>();
            try
            {
                var k = new ByteStructs.Key20(key);
                CacheValue cacheValue;
                if (_cache.TryGetValue(k, out cacheValue))
                {
                    var newCacheValue = cacheValue;
                    newCacheValue.AccessRate = cacheValue.AccessRate + 1;
                    _cache.TryUpdate(k, newCacheValue, cacheValue); // It is not problem if update fails, it will have just lower access rate then real
                    var result = new byte[cacheValue.ContentLength];
                    _fileCollection.GetFile(cacheValue.FileId).RandomRead(result, 0, (int)cacheValue.ContentLength, cacheValue.FileOfs);
                    tcs.SetResult(ByteBuffer.NewAsync(result));
                    return tcs.Task;
                }
            }
            catch
            {
                // It is better to return nothing than throw exception
            }
            tcs.SetResult(ByteBuffer.NewEmpty());
            return tcs.Task;
        }

        // Human-readable diagnostics: per-file sizes/types plus totals.
        public string CalcStats()
        {
            var res = new StringBuilder();
            res.AppendFormat("Files {0} FileInfos {1} FileGeneration {2} Cached items {3}{4}", _fileCollection.GetCount(),
                             _fileInfos.Count, _fileGeneration, _cache.Count, Environment.NewLine);
            var totalSize = 0UL;
            var totalControledSize = 0UL;
            foreach (var fileCollectionFile in _fileCollection.Enumerate())
            {
                IFileInfo fileInfo;
                _fileInfos.TryGetValue(fileCollectionFile.Index, out fileInfo);
                var size = fileCollectionFile.GetSize();
                totalSize += size;
                if (fileInfo == null)
                {
                    res.AppendFormat("{0} Size: {1} Unknown to cache{2}", fileCollectionFile.Index, size, Environment.NewLine);
                }
                else
                {
                    res.AppendFormat("{0} Size: {1} Type: {2} {3}", fileCollectionFile.Index, size, fileInfo.FileType, Environment.NewLine);
                    totalControledSize += size;
                }
            }
            res.AppendFormat("TotalSize {0} TotalControledSize {1} Limit {2}{3}", totalSize, totalControledSize, _cacheCapacity, Environment.NewLine);
            Debug.Assert(totalControledSize <= (ulong)_cacheCapacity);
            return res.ToString();
        }

        // Stops compaction, flushes pending value data and persists the index.
        public void Dispose()
        {
            lock (_startNewValueFileLocker)
            {
                QuickFinishCompaction();
                FlushCurrentValueFile();
                StoreHashIndex();
            }
        }

        // Writes a fresh "chi" hash-index snapshot of _cache (replacing any
        // older index files) in the record format read back by LoadContent.
        void StoreHashIndex()
        {
            RemoveAllHashIndexAndUnknownFiles();
            var file = _fileCollection.AddFile("chi");
            var writer = file.GetAppenderWriter();
            var keyCount = _cache.Count;
            var fileInfo = new FileHashIndex(AllocNewFileGeneration(), _keySize, keyCount);
            _fileInfos.TryAdd(file.Index, fileInfo);
            fileInfo.WriteHeader(writer);
            var keyBuf = ByteBuffer.NewSync(new byte[_keySize]);
            foreach (var cachePair in _cache)
            {
                cachePair.Key.FillBuffer(keyBuf);
                writer.WriteVUInt32(cachePair.Value.FileOfs);
                writer.WriteVUInt32(cachePair.Value.FileId);
                writer.WriteVUInt32(cachePair.Value.AccessRate);
                writer.WriteVUInt32(cachePair.Value.ContentLength);
                writer.WriteBlock(keyBuf);
            }
            writer.WriteVUInt32(0); // Zero FileOfs as end of file mark
            file.HardFlush();
        }

        // Deletes all index files and files of unknown type from disk and
        // from the in-memory metadata map.
        void RemoveAllHashIndexAndUnknownFiles()
        {
            foreach (var infoPair in _fileInfos)
            {
                if (infoPair.Value.FileType == DiskChunkFileType.HashIndex ||
                    infoPair.Value.FileType == DiskChunkFileType.Unknown)
                {
                    var fileId = infoPair.Key;
                    _fileCollection.GetFile(fileId).Remove();
                    _fileInfos.TryRemove(fileId);
                }
            }
        }

        // Thread-safe generation allocator for new files.
        long AllocNewFileGeneration()
        {
            return Interlocked.Increment(ref _fileGeneration);
        }
    }
}
using UnityEngine;
using System.Collections;
using System; // BUG FIX: required for Console used in ExampleVariableTut below

// Tutorial scratch file: small Unity example behaviours. Syntax errors from
// the original (noted inline) have been fixed so the file compiles.

// script to change the color of the cube on keypress (R/G/B).
public class ExampleBehaviourSrc : MonoBehaviour
{
    void Update()
    {
        // NOTE(review): gameObject.renderer is the legacy (pre-Unity 5) component
        // shortcut; newer Unity requires GetComponent<Renderer>() — confirm target version.
        if (Input.GetKeyDown(KeyCode.R))
        {
            gameObject.renderer.material.color = Color.red;
        }
        else if (Input.GetKeyDown(KeyCode.G))
        {
            gameObject.renderer.material.color = Color.green;
        }
        else if (Input.GetKeyDown(KeyCode.B))
        {
            gameObject.renderer.material.color = Color.blue;
        }
    }
}

// Demonstrates fields, locals and a simple helper function.
public class ExampleVariableTut : MonoBehaviour
{
    int myInt = 56;

    void Start()
    {
        myInt = 12;
        // can use Console.ReadLine() to supply the params for the function
        int staff = MultiplyByTwo(4, 12);
        Console.WriteLine(staff);
        Debug.Log(myInt * 45);
    }

    // Multiplies the two arguments.
    // BUG FIX: parameter list was "(int num, int, secondnum)", which does not compile.
    int MultiplyByTwo(int num, int secondnum)
    {
        int ret;
        ret = num * secondnum;
        return ret;
    }

    // BUG FIX: modifier order was "void static"; must be "static void".
    static void customFunction()
    {
    }
}

// Coffee-temperature example: cools over time, checked on Space.
public class ExampleBasicSynatx : MonoBehaviour
{
    /* can find
       transform.position.x
       transform.position.y */

    // coffee example game
    public float cofTemp = 85.0f;
    public float hotLimitTemp = 70.0f;
    public float coldLimitTemp = 40.0f;

    void Update()
    {
        if (Input.GetKeyDown(KeyCode.Space))
        {
            checkTemperature();
        }
        // Cool down 5 degrees per second, frame-rate independent.
        cofTemp -= Time.deltaTime * 5f;
    }

    // Logs whether the coffee is too hot, too cold, or just right.
    void checkTemperature()
    {
        if (cofTemp > hotLimitTemp)
        {
            Debug.Log("Too Hot");
        }
        else if (cofTemp < coldLimitTemp)
        {
            Debug.Log("Too Cold");
        }
        else
        {
            Debug.Log("Just right");
        }
    }

    // BUG FIX: the original declared "int" as return type but returned a jagged
    // array built from a string[] row mixed into an int[][], which does not
    // compile. It now returns the computed int, matching the signature.
    int myFunction(int age, string name)
    {
        int staff = age * 56;
        return staff;
    }
}

// Demonstrates do/while, while, for and foreach loops.
public class ExampleLoopExers : MonoBehaviour
{
    int staffs = 100;

    public void Start()
    {
        bool sC = false;

        // do while — body runs once even though the condition is false
        do
        {
            print("staff");
        } while (sC == true);

        // while
        while (staffs > 0)
        {
            staffs--;
            print(staffs);
        }

        // for loop (staffs is 0 here after the while above, so no iterations)
        for (int i = 0; i < staffs; i++)
        {
            Debug.Log("Creating stafffs " + i);
        }

        // foreach — unassigned slots are null and print as empty
        string[] strings = new string[10];
        strings[0] = "First";
        strings[1] = "second";
        strings[2] = "third";
        strings[3] = "forth";
        strings[4] = "fif";
        strings[5] = "sixth";
        strings[6] = "seventh";
        foreach (string s in strings)
        {
            print(s);
        }
        // Stoped on 06. Loops
        // Go to the next one
    }
}

// Demonstrates access modifiers and calling into other class instances.
public class ExampleScopeAndAccess : MonoBehaviour
{
    public int alpha = 5;
    private int beta = 0;
    private int gamma = 5;

    //new class instance
    private ExampleLoopExers myLoopClass;
    //new class instance
    private AnotherClass myAnotherClass;

    void Start()
    {
        alpha = 45;

        // use new class instance
        myAnotherClass = new AnotherClass();
        myAnotherClass.FruitMachine(3, 54);

        // use new class instance
        myLoopClass = new ExampleLoopExers();
        myLoopClass.Start();
    }

    void Example(int a, int b)
    {
        int ans;
        ans = a * b * alpha;
        Debug.Log(ans);
    }

    void Update()
    {
        Debug.Log("Alpha is set to: " + alpha);
    }
}

// Plain (non-MonoBehaviour) class with public and private members.
public class AnotherClass
{
    public int apples;
    public int bananas;

    private int stapler;
    private int sellotape;

    public void FruitMachine(int a, int b)
    {
        int ans;
        ans = a + b;
        Debug.Log("Fruit total: " + ans);
    }

    private void OfficeSort(int a, int b)
    {
        int answer;
        answer = a + b;
        Debug.Log("Office supplies total: " + answer);
    }
}

// Main functions of Unity engine
public class MajorParts : MonoBehaviour
{
    // Awake and Start are called on script initialization, just once
    void Awake()
    {
        //good for references between scripts, initialisation
        //ex. Enemy appear, get their ammo
    }

    void Start()
    {
        // runs only when/ if script component is enabled
        //ex. Enemy gets ability to shoot
    }

    // Update called after Awake and Start and called repeatedly every frame
    void Update()
    {
        // called every frame
        // Used for regular updates
        /* -moving non-physics objs
           -simple timers
           -receiving input */
        // UPDATE INTERVAL TIME VARY
        Debug.Log("Update time: " + Time.deltaTime);
    }

    // BUG FIX: was misspelled "FixedUpdata", so Unity never invoked the callback.
    void FixedUpdate()
    {
        // Called Every Physics Step
        // Fixed Update intervals are consistent
        // Used for regular updates such as :
        // - Adjusting Physics (Rigidbody) objects
        Debug.Log("FixedUpdate time: " + Time.deltaTime);
    }
}

// super impressive ass science around the physics in Unity
// https://unity3d.com/learn/tutorials/modules/beginner/scripting/vector-maths-dot-cross-products?playlist=17117
/* - Vector2
   - Vector3
   - Vector3.Dot(VectorA, VectorB);
   - Vector3.Cross(VectorA, VectorB); */

// cool thing attach it to the light and pressing space can switch toggle it
public class ExampleEnAbleingComponents : MonoBehaviour
{
    private Light myLight;

    void Start()
    {
        myLight = GetComponent<Light>();
    }

    void Update()
    {
        if (Input.GetKeyDown(KeyCode.Space))
        {
            myLight.enabled = !myLight.enabled;
        }
    }
}

// cool thing attach it to the light and pressing space can switch toggle it
public class ExampleGameObjActiveInactive : MonoBehaviour
{
    public GameObject myObject;

    void Start()
    {
        // on load will set it inactive
        gameObject.SetActive(false);
        // you can check the state of the game objs with
        Debug.Log("Active self: " + myObject.activeSelf);
        Debug.Log("Active in hierarchy: " + myObject.activeInHierarchy);
    }
}

// 13. stoped on translate and rotate
using System; using System.Text; namespace Lucene.Net.Search { /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using AttributeSource = Lucene.Net.Util.AttributeSource; using BytesRef = Lucene.Net.Util.BytesRef; using Term = Lucene.Net.Index.Term; using Terms = Lucene.Net.Index.Terms; using TermsEnum = Lucene.Net.Index.TermsEnum; using ToStringUtils = Lucene.Net.Util.ToStringUtils; /// <summary> /// A <see cref="Query"/> that matches documents within an range of terms. /// /// <para/>This query matches the documents looking for terms that fall into the /// supplied range according to /// <see cref="byte.CompareTo(byte)"/>. It is not intended /// for numerical ranges; use <see cref="NumericRangeQuery"/> instead. /// /// <para/>This query uses the /// <see cref="MultiTermQuery.CONSTANT_SCORE_AUTO_REWRITE_DEFAULT"/> /// rewrite method. 
/// <para/>
/// @since 2.9
/// </summary>
public class TermRangeQuery : MultiTermQuery
{
    // Range endpoints; a null endpoint means that side of the range is "open".
    private readonly BytesRef lowerTerm; // LUCENENET: marked readonly
    private readonly BytesRef upperTerm; // LUCENENET: marked readonly
    private readonly bool includeLower; // LUCENENET: marked readonly
    private readonly bool includeUpper; // LUCENENET: marked readonly

    /// <summary>
    /// Constructs a query selecting all terms greater/equal than <paramref name="lowerTerm"/>
    /// but less/equal than <paramref name="upperTerm"/>.
    ///
    /// <para/>
    /// If an endpoint is <c>null</c>, it is said
    /// to be "open". Either or both endpoints may be open. Open endpoints may not
    /// be exclusive (you can't select all but the first or last term without
    /// explicitly specifying the term to exclude.)
    /// </summary>
    /// <param name="field"> The field that holds both lower and upper terms. </param>
    /// <param name="lowerTerm">
    ///          The term text at the lower end of the range. </param>
    /// <param name="upperTerm">
    ///          The term text at the upper end of the range. </param>
    /// <param name="includeLower">
    ///          If true, the <paramref name="lowerTerm"/> is
    ///          included in the range. </param>
    /// <param name="includeUpper">
    ///          If true, the <paramref name="upperTerm"/> is
    ///          included in the range. </param>
    public TermRangeQuery(string field, BytesRef lowerTerm, BytesRef upperTerm, bool includeLower, bool includeUpper)
        : base(field)
    {
        this.lowerTerm = lowerTerm;
        this.upperTerm = upperTerm;
        this.includeLower = includeLower;
        this.includeUpper = includeUpper;
    }

    /// <summary>
    /// Factory that creates a new <see cref="TermRangeQuery"/> using <see cref="string"/>s for term text.
    /// A <c>null</c> string maps to an open (null) endpoint.
    /// </summary>
    public static TermRangeQuery NewStringRange(string field, string lowerTerm, string upperTerm, bool includeLower, bool includeUpper)
    {
        BytesRef lowerBytes = lowerTerm is null ? null : new BytesRef(lowerTerm);
        BytesRef upperBytes = upperTerm is null ? null : new BytesRef(upperTerm);
        return new TermRangeQuery(field, lowerBytes, upperBytes, includeLower, includeUpper);
    }

    /// <summary>
    /// Returns the lower value of this range query </summary>
    public virtual BytesRef LowerTerm => lowerTerm;

    /// <summary>
    /// Returns the upper value of this range query </summary>
    public virtual BytesRef UpperTerm => upperTerm;

    /// <summary>
    /// Returns <c>true</c> if the lower endpoint is inclusive </summary>
    public virtual bool IncludesLower => includeLower;

    /// <summary>
    /// Returns <c>true</c> if the upper endpoint is inclusive </summary>
    public virtual bool IncludesUpper => includeUpper;

    protected override TermsEnum GetTermsEnum(Terms terms, AttributeSource atts)
    {
        // An inverted range (lower endpoint sorts after upper) can never match.
        if (lowerTerm != null && upperTerm != null && lowerTerm.CompareTo(upperTerm) > 0)
        {
            return TermsEnum.EMPTY;
        }

        TermsEnum allTerms = terms.GetEnumerator();

        // Fully open range: every term matches, no filtering wrapper required.
        bool lowerIsOpen = lowerTerm == null || (includeLower && lowerTerm.Length == 0);
        if (lowerIsOpen && upperTerm == null)
        {
            return allTerms;
        }

        return new TermRangeTermsEnum(allTerms, lowerTerm, upperTerm, includeLower, includeUpper);
    }

    /// <summary>
    /// Renders one endpoint for <see cref="ToString(string)"/>: <c>null</c> becomes the
    /// open-range marker <c>*</c>, and a literal <c>*</c> term is escaped as <c>\*</c>.
    /// </summary>
    private static string FormatEndpoint(BytesRef endpoint)
    {
        if (endpoint is null)
        {
            return "*";
        }
        // TODO: all these toStrings for queries should just output the bytes, it might not be UTF-8!
        string text = Term.ToString(endpoint);
        return "*".Equals(text, StringComparison.Ordinal) ? "\\*" : text;
    }

    /// <summary>
    /// Prints a user-readable version of this query. </summary>
    public override string ToString(string field)
    {
        StringBuilder sb = new StringBuilder();
        if (!Field.Equals(field, StringComparison.Ordinal))
        {
            sb.Append(Field);
            sb.Append(":");
        }
        sb.Append(includeLower ? '[' : '{');
        sb.Append(FormatEndpoint(lowerTerm));
        sb.Append(" TO ");
        sb.Append(FormatEndpoint(upperTerm));
        sb.Append(includeUpper ? ']' : '}');
        sb.Append(ToStringUtils.Boost(Boost));
        return sb.ToString();
    }

    public override int GetHashCode()
    {
        // Same constants/order as the original Lucene implementation so that
        // hash codes stay stable across versions.
        int hash = base.GetHashCode();
        hash = 31 * hash + (includeLower ? 1231 : 1237);
        hash = 31 * hash + (includeUpper ? 1231 : 1237);
        hash = 31 * hash + (lowerTerm is null ? 0 : lowerTerm.GetHashCode());
        hash = 31 * hash + (upperTerm is null ? 0 : upperTerm.GetHashCode());
        return hash;
    }

    public override bool Equals(object obj)
    {
        if (ReferenceEquals(this, obj))
        {
            return true;
        }
        if (!base.Equals(obj) || GetType() != obj.GetType())
        {
            return false;
        }

        TermRangeQuery that = (TermRangeQuery)obj;
        if (includeLower != that.includeLower || includeUpper != that.includeUpper)
        {
            return false;
        }

        bool lowerMatches = lowerTerm is null ? that.lowerTerm is null : lowerTerm.Equals(that.lowerTerm);
        if (!lowerMatches)
        {
            return false;
        }
        bool upperMatches = upperTerm is null ? that.upperTerm is null : upperTerm.Equals(that.upperTerm);
        return upperMatches;
    }
}
}
/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
==================================================================== */

namespace NPOI.HSSF.UserModel
{
    using System;
    //using NPOI.HSSF.Model;
    using NPOI.HSSF.Record;
    using NPOI.HSSF.Record.Aggregates;
    using NPOI.SS;
    using NPOI.SS.Formula;
    using NPOI.SS.Formula.PTG;
    using NPOI.SS.Formula.Udf;
    using NPOI.SS.UserModel;
    using NPOI.Util;

    /**
     * Internal POI use only.
     *
     * Adapts an HSSFWorkbook (user model) and its InternalWorkbook (record
     * stream) to the formula parsing/rendering/evaluation interfaces, mostly
     * by delegating each call to the appropriate underlying book.
     *
     * @author Josh Micich
     */
    public class HSSFEvaluationWorkbook : IFormulaRenderingWorkbook, IEvaluationWorkbook, IFormulaParsingWorkbook
    {
        private static POILogger logger = POILogFactory.GetLogger(typeof(HSSFEvaluationWorkbook));

        // High-level (user model) workbook and its low-level record-stream counterpart.
        private HSSFWorkbook _uBook;
        private NPOI.HSSF.Model.InternalWorkbook _iBook;

        /**
         * Wraps the given workbook. Returns null when passed null.
         */
        public static HSSFEvaluationWorkbook Create(NPOI.SS.UserModel.IWorkbook book)
        {
            return book == null ? null : new HSSFEvaluationWorkbook((HSSFWorkbook)book);
        }

        private HSSFEvaluationWorkbook(HSSFWorkbook book)
        {
            _uBook = book;
            _iBook = book.Workbook;
        }

        public int GetExternalSheetIndex(String sheetName)
        {
            return _iBook.CheckExternSheet(_uBook.GetSheetIndex(sheetName));
        }

        public int GetExternalSheetIndex(String workbookName, String sheetName)
        {
            return _iBook.GetExternalSheetIndex(workbookName, sheetName);
        }

        public ExternalName GetExternalName(int externSheetIndex, int externNameIndex)
        {
            return _iBook.GetExternalName(externSheetIndex, externNameIndex);
        }

        public NameXPtg GetNameXPtg(String name)
        {
            return _iBook.GetNameXPtg(name, _uBook.GetUDFFinder());
        }

        /**
         * Looks up a defined name scoped to the given sheet (case-insensitive);
         * falls back to workbook scope (sheetIndex == -1) when not found locally.
         */
        public IEvaluationName GetName(String name, int sheetIndex)
        {
            for (int idx = 0; idx < _iBook.NumNames; idx++)
            {
                NameRecord record = _iBook.GetNameRecord(idx);
                // NameRecord sheet numbers are 1-based; 0 means workbook scope.
                if (record.SheetNumber == sheetIndex + 1
                    && name.Equals(record.NameText, StringComparison.OrdinalIgnoreCase))
                {
                    return new Name(record, idx);
                }
            }
            return sheetIndex == -1 ? null : GetName(name, -1);
        }

        public int GetSheetIndex(IEvaluationSheet evalSheet)
        {
            return _uBook.GetSheetIndex(((HSSFEvaluationSheet)evalSheet).HSSFSheet);
        }

        public int GetSheetIndex(String sheetName)
        {
            return _uBook.GetSheetIndex(sheetName);
        }

        public String GetSheetName(int sheetIndex)
        {
            return _uBook.GetSheetName(sheetIndex);
        }

        public IEvaluationSheet GetSheet(int sheetIndex)
        {
            return new HSSFEvaluationSheet((HSSFSheet)_uBook.GetSheetAt(sheetIndex));
        }

        public int ConvertFromExternSheetIndex(int externSheetIndex)
        {
            return _iBook.GetSheetIndexFromExternSheetIndex(externSheetIndex);
        }

        public ExternalSheet GetExternalSheet(int externSheetIndex)
        {
            return _iBook.GetExternalSheet(externSheetIndex);
        }

        public String ResolveNameXText(NameXPtg n)
        {
            return _iBook.ResolveNameXText(n.SheetRefIndex, n.NameIndex);
        }

        public String GetSheetNameByExternSheet(int externSheetIndex)
        {
            return _iBook.FindSheetNameFromExternSheet(externSheetIndex);
        }

        public String GetNameText(NamePtg namePtg)
        {
            return _iBook.GetNameRecord(namePtg.Index).NameText;
        }

        public IEvaluationName GetName(NamePtg namePtg)
        {
            int recordIndex = namePtg.Index;
            return new Name(_iBook.GetNameRecord(recordIndex), recordIndex);
        }

        /**
         * Returns the cached parsed tokens for a formula cell.
         *
         * NOTE: re-parsing cell.CellFormula via HSSFFormulaParser also works but
         * wastes time; moreover (see Bugzilla 48036) POI can evaluate
         * IntersectionPtg without yet being able to parse it, so the cached
         * tokens from the record aggregate are used instead.
         */
        public Ptg[] GetFormulaTokens(IEvaluationCell evalCell)
        {
            ICell cell = ((HSSFEvaluationCell)evalCell).HSSFCell;
            FormulaRecordAggregate aggregate = (FormulaRecordAggregate)((HSSFCell)cell).CellValueRecord;
            return aggregate.FormulaTokens;
        }

        public UDFFinder GetUDFFinder()
        {
            return _uBook.GetUDFFinder();
        }

        /**
         * Lightweight IEvaluationName view over a NameRecord and its index
         * within the workbook's name table.
         */
        private class Name : IEvaluationName
        {
            private readonly NameRecord _record;
            private readonly int _index;

            public Name(NameRecord nameRecord, int index)
            {
                _record = nameRecord;
                _index = index;
            }

            public Ptg[] NameDefinition
            {
                get { return _record.NameDefinition; }
            }

            public String NameText
            {
                get { return _record.NameText; }
            }

            public bool HasFormula
            {
                get { return _record.HasFormula; }
            }

            public bool IsFunctionName
            {
                get { return _record.IsFunctionName; }
            }

            public bool IsRange
            {
                get { return _record.HasFormula; } // TODO - is this right?
            }

            public NamePtg CreatePtg()
            {
                return new NamePtg(_index);
            }
        }

        public SpreadsheetVersion GetSpreadsheetVersion()
        {
            return SpreadsheetVersion.EXCEL97;
        }
    }
}