content
stringlengths
5
1.04M
avg_line_length
float64
1.75
12.9k
max_line_length
int64
2
244k
alphanum_fraction
float64
0
0.98
licenses
list
repository_name
stringlengths
7
92
path
stringlengths
3
249
size
int64
5
1.04M
lang
stringclasses
2 values
using Garnet.Detail.Pagination.Asp.Configurations;
using Garnet.Pagination.DependencyInjection;
using Garnet.Pagination.Configurations;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;

namespace Garnet.Detail.Pagination.Asp.DependencyInjection;

/// <summary>
/// Bunch of methods to register Garnet pagination requirements for an ASP project
/// </summary>
public static class GarnetPaginationDependencyInjection
{
    /// <summary>
    /// Register Garnet pagination requirements for an ASP project to the service collection
    /// DON'T USE <see cref="Garnet.Pagination.DependencyInjection.GarnetPaginationDependencyInjection.AddGarnetPagination(Microsoft.Extensions.DependencyInjection.IServiceCollection,Microsoft.Extensions.Configuration.IConfiguration,string)"/>. THIS METHOD CALLS THAT TOO.
    /// </summary>
    /// <param name="serviceCollection">The service collection to register pagination requirements to</param>
    /// <param name="configuration">To load <see cref="PaginationConfig"/> and <see cref="PaginationFilterConfig"/> and <see cref="PaginationAspRequestConfig"/> with <paramref name="configurationPath"/></param>
    /// <param name="configurationPath">Path to load <see cref="PaginationConfig"/> and <see cref="PaginationFilterConfig"/> (with 'Filter' sub key) and <see cref="PaginationOrderConfig"/> (with 'Order' sub key) and <see cref="PaginationAspRequestConfig"/> (with 'AspRequestParameters' sub key) and <see cref="PaginationAspResponseConfig"/> (with 'AspResponseParameters' sub key) from <paramref name="configuration"/></param>
    /// <returns><paramref name="serviceCollection"/> after applied configurations</returns>
    public static IServiceCollection AddGarnetPaginationAsp(this IServiceCollection serviceCollection,
        IConfiguration configuration,
        string configurationPath = "Garnet.Pagination")
    {
        // BUG FIX: IConfiguration.GetValue<T> only handles types convertible from a
        // single string value, so it always produced null for these complex option
        // classes (and AddSingleton(null) throws). GetSection(...).Get<T>() performs
        // real object binding; fall back to a default instance when the section is
        // absent, mirroring the parameterless overload below.
        var paginationAspRequestConfig =
            configuration.GetSection($"{configurationPath}:AspRequestParameters").Get<PaginationAspRequestConfig>()
            ?? new PaginationAspRequestConfig();
        serviceCollection.AddSingleton(paginationAspRequestConfig);

        var paginationAspResponseConfig =
            configuration.GetSection($"{configurationPath}:AspResponseParameters").Get<PaginationAspResponseConfig>()
            ?? new PaginationAspResponseConfig();
        serviceCollection.AddSingleton(paginationAspResponseConfig);

        return serviceCollection.AddGarnetPagination(configuration, configurationPath);
    }

    /// <summary>
    /// Register Garnet pagination requirements for an ASP project to the service collection
    /// DON'T USE <see cref="Garnet.Pagination.DependencyInjection.GarnetPaginationDependencyInjection.AddGarnetPagination(Microsoft.Extensions.DependencyInjection.IServiceCollection,Microsoft.Extensions.Configuration.IConfiguration,string)"/>. THIS METHOD CALLS THAT TOO.
    /// </summary>
    /// <param name="serviceCollection">The service collection to register pagination requirements to</param>
    /// <param name="paginationConfig">To be used in pagination. Using default value if pass null</param>
    /// <param name="paginationFilterConfig">To be used for filtering data. Using default value if pass null</param>
    /// <param name="paginationOrderConfig">To be used for ordering data. Using default value if pass null</param>
    /// <param name="paginationAspRequestConfig">To be used for mapping incoming requests to the corresponding object. Using default value if pass null</param>
    /// <param name="paginationAspResponseConfig">To be used for exposing pagination result. Using default value if pass null</param>
    /// <returns><paramref name="serviceCollection"/> after applied configurations</returns>
    public static IServiceCollection AddGarnetPaginationAsp(this IServiceCollection serviceCollection,
        PaginationConfig paginationConfig = null,
        PaginationFilterConfig paginationFilterConfig = null,
        PaginationOrderConfig paginationOrderConfig = null,
        PaginationAspRequestConfig paginationAspRequestConfig = null,
        PaginationAspResponseConfig paginationAspResponseConfig = null)
    {
        serviceCollection.AddSingleton(paginationAspRequestConfig ?? new PaginationAspRequestConfig());
        serviceCollection.AddSingleton(paginationAspResponseConfig ?? new PaginationAspResponseConfig());

        return serviceCollection.AddGarnetPagination(paginationConfig, paginationFilterConfig, paginationOrderConfig);
    }
}
71.983871
425
0.775935
[ "MIT" ]
Abbas-b-b/Garnet.Pagination
src/Garnet.Detail.Pagination.Asp/DependencyInjection/GarnetPaginationDependencyInjection.cs
4,463
C#
//------------------------------------------------------------------------------
// <auto-generated>
//     This code was generated by a tool.
//     Runtime Version:4.0.30319.42000
//
//     Changes to this file may cause incorrect behavior and will be lost if
//     the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------

namespace MoveBoxApp.Properties
{
    [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
    [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "11.0.0.0")]
    internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase
    {
        // Single thread-safe settings instance shared by the whole application.
        private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings())));

        /// <summary>
        /// Gets the singleton application settings instance.
        /// </summary>
        public static Settings Default
        {
            get
            {
                return defaultInstance;
            }
        }
    }
}
34.354839
151
0.581221
[ "BSD-3-Clause" ]
kiwizen/DSED-01
INITIAL_DESIGN/TEST_PROJ/MovingBoxApp/MoveBoxApp/Properties/Settings.Designer.cs
1,067
C#
using FileSystem.Logging;
using Structure;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.Caching;

namespace Cache
{
    /// <summary>
    /// Typed facade over the process-wide <see cref="MemoryCache"/>.
    /// </summary>
    internal class CacheAccessor
    {
        /*
         * The general idea here is that we are literally caching everything possible from app start.
         *
         * We'll need to cache collections of references to things and the things themselves;
         * caching collections of things would result in flipping the cache constantly.
         *
         * The administrative website edits the Lookup Data in the database, which won't get
         * refreshed until someone tells it to (or the entire thing reboots).
         *
         * IData data is ALWAYS cached and saved to a different place because it is live in-game
         * data: even if we add, say, one damage to the Combat Knife item in the db it doesn't mean
         * all Combat Knife objects in game get retroactively updated. There will be superadmin
         * level website commands to do this and in-game commands for admins.
         */

        /// <summary>
        /// The place everything gets stored.
        /// </summary>
        private ObjectCache _globalCache = MemoryCache.Default;

        /// <summary>
        /// The general storage policy.
        /// </summary>
        private CacheItemPolicy _globalPolicy = new CacheItemPolicy();

        /// <summary>
        /// The cache type (affects the "ids").
        /// </summary>
        private CacheType _type;

        /// <summary>
        /// Create a new CacheAccessor with its type.
        /// </summary>
        /// <param name="type">The type of item we're caching</param>
        internal CacheAccessor(CacheType type)
        {
            _type = type;
        }

        /// <summary>
        /// Adds an object to the cache, replacing any entry already stored under the same key.
        /// </summary>
        /// <param name="objectToCache">the object to cache</param>
        /// <param name="cacheKey">the key to cache it under</param>
        public void Add(object objectToCache, ICacheKey cacheKey)
        {
            if (Exists(cacheKey))
                Remove(cacheKey);

            _globalCache.AddOrGetExisting(cacheKey.KeyHash(), objectToCache, _globalPolicy);
        }

        /// <summary>
        /// Adds an object to the cache, replacing any entry already stored under the same key.
        /// </summary>
        /// <param name="objectToCache">the object to cache</param>
        /// <param name="cacheKey">the string key to cache it under</param>
        public void Add(object objectToCache, string cacheKey)
        {
            if (Exists(cacheKey))
                Remove(cacheKey);

            _globalCache.AddOrGetExisting(cacheKey, objectToCache, _globalPolicy);
        }

        /// <summary>
        /// Fills a list of entities from the cache of a single type that match the ids sent in.
        /// </summary>
        /// <typeparam name="T">the system type for the entity</typeparam>
        /// <param name="ids">the entity ids to retrieve</param>
        /// <returns>a list of the entities from the cache</returns>
        public IEnumerable<T> GetMany<T>(IEnumerable<long> ids) where T : IData
        {
            try
            {
                return _globalCache
                    .Where(entry => entry.Value.GetType().GetInterfaces().Contains(typeof(T))
                                    && ids.Contains(((T)entry.Value).ID))
                    .Select(entry => (T)entry.Value);
            }
            catch (Exception ex)
            {
                LoggingUtility.LogError(ex);
            }

            return Enumerable.Empty<T>();
        }

        /// <summary>
        /// Get all entities of a type from the cache.
        /// </summary>
        /// <typeparam name="T">the system type for the entity</typeparam>
        /// <returns>a list of the entities from the cache</returns>
        public IEnumerable<T> GetAll<T>()
        {
            try
            {
                // Matches either the concrete type itself or, when T is an interface,
                // anything implementing it.
                return _globalCache
                    .Where(entry => entry.Value.GetType() == typeof(T)
                                    || (typeof(T).IsInterface
                                        && entry.Value.GetType().GetInterfaces().Contains(typeof(T))))
                    .Select(entry => (T)entry.Value);
            }
            catch (Exception ex)
            {
                LoggingUtility.LogError(ex);
            }

            return Enumerable.Empty<T>();
        }

        /// <summary>
        /// When base type and maintype want to be less ambiguous.
        /// </summary>
        /// <typeparam name="T">The base type (like ILocation)</typeparam>
        /// <param name="mainType">The inheriting type (like IRoom)</param>
        /// <returns>all the stuff and things</returns>
        public IEnumerable<T> GetAll<T>(Type mainType)
        {
            try
            {
                return _globalCache
                    .Where(entry => entry.Value.GetType().GetInterfaces().Contains(typeof(T))
                                    && entry.Value.GetType() == mainType)
                    .Select(entry => (T)entry.Value);
            }
            catch (Exception ex)
            {
                LoggingUtility.LogError(ex);
            }

            return Enumerable.Empty<T>();
        }

        /// <summary>
        /// Gets one non-entity from the cache by its key.
        /// </summary>
        /// <typeparam name="T">the type of the entity</typeparam>
        /// <param name="key">the key it was cached with</param>
        /// <returns>the entity requested, or default(T) on any failure (logged)</returns>
        public T Get<T>(string key)
        {
            try
            {
                return (T)_globalCache[key];
            }
            catch (Exception ex)
            {
                LoggingUtility.LogError(ex);
            }

            return default(T);
        }

        /// <summary>
        /// Gets one entity from the cache by its key.
        /// </summary>
        /// <typeparam name="T">the type of the entity</typeparam>
        /// <param name="key">the key it was cached with</param>
        /// <returns>the entity requested, or default(T) on any failure (logged)</returns>
        public T Get<T>(ICacheKey key)
        {
            try
            {
                return (T)_globalCache[key.KeyHash()];
            }
            catch (Exception ex)
            {
                LoggingUtility.LogError(ex);
            }

            return default(T);
        }

        /// <summary>
        /// Removes an entity from the cache by its key.
        /// </summary>
        /// <param name="key">the key of the entity to remove</param>
        public void Remove(ICacheKey key)
        {
            _globalCache.Remove(key.KeyHash());
        }

        /// <summary>
        /// Removes a non-entity from the cache by its key.
        /// </summary>
        /// <param name="key">the key of the entity to remove</param>
        public void Remove(string key)
        {
            _globalCache.Remove(key);
        }

        /// <summary>
        /// Checks if an entity is in the cache.
        /// </summary>
        /// <param name="key">the key of the entity</param>
        /// <returns>whether it is in the cache or not</returns>
        public bool Exists(ICacheKey key)
        {
            return _globalCache.Get(key.KeyHash()) != null;
        }

        /// <summary>
        /// Checks if a non-entity is in the cache.
        /// </summary>
        /// <param name="key">the key of the entity</param>
        /// <returns>whether it is in the cache or not</returns>
        public bool Exists(string key)
        {
            return _globalCache.Get(key) != null;
        }
    }
}
34.931818
167
0.533767
[ "MIT" ]
SwiftAusterity/Cottontail
Cache/CacheAccessor.cs
7,687
C#
using Microsoft.VisualStudio.TestTools.UnitTesting;
using MultiPrecision;
using System;

namespace MultiPrecisionTest.Common
{
    public partial class MultiPrecisionTest
    {
        /// <summary>
        /// Dumps the decimal precision and representable range of each supported
        /// mantissa length to the console for manual inspection.
        /// </summary>
        [TestMethod]
        public void RangeTest()
        {
            Console.WriteLine(MultiPrecision<Pow2.N4>.DecimalDigits);
            Console.WriteLine(MultiPrecision<Pow2.N4>.MinValue);
            Console.WriteLine(MultiPrecision<Pow2.N4>.MaxValue);
            Console.Write("\n");

            Console.WriteLine(MultiPrecision<Pow2.N8>.DecimalDigits);
            Console.WriteLine(MultiPrecision<Pow2.N8>.MinValue);
            Console.WriteLine(MultiPrecision<Pow2.N8>.MaxValue);
            Console.Write("\n");

            Console.WriteLine(MultiPrecision<Pow2.N16>.DecimalDigits);
            Console.WriteLine(MultiPrecision<Pow2.N16>.MinValue);
            Console.WriteLine(MultiPrecision<Pow2.N16>.MaxValue);
            Console.Write("\n");

            Console.WriteLine(MultiPrecision<Pow2.N32>.DecimalDigits);
            Console.WriteLine(MultiPrecision<Pow2.N32>.MinValue);
            Console.WriteLine(MultiPrecision<Pow2.N32>.MaxValue);
            Console.Write("\n");

            Console.WriteLine(MultiPrecision<Pow2.N64>.DecimalDigits);
            Console.WriteLine(MultiPrecision<Pow2.N64>.MinValue);
            Console.WriteLine(MultiPrecision<Pow2.N64>.MaxValue);
            Console.Write("\n");

            // Larger lengths: only the digit count is printed.
            Console.WriteLine(MultiPrecision<Pow2.N128>.DecimalDigits);
            Console.WriteLine(MultiPrecision<Pow2.N256>.DecimalDigits);
            Console.WriteLine(MultiPrecision<Pow2.N512>.DecimalDigits);
            Console.WriteLine(MultiPrecision<Pow2.N1024>.DecimalDigits);
        }
    }
}
37.369565
72
0.662013
[ "MIT" ]
tk-yoshimura/MultiPrecision
MultiPrecisionTest/MultiPrecision/Common/MultiPrecisionTest_range.cs
1,719
C#
using System;
using System.Xml;
using System.Xml.Serialization;
using System.IO;

namespace VoxelImporter.grendgine_collada
{
    /// <summary>
    /// XML-serializable model of a COLLADA &lt;library_lights&gt; element.
    /// </summary>
    [System.SerializableAttribute()]
    [System.Xml.Serialization.XmlTypeAttribute(AnonymousType = true)]
    public partial class Grendgine_Collada_Library_Lights
    {
        [XmlAttribute("id")]
        public string ID;

        [XmlAttribute("name")]
        public string Name;

        [XmlElement(ElementName = "light")]
        public Grendgine_Collada_Light[] Light;

        [XmlElement(ElementName = "asset")]
        public Grendgine_Collada_Asset Asset;

        [XmlElement(ElementName = "extra")]
        public Grendgine_Collada_Extra[] Extra;
    }
}
22.034483
64
0.744914
[ "MIT" ]
Syrapt0r/Protocol-18
Assets/VoxelImporter/Scripts/Editor/Library/Collada_Main/Collada_Core/Lighting/Grendgine_Collada_Library_Lights.cs
639
C#
using System.Collections.Immutable;

namespace MApplication
{
    /// <summary>
    /// Immutable container for an ordered list of display lines.
    /// </summary>
    internal class DisplayText
    {
        /// <summary>The lines, in display order.</summary>
        public ImmutableList<DisplayLine> Lines { get; }

        public DisplayText(ImmutableList<DisplayLine> lines)
        {
            Lines = lines;
        }
    }
}
18.266667
60
0.620438
[ "MIT" ]
mahalex/MParser
MApplication/DisplayText.cs
276
C#
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;

// General assembly metadata shown in file properties / fusion logs.
[assembly: AssemblyTitle("ExtractStudentsByNames")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("ExtractStudentsByNames")]
[assembly: AssemblyCopyright("Copyright © 2013")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]

// Types in this assembly are not visible to COM components unless a type
// opts in with its own ComVisible(true).
[assembly: ComVisible(false)]

// Typelib GUID used if this project is exposed to COM.
[assembly: Guid("bdc0b251-173b-4b03-a027-94c56b9c5949")]

// Version is Major.Minor.Build.Revision; '*' would auto-generate Build/Revision.
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
38.297297
84
0.748765
[ "MIT" ]
vladislav-karamfilov/TelerikAcademy
OOP Projects/ExtensionMethodsDelegatesLambdaLINQ/ExtractStudentsByNames/Properties/AssemblyInfo.cs
1,420
C#
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Data;
using System.Windows.Documents;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Shapes;

namespace Perudo_UI.View
{
    /// <summary>
    /// Interaction logic for MainView.xaml
    /// </summary>
    public partial class MainView : Window
    {
        public MainView()
        {
            InitializeComponent();
        }

        // Closes the main window without opening anything else.
        private void Button_Close_Click(object sender, RoutedEventArgs e)
        {
            this.Close();
        }

        // Hands off to the login window, then closes this one.
        private void Button_Play_Click(object sender, RoutedEventArgs e)
        {
            LoginView loginView = new LoginView();
            loginView.Show();
            this.Close();
        }
    }
}
22.975
73
0.646355
[ "MIT" ]
twizou/Perudo
Perudo_UI/View/MainView.xaml.cs
921
C#
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using Unity.Mathematics;
using Unity.Entities;
using Random = Unity.Mathematics.Random;

namespace UTJ
{
    /// <summary>
    /// Test behaviour that spawns four missiles per frame at random positions and
    /// headings, using a deterministically seeded RNG.
    /// </summary>
    public class TestMissile : MonoBehaviour
    {
        Random random_;

        void Start()
        {
            // Fixed seed keeps runs reproducible.
            random_.InitState(12345);
        }

        void Update()
        {
            for (var spawned = 0; spawned < 4; ++spawned)
            {
                var position = random_.NextFloat3(-100, 100);
                var heading = random_.NextFloat3Direction();
                var rotation = quaternion.LookRotation(heading, new float3(0, 1, 0));
                MissileSystem.Instantiate(MissileManager.Prefab, position, rotation);
            }
        }
    }
} // namespace UTJ
21.516129
72
0.623688
[ "MIT" ]
Jason-W-J/DOTS-Shmup3D-sample
Assets/Scripts/Tests/TestMissile.cs
669
C#
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

/// <summary>
/// Kicks off a one-second fade via the global <c>Fader</c> singleton when the
/// scene object starts.
/// </summary>
public class FadeIn : MonoBehaviour
{
    // Use this for initialization
    void Start()
    {
        Fader.Instance.FadeOut(1.0f);
    }

    // FIX: the original declared an empty Update() method. Unity calls Update()
    // every frame on any MonoBehaviour that declares it, so an empty body is pure
    // per-frame overhead; removing it changes nothing observable.
}
16.294118
37
0.68231
[ "MIT" ]
phelow/TrumpTowerTypingTutor
Assets/FadeIn.cs
279
C#
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace Sungitsa.Model
{
    /// <summary>
    /// A province with the accommodation offerings located in it.
    /// </summary>
    public class Province
    {
        /// <summary>Primary key.</summary>
        public int ProvinceId { get; set; }

        /// <summary>Human-readable province name.</summary>
        public string ProvinceName { get; set; }

        /// <summary>Lodges located in this province.</summary>
        public IEnumerable<Lodge> Lodges { get; set; }

        /// <summary>Apartments located in this province.</summary>
        public IEnumerable<Apartment> Apartments { get; set; }

        /// <summary>Hotels located in this province.</summary>
        public IEnumerable<Hotel> Hotels { get; set; }
    }
}
25.055556
62
0.667406
[ "MIT" ]
MwenyaK/Sungitsa
Sungitsa.Model/Province.cs
453
C#
using Microsoft.Azure.Management.ApiManagement.ArmTemplates.Common.FileHandlers;
using Microsoft.Azure.Management.ApiManagement.ArmTemplates.Creator.TemplateCreators;

namespace Microsoft.Azure.Management.ApiManagement.ArmTemplates.Tests.Creator.TemplateCreatorFactories
{
    /// <summary>
    /// Test helper that wires up an <see cref="APITemplateCreator"/> with all of
    /// its collaborator creators.
    /// </summary>
    public class APITemplateCreatorFactory
    {
        public static APITemplateCreator GenerateAPITemplateCreator()
        {
            // Shared collaborators.
            FileReader reader = new FileReader();
            TemplateCreator baseCreator = new TemplateCreator();

            // Sub-creators the API template creator composes.
            PolicyTemplateCreator policyCreator = new PolicyTemplateCreator(reader);
            ProductAPITemplateCreator productApiCreator = new ProductAPITemplateCreator();
            DiagnosticTemplateCreator diagnosticCreator = new DiagnosticTemplateCreator();
            ReleaseTemplateCreator releaseCreator = new ReleaseTemplateCreator();
            TagAPITemplateCreator tagApiCreator = new TagAPITemplateCreator();

            return new APITemplateCreator(
                reader,
                policyCreator,
                productApiCreator,
                tagApiCreator,
                diagnosticCreator,
                releaseCreator);
        }
    }
}
57.590909
211
0.779795
[ "MIT" ]
manikanta008/azure-api-management-devops-resource-kit
tests/ArmTemplates.Tests/Creator/TemplateCreatorFactories/APITemplateCreatorFactory.cs
1,269
C#
namespace Project.Web.Controllers
{
    using Microsoft.AspNetCore.Mvc;
    using Project.Services.Interfaces;
    using Project.Web.Models;
    using System.Diagnostics;

    /// <summary>
    /// Landing-page controller: shows top articles and the error view.
    /// </summary>
    public class HomeController : Controller
    {
        private readonly IArticleService articles;

        public HomeController(IArticleService articles)
        {
            this.articles = articles;
        }

        public IActionResult Index()
        {
            // Local renamed so it no longer shadows the service field.
            var topArticles = this.articles.TopArticles();

            return View(topArticles);
        }

        [ResponseCache(Duration = 0, Location = ResponseCacheLocation.None, NoStore = true)]
        public IActionResult Error()
        {
            var requestId = Activity.Current?.Id ?? HttpContext.TraceIdentifier;

            return View(new ErrorViewModel { RequestId = requestId });
        }
    }
}
27.931034
112
0.630864
[ "MIT" ]
Vasil-Hadzhiev/MVC-Project
MVC.Project/Project.Web/Controllers/HomeController.cs
812
C#
// <auto-generated />
namespace SmartLink.Service.Migrations
{
    using System.CodeDom.Compiler;
    using System.Data.Entity.Migrations;
    using System.Data.Entity.Migrations.Infrastructure;
    using System.Resources;

    [GeneratedCode("EntityFramework.Migrations", "6.1.3-40302")]
    public sealed partial class InitialCreate : IMigrationMetadata
    {
        // The serialized target model is stored as an embedded resource.
        private readonly ResourceManager Resources = new ResourceManager(typeof(InitialCreate));

        string IMigrationMetadata.Id
        {
            get { return "201703020755344_InitialCreate"; }
        }

        string IMigrationMetadata.Source
        {
            get { return null; }
        }

        string IMigrationMetadata.Target
        {
            get { return Resources.GetString("Target"); }
        }
    }
}
27.6
96
0.624396
[ "MIT" ]
kulado/ProposalManager
SmartLink/SmartLink.Service/Migrations/201703020755344_InitialCreate.Designer.cs
828
C#
//------------------------------------------------------------------------------
// <auto-generated>
//     This code was generated by a tool.
//     Runtime Version:2.0.50727.1434
//
//     Changes to this file may cause incorrect behavior and will be lost if
//     the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------

namespace NArrange.GuiConfig.Properties
{
    using System;

    /// <summary>
    /// A strongly-typed resource class, for looking up localized strings, etc.
    /// </summary>
    // This class was auto-generated by the StronglyTypedResourceBuilder
    // class via a tool like ResGen or Visual Studio.
    // To add or remove a member, edit your .ResX file then rerun ResGen
    // with the /str option, or rebuild your VS project.
    [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "2.0.0.0")]
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
    [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
    internal class Resources
    {
        private static global::System.Resources.ResourceManager resourceMan;

        private static global::System.Globalization.CultureInfo resourceCulture;

        [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
        internal Resources()
        {
        }

        /// <summary>
        /// Returns the cached ResourceManager instance used by this class.
        /// </summary>
        [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
        internal static global::System.Resources.ResourceManager ResourceManager
        {
            get
            {
                if (object.ReferenceEquals(resourceMan, null))
                {
                    global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("NArrange.GuiConfig.Properties.Resources", typeof(Resources).Assembly);
                    resourceMan = temp;
                }
                return resourceMan;
            }
        }

        /// <summary>
        /// Overrides the current thread's CurrentUICulture property for all
        /// resource lookups using this strongly typed resource class.
        /// </summary>
        [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
        internal static global::System.Globalization.CultureInfo Culture
        {
            get
            {
                return resourceCulture;
            }
            set
            {
                resourceCulture = value;
            }
        }
    }
}
43.71875
184
0.614367
[ "Apache-2.0" ]
adgistics/Adgistics.Build
NArrange/src/Backup/NArrange.GuiConfig/Properties/Resources.Designer.cs
2,800
C#
using Windows.Storage;
using Windows.Storage.Streams;
using Param_RootNamespace.Core.Helpers;

namespace Param_RootNamespace.Helpers;

// Use these extension methods to store and retrieve local and roaming app data
// More details regarding storing and retrieving app data at https://docs.microsoft.com/windows/apps/design/app-settings/store-and-retrieve-app-data
public static class SettingsStorageExtensions
{
    private const string FileExtension = ".json";

    // NOTE(review): returns true when the roaming quota is exactly 0 — confirm
    // the intended semantics against callers before changing.
    public static bool IsRoamingStorageAvailable(this ApplicationData appData)
    {
        return appData.RoamingStorageQuota == 0;
    }

    /// <summary>
    /// Serializes <paramref name="content"/> to JSON and writes it to
    /// "<paramref name="name"/>.json" in <paramref name="folder"/>, replacing any existing file.
    /// </summary>
    public static async Task SaveAsync<T>(this StorageFolder folder, string name, T content)
    {
        var file = await folder.CreateFileAsync(GetFileName(name), CreationCollisionOption.ReplaceExisting);
        var fileContent = await Json.StringifyAsync(content);

        await FileIO.WriteTextAsync(file, fileContent);
    }

    /// <summary>
    /// Reads and deserializes "<paramref name="name"/>.json" from <paramref name="folder"/>,
    /// or default when the file does not exist.
    /// </summary>
    public static async Task<T?> ReadAsync<T>(this StorageFolder folder, string name)
    {
        if (!File.Exists(Path.Combine(folder.Path, GetFileName(name))))
        {
            return default;
        }

        // CONSISTENCY FIX: previously hard-coded $"{name}.json", duplicating the
        // FileExtension constant; now routed through GetFileName like every other path.
        var file = await folder.GetFileAsync(GetFileName(name));
        var fileContent = await FileIO.ReadTextAsync(file);

        return await Json.ToObjectAsync<T>(fileContent);
    }

    /// <summary>Serializes <paramref name="value"/> to JSON and stores it under <paramref name="key"/>.</summary>
    public static async Task SaveAsync<T>(this ApplicationDataContainer settings, string key, T value)
    {
        settings.SaveString(key, await Json.StringifyAsync(value));
    }

    public static void SaveString(this ApplicationDataContainer settings, string key, string value)
    {
        settings.Values[key] = value;
    }

    /// <summary>Reads and deserializes the JSON stored under <paramref name="key"/>, or default when absent.</summary>
    public static async Task<T?> ReadAsync<T>(this ApplicationDataContainer settings, string key)
    {
        if (settings.Values.TryGetValue(key, out var obj))
        {
            return await Json.ToObjectAsync<T>((string)obj);
        }

        return default;
    }

    /// <summary>
    /// Writes raw bytes to <paramref name="fileName"/> in <paramref name="folder"/>.
    /// </summary>
    /// <exception cref="ArgumentNullException">when <paramref name="content"/> is null.</exception>
    /// <exception cref="ArgumentException">when <paramref name="fileName"/> is null or empty.</exception>
    public static async Task<StorageFile> SaveFileAsync(this StorageFolder folder, byte[] content, string fileName, CreationCollisionOption options = CreationCollisionOption.ReplaceExisting)
    {
        if (content == null)
        {
            throw new ArgumentNullException(nameof(content));
        }

        if (string.IsNullOrEmpty(fileName))
        {
            throw new ArgumentException("File name is null or empty. Specify a valid file name", nameof(fileName));
        }

        var storageFile = await folder.CreateFileAsync(fileName, options);
        await FileIO.WriteBytesAsync(storageFile, content);
        return storageFile;
    }

    /// <summary>
    /// Reads all bytes of <paramref name="fileName"/> from <paramref name="folder"/>,
    /// or null when the item is missing or not a file.
    /// </summary>
    public static async Task<byte[]?> ReadFileAsync(this StorageFolder folder, string fileName)
    {
        var item = await folder.TryGetItemAsync(fileName).AsTask().ConfigureAwait(false);

        if ((item != null) && item.IsOfType(StorageItemTypes.File))
        {
            var storageFile = await folder.GetFileAsync(fileName);
            var content = await storageFile.ReadBytesAsync();
            return content;
        }

        return null;
    }

    /// <summary>
    /// Reads all bytes of <paramref name="file"/>, or null when the file is null.
    /// </summary>
    public static async Task<byte[]?> ReadBytesAsync(this StorageFile file)
    {
        if (file != null)
        {
            using IRandomAccessStream stream = await file.OpenReadAsync();
            using var reader = new DataReader(stream.GetInputStreamAt(0));
            await reader.LoadAsync((uint)stream.Size);
            var bytes = new byte[stream.Size];
            reader.ReadBytes(bytes);
            return bytes;
        }

        return null;
    }

    // Appends the shared ".json" extension to a logical settings name.
    private static string GetFileName(string name)
    {
        return string.Concat(name, FileExtension);
    }
}
33.875
191
0.64602
[ "MIT" ]
Microsoft/WindowsTemplateStudio
code/TemplateStudioForWinUICs/Templates/Ft/SettingsStorage/Param_ProjectName/Helpers/SettingsStorageExtensions.cs
3,685
C#
// Copyright (c) .NET Foundation. All rights reserved. // Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System.Threading.Tasks; using Microsoft.EntityFrameworkCore.TestUtilities; using Xunit.Abstractions; namespace Microsoft.EntityFrameworkCore.Query { public class NorthwindAggregateOperatorsQuerySqlServerTest : NorthwindAggregateOperatorsQueryRelationalTestBase< NorthwindQuerySqlServerFixture<NoopModelCustomizer>> { public NorthwindAggregateOperatorsQuerySqlServerTest( NorthwindQuerySqlServerFixture<NoopModelCustomizer> fixture, ITestOutputHelper testOutputHelper) : base(fixture) { ClearLog(); //Fixture.TestSqlLoggerFactory.SetTestOutputHelper(testOutputHelper); } protected override bool CanExecuteQueryString => true; public override void Select_All() { base.Select_All(); AssertSql( @"SELECT CASE WHEN NOT EXISTS ( SELECT 1 FROM [Orders] AS [o] WHERE ([o].[CustomerID] <> N'ALFKI') OR [o].[CustomerID] IS NULL) THEN CAST(1 AS bit) ELSE CAST(0 AS bit) END"); } public override async Task Sum_with_no_arg(bool async) { await base.Sum_with_no_arg(async); AssertSql( @"SELECT COALESCE(SUM([o].[OrderID]), 0) FROM [Orders] AS [o]"); } public override async Task Sum_with_binary_expression(bool async) { await base.Sum_with_binary_expression(async); AssertSql( @"SELECT COALESCE(SUM([o].[OrderID] * 2), 0) FROM [Orders] AS [o]"); } public override async Task Sum_with_arg(bool async) { await base.Sum_with_arg(async); AssertSql( @"SELECT COALESCE(SUM([o].[OrderID]), 0) FROM [Orders] AS [o]"); } public override async Task Sum_with_arg_expression(bool async) { await base.Sum_with_arg_expression(async); AssertSql( @"SELECT COALESCE(SUM([o].[OrderID] + [o].[OrderID]), 0) FROM [Orders] AS [o]"); } public override async Task Sum_with_division_on_decimal(bool async) { await base.Sum_with_division_on_decimal(async); AssertSql( @"SELECT COALESCE(SUM(CAST([o].[Quantity] AS decimal(18,2)) / 2.09), 0.0) FROM [Order Details] AS 
[o]"); } public override async Task Sum_with_division_on_decimal_no_significant_digits(bool async) { await base.Sum_with_division_on_decimal_no_significant_digits(async); AssertSql( @"SELECT COALESCE(SUM(CAST([o].[Quantity] AS decimal(18,2)) / 2.0), 0.0) FROM [Order Details] AS [o]"); } public override async Task Sum_with_coalesce(bool async) { await base.Sum_with_coalesce(async); AssertSql( @"SELECT COALESCE(SUM(COALESCE([p].[UnitPrice], 0.0)), 0.0) FROM [Products] AS [p] WHERE [p].[ProductID] < 40"); } public override async Task Sum_over_subquery_is_client_eval(bool async) { await base.Sum_over_subquery_is_client_eval(async); AssertSql( @"SELECT ( SELECT SUM([o].[OrderID]) FROM [Orders] AS [o] WHERE [c].[CustomerID] = [o].[CustomerID] ) FROM [Customers] AS [c]"); } public override async Task Sum_over_nested_subquery_is_client_eval(bool async) { await base.Sum_over_nested_subquery_is_client_eval(async); AssertSql( @"SELECT [c].[CustomerID] FROM [Customers] AS [c]"); } public override async Task Sum_over_min_subquery_is_client_eval(bool async) { await base.Sum_over_min_subquery_is_client_eval(async); AssertSql( @"SELECT [c].[CustomerID] FROM [Customers] AS [c]"); } public override async Task Sum_on_float_column(bool async) { await base.Sum_on_float_column(async); AssertSql( @"SELECT CAST(COALESCE(SUM([o].[Discount]), 0.0E0) AS real) FROM [Order Details] AS [o] WHERE [o].[ProductID] = 1"); } public override async Task Sum_on_float_column_in_subquery(bool async) { await base.Sum_on_float_column_in_subquery(async); AssertSql( @"SELECT [o0].[OrderID], ( SELECT CAST(COALESCE(SUM([o].[Discount]), 0.0E0) AS real) FROM [Order Details] AS [o] WHERE [o0].[OrderID] = [o].[OrderID]) AS [Sum] FROM [Orders] AS [o0] WHERE [o0].[OrderID] < 10300"); } public override async Task Average_with_no_arg(bool async) { await base.Average_with_no_arg(async); AssertSql( @"SELECT AVG(CAST([o].[OrderID] AS float)) FROM [Orders] AS [o]"); } public override async Task 
Average_with_binary_expression(bool async) { await base.Average_with_binary_expression(async); AssertSql( @"SELECT AVG(CAST(([o].[OrderID] * 2) AS float)) FROM [Orders] AS [o]"); } public override async Task Average_with_arg(bool async) { await base.Average_with_arg(async); AssertSql( @"SELECT AVG(CAST([o].[OrderID] AS float)) FROM [Orders] AS [o]"); } public override async Task Average_with_arg_expression(bool async) { await base.Average_with_arg_expression(async); AssertSql( @"SELECT AVG(CAST(([o].[OrderID] + [o].[OrderID]) AS float)) FROM [Orders] AS [o]"); } public override async Task Average_with_division_on_decimal(bool async) { await base.Average_with_division_on_decimal(async); AssertSql( @"SELECT AVG(CAST([o].[Quantity] AS decimal(18,2)) / 2.09) FROM [Order Details] AS [o]"); } public override async Task Average_with_division_on_decimal_no_significant_digits(bool async) { await base.Average_with_division_on_decimal_no_significant_digits(async); AssertSql( @"SELECT AVG(CAST([o].[Quantity] AS decimal(18,2)) / 2.0) FROM [Order Details] AS [o]"); } public override async Task Average_with_coalesce(bool async) { await base.Average_with_coalesce(async); AssertSql( @"SELECT AVG(COALESCE([p].[UnitPrice], 0.0)) FROM [Products] AS [p] WHERE [p].[ProductID] < 40"); } public override async Task Average_over_subquery_is_client_eval(bool async) { await base.Average_over_subquery_is_client_eval(async); AssertSql( @"SELECT ( SELECT SUM([o].[OrderID]) FROM [Orders] AS [o] WHERE [c].[CustomerID] = [o].[CustomerID] ) FROM [Customers] AS [c]"); } public override async Task Average_over_nested_subquery_is_client_eval(bool async) { await base.Average_over_nested_subquery_is_client_eval(async); AssertSql( @"@__p_0='3' SELECT TOP(@__p_0) [c].[CustomerID] FROM [Customers] AS [c] ORDER BY [c].[CustomerID]"); } public override async Task Average_over_max_subquery_is_client_eval(bool async) { await base.Average_over_max_subquery_is_client_eval(async); AssertSql( @"@__p_0='3' SELECT 
TOP(@__p_0) [c].[CustomerID] FROM [Customers] AS [c] ORDER BY [c].[CustomerID]"); } public override async Task Average_on_float_column(bool async) { await base.Average_on_float_column(async); AssertSql( @"SELECT CAST(AVG([o].[Discount]) AS real) FROM [Order Details] AS [o] WHERE [o].[ProductID] = 1"); } public override async Task Average_on_float_column_in_subquery(bool async) { await base.Average_on_float_column_in_subquery(async); AssertSql( @"SELECT [o0].[OrderID], ( SELECT CAST(AVG([o].[Discount]) AS real) FROM [Order Details] AS [o] WHERE [o0].[OrderID] = [o].[OrderID]) AS [Sum] FROM [Orders] AS [o0] WHERE [o0].[OrderID] < 10300"); } public override async Task Average_on_float_column_in_subquery_with_cast(bool async) { await base.Average_on_float_column_in_subquery_with_cast(async); AssertSql( @"SELECT [o0].[OrderID], ( SELECT CAST(AVG([o].[Discount]) AS real) FROM [Order Details] AS [o] WHERE [o0].[OrderID] = [o].[OrderID]) AS [Sum] FROM [Orders] AS [o0] WHERE [o0].[OrderID] < 10300"); } public override async Task Min_with_no_arg(bool async) { await base.Min_with_no_arg(async); AssertSql( @"SELECT MIN([o].[OrderID]) FROM [Orders] AS [o]"); } public override async Task Min_with_arg(bool async) { await base.Min_with_arg(async); AssertSql( @"SELECT MIN([o].[OrderID]) FROM [Orders] AS [o]"); } public override async Task Min_with_coalesce(bool async) { await base.Min_with_coalesce(async); AssertSql( @"SELECT MIN(COALESCE([p].[UnitPrice], 0.0)) FROM [Products] AS [p] WHERE [p].[ProductID] < 40"); } public override async Task Min_over_subquery_is_client_eval(bool async) { await base.Min_over_subquery_is_client_eval(async); AssertSql( @"SELECT ( SELECT SUM([o].[OrderID]) FROM [Orders] AS [o] WHERE [c].[CustomerID] = [o].[CustomerID] ) FROM [Customers] AS [c]"); } public override async Task Min_over_nested_subquery_is_client_eval(bool async) { await base.Min_over_nested_subquery_is_client_eval(async); AssertSql( @"@__p_0='3' SELECT TOP(@__p_0) [c].[CustomerID] FROM 
[Customers] AS [c] ORDER BY [c].[CustomerID]"); } public override async Task Min_over_max_subquery_is_client_eval(bool async) { await base.Min_over_max_subquery_is_client_eval(async); AssertSql( @"@__p_0='3' SELECT TOP(@__p_0) [c].[CustomerID] FROM [Customers] AS [c] ORDER BY [c].[CustomerID]"); } public override async Task Max_with_no_arg(bool async) { await base.Max_with_no_arg(async); AssertSql( @"SELECT MAX([o].[OrderID]) FROM [Orders] AS [o]"); } public override async Task Max_with_arg(bool async) { await base.Max_with_arg(async); AssertSql( @"SELECT MAX([o].[OrderID]) FROM [Orders] AS [o]"); } public override async Task Max_with_coalesce(bool async) { await base.Max_with_coalesce(async); AssertSql( @"SELECT MAX(COALESCE([p].[UnitPrice], 0.0)) FROM [Products] AS [p] WHERE [p].[ProductID] < 40"); } public override async Task Max_over_subquery_is_client_eval(bool async) { await base.Max_over_subquery_is_client_eval(async); AssertSql( @"SELECT ( SELECT SUM([o].[OrderID]) FROM [Orders] AS [o] WHERE [c].[CustomerID] = [o].[CustomerID] ) FROM [Customers] AS [c]"); } public override async Task Max_over_nested_subquery_is_client_eval(bool async) { await base.Max_over_nested_subquery_is_client_eval(async); AssertSql( @"@__p_0='3' SELECT TOP(@__p_0) [c].[CustomerID] FROM [Customers] AS [c] ORDER BY [c].[CustomerID]"); } public override async Task Max_over_sum_subquery_is_client_eval(bool async) { await base.Max_over_sum_subquery_is_client_eval(async); AssertSql( @"@__p_0='3' SELECT TOP(@__p_0) [c].[CustomerID] FROM [Customers] AS [c] ORDER BY [c].[CustomerID]"); } public override async Task Count_with_predicate(bool async) { await base.Count_with_predicate(async); AssertSql( @"SELECT COUNT(*) FROM [Orders] AS [o] WHERE [o].[CustomerID] = N'ALFKI'"); } public override async Task Where_OrderBy_Count(bool async) { await base.Where_OrderBy_Count(async); AssertSql( @"SELECT COUNT(*) FROM [Orders] AS [o] WHERE [o].[CustomerID] = N'ALFKI'"); } public override async Task 
OrderBy_Where_Count(bool async) { await base.OrderBy_Where_Count(async); AssertSql( @"SELECT COUNT(*) FROM [Orders] AS [o] WHERE [o].[CustomerID] = N'ALFKI'"); } public override async Task OrderBy_Count_with_predicate(bool async) { await base.OrderBy_Count_with_predicate(async); AssertSql( @"SELECT COUNT(*) FROM [Orders] AS [o] WHERE [o].[CustomerID] = N'ALFKI'"); } public override async Task OrderBy_Where_Count_with_predicate(bool async) { await base.OrderBy_Where_Count_with_predicate(async); AssertSql( @"SELECT COUNT(*) FROM [Orders] AS [o] WHERE ([o].[OrderID] > 10) AND (([o].[CustomerID] <> N'ALFKI') OR [o].[CustomerID] IS NULL)"); } public override async Task Distinct(bool async) { await base.Distinct(async); AssertSql( @"SELECT DISTINCT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c]"); } public override async Task Distinct_Scalar(bool async) { await base.Distinct_Scalar(async); AssertSql( @"SELECT DISTINCT [c].[City] FROM [Customers] AS [c]"); } public override async Task OrderBy_Distinct(bool async) { await base.OrderBy_Distinct(async); // Ordering not preserved by distinct when ordering columns not projected. 
AssertSql( @"SELECT DISTINCT [c].[City] FROM [Customers] AS [c]"); } public override async Task Distinct_OrderBy(bool async) { await base.Distinct_OrderBy(async); AssertSql( @"SELECT [t].[Country] FROM ( SELECT DISTINCT [c].[Country] FROM [Customers] AS [c] ) AS [t] ORDER BY [t].[Country]"); } public override async Task Distinct_OrderBy2(bool async) { await base.Distinct_OrderBy2(async); AssertSql( @"SELECT [t].[CustomerID], [t].[Address], [t].[City], [t].[CompanyName], [t].[ContactName], [t].[ContactTitle], [t].[Country], [t].[Fax], [t].[Phone], [t].[PostalCode], [t].[Region] FROM ( SELECT DISTINCT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] ) AS [t] ORDER BY [t].[CustomerID]"); } public override async Task Distinct_OrderBy3(bool async) { await base.Distinct_OrderBy3(async); AssertSql( @"SELECT [t].[CustomerID] FROM ( SELECT DISTINCT [c].[CustomerID] FROM [Customers] AS [c] ) AS [t] ORDER BY [t].[CustomerID]"); } public override async Task Distinct_Count(bool async) { await base.Distinct_Count(async); AssertSql( @"SELECT COUNT(*) FROM ( SELECT DISTINCT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] ) AS [t]"); } public override async Task Select_Select_Distinct_Count(bool async) { await base.Select_Select_Distinct_Count(async); AssertSql( @"SELECT COUNT(*) FROM ( SELECT DISTINCT [c].[City] FROM [Customers] AS [c] ) AS [t]"); } public override async Task Single_Predicate(bool async) { await base.Single_Predicate(async); AssertSql( @"SELECT TOP(2) [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[CustomerID] = 
N'ALFKI'"); } public override async Task FirstOrDefault_inside_subquery_gets_server_evaluated(bool async) { await base.FirstOrDefault_inside_subquery_gets_server_evaluated(async); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE ([c].[CustomerID] = N'ALFKI') AND (( SELECT TOP(1) [o].[CustomerID] FROM [Orders] AS [o] WHERE ([c].[CustomerID] = [o].[CustomerID]) AND ([o].[CustomerID] = N'ALFKI')) = N'ALFKI')"); } public override async Task Multiple_collection_navigation_with_FirstOrDefault_chained(bool async) { await base.Multiple_collection_navigation_with_FirstOrDefault_chained(async); AssertSql( @"SELECT [t].[OrderID], [t].[ProductID], [t].[Discount], [t].[Quantity], [t].[UnitPrice] FROM [Customers] AS [c] OUTER APPLY ( SELECT TOP(1) [o].[OrderID], [o].[ProductID], [o].[Discount], [o].[Quantity], [o].[UnitPrice] FROM [Order Details] AS [o] WHERE ( SELECT TOP(1) [o0].[OrderID] FROM [Orders] AS [o0] WHERE [c].[CustomerID] = [o0].[CustomerID] ORDER BY [o0].[OrderID]) IS NOT NULL AND (( SELECT TOP(1) [o1].[OrderID] FROM [Orders] AS [o1] WHERE [c].[CustomerID] = [o1].[CustomerID] ORDER BY [o1].[OrderID]) = [o].[OrderID]) ORDER BY [o].[ProductID] ) AS [t] WHERE [c].[CustomerID] LIKE N'F%' ORDER BY [c].[CustomerID]"); } public override async Task Multiple_collection_navigation_with_FirstOrDefault_chained_projecting_scalar(bool async) { await base.Multiple_collection_navigation_with_FirstOrDefault_chained_projecting_scalar(async); AssertSql( @"SELECT ( SELECT TOP(1) [o].[ProductID] FROM [Order Details] AS [o] WHERE ( SELECT TOP(1) [o0].[OrderID] FROM [Orders] AS [o0] WHERE [c].[CustomerID] = [o0].[CustomerID] ORDER BY [o0].[OrderID]) IS NOT NULL AND (( SELECT TOP(1) [o1].[OrderID] FROM [Orders] AS [o1] WHERE [c].[CustomerID] = [o1].[CustomerID] ORDER BY [o1].[OrderID]) = [o].[OrderID]) ORDER BY 
[o].[ProductID]) FROM [Customers] AS [c] WHERE [c].[CustomerID] LIKE N'A%' ORDER BY [c].[CustomerID]"); } public override async Task First_inside_subquery_gets_client_evaluated(bool async) { await base.First_inside_subquery_gets_client_evaluated(async); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE ([c].[CustomerID] = N'ALFKI') AND (( SELECT TOP(1) [o].[CustomerID] FROM [Orders] AS [o] WHERE ([c].[CustomerID] = [o].[CustomerID]) AND ([o].[CustomerID] = N'ALFKI')) = N'ALFKI')"); } public override async Task Last(bool async) { await base.Last(async); AssertSql( @"SELECT TOP(1) [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] ORDER BY [c].[ContactName] DESC"); } public override async Task Last_Predicate(bool async) { await base.Last_Predicate(async); AssertSql( @"SELECT TOP(1) [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[City] = N'London' ORDER BY [c].[ContactName] DESC"); } public override async Task Where_Last(bool async) { await base.Where_Last(async); AssertSql( @"SELECT TOP(1) [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[City] = N'London' ORDER BY [c].[ContactName] DESC"); } public override async Task LastOrDefault(bool async) { await base.LastOrDefault(async); AssertSql( @"SELECT TOP(1) [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], 
[c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] ORDER BY [c].[ContactName] DESC"); } public override async Task LastOrDefault_Predicate(bool async) { await base.LastOrDefault_Predicate(async); AssertSql( @"SELECT TOP(1) [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[City] = N'London' ORDER BY [c].[ContactName] DESC"); } public override async Task Where_LastOrDefault(bool async) { await base.Where_LastOrDefault(async); AssertSql( @"SELECT TOP(1) [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[City] = N'London' ORDER BY [c].[ContactName] DESC"); } public override async Task Contains_with_subquery(bool async) { await base.Contains_with_subquery(async); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE EXISTS ( SELECT 1 FROM [Orders] AS [o] WHERE [o].[CustomerID] = [c].[CustomerID])"); } public override async Task Contains_with_local_array_closure(bool async) { await base.Contains_with_local_array_closure(async); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[CustomerID] IN (N'ABCDE', N'ALFKI')", // @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[CustomerID] = N'ABCDE'"); } public override async Task 
Contains_with_subquery_and_local_array_closure(bool async) { await base.Contains_with_subquery_and_local_array_closure(async); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE EXISTS ( SELECT 1 FROM [Customers] AS [c0] WHERE [c0].[City] IN (N'London', N'Buenos Aires') AND ([c0].[CustomerID] = [c].[CustomerID]))", // @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE EXISTS ( SELECT 1 FROM [Customers] AS [c0] WHERE ([c0].[City] = N'London') AND ([c0].[CustomerID] = [c].[CustomerID]))"); } public override async Task Contains_with_local_uint_array_closure(bool async) { await base.Contains_with_local_uint_array_closure(async); AssertSql( @"SELECT [e].[EmployeeID], [e].[City], [e].[Country], [e].[FirstName], [e].[ReportsTo], [e].[Title] FROM [Employees] AS [e] WHERE [e].[EmployeeID] IN (0, 1)", // @"SELECT [e].[EmployeeID], [e].[City], [e].[Country], [e].[FirstName], [e].[ReportsTo], [e].[Title] FROM [Employees] AS [e] WHERE [e].[EmployeeID] = 0"); } public override async Task Contains_with_local_nullable_uint_array_closure(bool async) { await base.Contains_with_local_nullable_uint_array_closure(async); AssertSql( @"SELECT [e].[EmployeeID], [e].[City], [e].[Country], [e].[FirstName], [e].[ReportsTo], [e].[Title] FROM [Employees] AS [e] WHERE [e].[EmployeeID] IN (0, 1)", // @"SELECT [e].[EmployeeID], [e].[City], [e].[Country], [e].[FirstName], [e].[ReportsTo], [e].[Title] FROM [Employees] AS [e] WHERE [e].[EmployeeID] = 0"); } public override async Task Contains_with_local_array_inline(bool async) { await base.Contains_with_local_array_inline(async); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], 
[c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[CustomerID] IN (N'ABCDE', N'ALFKI')"); } public override async Task Contains_with_local_list_closure(bool async) { await base.Contains_with_local_list_closure(async); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[CustomerID] IN (N'ABCDE', N'ALFKI')"); } public override async Task Contains_with_local_object_list_closure(bool async) { await base.Contains_with_local_object_list_closure(async); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[CustomerID] IN (N'ABCDE', N'ALFKI')"); } public override async Task Contains_with_local_list_closure_all_null(bool async) { await base.Contains_with_local_list_closure_all_null(async); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE 0 = 1"); } public override async Task Contains_with_local_list_inline(bool async) { await base.Contains_with_local_list_inline(async); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[CustomerID] IN (N'ABCDE', N'ALFKI')"); } public override async Task Contains_with_local_list_inline_closure_mix(bool async) { await base.Contains_with_local_list_inline_closure_mix(async); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], 
[c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[CustomerID] IN (N'ABCDE', N'ALFKI')", // @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[CustomerID] IN (N'ABCDE', N'ANATR')"); } public override async Task Contains_with_local_non_primitive_list_inline_closure_mix(bool async) { await base.Contains_with_local_non_primitive_list_inline_closure_mix(async); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[CustomerID] IN (N'ABCDE', N'ALFKI')", // @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[CustomerID] IN (N'ABCDE', N'ANATR')"); } public override async Task Contains_with_local_non_primitive_list_closure_mix(bool async) { await base.Contains_with_local_non_primitive_list_closure_mix(async); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[CustomerID] IN (N'ABCDE', N'ALFKI')"); } public override async Task Contains_with_local_collection_false(bool async) { await base.Contains_with_local_collection_false(async); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[CustomerID] NOT IN (N'ABCDE', N'ALFKI')"); } public override 
async Task Contains_with_local_collection_complex_predicate_and(bool async) { await base.Contains_with_local_collection_complex_predicate_and(async); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[CustomerID] IN (N'ALFKI', N'ABCDE') AND [c].[CustomerID] IN (N'ABCDE', N'ALFKI')"); } public override async Task Contains_with_local_collection_complex_predicate_or(bool async) { await base.Contains_with_local_collection_complex_predicate_or(async); // issue #18791 // AssertSql( // @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] //FROM [Customers] AS [c] //WHERE [c].[CustomerID] IN (N'ABCDE', N'ALFKI', N'ALFKI', N'ABCDE')"); } public override async Task Contains_with_local_collection_complex_predicate_not_matching_ins1(bool async) { await base.Contains_with_local_collection_complex_predicate_not_matching_ins1(async); // issue #18791 // AssertSql( // @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] //FROM [Customers] AS [c] //WHERE [c].[CustomerID] IN (N'ALFKI', N'ABCDE') OR [c].[CustomerID] NOT IN (N'ABCDE', N'ALFKI')"); } public override async Task Contains_with_local_collection_complex_predicate_not_matching_ins2(bool async) { await base.Contains_with_local_collection_complex_predicate_not_matching_ins2(async); // issue #18791 // AssertSql( // @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] //FROM [Customers] AS [c] //WHERE [c].[CustomerID] IN (N'ABCDE', N'ALFKI') AND [c].[CustomerID] NOT IN 
(N'ALFKI', N'ABCDE')"); } public override async Task Contains_with_local_collection_sql_injection(bool async) { await base.Contains_with_local_collection_sql_injection(async); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[CustomerID] IN (N'ALFKI', N'ABC'')); GO; DROP TABLE Orders; GO; --') OR [c].[CustomerID] IN (N'ALFKI', N'ABCDE')"); } public override async Task Contains_with_local_collection_empty_closure(bool async) { await base.Contains_with_local_collection_empty_closure(async); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE 0 = 1"); } public override async Task Contains_with_local_collection_empty_inline(bool async) { await base.Contains_with_local_collection_empty_inline(async); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c]"); } public override async Task Contains_top_level(bool async) { await base.Contains_top_level(async); AssertSql( @"@__p_0='ALFKI' (Size = 5) (DbType = StringFixedLength) SELECT CASE WHEN EXISTS ( SELECT 1 FROM [Customers] AS [c] WHERE [c].[CustomerID] = @__p_0) THEN CAST(1 AS bit) ELSE CAST(0 AS bit) END"); } public override async Task Contains_with_local_anonymous_type_array_closure(bool async) { await base.Contains_with_local_anonymous_type_array_closure(async); AssertSql( @"SELECT [o].[OrderID], [o].[ProductID], [o].[Discount], [o].[Quantity], [o].[UnitPrice] FROM [Order Details] AS [o]", // @"SELECT [o].[OrderID], [o].[ProductID], [o].[Discount], [o].[Quantity], [o].[UnitPrice] FROM [Order Details] AS [o]"); } 
public override void OfType_Select() { base.OfType_Select(); AssertSql( @"SELECT TOP(1) [c].[City] FROM [Orders] AS [o] LEFT JOIN [Customers] AS [c] ON [o].[CustomerID] = [c].[CustomerID] ORDER BY [o].[OrderID]"); } public override void OfType_Select_OfType_Select() { base.OfType_Select_OfType_Select(); AssertSql( @"SELECT TOP(1) [c].[City] FROM [Orders] AS [o] LEFT JOIN [Customers] AS [c] ON [o].[CustomerID] = [c].[CustomerID] ORDER BY [o].[OrderID]"); } public override async Task Average_with_non_matching_types_in_projection_doesnt_produce_second_explicit_cast(bool async) { await base.Average_with_non_matching_types_in_projection_doesnt_produce_second_explicit_cast(async); AssertSql( @"SELECT AVG(CAST(CAST([o].[OrderID] AS bigint) AS float)) FROM [Orders] AS [o] WHERE [o].[CustomerID] IS NOT NULL AND ([o].[CustomerID] LIKE N'A%')"); } public override async Task Max_with_non_matching_types_in_projection_introduces_explicit_cast(bool async) { await base.Max_with_non_matching_types_in_projection_introduces_explicit_cast(async); AssertSql( @"SELECT MAX(CAST([o].[OrderID] AS bigint)) FROM [Orders] AS [o] WHERE [o].[CustomerID] IS NOT NULL AND ([o].[CustomerID] LIKE N'A%')"); } public override async Task Min_with_non_matching_types_in_projection_introduces_explicit_cast(bool async) { await base.Min_with_non_matching_types_in_projection_introduces_explicit_cast(async); AssertSql( @"SELECT MIN(CAST([o].[OrderID] AS bigint)) FROM [Orders] AS [o] WHERE [o].[CustomerID] IS NOT NULL AND ([o].[CustomerID] LIKE N'A%')"); } public override async Task OrderBy_Take_Last_gives_correct_result(bool async) { await base.OrderBy_Take_Last_gives_correct_result(async); AssertSql( @"@__p_0='20' SELECT TOP(1) [t].[CustomerID], [t].[Address], [t].[City], [t].[CompanyName], [t].[ContactName], [t].[ContactTitle], [t].[Country], [t].[Fax], [t].[Phone], [t].[PostalCode], [t].[Region] FROM ( SELECT TOP(@__p_0) [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], 
[c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] ORDER BY [c].[CustomerID] ) AS [t] ORDER BY [t].[CustomerID] DESC"); } public override async Task OrderBy_Skip_Last_gives_correct_result(bool async) { await base.OrderBy_Skip_Last_gives_correct_result(async); AssertSql( @"@__p_0='20' SELECT TOP(1) [t].[CustomerID], [t].[Address], [t].[City], [t].[CompanyName], [t].[ContactName], [t].[ContactTitle], [t].[Country], [t].[Fax], [t].[Phone], [t].[PostalCode], [t].[Region] FROM ( SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] ORDER BY [c].[CustomerID] OFFSET @__p_0 ROWS ) AS [t] ORDER BY [t].[CustomerID] DESC"); } public override void Contains_over_entityType_should_rewrite_to_identity_equality() { base.Contains_over_entityType_should_rewrite_to_identity_equality(); AssertSql( @"SELECT TOP(2) [o].[OrderID], [o].[CustomerID], [o].[EmployeeID], [o].[OrderDate] FROM [Orders] AS [o] WHERE [o].[OrderID] = 10248", // @"@__entity_equality_p_0_OrderID='10248' (Nullable = true) SELECT CASE WHEN EXISTS ( SELECT 1 FROM [Orders] AS [o] WHERE ([o].[CustomerID] = N'VINET') AND ([o].[OrderID] = @__entity_equality_p_0_OrderID)) THEN CAST(1 AS bit) ELSE CAST(0 AS bit) END"); } public override async Task List_Contains_over_entityType_should_rewrite_to_identity_equality(bool async) { await base.List_Contains_over_entityType_should_rewrite_to_identity_equality(async); AssertSql( @"@__entity_equality_someOrder_0_OrderID='10248' (Nullable = true) SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE EXISTS ( SELECT 1 FROM [Orders] AS [o] WHERE ([c].[CustomerID] = [o].[CustomerID]) AND ([o].[OrderID] = 
@__entity_equality_someOrder_0_OrderID))"); } public override async Task List_Contains_with_constant_list(bool async) { await base.List_Contains_with_constant_list(async); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[CustomerID] IN (N'ALFKI', N'ANATR')"); } public override async Task List_Contains_with_parameter_list(bool async) { await base.List_Contains_with_parameter_list(async); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[CustomerID] IN (N'ALFKI', N'ANATR')"); } public override async Task Contains_with_parameter_list_value_type_id(bool async) { await base.Contains_with_parameter_list_value_type_id(async); AssertSql( @"SELECT [o].[OrderID], [o].[CustomerID], [o].[EmployeeID], [o].[OrderDate] FROM [Orders] AS [o] WHERE [o].[OrderID] IN (10248, 10249)"); } public override async Task Contains_with_constant_list_value_type_id(bool async) { await base.Contains_with_constant_list_value_type_id(async); AssertSql( @"SELECT [o].[OrderID], [o].[CustomerID], [o].[EmployeeID], [o].[OrderDate] FROM [Orders] AS [o] WHERE [o].[OrderID] IN (10248, 10249)"); } public override async Task HashSet_Contains_with_parameter(bool async) { await base.HashSet_Contains_with_parameter(async); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[CustomerID] = N'ALFKI'"); } public override async Task ImmutableHashSet_Contains_with_parameter(bool async) { await base.ImmutableHashSet_Contains_with_parameter(async); AssertSql( @"SELECT [c].[CustomerID], 
[c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[CustomerID] = N'ALFKI'"); } public override async Task Contains_over_entityType_with_null_should_rewrite_to_false(bool async) { await base.Contains_over_entityType_with_null_should_rewrite_to_false(async); AssertSql( @"SELECT CAST(0 AS bit)"); } public override async Task Contains_over_entityType_with_null_should_rewrite_to_identity_equality_subquery(bool async) { await base.Contains_over_entityType_with_null_should_rewrite_to_identity_equality_subquery(async); AssertSql( @"SELECT [o].[OrderID], [o].[CustomerID], [o].[EmployeeID], [o].[OrderDate] FROM [Orders] AS [o] WHERE 0 = 1"); } public override async Task Contains_over_scalar_with_null_should_rewrite_to_identity_equality_subquery(bool async) { await base.Contains_over_scalar_with_null_should_rewrite_to_identity_equality_subquery(async); AssertSql( @"SELECT [o].[OrderID], [o].[CustomerID], [o].[EmployeeID], [o].[OrderDate] FROM [Orders] AS [o] WHERE EXISTS ( SELECT 1 FROM [Orders] AS [o0] WHERE ([o0].[CustomerID] = N'VINET') AND [o0].[CustomerID] IS NULL)"); } public override async Task Contains_over_entityType_with_null_should_rewrite_to_identity_equality_subquery_negated(bool async) { await base.Contains_over_entityType_with_null_should_rewrite_to_identity_equality_subquery_negated(async); AssertSql( @"SELECT [o].[OrderID], [o].[CustomerID], [o].[EmployeeID], [o].[OrderDate] FROM [Orders] AS [o] WHERE NOT (EXISTS ( SELECT 1 FROM [Orders] AS [o0] WHERE ([o0].[CustomerID] = N'VINET') AND [o0].[CustomerID] IS NULL))"); } public override async Task Contains_over_entityType_with_null_should_rewrite_to_identity_equality_subquery_complex(bool async) { await base.Contains_over_entityType_with_null_should_rewrite_to_identity_equality_subquery_complex(async); AssertSql( @"SELECT [o].[OrderID], [o].[CustomerID], [o].[EmployeeID], 
[o].[OrderDate] FROM [Orders] AS [o] WHERE CASE WHEN EXISTS ( SELECT 1 FROM [Orders] AS [o0] WHERE ([o0].[CustomerID] = N'VINET') AND [o0].[CustomerID] IS NULL) THEN CAST(1 AS bit) ELSE CAST(0 AS bit) END = CASE WHEN EXISTS ( SELECT 1 FROM [Orders] AS [o1] WHERE (([o1].[CustomerID] <> N'VINET') OR [o1].[CustomerID] IS NULL) AND [o1].[CustomerID] IS NULL) THEN CAST(1 AS bit) ELSE CAST(0 AS bit) END"); } public override async Task Contains_over_nullable_scalar_with_null_in_subquery_translated_correctly(bool async) { await base.Contains_over_nullable_scalar_with_null_in_subquery_translated_correctly(async); AssertSql( @"SELECT CASE WHEN EXISTS ( SELECT 1 FROM [Orders] AS [o] WHERE ([o].[CustomerID] = N'VINET') AND [o].[CustomerID] IS NULL) THEN CAST(1 AS bit) ELSE CAST(0 AS bit) END FROM [Orders] AS [o0]"); } public override async Task Contains_over_non_nullable_scalar_with_null_in_subquery_simplifies_to_false(bool async) { await base.Contains_over_non_nullable_scalar_with_null_in_subquery_simplifies_to_false(async); AssertSql( @"SELECT CAST(0 AS bit) FROM [Orders] AS [o]"); } public override async Task Contains_over_entityType_should_materialize_when_composite(bool async) { await base.Contains_over_entityType_should_materialize_when_composite(async); AssertSql( @"SELECT [o].[OrderID], [o].[ProductID], [o].[Discount], [o].[Quantity], [o].[UnitPrice] FROM [Order Details] AS [o] WHERE ([o].[ProductID] = 42) AND EXISTS ( SELECT 1 FROM [Order Details] AS [o0] WHERE ([o0].[OrderID] = [o].[OrderID]) AND ([o0].[ProductID] = [o].[ProductID]))"); } public override async Task Contains_over_entityType_should_materialize_when_composite2(bool async) { await base.Contains_over_entityType_should_materialize_when_composite2(async); AssertSql( @"SELECT [o].[OrderID], [o].[ProductID], [o].[Discount], [o].[Quantity], [o].[UnitPrice] FROM [Order Details] AS [o] WHERE ([o].[ProductID] = 42) AND EXISTS ( SELECT 1 FROM [Order Details] AS [o0] WHERE ([o0].[OrderID] > 42) AND (([o0].[OrderID] 
= [o].[OrderID]) AND ([o0].[ProductID] = [o].[ProductID])))"); } public override async Task String_FirstOrDefault_in_projection_does_not_do_client_eval(bool async) { await base.String_FirstOrDefault_in_projection_does_not_do_client_eval(async); AssertSql( @"SELECT SUBSTRING([c].[CustomerID], 1, 1) FROM [Customers] AS [c]"); } public override async Task Project_constant_Sum(bool async) { await base.Project_constant_Sum(async); AssertSql( @"SELECT COALESCE(SUM(1), 0) FROM [Employees] AS [e]"); } public override async Task Where_subquery_any_equals_operator(bool async) { await base.Where_subquery_any_equals_operator(async); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[CustomerID] IN (N'ABCDE', N'ALFKI', N'ANATR')"); } public override async Task Where_subquery_any_equals(bool async) { await base.Where_subquery_any_equals(async); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[CustomerID] IN (N'ABCDE', N'ALFKI', N'ANATR')"); } public override async Task Where_subquery_any_equals_static(bool async) { await base.Where_subquery_any_equals_static(async); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[CustomerID] IN (N'ABCDE', N'ALFKI', N'ANATR')"); } public override async Task Where_subquery_where_any(bool async) { await base.Where_subquery_where_any(async); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], 
[c].[Region] FROM [Customers] AS [c] WHERE ([c].[City] = N'México D.F.') AND [c].[CustomerID] IN (N'ABCDE', N'ALFKI', N'ANATR')", // @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE ([c].[City] = N'México D.F.') AND [c].[CustomerID] IN (N'ABCDE', N'ALFKI', N'ANATR')"); } public override async Task Where_subquery_all_not_equals_operator(bool async) { await base.Where_subquery_all_not_equals_operator(async); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[CustomerID] NOT IN (N'ABCDE', N'ALFKI', N'ANATR')"); } public override async Task Where_subquery_all_not_equals(bool async) { await base.Where_subquery_all_not_equals(async); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[CustomerID] NOT IN (N'ABCDE', N'ALFKI', N'ANATR')"); } public override async Task Where_subquery_all_not_equals_static(bool async) { await base.Where_subquery_all_not_equals_static(async); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE [c].[CustomerID] NOT IN (N'ABCDE', N'ALFKI', N'ANATR')"); } public override async Task Where_subquery_where_all(bool async) { await base.Where_subquery_where_all(async); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS 
[c] WHERE ([c].[City] = N'México D.F.') AND [c].[CustomerID] NOT IN (N'ABCDE', N'ALFKI', N'ANATR')", // @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE ([c].[City] = N'México D.F.') AND [c].[CustomerID] NOT IN (N'ABCDE', N'ALFKI', N'ANATR')"); } public override async Task Cast_to_same_Type_Count_works(bool async) { await base.Cast_to_same_Type_Count_works(async); AssertSql( @"SELECT COUNT(*) FROM [Customers] AS [c]"); } public override async Task Cast_before_aggregate_is_preserved(bool async) { await base.Cast_before_aggregate_is_preserved(async); AssertSql( @"SELECT ( SELECT AVG(CAST([o].[OrderID] AS float)) FROM [Orders] AS [o] WHERE [c].[CustomerID] = [o].[CustomerID]) FROM [Customers] AS [c]"); } public override async Task DefaultIfEmpty_selects_only_required_columns(bool async) { await base.DefaultIfEmpty_selects_only_required_columns(async); AssertSql( @"SELECT [p].[ProductName] FROM ( SELECT NULL AS [empty] ) AS [empty] LEFT JOIN [Products] AS [p] ON 1 = 1"); } public override async Task Collection_Last_member_access_in_projection_translated(bool async) { await base.Collection_Last_member_access_in_projection_translated(async); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], [c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE ([c].[CustomerID] LIKE N'F%') AND (( SELECT TOP(1) [o].[CustomerID] FROM [Orders] AS [o] WHERE [c].[CustomerID] = [o].[CustomerID] ORDER BY [o].[OrderID]) = [c].[CustomerID])"); } public override async Task Collection_LastOrDefault_member_access_in_projection_translated(bool async) { await base.Collection_LastOrDefault_member_access_in_projection_translated(async); AssertSql( @"SELECT [c].[CustomerID], [c].[Address], [c].[City], [c].[CompanyName], 
[c].[ContactName], [c].[ContactTitle], [c].[Country], [c].[Fax], [c].[Phone], [c].[PostalCode], [c].[Region] FROM [Customers] AS [c] WHERE ([c].[CustomerID] LIKE N'F%') AND (( SELECT TOP(1) [o].[CustomerID] FROM [Orders] AS [o] WHERE [c].[CustomerID] = [o].[CustomerID] ORDER BY [o].[OrderID]) = [c].[CustomerID])"); } public override async Task Sum_over_explicit_cast_over_column(bool async) { await base.Sum_over_explicit_cast_over_column(async); AssertSql( @"SELECT COALESCE(SUM(CAST([o].[OrderID] AS bigint)), CAST(0 AS bigint)) FROM [Orders] AS [o]"); } public override async Task Count_on_projection_with_client_eval(bool async) { await base.Count_on_projection_with_client_eval(async); AssertSql( @"SELECT COUNT(*) FROM [Orders] AS [o]", // @"SELECT COUNT(*) FROM [Orders] AS [o]", // @"SELECT COUNT(*) FROM [Orders] AS [o]"); } public override async Task Average_on_nav_subquery_in_projection(bool isAsync) { await base.Average_on_nav_subquery_in_projection(isAsync); AssertSql( @"SELECT ( SELECT AVG(CAST([o].[OrderID] AS float)) FROM [Orders] AS [o] WHERE [c].[CustomerID] = [o].[CustomerID]) AS [Ave] FROM [Customers] AS [c] ORDER BY [c].[CustomerID]"); } private void AssertSql(params string[] expected) => Fixture.TestSqlLoggerFactory.AssertBaseline(expected); protected override void ClearLog() => Fixture.TestSqlLoggerFactory.Clear(); } }
37.797179
211
0.586958
[ "Apache-2.0" ]
ImJuzGz2020-org/efcore
test/EFCore.SqlServer.FunctionalTests/Query/NorthwindAggregateOperatorsQuerySqlServerTest.cs
56,286
C#
using FileScout.DataObjects; using FileScout.ScoutingMethods; using Microsoft.VisualStudio.TestTools.UnitTesting; using System.IO; using System.Reflection; using System.Text; namespace FileScout.UnitTest.Tests.ScoutingMethods { /// <summary> /// 行文字数の標準偏差の調査手段のテストを提供します。 /// </summary> [TestClass] public class RowTextLengthStandardDeviationScoutingMethodUnitTest { /// <summary> /// テスト用ディレクトリのパスを取得します。 /// </summary> private string DirectoryPath { get { var root = Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location); var dir = Path.Combine(root, nameof(RowTextLengthStandardDeviationScoutingMethodUnitTest)); return dir; } } /// <summary> /// テストで使うリソースを準備します。 /// </summary> [TestInitialize] public void Initialize() { if (!Directory.Exists(this.DirectoryPath)) { Directory.CreateDirectory(this.DirectoryPath); } } /// <summary> /// テストで使ったリソースを開放します。 /// </summary> [TestCleanup] public void Cleanup() { if (Directory.Exists(this.DirectoryPath)) { Directory.Delete(this.DirectoryPath, true); } } /// <summary> /// バイナリファイルの場合の調査結果を検証します。 /// </summary> [TestMethod] public void Test_Do_ReturnsZero_WhenFileIsBinary() { // テスト用のファイルを用意 var fileName = MethodBase.GetCurrentMethod().Name; var filePath = Path.Combine(this.DirectoryPath, fileName); using (var stream = File.Create(filePath)) { stream.Write(new byte[] { 0x0 }, 0, 1); stream.Flush(); } // テスト対象の処理を実行 var method = new RowTextLengthStandardDeviationScoutingMethod(); var actual = method.Do(new ScoutingClue() { FilePath = filePath }); // テスト結果を検証 Assert.AreEqual("0", actual); } /// <summary> /// 行文字数の標準偏差(小数部なし)の調査結果を検証します。 /// </summary> [TestMethod] public void Test_Do_ReturnsStandardDeviationWithoutDecimalPoint() { // テスト用のファイルを用意 var fileName = MethodBase.GetCurrentMethod().Name; var filePath = Path.Combine(this.DirectoryPath, fileName); var fileEncoding = Encoding.UTF8; using (var stream = File.Create(filePath)) using (var writer = new StreamWriter(stream, fileEncoding)) { writer.WriteLine(new string('0', 
1)); writer.WriteLine(new string('0', 1)); writer.WriteLine(new string('0', 1)); writer.Flush(); } // テスト対象の処理を実行 var method = new RowTextLengthStandardDeviationScoutingMethod(); var actual = method.Do(new ScoutingClue() { FilePath = filePath, Encoding = fileEncoding }); // テスト結果を検証 Assert.AreEqual("0", actual); } /// <summary> /// 行文字数の標準偏差(小数部あり)の調査結果を検証します。 /// </summary> [TestMethod] public void Test_Do_ReturnsStandardDeviationWithDecimalPoint() { // テスト用のファイルを用意 var fileName = MethodBase.GetCurrentMethod().Name; var filePath = Path.Combine(this.DirectoryPath, fileName); var fileEncoding = Encoding.UTF8; using (var stream = File.Create(filePath)) using (var writer = new StreamWriter(stream, fileEncoding)) { writer.WriteLine(new string('0', 1)); writer.WriteLine(new string('0', 2)); writer.WriteLine(new string('0', 3)); writer.Flush(); } // テスト対象の処理を実行 var method = new RowTextLengthStandardDeviationScoutingMethod(); var actual = method.Do(new ScoutingClue() { FilePath = filePath, Encoding = fileEncoding }); // テスト結果を検証 Assert.AreEqual("0.816496580927726", actual); } } }
30.965035
107
0.528004
[ "MIT" ]
Gizmo-Verindipencil/File-Scout
SourceCode/FileScout.UnitTest/Tests/ScoutingMethods/RowTextLengthStandardDeviationScoutingMethodUnitTest.cs
4,926
C#
using System; using System.Collections.Generic; using System.Text; using Xunit; namespace PipServices3.Expressions.Mustache.Parsers { public class MustacheParserTest { [Fact] public void TestLexicalAnalysis() { MustacheParser parser = new MustacheParser(); parser.Template = "Hello, {{{NAME}}}{{ #if ESCLAMATION }}!{{/if}}{{{^ESCLAMATION}}}.{{{/ESCLAMATION}}}"; List<MustacheToken> expectedTokens = new List<MustacheToken> { new MustacheToken(MustacheTokenType.Value, "Hello, ", 0, 0), new MustacheToken(MustacheTokenType.EscapedVariable, "NAME", 0, 0), new MustacheToken(MustacheTokenType.Section, "ESCLAMATION", 0, 0), new MustacheToken(MustacheTokenType.Value, "!", 0, 0), new MustacheToken(MustacheTokenType.SectionEnd, null, 0, 0), new MustacheToken(MustacheTokenType.InvertedSection, "ESCLAMATION", 0, 0), new MustacheToken(MustacheTokenType.Value, ".", 0, 0), new MustacheToken(MustacheTokenType.SectionEnd, "ESCLAMATION", 0, 0), }; var tokens = parser.InitialTokens; Assert.Equal(expectedTokens.Count, tokens.Count); for (int i = 0; i < tokens.Count; i++) { Assert.Equal(expectedTokens[i].Type, tokens[i].Type); Assert.Equal(expectedTokens[i].Value, tokens[i].Value); } } [Fact] public void TestSyntaxAnalysis() { MustacheParser parser = new MustacheParser(); parser.Template = "Hello, {{{NAME}}}{{ #if ESCLAMATION }}!{{/if}}{{{^ESCLAMATION}}}.{{{/ESCLAMATION}}}"; List<MustacheToken> expectedTokens = new List<MustacheToken> { new MustacheToken(MustacheTokenType.Value, "Hello, ", 0, 0), new MustacheToken(MustacheTokenType.EscapedVariable, "NAME", 0, 0), new MustacheToken(MustacheTokenType.Section, "ESCLAMATION", 0, 0), new MustacheToken(MustacheTokenType.InvertedSection, "ESCLAMATION", 0, 0), }; var tokens = parser.ResultTokens; Assert.Equal(expectedTokens.Count, tokens.Count); for (int i = 0; i < tokens.Count; i++) { Assert.Equal(expectedTokens[i].Type, tokens[i].Type); Assert.Equal(expectedTokens[i].Value, tokens[i].Value); } } [Fact] public void TestVariableNames() { 
MustacheParser parser = new MustacheParser(); parser.Template = "Hello, {{{NAME}}}{{ #if ESCLAMATION }}!{{/if}}{{{^ESCLAMATION}}}.{{{/ESCLAMATION}}}"; Assert.Equal(2, parser.VariableNames.Count); Assert.Equal("NAME", parser.VariableNames[0]); Assert.Equal("ESCLAMATION", parser.VariableNames[1]); } } }
40.108108
116
0.573113
[ "MIT" ]
pip-services3-dotnet/pip-services3-expressions-dotnet
test/Mustache/Parsers/MustacheParserTest.cs
2,970
C#
/* Generated SBE (Simple Binary Encoding) message codec */ #pragma warning disable 1591 // disable warning on missing comments using System; using Misakai.Storage.Sbe; namespace Misakai.Storage.Sbe.Tests.Generated { public class SnapshotFullRefresh { public const ushort TemplateId = (ushort)25; public const byte TemplateVersion = (byte)1; public const ushort BlockLength = (ushort)59; public const string SematicType = "W"; private readonly SnapshotFullRefresh _parentMessage; private DirectBuffer _buffer; private int _offset; private int _limit; private int _actingBlockLength; private int _actingVersion; public int Offset { get { return _offset; } } public SnapshotFullRefresh() { _parentMessage = this; } public void WrapForEncode(DirectBuffer buffer, int offset) { _buffer = buffer; _offset = offset; _actingBlockLength = BlockLength; _actingVersion = TemplateVersion; Limit = offset + _actingBlockLength; } public void WrapForDecode(DirectBuffer buffer, int offset, int actingBlockLength, int actingVersion) { _buffer = buffer; _offset = offset; _actingBlockLength = actingBlockLength; _actingVersion = actingVersion; Limit = offset + _actingBlockLength; } public int Size { get { return _limit - _offset; } } public int Limit { get { return _limit; } set { _buffer.CheckLimit(value); _limit = value; } } public const int LastMsgSeqNumProcessedSchemaId = 369; public static string LastMsgSeqNumProcessedMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.Epoch: return "unix"; case MetaAttribute.TimeUnit: return "nanosecond"; case MetaAttribute.SemanticType: return "SeqNum"; } return ""; } public const uint LastMsgSeqNumProcessedNullValue = 4294967294U; public const uint LastMsgSeqNumProcessedMinValue = 0U; public const uint LastMsgSeqNumProcessedMaxValue = 4294967293U; public uint LastMsgSeqNumProcessed { get { return _buffer.Uint32GetLittleEndian(_offset + 0); } set { _buffer.Uint32PutLittleEndian(_offset + 0, value); } } public const int 
TotNumReportsSchemaId = 911; public static string TotNumReportsMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.Epoch: return "unix"; case MetaAttribute.TimeUnit: return "nanosecond"; case MetaAttribute.SemanticType: return "int"; } return ""; } public const uint TotNumReportsNullValue = 4294967294U; public const uint TotNumReportsMinValue = 0U; public const uint TotNumReportsMaxValue = 4294967293U; public uint TotNumReports { get { return _buffer.Uint32GetLittleEndian(_offset + 4); } set { _buffer.Uint32PutLittleEndian(_offset + 4, value); } } public const int SecurityIDSchemaId = 48; public static string SecurityIDMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.Epoch: return "unix"; case MetaAttribute.TimeUnit: return "nanosecond"; case MetaAttribute.SemanticType: return "int"; } return ""; } public const int SecurityIDNullValue = -2147483648; public const int SecurityIDMinValue = -2147483647; public const int SecurityIDMaxValue = 2147483647; public int SecurityID { get { return _buffer.Int32GetLittleEndian(_offset + 8); } set { _buffer.Int32PutLittleEndian(_offset + 8, value); } } public const int RptSeqSchemaId = 83; public static string RptSeqMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.Epoch: return "unix"; case MetaAttribute.TimeUnit: return "nanosecond"; case MetaAttribute.SemanticType: return "SeqNum"; } return ""; } public const uint RptSeqNullValue = 4294967294U; public const uint RptSeqMinValue = 0U; public const uint RptSeqMaxValue = 4294967293U; public uint RptSeq { get { return _buffer.Uint32GetLittleEndian(_offset + 12); } set { _buffer.Uint32PutLittleEndian(_offset + 12, value); } } public const int TransactTimeSchemaId = 60; public static string TransactTimeMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.Epoch: return "unix"; case MetaAttribute.TimeUnit: return "nanosecond"; case 
MetaAttribute.SemanticType: return "UTCTimestamp"; } return ""; } public const ulong TransactTimeNullValue = 0x8000000000000000UL; public const ulong TransactTimeMinValue = 0x0UL; public const ulong TransactTimeMaxValue = 0x7fffffffffffffffUL; public ulong TransactTime { get { return _buffer.Uint64GetLittleEndian(_offset + 16); } set { _buffer.Uint64PutLittleEndian(_offset + 16, value); } } public const int LastUpdateTimeSchemaId = 779; public static string LastUpdateTimeMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.Epoch: return "unix"; case MetaAttribute.TimeUnit: return "nanosecond"; case MetaAttribute.SemanticType: return "UTCTimestamp"; } return ""; } public const ulong LastUpdateTimeNullValue = 0x8000000000000000UL; public const ulong LastUpdateTimeMinValue = 0x0UL; public const ulong LastUpdateTimeMaxValue = 0x7fffffffffffffffUL; public ulong LastUpdateTime { get { return _buffer.Uint64GetLittleEndian(_offset + 24); } set { _buffer.Uint64PutLittleEndian(_offset + 24, value); } } public const int TradeDateSchemaId = 75; public static string TradeDateMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.Epoch: return "unix"; case MetaAttribute.TimeUnit: return "nanosecond"; case MetaAttribute.SemanticType: return "LocalMktDate"; } return ""; } public const ushort TradeDateNullValue = (ushort)65535; public const ushort TradeDateMinValue = (ushort)0; public const ushort TradeDateMaxValue = (ushort)65534; public ushort TradeDate { get { return _buffer.Uint16GetLittleEndian(_offset + 32); } set { _buffer.Uint16PutLittleEndian(_offset + 32, value); } } public const int MDSecurityTradingStatusSchemaId = 1682; public static string MDSecurityTradingStatusMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.Epoch: return "unix"; case MetaAttribute.TimeUnit: return "nanosecond"; case MetaAttribute.SemanticType: return "int"; } return ""; } public 
SecurityTradingStatus MDSecurityTradingStatus { get { return (SecurityTradingStatus)_buffer.Uint8Get(_offset + 34); } set { _buffer.Uint8Put(_offset + 34, (byte)value); } } public const int HighLimitPriceSchemaId = 1149; public static string HighLimitPriceMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.Epoch: return "unix"; case MetaAttribute.TimeUnit: return "nanosecond"; case MetaAttribute.SemanticType: return "Price"; } return ""; } private readonly PRICENULL _highLimitPrice = new PRICENULL(); public PRICENULL HighLimitPrice { get { _highLimitPrice.Wrap(_buffer, _offset + 35, _actingVersion); return _highLimitPrice; } } public const int LowLimitPriceSchemaId = 1148; public static string LowLimitPriceMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.Epoch: return "unix"; case MetaAttribute.TimeUnit: return "nanosecond"; case MetaAttribute.SemanticType: return "Price"; } return ""; } private readonly PRICENULL _lowLimitPrice = new PRICENULL(); public PRICENULL LowLimitPrice { get { _lowLimitPrice.Wrap(_buffer, _offset + 43, _actingVersion); return _lowLimitPrice; } } public const int MaxPriceVariationSchemaId = 1143; public static string MaxPriceVariationMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.Epoch: return "unix"; case MetaAttribute.TimeUnit: return "nanosecond"; case MetaAttribute.SemanticType: return "Price"; } return ""; } private readonly PRICENULL _maxPriceVariation = new PRICENULL(); public PRICENULL MaxPriceVariation { get { _maxPriceVariation.Wrap(_buffer, _offset + 51, _actingVersion); return _maxPriceVariation; } } private readonly NoMDEntriesGroup _noMDEntries = new NoMDEntriesGroup(); public const long NoMDEntriesSchemaId = 268; public NoMDEntriesGroup NoMDEntries { get { _noMDEntries.WrapForDecode(_parentMessage, _buffer, _actingVersion); return _noMDEntries; } } public NoMDEntriesGroup NoMDEntriesCount(int count) { 
_noMDEntries.WrapForEncode(_parentMessage, _buffer, count); return _noMDEntries; } public class NoMDEntriesGroup { private readonly GroupSize _dimensions = new GroupSize(); private SnapshotFullRefresh _parentMessage; private DirectBuffer _buffer; private int _blockLength; private int _actingVersion; private int _count; private int _index; private int _offset; public void WrapForDecode(SnapshotFullRefresh parentMessage, DirectBuffer buffer, int actingVersion) { _parentMessage = parentMessage; _buffer = buffer; _dimensions.Wrap(buffer, parentMessage.Limit, actingVersion); _count = _dimensions.NumInGroup; _blockLength = _dimensions.BlockLength; _actingVersion = actingVersion; _index = -1; _parentMessage.Limit = parentMessage.Limit + 3; } public void WrapForEncode(SnapshotFullRefresh parentMessage, DirectBuffer buffer, int count) { _parentMessage = parentMessage; _buffer = buffer; _dimensions.Wrap(buffer, parentMessage.Limit, _actingVersion); _dimensions.NumInGroup = (byte)count; _dimensions.BlockLength = (ushort)22; _index = -1; _count = count; _blockLength = 22; parentMessage.Limit = parentMessage.Limit + 3; } public int Count { get { return _count; } } public bool HasNext { get { return _index + 1 < _count; } } public NoMDEntriesGroup Next() { if (_index + 1 >= _count) { throw new InvalidOperationException(); } _offset = _parentMessage.Limit; _parentMessage.Limit = _offset + _blockLength; ++_index; return this; } public const int MDEntryTypeSchemaId = 269; public static string MDEntryTypeMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.Epoch: return "unix"; case MetaAttribute.TimeUnit: return "nanosecond"; case MetaAttribute.SemanticType: return "char"; } return ""; } public MDEntryType MDEntryType { get { return (MDEntryType)_buffer.CharGet(_offset + 0); } set { _buffer.CharPut(_offset + 0, (byte)value); } } public const int MDPriceLevelSchemaId = 1023; public static string MDPriceLevelMetaAttribute(MetaAttribute 
metaAttribute) { switch (metaAttribute) { case MetaAttribute.Epoch: return "unix"; case MetaAttribute.TimeUnit: return "nanosecond"; case MetaAttribute.SemanticType: return "int"; } return ""; } public const sbyte MDPriceLevelNullValue = (sbyte)127; public const sbyte MDPriceLevelMinValue = (sbyte)-127; public const sbyte MDPriceLevelMaxValue = (sbyte)127; public sbyte MDPriceLevel { get { return _buffer.Int8Get(_offset + 1); } set { _buffer.Int8Put(_offset + 1, value); } } public const int MDEntryPxSchemaId = 270; public static string MDEntryPxMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.Epoch: return "unix"; case MetaAttribute.TimeUnit: return "nanosecond"; case MetaAttribute.SemanticType: return "Price"; } return ""; } private readonly PRICENULL _mDEntryPx = new PRICENULL(); public PRICENULL MDEntryPx { get { _mDEntryPx.Wrap(_buffer, _offset + 2, _actingVersion); return _mDEntryPx; } } public const int MDEntrySizeSchemaId = 271; public static string MDEntrySizeMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.Epoch: return "unix"; case MetaAttribute.TimeUnit: return "nanosecond"; case MetaAttribute.SemanticType: return "Qty"; } return ""; } public const int MDEntrySizeNullValue = 2147483647; public const int MDEntrySizeMinValue = -2147483647; public const int MDEntrySizeMaxValue = 2147483647; public int MDEntrySize { get { return _buffer.Int32GetLittleEndian(_offset + 10); } set { _buffer.Int32PutLittleEndian(_offset + 10, value); } } public const int NumberOfOrdersSchemaId = 346; public static string NumberOfOrdersMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.Epoch: return "unix"; case MetaAttribute.TimeUnit: return "nanosecond"; case MetaAttribute.SemanticType: return "int"; } return ""; } public const int NumberOfOrdersNullValue = 2147483647; public const int NumberOfOrdersMinValue = -2147483647; public const int NumberOfOrdersMaxValue 
= 2147483647; public int NumberOfOrders { get { return _buffer.Int32GetLittleEndian(_offset + 14); } set { _buffer.Int32PutLittleEndian(_offset + 14, value); } } public const int OpenCloseSettlFlagSchemaId = 286; public static string OpenCloseSettlFlagMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.Epoch: return "unix"; case MetaAttribute.TimeUnit: return "nanosecond"; case MetaAttribute.SemanticType: return "int"; } return ""; } public OpenCloseSettlFlag OpenCloseSettlFlag { get { return (OpenCloseSettlFlag)_buffer.Uint8Get(_offset + 18); } set { _buffer.Uint8Put(_offset + 18, (byte)value); } } public const int SettlPriceTypeSchemaId = 731; public static string SettlPriceTypeMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.Epoch: return "unix"; case MetaAttribute.TimeUnit: return "nanosecond"; case MetaAttribute.SemanticType: return "MultipleCharValue"; } return ""; } public SettlPriceType SettlPriceType { get { return (SettlPriceType)_buffer.Uint8Get(_offset + 19); } set { _buffer.Uint8Put(_offset + 19, (byte)value); } } public const int TradingReferenceDateSchemaId = 5796; public static string TradingReferenceDateMetaAttribute(MetaAttribute metaAttribute) { switch (metaAttribute) { case MetaAttribute.Epoch: return "unix"; case MetaAttribute.TimeUnit: return "nanosecond"; case MetaAttribute.SemanticType: return "LocalMktDate"; } return ""; } public const ushort TradingReferenceDateNullValue = (ushort)65535; public const ushort TradingReferenceDateMinValue = (ushort)0; public const ushort TradingReferenceDateMaxValue = (ushort)65534; public ushort TradingReferenceDate { get { return _buffer.Uint16GetLittleEndian(_offset + 20); } set { _buffer.Uint16PutLittleEndian(_offset + 20, value); } } } } }
26.249651
108
0.585569
[ "Apache-2.0" ]
Kelindar/misakai-storage-sbe
test/csharp/Generated/SnapshotFullRefresh.cs
18,821
C#
using ICSharpCode.AvalonEdit.CodeCompletion; using ICSharpCode.AvalonEdit.Document; using System; using System.Collections.Immutable; using System.Threading.Tasks; using System.Windows; using Microsoft.CodeAnalysis; using System.Windows.Media; using ICSharpCode.AvalonEdit.Editing; using Microsoft.CodeAnalysis.Tags; using System.Linq; namespace ScriptPad.Editor { public class CodeCompletionData : ICompletionData { public CodeCompletionData(DocumentId id, Microsoft.CodeAnalysis.Completion.CompletionItem item) { this.Text = item.DisplayText; image = new Lazy<ImageSource>(() => ImageResource.GetImage(item.Tags)); description = new Lazy<object>(() => Service.ScriptCompletionService.GetDescriptionAsync(id, item).Result); this.Content = Text; } public string Text { get; } public object Description { get => description.Value; } public object Content { get; set; } public ImageSource Image { get => image.Value; } public double Priority => 0; public void Complete(TextArea textArea, ISegment completionSegment, EventArgs insertionRequestEventArgs) { textArea.Document.Replace(completionSegment, Text); } private Lazy<object> description; private Lazy<ImageSource> image; } }
30.977273
119
0.695525
[ "MIT" ]
kongdetuo/RevitTools
src/ScriptPad/Editor/CodeCompletionData.cs
1,365
C#
using System; using System.Threading.Tasks; namespace PersistentStorage { public interface IPersistentStorageGroup { T Get<T>(string key, T defaultValue); void Put<T>(string key, T value); void Delete(string key); Task<T> GetAsync<T>(string key, T defaultValue); Task PutAsync<T>(string key, T value); Task DeleteAsync(string key); } }
21.105263
56
0.640898
[ "MIT" ]
jankrib/PersistentStorage
PersistentStorage.Core/IPersistentStorageGroup.cs
403
C#
// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the MIT Licence. // See the LICENCE file in the repository root for full licence text. using System; using System.Linq; using osu.Framework.Allocation; using osu.Framework.Bindables; using osu.Framework.Graphics; using osu.Framework.Graphics.Containers; using osu.Framework.Graphics.Shapes; using osu.Framework.Graphics.Sprites; using osu.Framework.Input.Events; using osu.Game.Graphics; using osu.Game.Graphics.Sprites; using osu.Game.Online.API.Requests.Responses; using osu.Game.Resources.Localisation.Web; using osuTK; namespace osu.Game.Overlays.Profile.Header.Components { public class PreviousUsernames : CompositeDrawable { private const int duration = 200; private const int margin = 10; private const int width = 310; private const int move_offset = 15; public readonly Bindable<APIUser> User = new Bindable<APIUser>(); private readonly TextFlowContainer text; private readonly Box background; private readonly SpriteText header; public PreviousUsernames() { HoverIconContainer hoverIcon; AutoSizeAxes = Axes.Y; Width = width; Masking = true; CornerRadius = 5; AddRangeInternal(new Drawable[] { background = new Box { RelativeSizeAxes = Axes.Both, }, new GridContainer { AutoSizeAxes = Axes.Y, RelativeSizeAxes = Axes.X, RowDimensions = new[] { new Dimension(GridSizeMode.AutoSize), new Dimension(GridSizeMode.AutoSize) }, ColumnDimensions = new[] { new Dimension(GridSizeMode.AutoSize), new Dimension() }, Content = new[] { new Drawable[] { hoverIcon = new HoverIconContainer(), header = new OsuSpriteText { Anchor = Anchor.BottomLeft, Origin = Anchor.BottomLeft, Text = UsersStrings.ShowPreviousUsernames, Font = OsuFont.GetFont(size: 10, italics: true) } }, new Drawable[] { new Container { RelativeSizeAxes = Axes.Both, }, text = new TextFlowContainer(s => s.Font = OsuFont.GetFont(size: 12, weight: FontWeight.Bold, italics: true)) { RelativeSizeAxes = Axes.X, AutoSizeAxes = Axes.Y, Direction = FillDirection.Full, Margin 
= new MarginPadding { Bottom = margin, Top = margin / 2f } } } } } }); hoverIcon.ActivateHover += showContent; hideContent(); } [BackgroundDependencyLoader] private void load(OsuColour colours) { background.Colour = colours.GreySeaFoamDarker; } protected override void LoadComplete() { base.LoadComplete(); User.BindValueChanged(onUserChanged, true); } private void onUserChanged(ValueChangedEvent<APIUser> user) { text.Text = string.Empty; string[] usernames = user.NewValue?.PreviousUsernames; if (usernames?.Any() ?? false) { text.Text = string.Join(", ", usernames); Show(); return; } Hide(); } protected override void OnHoverLost(HoverLostEvent e) { base.OnHoverLost(e); hideContent(); } private void showContent() { text.FadeIn(duration, Easing.OutQuint); header.FadeIn(duration, Easing.OutQuint); background.FadeIn(duration, Easing.OutQuint); this.MoveToY(-move_offset, duration, Easing.OutQuint); } private void hideContent() { text.FadeOut(duration, Easing.OutQuint); header.FadeOut(duration, Easing.OutQuint); background.FadeOut(duration, Easing.OutQuint); this.MoveToY(0, duration, Easing.OutQuint); } private class HoverIconContainer : Container { public Action ActivateHover; public HoverIconContainer() { AutoSizeAxes = Axes.Both; Child = new SpriteIcon { Margin = new MarginPadding { Top = 6, Left = margin, Right = margin * 2 }, Size = new Vector2(15), Icon = FontAwesome.Solid.IdCard, }; } protected override bool OnHover(HoverEvent e) { ActivateHover?.Invoke(); return base.OnHover(e); } } } }
33.678363
138
0.469179
[ "MIT" ]
20PercentRendered/osu
osu.Game/Overlays/Profile/Header/Components/PreviousUsernames.cs
5,591
C#
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ using Aliyun.Acs.Core; using Aliyun.Acs.Core.Http; using Aliyun.Acs.Core.Transform; using Aliyun.Acs.Core.Utils; using Aliyun.Acs.R_kvstore.Transform; using Aliyun.Acs.R_kvstore.Transform.V20150101; using System.Collections.Generic; namespace Aliyun.Acs.R_kvstore.Model.V20150101 { public class ModifyInstanceSSLRequest : RpcAcsRequest<ModifyInstanceSSLResponse> { public ModifyInstanceSSLRequest() : base("R_kvstore", "2015-01-01", "ModifyInstanceSSL", "redisa", "openAPI") { } private long? resourceOwnerId; private string instanceId; private string securityToken; private string resourceOwnerAccount; private string ownerAccount; private string action; private long? ownerId; private string accessKeyId; private string sSLEnabled; public long? 
ResourceOwnerId { get { return resourceOwnerId; } set { resourceOwnerId = value; DictionaryUtil.Add(QueryParameters, "ResourceOwnerId", value.ToString()); } } public string InstanceId { get { return instanceId; } set { instanceId = value; DictionaryUtil.Add(QueryParameters, "InstanceId", value); } } public string SecurityToken { get { return securityToken; } set { securityToken = value; DictionaryUtil.Add(QueryParameters, "SecurityToken", value); } } public string ResourceOwnerAccount { get { return resourceOwnerAccount; } set { resourceOwnerAccount = value; DictionaryUtil.Add(QueryParameters, "ResourceOwnerAccount", value); } } public string OwnerAccount { get { return ownerAccount; } set { ownerAccount = value; DictionaryUtil.Add(QueryParameters, "OwnerAccount", value); } } public string Action { get { return action; } set { action = value; DictionaryUtil.Add(QueryParameters, "Action", value); } } public long? OwnerId { get { return ownerId; } set { ownerId = value; DictionaryUtil.Add(QueryParameters, "OwnerId", value.ToString()); } } public string AccessKeyId { get { return accessKeyId; } set { accessKeyId = value; DictionaryUtil.Add(QueryParameters, "AccessKeyId", value); } } public string SSLEnabled { get { return sSLEnabled; } set { sSLEnabled = value; DictionaryUtil.Add(QueryParameters, "SSLEnabled", value); } } public override ModifyInstanceSSLResponse GetResponse(Core.Transform.UnmarshallerContext unmarshallerContext) { return ModifyInstanceSSLResponseUnmarshaller.Unmarshall(unmarshallerContext); } } }
20.926136
117
0.653272
[ "Apache-2.0" ]
brightness007/unofficial-aliyun-openapi-net-sdk
aliyun-net-sdk-r-kvstore/R_kvstore/Model/V20150101/ModifyInstanceSSLRequest.cs
3,683
C#
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information. using System.Data; using Xunit; namespace Microsoft.Data.SqlClient.ManualTesting.Tests { public static class SqlNamedPipesTest { [ConditionalFact(typeof(DataTestUtility), nameof(DataTestUtility.AreConnStringsSetup), nameof(DataTestUtility.IsNotAzureServer))] [PlatformSpecific(TestPlatforms.Windows)] // Named pipes with the given input strings are not supported on Unix public static void ValidConnStringTest() { SqlConnectionStringBuilder builder = new SqlConnectionStringBuilder(DataTestUtility.NPConnectionString); builder.ConnectTimeout = 5; OpenGoodConnection(builder.ConnectionString); } private static void OpenGoodConnection(string connectionString) { using (SqlConnection conn = new SqlConnection(connectionString)) { conn.Open(); DataTestUtility.AssertEqualsWithDescription(ConnectionState.Open, conn.State, "FAILED: Connection should be in open state"); } } } }
38.242424
140
0.701268
[ "MIT" ]
0xced/SqlClient
src/Microsoft.Data.SqlClient/tests/ManualTests/SQL/SqlNamedPipesTest/SqlNamedPipesTest.cs
1,262
C#
using System; using NUnit.Framework; using Microsoft.FSharp.Core; using FSharpx; namespace FSharpx.CSharpTests { [TestFixture] public class EnumerableTests { [Test] public void FirstOrNone_None() { var a = new int[0]; Assert.AreEqual(FSharpOption<int>.None, a.FirstOrNone()); } [Test] public void FirstOrNone_Some() { var a = new int[] {1,2,3}; Assert.AreEqual(1.Some(), a.FirstOrNone()); } } }
23
70
0.548204
[ "Apache-2.0" ]
dmohl/fsharpx
tests/FSharpx.CSharpTests/EnumerableTests.cs
531
C#
using System; using System.Collections.Generic; using System.Text; namespace QK.Framework.Core.Ioc { public interface IDependency { } }
13.636364
33
0.72
[ "Apache-2.0" ]
hywb/QK.Framework
QK.Framework/Core/Ioc/IDependency.cs
152
C#
using System; using Xunit; namespace CRM.Communication.FunctionalTests { public class UnitTest1 { [Fact] public void Test1() { Assert.Equal(1, 1); } } }
14.933333
44
0.504464
[ "MIT" ]
LeonardoMaran/crm-1
src/Communication/CRM.Communication.FunctionalTests/UnitTest1.cs
224
C#
using System; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; namespace Mem.Models { public enum NoteClassification { Nessuno = 0, Arancione = 1, Blu = 2, Giallo = 3, Rosso = 4, Verde = 5, Viola = 6 } public class NoteModel { public int ID; public string Customer; public string CustomerCleaned; public DateTime DateCreated; public DateTime DateUpdated; public NoteClassification Classification; public string Text; } public class ByCustomer : IEqualityComparer<NoteModel> { public bool Equals(NoteModel x, NoteModel y) { return x.CustomerCleaned == y.CustomerCleaned; } public int GetHashCode(NoteModel obj) { return obj.CustomerCleaned.GetHashCode(); } } public class CustomerModel { public string Customer; public string CustomerCleaned; } public class NoteSearchModel { public string filterDate { get; set; } public string searchString { get; set; } } }
20.327586
58
0.592027
[ "MIT" ]
fedtes/mem
Mem/Models/NoteModel.cs
1,181
C#
/******************************************************************************* * Copyright 2009-2016 Amazon Services. All Rights Reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * * You may not use this file except in compliance with the License. * You may obtain a copy of the License at: http://aws.amazon.com/apache2.0 * This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. ******************************************************************************* * List Inbound Shipment Items By Next Token Result * API Version: 2010-10-01 * Library Version: 2016-10-05 * Generated: Wed Oct 05 06:15:39 PDT 2016 */ using System; using System.Xml; using System.Xml.Serialization; using Claytondus.AmazonMWS.Runtime; namespace Claytondus.AmazonMWS.FbaInbound.Model { [XmlTypeAttribute(Namespace = "http://mws.amazonaws.com/FulfillmentInboundShipment/2010-10-01/")] [XmlRootAttribute(Namespace = "http://mws.amazonaws.com/FulfillmentInboundShipment/2010-10-01/", IsNullable = false)] public class ListInboundShipmentItemsByNextTokenResult : AbstractMwsObject { private InboundShipmentItemList _itemData; private string _nextToken; /// <summary> /// Gets and sets the ItemData property. /// </summary> [XmlElementAttribute(ElementName = "ItemData")] public InboundShipmentItemList ItemData { get { return this._itemData; } set { this._itemData = value; } } /// <summary> /// Sets the ItemData property. /// </summary> /// <param name="itemData">ItemData property.</param> /// <returns>this instance.</returns> public ListInboundShipmentItemsByNextTokenResult WithItemData(InboundShipmentItemList itemData) { this._itemData = itemData; return this; } /// <summary> /// Checks if ItemData property is set. 
/// </summary> /// <returns>true if ItemData property is set.</returns> public bool IsSetItemData() { return this._itemData != null; } /// <summary> /// Gets and sets the NextToken property. /// </summary> [XmlElementAttribute(ElementName = "NextToken")] public string NextToken { get { return this._nextToken; } set { this._nextToken = value; } } /// <summary> /// Sets the NextToken property. /// </summary> /// <param name="nextToken">NextToken property.</param> /// <returns>this instance.</returns> public ListInboundShipmentItemsByNextTokenResult WithNextToken(string nextToken) { this._nextToken = nextToken; return this; } /// <summary> /// Checks if NextToken property is set. /// </summary> /// <returns>true if NextToken property is set.</returns> public bool IsSetNextToken() { return this._nextToken != null; } public override void ReadFragmentFrom(IMwsReader reader) { _itemData = reader.Read<InboundShipmentItemList>("ItemData"); _nextToken = reader.Read<string>("NextToken"); } public override void WriteFragmentTo(IMwsWriter writer) { writer.Write("ItemData", _itemData); writer.Write("NextToken", _nextToken); } public override void WriteTo(IMwsWriter writer) { writer.Write("http://mws.amazonaws.com/FulfillmentInboundShipment/2010-10-01/", "ListInboundShipmentItemsByNextTokenResult", this); } public ListInboundShipmentItemsByNextTokenResult() : base() { } } }
33.948718
143
0.597432
[ "Apache-2.0" ]
claytondus/Claytondus.AmazonMWS
Claytondus.AmazonMWS.FbaInbound/Model/ListInboundShipmentItemsByNextTokenResult.cs
3,972
C#
using System.Reflection; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; // General Information about an assembly is controlled through the following // set of attributes. Change these attribute values to modify the information // associated with an assembly. [assembly: AssemblyTitle("01. Async")] [assembly: AssemblyDescription("")] [assembly: AssemblyConfiguration("")] [assembly: AssemblyCompany("")] [assembly: AssemblyProduct("01. Async")] [assembly: AssemblyCopyright("Copyright © 2016")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] // Setting ComVisible to false makes the types in this assembly not visible // to COM components. If you need to access a type in this assembly from // COM, set the ComVisible attribute to true on that type. [assembly: ComVisible(false)] // The following GUID is for the ID of the typelib if this project is exposed to COM [assembly: Guid("4f2cc898-bfaf-41ef-9f1d-4ba8ba813d30")] // Version information for an assembly consists of the following four values: // // Major Version // Minor Version // Build Number // Revision // // You can specify all the values or you can default the Build and Revision Numbers // by using the '*' as shown below: // [assembly: AssemblyVersion("1.0.*")] [assembly: AssemblyVersion("1.0.0.0")] [assembly: AssemblyFileVersion("1.0.0.0")]
37.594595
84
0.741193
[ "Unlicense" ]
GoldenR1618/SoftUni-Projects
05.C#_Advanced/10.ASYNCHRONOUS_PROGRAMMING/09. CSharp-Advanced-Asynchronous-Programming-Demos/01. Sync/Properties/AssemblyInfo.cs
1,394
C#
/* * Copyright 2010-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ /* * Do not modify this file. This file is generated from the importexport-2010-06-01.normal.json service model. */ using System; using Amazon.Runtime; using Amazon.Util.Internal; namespace Amazon.ImportExport { /// <summary> /// Configuration for accessing Amazon ImportExport service /// </summary> public partial class AmazonImportExportConfig : ClientConfig { private static readonly string UserAgentString = InternalSDKUtils.BuildUserAgentString("3.3.100.207"); private string _userAgent = UserAgentString; /// <summary> /// Default constructor /// </summary> public AmazonImportExportConfig() { this.AuthenticationServiceName = "importexport"; } /// <summary> /// The constant used to lookup in the region hash the endpoint. /// </summary> public override string RegionEndpointServiceName { get { return "importexport"; } } /// <summary> /// Gets the ServiceVersion property. /// </summary> public override string ServiceVersion { get { return "2010-06-01"; } } /// <summary> /// Gets the value of UserAgent property. /// </summary> public override string UserAgent { get { return _userAgent; } } } }
26.4875
110
0.592732
[ "Apache-2.0" ]
JeffAshton/aws-sdk-net
sdk/src/Services/ImportExport/Generated/AmazonImportExportConfig.cs
2,119
C#
using System; using System.Globalization; using System.Linq; using System.Reflection; namespace CalculadoraWebApi.Areas.HelpPage.ModelDescriptions { internal static class ModelNameHelper { // Modify this to provide custom model name mapping. public static string GetModelName(Type type) { ModelNameAttribute modelNameAttribute = type.GetCustomAttribute<ModelNameAttribute>(); if (modelNameAttribute != null && !String.IsNullOrEmpty(modelNameAttribute.Name)) { return modelNameAttribute.Name; } string modelName = type.Name; if (type.IsGenericType) { // Format the generic type name to something like: GenericOfAgurment1AndArgument2 Type genericType = type.GetGenericTypeDefinition(); Type[] genericArguments = type.GetGenericArguments(); string genericTypeName = genericType.Name; // Trim the generic parameter counts from the name genericTypeName = genericTypeName.Substring(0, genericTypeName.IndexOf('`')); string[] argumentTypeNames = genericArguments.Select(t => GetModelName(t)).ToArray(); modelName = String.Format(CultureInfo.InvariantCulture, "{0}Of{1}", genericTypeName, String.Join("And", argumentTypeNames)); } return modelName; } } }
40.166667
140
0.637621
[ "MIT" ]
Fantasmy/AngularWeApiCalculadora
CalculadoraWebApi/CalculadoraWebApi/Areas/HelpPage/ModelDescriptions/ModelNameHelper.cs
1,446
C#
using System; using Trustlink.Network.P2P.Payloads; using Trustlink.Persistence; using Trustlink.SmartContract; using Trustlink.SmartContract.Native; namespace Trustlink.UnitTests.Extensions { public static class NativeContractExtensions { public static StackItem Call(this NativeContract contract, Snapshot snapshot, string method, params ContractParameter[] args) { return Call(contract, snapshot, null, method, args); } public static StackItem Call(this NativeContract contract, Snapshot snapshot, IVerifiable container, string method, params ContractParameter[] args) { var engine = new ApplicationEngine(TriggerType.Application, container, snapshot, 0, true); engine.LoadScript(contract.Script); var script = new ScriptBuilder(); for (var i = args.Length - 1; i >= 0; i--) script.EmitPush(args[i]); script.EmitPush(args.Length); script.Emit(OpCode.PACK); script.EmitPush(method); engine.LoadScript(script.ToArray()); if (engine.Execute() != VMState.HALT) { throw new InvalidOperationException(); } return engine.ResultStack.Pop(); } } }
31.658537
156
0.634823
[ "MIT" ]
Trustlink-chain/trustlink
trustlink.UnitTests/Extensions/NativeContractExtensions.cs
1,298
C#
// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. // See the LICENSE file in the project root for more information namespace DotNetNuke.Web.Mvc.Helpers { using System; using System.Linq.Expressions; using System.Web.Mvc; using System.Web.Mvc.Html; /// <summary> /// Gets the HTML ID and name attributes of the <see cref="T:DotNetNuke.Web.Mvc.HtmlHelper"/> string. /// </summary> public static class HtmlNameExtensions { /// <summary> /// Gets the ID of the <see cref="T:DotNetNuke.Web.Mvc.HtmlHelper"/> string. /// </summary> /// /// <returns> /// The HTML ID attribute value for the object that is represented by the expression. /// </returns> /// <param name="html">The HTML helper instance that this method extends.</param><param name="name">An expression that identifies the object that contains the ID.</param> public static MvcHtmlString Id(this DnnHtmlHelper html, string name) { return html.HtmlHelper.Id(name); } /// <summary> /// Gets the ID of the <see cref="T:DotNetNuke.Web.Mvc.HtmlHelper"/> string. /// </summary> /// /// <returns> /// The HTML ID attribute value for the object that is represented by the expression. /// </returns> /// <param name="html">The HTML helper instance that this method extends.</param><param name="expression">An expression that identifies the object that contains the ID.</param><typeparam name="TModel">The type of the model.</typeparam><typeparam name="TProperty">The type of the property.</typeparam> public static MvcHtmlString IdFor<TModel, TProperty>(this DnnHtmlHelper<TModel> html, Expression<Func<TModel, TProperty>> expression) { return html.HtmlHelper.IdFor(expression); } /// <summary> /// Gets the ID of the <see cref="T:DotNetNuke.Web.Mvc.HtmlHelper"/> string. /// </summary> /// /// <returns> /// The HTML ID attribute value for the object that is represented by the expression. 
/// </returns> /// <param name="html">The HTML helper instance that this method extends.</param> public static MvcHtmlString IdForModel(this DnnHtmlHelper html) { return html.HtmlHelper.IdForModel(); } /// <summary> /// Gets the full HTML field name for the object that is represented by the expression. /// </summary> /// /// <returns> /// The full HTML field name for the object that is represented by the expression. /// </returns> /// <param name="html">The HTML helper instance that this method extends.</param><param name="name">An expression that identifies the object that contains the name.</param> public static MvcHtmlString Name(this DnnHtmlHelper html, string name) { return html.HtmlHelper.Name(name); } /// <summary> /// Gets the full HTML field name for the object that is represented by the expression. /// </summary> /// /// <returns> /// The full HTML field name for the object that is represented by the expression. /// </returns> /// <param name="html">The HTML helper instance that this method extends.</param><param name="expression">An expression that identifies the object that contains the name.</param><typeparam name="TModel">The type of the model.</typeparam><typeparam name="TProperty">The type of the property.</typeparam> public static MvcHtmlString NameFor<TModel, TProperty>(this DnnHtmlHelper<TModel> html, Expression<Func<TModel, TProperty>> expression) { return html.HtmlHelper.NameFor(expression); } /// <summary> /// Gets the full HTML field name for the object that is represented by the expression. /// </summary> /// /// <returns> /// The full HTML field name for the object that is represented by the expression. /// </returns> /// <param name="html">The HTML helper instance that this method extends.</param> public static MvcHtmlString NameForModel(this DnnHtmlHelper html) { return html.HtmlHelper.NameForModel(); } } }
46.239583
310
0.632124
[ "MIT" ]
Mariusz11711/DNN
DNN Platform/DotNetNuke.Web.Mvc/Helpers/HtmlNameExtensions.cs
4,441
C#
using System; using System.Collections.Generic; using System.Text; namespace ProgrammingCSharp { public class Perro:Animal { public int Edad { get; set; } public string Raza { get; set; } public void Ladrar() { Console.WriteLine("Guau!"); } public override void Comer() { Console.WriteLine("Perro comiendo..."); } public override string HacerUnTruco() { return "Da la pata"; } } }
18.642857
51
0.532567
[ "Apache-2.0" ]
DJkimer/ProgrammingWithCSharp
ProgrammingCSharp/ProgrammingCSharp/Perro.cs
524
C#
/* * Copyright 2010-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ /* * Do not modify this file. This file is generated from the comprehend-2017-11-27.normal.json service model. */ using System; using System.Collections.Generic; using System.Globalization; using System.IO; using System.Text; using System.Xml.Serialization; using Amazon.Comprehend.Model; using Amazon.Runtime; using Amazon.Runtime.Internal; using Amazon.Runtime.Internal.Transform; using Amazon.Runtime.Internal.Util; using ThirdParty.Json.LitJson; namespace Amazon.Comprehend.Model.Internal.MarshallTransformations { /// <summary> /// StartSentimentDetectionJob Request Marshaller /// </summary> public class StartSentimentDetectionJobRequestMarshaller : IMarshaller<IRequest, StartSentimentDetectionJobRequest> , IMarshaller<IRequest,AmazonWebServiceRequest> { /// <summary> /// Marshaller the request object to the HTTP request. /// </summary> /// <param name="input"></param> /// <returns></returns> public IRequest Marshall(AmazonWebServiceRequest input) { return this.Marshall((StartSentimentDetectionJobRequest)input); } /// <summary> /// Marshaller the request object to the HTTP request. 
/// </summary> /// <param name="publicRequest"></param> /// <returns></returns> public IRequest Marshall(StartSentimentDetectionJobRequest publicRequest) { IRequest request = new DefaultRequest(publicRequest, "Amazon.Comprehend"); string target = "Comprehend_20171127.StartSentimentDetectionJob"; request.Headers["X-Amz-Target"] = target; request.Headers["Content-Type"] = "application/x-amz-json-1.1"; request.Headers[Amazon.Util.HeaderKeys.XAmzApiVersion] = "2017-11-27"; request.HttpMethod = "POST"; request.ResourcePath = "/"; request.MarshallerVersion = 2; using (StringWriter stringWriter = new StringWriter(CultureInfo.InvariantCulture)) { JsonWriter writer = new JsonWriter(stringWriter); writer.WriteObjectStart(); var context = new JsonMarshallerContext(request, writer); if(publicRequest.IsSetClientRequestToken()) { context.Writer.WritePropertyName("ClientRequestToken"); context.Writer.Write(publicRequest.ClientRequestToken); } else if(!(publicRequest.IsSetClientRequestToken())) { context.Writer.WritePropertyName("ClientRequestToken"); context.Writer.Write(Guid.NewGuid().ToString()); } if(publicRequest.IsSetDataAccessRoleArn()) { context.Writer.WritePropertyName("DataAccessRoleArn"); context.Writer.Write(publicRequest.DataAccessRoleArn); } if(publicRequest.IsSetInputDataConfig()) { context.Writer.WritePropertyName("InputDataConfig"); context.Writer.WriteObjectStart(); var marshaller = InputDataConfigMarshaller.Instance; marshaller.Marshall(publicRequest.InputDataConfig, context); context.Writer.WriteObjectEnd(); } if(publicRequest.IsSetJobName()) { context.Writer.WritePropertyName("JobName"); context.Writer.Write(publicRequest.JobName); } if(publicRequest.IsSetLanguageCode()) { context.Writer.WritePropertyName("LanguageCode"); context.Writer.Write(publicRequest.LanguageCode); } if(publicRequest.IsSetOutputDataConfig()) { context.Writer.WritePropertyName("OutputDataConfig"); context.Writer.WriteObjectStart(); var marshaller = OutputDataConfigMarshaller.Instance; 
marshaller.Marshall(publicRequest.OutputDataConfig, context); context.Writer.WriteObjectEnd(); } if(publicRequest.IsSetVolumeKmsKeyId()) { context.Writer.WritePropertyName("VolumeKmsKeyId"); context.Writer.Write(publicRequest.VolumeKmsKeyId); } if(publicRequest.IsSetVpcConfig()) { context.Writer.WritePropertyName("VpcConfig"); context.Writer.WriteObjectStart(); var marshaller = VpcConfigMarshaller.Instance; marshaller.Marshall(publicRequest.VpcConfig, context); context.Writer.WriteObjectEnd(); } writer.WriteObjectEnd(); string snippet = stringWriter.ToString(); request.Content = System.Text.Encoding.UTF8.GetBytes(snippet); } return request; } private static StartSentimentDetectionJobRequestMarshaller _instance = new StartSentimentDetectionJobRequestMarshaller(); internal static StartSentimentDetectionJobRequestMarshaller GetInstance() { return _instance; } /// <summary> /// Gets the singleton. /// </summary> public static StartSentimentDetectionJobRequestMarshaller Instance { get { return _instance; } } } }
37.964072
167
0.592902
[ "Apache-2.0" ]
DetlefGolze/aws-sdk-net
sdk/src/Services/Comprehend/Generated/Model/Internal/MarshallTransformations/StartSentimentDetectionJobRequestMarshaller.cs
6,340
C#
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using Handelabra.Sentinels.Engine.Controller;
using Handelabra.Sentinels.Engine.Model;

namespace Cauldron.Starlight
{
    /// <summary>
    /// Card: "Whenever damage would be dealt to another hero target, you may destroy a
    /// constellation card in play to prevent that damage."
    /// Tracks "already declined" state per damage action so the same pretend/real action
    /// does not re-prompt the player.
    /// </summary>
    public class NightloreArmorCardController : StarlightCardController
    {
        public NightloreArmorCardController(Card card, TurnTakerController turnTakerController) : base(card, turnTakerController)
        {
            // Show the number of constellations in play as a special string on the card.
            SpecialStringMaker.ShowNumberOfCardsInPlay(new LinqCardCriteria((Card c) => IsConstellation(c), "constellation"));
        }

        // Keys for card properties persisted on the card between trigger invocations.
        public readonly string HasConstellationBeenDestroyed = "HasConstellationBeenDestroyed";
        public readonly string HasSaidNoToNightloreArmor = "HasSaidNoToNightloreArmor";
        public readonly string CurrentActionGUID = "CurrentActionGUID";

        // In-memory "player said no" flag plus a key identifying which damage action it
        // applies to, so a later action asks again.
        private bool hasSaidNoToNightloreArmor = false;
        private int currentActionGUID = -1;

        public override void AddTriggers()
        {
            //"Whenever damage would be dealt to another hero target, you may destroy a constellation card in play to prevent that damage."
            // "another hero target" = any hero target that is not one of the Starlight character cards.
            AddTrigger((DealDamageAction dd) => dd.Target.IsHero && !ListStarlights().Contains(dd.Target) && dd.Amount > 0,
                DestroyConstellationToPreventDamage,
                new TriggerType[] { TriggerType.DestroyCard, TriggerType.WouldBeDealtDamage, TriggerType.CancelAction },
                timing: TriggerTiming.Before);
        }

        /// <summary>
        /// Optionally destroys a constellation to prevent the triggering damage.
        /// Uses HasConstellationBeenDestroyed to carry the "already paid the cost"
        /// state across re-entry for the same damage action.
        /// </summary>
        private IEnumerator DestroyConstellationToPreventDamage(DealDamageAction dd)
        {
            // If the player already declined for this exact action, skip the prompt.
            // NOTE(review): the action key is dd.InstanceIdentifier.GetHashCode() +
            // dd.Target.GetHashCode(); hash collisions between distinct actions are
            // theoretically possible — confirm this is acceptable here.
            if(hasSaidNoToNightloreArmor == true && currentActionGUID == dd.InstanceIdentifier.GetHashCode() + dd.Target.GetHashCode())
            {
                if (!dd.IsPretend)
                {
                    // Real (non-pretend) pass: the action is resolving, clear the flag.
                    hasSaidNoToNightloreArmor = false;
                    currentActionGUID = -1;
                }
                yield break;
            }
            if (!IsPropertyTrue(HasConstellationBeenDestroyed))
            {
                var constellationsInPlay = FindCardsWhere(IsConstellationInPlay);
                if (constellationsInPlay.Count() == 0)
                {
                    //don't bother player with trigger they can't do anything about
                    yield break;
                }

                //"...you may..."
                List<YesNoCardDecision> yesNoDecision = new List<YesNoCardDecision> { };
                //What does the associatedCards argument actually do here? Should/should not be passing in the list of constellations?
                //Looks like it puts a constellation on the other side of the decision. Probably a good idea.
                IEnumerator askPrevent = GameController.MakeYesNoCardDecision(HeroTurnTakerController, SelectionType.PreventDamage, Card, dd, yesNoDecision, constellationsInPlay, GetCardSource());
                if (UseUnityCoroutines)
                {
                    yield return GameController.StartCoroutine(askPrevent);
                }
                else
                {
                    GameController.ExhaustCoroutine(askPrevent);
                }
                if (!DidPlayerAnswerYes(yesNoDecision))
                {
                    // Remember the refusal for this specific action so a pretend +
                    // real pass over the same action does not ask twice.
                    hasSaidNoToNightloreArmor = true;
                    currentActionGUID = dd.InstanceIdentifier.GetHashCode() + dd.Target.GetHashCode();
                    yield break;
                }

                //"...destroy a constellation in play..."
                List<DestroyCardAction> storedResults = new List<DestroyCardAction>();
                IEnumerator destroyConstellation = GameController.SelectAndDestroyCard(HeroTurnTakerController, new LinqCardCriteria(IsConstellationInPlay, "constellation"), optional: false, storedResultsAction: storedResults, cardSource: GetCardSource());
                if (UseUnityCoroutines)
                {
                    yield return GameController.StartCoroutine(destroyConstellation);
                }
                else
                {
                    GameController.ExhaustCoroutine(destroyConstellation);
                }
                if(DidDestroyCard(storedResults))
                {
                    // Cost paid: record it so the prevention below fires (and re-entry skips the prompt).
                    SetCardProperty(HasConstellationBeenDestroyed, true);
                }
            }
            if (IsPropertyTrue(HasConstellationBeenDestroyed))
            {
                //"...to prevent that damage."
                IEnumerator preventDamage = CancelAction(dd, isPreventEffect: true);
                if (UseUnityCoroutines)
                {
                    yield return GameController.StartCoroutine(preventDamage);
                }
                else
                {
                    GameController.ExhaustCoroutine(preventDamage);
                }
                if (!dd.IsPretend)
                {
                    // Real resolution complete: reset all per-action state.
                    SetCardProperty(HasConstellationBeenDestroyed, false);
                    hasSaidNoToNightloreArmor = false;
                    currentActionGUID = -1;
                }
            }
            yield break;
        }

        // A constellation that is actually in play (not merely moving/next-to-deck).
        private bool IsConstellationInPlay(Card c)
        {
            return IsConstellation(c) && c.IsInPlayAndHasGameText;
        }
    }
}
42.832
256
0.577325
[ "MIT" ]
SotMSteamMods/CauldronMods
CauldronMods/Controller/Heroes/Starlight/Cards/NightloreArmorCardController.cs
5,354
C#
using System;
using System.Threading.Tasks;
using BettingGame.Betting.Core.Domain;
using BettingGame.Betting.Core.Shared.Abstraction;

namespace BettingGame.Betting.Core.Features.TeamHandling.Abstraction
{
    /// <summary>
    /// Command-side (write) repository for <see cref="TeamMetadata"/> entities.
    /// </summary>
    public interface ITeamMetadataCommandRepository : ICommandRepository<TeamMetadata>
    {
        /// <summary>
        /// Creates or updates ("upserts") the <see cref="TeamMetadata"/> identified by
        /// <paramref name="id"/>, applying <paramref name="setValues"/> to the entity
        /// before it is persisted, and returns the resulting entity.
        /// NOTE(review): exact insert-vs-update semantics are defined by the
        /// implementation — confirm there.
        /// </summary>
        Task<TeamMetadata> UpsertAsync(Guid id, Action<TeamMetadata> setValues);
    }
}
28.785714
87
0.769231
[ "Apache-2.0" ]
msallin/BettingGame
BettingGame.Betting.Core/Features/TeamHandling/Abstraction/ITeamMetadataCommandRepository.cs
405
C#
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Interactivity;
using System.Windows.Interop;
using WindowsSharp.Processes;

namespace Everythingbar
{
    /// <summary>
    /// Attached behavior that shows the system menu of <see cref="TargetWindow"/>
    /// when the templated-parent <see cref="ListViewItem"/> is right-clicked.
    /// </summary>
    public class SysMenuBehavior : Behavior<FrameworkElement>
    {
        /// <summary>The window whose system menu is shown on right-click.</summary>
        public ProcessWindow TargetWindow
        {
            get => (ProcessWindow)GetValue(TargetWindowProperty);
            set => SetValue(TargetWindowProperty, value);
        }

        public static readonly DependencyProperty TargetWindowProperty =
            DependencyProperty.Register("TargetWindow", typeof(ProcessWindow), typeof(SysMenuBehavior), new PropertyMetadata());

        // The ListViewItem hosting the associated element, resolved in OnAttached.
        ListViewItem _item;

        protected override void OnAttached()
        {
            base.OnAttached();

            // The behavior sits on an element inside the item template, so the
            // ListViewItem is the templated parent (may be null outside a template).
            _item = AssociatedObject.TemplatedParent as ListViewItem;

            if (_item != null)
            {
                _item.PreviewMouseRightButtonUp += ListViewItem_PreviewMouseRightButtonUp;
            }
        }

        /// <summary>
        /// Fix: the original never unsubscribed the handler, so a detached behavior
        /// kept the ListViewItem and behavior alive (event-handler leak). Mirror the
        /// subscription made in <see cref="OnAttached"/>.
        /// </summary>
        protected override void OnDetaching()
        {
            if (_item != null)
            {
                _item.PreviewMouseRightButtonUp -= ListViewItem_PreviewMouseRightButtonUp;
                _item = null;
            }
            base.OnDetaching();
        }

        private void ListViewItem_PreviewMouseRightButtonUp(object sender, System.Windows.Input.MouseButtonEventArgs e)
        {
            // EnsureHandle forces creation of the owning window's HWND so the
            // native system menu can be positioned against it.
            TargetWindow.ShowSystemMenu(new WindowInteropHelper(Window.GetWindow(_item)).EnsureHandle());
        }
    }
}
34.5
139
0.656832
[ "MIT" ]
StartNine/Superbar
Superbar/SysMenuBehavior.cs
1,934
C#
using Sdl.Web.Common.Configuration;
using Sdl.Web.Common.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using Newtonsoft.Json;

namespace Sdl.Web.Tridion.Tests.Models
{
    /// <summary>
    /// Paged, dynamically-queried list of entities of type <typeparamref name="T"/>,
    /// mapped from a "ContentQuery" CMS component (schema.org ItemList semantics).
    /// </summary>
    [SemanticEntity(Vocab = SchemaOrgVocabulary, EntityName = "ItemList", Prefix = "s", Public = true)]
    [SemanticEntity("ContentQuery")]
    public class ContentList<T> : DynamicList where T : EntityModel
    {
        [SemanticProperty("s:headline")]
        public string Headline { get; set; }
        public Link Link { get; set; }
        // CMS-configured content type; its Key selects the schema queried (see MapSchema).
        public Tag ContentType { get; set; }
        // CMS-configured sort order; its Key is passed through to the broker query.
        public Tag Sort { get; set; }
        public int PageSize { get; set; }

        // 1-based page number derived from Start/PageSize; a PageSize of 0 means "page 1".
        public int CurrentPage
        {
            get { return PageSize == 0 ? 1 : (Start / PageSize) + 1; }
        }

        /// <summary>
        /// Builds the broker query for the current page.
        /// NOTE(review): dereferences ContentType.Key and Sort.Key without null checks —
        /// confirm both Tags are always populated by the CMS mapping.
        /// </summary>
        public override Sdl.Web.Common.Models.Query GetQuery(Localization localization)
        {
            return new SimpleBrokerQuery
            {
                Start = Start,
                PageSize = PageSize,
                PublicationId = Int32.Parse(localization.Id),
                SchemaId = MapSchema(ContentType.Key, localization),
                Sort = Sort.Key,
                Localization = localization
            };
        }

        /// <summary>
        /// Resolves a schema key of the form "module.schema" (or bare "schema", which
        /// implies the core module) to a numeric schema id via localization config.
        /// Returns 0 when the configured value is missing or non-numeric (the TryParse
        /// failure is deliberately ignored).
        /// </summary>
        protected int MapSchema(string schemaKey, Localization localization)
        {
            string[] schemaKeyParts = schemaKey.Split('.');
            string moduleName = schemaKeyParts.Length > 1 ? schemaKeyParts[0] : SiteConfiguration.CoreModuleName;
            schemaKey = schemaKeyParts.Length > 1 ? schemaKeyParts[1] : schemaKeyParts[0];
            string schemaId = localization.GetConfigValue(string.Format("{0}.schemas.{1}", moduleName, schemaKey));
            int result;
            Int32.TryParse(schemaId, out result);
            return result;
        }

        [JsonIgnore]
        public override Type ResultType
        {
            get { return typeof(T); }
        }

        // Typed view over the untyped QueryResults of the base class.
        public List<T> ItemListElements
        {
            get
            {
                return QueryResults.Cast<T>().ToList();
            }
            set
            {
                if (value != null)
                {
                    QueryResults = value.Cast<EntityModel>().ToList();
                }
                else
                {
                    QueryResults = null;
                }
            }
        }
    }
}
30.567901
115
0.521405
[ "Apache-2.0" ]
JaimeSA/dxa-web-application-dotnet
Sdl.Web.Tridion.Tests/Models/Entity/ContentList.cs
2,478
C#
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.

namespace Microsoft.CommonDataModel.ObjectModel.Persistence.ModelJson.types
{
    using Newtonsoft.Json;
    using System;

    /// <summary>
    /// Defines a base class for an entity.
    /// An entity is a set of attributes and metadata that defines a concept
    /// like Account or Contact and can be defined by any data producer.
    /// If you make changes to this class, please note a custom serializer is used <see cref="CustomSerializer"/>
    /// </summary>
    public class Entity : DataObject
    {
        // "$type" discriminator; Order = -2 serializes it ahead of other members so
        // readers can dispatch on it.
        [JsonProperty("$type", Order = -2)]
        public string Type { get; set; }

        // The three timestamps below are optional and omitted from JSON when null.
        [JsonProperty("cdm:lastChildFileModifiedTime", NullValueHandling = NullValueHandling.Ignore)]
        public DateTimeOffset? LastChildFileModifiedTime { get; set; }

        [JsonProperty("cdm:lastFileModifiedTime", NullValueHandling = NullValueHandling.Ignore)]
        public DateTimeOffset? LastFileModifiedTime { get; set; }

        [JsonProperty("cdm:lastFileStatusCheckTime", NullValueHandling = NullValueHandling.Ignore)]
        public DateTimeOffset? LastFileStatusCheckTime { get; set; }
    }
}
42.466667
113
0.710361
[ "MIT" ]
AnkurSri-SPP/Dynamics-365-FastTrack-Implementation-Assets
Analytics/CDMUtilSolution/CDM.ObjectModel/Persistence/ModelJson/types/Entity.cs
1,276
C#
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Threading;
using System.Xml;
using System.Diagnostics;
using System.Collections.Generic;
using System.Runtime.CompilerServices;

namespace System.Runtime.Serialization.Json
{
    /// <summary>
    /// JSON counterpart of a ClassDataContract. Lazily builds (and caches on the
    /// helper) the reader/writer delegates used to (de)serialize instances, choosing
    /// between a reflection-based implementation and a generated one depending on
    /// DataContractSerializer.Option.
    /// </summary>
    internal class JsonClassDataContract : JsonDataContract
    {
        private readonly JsonClassDataContractCriticalHelper _helper;

        public JsonClassDataContract(ClassDataContract traditionalDataContract)
            : base(new JsonClassDataContractCriticalHelper(traditionalDataContract))
        {
            // The base stores the helper; re-cast it to the JSON-class-specific type.
            _helper = base.Helper as JsonClassDataContractCriticalHelper;
        }

        private JsonFormatClassReaderDelegate CreateJsonFormatReaderDelegate()
        {
            return new ReflectionJsonClassReader(TraditionalClassDataContract).ReflectionReadClass;
        }

        // Lazily-created reader delegate, double-checked-locking style.
        // The explicit Interlocked.MemoryBarrier() ensures the fully-constructed
        // delegate is visible before the cached field is published.
        // NOTE(review): lock (this) is generally discouraged (callers could lock the
        // same object); a private lock object would be safer — confirm before changing,
        // this mirrors the upstream runtime source.
        internal JsonFormatClassReaderDelegate JsonFormatReaderDelegate
        {
            get
            {
                if (_helper.JsonFormatReaderDelegate == null)
                {
                    lock (this)
                    {
                        if (_helper.JsonFormatReaderDelegate == null)
                        {
                            JsonFormatClassReaderDelegate tempDelegate;
                            if (DataContractSerializer.Option == SerializationOption.ReflectionOnly)
                            {
                                tempDelegate = CreateJsonFormatReaderDelegate();
                            }
                            else
                            {
                                tempDelegate = new JsonFormatReaderGenerator().GenerateClassReader(TraditionalClassDataContract);
                            }
                            Interlocked.MemoryBarrier();
                            _helper.JsonFormatReaderDelegate = tempDelegate;
                        }
                    }
                }
                return _helper.JsonFormatReaderDelegate;
            }
        }

        private JsonFormatClassWriterDelegate CreateJsonFormatWriterDelegate()
        {
            return new ReflectionJsonFormatWriter().ReflectionWriteClass;
        }

        // Lazily-created writer delegate; same double-checked pattern as the reader.
        internal JsonFormatClassWriterDelegate JsonFormatWriterDelegate
        {
            get
            {
                if (_helper.JsonFormatWriterDelegate == null)
                {
                    lock (this)
                    {
                        if (_helper.JsonFormatWriterDelegate == null)
                        {
                            JsonFormatClassWriterDelegate tempDelegate;
                            if (DataContractSerializer.Option == SerializationOption.ReflectionOnly)
                            {
                                tempDelegate = CreateJsonFormatWriterDelegate();
                            }
                            else
                            {
                                tempDelegate = new JsonFormatWriterGenerator().GenerateClassWriter(TraditionalClassDataContract);
                            }
                            Interlocked.MemoryBarrier();
                            _helper.JsonFormatWriterDelegate = tempDelegate;
                        }
                    }
                }
                return _helper.JsonFormatWriterDelegate;
            }
        }

        internal XmlDictionaryString[] MemberNames => _helper.MemberNames;
        internal override string TypeName => _helper.TypeName;
        private ClassDataContract TraditionalClassDataContract => _helper.TraditionalClassDataContract;

        public override object ReadJsonValueCore(XmlReaderDelegator jsonReader, XmlObjectSerializerReadContextComplexJson context)
        {
            jsonReader.Read();
            object o = JsonFormatReaderDelegate(jsonReader, context, XmlDictionaryString.Empty, MemberNames);
            jsonReader.ReadEndElement();
            return o;
        }

        public override void WriteJsonValueCore(XmlWriterDelegator jsonWriter, object obj, XmlObjectSerializerWriteContextComplexJson context, RuntimeTypeHandle declaredTypeHandle)
        {
            // Objects are tagged with type="object" in the JSON/XML mapping.
            jsonWriter.WriteAttributeString(null, JsonGlobals.typeString, null, JsonGlobals.objectString);
            JsonFormatWriterDelegate(jsonWriter, obj, context, TraditionalClassDataContract, MemberNames);
        }

        /// <summary>
        /// Holds the cached delegates, the JSON type name and the JSON-converted member
        /// names for the wrapped traditional contract.
        /// </summary>
        private class JsonClassDataContractCriticalHelper : JsonDataContractCriticalHelper
        {
            private JsonFormatClassReaderDelegate _jsonFormatReaderDelegate;
            private JsonFormatClassWriterDelegate _jsonFormatWriterDelegate;
            private XmlDictionaryString[] _memberNames;
            private readonly ClassDataContract _traditionalClassDataContract;
            private readonly string _typeName;

            public JsonClassDataContractCriticalHelper(ClassDataContract traditionalDataContract)
                : base(traditionalDataContract)
            {
                // Type name is "Name" for the default namespace, otherwise
                // "Name<separator>TruncatedNamespace".
                _typeName = string.IsNullOrEmpty(traditionalDataContract.Namespace.Value) ? traditionalDataContract.Name.Value : string.Concat(traditionalDataContract.Name.Value, JsonGlobals.NameValueSeparatorString, XmlObjectSerializerWriteContextComplexJson.TruncateDefaultDataContractNamespace(traditionalDataContract.Namespace.Value));
                _traditionalClassDataContract = traditionalDataContract;
                CopyMembersAndCheckDuplicateNames();
            }

            internal JsonFormatClassReaderDelegate JsonFormatReaderDelegate
            {
                get { return _jsonFormatReaderDelegate; }
                set { _jsonFormatReaderDelegate = value; }
            }

            internal JsonFormatClassWriterDelegate JsonFormatWriterDelegate
            {
                get { return _jsonFormatWriterDelegate; }
                set { _jsonFormatWriterDelegate = value; }
            }

            internal XmlDictionaryString[] MemberNames
            {
                get { return _memberNames; }
            }

            internal ClassDataContract TraditionalClassDataContract
            {
                get { return _traditionalClassDataContract; }
            }

            // Converts XML member names to their JSON form, throwing on duplicates
            // (duplicate names would produce ambiguous JSON properties).
            private void CopyMembersAndCheckDuplicateNames()
            {
                if (_traditionalClassDataContract.MemberNames != null)
                {
                    int memberCount = _traditionalClassDataContract.MemberNames.Length;
                    Dictionary<string, object> memberTable = new Dictionary<string, object>(memberCount);
                    XmlDictionaryString[] decodedMemberNames = new XmlDictionaryString[memberCount];
                    for (int i = 0; i < memberCount; i++)
                    {
                        if (memberTable.ContainsKey(_traditionalClassDataContract.MemberNames[i].Value))
                        {
                            throw new SerializationException(SR.Format(SR.JsonDuplicateMemberNames, DataContract.GetClrTypeFullName(_traditionalClassDataContract.UnderlyingType), _traditionalClassDataContract.MemberNames[i].Value));
                        }
                        else
                        {
                            memberTable.Add(_traditionalClassDataContract.MemberNames[i].Value, null);
                            decodedMemberNames[i] = DataContractJsonSerializerImpl.ConvertXmlNameToJsonName(_traditionalClassDataContract.MemberNames[i]);
                        }
                    }
                    _memberNames = decodedMemberNames;
                }
            }
        }
    }
}
339
0.595797
[ "MIT" ]
06needhamt/runtime
src/libraries/System.Private.DataContractSerialization/src/System/Runtime/Serialization/Json/JsonClassDataContract.cs
7,709
C#
namespace ScotlandsMountains.Domain;

/// <summary>
/// A group of mountains with summary information for each member.
/// NOTE(review): grouping semantics (classification, region, …) inferred from
/// names — confirm against the callers that build these groups.
/// </summary>
public class MountainGroup : Entity
{
    /// <summary>Summaries of the mountains belonging to this group.</summary>
    public List<MountainSummary> Mountains { get; set; } = new();

    /// <summary>
    /// Number of mountains in the group. Appears to be a denormalised copy of
    /// <c>Mountains.Count</c> — confirm it is kept in sync where groups are built.
    /// (Idiom fix: dropped the redundant "= 0" initializer; int defaults to 0.)
    /// </summary>
    public int MountainsCount { get; set; }
}
24.25
65
0.701031
[ "MIT" ]
graham-miller/ScotlandsMountains
Domain/MountainGroup.cs
196
C#
// *** WARNING: this file was generated by the Pulumi SDK Generator. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***

using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Threading.Tasks;
using Pulumi.Serialization;

namespace Pulumi.AzureNextGen.Network.V20170301.Inputs
{
    /// <summary>
    /// SSL certificates of an application gateway.
    /// </summary>
    public sealed class ApplicationGatewaySslCertificateArgs : Pulumi.ResourceArgs
    {
        /// <summary>
        /// Base-64 encoded pfx certificate. Only applicable in PUT Request.
        /// </summary>
        [Input("data")]
        public Input<string>? Data { get; set; }

        /// <summary>
        /// A unique read-only string that changes whenever the resource is updated.
        /// </summary>
        [Input("etag")]
        public Input<string>? Etag { get; set; }

        /// <summary>
        /// Resource ID.
        /// </summary>
        [Input("id")]
        public Input<string>? Id { get; set; }

        /// <summary>
        /// Name of the resource that is unique within a resource group. This name can be used to access the resource.
        /// </summary>
        [Input("name")]
        public Input<string>? Name { get; set; }

        /// <summary>
        /// Password for the pfx file specified in data. Only applicable in PUT request.
        /// NOTE(review): sensitive value — confirm it is supplied as a Pulumi secret by callers.
        /// </summary>
        [Input("password")]
        public Input<string>? Password { get; set; }

        /// <summary>
        /// Provisioning state of the SSL certificate resource Possible values are: 'Updating', 'Deleting', and 'Failed'.
        /// </summary>
        [Input("provisioningState")]
        public Input<string>? ProvisioningState { get; set; }

        /// <summary>
        /// Base-64 encoded Public cert data corresponding to pfx specified in data. Only applicable in GET request.
        /// </summary>
        [Input("publicCertData")]
        public Input<string>? PublicCertData { get; set; }

        public ApplicationGatewaySslCertificateArgs()
        {
        }
    }
}
32.830769
121
0.599813
[ "Apache-2.0" ]
pulumi/pulumi-azure-nextgen
sdk/dotnet/Network/V20170301/Inputs/ApplicationGatewaySslCertificateArgs.cs
2,134
C#
using System;
using System.Xml.Serialization;

namespace Alipay.AopSdk.Domain
{
    /// <summary>
    /// KoubeiCateringItemlistQueryModel Data Structure.
    /// Query model for listing catering items (paged) in the Koubei open API.
    /// </summary>
    [Serializable]
    public class KoubeiCateringItemlistQueryModel : AopObject
    {
        /// <summary>
        /// Required when the caller acts in a Koubei role (ISV partner, partner staff,
        /// merchant, or merchant staff); corresponds to the auth_code returned by
        /// "koubei.member.data.oauth.query", valid for 24 hours by default.
        /// Not required when the ISV operates on its own behalf.
        /// </summary>
        [XmlElement("auth_code")]
        public string AuthCode { get; set; }

        /// <summary>
        /// Query items in a specific status. Values: INIT (not yet listed),
        /// EFFECTIVE (listed), PAUSE (paused), FREEZE (frozen), INVALID (delisted).
        /// When empty, items in all statuses are queried.
        /// </summary>
        [XmlElement("item_status")]
        public string ItemStatus { get; set; }

        /// <summary>
        /// Operator identity type: ISV when an ISV operates on behalf of a merchant,
        /// MERCHANT for a merchant, M_STAFF for merchant staff.
        /// </summary>
        [XmlElement("operator_type")]
        public string OperatorType { get; set; }

        /// <summary>
        /// Page number (integer): which page of results to query.
        /// </summary>
        [XmlElement("page_no")]
        public long PageNo { get; set; }

        /// <summary>
        /// Number of items displayed per page (integer).
        /// </summary>
        [XmlElement("page_size")]
        public long PageSize { get; set; }

        /// <summary>
        /// Request id; letters and digits, defined by the developer (must not repeat).
        /// </summary>
        [XmlElement("request_id")]
        public string RequestId { get; set; }
    }
}
29.122449
116
0.587947
[ "MIT" ]
ArcherTrister/LeXun.Alipay.AopSdk
src/Alipay.AopSdk/Domain/KoubeiCateringItemlistQueryModel.cs
1,873
C#
// Copyright © 2016 onwards, Andrew Whewell // All rights reserved. // // Redistribution and use of this software in source and binary forms, with or without modification, are permitted provided that the following conditions are met: // * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. // * Neither the name of the author nor the names of the program's contributors may be used to endorse or promote products derived from this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OF THE SOFTWARE BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.IO;
using System.Security.Cryptography;

namespace ChecksumFiles
{
    /// <summary>
    /// Command-line tool: walks a root folder, writes one "crc64 length path" line
    /// per file (to stdout or to -out), and can optionally prepend a checksum over
    /// the whole checksum list (-addContentChecksum).
    /// </summary>
    class Program
    {
        // CRC64 implementation shared by all checksum calls.
        static Crc64 _Crc64 = new Crc64();

        static void Main(string[] args)
        {
            try
            {
                var cmdArgs = new CommandLineArgs(args);
                var root = cmdArgs.MandatoryString("root", r => Usage(r));
                if(!Directory.Exists(root)) Usage(String.Format("{0} does not exist or is not a folder", root));

                // -out is optional; when given, its folder is created on demand.
                var outFileName = cmdArgs.OptionalString("out");
                if(outFileName != null)
                {
                    var folder = Path.GetDirectoryName(Path.GetFullPath(outFileName));
                    if(!Directory.Exists(folder))
                    {
                        Directory.CreateDirectory(folder);
                    }
                }
                // Console.Out must not be disposed, hence the conditional Dispose below.
                var outWriter = outFileName == null ? Console.Out : new StreamWriter(outFileName, false);
                GenerateChecksums(root, outWriter);
                if(outFileName != null)
                {
                    outWriter.Dispose();
                }

                // Re-reads the written file and prepends a checksum over all checksum
                // lines (marker path "\**CONTENT CHECKSUM**").
                // NOTE(review): -addContentChecksum without -out leaves outFileName null,
                // so File.ReadAllLines throws and the run ends via the catch/Usage path —
                // confirm that is the intended behaviour.
                if(cmdArgs.HasKeyWithNoValue("addContentChecksum"))
                {
                    var checksums = File.ReadAllLines(outFileName);
                    var checksumBytes = Encoding.UTF8.GetBytes(String.Concat(checksums));
                    var checksum = ChecksumBytes(checksumBytes);
                    var contentLines = new List<string>();
                    contentLines.Add(FormatChecksumLine(checksum, checksums.Sum(r => r.Length), "\\**CONTENT CHECKSUM**"));
                    contentLines.AddRange(checksums);
                    File.WriteAllLines(outFileName, contentLines);
                }
            }
            catch(Exception ex)
            {
                // All failures surface as a usage message plus the exception text.
                Usage(String.Format("Exception caught: {0}", ex.ToString()));
            }
        }

        // Prints usage (plus an optional message) and exits with a non-zero code.
        static void Usage(string message)
        {
            Console.WriteLine("ChecksumFiles <-root folder> [-out output file] [-addContentChecksum]");
            if(!String.IsNullOrEmpty(message)) Console.WriteLine("{0}{1}", Environment.NewLine, message);
            Environment.Exit(1);
        }

        // Writes one checksum line per file under folder (recursive). The path written
        // is the portion after the root folder prefix.
        static void GenerateChecksums(string folder, TextWriter output)
        {
            foreach(var fileName in Directory.GetFiles(folder, "*.*", SearchOption.AllDirectories))
            {
                var relativePath = fileName.Substring(folder.Length);
                var checksum = ChecksumFile(fileName);
                output.WriteLine(FormatChecksumLine(checksum, new FileInfo(fileName).Length, relativePath));
            }
        }

        // Checksums an entire file's contents (read fully into memory).
        static string ChecksumFile(string fileName)
        {
            var content = File.ReadAllBytes(fileName);
            return ChecksumBytes(content);
        }

        static string ChecksumBytes(byte[] content)
        {
            return _Crc64.ComputeChecksumString(content, 0, content.Length);
        }

        // Fixed-width line: checksum, 9-char right-aligned length, then the path.
        static string FormatChecksumLine(string checksum, long fileLength, string relativePath)
        {
            return String.Format("{0} {1,9} {2}", checksum, fileLength, relativePath);
        }
    }
}
750
0.631721
[ "BSD-3-Clause" ]
J0hnLiu/vrs
ThirdParty/ChecksumFiles/Program.cs
4,820
C#
//
// Copyright (c) Microsoft.  All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

namespace Microsoft.Azure.Commands.ApiManagement.Models
{
    using System;
    using System.Collections.Generic;
    using System.Linq;
    using System.Text.RegularExpressions;
    using Microsoft.Azure.Commands.ApiManagement.Properties;
    using Microsoft.Azure.Management.ApiManagement.Models;

    /// <summary>
    /// PowerShell-facing projection of an Azure API Management service resource.
    /// Flattens the SDK's <see cref="ApiManagementServiceResource"/> (SKU, hostnames,
    /// network, regions, certificates, identity) into simple properties and provides
    /// helpers to add/update/remove additional regions in memory.
    /// </summary>
    public class PsApiManagement
    {
        // Extracts the resource group name from an ARM resource id of the form
        // ".../resourceGroups/{name}/providers/...".
        private static readonly Regex ResourceGroupRegex = new Regex(@"/resourceGroups/(?<resourceGroupName>.+)/providers/", RegexOptions.Compiled);

        /// <summary>Creates an empty instance with non-null Tags and AdditionalRegions.</summary>
        public PsApiManagement()
        {
            Tags = new Dictionary<string, string>();
            AdditionalRegions = new List<PsApiManagementRegion>();
        }

        /// <summary>
        /// Maps an SDK resource into this PowerShell model.
        /// </summary>
        /// <param name="apiServiceResource">Source resource; must not be null.</param>
        /// <exception cref="ArgumentNullException">When <paramref name="apiServiceResource"/> is null.</exception>
        public PsApiManagement(ApiManagementServiceResource apiServiceResource) : this()
        {
            if (apiServiceResource == null)
            {
                throw new ArgumentNullException("apiServiceResource");
            }

            Id = apiServiceResource.Id;
            Name = apiServiceResource.Name;
            Location = apiServiceResource.Location;
            Sku = ApiManagementClient.Mapper.Map<string, PsApiManagementSku>(apiServiceResource.Sku.Name);
            Capacity = apiServiceResource.Sku.Capacity;
            CreatedTimeUtc = apiServiceResource.CreatedAtUtc;
            PublisherEmail = apiServiceResource.PublisherEmail;
            OrganizationName = apiServiceResource.PublisherName;
            NotificationSenderEmail = apiServiceResource.NotificationSenderEmail;
            ProvisioningState = apiServiceResource.ProvisioningState;
            RuntimeUrl = apiServiceResource.GatewayUrl;
            RuntimeRegionalUrl = apiServiceResource.GatewayRegionalUrl;
            PortalUrl = apiServiceResource.PortalUrl;
            DeveloperPortalUrl = apiServiceResource.DeveloperPortalUrl;
            ManagementApiUrl = apiServiceResource.ManagementApiUrl;
            ScmUrl = apiServiceResource.ScmUrl;
            // IP lists are copied to arrays; null stays null (not empty) to preserve
            // "not reported by the service" semantics.
            PublicIPAddresses = apiServiceResource.PublicIPAddresses != null ? apiServiceResource.PublicIPAddresses.ToArray() : null;
            PrivateIPAddresses = apiServiceResource.PrivateIPAddresses != null ? apiServiceResource.PrivateIPAddresses.ToArray() : null;
            EnableClientCertificate = apiServiceResource.EnableClientCertificate;
            PublicNetworkAccess = apiServiceResource.PublicNetworkAccess;
            PublicIpAddressId = apiServiceResource.PublicIpAddressId;
            PlatformVersion = apiServiceResource.PlatformVersion;
            VpnType = ApiManagementClient.Mapper.Map<string, PsApiManagementVpnType>(apiServiceResource.VirtualNetworkType);

            if (apiServiceResource.AdditionalLocations != null)
            {
                AdditionalRegions = apiServiceResource.AdditionalLocations
                    .Select(region => new PsApiManagementRegion(region))
                    .ToList();
            }

            if (apiServiceResource.VirtualNetworkConfiguration != null)
            {
                VirtualNetwork = new PsApiManagementVirtualNetwork(apiServiceResource.VirtualNetworkConfiguration);
            }

            // Hostname configurations are split by type: Portal/Management/Scm/DeveloperPortal
            // take the first entry of their type; Proxy may have several (one per custom domain).
            if (apiServiceResource.HostnameConfigurations != null && apiServiceResource.HostnameConfigurations.Any())
            {
                var portalHostnameResource = apiServiceResource.HostnameConfigurations.FirstOrDefault(conf => conf.Type == HostnameType.Portal);
                if (portalHostnameResource != null)
                {
                    PortalCustomHostnameConfiguration = new PsApiManagementCustomHostNameConfiguration(portalHostnameResource);
                }

                var proxyHostnameResources = apiServiceResource.HostnameConfigurations.Where(conf => conf.Type == HostnameType.Proxy);
                if (proxyHostnameResources != null && proxyHostnameResources.Any())
                {
                    var proxyCustomHostnameResources = new List<PsApiManagementCustomHostNameConfiguration>();
                    foreach (var proxyHostNameResource in proxyHostnameResources)
                    {
                        proxyCustomHostnameResources.Add(new PsApiManagementCustomHostNameConfiguration(proxyHostNameResource));
                    }

                    ProxyCustomHostnameConfiguration = proxyCustomHostnameResources.ToArray();
                }

                var managementHostnameResource = apiServiceResource.HostnameConfigurations.FirstOrDefault(conf => conf.Type == HostnameType.Management);
                if (managementHostnameResource != null)
                {
                    ManagementCustomHostnameConfiguration = new PsApiManagementCustomHostNameConfiguration(managementHostnameResource);
                }

                var scmHostnameResource = apiServiceResource.HostnameConfigurations.FirstOrDefault(conf => conf.Type == HostnameType.Scm);
                if (scmHostnameResource != null)
                {
                    ScmCustomHostnameConfiguration = new PsApiManagementCustomHostNameConfiguration(scmHostnameResource);
                }

                var developerPortalResource = apiServiceResource.HostnameConfigurations.FirstOrDefault(conf => conf.Type == HostnameType.DeveloperPortal);
                if (developerPortalResource != null)
                {
                    DeveloperPortalHostnameConfiguration = new PsApiManagementCustomHostNameConfiguration(developerPortalResource);
                }
            }

            if (apiServiceResource.Certificates != null && apiServiceResource.Certificates.Any())
            {
                var systemCertificates = new List<PsApiManagementSystemCertificate>();
                foreach(var certificate in apiServiceResource.Certificates)
                {
                    systemCertificates.Add(new PsApiManagementSystemCertificate(certificate));
                }

                SystemCertificates = systemCertificates.ToArray();
            }

            // prepare the SSL settings
            if (apiServiceResource.CustomProperties != null && apiServiceResource.CustomProperties.Any())
            {
                SslSetting = new PsApiManagementSslSetting(apiServiceResource.CustomProperties);
            }

            if (apiServiceResource.Tags != null)
            {
                Tags = apiServiceResource.Tags;
            }

            if (apiServiceResource.Identity != null)
            {
                this.Identity = new PsApiManagementServiceIdentity(apiServiceResource.Identity);
            }

            Zone = apiServiceResource.Zones?.ToArray();
            MinimalControlPlaneApiVersion = apiServiceResource.ApiVersionConstraint?.MinApiVersion;
            DisableGateway = apiServiceResource.DisableGateway;
            PrivateEndpointConnections = apiServiceResource.PrivateEndpointConnections;
        }

        public string[] PublicIPAddresses { get; private set; }

        public string[] PrivateIPAddresses { get; private set; }

        public string Id { get; private set; }

        public string Name { get; private set; }

        public string Location { get; private set; }

        public PsApiManagementSku Sku { get; set; }

        public int Capacity { get; set; }

        public DateTime? CreatedTimeUtc { get; set; }

        public string ProvisioningState { get; private set; }

        public string RuntimeUrl { get; private set; }

        public string RuntimeRegionalUrl { get; private set; }

        public string PortalUrl { get; private set; }

        public string DeveloperPortalUrl { get; private set; }

        public string ManagementApiUrl { get; private set; }

        public string ScmUrl { get; private set; }

        public string PublisherEmail { get; set; }

        public string OrganizationName { get; set; }

        public string NotificationSenderEmail { get; set; }

        public PsApiManagementVirtualNetwork VirtualNetwork { get; set; }

        public PsApiManagementVpnType VpnType { get; set; }

        public PsApiManagementCustomHostNameConfiguration PortalCustomHostnameConfiguration { get; set; }

        public PsApiManagementCustomHostNameConfiguration[] ProxyCustomHostnameConfiguration { get; set; }

        public PsApiManagementCustomHostNameConfiguration ManagementCustomHostnameConfiguration { get; set; }

        public PsApiManagementCustomHostNameConfiguration ScmCustomHostnameConfiguration { get; set; }

        public PsApiManagementCustomHostNameConfiguration DeveloperPortalHostnameConfiguration { get; set; }

        public PsApiManagementSystemCertificate[] SystemCertificates { get; set; }

        public IDictionary<string, string> Tags { get; set; }

        public IList<PsApiManagementRegion> AdditionalRegions { get; private set; }

        public PsApiManagementSslSetting SslSetting { get; private set; }

        public PsApiManagementServiceIdentity Identity { get; private set; }

        public bool? EnableClientCertificate { get; private set; }

        public string[] Zone { get; set; }

        public bool? DisableGateway { get; set; }

        public string MinimalControlPlaneApiVersion { get; set; }

        public string PublicIpAddressId { get; set; }

        //
        // Summary:
        //     Gets compute Platform Version running the service. Possible values include: 'undetermined',
        //     'stv1', 'stv2', 'mtv1'
        public string PlatformVersion { get; set; }

        public string PublicNetworkAccess { get; set; }

        // Gets or sets list of Private Endpoint Connections configured for the Api Management Service .
        public IList<RemotePrivateEndpointConnectionWrapper> PrivateEndpointConnections { get; set; }

        /// <summary>
        /// Resource group name parsed out of <see cref="Id"/>, or null when the id is
        /// missing or does not match the expected ARM id shape.
        /// </summary>
        public string ResourceGroupName
        {
            get
            {
                if (string.IsNullOrWhiteSpace(Id))
                {
                    return null;
                }

                var match = ResourceGroupRegex.Match(Id);
                if (match.Success)
                {
                    var resourceGroupNameGroup = match.Groups["resourceGroupName"];
                    if (resourceGroupNameGroup != null && resourceGroupNameGroup.Success)
                    {
                        return resourceGroupNameGroup.Value;
                    }
                }

                return null;
            }
        }

        /// <summary>
        /// Adds a new additional region to the in-memory model.
        /// NOTE(review): the duplicate check here uses case-sensitive string.Equals,
        /// while UpdateRegion compares OrdinalIgnoreCase — confirm whether that
        /// asymmetry is intentional.
        /// </summary>
        /// <exception cref="ArgumentNullException">When <paramref name="location"/> is null.</exception>
        /// <exception cref="ArgumentException">When the location is the master region or already present.</exception>
        public PsApiManagementRegion AddRegion(
            string location,
            PsApiManagementSku sku = PsApiManagementSku.Developer,
            int capacity = 1,
            PsApiManagementVirtualNetwork virtualNetwork = null,
            string[] zone = null,
            bool? disableGateway = null,
            string publicIpAddressId = null)
        {
            if (location == null)
            {
                throw new ArgumentNullException("location");
            }

            if (location.Equals(Location) || AdditionalRegions.Any(r => location.Equals(r.Location)))
            {
                throw new ArgumentException(string.Format(Resources.AddRegionExistsMessage, location), "location");
            }

            var newRegion = new PsApiManagementRegion
            {
                Location = location,
                Sku = sku,
                Capacity = capacity,
                VirtualNetwork = virtualNetwork,
                Zone = zone,
                DisableGateway = disableGateway,
                PublicIpAddressId = publicIpAddressId
            };

            AdditionalRegions.Add(newRegion);

            return newRegion;
        }

        /// <summary>
        /// Removes an additional region by location. Returns false when no matching
        /// additional region exists; the master region cannot be removed.
        /// </summary>
        /// <exception cref="ArgumentNullException">When <paramref name="location"/> is null.</exception>
        /// <exception cref="ArgumentException">When the location is the master region.</exception>
        public bool RemoveRegion(string location)
        {
            if (location == null)
            {
                throw new ArgumentNullException("location");
            }

            if (location.Equals(Location))
            {
                throw new ArgumentException(
                    string.Format(Resources.RemoveRegionCannotRemoveMasterRegion, location),
                    "location");
            }

            var regionToRemove = AdditionalRegions.FirstOrDefault(r => location.Equals(r.Location));
            return regionToRemove != null && AdditionalRegions.Remove(regionToRemove);
        }

        /// <summary>
        /// Updates either an additional region (matched case-insensitively, trimmed)
        /// or, failing that, the master region's own settings.
        /// </summary>
        /// <exception cref="ArgumentNullException">When <paramref name="location"/> is null.</exception>
        /// <exception cref="ArgumentException">When the location matches neither.</exception>
        public void UpdateRegion(
            string location,
            PsApiManagementSku sku,
            int capacity,
            PsApiManagementVirtualNetwork virtualNetwork,
            string[] zone,
            bool? disableGateway,
            string publicIpAddressId)
        {
            if (location == null)
            {
                throw new ArgumentNullException("location");
            }

            var regionToUpdate = AdditionalRegions.FirstOrDefault(r => location.Trim().Equals(r.Location, StringComparison.OrdinalIgnoreCase));
            if (regionToUpdate != null)
            {
                // if this is additional region
                regionToUpdate.Sku = sku;
                regionToUpdate.Capacity = capacity;
                regionToUpdate.VirtualNetwork = virtualNetwork;
                regionToUpdate.Zone = zone;
                regionToUpdate.DisableGateway = disableGateway;
                regionToUpdate.PublicIpAddressId = publicIpAddressId;
            }
            else if (location.Equals(Location))
            {
                // if this is master region
                Sku = sku;
                Capacity = capacity;
                VirtualNetwork = virtualNetwork;
                Zone = zone;
                DisableGateway = disableGateway;
                PublicIpAddressId = publicIpAddressId;
            }
            else
            {
                throw new ArgumentException(string.Format(Resources.UpdateRegionDoesNotExistsMessage, location), "location");
            }
        }
    }
}
41.951705
155
0.608519
[ "MIT" ]
AlanFlorance/azure-powershell
src/ApiManagement/ApiManagement/Models/PsApiManagement.cs
14,418
C#
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace kSlovnik.Board
{
    /// <summary>
    /// Serializable snapshot of a <see cref="BoardSlot"/>'s state — image (as base64),
    /// letter, tile color, position and tag — used to save a slot and restore it later.
    /// </summary>
    public class BoardSlotInfo
    {
        public int Tag { get; set; }

        // Slot image encoded as a base64 string (see Util.ImageFromBase64String).
        public string ImageRef { get; set; }

        public char Letter { get; set; } = '\0';

        public Constants.Colors.TileColors Color { get; set; } = Constants.Colors.TileColors.None;

        public Position Position { get; set; }

        /// <summary>Captures the current state of <paramref name="boardSlot"/> into a new snapshot.</summary>
        public static BoardSlotInfo FromBoardSlot(BoardSlot boardSlot)
        {
            var snapshot = new BoardSlotInfo();
            snapshot.ImageRef = boardSlot.Image.ToBase64String();
            snapshot.Letter = boardSlot.Letter;
            snapshot.Position = boardSlot.Position;
            snapshot.Color = boardSlot.Color;
            snapshot.Tag = (int)boardSlot.Tag;
            return snapshot;
        }

        /// <summary>Copies a snapshot's state back onto <paramref name="targetBoardSlot"/>.</summary>
        public static void ApplyBoardInfoToSlot(BoardSlotInfo boardSlotInfo, BoardSlot targetBoardSlot)
        {
            targetBoardSlot.Image = Util.ImageFromBase64String(boardSlotInfo.ImageRef);
            targetBoardSlot.Letter = boardSlotInfo.Letter;
            targetBoardSlot.Position = boardSlotInfo.Position;
            targetBoardSlot.Color = boardSlotInfo.Color;
            targetBoardSlot.Tag = boardSlotInfo.Tag;
        }
    }
}
30.204545
103
0.62453
[ "BSD-3-Clause" ]
martin-chulev/kSlovnik
kSlovnik/Board/BoardSlotInfo.cs
1,331
C#
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

// Unity behaviour exposing a single method that closes the application,
// typically wired to a UI button's OnClick event in the inspector.
public class Quit : MonoBehaviour
{
    /// <summary>
    /// Quits the built player via Application.Quit().
    /// NOTE(review): per Unity docs, Application.Quit() is ignored inside the
    /// editor; it only takes effect in a standalone build.
    /// </summary>
    public void ExitGame()
    {
        Application.Quit();
    }
}
14.692308
33
0.670157
[ "MIT" ]
ConnorCooke/Waiting-On-The-Beat
Assets/Quit.cs
193
C#
namespace Tests.Caliburn.Adapters.Components
{
    /// <summary>
    /// Minimal command contract used as a sample registration/resolution target
    /// by the adapter tests (namespace suggests container adapter tests — confirm).
    /// </summary>
    public interface ISampleCommand
    {
        /// <summary>Executes the command.</summary>
        void Execute();
    }
}
18.142857
46
0.629921
[ "MIT" ]
CaliburnFx/Caliburn
src/Tests.Caliburn/Adapters/Components/ISampleCommand.cs
129
C#
using System;
using System.Threading;
using GitHub.Logging;

namespace GitHub.Unity
{
    /// <summary>
    /// Installs the bundled "octorun" helper: extracts octorun.zip from embedded
    /// resources into the user cache and verifies the installed version marker.
    /// </summary>
    class OctorunInstaller
    {
        private static readonly ILogging Logger = LogHelper.GetLogger<OctorunInstaller>();

        private readonly IEnvironment environment;
        private readonly IFileSystem fileSystem;
        private readonly ITaskManager taskManager;
        private readonly IZipHelper sharpZipLibHelper;
        private readonly OctorunInstallDetails installDetails;

        /// <summary>
        /// Creates an installer; when <paramref name="installDetails"/> is null, paths
        /// default to a layout rooted at the environment's user cache path.
        /// </summary>
        public OctorunInstaller(IEnvironment environment, ITaskManager taskManager,
            OctorunInstallDetails installDetails = null)
        {
            this.environment = environment;
            this.sharpZipLibHelper = ZipHelper.Instance;
            this.installDetails = installDetails ?? new OctorunInstallDetails(environment.UserCachePath);
            this.fileSystem = environment.FileSystem;
            this.taskManager = taskManager;
        }

        /// <summary>
        /// Ensures octorun is extracted and current, extracting it if needed.
        /// Returns the path to the octorun executable, or NPath.Default when
        /// extraction fails (the Catch handler logs and swallows the error).
        /// </summary>
        public NPath SetupOctorunIfNeeded()
        {
            NPath path = NPath.Default;

            // Fast path: already extracted with a matching version marker.
            var isOctorunExtracted = IsOctorunExtracted();
            if (isOctorunExtracted)
                return installDetails.ExecutablePath;

            GrabZipFromResources();

            // Extract into a temp dir first, then move into place (see MoveOctorun),
            // so a failed extraction never leaves a half-written install.
            var tempZipExtractPath = NPath.CreateTempDirectory("octorun_extract_archive_path");
            var unzipTask = new UnzipTask(taskManager.Token, installDetails.ZipFile,
                    tempZipExtractPath, sharpZipLibHelper,
                    fileSystem)
                .Catch(e =>
                {
                    Logger.Error(e, "Error extracting octorun");
                    return true;
                });
            var extractPath = unzipTask.RunSynchronously();
            if (unzipTask.Successful)
                path = MoveOctorun(extractPath.Combine("octorun"));
            return path;
        }

        // Copies octorun.zip from embedded assembly resources into the downloads folder.
        private NPath GrabZipFromResources()
        {
            return AssemblyResources.ToFile(ResourceType.Generic, "octorun.zip", installDetails.BaseZipPath, environment);
        }

        // Replaces any existing installation with the freshly extracted tree and
        // cleans up the temp extraction directory; returns the executable path.
        private NPath MoveOctorun(NPath fromPath)
        {
            var toPath = installDetails.InstallationPath;

            toPath.DeleteIfExists();
            toPath.EnsureParentDirectoryExists();
            fromPath.Move(toPath);
            fromPath.Parent.Delete();
            return installDetails.ExecutablePath;
        }

        // True only when the install folder exists AND its version file matches
        // OctorunInstallDetails.PackageVersion; a mismatch triggers reinstallation.
        private bool IsOctorunExtracted()
        {
            if (!installDetails.InstallationPath.DirectoryExists())
            {
                return false;
            }

            if (!installDetails.VersionFile.FileExists())
            {
                return false;
            }

            var octorunVersion = installDetails.VersionFile.ReadAllText().Trim();
            if (!OctorunInstallDetails.PackageVersion.Equals(octorunVersion))
            {
                Logger.Warning("Current version {0} does not match expected {1}", octorunVersion, OctorunInstallDetails.PackageVersion);
                return false;
            }
            return true;
        }

        /// <summary>
        /// Path/URL configuration for the octorun install: where the zip is cached,
        /// where it installs, and where the entry-point script lives.
        /// </summary>
        public class OctorunInstallDetails
        {
            public const string DefaultZipMd5Url = "http://github-vs.s3.amazonaws.com/unity/octorun/octorun.zip.md5";
            public const string DefaultZipUrl = "http://github-vs.s3.amazonaws.com/unity/octorun/octorun.zip";
            // Version marker expected in the installed tree's "version" file.
            public const string PackageVersion = "b4b80eb4ac";
            private const string PackageName = "octorun";
            private const string zipFile = "octorun.zip";

            public OctorunInstallDetails(NPath baseDataPath)
            {
                BaseZipPath = baseDataPath.Combine("downloads");
                BaseZipPath.EnsureDirectoryExists();
                ZipFile = BaseZipPath.Combine(zipFile);

                var installPath = baseDataPath.Combine(PackageName);
                InstallationPath = installPath;
                Executable = "app.js";
                ExecutablePath = installPath.Combine("src", "bin", Executable);
            }

            public NPath BaseZipPath { get; }
            public NPath ZipFile { get; }
            public NPath InstallationPath { get; }
            public string Executable { get; }
            public NPath ExecutablePath { get; }
            public UriString ZipMd5Url { get; set; } = DefaultZipMd5Url;
            public UriString ZipUrl { get; set; } = DefaultZipUrl;
            public NPath VersionFile => InstallationPath.Combine("version");
        }
    }
}
136
0.612947
[ "MIT" ]
profet23/Unity
src/GitHub.Api/Installer/OctorunInstaller.cs
4,451
C#
// // AuthenticationService.cs // // Author: // Igor Guerrero <igorgue@protonmail.com> // // Copyright (c) 2021 HODL Wallet // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
using System;

using Xamarin.Forms;

using HodlWallet.Core.Interfaces;
using HodlWallet.Core.Services;
using HodlWallet.UI.Views;
using HodlWallet.UI;
using Xamarin.Essentials;
using Plugin.Fingerprint;

[assembly: Dependency(typeof(AuthenticationService))]
namespace HodlWallet.Core.Services
{
    /// <summary>
    /// App-wide authentication state: tracks whether the user is authenticated
    /// (with a sliding expiration window), remembers biometric availability in
    /// preferences, and presents the PIN or biometric login view.
    /// </summary>
    public sealed class AuthenticationService : IAuthenticationService
    {
#if DEBUG
        const int EXPIRATION_TIME = 10_000; // 10 seconds in ms
#else
        const int EXPIRATION_TIME = 300_000; // 5 mins in ms
#endif

        // UTC timestamp of the last successful authentication; the auth session
        // expires EXPIRATION_TIME ms after this.
        public DateTimeOffset LastAuth { get; set; }

        bool isAuthenticated = false;
        public bool IsAuthenticated
        {
            get
            {
                if (!isAuthenticated) return isAuthenticated;

                // Invalidate the flag once the expiration window has elapsed.
                var now = DateTimeOffset.UtcNow;
                var expiration = LastAuth + TimeSpan.FromMilliseconds(EXPIRATION_TIME);

                if (expiration < now) isAuthenticated = false;

                // Even a fresh auth only counts once wallet setup has completed.
                return isAuthenticated && SecureStorageService.UserDidSetup();
            }
            set
            {
                // Setting true restarts the expiration window.
                if (value) LastAuth = DateTimeOffset.UtcNow;

                isAuthenticated = value;
            }
        }

        public bool ShowingLoginForm { get; set; }

        // NOTE(review): the getter reads only the persisted preference; the backing
        // field is written by the setter but never read back — confirm intended.
        bool biometricsAvailable;
        public bool BiometricsAvailable
        {
            get
            {
                return Preferences.Get("biometricsAvailable", false);
            }
            set
            {
                biometricsAvailable = value;

                Preferences.Set("biometricsAvailable", biometricsAvailable);
            }
        }

        /// <summary>
        /// Shows the appropriate login UI: biometric login when the user allows it,
        /// last used it, and the device supports it; otherwise the PIN view.
        /// When <paramref name="action"/> is "pop" the view is pushed modally over
        /// the shell instead of replacing MainPage.
        /// (async void: fire-and-forget UI entry point.)
        /// </summary>
        public async void ShowLogin(string action = null)
        {
            string lastLogin = Preferences.Get("lastLogin", "pin");
            bool biometricsAllow = Preferences.Get("biometricsAllow", false);
            BiometricsAvailable = await CrossFingerprint.Current.IsAvailableAsync();

            if (biometricsAllow && (lastLogin == "biometric" && BiometricsAvailable))
            {
                var view = new BiometricLoginView(action);

                if (action == "pop")
                {
                    if (Application.Current.MainPage is AppShell appShell)
                        await appShell.Navigation.PushModalAsync(view);

                    return;
                }

                Application.Current.MainPage = view;
            }
            else
            {
                var view = new LoginView(action);

                if (action == "pop")
                {
                    if (Application.Current.MainPage is AppShell appShell)
                        await appShell.Navigation.PushModalAsync(view);

                    return;
                }

                Application.Current.MainPage = view;
            }
        }

        /// <summary>
        /// Compares <paramref name="input"/> against the stored PIN and updates
        /// IsAuthenticated (which also stamps LastAuth on success).
        /// </summary>
        public bool Authenticate(string input)
        {
            IsAuthenticated = input == SecureStorageService.GetPin();

            return IsAuthenticated;
        }
    }
}
31.389706
87
0.599438
[ "MIT" ]
hodlwallet/hodlwallet
HodlWallet/Core/Services/AuthenticationService.cs
4,269
C#
using System;
using System.Globalization;

namespace soma_vetor
{
    /// <summary>
    /// Console exercise: reads N numbers into an array, then prints the values
    /// (one decimal place), their sum and their average (two decimal places),
    /// always formatting with the invariant culture.
    /// </summary>
    class Program
    {
        static void Main(string[] args)
        {
            // Invariant culture so '.' is always the decimal separator.
            var culture = CultureInfo.InvariantCulture;

            Console.Write("Quantos numeros voce vai digitar? ");
            int count = int.Parse(Console.ReadLine());

            double[] numbers = new double[count];
            for (int index = 0; index < count; index++)
            {
                Console.Write("Digite um numero: ");
                numbers[index] = double.Parse(Console.ReadLine(), culture);
            }

            Console.WriteLine();
            Console.Write("VALORES = ");
            foreach (double number in numbers)
            {
                Console.Write(number.ToString("F1", culture) + " ");
            }
            Console.WriteLine();

            double total = 0;
            foreach (double number in numbers)
            {
                total += number;
            }
            double average = total / count;

            Console.WriteLine("SOMA = " + total.ToString("F2", culture));
            Console.WriteLine("MEDIA = " + average.ToString("F2", culture));
        }
    }
}
23.851064
69
0.438002
[ "MIT" ]
DariaMachado/Algoritmos_em_CSharp
soma_vetor.cs
1,123
C#
using System;
using System.Collections.Generic;
using System.Text;
using System.Runtime.InteropServices;
using System.Numerics;

namespace StudioCore
{
    /// <summary>
    /// Binding to Navgen recast based navmesh generation library
    /// </summary>
    class NavGen
    {
        /// <summary>
        /// Sets build parameters for subsequent navmesh builds. Parameter names
        /// presumably follow Recast conventions (cs = cell size, ch = cell height,
        /// agent height/climb/radius, etc.) — TODO confirm against NavGen.dll sources.
        /// </summary>
        [DllImport("NavGen.dll")]
        public static extern bool SetNavmeshBuildParams(float cs, float ch, float slope, float aheight, float aclimb, float aradius, int minregionarea);

        /// <summary>Builds a navmesh from an indexed triangle mesh; true on success.</summary>
        [DllImport("NavGen.dll")]
        public static extern bool BuildNavmeshForMesh([In] Vector3[] verts, int vcount, [In] int[] indices, int icount);

        /// <summary>Number of vertices in the most recently built navmesh.</summary>
        [DllImport("NavGen.dll")]
        public static extern int GetMeshVertCount();

        /// <summary>Number of triangles in the most recently built navmesh.</summary>
        [DllImport("NavGen.dll")]
        public static extern int GetMeshTriCount();

        // Caller allocates buffer; expected sizing relative to GetMeshVertCount()
        // is defined by the native library — confirm before use.
        [DllImport("NavGen.dll")]
        public static extern void GetMeshVerts([In, Out] ushort[] buffer);

        // Caller allocates buffer; expected sizing relative to GetMeshTriCount()
        // is defined by the native library — confirm before use.
        [DllImport("NavGen.dll")]
        public static extern void GetMeshTris([In, Out] ushort[] buffer);

        /// <summary>Fills the caller-supplied buffer with the navmesh bounding box corners.</summary>
        [DllImport("NavGen.dll")]
        public static extern void GetBoundingBox([In, Out] Vector3[] buffer);
    }
}
31.583333
152
0.666667
[ "MIT" ]
JKAnderson/DSMapStudio
StudioCore/NavGen.cs
1,139
C#
using Beer.DaAPI.Core.Common;
using Newtonsoft.Json;
using System;
using System.Collections.Generic;
using System.Text;

namespace Beer.DaAPI.Infrastructure.Services.JsonConverters
{
    /// <summary>
    /// Newtonsoft.Json converter that represents an <see cref="IPv4Address"/> as its
    /// plain string form on the wire and parses it back via IPv4Address.FromString.
    /// </summary>
    public class IPv4AddressAsStringJsonConverter : JsonConverter
    {
        /// <summary>Handles exactly the <see cref="IPv4Address"/> type.</summary>
        public override bool CanConvert(Type objectType)
        {
            return objectType == typeof(IPv4Address);
        }

        /// <summary>Reads the current JSON token as a string and converts it to an address.</summary>
        public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer) =>
            IPv4Address.FromString(reader.Value as String);

        /// <summary>Writes the address using its string representation.</summary>
        public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer)
        {
            var text = (value as IPv4Address).ToString();
            writer.WriteValue(text);
        }
    }
}
34.727273
155
0.732984
[ "MIT" ]
just-the-benno/Beer
src/DaAPI/Service/Beer.DaAPI.Service.Infrastructure/Services/JsonConverters/IPv4AddressAsStringJsonConverter.cs
766
C#
/*
 * ToolStack.com C# WriteableBitmap extension for PNG Writer library by Greg Ross
 *
 * Homepage: http://ToolStack.com/PNGWriter
 *
 * This library is based upon the examples hosted at the forums on WriteableBitmapEx
 * project at the codeplex site (http://writeablebitmapex.codeplex.com/discussions/274445).
 *
 * This is public domain software, use and abuse as you see fit.
 *
 * Version 1.0 - Released Feburary 22, 2012
 */
using System;
using System.Globalization;
using System.IO;
using System.IO.IsolatedStorage;
using System.Windows.Shapes;
using System.Windows.Media;
using ToolStackCRCLib;
using ToolStackPNGWriterLib;

namespace System.Windows.Media.Imaging
{
    /// <summary>
    /// WriteableBitmap Extensions for PNG Writing
    /// </summary>
    public static partial class WriteableBitmapExtensions
    {
        /// <summary>
        /// Write and PNG file out to a file stream. Currently compression is not supported.
        /// </summary>
        /// <param name="image">The WriteableBitmap to work on.</param>
        /// <param name="stream">The destination file stream.</param>
        public static void WritePNG(this WriteableBitmap image, System.IO.Stream stream)
        {
            // -1 requests automatic compression selection (see the overload below).
            WritePNG(image, stream, -1);
        }

        /// <summary>
        /// Write and PNG file out to a file stream. Currently compression is not supported.
        /// </summary>
        /// <param name="image">The WriteableBitmap to work on.</param>
        /// <param name="stream">The destination file stream.</param>
        /// <param name="compression">Level of compression to use (-1=auto, 0=none, 1-100 is percentage).</param>
        public static void WritePNG(this WriteableBitmap image, System.IO.Stream stream, int compression)
        {
            // Byte-order detection must run before writing so pixel data is
            // emitted correctly for this platform's WriteableBitmap layout.
            PNGWriter.DetectWBByteOrder();
            PNGWriter.WritePNG(image, stream, compression);
        }
    }
}
36.735849
114
0.651772
[ "MIT" ]
pantaloons/4charm
4charm/Models/PNG/ToolStackPNGWriterWBext.cs
1,949
C#
// Copyright 2010 Max Toro Q.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

using System;
using System.Collections.Generic;
using System.Text;

namespace Nuxleus.Web
{
    /// <summary>
    /// Contract for types that expose an <see cref="XPathItemFactory"/> instance.
    /// </summary>
    public interface IHasXPathItemFactory
    {
        /// <summary>The factory used to create XPath items.</summary>
        XPathItemFactory ItemFactory { get; }
    }
}
30.038462
75
0.737516
[ "BSD-3-Clause" ]
mdavid/nuxleus
src/Nuxleus.Web/IHasXPathItemFactory.cs
783
C#
using FluentAssertions;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;

namespace Sybil.Tests
{
    /// <summary>
    /// MSTest suite for ClassBuilder: null-argument guards, fluent-return contracts,
    /// and the exact generated source text for modifiers and base classes.
    /// </summary>
    [TestClass]
    public class ClassBuilderTests
    {
        private const string Name = "Test";
        private const string Base = "Base";

        // Expected generated output for the text-based assertions below.
        private const string PublicClass =
@"public class Test
{
}";
        private const string PublicStaticClass =
@"public static class Test
{
}";
        private const string ClassWithBase =
@"class Test : Base
{
}";

        // Fresh builder per test (MSTest constructs the class for each test method).
        private readonly ClassBuilder builder;

        public ClassBuilderTests()
        {
            this.builder = new ClassBuilder(Name);
        }

        [TestMethod]
        public void Constructor_NullName_ThrowArgumentNullException()
        {
            var action = () =>
            {
                _ = new ClassBuilder(null);
            };

            action.Should().Throw<ArgumentNullException>();
        }

        [TestMethod]
        public void WithModifier_ModifierNull_ThrowsArgumentNullException()
        {
            var action = () =>
            {
                this.builder.WithModifier(null);
            };

            action.Should().Throw<ArgumentNullException>();
        }

        [TestMethod]
        public void WithModifier_ModifierValid_ReturnsBuilder()
        {
            var returnedBuilder = this.builder.WithModifier("public");

            // Fluent API: the same builder instance must come back.
            returnedBuilder.Should().NotBeNull().And.Subject.Should().Be(this.builder);
        }

        [TestMethod]
        public void WithModifiers_ModifiersNull_ThrowsArgumentNullException()
        {
            var action = () =>
            {
                this.builder.WithModifiers(null);
            };

            action.Should().Throw<ArgumentNullException>();
        }

        [TestMethod]
        public void WithModifiers_ModifiersValid_ReturnsBuilder()
        {
            var returnedBuilder = this.builder.WithModifiers("public static");

            returnedBuilder.Should().NotBeNull().And.Subject.Should().Be(this.builder);
        }

        [TestMethod]
        public void WithField_NullFieldBuilder_ThrowsArgumentNullException()
        {
            var action = () =>
            {
                this.builder.WithField(null);
            };

            action.Should().Throw<ArgumentNullException>();
        }

        [TestMethod]
        public void WithProperty_NullPropertyBuilder_ThrowsArgumentNullException()
        {
            var action = () =>
            {
                this.builder.WithProperty(null);
            };

            action.Should().Throw<ArgumentNullException>();
        }

        [TestMethod]
        public void WithMethod_NullMethodBuilder_ThrowsArgumentNullException()
        {
            var action = () =>
            {
                this.builder.WithMethod(null);
            };

            action.Should().Throw<ArgumentNullException>();
        }

        [TestMethod]
        public void WithConstructor_NullConstructorBuilder_ThrowsArgumentNullException()
        {
            var action = () =>
            {
                this.builder.WithConstructor(null);
            };

            action.Should().Throw<ArgumentNullException>();
        }

        [TestMethod]
        public void WithBaseClass_NullBaseClass_ThrowsArgumentNullException()
        {
            var action = () =>
            {
                this.builder.WithBaseClass(null);
            };

            action.Should().Throw<ArgumentNullException>();
        }

        [TestMethod]
        public void WithBaseClass_BaseClassValid_ReturnsBuilder()
        {
            var returnedBuilder = this.builder.WithBaseClass(Base);

            returnedBuilder.Should().NotBeNull().And.Subject.Should().BeOfType<ClassBuilder>();
        }

        [TestMethod]
        public void Build_ReturnsClassDeclarationSyntax()
        {
            var syntax = this.builder.Build();

            syntax.Should().NotBeNull().And.Subject.Should().BeOfType<ClassDeclarationSyntax>();
        }

        [TestMethod]
        public void WithModifier_ReturnsExpectedString()
        {
            var result = this.builder.WithModifier("public").Build().ToFullString();

            result.Should().Be(PublicClass);
        }

        [TestMethod]
        public void WithModifiers_ReturnsExpectedString()
        {
            var result = this.builder.WithModifiers("public static").Build().ToFullString();

            result.Should().Be(PublicStaticClass);
        }

        [TestMethod]
        public void WithBaseClass_ReturnsExpectedString()
        {
            var result = this.builder
                .WithBaseClass(Base)
                .Build()
                .ToFullString();

            result.Should().Be(ClassWithBase);
        }
    }
}
96
0.565479
[ "MIT" ]
AlexMacocian/Sybil
Sybil.UnitTests/ClassBuilderTests.cs
4,828
C#
// <auto-generated> // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. See License.txt in the project root for // license information. // // Code generated by Microsoft (R) AutoRest Code Generator. // Changes may cause incorrect behavior and will be lost if the code is // regenerated. // </auto-generated> namespace Microsoft.Azure.Management.Redis { using Microsoft.Rest; using Microsoft.Rest.Azure; using Models; using Newtonsoft.Json; using System.Collections; using System.Collections.Generic; using System.Linq; using System.Net; using System.Net.Http; using System.Threading; using System.Threading.Tasks; /// <summary> /// PatchSchedulesOperations operations. /// </summary> internal partial class PatchSchedulesOperations : IServiceOperations<RedisManagementClient>, IPatchSchedulesOperations { /// <summary> /// Initializes a new instance of the PatchSchedulesOperations class. /// </summary> /// <param name='client'> /// Reference to the service client. /// </param> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> internal PatchSchedulesOperations(RedisManagementClient client) { if (client == null) { throw new System.ArgumentNullException("client"); } Client = client; } /// <summary> /// Gets a reference to the RedisManagementClient /// </summary> public RedisManagementClient Client { get; private set; } /// <summary> /// Create or replace the patching schedule for Redis cache (requires Premium /// SKU). /// </summary> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='name'> /// The name of the Redis cache. /// </param> /// <param name='parameters'> /// Parameters to set the patching schedule for Redis cache. /// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> /// <exception cref="CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="ValidationException"> /// Thrown when a required parameter is null /// </exception> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> /// <return> /// A response object containing the response body and response headers. /// </return> public async Task<AzureOperationResponse<RedisPatchSchedule>> CreateOrUpdateWithHttpMessagesAsync(string resourceGroupName, string name, RedisPatchSchedule parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (resourceGroupName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName"); } if (name == null) { throw new ValidationException(ValidationRules.CannotBeNull, "name"); } if (parameters == null) { throw new ValidationException(ValidationRules.CannotBeNull, "parameters"); } if (parameters != null) { parameters.Validate(); } if (Client.ApiVersion == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion"); } if (Client.SubscriptionId == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId"); } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("resourceGroupName", resourceGroupName); tracingParameters.Add("name", name); tracingParameters.Add("parameters", parameters); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, 
"CreateOrUpdate", tracingParameters); } // Construct URL var _baseUrl = Client.BaseUri.AbsoluteUri; var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cache/Redis/{name}/patchSchedules/default").ToString(); _url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName)); _url = _url.Replace("{name}", System.Uri.EscapeDataString(name)); _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId)); List<string> _queryParameters = new List<string>(); if (Client.ApiVersion != null) { _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion))); } if (_queryParameters.Count > 0) { _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters); } // Create HTTP transport objects var _httpRequest = new HttpRequestMessage(); HttpResponseMessage _httpResponse = null; _httpRequest.Method = new HttpMethod("PUT"); _httpRequest.RequestUri = new System.Uri(_url); // Set Headers if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString()); } if (Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; if(parameters != null) { _requestContent = Rest.Serialization.SafeJsonConvert.SerializeObject(parameters, Client.SerializationSettings); _httpRequest.Content = 
new StringContent(_requestContent, System.Text.Encoding.UTF8); _httpRequest.Content.Headers.ContentType =System.Net.Http.Headers.MediaTypeHeaderValue.Parse("application/json; charset=utf-8"); } // Set Credentials if (Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200 && (int)_statusCode != 201) { var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings); if (_errorBody != null) { ex = new CloudException(_errorBody.Message); ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_httpResponse.Headers.Contains("x-ms-request-id")) { ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse<RedisPatchSchedule>(); _result.Request = 
_httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<RedisPatchSchedule>(_responseContent, Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } // Deserialize Response if ((int)_statusCode == 201) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<RedisPatchSchedule>(_responseContent, Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } /// <summary> /// Deletes the patching schedule of a redis cache (requires Premium SKU). /// </summary> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='name'> /// The name of the redis cache. /// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. 
/// </param> /// <exception cref="CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="ValidationException"> /// Thrown when a required parameter is null /// </exception> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> /// <return> /// A response object containing the response body and response headers. /// </return> public async Task<AzureOperationResponse> DeleteWithHttpMessagesAsync(string resourceGroupName, string name, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (resourceGroupName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName"); } if (name == null) { throw new ValidationException(ValidationRules.CannotBeNull, "name"); } if (Client.ApiVersion == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion"); } if (Client.SubscriptionId == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId"); } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("resourceGroupName", resourceGroupName); tracingParameters.Add("name", name); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "Delete", tracingParameters); } // Construct URL var _baseUrl = Client.BaseUri.AbsoluteUri; var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? 
"" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cache/Redis/{name}/patchSchedules/default").ToString(); _url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName)); _url = _url.Replace("{name}", System.Uri.EscapeDataString(name)); _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId)); List<string> _queryParameters = new List<string>(); if (Client.ApiVersion != null) { _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion))); } if (_queryParameters.Count > 0) { _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters); } // Create HTTP transport objects var _httpRequest = new HttpRequestMessage(); HttpResponseMessage _httpResponse = null; _httpRequest.Method = new HttpMethod("DELETE"); _httpRequest.RequestUri = new System.Uri(_url); // Set Headers if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString()); } if (Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; // Set Credentials if (Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } 
cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200 && (int)_statusCode != 204) { var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings); if (_errorBody != null) { ex = new CloudException(_errorBody.Message); ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_httpResponse.Headers.Contains("x-ms-request-id")) { ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } /// <summary> /// Gets the patching schedule of a redis cache (requires Premium SKU). /// </summary> /// <param name='resourceGroupName'> /// The name of the resource group. /// </param> /// <param name='name'> /// The name of the redis cache. 
/// </param> /// <param name='customHeaders'> /// Headers that will be added to request. /// </param> /// <param name='cancellationToken'> /// The cancellation token. /// </param> /// <exception cref="CloudException"> /// Thrown when the operation returned an invalid status code /// </exception> /// <exception cref="SerializationException"> /// Thrown when unable to deserialize the response /// </exception> /// <exception cref="ValidationException"> /// Thrown when a required parameter is null /// </exception> /// <exception cref="System.ArgumentNullException"> /// Thrown when a required parameter is null /// </exception> /// <return> /// A response object containing the response body and response headers. /// </return> public async Task<AzureOperationResponse<RedisPatchSchedule>> GetWithHttpMessagesAsync(string resourceGroupName, string name, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken)) { if (resourceGroupName == null) { throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName"); } if (name == null) { throw new ValidationException(ValidationRules.CannotBeNull, "name"); } if (Client.ApiVersion == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion"); } if (Client.SubscriptionId == null) { throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId"); } // Tracing bool _shouldTrace = ServiceClientTracing.IsEnabled; string _invocationId = null; if (_shouldTrace) { _invocationId = ServiceClientTracing.NextInvocationId.ToString(); Dictionary<string, object> tracingParameters = new Dictionary<string, object>(); tracingParameters.Add("resourceGroupName", resourceGroupName); tracingParameters.Add("name", name); tracingParameters.Add("cancellationToken", cancellationToken); ServiceClientTracing.Enter(_invocationId, this, "Get", tracingParameters); } // Construct URL var _baseUrl = Client.BaseUri.AbsoluteUri; 
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cache/Redis/{name}/patchSchedules/default").ToString(); _url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName)); _url = _url.Replace("{name}", System.Uri.EscapeDataString(name)); _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId)); List<string> _queryParameters = new List<string>(); if (Client.ApiVersion != null) { _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion))); } if (_queryParameters.Count > 0) { _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters); } // Create HTTP transport objects var _httpRequest = new HttpRequestMessage(); HttpResponseMessage _httpResponse = null; _httpRequest.Method = new HttpMethod("GET"); _httpRequest.RequestUri = new System.Uri(_url); // Set Headers if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value) { _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString()); } if (Client.AcceptLanguage != null) { if (_httpRequest.Headers.Contains("accept-language")) { _httpRequest.Headers.Remove("accept-language"); } _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage); } if (customHeaders != null) { foreach(var _header in customHeaders) { if (_httpRequest.Headers.Contains(_header.Key)) { _httpRequest.Headers.Remove(_header.Key); } _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value); } } // Serialize Request string _requestContent = null; // Set Credentials if (Client.Credentials != null) { cancellationToken.ThrowIfCancellationRequested(); await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false); } // Send Request if (_shouldTrace) { 
ServiceClientTracing.SendRequest(_invocationId, _httpRequest); } cancellationToken.ThrowIfCancellationRequested(); _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false); if (_shouldTrace) { ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse); } HttpStatusCode _statusCode = _httpResponse.StatusCode; cancellationToken.ThrowIfCancellationRequested(); string _responseContent = null; if ((int)_statusCode != 200) { var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode)); try { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings); if (_errorBody != null) { ex = new CloudException(_errorBody.Message); ex.Body = _errorBody; } } catch (JsonException) { // Ignore the exception } ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent); ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent); if (_httpResponse.Headers.Contains("x-ms-request-id")) { ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } if (_shouldTrace) { ServiceClientTracing.Error(_invocationId, ex); } _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw ex; } // Create Result var _result = new AzureOperationResponse<RedisPatchSchedule>(); _result.Request = _httpRequest; _result.Response = _httpResponse; if (_httpResponse.Headers.Contains("x-ms-request-id")) { _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault(); } // Deserialize Response if ((int)_statusCode == 200) { _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false); try { _result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<RedisPatchSchedule>(_responseContent, 
Client.DeserializationSettings); } catch (JsonException ex) { _httpRequest.Dispose(); if (_httpResponse != null) { _httpResponse.Dispose(); } throw new SerializationException("Unable to deserialize the response.", _responseContent, ex); } } if (_shouldTrace) { ServiceClientTracing.Exit(_invocationId, _result); } return _result; } } }
45.46434
296
0.562331
[ "MIT" ]
0xced/azure-sdk-for-net
src/SDKs/RedisCache/Management.Redis/Generated/PatchSchedulesOperations.cs
29,961
C#
using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; using MyClassLib.WordOfTanks; using MyClassLib; namespace Day7__Tanks_ { class Program { static void Main(string[] args) { Tank[] t34 = new Tank[5]; t34[0] = new Tank("Джейк"); t34[1] = new Tank("Биби"); t34[2] = new Tank("Пупырка"); t34[3] = new Tank("Фин"); t34[4] = new Tank("БиМо"); Tank[] Pantera = new Tank[5]; Pantera[0] = new Tank("Sponge"); Pantera[1] = new Tank("Patrick"); Pantera[2] = new Tank("Krabs"); Pantera[3] = new Tank("Perl"); Pantera[4] = new Tank("Garry"); Console.WriteLine("\t\t\tУчастники!"); Tank[] Vinner = new Tank[5]; Console.ForegroundColor = ConsoleColor.DarkRed; Console.WriteLine("Танки Т-34 : \t"); Console.ForegroundColor = ConsoleColor.White; foreach (var i in t34) { Console.WriteLine(i.Show()); } Console.WriteLine(); Console.ForegroundColor = ConsoleColor.DarkRed; Console.WriteLine("Танки Pantera : \t"); Console.ForegroundColor = ConsoleColor.White; foreach (var i in Pantera) { Console.WriteLine(i.Show()); } Console.WriteLine(); Console.WriteLine(""); for (int i = 0; i < Vinner.Length; i++) { Console.ForegroundColor = ConsoleColor.DarkRed; Console.WriteLine((i+1)+" РАУНД! \n"+" танк Т34 против Pantera "); Console.ForegroundColor = ConsoleColor.White; Console.WriteLine(" _____"); Console.WriteLine("| "+(i+1)+" ||__ ........"); Console.WriteLine("|_____|| ********"); Console.WriteLine(" ooooo"); Vinner[i] = t34[i] * Pantera[i]; Console.WriteLine("Победитель: \t"+Vinner[i].Show()); } Console.WriteLine(); Console.WriteLine(); /*Разработать программу, моделирующую танковый бой. В танковом бою участвуют 5 танков «Т-34» и 5 танков «Pantera». Каждый танк («Т-34» и «Pantera») описываются параметрами: «Боекомплект», «Уровень брони», «Уровень маневренности». Значение данных параметров задаются случайными числами от 0 до 100. Каждый танк участвует в парной битве, т.е. первый танк «Т-34» сражается с первым танком «Pantera» и т. д. 
Победа присуждается тому танку, который превышает противника по двум и более параметрам из трех (пример: см. программу). Основное требование: сражение (проверку на победу в бою) реализовать путем перегрузки оператора «*» (произведение). */ #region Console.WriteLine("-------------------------"); Present[] Candys = new Present[3]; Candys[0] = new Present("Рахат","Рахат", 250,25); Candys[1] = new Present("Kinders", "Ferrero", 110, 45); Candys[2] = new Present("Mars", "Mars", 110, 45); for(int i=0;i<Candys.Length;i++) { Console.Write("№" + (i+1)); Candys[i].Show(); } Console.WriteLine(); Console.Write("Выберите номер товара "); int k = Int32.Parse(Console.ReadLine())-1; Console.Write("И номер второго товара "); int l = Int32.Parse(Console.ReadLine())-1; Console.WriteLine(); if (Candys[k] == Candys[l]) Console.WriteLine("Подарок " + Candys[k].name + " + Подарок " + Candys[l].name + " Одинаковы по весу и цене "); else Console.WriteLine("Не равноценные продукт "); Console.WriteLine(); #endregion #region Console.WriteLine("------------------------------"); Arrays massiv1 = new Arrays(10); Arrays massiv2 = new Arrays(10); massiv1.Show(); massiv2.Show(); Console.WriteLine(); if (massiv1 > massiv2) Console.WriteLine("Сумма элементов первого массива больше второго "); else Console.WriteLine("Сумма элементов второго массива больше первого "); #endregion #region Console.WriteLine("------------------------------------------"); Console.ForegroundColor = ConsoleColor.Red; Console.Write("Введите сумму в тенге: "); Console.ForegroundColor = ConsoleColor.White; int sum = Int32.Parse(Console.ReadLine()); Console.ForegroundColor = ConsoleColor.Red; Console.Write("Введите вторую сумму :"); Console.ForegroundColor = ConsoleColor.White; int sum2 = Int32.Parse(Console.ReadLine()); Console.ForegroundColor = ConsoleColor.Red; Console.Write("и валюту (dollars, euro, rub, tenge) : "); Console.ForegroundColor = ConsoleColor.White; string cur = Console.ReadLine(); Money tenge = new 
Money(sum,"tenge"); Money dol = new Money(sum2, cur); Console.ForegroundColor = ConsoleColor.Red; if (tenge == dol) { Console.WriteLine(tenge.money + dol.money); } else { double res; Console.Write("Выберите валюту для конвертации итога : \n1-dollars; 2- euro; 3- rub; 4-tenge "); Console.ForegroundColor = ConsoleColor.White; int i = Int32.Parse(Console.ReadLine()); if (i == 4) { res = dol.Convert(i); Console.WriteLine("Итого "+(tenge.money + res)+" tenge"); } else { res = tenge.Convert(i); Console.WriteLine("Итого "+(dol.money + res)+" "+cur); } } #endregion } } }
37.786127
127
0.489062
[ "MIT" ]
DarTiess/Peregruzka
Program.cs
7,368
C#
using System; using System.Runtime.Serialization; namespace ShortBook.Server.Exceptions { /// <summary> /// 表示用户得到授权(与401错误相对),但是访问是被禁止的。 /// <para>403</para> /// </summary> public class ShortBookServerForbiddenException : ShortBookServerException { /// <inheritdoc /> public ShortBookServerForbiddenException() { } /// <inheritdoc /> public ShortBookServerForbiddenException(string message) : base(message) { } /// <inheritdoc /> public ShortBookServerForbiddenException(string message, Exception inner) : base(message, inner) { } /// <inheritdoc /> protected ShortBookServerForbiddenException(SerializationInfo info, StreamingContext context) : base(info, context) { } } }
25.333333
123
0.620813
[ "MIT" ]
shortbook/ShortBook.Server
ShortBook.Server/Exceptions/ShortBookServerForbiddenException.cs
890
C#
using System.Collections.Generic; using DSharpPlus.Entities; using Windows.Media.Transcoding; using static Unicord.Constants; namespace Unicord.Universal.Models { public class MediaSettingsModel : ViewModelBase { private int[] _availableWidths = new[] { 256, 426, 640, 854, 1280, 1920 }; private int[] _availableHeights = new[] { 144, 240, 360, 480, 720, 1080 }; public int AutoTranscodeMedia { get => (int)App.RoamingSettings.Read(AUTO_TRANSCODE_MEDIA, MediaTranscodeOptions.WhenNeeded); set => App.RoamingSettings.Save(AUTO_TRANSCODE_MEDIA, (MediaTranscodeOptions)value); } public int ProcessingAlgorithm { get => (int)App.RoamingSettings.Read(VIDEO_PROCESSING, MediaVideoProcessingAlgorithm.Default); set => App.RoamingSettings.Save(VIDEO_PROCESSING, (MediaTranscodeOptions)value); } public int VideoBitrate { get => App.RoamingSettings.Read(VIDEO_BITRATE, 1_150_000) / 1000; set => App.RoamingSettings.Save(VIDEO_BITRATE, value * 1000); } public int AudioBitrate { get => App.RoamingSettings.Read(AUDIO_BITRATE, 192); set => App.RoamingSettings.Save(AUDIO_BITRATE, value); } public int VideoWidth { get => App.RoamingSettings.Read(VIDEO_WIDTH, 854); set => App.RoamingSettings.Save(VIDEO_WIDTH, value); } public int VideoHeight { get => App.RoamingSettings.Read(VIDEO_HEIGHT, 480); set => App.RoamingSettings.Save(VIDEO_HEIGHT, value); } public bool SavePhotos { get => App.RoamingSettings.Read("SavePhotos", true); set => App.RoamingSettings.Save("SavePhotos", value); } public bool PreserveFrameRate { get => App.RoamingSettings.Read("PreserveFrameRate", true); set => App.RoamingSettings.Save("PreserveFrameRate", value); } public List<string> AvailableResolutions => new List<string> { "144p", "240p", "360p", "480p", "720p", "1080p" }; public string Resolution { get => $"{VideoHeight}p"; set { var index = AvailableResolutions.IndexOf(value); VideoWidth = _availableWidths[index]; VideoHeight = _availableHeights[index]; } } } }
33.213333
121
0.598956
[ "MIT" ]
UnicordDev/Unicord
Unicord.Universal/Models/Settings/MediaSettingsModel.cs
2,493
C#
using System; using System.Collections.Generic; using System.ComponentModel; using System.Data; using System.Drawing; using System.Linq; using System.Text; using System.Threading.Tasks; using System.Windows.Forms; namespace TrainStation { public partial class CancelTicket : Form { Controller controllerObj; public CancelTicket() { InitializeComponent(); } private void CancelTicket_load(object sender, EventArgs e) { controllerObj = new Controller(); DataTable x = controllerObj.viewTicketNumbers(); foreach (DataRow row in x.Rows) ticketSerialNoComboBox.Items.Add(row[0].ToString()); } private void cancelBtn_Click(object sender, EventArgs e) { if (ticketSerialNoComboBox.Text == "") { MessageBox.Show("Please select a ticket!"); return; } int ticketSerialNo = Int32.Parse(ticketSerialNoComboBox.Text); int m = controllerObj.cancelTicketEmployee(ticketSerialNo); if(m==1) MessageBox.Show("You succefully cancelled a ticket with Serial No. : "+ticketSerialNo); this.Close(); } private void Back_Click(object sender, EventArgs e) { this.Close(); } private void Exit_Click(object sender, EventArgs e) { Application.Exit(); } } }
26.714286
103
0.590909
[ "MIT" ]
EslamAsHhraf/Train-Station
TrainStation/Employee_forms/CancelTicket.cs
1,498
C#
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Threading;
using Umbraco.Cms.Core.Models;
using Umbraco.Cms.Core.Scoping;

namespace Umbraco.Cms.Core.Services
{
    /// <summary>
    /// Thread-safe two-way cache mapping integer node ids to their GUID keys and
    /// back. Lookups consult the in-memory cache first, then any registered
    /// per-object-type mappers, and finally the database.
    /// </summary>
    public class IdKeyMap : IIdKeyMap, IDisposable
    {
        private readonly IScopeProvider _scopeProvider;
        private readonly ReaderWriterLockSlim _locker = new ReaderWriterLockSlim();

        // The two directions of the map; both guarded by _locker.
        private readonly Dictionary<int, TypedId<Guid>> _id2Key = new Dictionary<int, TypedId<Guid>>();
        private readonly Dictionary<Guid, TypedId<int>> _key2Id = new Dictionary<Guid, TypedId<int>>();

        public IdKeyMap(IScopeProvider scopeProvider)
        {
            _scopeProvider = scopeProvider;
        }

        // note - for pure read-only we might want to *not* enforce a transaction?

        // notes
        //
        // - this class assumes that the id/guid map is unique; that is, if an id and a guid map
        //   to each other, then the id will never map to another guid, and the guid will never map
        //   to another id
        //
        // - cache is cleared by MediaCacheRefresher, UnpublishedPageCacheRefresher, and other
        //   refreshers - because id/guid map is unique, we only clear to avoid leaking memory, 'cos
        //   we don't risk caching obsolete values - and only when actually deleting
        //
        // - we do NOT prefetch anything from database
        //
        // - NuCache maintains its own id/guid map for content & media items
        //   it does *not* populate the idk map, because it directly uses its own map
        //   still, it provides mappers so that the idk map can benefit from them
        //   which means there will be some double-caching at some point ??
        //
        // - when a request comes in:
        //   if the idkMap already knows about the map, it returns the value
        //   else it tries the published cache via mappers
        //   else it hits the database

        // Per-object-type external mapper functions consulted before hitting the database.
        private readonly ConcurrentDictionary<UmbracoObjectTypes, (Func<int, Guid> id2key, Func<Guid, int> key2id)> _dictionary
            = new ConcurrentDictionary<UmbracoObjectTypes, (Func<int, Guid> id2key, Func<Guid, int> key2id)>();

        private bool _disposedValue;

        /// <summary>
        /// Registers (or replaces) the id&lt;-&gt;key mapper functions for an object type.
        /// </summary>
        public void SetMapper(UmbracoObjectTypes umbracoObjectType, Func<int, Guid> id2key, Func<Guid, int> key2id)
        {
            _dictionary[umbracoObjectType] = (id2key, key2id);
        }

        /// <summary>
        /// Bulk-populates both directions of the cache with known (id, key) pairs.
        /// </summary>
        internal void Populate(IEnumerable<(int id, Guid key)> pairs, UmbracoObjectTypes umbracoObjectType)
        {
            try
            {
                _locker.EnterWriteLock();
                foreach (var pair in pairs)
                {
                    _id2Key[pair.id] = new TypedId<Guid>(pair.key, umbracoObjectType);
                    _key2Id[pair.key] = new TypedId<int>(pair.id, umbracoObjectType);
                }
            }
            finally
            {
                if (_locker.IsWriteLockHeld)
                    _locker.ExitWriteLock();
            }
        }

#if POPULATE_FROM_DATABASE
        private void PopulateLocked()
        {
            // don't if not empty
            if (_key2Id.Count > 0) return;

            using (var scope = _scopeProvider.CreateScope())
            {
                // populate content and media items
                var types = new[] { Constants.ObjectTypes.Document, Constants.ObjectTypes.Media };
                var values = scope.Database.Query<TypedIdDto>("SELECT id, uniqueId, nodeObjectType FROM umbracoNode WHERE nodeObjectType IN @types", new { types });
                foreach (var value in values)
                {
                    var umbracoObjectType = ObjectTypes.GetUmbracoObjectType(value.NodeObjectType);
                    _id2Key.Add(value.Id, new TypedId<Guid>(value.UniqueId, umbracoObjectType));
                    _key2Id.Add(value.UniqueId, new TypedId<int>(value.Id, umbracoObjectType));
                }
            }
        }

        private Attempt<int> PopulateAndGetIdForKey(Guid key, UmbracoObjectTypes umbracoObjectType)
        {
            try
            {
                _locker.EnterWriteLock();
                PopulateLocked();
                return _key2Id.TryGetValue(key, out var id) && id.UmbracoObjectType == umbracoObjectType
                    ? Attempt.Succeed(id.Id)
                    : Attempt<int>.Fail();
            }
            finally
            {
                if (_locker.IsWriteLockHeld)
                    _locker.ExitWriteLock();
            }
        }

        private Attempt<Guid> PopulateAndGetKeyForId(int id, UmbracoObjectTypes umbracoObjectType)
        {
            try
            {
                _locker.EnterWriteLock();
                PopulateLocked();
                return _id2Key.TryGetValue(id, out var key) && key.UmbracoObjectType == umbracoObjectType
                    ? Attempt.Succeed(key.Id)
                    : Attempt<Guid>.Fail();
            }
            finally
            {
                if (_locker.IsWriteLockHeld)
                    _locker.ExitWriteLock();
            }
        }
#endif

        /// <summary>
        /// Resolves the integer id for a GUID key: cache first, then the registered
        /// mapper for the object type, then the database.
        /// </summary>
        public Attempt<int> GetIdForKey(Guid key, UmbracoObjectTypes umbracoObjectType)
        {
            bool empty;
            try
            {
                _locker.EnterReadLock();
                if (_key2Id.TryGetValue(key, out var id) && id.UmbracoObjectType == umbracoObjectType)
                    return Attempt.Succeed(id.Id);
                empty = _key2Id.Count == 0; // only consumed by the POPULATE_FROM_DATABASE build
            }
            finally
            {
                if (_locker.IsReadLockHeld)
                    _locker.ExitReadLock();
            }

#if POPULATE_FROM_DATABASE
            // if cache is empty and looking for a document or a media,
            // populate the cache at once and return what we found
            if (empty && (umbracoObjectType == UmbracoObjectTypes.Document || umbracoObjectType == UmbracoObjectTypes.Media))
                return PopulateAndGetIdForKey(key, umbracoObjectType);
#endif

            // optimize for read speed: reading database outside a lock means that we could read
            // multiple times, but we don't lock the cache while accessing the database = better
            int? val = null;

            // a mapper returning default(int) means "not found" there
            if (_dictionary.TryGetValue(umbracoObjectType, out var mappers))
                if ((val = mappers.key2id(key)) == default(int)) val = null;

            if (val == null)
            {
                using (var scope = _scopeProvider.CreateScope())
                {
                    //if it's unknown don't include the nodeObjectType in the query
                    if (umbracoObjectType == UmbracoObjectTypes.Unknown)
                    {
                        val = scope.Database.ExecuteScalar<int?>("SELECT id FROM umbracoNode WHERE uniqueId=@id", new { id = key });
                    }
                    else
                    {
                        val = scope.Database.ExecuteScalar<int?>("SELECT id FROM umbracoNode WHERE uniqueId=@id AND (nodeObjectType=@type OR nodeObjectType=@reservation)",
                            new
                            {
                                id = key,
                                type = GetNodeObjectTypeGuid(umbracoObjectType),
                                reservation = Cms.Core.Constants.ObjectTypes.IdReservation
                            });
                    }
                    scope.Complete();
                }
            }

            if (val == null) return Attempt<int>.Fail();

            // cache reservations, when something is saved this cache is cleared anyways
            //if (umbracoObjectType == UmbracoObjectTypes.IdReservation)
            //    Attempt.Succeed(val.Value);

            try
            {
                _locker.EnterWriteLock();
                _id2Key[val.Value] = new TypedId<Guid>(key, umbracoObjectType);
                _key2Id[key] = new TypedId<int>(val.Value, umbracoObjectType);
            }
            finally
            {
                if (_locker.IsWriteLockHeld)
                    _locker.ExitWriteLock();
            }

            return Attempt.Succeed(val.Value);
        }

        /// <summary>
        /// Resolves the integer id for a GUID-based UDI; fails for non-GUID UDIs.
        /// </summary>
        public Attempt<int> GetIdForUdi(Udi udi)
        {
            var guidUdi = udi as GuidUdi;
            if (guidUdi == null)
                return Attempt<int>.Fail();

            var umbracoType = UdiEntityTypeHelper.ToUmbracoObjectType(guidUdi.EntityType);
            return GetIdForKey(guidUdi.Guid, umbracoType);
        }

        /// <summary>
        /// Builds a GUID UDI for an integer id, when its key can be resolved.
        /// </summary>
        public Attempt<Udi> GetUdiForId(int id, UmbracoObjectTypes umbracoObjectType)
        {
            var keyAttempt = GetKeyForId(id, umbracoObjectType);
            return keyAttempt
                ? Attempt.Succeed<Udi>(new GuidUdi(UdiEntityTypeHelper.FromUmbracoObjectType(umbracoObjectType), keyAttempt.Result))
                : Attempt<Udi>.Fail();
        }

        /// <summary>
        /// Resolves the GUID key for an integer id: cache first, then the registered
        /// mapper for the object type, then the database. Mirror of <see cref="GetIdForKey"/>.
        /// </summary>
        public Attempt<Guid> GetKeyForId(int id, UmbracoObjectTypes umbracoObjectType)
        {
            bool empty;
            try
            {
                _locker.EnterReadLock();
                if (_id2Key.TryGetValue(id, out var key) && key.UmbracoObjectType == umbracoObjectType)
                    return Attempt.Succeed(key.Id);
                empty = _id2Key.Count == 0; // only consumed by the POPULATE_FROM_DATABASE build
            }
            finally
            {
                if (_locker.IsReadLockHeld)
                    _locker.ExitReadLock();
            }

#if POPULATE_FROM_DATABASE
            // if cache is empty and looking for a document or a media,
            // populate the cache at once and return what we found
            if (empty && (umbracoObjectType == UmbracoObjectTypes.Document || umbracoObjectType == UmbracoObjectTypes.Media))
                return PopulateAndGetKeyForId(id, umbracoObjectType);
#endif

            // optimize for read speed: reading database outside a lock means that we could read
            // multiple times, but we don't lock the cache while accessing the database = better
            Guid? val = null;

            // a mapper returning default(Guid) means "not found" there
            if (_dictionary.TryGetValue(umbracoObjectType, out var mappers))
                if ((val = mappers.id2key(id)) == default(Guid)) val = null;

            if (val == null)
            {
                using (var scope = _scopeProvider.CreateScope())
                {
                    //if it's unknown don't include the nodeObjectType in the query
                    if (umbracoObjectType == UmbracoObjectTypes.Unknown)
                    {
                        val = scope.Database.ExecuteScalar<Guid?>("SELECT uniqueId FROM umbracoNode WHERE id=@id", new { id });
                    }
                    else
                    {
                        val = scope.Database.ExecuteScalar<Guid?>("SELECT uniqueId FROM umbracoNode WHERE id=@id AND (nodeObjectType=@type OR nodeObjectType=@reservation)",
                            new
                            {
                                id,
                                type = GetNodeObjectTypeGuid(umbracoObjectType),
                                reservation = Cms.Core.Constants.ObjectTypes.IdReservation
                            });
                    }
                    scope.Complete();
                }
            }

            if (val == null) return Attempt<Guid>.Fail();

            // cache reservations, when something is saved this cache is cleared anyways
            //if (umbracoObjectType == UmbracoObjectTypes.IdReservation)
            //    Attempt.Succeed(val.Value);

            try
            {
                _locker.EnterWriteLock();
                _id2Key[id] = new TypedId<Guid>(val.Value, umbracoObjectType);
                _key2Id[val.Value] = new TypedId<int>(id, umbracoObjectType);
            }
            finally
            {
                if (_locker.IsWriteLockHeld)
                    _locker.ExitWriteLock();
            }

            return Attempt.Succeed(val.Value);
        }

        // Maps an UmbracoObjectTypes value to its node-object-type GUID; throws for
        // types that have no GUID (Guid.Empty).
        private static Guid GetNodeObjectTypeGuid(UmbracoObjectTypes umbracoObjectType)
        {
            var guid = umbracoObjectType.GetGuid();
            if (guid == Guid.Empty)
                throw new NotSupportedException("Unsupported object type (" + umbracoObjectType + ").");
            return guid;
        }

        // invoked on UnpublishedPageCacheRefresher.RefreshAll
        // anything else will use the id-specific overloads
        public void ClearCache()
        {
            try
            {
                _locker.EnterWriteLock();
                _id2Key.Clear();
                _key2Id.Clear();
            }
            finally
            {
                if (_locker.IsWriteLockHeld)
                    _locker.ExitWriteLock();
            }
        }

        /// <summary>
        /// Removes a single id (and its paired key) from the cache, if present.
        /// </summary>
        public void ClearCache(int id)
        {
            try
            {
                _locker.EnterWriteLock();
                if (_id2Key.TryGetValue(id, out var key) == false) return;
                _id2Key.Remove(id);
                _key2Id.Remove(key.Id);
            }
            finally
            {
                if (_locker.IsWriteLockHeld)
                    _locker.ExitWriteLock();
            }
        }

        /// <summary>
        /// Removes a single key (and its paired id) from the cache, if present.
        /// </summary>
        public void ClearCache(Guid key)
        {
            try
            {
                _locker.EnterWriteLock();
                if (_key2Id.TryGetValue(key, out var id) == false) return;
                _id2Key.Remove(id.Id);
                _key2Id.Remove(key);
            }
            finally
            {
                if (_locker.IsWriteLockHeld)
                    _locker.ExitWriteLock();
            }
        }

        // ReSharper disable ClassNeverInstantiated.Local
        // ReSharper disable UnusedAutoPropertyAccessor.Local
        // Row shape for the POPULATE_FROM_DATABASE query.
        private class TypedIdDto
        {
            public int Id { get; set; }
            public Guid UniqueId { get; set; }
            public Guid NodeObjectType { get; set; }
        }
        // ReSharper restore ClassNeverInstantiated.Local
        // ReSharper restore UnusedAutoPropertyAccessor.Local

        // An id (int or Guid) tagged with the object type it was resolved for.
        private struct TypedId<T>
        {
            public TypedId(T id, UmbracoObjectTypes umbracoObjectType)
            {
                UmbracoObjectType = umbracoObjectType;
                Id = id;
            }

            public UmbracoObjectTypes UmbracoObjectType { get; }
            public T Id { get; }
        }

        protected virtual void Dispose(bool disposing)
        {
            if (!_disposedValue)
            {
                if (disposing)
                {
                    _locker.Dispose();
                }
                _disposedValue = true;
            }
        }

        public void Dispose()
        {
            // Do not change this code. Put cleanup code in 'Dispose(bool disposing)' method
            Dispose(disposing: true);
            // CA1816: suppress finalization so a derived type with a finalizer is not finalized
            // after an explicit Dispose.
            GC.SuppressFinalize(this);
        }
    }
}
37.16285
172
0.545224
[ "MIT" ]
Ambertvu/Umbraco-CMS
src/Umbraco.Infrastructure/Services/IdKeyMap.cs
14,607
C#
// Amplify Shader Editor - Visual Shader Editing Tool // Copyright (c) Amplify Creations, Lda <info@amplify.pt> using System; using System.Collections.Generic; using UnityEditor; using UnityEngine; namespace AmplifyShaderEditor { public enum PreviewLocation { Auto, TopCenter, BottomCenter, Left, Right } public enum NodeMessageType { Error, Warning, Info } [Serializable] public class ParentNode : UndoParentNode, ISerializationCallbackReceiver { protected readonly string[] PrecisionLabels = { "Float", "Half" }; protected readonly string[] PrecisionLabelsExtra = { "Float", "Half", "Inherit" }; private const double NodeClickTime = 0.2; protected GUIContent PrecisionContent = new GUIContent( "Precision", "Changes the precision of internal calculations, using lower types saves some performance\nDefault: Float" ); private const int MoveCountBuffer = 3;// When testing for stopped movement we need to take Layout and Repaint into account for them not to interfere with tests private const float MinInsideBoxWidth = 20; private const float MinInsideBoxHeight = 10; private const string WikiLinkStr = "online reference"; public delegate void OnNodeEvent( ParentNode node, bool testOnlySelected, InteractionMode interactionMode ); public delegate void OnNodeGenericEvent( ParentNode node ); public delegate void OnNodeReOrder( ParentNode node, int index ); public delegate void DrawPropertySection(); public delegate void OnSRPAction( int outputId, ref MasterNodeDataCollector dataCollector ); [SerializeField] protected PrecisionType m_currentPrecisionType = PrecisionType.Inherit; [SerializeField] protected bool m_customPrecision = false; [SerializeField] protected InteractionMode m_defaultInteractionMode = InteractionMode.Other; public event OnNodeEvent OnNodeStoppedMovingEvent; public OnNodeGenericEvent OnNodeChangeSizeEvent; public OnNodeGenericEvent OnNodeDestroyedEvent; public event OnNodeReOrder OnNodeReOrderEvent; public OnSRPAction OnLightweightAction; public OnSRPAction 
OnHDAction; [SerializeField] private int m_uniqueId; [SerializeField] protected Rect m_position; [SerializeField] protected Rect m_unpreviewedPosition; [SerializeField] protected GUIContent m_content; [SerializeField] protected GUIContent m_additionalContent; [SerializeField] protected bool m_initialized; [SerializeField] protected NodeConnectionStatus m_connStatus; protected bool m_selfPowered = false; [SerializeField] protected int m_activeConnections; [SerializeField] protected System.Type m_activeType; [SerializeField] protected int m_activePort; [SerializeField] protected int m_activeNode; protected NodeRestrictions m_restrictions; [SerializeField] protected Color m_statusColor; [SerializeField] protected Rect m_propertyDrawPos; // Ports [SerializeField] protected List<InputPort> m_inputPorts = new List<InputPort>(); protected Dictionary<int, InputPort> m_inputPortsDict = new Dictionary<int, InputPort>(); [SerializeField] protected List<OutputPort> m_outputPorts = new List<OutputPort>(); protected Dictionary<int, OutputPort> m_outputPortsDict = new Dictionary<int, OutputPort>(); [SerializeField] protected Rect m_globalPosition; [SerializeField] protected Rect m_headerPosition; //private Vector2 m_tooltipOffset; [SerializeField] protected bool m_sizeIsDirty = false; [SerializeField] protected Vector2 m_extraSize; [SerializeField] protected Vector2 m_insideSize; [SerializeField] protected float m_fontHeight; // Editor State save on Play Button [SerializeField] protected bool m_isDirty; [SerializeField] private int m_isMoving = 0; [SerializeField] private Rect m_lastPosition; // Live Shader Gen [SerializeField] private bool m_saveIsDirty; [SerializeField] protected bool m_requireMaterialUpdate = false; [SerializeField] protected int m_commentaryParent = -1; [SerializeField] protected int m_depth = -1; [SerializeField] protected bool m_materialMode = false; [SerializeField] protected bool m_showPreview = false; [SerializeField] protected int 
m_previewMaterialPassId = -1; protected bool m_useSquareNodeTitle = false; [SerializeField] protected bool m_continuousPreviewRefresh = false; private bool m_previewIsDirty = true; // Error Box Messages private Rect m_errorBox; private bool m_previousErrorMessage = false; protected bool m_showErrorMessage = false; protected NodeMessageType m_errorMessageTypeIsError = NodeMessageType.Error; protected string m_errorMessageTooltip = string.Empty; private GUIContent m_errorIcon = new GUIContent(); private GUIContent m_errorMessage = new GUIContent(); private GUIStyle m_errorCurrentStyle; private const string ErrorTitle = "ERROR"; private const string WarningTitle = "WARNING"; private const string InfoTitle = "INFO"; // Drawing Node protected PreviewLocation m_selectedLocation = PreviewLocation.Auto; private int m_extraHeaderHeight = 0; protected bool m_isVisible; protected bool m_selected = false; protected bool m_rmbIgnore; protected GUIContent m_sizeContentAux; protected uint m_currentReadParamIdx = 1; protected bool m_reorderLocked = false; protected Rect m_cachedPos; protected Vector2 m_accumDelta = Vector2.zero; private bool m_isOnGrid = false; protected bool m_useInternalPortData = false; protected bool m_autoDrawInternalPortData = true; protected DrawOrder m_drawOrder = DrawOrder.Default; protected bool m_movingInFrame = false; protected float m_anchorAdjust = -1; protected Color m_headerColor; [SerializeField] // needs to be serialized because of Undo protected Color m_headerColorModifier = Color.white; protected bool m_infiniteLoopDetected = false; protected int m_textLabelWidth = -1; private bool m_linkVisibility = false; [SerializeField] protected bool m_hasTooltipLink = true; protected int m_category = 0; protected double m_lastTimeSelected; private double m_tooltipTimestamp; protected string m_tooltipText; protected Rect m_unscaledRemainingBox; protected Rect m_remainingBox; private int m_visibleInputs = 0; private int m_visibleOutputs = 0; private double 
m_doubleClickTimestamp; private const double DoubleClickTime = 0.25; protected bool m_canExpand = true; protected bool m_firstDraw = true; protected int m_matrixId = -1; private float m_paddingTitleLeft = 0; private float m_paddingTitleRight = 0; // Preview Fields private Material m_previewMaterial = null; private Shader m_previewShader = null; protected string m_previewShaderGUID = string.Empty; protected float m_marginPreviewLeft = 0; protected bool m_globalShowPreview = false; protected Rect m_unscaledPreviewRect; protected Rect m_previewRect; protected bool m_drawPreviewMaskButtons = true; private int m_channelNumber = 0; protected bool m_firstPreviewDraw = true; [SerializeField] protected bool m_drawPreview = true; protected bool m_drawPreviewExpander = true; private bool m_spherePreview = false; protected bool m_drawPreviewAsSphere = false; protected bool m_forceDrawPreviewAsPlane = false; private bool m_finishPreviewRender = false; private int m_cachedMainTexId = -1; private int m_cachedMaskTexId = -1; private int m_cachedPortsId = -1; private int m_cachedPortId = -1; private int m_cachedDrawSphereId = -1; private int m_cachedInvertedZoomId = -1; //private int m_cachedIsLinearId = -1; private bool[] m_previewChannels = { true, true, true, false }; // Others protected bool m_hasSubtitle = false; protected bool m_showSubtitle = true; protected bool m_hasLeftDropdown = false; protected bool m_autoWrapProperties = false; protected bool m_internalDataFoldout = true; protected bool m_propertiesFoldout = true; protected bool m_repopulateDictionaries = true; protected Vector2 m_lastInputBottomRight = Vector2.zero; protected Vector2 m_lastOutputBottomLeft = Vector2.zero; private Vector4 m_portMask = Vector4.zero; private Vector2 m_auxVector2 = Vector4.zero; protected Rect m_auxRect; protected PreviewLocation m_autoLocation; protected Rect m_titlePos; protected Rect m_addTitlePos; protected Rect m_expandRect; protected Rect m_dropdownRect; protected Rect 
m_currInputPortPos; protected Rect m_currOutputPortPos; protected Color m_colorBuffer; [SerializeField] protected bool m_docking = false; [SerializeField] protected int m_visiblePorts = 0; protected int m_graphDepth = 0; protected int m_oldInputCount = -1; protected bool m_dropdownEditing = false; protected bool m_isNodeBeingCopied = false; protected string m_previousTitle = string.Empty; protected string m_previousAdditonalTitle = string.Empty; private bool m_alive = true; private bool m_wasDeprecated = false; private double m_timedUpdateInitialValue; private double m_timedUpdateInterval; private bool m_fireTimedUpdateRequest = false; public ParentNode() { m_position = new Rect( 0, 0, 0, 0 ); m_content = new GUIContent( GUIContent.none ); m_additionalContent = new GUIContent( GUIContent.none ); CommonInit( -1 ); } public ParentNode( int uniqueId, float x, float y, float width, float height ) { m_position = new Rect( x, y, width, height ); m_content = new GUIContent( GUIContent.none ); m_additionalContent = new GUIContent( GUIContent.none ); CommonInit( uniqueId ); } public virtual void OnEnable() { hideFlags = HideFlags.HideAndDontSave; if( m_nodeAttribs != null ) { if( UIUtils.HasColorCategory( m_nodeAttribs.Category ) ) { m_headerColor = UIUtils.GetColorFromCategory( m_nodeAttribs.Category ); } else { if( !string.IsNullOrEmpty( m_nodeAttribs.CustomCategoryColor ) ) { m_headerColor = UIUtils.AddColorCategory( m_nodeAttribs.Category, m_nodeAttribs.CustomCategoryColor ); } } } m_tooltipTimestamp = Time.realtimeSinceStartup; hideFlags = HideFlags.DontSave; } protected virtual void CommonInit( int uniqueId ) { m_uniqueId = uniqueId; m_isOnGrid = false; ConnStatus = NodeConnectionStatus.Not_Connected; m_inputPorts = new List<InputPort>(); m_inputPortsDict = new Dictionary<int, InputPort>(); m_outputPorts = new List<OutputPort>(); m_outputPortsDict = new Dictionary<int, OutputPort>(); System.Reflection.MemberInfo info = this.GetType(); m_nodeAttribs = 
info.GetCustomAttributes( true )[ 0 ] as NodeAttributes; if( m_nodeAttribs != null ) { m_content.text = m_nodeAttribs.Name; //m_content.tooltip = m_nodeAttribs.Description; m_tooltipText = m_nodeAttribs.Description; m_selected = false; } m_sizeContentAux = new GUIContent(); m_extraSize = new Vector2( 0, 0 ); m_insideSize = new Vector2( 0, 0 ); m_sizeIsDirty = true; m_initialized = true; m_restrictions = new NodeRestrictions(); m_propertyDrawPos = new Rect(); } public virtual void AfterCommonInit() { if( PreviewShader && !HasPreviewShader ) { m_drawPreview = false; m_drawPreviewExpander = false; m_canExpand = false; } if( m_drawPreviewExpander || m_hasLeftDropdown ) { m_paddingTitleRight += Constants.PreviewExpanderWidth + Constants.IconsLeftRightMargin; m_paddingTitleLeft = Constants.PreviewExpanderWidth + Constants.IconsLeftRightMargin; } } public virtual void Destroy() { m_alive = false; if( OnNodeDestroyedEvent != null ) { OnNodeDestroyedEvent( this ); OnNodeDestroyedEvent = null; } OnLightweightAction = null; OnHDAction = null; OnNodeStoppedMovingEvent = null; OnNodeChangeSizeEvent = null; OnNodeReOrderEvent = null; if( m_restrictions != null ) m_restrictions.Destroy(); m_restrictions = null; if( m_inputPorts != null ) { int inputCount = m_inputPorts.Count; for( int i = 0; i < inputCount; i++ ) { m_inputPorts[ i ].Destroy(); } m_inputPorts.Clear(); m_inputPorts = null; } if( m_outputPorts != null ) { int outputCount = m_outputPorts.Count; for( int i = 0; i < outputCount; i++ ) { m_outputPorts[ i ].Destroy(); } m_outputPorts.Clear(); m_outputPorts = null; } if( m_inputPortsDict != null ) m_inputPortsDict.Clear(); m_inputPortsDict = null; if( m_outputPortsDict != null ) m_outputPortsDict.Clear(); m_outputPortsDict = null; if( m_previewMaterial != null ) DestroyImmediate( m_previewMaterial ); m_previewMaterial = null; m_previewShader = null; //m_containerGraph = null; } public virtual void Move( Vector2 delta ) { if( m_docking ) return; Move( delta, false ); } 
public virtual void Move( Vector2 delta, bool snap )
{
	// No-op while docked to a panel
	if( m_docking )
		return;
	// First delta of a drag: remember the anchor position and reset the accumulator
	if( m_isMoving == 0 )
	{
		m_cachedPos = m_position;
		m_accumDelta = Vector2.zero;
	}
	m_isMoving = MoveCountBuffer;
	m_accumDelta += delta;
	if( snap )
	{
		// Snap the accumulated drag to a 16-unit grid relative to the anchor
		m_position.x = Mathf.Round( ( m_cachedPos.x + m_accumDelta.x ) / 16 ) * 16;
		m_position.y = Mathf.Round( ( m_cachedPos.y + m_accumDelta.y ) / 16 ) * 16;
	}
	else
	{
		m_position.x += delta.x;
		m_position.y += delta.y;
	}
	//if(Event.current.type == EventType.Layout)
	m_movingInFrame = true;
}

// Base implementation only clears the pending material-update flag; subclasses override
// to push their values into the material.
public virtual void UpdateMaterial( Material mat )
{
	m_requireMaterialUpdate = false;
}

// Material mode is on whenever a material is assigned.
public virtual void SetMaterialMode( Material mat, bool fetchMaterialValues )
{
	m_materialMode = ( mat != null );
}

// Subclasses override to write default values into the shader; base reports "nothing done".
public virtual bool UpdateShaderDefaults( ref Shader shader, ref TextureDefaultsDataColector defaultCol )
{
	return false;
}

// Hook for subclasses to pull their state from a material; base does nothing.
public virtual void ForceUpdateFromMaterial( Material material )
{
}

// Marks the graph as needing a save, but only when this node is actively connected.
public void SetSaveIsDirty()
{
	if( m_connStatus == NodeConnectionStatus.Connected )
	{
		SaveIsDirty = true;
	}
}

// Notifies listeners that this node should be re-ordered to the given index.
public void ActivateNodeReordering( int index )
{
	if( OnNodeReOrderEvent != null )
		OnNodeReOrderEvent( this, index );
}

// Reassigns sequential PortIds to all input ports and rebuilds the id dictionary,
// also patching the matching references held by the connected output ports so the
// wires keep pointing at the right port after a reorder.
void RecalculateInputPortIdx()
{
	m_inputPortsDict.Clear();
	int count = m_inputPorts.Count;
	for( int i = 0; i < count; i++ )
	{
		if( m_inputPorts[ i ].IsConnected )
		{
			int nodeId = m_inputPorts[ i ].ExternalReferences[ 0 ].NodeId;
			int portId = m_inputPorts[ i ].ExternalReferences[ 0 ].PortId;
			ParentNode node = UIUtils.GetNode( nodeId );
			if( node != null )
			{
				OutputPort outputPort = node.GetOutputPortByUniqueId( portId );
				int outputCount = outputPort.ExternalReferences.Count;
				for( int j = 0; j < outputCount; j++ )
				{
					if( outputPort.ExternalReferences[ j ].NodeId == m_uniqueId &&
						outputPort.ExternalReferences[ j ].PortId == m_inputPorts[ i ].PortId )
					{
						outputPort.ExternalReferences[ j ].PortId = i;
					}
				}
			}
		}
		m_inputPorts[ i ].PortId = i;
		m_inputPortsDict.Add( i, m_inputPorts[ i ] );
	}
}

// Moves the input port at fromIdx to position toIdx and renumbers all ports.
public void SwapInputPorts( int fromIdx, int toIdx )
{
	InputPort port =
m_inputPorts[ fromIdx ];
	//if( toIdx > fromIdx )
	//	toIdx--;
	m_inputPorts.Remove( port );
	m_inputPorts.Insert( toIdx, port );
	RecalculateInputPortIdx();
	SetSaveIsDirty();
}

// Removes the input port at array position idx (no-op when out of range).
public void RemoveInputPort( int idx )
{
	if( idx < m_inputPorts.Count )
	{
		m_inputPortsDict.Remove( m_inputPorts[ idx ].PortId );
		m_inputPorts.RemoveAt( idx );
		SetSaveIsDirty();
		m_sizeIsDirty = true;
	}
}

// Removes every output port whose name matches, deleting its wire connections first.
// FIX: previously the dictionary/list removal was nested inside the IsConnected check,
// so a disconnected matching port was never removed (inconsistent with the index-based
// overload below), and the forward loop kept using a stale cached count after RemoveAt,
// risking an out-of-range access. Iterating backwards removes safely in all cases.
public void RemoveOutputPort( string name )
{
	for( int i = m_outputPorts.Count - 1; i >= 0; i-- )
	{
		if( m_outputPorts[ i ].Name.Equals( name ) )
		{
			if( m_outputPorts[ i ].IsConnected )
			{
				m_containerGraph.DeleteConnection( false, m_uniqueId, m_outputPorts[ i ].PortId, false, true );
			}
			m_outputPortsDict.Remove( m_outputPorts[ i ].PortId );
			m_outputPorts.RemoveAt( i );
			SetSaveIsDirty();
			m_sizeIsDirty = true;
		}
	}
}

// Removes an output port either by array position ( isArrayIndex = true ) or by its
// unique port id, deleting its wire connections first.
public void RemoveOutputPort( int idx, bool isArrayIndex = true )
{
	if( isArrayIndex )
	{
		// idx represents a position on the output port array
		if( idx < m_outputPorts.Count )
		{
			if( m_outputPorts[ idx ].IsConnected )
			{
				m_containerGraph.DeleteConnection( false, m_uniqueId, m_outputPorts[ idx ].PortId, false, true );
			}
			m_outputPortsDict.Remove( m_outputPorts[ idx ].PortId );
			m_outputPorts.RemoveAt( idx );
			SetSaveIsDirty();
			m_sizeIsDirty = true;
		}
	}
	else
	{
		// idx represents a port unique id
		int count = m_outputPorts.Count;
		int arrIdx = -1;
		for( int i = 0; i < count; i++ )
		{
			if( m_outputPorts[ i ].PortId == idx )
			{
				arrIdx = i;
				break;
			}
		}
		if( arrIdx >= 0 )
		{
			if( m_outputPorts[ arrIdx ].IsConnected )
			{
				m_containerGraph.DeleteConnection( false, m_uniqueId, idx, false, true );
			}
			m_outputPortsDict.Remove( idx );
			m_outputPorts.RemoveAt( arrIdx );
			SetSaveIsDirty();
			m_sizeIsDirty = true;
		}
	}
}

// Manually add Ports
// Appends a new input port; a negative uniquePortId auto-assigns the next array index.
public InputPort AddInputPort( WirePortDataType type, bool typeLocked, string name, int orderId = -1, MasterNodePortCategory category = MasterNodePortCategory.Fragment, int uniquePortId = -1 )
{
	InputPort port = new InputPort( m_uniqueId, ( uniquePortId < 0 ?
m_inputPorts.Count : uniquePortId ), type, name, typeLocked, ( orderId >= 0 ? orderId : m_inputPorts.Count ), category );
	m_inputPorts.Add( port );
	m_inputPortsDict.Add( port.PortId, port );
	SetSaveIsDirty();
	m_sizeIsDirty = true;
	return port;
}

// Same as above but also assigns an explicit internal data name to the port.
public InputPort AddInputPort( WirePortDataType type, bool typeLocked, string name, string dataName, int orderId = -1, MasterNodePortCategory category = MasterNodePortCategory.Fragment, int uniquePortId = -1 )
{
	InputPort port = new InputPort( m_uniqueId, ( uniquePortId < 0 ? m_inputPorts.Count : uniquePortId ), type, name, dataName, typeLocked, ( orderId >= 0 ? orderId : m_inputPorts.Count ), category );
	m_inputPorts.Add( port );
	m_inputPortsDict.Add( port.PortId, port );
	SetSaveIsDirty();
	m_sizeIsDirty = true;
	return port;
}

// Inserts a new input port at array position idx and renumbers all ports afterwards.
public InputPort AddInputPortAt( int idx, WirePortDataType type, bool typeLocked, string name, int orderId = -1, MasterNodePortCategory category = MasterNodePortCategory.Fragment, int uniquePortId = -1 )
{
	InputPort port = new InputPort( m_uniqueId, ( uniquePortId < 0 ? m_inputPorts.Count : uniquePortId ), type, name, typeLocked, ( orderId >= 0 ? orderId : m_inputPorts.Count ), category );
	m_inputPorts.Insert( idx, port );
	m_inputPortsDict.Add( port.PortId, port );
	SetSaveIsDirty();
	m_sizeIsDirty = true;
	RecalculateInputPortIdx();
	return port;
}

// Appends a new output port; a negative uniquePortId auto-assigns the next array index.
public void AddOutputPort( WirePortDataType type, string name, int uniquePortId = -1 )
{
	m_outputPorts.Add( new OutputPort( this, m_uniqueId, ( uniquePortId < 0 ? m_outputPorts.Count : uniquePortId ), type, name ) );
	m_outputPortsDict.Add( m_outputPorts[ m_outputPorts.Count - 1 ].PortId, m_outputPorts[ m_outputPorts.Count - 1 ] );
	SetSaveIsDirty();
	m_sizeIsDirty = true;
}

// Inserts a new output port at array position idx.
public void AddOutputPortAt( int idx, WirePortDataType type, string name, int uniquePortId = -1 )
{
	OutputPort port = new OutputPort( this, m_uniqueId, ( uniquePortId < 0 ?
m_outputPorts.Count : uniquePortId ), type, name );
	m_outputPorts.Insert( idx, port );
	m_outputPortsDict.Add( port.PortId, port );
	SetSaveIsDirty();
	m_sizeIsDirty = true;
}

// Adds a main vector output port followed by one scalar (FLOAT) component port per
// channel of the given type (X/Y for FLOAT2, X/Y/Z for FLOAT3, X/Y/Z/W for FLOAT4).
public void AddOutputVectorPorts( WirePortDataType type, string name )
{
	m_sizeIsDirty = true;
	m_outputPorts.Add( new OutputPort( this, m_uniqueId, m_outputPorts.Count, type, name ) );
	m_outputPortsDict.Add( m_outputPorts[ m_outputPorts.Count - 1 ].PortId, m_outputPorts[ m_outputPorts.Count - 1 ] );
	switch( type )
	{
		case WirePortDataType.FLOAT2:
		{
			m_outputPorts.Add( new OutputPort( this, m_uniqueId, m_outputPorts.Count, WirePortDataType.FLOAT, "X" ) );
			m_outputPortsDict.Add( m_outputPorts[ m_outputPorts.Count - 1 ].PortId, m_outputPorts[ m_outputPorts.Count - 1 ] );
			m_outputPorts.Add( new OutputPort( this, m_uniqueId, m_outputPorts.Count, WirePortDataType.FLOAT, "Y" ) );
			m_outputPortsDict.Add( m_outputPorts[ m_outputPorts.Count - 1 ].PortId, m_outputPorts[ m_outputPorts.Count - 1 ] );
		}
		break;
		case WirePortDataType.FLOAT3:
		{
			m_outputPorts.Add( new OutputPort( this, m_uniqueId, m_outputPorts.Count, WirePortDataType.FLOAT, "X" ) );
			m_outputPortsDict.Add( m_outputPorts[ m_outputPorts.Count - 1 ].PortId, m_outputPorts[ m_outputPorts.Count - 1 ] );
			m_outputPorts.Add( new OutputPort( this, m_uniqueId, m_outputPorts.Count, WirePortDataType.FLOAT, "Y" ) );
			m_outputPortsDict.Add( m_outputPorts[ m_outputPorts.Count - 1 ].PortId, m_outputPorts[ m_outputPorts.Count - 1 ] );
			m_outputPorts.Add( new OutputPort( this, m_uniqueId, m_outputPorts.Count, WirePortDataType.FLOAT, "Z" ) );
			m_outputPortsDict.Add( m_outputPorts[ m_outputPorts.Count - 1 ].PortId, m_outputPorts[ m_outputPorts.Count - 1 ] );
		}
		break;
		case WirePortDataType.FLOAT4:
		{
			m_outputPorts.Add( new OutputPort( this, m_uniqueId, m_outputPorts.Count, WirePortDataType.FLOAT, "X" ) );
			m_outputPortsDict.Add( m_outputPorts[ m_outputPorts.Count - 1 ].PortId, m_outputPorts[ m_outputPorts.Count - 1 ] );
			m_outputPorts.Add( new OutputPort( this, m_uniqueId,
m_outputPorts.Count, WirePortDataType.FLOAT, "Y" ) );
			m_outputPortsDict.Add( m_outputPorts[ m_outputPorts.Count - 1 ].PortId, m_outputPorts[ m_outputPorts.Count - 1 ] );
			m_outputPorts.Add( new OutputPort( this, m_uniqueId, m_outputPorts.Count, WirePortDataType.FLOAT, "Z" ) );
			m_outputPortsDict.Add( m_outputPorts[ m_outputPorts.Count - 1 ].PortId, m_outputPorts[ m_outputPorts.Count - 1 ] );
			m_outputPorts.Add( new OutputPort( this, m_uniqueId, m_outputPorts.Count, WirePortDataType.FLOAT, "W" ) );
			m_outputPortsDict.Add( m_outputPorts[ m_outputPorts.Count - 1 ].PortId, m_outputPorts[ m_outputPorts.Count - 1 ] );
		}
		break;
	}
	SetSaveIsDirty();
}

// Flags the preview as needing a redraw; called when an upstream output changes.
public void SetPreviewDirtyFromOutputs()
{
	PreviewIsDirty = true;
}

// Appends the swizzle suffix ( .r/.g/.b/.a or .x/.y/.z/.w depending on whether the
// main output is a COLOR ) that corresponds to the component port being fetched;
// the component index is the offset of currentPortId from the main vector port id.
public string GetOutputVectorItem( int vectorPortId, int currentPortId, string result )
{
	if( m_outputPorts[ 0 ].DataType == WirePortDataType.COLOR )
	{
		switch( currentPortId - vectorPortId )
		{
			case 1: result += ".r"; break;
			case 2: result += ".g"; break;
			case 3: result += ".b"; break;
			case 4: result += ".a"; break;
		}
	}
	else
	{
		switch( currentPortId - vectorPortId )
		{
			case 1: result += ".x"; break;
			case 2: result += ".y"; break;
			case 3: result += ".z"; break;
			case 4: result += ".w"; break;
		}
	}
	return result;
}

// Adds a main color output ( COLOR when addAlpha, FLOAT3 otherwise ) plus one tinted
// scalar component port per channel.
public void AddOutputColorPorts( string name, bool addAlpha = true )
{
	m_sizeIsDirty = true;
	//Main port
	m_outputPorts.Add( new OutputPort( this, m_uniqueId, m_outputPorts.Count, addAlpha ?
WirePortDataType.COLOR : WirePortDataType.FLOAT3, name ) );
	m_outputPortsDict.Add( m_outputPorts[ m_outputPorts.Count - 1 ].PortId, m_outputPorts[ m_outputPorts.Count - 1 ] );
	//Color components port
	m_outputPorts.Add( new OutputPort( this, m_uniqueId, m_outputPorts.Count, WirePortDataType.FLOAT, "R" ) );
	m_outputPortsDict.Add( m_outputPorts[ m_outputPorts.Count - 1 ].PortId, m_outputPorts[ m_outputPorts.Count - 1 ] );
	m_outputPorts[ m_outputPorts.Count - 1 ].CustomColor = Color.red;
	m_outputPorts.Add( new OutputPort( this, m_uniqueId, m_outputPorts.Count, WirePortDataType.FLOAT, "G" ) );
	m_outputPortsDict.Add( m_outputPorts[ m_outputPorts.Count - 1 ].PortId, m_outputPorts[ m_outputPorts.Count - 1 ] );
	m_outputPorts[ m_outputPorts.Count - 1 ].CustomColor = Color.green;
	m_outputPorts.Add( new OutputPort( this, m_uniqueId, m_outputPorts.Count, WirePortDataType.FLOAT, "B" ) );
	m_outputPortsDict.Add( m_outputPorts[ m_outputPorts.Count - 1 ].PortId, m_outputPorts[ m_outputPorts.Count - 1 ] );
	m_outputPorts[ m_outputPorts.Count - 1 ].CustomColor = Color.blue;
	if( addAlpha )
	{
		m_outputPorts.Add( new OutputPort( this, m_uniqueId, m_outputPorts.Count, WirePortDataType.FLOAT, "A" ) );
		m_outputPortsDict.Add( m_outputPorts[ m_outputPorts.Count - 1 ].PortId, m_outputPorts[ m_outputPorts.Count - 1 ] );
		m_outputPorts[ m_outputPorts.Count - 1 ].CustomColor = Color.white;
	}
}

// Rebrands existing X/Y/Z/W vector component ports as R/G/B/A color component ports
// and retypes the main output to COLOR. Assumes ports 1..4 already exist.
public void ConvertFromVectorToColorPorts()
{
	m_outputPorts[ 0 ].ChangeType( WirePortDataType.COLOR, false );
	m_outputPorts[ 1 ].Name = "R";
	m_outputPorts[ 1 ].CustomColor = Color.red;
	m_outputPorts[ 2 ].Name = "G";
	m_outputPorts[ 2 ].CustomColor = Color.green;
	m_outputPorts[ 3 ].Name = "B";
	m_outputPorts[ 3 ].CustomColor = Color.blue;
	m_outputPorts[ 4 ].Name = "A";
	m_outputPorts[ 4 ].CustomColor = Color.white;
}

// Appends the .r/.g/.b/.a swizzle suffix selected by the component port's offset
// from the main color port id.
public string GetOutputColorItem( int vectorPortId, int currentPortId, string result )
{
	switch( currentPortId - vectorPortId )
	{
		case 1:
			result += ".r"; break;
		case 2:
			result += ".g"; break;
		case 3:
result += ".b"; break;
		case 4:
			result += ".a"; break;
	}
	return result;
}

// Retypes every output port, optionally invalidating incompatible connections.
public void ChangeOutputType( WirePortDataType type, bool invalidateConnections )
{
	int outputCount = m_outputPorts.Count;
	for( int i = 0; i < outputCount; i++ )
	{
		m_outputPorts[ i ].ChangeType( type, invalidateConnections );
	}
}

// Retypes every input port, optionally invalidating incompatible connections.
public void ChangeInputType( WirePortDataType type, bool invalidateConnections )
{
	int inputCount = m_inputPorts.Count;
	for( int i = 0; i < inputCount; i++ )
	{
		m_inputPorts[ i ].ChangeType( type, invalidateConnections );
	}
}

// Renames and retypes the output port at array position outputID (no-op when out of range).
public void ChangeOutputProperties( int outputID, string newName, WirePortDataType newType, bool invalidateConnections = true )
{
	if( outputID < m_outputPorts.Count )
	{
		m_outputPorts[ outputID ].ChangeProperties( newName, newType, invalidateConnections );
		IsDirty = true;
		m_sizeIsDirty = true;
		SetSaveIsDirty();
	}
}

// Renames the output port at array position outputArrayIdx (no-op when out of range).
public void ChangeOutputName( int outputArrayIdx, string newName )
{
	if( outputArrayIdx < m_outputPorts.Count )
	{
		m_outputPorts[ outputArrayIdx ].Name = newName;
		IsDirty = true;
		m_sizeIsDirty = true;
	}
}

// Returns the input port whose active (clickable) area contains pos, or null.
public InputPort CheckInputPortAt( Vector3 pos )
{
	int count = m_inputPorts.Count;
	for( int i = 0; i < count; i++ )
	{
		if( m_inputPorts[ i ].InsideActiveArea( pos ) )
			return m_inputPorts[ i ];
	}
	return null;
}

// Returns the first input port compatible with dataType ( optionally treating
// OBJECT-typed ports as wildcards ), or null.
public InputPort GetFirstInputPortOfType( WirePortDataType dataType, bool countObjectTypeAsValid )
{
	int count = m_inputPorts.Count;
	for( int i = 0; i < count; i++ )
	{
		if( ( m_inputPorts[ i ].CheckValidType( dataType ) ) || ( countObjectTypeAsValid && m_inputPorts[ i ].DataType == WirePortDataType.OBJECT ) )
			return m_inputPorts[ i ];
	}
	return null;
}

// Returns the output port whose active (clickable) area contains pos, or null.
public OutputPort CheckOutputPortAt( Vector3 pos )
{
	int count = m_outputPorts.Count;
	for( int i = 0; i < count; i++ )
	{
		if( m_outputPorts[ i ].InsideActiveArea( pos ) )
			return m_outputPorts[ i ];
	}
	return null;
}

// Returns the first output port compatible with dataType ( optionally accepting
// ports whose type can be cast to it ), or null.
public OutputPort GetFirstOutputPortOfType( WirePortDataType dataType, bool checkForCasts )
{
	int count = m_outputPorts.Count;
	for( int i = 0; i < count; i++ )
	{
		if( (
m_outputPorts[ i ].CheckValidType( dataType ) ) || ( checkForCasts && UIUtils.CanCast( dataType, m_outputPorts[ i ].DataType ) ) )
			return m_outputPorts[ i ];
	}
	return null;
}

// Called once a resize pass completes; resets the first-preview-draw flag.
virtual protected void ChangeSizeFinished() { m_firstPreviewDraw = true; /*MarkForPreviewUpdate();*/ }

// Recomputes the node rectangle from its header title width and the unzoomed label
// sizes of all visible input/output ports, then notifies size-change listeners.
protected void ChangeSize()
{
	m_cachedPos = m_position;
	//UIUtils.ResetMainSkin();
	// Measure the widest/tallest visible input label (recalculating stale label sizes)
	Vector2 inSize = Vector2.zero;
	int inputCount = 0;
	int inputSize = m_inputPorts.Count;
	for( int i = 0; i < inputSize; i++ )
	{
		if( m_inputPorts[ i ].Visible )
		{
			if( m_inputPorts[ i ].DirtyLabelSize || m_inputPorts[ i ].LabelSize == Vector2.zero )
			{
				m_inputPorts[ i ].DirtyLabelSize = false;
				m_sizeContentAux.text = m_inputPorts[ i ].Name;
				m_inputPorts[ i ].UnscaledLabelSize = UIUtils.UnZoomedInputPortStyle.CalcSize( m_sizeContentAux );
			}
			inSize.x = Mathf.Max( inSize.x, m_inputPorts[ i ].UnscaledLabelSize.x );
			inSize.y = Mathf.Max( inSize.y, m_inputPorts[ i ].UnscaledLabelSize.y );
			inputCount += 1;
		}
	}
	if( inSize.x > 0 )
		inSize.x += UIUtils.PortsSize.x + Constants.PORT_TO_LABEL_SPACE_X * 2;
	inSize.x += m_marginPreviewLeft;
	inSize.y = Mathf.Max( inSize.y, UIUtils.PortsSize.y );
	// Same measurement for the visible output labels
	Vector2 outSize = Vector2.zero;
	int outputCount = 0;
	int outputSize = m_outputPorts.Count;
	for( int i = 0; i < outputSize; i++ )
	{
		if( m_outputPorts[ i ].Visible )
		{
			if( m_outputPorts[ i ].DirtyLabelSize || m_outputPorts[ i ].LabelSize == Vector2.zero )
			{
				m_outputPorts[ i ].DirtyLabelSize = false;
				m_sizeContentAux.text = m_outputPorts[ i ].Name;
				m_outputPorts[ i ].UnscaledLabelSize = UIUtils.UnZoomedOutputPortPortStyle.CalcSize( m_sizeContentAux );
			}
			outSize.x = Mathf.Max( outSize.x, m_outputPorts[ i ].UnscaledLabelSize.x );
			outSize.y = Mathf.Max( outSize.y, m_outputPorts[ i ].UnscaledLabelSize.y );
			outputCount += 1;
		}
	}
	if( outSize.x > 0 )
		outSize.x += UIUtils.PortsSize.x + Constants.PORT_TO_LABEL_SPACE_X * 2;
	outSize.y = Mathf.Max( outSize.y, UIUtils.PortsSize.y );
	// Reserve extra header height when a subtitle is present
	if( m_additionalContent.text.Length > 0 )
	{
		m_extraHeaderHeight =
(int)Constants.NODE_HEADER_EXTRA_HEIGHT;
		m_hasSubtitle = true && m_showSubtitle;
	}
	else
	{
		m_extraHeaderHeight = 0;
		m_hasSubtitle = false;
	}
	// Width: widest of ( title , subtitle , inner box + port columns ) plus margins
	float headerWidth = Mathf.Max( UIUtils.UnZoomedNodeTitleStyle.CalcSize( m_content ).x + m_paddingTitleLeft + m_paddingTitleRight, UIUtils.UnZoomedPropertyValuesTitleStyle.CalcSize( m_additionalContent ).x + m_paddingTitleLeft + m_paddingTitleRight );
	m_position.width = Mathf.Max( headerWidth, Mathf.Max( MinInsideBoxWidth, m_insideSize.x ) + inSize.x + outSize.x ) + Constants.NODE_HEADER_LEFTRIGHT_MARGIN * 2;
	//m_position.width += m_extraSize.x;
	// Height: one row per port on the longer side, plus header and error strip
	m_fontHeight = Mathf.Max( inSize.y, outSize.y );
	m_position.height = Mathf.Max( inputCount, outputCount ) * ( m_fontHeight + Constants.INPUT_PORT_DELTA_Y );// + Constants.INPUT_PORT_DELTA_Y;
	m_position.height = Mathf.Max( m_position.height, Mathf.Max( MinInsideBoxHeight, m_insideSize.y ) );
	m_position.height += UIUtils.HeaderMaxHeight + m_extraHeaderHeight + Constants.INPUT_PORT_DELTA_Y;// + m_extraSize.y;
	if( m_showErrorMessage )
		m_position.height += 24;
	m_unpreviewedPosition = m_position;
	//UIUtils.CurrentWindow.CameraDrawInfo.InvertedZoom = cachedZoom;
	if( OnNodeChangeSizeEvent != null )
	{
		OnNodeChangeSizeEvent( this );
	}
	ChangeSizeFinished();
}

// Hook for subclasses; base does nothing.
public virtual void Reset() { }

// Hook invoked when one of this node's outputs gets wired; base does nothing.
public virtual void OnOutputPortConnected( int portId, int otherNodeId, int otherPortId ) { }

// Invoked when one of this node's inputs gets wired: propagates activation upstream
// when this node is itself active, then dirties the preview and save state.
public virtual void OnInputPortConnected( int portId, int otherNodeId, int otherPortId, bool activateNode = true )
{
	InputPort port = GetInputPortByUniqueId( portId );
	if( activateNode && m_connStatus == NodeConnectionStatus.Connected )
	{
		port.GetOutputNode().ActivateNode( m_activeNode, m_activePort, m_activeType );
	}
	PreviewIsDirty = true;
	OnNodeChange();
	SetSaveIsDirty();
}

// Invoked when one of this node's inputs is unwired.
public virtual void OnInputPortDisconnected( int portId )
{
	PreviewIsDirty = true;
	OnNodeChange();
}

// Hook invoked when one of this node's outputs is unwired; base does nothing.
public virtual void OnOutputPortDisconnected( int portId ) { }

// Propagates a change notification downstream through every connected output.
public virtual void OnNodeChange()
{
	CheckSpherePreview();
	int count =
m_outputPorts.Count;
	for( int i = 0; i < count; i++ )
	{
		if( m_outputPorts[ i ].IsConnected )
		{
			for( int f = 0; f < m_outputPorts[ i ].ExternalReferences.Count; f++ )
			{
				ContainerGraph.GetNode( m_outputPorts[ i ].ExternalReferences[ f ].NodeId ).OnNodeChange();
			}
		}
	}
}

// Marks this node as part of an active signal chain originating at the given generator
// node/port and recursively activates everything connected to its inputs. Restriction
// violations flag the connection status as Error instead of Connected.
public virtual void ActivateNode( int signalGenNodeId, int signalGenPortId, System.Type signalGenNodeType )
{
	if( m_selfPowered )
		return;
	ConnStatus = m_restrictions.GetRestiction( signalGenNodeType, signalGenPortId ) ? NodeConnectionStatus.Error : NodeConnectionStatus.Connected;
	m_activeConnections += 1;
	// Only the first activation records the generator and recurses upstream
	if( m_activeConnections == 1 )
	{
		m_activeType = signalGenNodeType;
		m_activeNode = signalGenNodeId;
		m_activePort = signalGenPortId;
		for( int i = 0; i < m_inputPorts.Count; i++ )
		{
			if( m_inputPorts[ i ].IsConnected )
			{
				m_inputPorts[ i ].GetOutputNode().ActivateNode( signalGenNodeId, signalGenPortId, signalGenNodeType );
			}
		}
	}
	// saveisdirty might be needed, gonna leave this here for now
	// SetSaveIsDirty();
}

// Deactivates the node feeding the given input port.
public virtual void DeactivateInputPortNode( int deactivatedPort, bool forceComplete )
{
	GetInputPortByUniqueId( deactivatedPort ).GetOutputNode().DeactivateNode( deactivatedPort, false );
}

// Decrements the active-connection count and, once it reaches zero ( or when forced ),
// resets connection status and recursively deactivates everything upstream.
public virtual void DeactivateNode( int deactivatedPort, bool forceComplete )
{
	if( m_selfPowered )
		return;
	// saveisdirty might be needed, gonna leave this here for now
	// SetSaveIsDirty();
	m_activeConnections -= 1;
	if( forceComplete || m_activeConnections <= 0 )
	{
		m_activeConnections = 0;
		ConnStatus = NodeConnectionStatus.Not_Connected;
		for( int i = 0; i < m_inputPorts.Count; i++ )
		{
			if( m_inputPorts[ i ].IsConnected )
			{
				ParentNode node = m_inputPorts[ i ].GetOutputNode();
				if( node != null )
					node.DeactivateNode( deactivatedPort == -1 ?
m_inputPorts[ i ].PortId : deactivatedPort, false );
			}
		}
	}
}

// Converts the node's screen-space rect back into graph-space coordinates.
public Rect GlobalToLocalPosition( DrawInfo drawInfo )
{
	float width = m_globalPosition.width / drawInfo.InvertedZoom;
	float height = m_globalPosition.height / drawInfo.InvertedZoom;
	float x = m_globalPosition.x / drawInfo.InvertedZoom - drawInfo.CameraOffset.x;
	float y = m_globalPosition.y / drawInfo.InvertedZoom - drawInfo.CameraOffset.y;
	return new Rect( x, y, width, height );
}

// Projects the graph-space rect into screen space ( zoom + camera offset ), updates
// the on-screen visibility flag, and counts down the "is moving" frame buffer once
// the node's position stabilizes, firing OnSelfStoppedMovingEvent when it reaches zero.
protected void CalculatePositionAndVisibility( DrawInfo drawInfo )
{
	//m_movingInFrame = false;
	m_globalPosition = m_position;
	m_globalPosition.x = drawInfo.InvertedZoom * ( m_globalPosition.x + drawInfo.CameraOffset.x );
	m_globalPosition.y = drawInfo.InvertedZoom * ( m_globalPosition.y + drawInfo.CameraOffset.y );
	m_globalPosition.width *= drawInfo.InvertedZoom;
	m_globalPosition.height *= drawInfo.InvertedZoom;
	m_isVisible = ( m_globalPosition.x + m_globalPosition.width > 0 ) && ( m_globalPosition.x < drawInfo.CameraArea.width ) && ( m_globalPosition.y + m_globalPosition.height > 0 ) && ( m_globalPosition.y < drawInfo.CameraArea.height );
	if( m_isMoving > 0 && drawInfo.CurrentEventType != EventType.MouseDrag )
	{
		float deltaX = Mathf.Abs( m_lastPosition.x - m_position.x );
		float deltaY = Mathf.Abs( m_lastPosition.y - m_position.y );
		if( deltaX < 0.01f && deltaY < 0.01f )
		{
			m_isMoving -= 1;
			if( m_isMoving == 0 )
			{
				OnSelfStoppedMovingEvent();
			}
		}
		else
		{
			m_isMoving = MoveCountBuffer;
		}
		m_lastPosition = m_position;
	}
}

// Raises the stopped-moving event for listeners.
public void FireStoppedMovingEvent( bool testOnlySelected, InteractionMode interactionMode )
{
	if( OnNodeStoppedMovingEvent != null )
		OnNodeStoppedMovingEvent( this, testOnlySelected, interactionMode );
}

// Default stopped-moving notification using the node's default interaction mode.
public virtual void OnSelfStoppedMovingEvent()
{
	FireStoppedMovingEvent( true, m_defaultInteractionMode );
}

// Draws the precision (float/half) popup in the node properties panel; withInherit
// adds the "inherit" option to the label set.
protected void DrawPrecisionProperty( bool withInherit = true )
{
	if( withInherit )
		m_currentPrecisionType = (PrecisionType)EditorGUILayoutPopup( PrecisionContent.text, (int)m_currentPrecisionType,
PrecisionLabelsExtra );
	else
		m_currentPrecisionType = (PrecisionType)EditorGUILayoutPopup( PrecisionContent.text, (int)m_currentPrecisionType, PrecisionLabels );
}

// Draws the node title; skipped at the most zoomed-out LOD levels.
public virtual void DrawTitle( Rect titlePos )
{
	if( ContainerGraph.LodLevel <= ParentGraph.NodeLOD.LOD3 )
	{
		GUI.Label( titlePos, m_content, UIUtils.GetCustomStyle( CustomStyle.NodeTitle ) );
	}
}

// Renders the node preview texture into rect, building a channel mask from the main
// output's data type and the per-channel toggle state, then blitting with the shared
// linear material ( pass chosen by sphere mode and alpha presence ).
public virtual void DrawPreview( DrawInfo drawInfo, Rect rect )
{
	//if ( !m_drawPreview )
	//	return;
	// Lazily cache the shader property ids on first use
	if( m_cachedDrawSphereId == -1 )
		m_cachedDrawSphereId = Shader.PropertyToID( "_DrawSphere" );
	if( m_cachedInvertedZoomId == -1 )
		m_cachedInvertedZoomId = Shader.PropertyToID( "_InvertedZoom" );
	m_channelNumber = 0;
	Vector4 mask = Vector4.one;
	if( m_outputPorts.Count > 0 )
	{
		switch( m_outputPorts[ 0 ].DataType )
		{
			case WirePortDataType.FLOAT:
				m_channelNumber = 1;
				mask.Set( 1, 1, 1, 0 );
				break;
			case WirePortDataType.FLOAT2:
				m_channelNumber = 2;
				mask.Set( m_previewChannels[ 0 ] ? 1 : 0, m_previewChannels[ 1 ] ? 1 : 0, 1, 0 );
				break;
			case WirePortDataType.COLOR:
			case WirePortDataType.FLOAT4:
			case WirePortDataType.SAMPLER1D:
			case WirePortDataType.SAMPLER2D:
			case WirePortDataType.SAMPLER3D:
			case WirePortDataType.SAMPLERCUBE:
			case WirePortDataType.SAMPLER2DARRAY:
				m_channelNumber = 4;
				mask.Set( m_previewChannels[ 0 ] ? 1 : 0, m_previewChannels[ 1 ] ? 1 : 0, m_previewChannels[ 2 ] ? 1 : 0, m_previewChannels[ 3 ] ? 1 : 0 );
				break;
			default:
				m_channelNumber = 3;
				mask.Set( m_previewChannels[ 0 ] ? 1 : 0, m_previewChannels[ 1 ] ? 1 : 0, m_previewChannels[ 2 ] ? 1 : 0, 0 );
				break;
		}
	}
	UIUtils.LinearMaterial.SetFloat( m_cachedDrawSphereId, ( SpherePreview ?
1 : 0 ) );
	UIUtils.LinearMaterial.SetFloat( m_cachedInvertedZoomId, drawInfo.InvertedZoom );
	UIUtils.LinearMaterial.SetVector( "_Mask", mask );
	// Force linear->sRGB conversion while blitting the preview, restoring the flag after
	bool cached = GL.sRGBWrite;
	GL.sRGBWrite = true;
	//EditorGUI.DrawPreviewTexture( rect, PreviewTexture, UIUtils.LinearMaterial );
	// Material pass: 0 = flat, 1 = sphere, 2 = flat+alpha, 3 = sphere+alpha
	int pass = 0;
	if( SpherePreview )
	{
		if( mask.w == 1 )
			pass = 3;
		else
			pass = 1;
	}
	else if( mask.w == 1 )
		pass = 2;
	Graphics.DrawTexture( rect, PreviewTexture, UIUtils.LinearMaterial, pass );
	GL.sRGBWrite = cached;
	//Preview buttons
	if( m_drawPreviewMaskButtons )
		DrawPreviewMaskButtonsRepaint( drawInfo, rect );
}

// Layout-phase handling of the per-channel preview toggle buttons: processes the
// invisible GUI.Toggle hit areas along the bottom of the preview when hovered
// ( skipped at low LOD or for single-channel previews ).
protected void DrawPreviewMaskButtonsLayout( DrawInfo drawInfo, Rect rect )
{
	if( rect.Contains( drawInfo.MousePosition ) && m_channelNumber > 1 && ContainerGraph.LodLevel <= ParentGraph.NodeLOD.LOD2 )
	{
		Rect buttonRect = rect;
		buttonRect.height = 14 * drawInfo.InvertedZoom;
		buttonRect.y = rect.yMax - buttonRect.height;
		buttonRect.width = 14 * drawInfo.InvertedZoom;
		if( m_channelNumber == 2 )
		{
			m_previewChannels[ 0 ] = GUI.Toggle( buttonRect, m_previewChannels[ 0 ], string.Empty, GUIStyle.none );
			buttonRect.x += 14 * drawInfo.InvertedZoom;
			m_previewChannels[ 1 ] = GUI.Toggle( buttonRect, m_previewChannels[ 1 ], string.Empty, GUIStyle.none );
		}
		else if( m_channelNumber == 3 )
		{
			m_previewChannels[ 0 ] = GUI.Toggle( buttonRect, m_previewChannels[ 0 ], string.Empty, GUIStyle.none );
			buttonRect.x += 14 * drawInfo.InvertedZoom;
			m_previewChannels[ 1 ] = GUI.Toggle( buttonRect, m_previewChannels[ 1 ], string.Empty, GUIStyle.none );
			buttonRect.x += 14 * drawInfo.InvertedZoom;
			m_previewChannels[ 2 ] = GUI.Toggle( buttonRect, m_previewChannels[ 2 ], string.Empty, GUIStyle.none );
		}
		else if( m_channelNumber == 4 )
		{
			m_previewChannels[ 0 ] = GUI.Toggle( buttonRect, m_previewChannels[ 0 ], string.Empty, GUIStyle.none );
			buttonRect.x += 14 * drawInfo.InvertedZoom;
			m_previewChannels[ 1 ] = GUI.Toggle( buttonRect, m_previewChannels[ 1 ], string.Empty, GUIStyle.none );
			buttonRect.x += 14 *
drawInfo.InvertedZoom;
			m_previewChannels[ 2 ] = GUI.Toggle( buttonRect, m_previewChannels[ 2 ], string.Empty, GUIStyle.none );
			buttonRect.x += 14 * drawInfo.InvertedZoom;
			m_previewChannels[ 3 ] = GUI.Toggle( buttonRect, m_previewChannels[ 3 ], string.Empty, GUIStyle.none );
		}
	}
}

// Repaint-phase counterpart of the layout method above: draws the channel letter
// buttons ( R/G/B/A ) highlighted according to the current toggle state.
protected void DrawPreviewMaskButtonsRepaint( DrawInfo drawInfo, Rect rect )
{
	if( drawInfo.CurrentEventType == EventType.Repaint && ContainerGraph.LodLevel <= ParentGraph.NodeLOD.LOD2 && rect.Contains( drawInfo.MousePosition ) && m_channelNumber > 1 )
	{
		Rect buttonRect = rect;
		buttonRect.height = 14 * drawInfo.InvertedZoom;
		buttonRect.y = rect.yMax - buttonRect.height;
		buttonRect.width = 14 * drawInfo.InvertedZoom;
		if( m_channelNumber == 2 )
		{
			UIUtils.MiniButtonTopMid.Draw( buttonRect, "R", false, false, m_previewChannels[ 0 ], false );
			buttonRect.x += 14 * drawInfo.InvertedZoom;
			UIUtils.MiniButtonTopRight.Draw( buttonRect, "G", false, false, m_previewChannels[ 1 ], false );
		}
		else if( m_channelNumber == 3 )
		{
			UIUtils.MiniButtonTopMid.Draw( buttonRect, "R", false, false, m_previewChannels[ 0 ], false );
			buttonRect.x += 14 * drawInfo.InvertedZoom;
			UIUtils.MiniButtonTopMid.Draw( buttonRect, "G", false, false, m_previewChannels[ 1 ], false );
			buttonRect.x += 14 * drawInfo.InvertedZoom;
			UIUtils.MiniButtonTopRight.Draw( buttonRect, "B", false, false, m_previewChannels[ 2 ], false );
		}
		else if( m_channelNumber == 4 )
		{
			UIUtils.MiniButtonTopMid.Draw( buttonRect, "R", false, false, m_previewChannels[ 0 ], false );
			buttonRect.x += 14 * drawInfo.InvertedZoom;
			UIUtils.MiniButtonTopMid.Draw( buttonRect, "G", false, false, m_previewChannels[ 1 ], false );
			buttonRect.x += 14 * drawInfo.InvertedZoom;
			UIUtils.MiniButtonTopMid.Draw( buttonRect, "B", false, false, m_previewChannels[ 2 ], false );
			buttonRect.x += 14 * drawInfo.InvertedZoom;
			UIUtils.MiniButtonTopRight.Draw( buttonRect, "A", false, false, m_previewChannels[ 3 ], false );
		}
	}
}

// Schedules FireTimedUpdate() to run once timerInterval seconds have elapsed
// ( polled by OnNodeLogicUpdate ).
public void SetTimedUpdate( double timerInterval )
{
m_timedUpdateInitialValue = EditorApplication.timeSinceStartup;
	m_timedUpdateInterval = timerInterval;
	m_fireTimedUpdateRequest = true;
}

// Hook invoked when a scheduled timed update elapses; base does nothing.
public virtual void FireTimedUpdate() { }

/// <summary>
/// Per-frame logic update: fires any elapsed timed-update request and rebuilds the
/// port-id dictionaries when they have been flagged for repopulation.
/// </summary>
/// <param name="drawInfo"></param>
public virtual void OnNodeLogicUpdate( DrawInfo drawInfo )
{
	if( m_fireTimedUpdateRequest && ( EditorApplication.timeSinceStartup - m_timedUpdateInitialValue ) > m_timedUpdateInterval )
	{
		m_fireTimedUpdateRequest = false;
		FireTimedUpdate();
	}
	if( m_repopulateDictionaries )
	{
		m_repopulateDictionaries = false;
		m_inputPortsDict.Clear();
		int inputCount = m_inputPorts.Count;
		for( int i = 0; i < inputCount; i++ )
		{
			m_inputPortsDict.Add( m_inputPorts[ i ].PortId, m_inputPorts[ i ] );
		}
		m_outputPortsDict.Clear();
		int outputCount = m_outputPorts.Count;
		for( int i = 0; i < outputCount; i++ )
		{
			m_outputPortsDict.Add( m_outputPorts[ i ].PortId, m_outputPorts[ i ] );
		}
	}
}

/// <summary>
/// This method should only be called to calculate layouts of elements to be draw later, only runs once per frame and before wires are drawn
/// </summary>
/// <param name="drawInfo"></param>
public virtual void OnNodeLayout( DrawInfo drawInfo )
{
	if( ContainerGraph.ChangedLightingModel )
	{
		m_sizeIsDirty = true;
		m_firstPreviewDraw = true;
	}
	if( m_firstDraw )
	{
		m_firstDraw = false;
		AfterCommonInit();
		OnNodeChange();
	}
	if( m_previousErrorMessage != m_showErrorMessage )
	{
		m_sizeIsDirty = true;
	}
	if( m_sizeIsDirty )
	{
		m_sizeIsDirty = false;
		ChangeSize();
	}
	CalculatePositionAndVisibility( drawInfo );
	m_unscaledRemainingBox = m_position;
	m_remainingBox = m_globalPosition;
	m_lastInputBottomRight = m_position.position;
	m_lastOutputBottomLeft = m_position.position;
	m_lastOutputBottomLeft.x += m_position.width;
	m_visibleInputs = 0;
	m_visibleOutputs = 0;
	if( m_hasSubtitle )
		m_extraHeaderHeight = (int)Constants.NODE_HEADER_EXTRA_HEIGHT;
	else
		m_extraHeaderHeight = 0;
	m_lastInputBottomRight.y += UIUtils.HeaderMaxHeight + m_extraHeaderHeight;
m_lastOutputBottomLeft.y += UIUtils.HeaderMaxHeight + m_extraHeaderHeight; m_unscaledRemainingBox.y += UIUtils.HeaderMaxHeight + m_extraHeaderHeight; if( m_isVisible ) { // Header m_headerPosition = m_globalPosition; m_headerPosition.height = UIUtils.CurrentHeaderHeight + m_extraHeaderHeight * drawInfo.InvertedZoom; // Title m_titlePos = m_globalPosition; m_titlePos.height = m_headerPosition.height; if( m_hasSubtitle ) m_titlePos.yMin += ( 4 * drawInfo.InvertedZoom ); else m_titlePos.yMin += ( 7 * drawInfo.InvertedZoom ); m_titlePos.width -= ( m_paddingTitleLeft + m_paddingTitleRight ) * drawInfo.InvertedZoom; m_titlePos.x += m_paddingTitleLeft * drawInfo.InvertedZoom; // Additional Title if( m_hasSubtitle ) { m_addTitlePos = m_titlePos; m_addTitlePos.y = m_globalPosition.y; m_addTitlePos.yMin += ( 19 * drawInfo.InvertedZoom ); } // Left Dropdown if( m_hasLeftDropdown && ContainerGraph.LodLevel <= ParentGraph.NodeLOD.LOD4 ) { m_dropdownRect = m_headerPosition; m_dropdownRect.width = Constants.NodeButtonSizeX * drawInfo.InvertedZoom; m_dropdownRect.x = m_globalPosition.x + ( Constants.IconsLeftRightMargin + 1 ) * drawInfo.InvertedZoom; m_dropdownRect.height = Constants.NodeButtonSizeY * drawInfo.InvertedZoom; m_dropdownRect.y = m_globalPosition.y + m_headerPosition.height * 0.5f - 14 * drawInfo.InvertedZoom * 0.5f; } // Expander if( m_drawPreviewExpander && ContainerGraph.LodLevel <= ParentGraph.NodeLOD.LOD4 ) { m_expandRect = m_globalPosition; m_expandRect.width = Constants.PreviewExpanderWidth * drawInfo.InvertedZoom; m_expandRect.x = m_globalPosition.x + m_globalPosition.width - ( Constants.IconsLeftRightMargin + Constants.PreviewExpanderWidth ) * drawInfo.InvertedZoom; //titlePos.x + titlePos.width; m_expandRect.height = Constants.PreviewExpanderHeight * drawInfo.InvertedZoom; m_expandRect.y = m_globalPosition.y + m_headerPosition.height * 0.5f - Constants.PreviewExpanderHeight * drawInfo.InvertedZoom * 0.5f; } } if( m_anchorAdjust < 0 ) { m_anchorAdjust = 
UIUtils.GetCustomStyle( CustomStyle.PortEmptyIcon ).normal.background.width; } m_unscaledRemainingBox.y += Constants.INPUT_PORT_DELTA_Y; m_lastOutputBottomLeft.y += Constants.INPUT_PORT_DELTA_Y; m_lastInputBottomRight.y += Constants.INPUT_PORT_DELTA_Y; // Input Ports { m_currInputPortPos = m_globalPosition; m_currInputPortPos.width = drawInfo.InvertedZoom * UIUtils.PortsSize.x; m_currInputPortPos.height = drawInfo.InvertedZoom * UIUtils.PortsSize.y; m_currInputPortPos.x += drawInfo.InvertedZoom * Constants.PORT_INITIAL_X; m_currInputPortPos.y += drawInfo.InvertedZoom * Constants.PORT_INITIAL_Y + m_extraHeaderHeight * drawInfo.InvertedZoom; int inputCount = m_inputPorts.Count; float initialX = m_lastInputBottomRight.x; for( int i = 0; i < inputCount; i++ ) { if( m_inputPorts[ i ].Visible ) { m_visibleInputs++; // Button m_inputPorts[ i ].Position = m_currInputPortPos; // Label m_inputPorts[ i ].LabelPosition = m_currInputPortPos; float deltaX = 1f * drawInfo.InvertedZoom * ( UIUtils.PortsSize.x + Constants.PORT_TO_LABEL_SPACE_X ); m_auxRect = m_inputPorts[ i ].LabelPosition; m_auxRect.x += deltaX; m_inputPorts[ i ].LabelPosition = m_auxRect; //if( m_inputPorts[ i ].DirtyLabelSize || m_inputPorts[ i ].LabelSize == Vector2.zero ) //{ // m_inputPorts[ i ].DirtyLabelSize = false; // m_sizeContentAux.text = m_inputPorts[ i ].Name; // m_inputPorts[ i ].UnscaledLabelSize = UIUtils.UnZoomedInputPortStyle.CalcSize( m_sizeContentAux ); //} m_inputPorts[ i ].LabelSize = m_inputPorts[ i ].UnscaledLabelSize * drawInfo.InvertedZoom; m_lastInputBottomRight.x = Mathf.Max( m_lastInputBottomRight.x, initialX + m_inputPorts[ i ].UnscaledLabelSize.x + Constants.PORT_INITIAL_X + Constants.PORT_TO_LABEL_SPACE_X + UIUtils.PortsSize.x ); if( !m_inputPorts[ i ].Locked ) { float overflow = 2; float scaledOverflow = 4 * drawInfo.InvertedZoom; m_auxRect = m_currInputPortPos; m_auxRect.yMin -= scaledOverflow + overflow; m_auxRect.yMax += scaledOverflow + overflow; m_auxRect.xMin -= 
Constants.PORT_INITIAL_X * drawInfo.InvertedZoom + scaledOverflow + overflow; if( m_containerGraph.ParentWindow.WireReferenceUtils.OutputPortReference.IsValid ) m_auxRect.xMax += m_inputPorts[ i ].LabelSize.x + Constants.PORT_TO_LABEL_SPACE_X * drawInfo.InvertedZoom + scaledOverflow + overflow; else m_auxRect.xMax += Constants.PORT_TO_LABEL_SPACE_X * drawInfo.InvertedZoom + scaledOverflow + overflow; m_inputPorts[ i ].ActivePortArea = m_auxRect; } m_currInputPortPos.y += drawInfo.InvertedZoom * ( m_fontHeight + Constants.INPUT_PORT_DELTA_Y ); //GUI.Label( m_inputPorts[ i ].ActivePortArea, string.Empty, UIUtils.Box ); } } if( m_visibleInputs > 0 ) m_lastInputBottomRight.y += m_fontHeight * m_visibleInputs + Constants.INPUT_PORT_DELTA_Y * ( m_visibleInputs - 1 ); } // Output Ports { m_currOutputPortPos = m_globalPosition; m_currOutputPortPos.width = drawInfo.InvertedZoom * UIUtils.PortsSize.x; m_currOutputPortPos.height = drawInfo.InvertedZoom * UIUtils.PortsSize.y; m_currOutputPortPos.x += ( m_globalPosition.width - drawInfo.InvertedZoom * ( Constants.PORT_INITIAL_X + m_anchorAdjust ) ); m_currOutputPortPos.y += drawInfo.InvertedZoom * Constants.PORT_INITIAL_Y + m_extraHeaderHeight * drawInfo.InvertedZoom; int outputCount = m_outputPorts.Count; float initialX = m_lastOutputBottomLeft.x; for( int i = 0; i < outputCount; i++ ) { if( m_outputPorts[ i ].Visible ) { m_visibleOutputs++; //Button m_outputPorts[ i ].Position = m_currOutputPortPos; // Label m_outputPorts[ i ].LabelPosition = m_currOutputPortPos; float deltaX = 1f * drawInfo.InvertedZoom * ( UIUtils.PortsSize.x + Constants.PORT_TO_LABEL_SPACE_X ); m_auxRect = m_outputPorts[ i ].LabelPosition; m_auxRect.x -= deltaX; m_outputPorts[ i ].LabelPosition = m_auxRect; m_outputPorts[ i ].LabelSize = m_outputPorts[ i ].UnscaledLabelSize * drawInfo.InvertedZoom; m_lastOutputBottomLeft.x = Mathf.Min( m_lastOutputBottomLeft.x, initialX - m_outputPorts[ i ].UnscaledLabelSize.x - Constants.PORT_INITIAL_X - 
Constants.PORT_TO_LABEL_SPACE_X - UIUtils.PortsSize.x ); if( !m_outputPorts[ i ].Locked ) { float overflow = 2; float scaledOverflow = 4 * drawInfo.InvertedZoom; m_auxRect = m_currOutputPortPos; m_auxRect.yMin -= scaledOverflow + overflow; m_auxRect.yMax += scaledOverflow + overflow; if( m_containerGraph.ParentWindow.WireReferenceUtils.InputPortReference.IsValid ) m_auxRect.xMin -= m_outputPorts[ i ].LabelSize.x + Constants.PORT_TO_LABEL_SPACE_X * drawInfo.InvertedZoom + scaledOverflow + overflow; else m_auxRect.xMin -= Constants.PORT_TO_LABEL_SPACE_X * drawInfo.InvertedZoom + scaledOverflow + overflow; m_auxRect.xMax += Constants.PORT_INITIAL_X * drawInfo.InvertedZoom + scaledOverflow + overflow; m_outputPorts[ i ].ActivePortArea = m_auxRect; } m_currOutputPortPos.y += drawInfo.InvertedZoom * ( m_fontHeight + Constants.INPUT_PORT_DELTA_Y ); //GUI.Label( m_outputPorts[ i ].ActivePortArea, string.Empty, UIUtils.Box ); } } if( m_visibleOutputs > 0 ) m_lastOutputBottomLeft.y += m_fontHeight * m_visibleOutputs + Constants.INPUT_PORT_DELTA_Y * ( m_visibleOutputs - 1 ); } m_lastInputBottomRight.x += m_marginPreviewLeft; //Vector2 scaledLastOutputBottomLeft = ( m_lastOutputBottomLeft + drawInfo.CameraOffset ) * drawInfo.InvertedZoom; //GUI.Label( new Rect( scaledLastOutputBottomLeft, Vector2.one * 2 ), string.Empty, UIUtils.CurrentWindow.CustomStylesInstance.Box ); m_unscaledRemainingBox.xMin = m_lastInputBottomRight.x; //m_unscaledRemainingBox.yMin = m_lastInputBottomRight.y; m_unscaledRemainingBox.xMax = m_lastOutputBottomLeft.x; m_unscaledRemainingBox.yMax = Mathf.Max( m_lastOutputBottomLeft.y, m_lastInputBottomRight.y ); m_remainingBox.position = ( m_unscaledRemainingBox.position + drawInfo.CameraOffset ) * drawInfo.InvertedZoom; m_remainingBox.size = m_unscaledRemainingBox.size * drawInfo.InvertedZoom; //GUI.Label( m_remainingBox, string.Empty, UIUtils.Box ); if( m_visibleInputs == 0 ) { m_remainingBox.x += Constants.PORT_INITIAL_X * drawInfo.InvertedZoom; 
m_remainingBox.width -= Constants.PORT_INITIAL_X * drawInfo.InvertedZoom; } if( m_visibleOutputs == 0 ) { m_remainingBox.width -= Constants.PORT_INITIAL_X * drawInfo.InvertedZoom; } if( ContainerGraph.ParentWindow.GlobalPreview != m_globalShowPreview ) { m_globalShowPreview = ContainerGraph.ParentWindow.GlobalPreview; m_sizeIsDirty = true; ContainerGraph.ParentWindow.RequestRepaint(); } // Generate Proper Preview Rect float marginAround = 10; float scaledMarginAround = marginAround * drawInfo.InvertedZoom; float previewSize = 128; PreviewLocation m_autoLocation = m_selectedLocation; if( m_selectedLocation == PreviewLocation.Auto ) { if( m_visibleOutputs > m_visibleInputs ) { m_autoLocation = PreviewLocation.Left; } else if( m_visibleOutputs < m_visibleInputs ) { m_autoLocation = PreviewLocation.Right; } else if( m_unscaledRemainingBox.width > previewSize ) { m_autoLocation = PreviewLocation.TopCenter; } else { m_autoLocation = PreviewLocation.BottomCenter; } } if( m_canExpand && ( m_showPreview || m_globalShowPreview ) ) { if( m_autoLocation == PreviewLocation.TopCenter ) { m_unscaledPreviewRect.y = m_unscaledRemainingBox.y; m_unscaledPreviewRect.x = m_unscaledRemainingBox.center.x - 0.5f * ( previewSize + 2 * marginAround ); } else if( m_autoLocation == PreviewLocation.BottomCenter ) { m_unscaledPreviewRect.y = Mathf.Max( m_lastOutputBottomLeft.y, m_lastInputBottomRight.y ); m_unscaledPreviewRect.x = m_position.x + 0.5f * m_position.width - 0.5f * ( previewSize + 2 * marginAround ); } else if( m_autoLocation == PreviewLocation.Left ) { m_unscaledPreviewRect.y = m_lastInputBottomRight.y; m_unscaledPreviewRect.x = m_position.x; } else if( m_autoLocation == PreviewLocation.Right ) { m_unscaledPreviewRect.y = m_lastOutputBottomLeft.y; m_unscaledPreviewRect.x = m_lastInputBottomRight.x; } if( m_autoLocation == PreviewLocation.BottomCenter ) m_unscaledPreviewRect.height = previewSize + 2 * marginAround; else if( m_autoLocation == PreviewLocation.TopCenter ) 
m_unscaledPreviewRect.height = previewSize + marginAround; else m_unscaledPreviewRect.height = previewSize + ( m_visibleInputs > 0 && m_visibleOutputs > 0 ? 2 * marginAround : marginAround ); m_unscaledPreviewRect.width = previewSize + 2 * marginAround; m_previewRect = m_unscaledPreviewRect; m_previewRect.position = ( m_previewRect.position + drawInfo.CameraOffset ) * drawInfo.InvertedZoom; m_auxVector2.Set( previewSize * drawInfo.InvertedZoom, previewSize * drawInfo.InvertedZoom ); m_previewRect.size = m_auxVector2; if( m_autoLocation == PreviewLocation.BottomCenter ) { m_auxVector2.Set( m_previewRect.position.x + scaledMarginAround, m_previewRect.position.y + scaledMarginAround ); m_previewRect.position = m_auxVector2; } else if( m_autoLocation == PreviewLocation.TopCenter ) { m_auxVector2.Set( m_previewRect.position.x + scaledMarginAround, m_previewRect.position.y ); m_previewRect.position = m_auxVector2; } else { m_previewRect.position += new Vector2( scaledMarginAround, ( m_visibleInputs > 0 && m_visibleOutputs > 0 ? 
scaledMarginAround : 0 ) ); } } // Adjust node rect after preview if( m_firstPreviewDraw ) { m_firstPreviewDraw = false; ContainerGraph.ParentWindow.RequestRepaint(); if( m_canExpand && ( m_showPreview || m_globalShowPreview ) ) { if( m_autoLocation == PreviewLocation.TopCenter ) { float fillWidth = m_unscaledRemainingBox.width - m_unscaledPreviewRect.width; m_extraSize.x = Mathf.Max( -fillWidth, 0 ); float fillHeight = m_position.yMax - m_unscaledPreviewRect.yMax; m_extraSize.y = Mathf.Max( -fillHeight, 0 ); } if( m_autoLocation == PreviewLocation.BottomCenter ) { float fillWidth = m_position.width - m_unscaledPreviewRect.width; m_extraSize.x = Mathf.Max( -fillWidth, 0 ); float fillHeight = m_position.yMax - m_unscaledPreviewRect.yMax; m_extraSize.y = Mathf.Max( -fillHeight, 0 ); } else if( m_autoLocation == PreviewLocation.Left ) { float fillWidth = m_lastOutputBottomLeft.x - m_unscaledPreviewRect.xMax; m_extraSize.x = Mathf.Max( -fillWidth, 0 ); float fillHeight = m_position.yMax - m_unscaledPreviewRect.yMax; m_extraSize.y = Mathf.Max( -fillHeight, 0 ); } else if( m_autoLocation == PreviewLocation.Right ) { float fillWidth = m_position.xMax - m_unscaledPreviewRect.xMax; m_extraSize.x = Mathf.Max( -fillWidth, 0 ); float fillHeight = m_position.yMax - m_unscaledPreviewRect.yMax; m_extraSize.y = Mathf.Max( -fillHeight, 0 ); } if( m_showErrorMessage ) m_extraSize.y += 24; } else if( m_canExpand ) { m_extraSize.y = 0; m_extraSize.x = 0; } m_position.width = m_unpreviewedPosition.width + m_extraSize.x; m_position.height = m_unpreviewedPosition.height + m_extraSize.y; } if( m_showErrorMessage ) { m_errorBox = m_globalPosition; m_errorBox.y = ( m_globalPosition.yMax - 28 * drawInfo.InvertedZoom ) + 3 * drawInfo.InvertedZoom; m_errorBox.height = 25 * drawInfo.InvertedZoom; } m_previousErrorMessage = m_showErrorMessage; } /// <summary> /// This method should only be called to draw elements, runs once per frame and after wires are drawn /// </summary> /// <param 
name="drawInfo"></param>
public virtual void OnNodeRepaint( DrawInfo drawInfo )
{
	if( !m_isVisible )
		return;

	// Save the ambient GUI color once; every tinted draw below restores from this buffer.
	m_colorBuffer = GUI.color;

	// Background
	// Body is tinted red-ish when an infinite loop was detected on this node's graph.
	GUI.color = m_infiniteLoopDetected ? Constants.InfiniteLoopColor : Constants.NodeBodyColor;
	// LOD2+ (zoomed out) and square-title nodes use the cheaper square style.
	if( m_useSquareNodeTitle || ContainerGraph.LodLevel >= ParentGraph.NodeLOD.LOD2 )
		GUI.Label( m_globalPosition, string.Empty, UIUtils.NodeWindowOffSquare );
	else
		GUI.Label( m_globalPosition, string.Empty, UIUtils.GetCustomStyle( CustomStyle.NodeWindowOff ) );

	// Header
	//GUI
	GUI.color = m_headerColor * m_headerColorModifier;
	if( m_useSquareNodeTitle || ContainerGraph.LodLevel >= ParentGraph.NodeLOD.LOD2 )
		GUI.Label( m_headerPosition, string.Empty, UIUtils.NodeHeaderSquare );
	else
		GUI.Label( m_headerPosition, string.Empty, UIUtils.GetCustomStyle( CustomStyle.NodeHeader ) );
	GUI.color = m_colorBuffer;

	// Title
	DrawTitle( m_titlePos );

	// Additional Title (subtitle line under the main title; hidden past LOD3)
	if( m_hasSubtitle && ContainerGraph.LodLevel <= ParentGraph.NodeLOD.LOD3 )
		GUI.Label( m_addTitlePos, m_additionalContent, UIUtils.GetCustomStyle( CustomStyle.PropertyValuesTitle ) );

	// Dropdown (left header popup; hidden while it is being edited)
	if( m_hasLeftDropdown && !m_dropdownEditing && ContainerGraph.LodLevel <= ParentGraph.NodeLOD.LOD4 )
		GUI.Label( m_dropdownRect, string.Empty, UIUtils.PropertyPopUp );

	// Expander (preview show/hide toggle on the header)
	if( m_drawPreviewExpander && ContainerGraph.LodLevel <= ParentGraph.NodeLOD.LOD4 )
		GUI.Label( m_expandRect, string.Empty, ( m_showPreview ? UIUtils.PreviewCollapser : UIUtils.PreviewExpander ) );

	// Input Ports
	int inputCount = m_inputPorts.Count;
	for( int i = 0; i < inputCount; i++ )
	{
		if( m_inputPorts[ i ].Visible )
		{
			// Input Port Icon
			if( ContainerGraph.LodLevel <= ParentGraph.NodeLOD.LOD4 )
			{
				// Tint priority: locked > global colored-ports option > per-port custom color > data-type color.
				if( m_inputPorts[ i ].Locked )
					GUI.color = Constants.LockedPortColor;
				else if( ContainerGraph.ParentWindow.Options.ColoredPorts )
					GUI.color = UIUtils.GetColorForDataType( m_inputPorts[ i ].DataType, false, true );
				else
					GUI.color = m_inputPorts[ i ].HasCustomColor ? m_inputPorts[ i ].CustomColor : UIUtils.GetColorForDataType( m_inputPorts[ i ].DataType, true, true );

				// Full icon when a wire is attached, empty otherwise.
				GUIStyle style = m_inputPorts[ i ].IsConnected ? UIUtils.GetCustomStyle( CustomStyle.PortFullIcon ) : UIUtils.GetCustomStyle( CustomStyle.PortEmptyIcon );
				GUI.Label( m_inputPorts[ i ].Position, string.Empty, style );
				GUI.color = m_colorBuffer;
			}

			// Input Port Label
			if( ContainerGraph.LodLevel <= ParentGraph.NodeLOD.LOD3 )
			{
				if( m_inputPorts[ i ].Locked )
				{
					GUI.color = Constants.PortLockedTextColor;
					GUI.Label( m_inputPorts[ i ].LabelPosition, m_inputPorts[ i ].Name, UIUtils.InputPortLabel );
					GUI.color = m_colorBuffer;
				}
				else
				{
					// Optionally render the port's internal (default) value in a small box to the
					// left of the node, but only for unconnected scalar-ish ports
					// (FLOAT..INT range or UINT) whose formatted value is long enough to matter.
					if( m_containerGraph.ParentWindow.GlobalShowInternalData && !m_inputPorts[ i ].IsConnected && UIUtils.InternalDataOnPort.fontSize > 1f && ( m_inputPorts[ i ].AutoDrawInternalData || ( m_autoDrawInternalPortData && m_useInternalPortData ) ) && m_inputPorts[ i ].DisplayInternalData.Length > 4 && ( ( m_inputPorts[ i ].DataType >= WirePortDataType.FLOAT && m_inputPorts[ i ].DataType <= WirePortDataType.INT ) || m_inputPorts[ i ].DataType == WirePortDataType.UINT ) )
					{
						GUI.color = Constants.NodeBodyColor/* * new Color( 1f, 1f, 1f, 0.75f )*/;
						Rect internalBox = m_inputPorts[ i ].LabelPosition;
						m_sizeContentAux.text = m_inputPorts[ i ].DisplayInternalData;
						Vector2 portText = UIUtils.InternalDataOnPort.CalcSize( m_sizeContentAux );
						internalBox.width = portText.x;
						internalBox.height = portText.y;
						// Center the value box vertically on the label and push it outside the node's left edge.
						internalBox.y = m_inputPorts[ i ].LabelPosition.center.y - internalBox.height * 0.5f;
						internalBox.x = GlobalPosition.x - internalBox.width - 4 * drawInfo.InvertedZoom - 1;
						// Background box is padded a few zoom-scaled pixels around the text.
						Rect backBox = new Rect( internalBox );
						backBox.xMin -= 4 * drawInfo.InvertedZoom;
						backBox.xMax += 4 * drawInfo.InvertedZoom;
						backBox.yMin -= 2 * drawInfo.InvertedZoom;
						backBox.yMax += 2 * drawInfo.InvertedZoom;
						GUI.Label( backBox, string.Empty, UIUtils.InternalDataBackground );
						GUI.color *= new Color( 1f, 1f, 1f, 0.5f );
						GUI.Label( internalBox, m_sizeContentAux, UIUtils.InternalDataOnPort );
						GUI.color = m_colorBuffer;
					}
					GUI.Label( m_inputPorts[ i ].LabelPosition, m_inputPorts[ i ].Name, UIUtils.InputPortLabel );
				}
			}
		}
	}

	// Output Ports
	int outputCount = m_outputPorts.Count;
	for( int i = 0; i < outputCount; i++ )
	{
		if( m_outputPorts[ i ].Visible )
		{
			// Output Port Icon
			if( ContainerGraph.LodLevel <= ParentGraph.NodeLOD.LOD4 )
			{
				// Same tint priority as input ports; last bool flags input(true)/output(false) side.
				if( m_outputPorts[ i ].Locked )
					GUI.color = Constants.LockedPortColor;
				else if( ContainerGraph.ParentWindow.Options.ColoredPorts )
					GUI.color = UIUtils.GetColorForDataType( m_outputPorts[ i ].DataType, false, false );
				else
					GUI.color = m_outputPorts[ i ].HasCustomColor ? m_outputPorts[ i ].CustomColor : UIUtils.GetColorForDataType( m_outputPorts[ i ].DataType, true, false );

				GUIStyle style = m_outputPorts[ i ].IsConnected ? UIUtils.GetCustomStyle( CustomStyle.PortFullIcon ) : UIUtils.GetCustomStyle( CustomStyle.PortEmptyIcon );
				GUI.Label( m_outputPorts[ i ].Position, string.Empty, style );
				GUI.color = m_colorBuffer;
			}

			// Output Port Label
			if( ContainerGraph.LodLevel <= ParentGraph.NodeLOD.LOD3 )
			{
				if( m_outputPorts[ i ].Locked )
				{
					GUI.color = Constants.PortLockedTextColor;
					GUI.Label( m_outputPorts[ i ].LabelPosition, m_outputPorts[ i ].Name, UIUtils.OutputPortLabel );
					GUI.color = m_colorBuffer;
				}
				else
				{
					GUI.Label( m_outputPorts[ i ].LabelPosition, m_outputPorts[ i ].Name, UIUtils.OutputPortLabel );
				}
			}
		}
	}

	// Preview
	if( ( m_showPreview || m_globalShowPreview ) && m_drawPreview )
		DrawPreview( drawInfo, m_previewRect );

	// Error and Warning bottom message
	if( m_showErrorMessage )
	{
		// Semi-transparent black strip behind the message.
		GUI.color = new Color( 0.0f, 0.0f, 0.0f, 0.5f );
		GUI.Label( m_errorBox, string.Empty, UIUtils.Separator );
		GUI.color = m_colorBuffer;

		// Pick title text, icon and style per message severity; default falls through to Error.
		switch( m_errorMessageTypeIsError )
		{
			default:
			case NodeMessageType.Error:
			{
				m_errorMessage.text = ErrorTitle;
				m_errorIcon.image = UIUtils.SmallErrorIcon;
				m_errorCurrentStyle = UIUtils.BoldErrorStyle;
			}
			break;
			case NodeMessageType.Warning:
			{
				m_errorMessage.text = WarningTitle;
				m_errorIcon.image = UIUtils.SmallWarningIcon;
				m_errorCurrentStyle = UIUtils.BoldWarningStyle;
			}
			break;
			case NodeMessageType.Info:
			{
				m_errorMessage.text = InfoTitle;
				m_errorIcon.image = UIUtils.SmallInfoIcon;
				m_errorCurrentStyle = UIUtils.BoldInfoStyle;
			}
			break;
		}

		Rect textBox = m_errorBox;
		textBox.y += 1 * drawInfo.InvertedZoom;
		textBox.height = 24 * drawInfo.InvertedZoom;
		float textWidth = m_errorCurrentStyle.CalcSize( m_errorMessage ).x;
		GUI.Label( textBox, m_errorMessage, m_errorCurrentStyle );
		// Icon is drawn twice: once to the left of the centered text, once to the right.
		textBox.x -= textWidth * 0.5f + 12 * drawInfo.InvertedZoom;
		GUI.Label( textBox, m_errorIcon, m_errorCurrentStyle );
		textBox.x += textWidth + 24 * drawInfo.InvertedZoom;
		GUI.Label( textBox, m_errorIcon, m_errorCurrentStyle );
	}

	// Selection Box (highlight outline drawn on top when the node is selected)
	if( m_selected )
	{
		GUI.color = Constants.NodeSelectedColor;
		if( m_useSquareNodeTitle || ContainerGraph.LodLevel >= ParentGraph.NodeLOD.LOD2 )
			GUI.Label( m_globalPosition, string.Empty, UIUtils.NodeWindowOnSquare );
		else
			GUI.Label( m_globalPosition, string.Empty, UIUtils.GetCustomStyle( CustomStyle.NodeWindowOn ) );
		GUI.color = m_colorBuffer;
	}

	// Debug Visualizers
	//if( FinishPreviewRender || m_continuousPreviewRefresh )
	//{
	//	GUI.color = new Color( 0, 1, 0.5f, 1f );
	//	Rect r = m_globalPosition;
	//	r.width = 8;
	//	r.height = 8;
	//	r.x -= 5 * drawInfo.InvertedZoom;
	//	r.y -= 5 * drawInfo.InvertedZoom;
	//	GUI.Label( r, string.Empty, UIUtils.GetCustomStyle( CustomStyle.PortFullIcon ) );
	//	GUI.color = m_colorBuffer;
	//	FinishPreviewRender = false;
	//}
	//GUI.Label( m_remainingBox, string.Empty, UIUtils.Box );
}

/// <summary>
/// True while this node's left header dropdown is open for editing; setting it also marks the preview dirty.
/// </summary>
public bool DropdownEditing
{
	get { return m_dropdownEditing; }
	set { m_dropdownEditing = value; PreviewIsDirty = true; }
}

/// <summary>
/// Hides this node's preview and flags the node size for recalculation on the next layout pass.
/// </summary>
public void DisablePreview()
{
	m_showPreview = false;
	m_sizeIsDirty = true;
}

/// <summary>
/// Handles gui controls, runs before node layout
/// </summary>
/// <param name="drawInfo"></param>
public virtual void DrawGUIControls( DrawInfo drawInfo )
{
	if( !m_initialized )
		return;

	if( !m_isVisible )
		return;

	if( drawInfo.CurrentEventType == EventType.MouseDown && drawInfo.LeftMouseButtonPressed )
	{
		// Click on the expander toggles the preview and triggers a resize.
		if( m_expandRect.Contains( drawInfo.MousePosition ) )
		{
			m_showPreview = !m_showPreview;
			m_sizeIsDirty = true;
			ContainerGraph.ParentWindow.MouseInteracted = true;
		}

		// Click on the dropdown opens it; any other click while open closes it
		// (via the property so PreviewIsDirty is also set).
		if( m_hasLeftDropdown && m_dropdownRect.Contains( drawInfo.MousePosition ) )
		{
			m_dropdownEditing = true;
		}
		else if( m_dropdownEditing )
		{
			DropdownEditing = false;
		}
	}

	DrawGuiPorts( drawInfo );
}

//public static bool MyRepeatButton( DrawInfo drawInfo, Rect position, string text, GUIStyle style )
//{
//	if(/* drawInfo.CurrentEventType == EventType.MouseDown &&*/ position.Contains( drawInfo.MousePosition ) )
//	{
//		UIUtils.CurrentWindow.MouseInteracted = true;
//		return true;
//	}
//	return false;
//}

/// <summary>
/// Handles mouse-down interaction with this node's input/output port hotspots:
/// starts wire drags, handles double-click disconnect on inputs, and lays out preview mask buttons.
/// </summary>
/// <param name="drawInfo"></param>
public void DrawGuiPorts( DrawInfo drawInfo )
{
	if( !m_initialized )
		return;

	if( !m_isVisible )
		return;

	if( drawInfo.CurrentEventType == EventType.MouseDown )
	{
		int inputCount = m_inputPorts.Count;
		int outputCount = m_outputPorts.Count;

		for( int i = 0; i < inputCount; i++ )
		{
			if( m_inputPorts[ i ].Visible && !m_inputPorts[ i ].Locked && m_isVisible && m_inputPorts[ i ].ActivePortArea.Contains( drawInfo.MousePosition ) && drawInfo.LeftMouseButtonPressed )
			{
				UIUtils.CurrentWindow.MouseInteracted = true;
				m_inputPorts[ i ].Click();
				// need to put the mouse button on a hot state so it will detect the Mouse Up event correctly on the Editor Window
				int controlID = GUIUtility.GetControlID( FocusType.Passive );
				//int controlID = GUIUtility.GetControlID( "repeatButton".GetHashCode(), FocusType.Passive, m_inputPorts[ i ].ActivePortArea );
				GUIUtility.hotControl = controlID;
				bool saveReference = true;
				if( m_inputPorts[ i ].IsConnected )
				{
					// Double-click on a connected input deletes its wire(s);
					// a single click (without a handled modifier) picks the wire up instead.
					double doubleTapTime = EditorApplication.timeSinceStartup;
					bool doubleTap = ( doubleTapTime - m_doubleClickTimestamp ) < DoubleClickTime;
					m_doubleClickTimestamp = doubleTapTime;

					if( doubleTap )
					{
						m_containerGraph.DeleteConnection( true, UniqueId, m_inputPorts[ i ].PortId, true, true );
						Event.current.Use();
					}
					else
					//if ( AppyModifierToPort( _inputPorts[ i ], true ) )
					//{
					//saveReference = false;
					//}
					if( !ApplyModifierToPort( m_inputPorts[ i ], true ) )
					{
						UIUtils.ShowContextOnPick = false;
						PickInput( m_inputPorts[ i ] );
					}
					// A connected input never becomes the drag source reference.
					saveReference = false;
				}

				if( saveReference && !m_containerGraph.ParentWindow.WireReferenceUtils.InputPortReference.IsValid )
				//if ( !modifierApplied && !UIUtils.InputPortReference.IsValid )
				{
					m_containerGraph.ParentWindow.WireReferenceUtils.SetInputReference( m_uniqueId, m_inputPorts[ i ].PortId, m_inputPorts[ i ].DataType, m_inputPorts[ i ].TypeLocked );
				}

				IsDirty = true;
				// Click handlers may add/remove ports, so refresh the loop bound.
				inputCount = m_inputPorts.Count;
			}
		}

		for( int i = 0; i < outputCount; i++ )
		{
			if( m_outputPorts[ i ].Visible && m_outputPorts[ i ].ActivePortArea.Contains( drawInfo.MousePosition ) && drawInfo.LeftMouseButtonPressed )
			{
				UIUtils.CurrentWindow.MouseInteracted = true;
				m_outputPorts[ i ].Click();
				// need to put the mouse button on a hot state so it will detect the Mouse Up event correctly on the Editor Window
				int controlID = GUIUtility.GetControlID( FocusType.Passive );
				//int controlID = GUIUtility.GetControlID( "aseRepeatButton".GetHashCode(), FocusType.Passive, m_outputPorts[ i ].ActivePortArea );
				GUIUtility.hotControl = controlID;
				bool saveReference = true;
				if( m_outputPorts[ i ].IsConnected )
				{
					if( ApplyModifierToPort( m_outputPorts[ i ], false ) )
					{
						saveReference = false;
					}
				}

				if( saveReference && !m_containerGraph.ParentWindow.WireReferenceUtils.OutputPortReference.IsValid )
				{
					m_containerGraph.ParentWindow.WireReferenceUtils.SetOutputReference( m_uniqueId, m_outputPorts[ i ].PortId, m_outputPorts[ i ].DataType, false );
				}

				IsDirty = true;
				// Click handlers may add/remove ports, so refresh the loop bound.
				outputCount = m_outputPorts.Count;
			}
		}
	}

	//Preview buttons
	if( m_drawPreviewMaskButtons && ( drawInfo.CurrentEventType == EventType.MouseDown || drawInfo.CurrentEventType == EventType.MouseUp ) )
		DrawPreviewMaskButtonsLayout( drawInfo, m_previewRect );
}

/// <summary>
/// Can be used to draw an entire node, runs after wires
/// </summary> /// <param name="drawInfo"></param> public virtual void Draw( DrawInfo drawInfo ) { if( !m_initialized ) return; if( drawInfo.CurrentEventType == EventType.Repaint ) OnNodeRepaint( drawInfo ); } public virtual void SetPreviewInputs() { if( !HasPreviewShader || !m_initialized ) return; int count = m_inputPorts.Count; for( int i = 0; i < count; i++ ) { if( m_inputPorts[ i ].IsConnected && m_inputPorts[ i ].InputNodeHasPreview( ContainerGraph ) ) { m_inputPorts[ i ].SetPreviewInputTexture( ContainerGraph ); } else { m_inputPorts[ i ].SetPreviewInputValue( ContainerGraph ); } } } public bool SafeDraw( DrawInfo drawInfo ) { EditorGUI.BeginChangeCheck(); Draw( drawInfo ); if( EditorGUI.EndChangeCheck() ) { SaveIsDirty = true; return true; } return false; } public bool ShowTooltip( DrawInfo drawInfo ) { if( string.IsNullOrEmpty( m_tooltipText ) ) return false; if( m_globalPosition.Contains( drawInfo.MousePosition ) || m_linkVisibility ) { if( m_tooltipTimestamp + 0.6f < Time.realtimeSinceStartup || m_linkVisibility ) { bool errorTooltip = false; if( m_showErrorMessage && m_errorBox.Contains( drawInfo.MousePosition ) && !string.IsNullOrEmpty( m_errorMessageTooltip ) ) errorTooltip = true; Rect globalTooltipPos = m_globalPosition; GUIContent temp = new GUIContent( errorTooltip ? 
m_errorMessageTooltip : m_tooltipText ); UIUtils.TooltipBox.wordWrap = false; Vector2 optimal = UIUtils.TooltipBox.CalcSize( temp ); if( optimal.x > 300f ) { UIUtils.TooltipBox.wordWrap = true; optimal.x = 300f; optimal.y = UIUtils.TooltipBox.CalcHeight( temp, 300f ); } globalTooltipPos.width = Mathf.Max( 120, optimal.x ); globalTooltipPos.height = optimal.y; globalTooltipPos.center = m_globalPosition.center; if( !errorTooltip && m_hasTooltipLink ) globalTooltipPos.height += 16; if( errorTooltip ) globalTooltipPos.y = 10 + m_globalPosition.yMax; else globalTooltipPos.y = m_globalPosition.yMin - 10 - globalTooltipPos.height; if ( globalTooltipPos.x < 10 ) globalTooltipPos.x = 10; if( globalTooltipPos.x + globalTooltipPos.width > Screen.width - 10 ) globalTooltipPos.x = Screen.width - globalTooltipPos.width - 10; //UNCOMMENT this for auto adjust tooltip to the top window box //if( globalTooltipPos.y < 40 ) // globalTooltipPos.y = 40; if( errorTooltip && globalTooltipPos.y + globalTooltipPos.height > Screen.height - 32 ) globalTooltipPos.y = Screen.height - 32 - globalTooltipPos.height; GUI.Label( globalTooltipPos, temp, UIUtils.TooltipBox ); if( !errorTooltip && m_hasTooltipLink ) { Rect link = globalTooltipPos; link.y = globalTooltipPos.yMax - 16; link.height = 16; link.width = 86; link.x = globalTooltipPos.center.x - 43; Rect hover = globalTooltipPos; hover.yMax += 15;// m_globalPosition.yMax; m_linkVisibility = hover.Contains( drawInfo.MousePosition ); if( link.Contains( drawInfo.MousePosition ) ) { if( drawInfo.CurrentEventType == EventType.MouseDown ) { if( m_tooltipTimestamp + 1.25f < Time.realtimeSinceStartup ) { Application.OpenURL( Attributes.NodeUrl ); } } else { UIUtils.MainSkin.customStyles[ 52 ].Draw( link, WikiLinkStr, true, false, false, false ); } } else { GUI.Label( link, WikiLinkStr, UIUtils.MainSkin.customStyles[ 52 ] ); } } ContainerGraph.ParentWindow.RequestRepaint(); return true; } } else { if( !m_linkVisibility ) m_tooltipTimestamp = 
Time.realtimeSinceStartup; } return false; } public virtual bool SafeDrawProperties() { EditorGUI.BeginChangeCheck(); PreDrawProperties(); if( m_autoWrapProperties ) { NodeUtils.DrawPropertyGroup( ref m_propertiesFoldout, Constants.ParameterLabelStr, DrawProperties ); } else { DrawProperties(); } if( EditorGUI.EndChangeCheck() ) { PreviewIsDirty = true; //UIUtils.RecordObject(this); //MarkForPreviewUpdate(); return true; } return false; } public void PreDrawProperties() { if( m_useInternalPortData && m_autoDrawInternalPortData ) { DrawInternalDataGroup(); } } virtual public void DrawProperties() { } protected void DrawInternalDataGroup() { bool drawInternalDataUI = false; int inputCount = m_inputPorts.Count; if( inputCount > 0 ) { for( int i = 0; i < inputCount; i++ ) { if( m_inputPorts[ i ].Available && m_inputPorts[ i ].ValidInternalData && !m_inputPorts[ i ].IsConnected /*&& ( m_inputPorts[ i ].AutoDrawInternalData || ( m_autoDrawInternalPortData && m_useInternalPortData ) )*/ /*&& m_inputPorts[ i ].AutoDrawInternalData*/ ) { drawInternalDataUI = true; break; } } } if( drawInternalDataUI ) NodeUtils.DrawPropertyGroup( ref m_internalDataFoldout, Constants.InternalDataLabelStr, () => { for( int i = 0; i < m_inputPorts.Count; i++ ) { if( m_inputPorts[ i ].ValidInternalData && !m_inputPorts[ i ].IsConnected && m_inputPorts[ i ].Visible /*&& m_inputPorts[ i ].AutoDrawInternalData*/ ) { m_inputPorts[ i ].ShowInternalData( this ); } } } ); } protected void PickInput( InputPort port ) { WireReference connection = port.GetConnection( 0 ); OutputPort from = port.GetOutputConnection( 0 ); m_containerGraph.ParentWindow.WireReferenceUtils.OutputPortReference.SetReference( from.NodeId, from.PortId, from.DataType, connection.TypeLocked ); m_containerGraph.DeleteConnection( true, UniqueId, port.PortId, true, true ); //TODO: check if not necessary Event.current.Use(); IsDirty = true; SetSaveIsDirty(); } protected bool ApplyModifierToPort( WirePort port, bool isInput ) { bool 
modifierApplied = false; switch( Event.current.modifiers ) { case EventModifiers.Alt: { m_containerGraph.DeleteConnection( isInput, UniqueId, port.PortId, true, true ); modifierApplied = true; m_containerGraph.ParentWindow.InvalidateAlt(); } break; case EventModifiers.Control: { //WireReference connection = port.GetConnection( 0 ); //if ( isInput ) //{ // UIUtils.OutputPortReference.SetReference( connection.NodeId, connection.PortId, connection.DataType, connection.TypeLocked ); //} //else //{ // UIUtils.InputPortReference.SetReference( connection.NodeId, connection.PortId, connection.DataType, connection.TypeLocked ); //} //UIUtils.DeleteConnection( isInput, UniqueId, port.PortId, true ); //modifierApplied = true; if( !isInput ) { WireReference connection = port.GetConnection( 0 ); m_containerGraph.ParentWindow.WireReferenceUtils.InputPortReference.SetReference( connection.NodeId, connection.PortId, connection.DataType, connection.TypeLocked ); m_containerGraph.DeleteConnection( isInput, UniqueId, port.PortId, true, true ); modifierApplied = true; } } break; } if( isInput ) m_containerGraph.ParentWindow.WireReferenceUtils.SwitchPortReference.SetReference( port.NodeId, port.PortId, port.DataType, false ); //always save last connection else m_containerGraph.ParentWindow.WireReferenceUtils.SwitchPortReference.SetReference( -1, -1, WirePortDataType.OBJECT, false ); //invalidate connection if( modifierApplied ) { Event.current.Use(); IsDirty = true; SetSaveIsDirty(); } return modifierApplied; } public void DeleteAllInputConnections( bool alsoDeletePorts , bool inhibitWireNodeAutoDel = false ) { int count = m_inputPorts.Count; for( int i = 0; i < count; i++ ) { if( m_inputPorts[ i ].IsConnected ) { ParentNode connNode = null; if( inhibitWireNodeAutoDel ) { connNode = m_inputPorts[ i ].GetOutputNode(); connNode.Alive = false; } m_containerGraph.DeleteConnection( true, UniqueId, m_inputPorts[ i ].PortId, false, true ); if( inhibitWireNodeAutoDel ) { connNode.Alive = true; 
} } } if( alsoDeletePorts ) { m_inputPorts.Clear(); m_inputPortsDict.Clear(); } SetSaveIsDirty(); } public void DeleteAllOutputConnections( bool alsoDeletePorts ) { int count = m_outputPorts.Count; for( int i = 0; i < count; i++ ) { if( m_outputPorts[ i ].IsConnected ) m_containerGraph.DeleteConnection( false, UniqueId, m_outputPorts[ i ].PortId, false, true ); } if( alsoDeletePorts ) { m_outputPorts.Clear(); m_outputPortsDict.Clear(); } SetSaveIsDirty(); } public void DeleteInputPortByArrayIdx( int arrayIdx ) { if( arrayIdx >= m_inputPorts.Count ) return; m_containerGraph.DeleteConnection( true, UniqueId, m_inputPorts[ arrayIdx ].PortId, false, true ); m_inputPortsDict.Remove( m_inputPorts[ arrayIdx ].PortId ); m_inputPorts.RemoveAt( arrayIdx ); m_sizeIsDirty = true; SetSaveIsDirty(); RecalculateInputPortIdx(); } public void DeleteOutputPortByArrayIdx( int portIdx ) { if( portIdx >= m_outputPorts.Count ) return; m_containerGraph.DeleteConnection( false, UniqueId, m_outputPorts[ portIdx ].PortId, false, true ); m_outputPortsDict.Remove( m_outputPorts[ portIdx ].PortId ); m_outputPorts.RemoveAt( portIdx ); m_sizeIsDirty = true; } public InputPort GetInputPortByArrayId( int id ) { if( id < m_inputPorts.Count ) return m_inputPorts[ id ]; return null; } public OutputPort GetOutputPortByArrayId( int id ) { if( id < m_outputPorts.Count ) return m_outputPorts[ id ]; return null; } public InputPort GetInputPortByUniqueId( int id ) { if( m_inputPortsDict.ContainsKey( id ) ) return m_inputPortsDict[ id ]; if( m_inputPortsDict.Count != m_inputPorts.Count ) m_repopulateDictionaries = true; int inputCount = m_inputPorts.Count; for( int i = 0; i < inputCount; i++ ) { if( m_inputPorts[ i ].PortId == id ) { return m_inputPorts[ i ]; } } return null; } public OutputPort GetOutputPortByUniqueId( int id ) { if( m_outputPortsDict.ContainsKey( id ) ) return m_outputPortsDict[ id ]; if( m_outputPortsDict.Count != m_outputPorts.Count ) m_repopulateDictionaries = true; int outputCount = 
m_outputPorts.Count; for( int i = 0; i < outputCount; i++ ) { if( m_outputPorts[ i ].PortId == id ) return m_outputPorts[ i ]; } return null; } public virtual void AfterDuplication(){} public override string ToString() { string dump = ""; dump += ( "Type: " + GetType() ); dump += ( " Unique Id: " + UniqueId + "\n" ); dump += ( " Inputs: \n" ); int inputCount = m_inputPorts.Count; int outputCount = m_outputPorts.Count; for( int inputIdx = 0; inputIdx < inputCount; inputIdx++ ) { dump += ( m_inputPorts[ inputIdx ] + "\n" ); } dump += ( "Outputs: \n" ); for( int outputIdx = 0; outputIdx < outputCount; outputIdx++ ) { dump += ( m_outputPorts[ outputIdx ] + "\n" ); } return dump; } public string GetValueFromOutputStr( int outputId, WirePortDataType inputPortType, ref MasterNodeDataCollector dataCollector, bool ignoreLocalvar ) { if( ignoreLocalvar ) { return GenerateShaderForOutput( outputId, ref dataCollector, ignoreLocalvar ); } OutputPort outPort = GetOutputPortByUniqueId( outputId ); if( outPort.IsLocalValue( dataCollector.PortCategory ) ) { if( outPort.DataType != WirePortDataType.OBJECT && outPort.DataType != inputPortType ) { return UIUtils.CastPortType( ref dataCollector, CurrentPrecisionType, new NodeCastInfo( m_uniqueId, outputId ), null, outPort.DataType, inputPortType, outPort.LocalValue( dataCollector.PortCategory ) ); } else { return outPort.LocalValue( dataCollector.PortCategory ); } } string result = GenerateShaderForOutput( outputId, ref dataCollector, ignoreLocalvar ); result = CreateOutputLocalVariable( outputId, result, ref dataCollector ); if( outPort.DataType != WirePortDataType.OBJECT && outPort.DataType != inputPortType ) { result = UIUtils.CastPortType( ref dataCollector, CurrentPrecisionType, new NodeCastInfo( m_uniqueId, outputId ), null, outPort.DataType, inputPortType, result ); } return result; } public virtual string GenerateShaderForOutput( int outputId, ref MasterNodeDataCollector dataCollector, bool ignoreLocalvar ) { if( 
dataCollector.IsSRP ) { switch( dataCollector.CurrentSRPType ) { case TemplateSRPType.HD: if(OnHDAction!=null) OnHDAction( outputId, ref dataCollector ); break; case TemplateSRPType.Lightweight:if(OnLightweightAction != null) OnLightweightAction( outputId, ref dataCollector ); break; } } return string.Empty; } public string GenerateValueInVertex( ref MasterNodeDataCollector dataCollector, WirePortDataType dataType, string dataValue, string dataName, bool createInterpolator ) { if( !dataCollector.IsFragmentCategory ) return dataValue; //TEMPLATES if( dataCollector.IsTemplate ) { if( createInterpolator && dataCollector.TemplateDataCollectorInstance.HasCustomInterpolatedData( dataName ) ) return dataName; MasterNodePortCategory category = dataCollector.PortCategory; dataCollector.PortCategory = MasterNodePortCategory.Vertex; dataCollector.PortCategory = category; if( createInterpolator ) { dataCollector.TemplateDataCollectorInstance.RegisterCustomInterpolatedData( dataName, dataType, CurrentPrecisionType, dataValue ); } else { dataCollector.AddToVertexLocalVariables( -1, CurrentPrecisionType, dataType, dataName, dataValue ); } return dataName; } //SURFACE { if( dataCollector.TesselationActive ) { UIUtils.ShowMessage( UniqueId, "Unable to use Vertex to Frag when Tessellation is active" ); return m_outputPorts[ 0 ].ErrorValue; } if( createInterpolator ) dataCollector.AddToInput( UniqueId, dataName, dataType, CurrentPrecisionType ); MasterNodePortCategory portCategory = dataCollector.PortCategory; dataCollector.PortCategory = MasterNodePortCategory.Vertex; if( createInterpolator ) { dataCollector.AddLocalVariable( UniqueId, Constants.VertexShaderOutputStr + "." + dataName, dataValue + ";" ); } else { dataCollector.AddLocalVariable( UniqueId, CurrentPrecisionType, dataType, dataName, dataValue ); } dataCollector.PortCategory = portCategory; return createInterpolator ? Constants.InputVarStr + "." 
+ dataName : dataName; } } public string GenerateInputInVertex( ref MasterNodeDataCollector dataCollector , int inputPortUniqueId , string varName , bool createInterpolator , bool noInterpolationFlag = false , bool sampleFlag = false ) { InputPort inputPort = GetInputPortByUniqueId( inputPortUniqueId ); if( !dataCollector.IsFragmentCategory ) { string value = inputPort.GeneratePortInstructions( ref dataCollector ); dataCollector.AddLocalVariable( -1 , CurrentPrecisionType , inputPort.DataType , varName , value ); return varName; } //TEMPLATES if( dataCollector.IsTemplate ) { if( createInterpolator && dataCollector.TemplateDataCollectorInstance.HasCustomInterpolatedData( varName ) ) return varName; MasterNodePortCategory category = dataCollector.PortCategory; dataCollector.PortCategory = MasterNodePortCategory.Vertex; //bool dirtyVertexVarsBefore = dataCollector.DirtyVertexVariables; //ContainerGraph.ResetNodesLocalVariablesIfNot( this, MasterNodePortCategory.Vertex ); string data = inputPort.GeneratePortInstructions( ref dataCollector ); dataCollector.PortCategory = category; //if( !dirtyVertexVarsBefore && dataCollector.DirtyVertexVariables ) //{ // dataCollector.AddVertexInstruction( dataCollector.VertexLocalVariablesFromList, UniqueId, false ); // dataCollector.ClearVertexLocalVariables(); // ContainerGraph.ResetNodesLocalVariablesIfNot( this, MasterNodePortCategory.Vertex ); //} //ContainerGraph.ResetNodesLocalVariablesIfNot( this, MasterNodePortCategory.Fragment ); if( createInterpolator ) { dataCollector.TemplateDataCollectorInstance.RegisterCustomInterpolatedData( varName, inputPort.DataType, CurrentPrecisionType, data,true,MasterNodePortCategory.Fragment,noInterpolationFlag,sampleFlag ); } else { dataCollector.AddToVertexLocalVariables( -1, CurrentPrecisionType, inputPort.DataType, varName, data ); } return varName; } //SURFACE { if( dataCollector.TesselationActive ) { UIUtils.ShowMessage( UniqueId, "Unable to use Vertex to Frag when Tessellation is active" 
); return m_outputPorts[ 0 ].ErrorValue; } if( createInterpolator ) dataCollector.AddToInput( UniqueId, varName, inputPort.DataType, CurrentPrecisionType ); MasterNodePortCategory portCategory = dataCollector.PortCategory; dataCollector.PortCategory = MasterNodePortCategory.Vertex; //bool dirtyVertexVarsBefore = dataCollector.DirtyVertexVariables; //ContainerGraph.ResetNodesLocalVariablesIfNot( this, MasterNodePortCategory.Vertex ); string vertexVarValue = inputPort.GeneratePortInstructions( ref dataCollector ); if( createInterpolator ) { dataCollector.AddLocalVariable( UniqueId, Constants.VertexShaderOutputStr + "." + varName, vertexVarValue + ";" ); } else { dataCollector.AddLocalVariable( UniqueId, CurrentPrecisionType, inputPort.DataType, varName, vertexVarValue ); } dataCollector.PortCategory = portCategory; //if( !dirtyVertexVarsBefore && dataCollector.DirtyVertexVariables ) //{ // dataCollector.AddVertexInstruction( dataCollector.VertexLocalVariables, UniqueId, false ); // dataCollector.ClearVertexLocalVariables(); // ContainerGraph.ResetNodesLocalVariablesIfNot( this, MasterNodePortCategory.Vertex ); //} //ContainerGraph.ResetNodesLocalVariablesIfNot( this, MasterNodePortCategory.Fragment ); return createInterpolator ? Constants.InputVarStr + "." 
+ varName : varName; } } protected virtual void OnUniqueIDAssigned() { } public string CreateOutputLocalVariable( int outputArrayId, string value, ref MasterNodeDataCollector dataCollector ) { OutputPort port = GetOutputPortByUniqueId( outputArrayId ); if( port.IsLocalValue( dataCollector.PortCategory ) ) return port.LocalValue( dataCollector.PortCategory ); if( port.ConnectionCount > 1 ) { RegisterLocalVariable( outputArrayId, value, ref dataCollector ); return port.LocalValue( dataCollector.PortCategory ); } else { // revisit later (break to components case) port.SetLocalValue( value, dataCollector.PortCategory ); } return value; } public void RegisterLocalVariable( int outputArrayId, string value, ref MasterNodeDataCollector dataCollector, string customName = null ) { OutputPort port = GetOutputPortByUniqueId( outputArrayId ); if( (int)port.DataType >= (int)( 1 << 10 ) || port.DataType == WirePortDataType.OBJECT ) //10 is the flag start of sampler types { port.SetLocalValue( value, dataCollector.PortCategory ); return; } bool vertexMode = dataCollector.PortCategory == MasterNodePortCategory.Vertex || dataCollector.PortCategory == MasterNodePortCategory.Tessellation; string localVar = port.ConfigOutputLocalValue( CurrentPrecisionType, value, customName, dataCollector.PortCategory ); if( vertexMode ) { dataCollector.AddToVertexLocalVariables( m_uniqueId, localVar ); } else { dataCollector.AddToFragmentLocalVariables( m_uniqueId, localVar ); } } public void InvalidateConnections() { int inputCount = m_inputPorts.Count; int outputCount = m_outputPorts.Count; for( int i = 0; i < inputCount; i++ ) { m_inputPorts[ i ].InvalidateAllConnections(); } for( int i = 0; i < outputCount; i++ ) { m_outputPorts[ i ].InvalidateAllConnections(); } } public virtual bool OnClick( Vector2 currentMousePos2D ) { bool singleClick = true; if( ( EditorApplication.timeSinceStartup - m_lastTimeSelected ) < NodeClickTime ) { OnNodeDoubleClicked( currentMousePos2D ); singleClick = false; } 
m_lastTimeSelected = EditorApplication.timeSinceStartup;
return singleClick;
}

/// <summary>
/// Called when the node is double clicked; toggles the parameters window
/// between maximized and regular size.
/// </summary>
/// <param name="currentMousePos2D">Mouse position at the time of the double click.</param>
public virtual void OnNodeDoubleClicked( Vector2 currentMousePos2D )
{
	var parametersWindow = ContainerGraph.ParentWindow.ParametersWindow;
	parametersWindow.IsMaximized = !parametersWindow.IsMaximized;
}

/// <summary>
/// Selection state change notification. On deselection every port leaves its
/// inline edit mode so no stale text fields stay active on screen.
/// </summary>
/// <param name="value">True when the node became selected, false when deselected.</param>
public virtual void OnNodeSelected( bool value )
{
	if( value )
		return;

	if( m_inputPorts != null )
	{
		int inputAmount = m_inputPorts.Count;
		for( int portIdx = 0; portIdx < inputAmount; portIdx++ )
		{
			m_inputPorts[ portIdx ].ResetEditing();
		}
	}

	if( m_outputPorts != null )
	{
		int outputAmount = m_outputPorts.Count;
		for( int portIdx = 0; portIdx < outputAmount; portIdx++ )
		{
			m_outputPorts[ portIdx ].ResetEditing();
		}
	}
}

/// <summary>Clears the cached local value of every output port.</summary>
public void ResetOutputLocals()
{
	int totalOutputs = m_outputPorts.Count;
	for( int portIdx = 0; portIdx < totalOutputs; portIdx++ )
	{
		m_outputPorts[ portIdx ].ResetLocalValue();
	}
}

/// <summary>
/// Clears the cached local value of every output port whose cached value does
/// not belong to the given port category.
/// </summary>
/// <param name="category">Category whose cached values are preserved.</param>
public void ResetOutputLocalsIfNot( MasterNodePortCategory category )
{
	int totalOutputs = m_outputPorts.Count;
	for( int portIdx = 0; portIdx < totalOutputs; portIdx++ )
	{
		m_outputPorts[ portIdx ].ResetLocalValueIfNot( category );
	}
}

/// <summary>Hook for subclasses that need to redo their wiring; base implementation is a no-op.</summary>
public virtual void Rewire()
{
}

/// <summary>
/// Node identifier, unique inside the graph. Setting it also propagates the id
/// to every input and output port and then fires OnUniqueIDAssigned().
/// </summary>
public int UniqueId
{
	get { return m_uniqueId; }
	set
	{
		m_uniqueId = value;

		// Keep ports in sync: each port caches the id of the node that owns it.
		int inputAmount = m_inputPorts.Count;
		for( int inputIdx = 0; inputIdx < inputAmount; inputIdx++ )
		{
			m_inputPorts[ inputIdx ].NodeId = value;
		}

		int outputAmount = m_outputPorts.Count;
		for( int outputIdx = 0; outputIdx < outputAmount; outputIdx++ )
		{
			m_outputPorts[ outputIdx ].NodeId = value;
		}

		OnUniqueIDAssigned();
	}
}

/// <summary>
/// Sets the unique id directly, without triggering OnUniqueIDAssigned(),
/// optionally writing it into every port as well.
/// </summary>
/// <param name="uniqueId">New id for this node.</param>
/// <param name="setOnPorts">When true the id is also written into every input and output port.</param>
public void SetBaseUniqueId( int uniqueId, bool setOnPorts = false )
{
	m_uniqueId = uniqueId;
	if( !setOnPorts )
		return;

	int inputAmount = m_inputPorts.Count;
	for( int inputIdx = 0; inputIdx < inputAmount; inputIdx++ )
	{
		m_inputPorts[ inputIdx ].NodeId = uniqueId;
	}

	int outputAmount = m_outputPorts.Count;
	for( int outputIdx = 0; outputIdx < outputAmount; outputIdx++ )
	{
		m_outputPorts[ outputIdx ].NodeId = uniqueId;
	}
}

public string
OutputId
{
	get
	{
		// Nodes living inside a sub-graph append the graph id so generated
		// identifiers never collide across graphs.
		if( ContainerGraph.GraphId > 0 )
			return UniqueId + "_g" + ContainerGraph.GraphId;
		return UniqueId.ToString();
	}
}

/// <summary>Node rectangle in graph space.</summary>
public virtual Rect Position { get { return m_position; } }

/// <summary>Raw node rectangle, bypassing any override of Position.</summary>
public Rect TruePosition { get { return m_position; } }

/// <summary>Center point of the node rectangle in graph space.</summary>
public Vector2 CenterPosition
{
	get
	{
		return new Vector2( m_position.x + m_position.width * 0.5f, m_position.y + m_position.height * 0.5f );
	}
}

/// <summary>Node rectangle in screen space, refreshed while drawing.</summary>
public Rect GlobalPosition { get { return m_globalPosition; } }

/// <summary>Bottom-right corner of the node rectangle in graph space.</summary>
public Vector2 Corner
{
	get
	{
		return new Vector2( m_position.x + m_position.width, m_position.y + m_position.height );
	}
}

/// <summary>Top-left corner of the node as a 2D point.</summary>
public Vector2 Vec2Position
{
	get
	{
		return new Vector2( m_position.x, m_position.y );
	}
	set
	{
		m_position.x = value.x;
		m_position.y = value.y;
	}
}

/// <summary>Top-left corner of the node as a 3D point (z is always 0).</summary>
public Vector3 Vec3Position
{
	get
	{
		return new Vector3( m_position.x, m_position.y, 0f );
	}
	set
	{
		m_position.x = value.x;
		m_position.y = value.y;
	}
}

/// <summary>
/// Selection state. Setting it clears the infinite-loop flag and notifies the
/// node through OnNodeSelected().
/// </summary>
public bool Selected
{
	get { return m_selected; }
	set
	{
		m_infiniteLoopDetected = false;
		m_selected = value;
		OnNodeSelected( value );
	}
}

/// <summary>All input ports of this node.</summary>
public List<InputPort> InputPorts { get { return m_inputPorts; } }

/// <summary>All output ports of this node.</summary>
public List<OutputPort> OutputPorts { get { return m_outputPorts; } }

/// <summary>True when the node's connection status is Connected.</summary>
public bool IsConnected { get { return m_connStatus == NodeConnectionStatus.Connected; } }

/// <summary>
/// Connection status of this node. Self-powered nodes always report Connected
/// regardless of the value assigned; setting the status also refreshes the
/// node's status color.
/// </summary>
public NodeConnectionStatus ConnStatus
{
	get { return m_connStatus; }
	set
	{
		m_connStatus = m_selfPowered ? NodeConnectionStatus.Connected : value;

		switch( m_connStatus )
		{
			case NodeConnectionStatus.Island:
			case NodeConnectionStatus.Not_Connected:
				m_statusColor = Constants.NodeDefaultColor;
				break;
			case NodeConnectionStatus.Connected:
				m_statusColor = Constants.NodeConnectedColor;
				break;
			case NodeConnectionStatus.Error:
				m_statusColor = Constants.NodeErrorColor;
				break;
		}
	}
}

/// <summary>
/// Marks the node as self-powered; such nodes count as connected even when not
/// wired into a master node.
/// </summary>
public bool SelfPowered
{
	set
	{
		m_selfPowered = value;
		if( value )
		{
			ConnStatus = NodeConnectionStatus.Connected;
		}
	}
}

// This is also called when recording on Undo.
public virtual void OnBeforeSerialize()
{
}

public virtual void
OnAfterDeserialize() { m_selected = false; m_isOnGrid = false; for( int i = 0; i < m_inputPorts.Count; i++ ) { m_inputPorts[ i ].ResetWireReferenceStatus(); } m_repopulateDictionaries = true; m_sizeIsDirty = true; } public virtual int InputIdFromDeprecated( int oldInputId ) { return oldInputId; } public virtual int OutputIdFromDeprecated( int oldOutputId ) { return oldOutputId; } public virtual void ReadFromDeprecated( ref string[] nodeParams, Type oldType = null ) { } //Inherited classes must call this base method in order to setup id and position public virtual void ReadFromString( ref string[] nodeParams ) { ParentReadFromString( ref nodeParams ); } public void ParentReadFromString( ref string[] nodeParams ) { m_currentReadParamIdx = IOUtils.NodeTypeId + 1; UniqueId = Convert.ToInt32( nodeParams[ m_currentReadParamIdx++ ] ); string[] posCoordinates = nodeParams[ m_currentReadParamIdx++ ].Split( IOUtils.VECTOR_SEPARATOR ); m_position.x = Convert.ToSingle( posCoordinates[ 0 ] ); m_position.y = Convert.ToSingle( posCoordinates[ 1 ] ); if( UIUtils.CurrentShaderVersion() > 22 ) { string val = GetCurrentParam( ref nodeParams ); if( m_customPrecision ) { if( val.Equals("Fixed") ) m_currentPrecisionType = PrecisionType.Half; else m_currentPrecisionType = (PrecisionType)Enum.Parse( typeof( PrecisionType ), val ); } else { m_currentPrecisionType = PrecisionType.Inherit; } } if( UIUtils.CurrentShaderVersion() > 5004 ) m_showPreview = Convert.ToBoolean( GetCurrentParam( ref nodeParams ) ); } //should be called after ReadFromString public virtual void ReadInputDataFromString( ref string[] nodeParams ) { int count = 0; if( UIUtils.CurrentShaderVersion() > 7003 ) { try { count = Convert.ToInt32( nodeParams[ m_currentReadParamIdx++ ] ); } catch( Exception e ) { Debug.LogException( e ); } } else { count = ( m_oldInputCount < 0 ) ? 
m_inputPorts.Count : m_oldInputCount; } for( int i = 0; i < count && i < nodeParams.Length && m_currentReadParamIdx < nodeParams.Length; i++ ) { if( UIUtils.CurrentShaderVersion() < 5003 ) { int newId = VersionConvertInputPortId( i ); if( UIUtils.CurrentShaderVersion() > 23 ) { m_inputPorts[ newId ].DataType = (WirePortDataType)Enum.Parse( typeof( WirePortDataType ), nodeParams[ m_currentReadParamIdx++ ] ); } m_inputPorts[ newId ].InternalData = nodeParams[ m_currentReadParamIdx++ ]; if( m_inputPorts[ newId ].IsEditable && UIUtils.CurrentShaderVersion() >= 3100 && m_currentReadParamIdx < nodeParams.Length ) { m_inputPorts[ newId ].Name = nodeParams[ m_currentReadParamIdx++ ]; } m_inputPorts[ newId ].UpdatePreviewInternalData(); } else { string portIdStr = nodeParams[ m_currentReadParamIdx++ ]; int portId = -1; try { portId = Convert.ToInt32( portIdStr ); } catch( Exception e ) { Debug.LogException( e ); } WirePortDataType DataType = (WirePortDataType)Enum.Parse( typeof( WirePortDataType ), nodeParams[ m_currentReadParamIdx++ ] ); string InternalData = nodeParams[ m_currentReadParamIdx++ ]; bool isEditable = Convert.ToBoolean( nodeParams[ m_currentReadParamIdx++ ] ); string Name = string.Empty; if( isEditable && m_currentReadParamIdx < nodeParams.Length ) { Name = nodeParams[ m_currentReadParamIdx++ ]; } InputPort inputPort = GetInputPortByUniqueId( portId ); if( inputPort != null ) { if( UIUtils.IsValidType( DataType ) ) inputPort.DataType = DataType; inputPort.InternalData = InternalData; if( !string.IsNullOrEmpty( Name ) ) { inputPort.Name = Name; } inputPort.UpdatePreviewInternalData(); } } } } public virtual void ReadOutputDataFromString( ref string[] nodeParams ) { int count = 0; if( UIUtils.CurrentShaderVersion() > 7003 ) { count = Convert.ToInt32( nodeParams[ m_currentReadParamIdx++ ] ); } else { count = m_outputPorts.Count; } for( int i = 0; i < count && i < nodeParams.Length && m_currentReadParamIdx < nodeParams.Length; i++ ) { try { WirePortDataType 
dataType = (WirePortDataType)Enum.Parse( typeof( WirePortDataType ), nodeParams[ m_currentReadParamIdx++ ] );
	int portId = -1;
	if( UIUtils.CurrentShaderVersion() > 13903 )
	{
		portId = Convert.ToInt32( nodeParams[ m_currentReadParamIdx++ ] );
	}
	else
	{
		// Older shader versions stored output ports positionally.
		portId = i;
	}

	OutputPort port = GetOutputPortByUniqueId( portId );
	if( port != null && UIUtils.IsValidType( dataType ) )
	{
		port.DataType = dataType;
	}
}
catch( Exception e )
{
	Debug.LogException( e );
}
}
}

/// <summary>Hook for subclasses to read extra clipboard data. Base implementation is a no-op.</summary>
public virtual void ReadAdditionalClipboardData( ref string[] nodeParams ) { }

/// <summary>
/// Returns the parameter at the current read cursor and advances the cursor.
/// Logs an error and returns an empty string when no parameters remain.
/// </summary>
/// <param name="nodeParams">Serialized parameter list being consumed.</param>
/// <returns>The next parameter, or string.Empty on underflow.</returns>
protected string GetCurrentParam( ref string[] nodeParams )
{
	if( m_currentReadParamIdx < nodeParams.Length )
	{
		return nodeParams[ m_currentReadParamIdx++ ];
	}

	UIUtils.ShowMessage( UniqueId, "Invalid params number in node " + m_uniqueId + " of type " + GetType(), MessageSeverity.Error );
	return string.Empty;
}

/// <summary>
/// Returns the parameter at the given index without moving the read cursor.
/// Logs an error and returns an empty string when the index is out of range.
/// </summary>
/// <param name="index">Index of the parameter to fetch.</param>
/// <param name="nodeParams">Serialized parameter list being consumed.</param>
/// <returns>The requested parameter, or string.Empty when out of range.</returns>
protected string GetCurrentParam( int index, ref string[] nodeParams )
{
	// BUGFIX: the bounds check must validate the requested index, not the read
	// cursor, otherwise an out-of-range index could still be dereferenced and
	// throw IndexOutOfRangeException.
	if( index >= 0 && index < nodeParams.Length )
	{
		return nodeParams[ index ];
	}

	UIUtils.ShowMessage( UniqueId, "Invalid params number in node " + m_uniqueId + " of type " + GetType(), MessageSeverity.Error );
	return string.Empty;
}

/// <summary>
/// Serializes the node base data (type, unique id, position, precision and
/// preview flag) into <paramref name="nodeInfo"/> and every input wire
/// connection into <paramref name="connectionsInfo"/>.
/// </summary>
public virtual void WriteToString( ref string nodeInfo, ref string connectionsInfo )
{
	IOUtils.AddTypeToString( ref nodeInfo, IOUtils.NodeParam );
	IOUtils.AddFieldValueToString( ref nodeInfo, GetType() );
	IOUtils.AddFieldValueToString( ref nodeInfo, m_uniqueId );
	IOUtils.AddFieldValueToString( ref nodeInfo, ( m_position.x.ToString() + IOUtils.VECTOR_SEPARATOR + m_position.y.ToString() ) );
	IOUtils.AddFieldValueToString( ref nodeInfo, m_currentPrecisionType );
	IOUtils.AddFieldValueToString( ref nodeInfo, m_showPreview );
	for( int i = 0; i < m_inputPorts.Count; i++ )
	{
		m_inputPorts[ i ].WriteToString( ref connectionsInfo );
	}
}

/// <summary>
/// Serializes per-input-port data: port count followed by each port's id,
/// data type, internal data and (for editable ports) name.
/// </summary>
public virtual void WriteInputDataToString( ref string nodeInfo )
{
	IOUtils.AddFieldValueToString( ref nodeInfo, m_inputPorts.Count );
	for( int i = 0; i < m_inputPorts.Count; i++ )
	{
IOUtils.AddFieldValueToString( ref nodeInfo, m_inputPorts[ i ].PortId ); IOUtils.AddFieldValueToString( ref nodeInfo, m_inputPorts[ i ].DataType ); IOUtils.AddFieldValueToString( ref nodeInfo, m_inputPorts[ i ].InternalData ); IOUtils.AddFieldValueToString( ref nodeInfo, m_inputPorts[ i ].IsEditable ); if( m_inputPorts[ i ].IsEditable ) { IOUtils.AddFieldValueToString( ref nodeInfo, m_inputPorts[ i ].Name ); } } } public void WriteOutputDataToString( ref string nodeInfo ) { IOUtils.AddFieldValueToString( ref nodeInfo, m_outputPorts.Count ); for( int i = 0; i < m_outputPorts.Count; i++ ) { IOUtils.AddFieldValueToString( ref nodeInfo, m_outputPorts[ i ].DataType ); IOUtils.AddFieldValueToString( ref nodeInfo, m_outputPorts[ i ].PortId ); } } public virtual void WriteAdditionalClipboardData( ref string nodeInfo ) { } public virtual string GetIncludes() { return string.Empty; } public virtual void OnObjectDropped( UnityEngine.Object obj ) { } public virtual void SetupFromCastObject( UnityEngine.Object obj ) { } public virtual bool OnNodeInteraction( ParentNode node ) { return false; } public virtual void OnConnectedOutputNodeChanges( int portId, int otherNodeId, int otherPortId, string name, WirePortDataType type ) { } public virtual void OnConnectedInputNodeChanges( int portId, int otherNodeId, int otherPortId, string name, WirePortDataType type ) { } public Rect CachedPos { get { return m_cachedPos; } } public bool IsOnGrid { set { m_isOnGrid = value; } get { return m_isOnGrid; } } public uint CurrentReadParamIdx { get { return m_currentReadParamIdx++; } set { m_currentReadParamIdx = value; } } public Dictionary<string, InputPort> InputPortsDict { get { Dictionary<string, InputPort> dict = new Dictionary<string, InputPort>(); for( int i = 0; i < m_inputPorts.Count; i++ ) { dict.Add( m_inputPorts[ i ].Name, m_inputPorts[ i ] ); } return dict; } } public bool IsDirty { set { m_isDirty = value && UIUtils.DirtyMask; } get { bool value = m_isDirty; m_isDirty = false; 
return value;
	}
}

/// <summary>Resets the category bits and graph depth gathered during graph analysis.</summary>
public virtual void ResetNodeData()
{
	m_category = 0;
	m_graphDepth = 0;
}

/// <summary>
/// Propagates analysis data (category bits and graph depth) from this node to
/// every connected input node, recursively.
/// </summary>
/// <param name="nodeData">Accumulated traversal data; its depth is incremented per level.</param>
/// <param name="dataCollector">Data collector of the master node being generated.</param>
public virtual void PropagateNodeData( NodeData nodeData, ref MasterNodeDataCollector dataCollector )
{
	UIUtils.SetCategoryInBitArray( ref m_category, nodeData.Category );

	nodeData.GraphDepth += 1;
	if( nodeData.GraphDepth > m_graphDepth )
	{
		m_graphDepth = nodeData.GraphDepth;
	}

	int inputAmount = m_inputPorts.Count;
	for( int portIdx = 0; portIdx < inputAmount; portIdx++ )
	{
		if( m_inputPorts[ portIdx ].IsConnected )
		{
			m_inputPorts[ portIdx ].GetOutputNode().PropagateNodeData( nodeData, ref dataCollector );
		}
	}
}

/// <summary>
/// Applies a new title through <paramref name="callback"/> only when it differs
/// from the previously applied title, marking the node size dirty.
/// </summary>
public void SetTitleTextOnCallback( string compareTitle, Action<ParentNode, string> callback )
{
	if( m_previousTitle.Equals( compareTitle ) )
		return;

	m_previousTitle = compareTitle;
	m_sizeIsDirty = true;
	callback( this, compareTitle );
}

/// <summary>
/// Applies a new additional title through <paramref name="callback"/> only when
/// it differs from the previously applied one, marking the node size dirty.
/// </summary>
public void SetAdditonalTitleTextOnCallback( string compareTitle, Action<ParentNode, string> callback )
{
	if( m_previousAdditonalTitle.Equals( compareTitle ) )
		return;

	m_previousAdditonalTitle = compareTitle;
	m_sizeIsDirty = true;
	callback( this, compareTitle );
}

/// <summary>
/// Sets the node title, clipped via GenerateClippedTitle when longer than
/// allowed by <paramref name="maxSize"/> (suffix set by <paramref name="endString"/>).
/// </summary>
public virtual void SetClippedTitle( string newText, int maxSize = 170, string endString = "..." )
{
	m_content.text = GenerateClippedTitle( newText, maxSize, endString );
	m_sizeIsDirty = true;
}

/// <summary>
/// Sets the node's additional title, clipped via GenerateClippedTitle when
/// longer than allowed by <paramref name="maxSize"/>.
/// </summary>
public virtual void SetClippedAdditionalTitle( string newText, int maxSize = 170, string endString = "..."
) { m_additionalContent.text = GenerateClippedTitle( newText, maxSize, endString ); m_sizeIsDirty = true; } public void SetTitleText( string newText ) { if( !newText.Equals( m_content.text ) ) { m_content.text = newText; m_sizeIsDirty = true; } } public void SetAdditonalTitleText( string newText ) { if( !newText.Equals( m_additionalContent.text ) ) { m_additionalContent.text = newText; m_sizeIsDirty = true; } } public string GenerateErrorValue( int outputIdx = 0 ) { switch( m_outputPorts[ outputIdx ].DataType ) { case WirePortDataType.FLOAT2: { return "(0).xx"; } case WirePortDataType.FLOAT3: { return "(0).xxx"; } case WirePortDataType.FLOAT4: case WirePortDataType.COLOR: { return "(0).xxxx"; } } return "0"; } //Methods created to take into account new ports added on nodes newer versions //This way we can convert connections from previous versions to newer ones and not brake shader graph public virtual int VersionConvertInputPortId( int portId ) { return portId; } public virtual int VersionConvertOutputPortId( int portId ) { return portId; } public virtual string DataToArray { get { return string.Empty; } } public bool SaveIsDirty { set { m_saveIsDirty = value && UIUtils.DirtyMask; } get { bool value = m_saveIsDirty; m_saveIsDirty = false; return value; } } public GUIContent TitleContent { get { return m_content; } } public GUIContent AdditonalTitleContent { get { return m_additionalContent; } } public bool IsVisible { get { return m_isVisible; } } public NodeAttributes Attributes { get { return m_nodeAttribs; } } public bool ReorderLocked { get { return m_reorderLocked; } } public bool RequireMaterialUpdate { get { return m_requireMaterialUpdate; } } public bool RMBIgnore { get { return m_rmbIgnore; } } public float TextLabelWidth { get { return m_textLabelWidth; } } public bool IsMoving { get { return m_isMoving > 0; } } public bool MovingInFrame { get { return m_movingInFrame; } set { m_movingInFrame = value; } } public bool SizeIsDirty { get { return 
m_sizeIsDirty; } set { m_sizeIsDirty = value; } } public int Category { get { return m_category; } } public int CommentaryParent { get { return m_commentaryParent; } set { m_commentaryParent = value; } } public int Depth { get { return m_depth; } set { m_depth = value; } } public int MatrixId { get { return m_matrixId; } set { m_matrixId = value; } } public float PaddingTitleRight { get { return m_paddingTitleRight; } set { m_paddingTitleRight += value; } } public float PaddingTitleLeft { get { return m_paddingTitleLeft; } set { m_paddingTitleLeft += value; } } public int CachedPortsId { get { return m_cachedPortsId; } } public virtual bool RecursivePreviewUpdate( Dictionary<string,bool> duplicatesDict = null ) { if( duplicatesDict == null ) { duplicatesDict = ContainerGraph.ParentWindow.VisitedChanged; } for( int i = 0; i < InputPorts.Count; i++ ) { ParentNode outNode = null; if( InputPorts[ i ].ExternalReferences.Count > 0 ) { outNode = ContainerGraph.GetNode( InputPorts[ i ].ExternalReferences[ 0 ].NodeId ); } if( outNode != null ) { if( !duplicatesDict.ContainsKey( outNode.OutputId ) ) { bool result = outNode.RecursivePreviewUpdate(); if( result ) PreviewIsDirty = true; } else if( duplicatesDict[ outNode.OutputId ] ) { PreviewIsDirty = true; } } } bool needsUpdate = PreviewIsDirty; RenderNodePreview(); if( !duplicatesDict.ContainsKey( OutputId ) ) duplicatesDict.Add( OutputId, needsUpdate ); return needsUpdate; } public virtual void RenderNodePreview() { //Runs at least one time if( !HasPreviewShader || !m_initialized ) { // nodes with no preview don't update at all PreviewIsDirty = false; return; } if( !PreviewIsDirty && !m_continuousPreviewRefresh ) return; //Debug.Log( "PREVIEW " + this ); SetPreviewInputs(); if( m_cachedMainTexId == -1 ) m_cachedMainTexId = Shader.PropertyToID( "_MainTex" ); if( m_cachedMaskTexId == -1 ) m_cachedMaskTexId = Shader.PropertyToID( "_MaskTex" ); if( m_cachedPortsId == -1 ) m_cachedPortsId = Shader.PropertyToID( "_Ports" ); if( 
m_cachedPortId == -1 ) m_cachedPortId = Shader.PropertyToID( "_Port" ); int count = m_outputPorts.Count; for( int i = 0; i < count; i++ ) { if( i == 0 ) { RenderTexture temp = RenderTexture.active; RenderTexture beforeMask = RenderTexture.GetTemporary( Constants.PreviewSize , Constants.PreviewSize , 0, Constants.PreviewFormat , RenderTextureReadWrite.Linear ); RenderTexture.active = beforeMask; Graphics.Blit( null, beforeMask, PreviewMaterial, m_previewMaterialPassId ); m_portMask.Set( 0, 0, 0, 0 ); switch( m_outputPorts[ i ].DataType ) { case WirePortDataType.INT: case WirePortDataType.FLOAT: m_portMask.Set( 1, 1, 1, 1 ); break; case WirePortDataType.FLOAT2: m_portMask.Set( 1, 1, 0, 0 ); break; case WirePortDataType.FLOAT3: m_portMask.Set( 1, 1, 1, 0 ); break; case WirePortDataType.COLOR: case WirePortDataType.FLOAT4: m_portMask.Set( 1, 1, 1, 1 ); break; default: m_portMask.Set( 1, 1, 1, 1 ); break; } if( m_outputPorts[ i ].DataType == WirePortDataType.FLOAT3x3 || m_outputPorts[ i ].DataType == WirePortDataType.FLOAT4x4 ) { m_outputPorts[ i ].MaskingMaterial.SetTexture( m_cachedMainTexId, EditorGUIUtility.whiteTexture ); } else { m_outputPorts[ i ].MaskingMaterial.SetTexture( m_cachedMainTexId, beforeMask ); } m_outputPorts[ i ].MaskingMaterial.SetVector( m_cachedPortsId, m_portMask ); if( !Preferences.GlobalDisablePreviews ) { RenderTexture.active = m_outputPorts[ i ].OutputPreviewTexture; Graphics.Blit( null , m_outputPorts[ i ].OutputPreviewTexture , m_outputPorts[ i ].MaskingMaterial , 0 ); RenderTexture.ReleaseTemporary( beforeMask ); RenderTexture.active = temp; } } else { if( !Preferences.GlobalDisablePreviews ) { RenderTexture temp = RenderTexture.active; m_outputPorts[ i ].MaskingMaterial.SetTexture( m_cachedMaskTexId , PreviewTexture ); m_outputPorts[ i ].MaskingMaterial.SetFloat( m_cachedPortId , i ); RenderTexture.active = m_outputPorts[ i ].OutputPreviewTexture; Graphics.Blit( null , m_outputPorts[ i ].OutputPreviewTexture , m_outputPorts[ i 
].MaskingMaterial , 1 ); RenderTexture.active = temp; } } } PreviewIsDirty = m_continuousPreviewRefresh; FinishPreviewRender = true; } protected void ShowTab( NodeMessageType type, string tooltip ) { m_showErrorMessage = true; m_errorMessageTypeIsError = type; m_errorMessageTooltip = tooltip; } protected void ShowTab() { m_showErrorMessage = true; } protected void HideTab() { m_showErrorMessage = false; } public virtual RenderTexture PreviewTexture { get { if( m_outputPorts.Count > 0 ) return m_outputPorts[ 0 ].OutputPreviewTexture; else return null; } } public void FullWriteToString( ref string nodesInfo, ref string connectionsInfo ) { WriteToString( ref nodesInfo, ref connectionsInfo ); WriteInputDataToString( ref nodesInfo ); WriteOutputDataToString( ref nodesInfo ); } public void ClipboardFullWriteToString( ref string nodesInfo, ref string connectionsInfo ) { FullWriteToString( ref nodesInfo, ref connectionsInfo ); WriteAdditionalClipboardData( ref nodesInfo ); } public void FullReadFromString( ref string[] parameters ) { try { ReadFromString( ref parameters ); ReadInputDataFromString( ref parameters ); ReadOutputDataFromString( ref parameters ); } catch( Exception e ) { Debug.LogException( e ); } } public void ClipboardFullReadFromString( ref string[] parameters ) { try { FullReadFromString( ref parameters ); ReadAdditionalClipboardData( ref parameters ); } catch( Exception e ) { Debug.LogException( e ); } } public string GenerateClippedTitle( string original , int maxSize = 170, string endString = "..." 
) { if( UIUtils.UnZoomedNodeTitleStyle == null ) return original; GUIContent content = new GUIContent( original ); string finalTitle = string.Empty; bool addEllipsis = false; for( int i = 1; i <= original.Length; i++ ) { content.text = original.Substring( 0, i ); Vector2 titleSize = UIUtils.UnZoomedNodeTitleStyle.CalcSize( content ); if( titleSize.x > maxSize ) { addEllipsis = true; break; } else { finalTitle = content.text; } } if( addEllipsis ) finalTitle += endString; return finalTitle; } public virtual void RefreshOnUndo() { } public virtual void CalculateCustomGraphDepth() { } public int GraphDepth { get { return m_graphDepth; } } public PrecisionType CurrentPrecisionType { get { return m_currentPrecisionType == PrecisionType.Inherit ? ContainerGraph.CurrentPrecision : m_currentPrecisionType; } } public Material PreviewMaterial { get { if( m_previewMaterial == null ) { m_previewMaterial = new Material( PreviewShader ); } return m_previewMaterial; } } public Shader PreviewShader { get { if( m_previewShader == null ) { m_previewShader = AssetDatabase.LoadAssetAtPath<Shader>( AssetDatabase.GUIDToAssetPath( m_previewShaderGUID ) ); } if( m_previewShader == null ) { m_previewShader = AssetDatabase.LoadAssetAtPath<Shader>( AssetDatabase.GUIDToAssetPath( "d9ca47581ac157145bff6f72ac5dd73e" ) ); //ranged float guid } if( m_previewShader == null ) m_previewShader = Shader.Find( "Unlit/Colored Transparent" ); return m_previewShader; } } public bool HasPreviewShader { get { return !string.IsNullOrEmpty( m_previewShaderGUID ); } } public void CheckSpherePreview() { bool oneIsSphere = false; if( m_drawPreviewAsSphere ) oneIsSphere = true; int count = m_inputPorts.Count; for( int i = 0; i < count; i++ ) { ParentNode node = null; if( m_inputPorts[ i ].ExternalReferences.Count > 0) { node = ContainerGraph.GetNode( m_inputPorts[ i ].ExternalReferences[ 0 ].NodeId ); } if( node != null && node.SpherePreview ) oneIsSphere = true; } if( m_forceDrawPreviewAsPlane ) oneIsSphere = 
false; SpherePreview = oneIsSphere; } public bool SpherePreview { get { return m_spherePreview; } set { m_spherePreview = value; } } public bool ShowPreview { get { return m_showPreview; } set { m_showPreview = value; } } public int VisiblePorts { get { return m_visiblePorts; } set { m_visiblePorts = value; } } public bool Docking { get { return m_docking; } set { m_docking = value; } } public bool UseSquareNodeTitle { get { return m_useSquareNodeTitle; } set { m_useSquareNodeTitle = value; } } public bool InsideShaderFunction { get { return ContainerGraph != ContainerGraph.ParentWindow.CurrentGraph; } } public virtual void SetContainerGraph( ParentGraph newgraph ) { m_containerGraph = newgraph; } public virtual void OnMasterNodeReplaced( MasterNode newMasterNode ) { } public virtual void RefreshExternalReferences() { } public Rect DropdownRect { get { return m_dropdownRect; } } public virtual bool Contains( Vector2 pos ) { return m_globalPosition.Contains( pos ); } public virtual bool Contains( Vector3 pos ) { return m_globalPosition.Contains( pos ); } public bool IsNodeBeingCopied { get { return m_isNodeBeingCopied; } set { m_isNodeBeingCopied = value; } } public virtual WirePortDataType GetInputPortVisualDataTypeByArrayIdx( int portArrayIdx ) { return m_inputPorts[ portArrayIdx ].DataType; } public virtual WirePortDataType GetOutputPortVisualDataTypeById( int portId ) { return GetOutputPortByUniqueId( portId ).DataType; } public virtual bool CheckFindText( string text ) { return TitleContent.text.IndexOf( text, StringComparison.CurrentCultureIgnoreCase ) >= 0; } public virtual ParentNode ExecuteStubCode(){ return this; } public virtual float HeightEstimate { get { float heightEstimate = 0; heightEstimate = 32 + Constants.INPUT_PORT_DELTA_Y; for( int i = 0; i < InputPorts.Count; i++ ) { if( InputPorts[ i ].Visible ) heightEstimate += 18 + Constants.INPUT_PORT_DELTA_Y; } return heightEstimate; // Magic number 18 represents m_fontHeight that might not be set yet 
//return Constants.NODE_HEADER_EXTRA_HEIGHT + Mathf.Max( 18 + m_inputPorts.Count, m_outputPorts.Count ) * Constants.INPUT_PORT_DELTA_Y; } } public bool WasDeprecated { get { return m_wasDeprecated; } set { m_wasDeprecated = value; } } public bool Alive { get { return m_alive;} set { m_alive = value; } } public string TypeName { get { if( m_nodeAttribs != null ) return m_nodeAttribs.Name;return GetType().ToString(); } } public bool PreviewIsDirty { set { m_previewIsDirty = value; } get { return m_previewIsDirty; } } protected bool FinishPreviewRender { get { return m_finishPreviewRender; } set { m_finishPreviewRender = value; } } public virtual bool IsStubNode { get { return false; } } } }
31.92162
473
0.692624
[ "MIT" ]
DANaini13/2022GGJ
Assets/AmplifyShaderEditor/Plugins/Editor/Nodes/ParentNode.cs
121,366
C#
using UnityEngine; using System.Collections; public class Test : MonoBehaviour { void Update() { if(Input.GetKeyDown(KeyCode.T)) GameManager.Instance.test(); } }
16.727273
40
0.673913
[ "MIT" ]
rafedb/unity_singletons
Assets/Scripts/Test.cs
186
C#
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System; using System.Collections.Generic; using System.Collections.Immutable; using Microsoft.CodeAnalysis; namespace Roslyn.Utilities { internal static class Hash { /// <summary> /// This is how VB Anonymous Types combine hash values for fields. /// </summary> internal static int Combine(int newKey, int currentKey) { return unchecked((currentKey * (int)0xA5555529) + newKey); } internal static int Combine(bool newKeyPart, int currentKey) { return Combine(currentKey, newKeyPart ? 1 : 0); } /// <summary> /// This is how VB Anonymous Types combine hash values for fields. /// PERF: Do not use with enum types because that involves multiple /// unnecessary boxing operations. Unfortunately, we can't constrain /// T to "non-enum", so we'll use a more restrictive constraint. /// </summary> internal static int Combine<T>(T newKeyPart, int currentKey) where T : class { int hash = unchecked(currentKey * (int)0xA5555529); if (newKeyPart != null) { return unchecked(hash + newKeyPart.GetHashCode()); } return hash; } internal static int CombineValues<T>(IEnumerable<T> values, int maxItemsToHash = int.MaxValue) { if (values == null) { return 0; } var hashCode = 0; var count = 0; foreach (var value in values) { if (count++ >= maxItemsToHash) { break; } // Should end up with a constrained virtual call to object.GetHashCode (i.e. avoid boxing where possible). if (value != null) { hashCode = Hash.Combine(value.GetHashCode(), hashCode); } } return hashCode; } internal static int CombineValues<T>(T[] values, int maxItemsToHash = int.MaxValue) { if (values == null) { return 0; } var maxSize = Math.Min(maxItemsToHash, values.Length); var hashCode = 0; for (int i = 0; i < maxSize; i++) { T value = values[i]; // Should end up with a constrained virtual call to object.GetHashCode (i.e. avoid boxing where possible). 
if (value != null) { hashCode = Hash.Combine(value.GetHashCode(), hashCode); } } return hashCode; } internal static int CombineValues<T>(ImmutableArray<T> values, int maxItemsToHash = int.MaxValue) { if (values.IsDefaultOrEmpty) { return 0; } var hashCode = 0; var count = 0; foreach (var value in values) { if (count++ >= maxItemsToHash) { break; } // Should end up with a constrained virtual call to object.GetHashCode (i.e. avoid boxing where possible). if (value != null) { hashCode = Hash.Combine(value.GetHashCode(), hashCode); } } return hashCode; } internal static int CombineValues(IEnumerable<string> values, StringComparer stringComparer, int maxItemsToHash = int.MaxValue) { if (values == null) { return 0; } var hashCode = 0; var count = 0; foreach (var value in values) { if (count++ >= maxItemsToHash) { break; } if (value != null) { hashCode = Hash.Combine(stringComparer.GetHashCode(value), hashCode); } } return hashCode; } /// <summary> /// The offset bias value used in the FNV-1a algorithm /// See http://en.wikipedia.org/wiki/Fowler%E2%80%93Noll%E2%80%93Vo_hash_function /// </summary> internal const int FnvOffsetBias = unchecked((int)2166136261); /// <summary> /// The generative factor used in the FNV-1a algorithm /// See http://en.wikipedia.org/wiki/Fowler%E2%80%93Noll%E2%80%93Vo_hash_function /// </summary> internal const int FnvPrime = 16777619; /// <summary> /// Compute the FNV-1a hash of a sequence of bytes /// See http://en.wikipedia.org/wiki/Fowler%E2%80%93Noll%E2%80%93Vo_hash_function /// </summary> /// <param name="data">The sequence of bytes</param> /// <returns>The FNV-1a hash of <paramref name="data"/></returns> internal static int GetFNVHashCode(byte[] data) { int hashCode = Hash.FnvOffsetBias; for (int i = 0; i < data.Length; i++) { hashCode = unchecked((hashCode ^ data[i]) * Hash.FnvPrime); } return hashCode; } /// <summary> /// Compute the FNV-1a hash of a sequence of bytes and determines if the byte /// sequence is valid ASCII and hence the 
hash code matches a char sequence /// encoding the same text. /// See http://en.wikipedia.org/wiki/Fowler%E2%80%93Noll%E2%80%93Vo_hash_function /// </summary> /// <param name="data">The sequence of bytes that are likely to be ASCII text.</param> /// <param name="isAscii">True if the sequence contains only characters in the ASCII range.</param> /// <returns>The FNV-1a hash of <paramref name="data"/></returns> internal static int GetFNVHashCode(ReadOnlySpan<byte> data, out bool isAscii) { int hashCode = Hash.FnvOffsetBias; byte asciiMask = 0; for (int i = 0; i < data.Length; i++) { byte b = data[i]; asciiMask |= b; hashCode = unchecked((hashCode ^ b) * Hash.FnvPrime); } isAscii = (asciiMask & 0x80) == 0; return hashCode; } /// <summary> /// Compute the FNV-1a hash of a sequence of bytes /// See http://en.wikipedia.org/wiki/Fowler%E2%80%93Noll%E2%80%93Vo_hash_function /// </summary> /// <param name="data">The sequence of bytes</param> /// <returns>The FNV-1a hash of <paramref name="data"/></returns> internal static int GetFNVHashCode(ImmutableArray<byte> data) { int hashCode = Hash.FnvOffsetBias; for (int i = 0; i < data.Length; i++) { hashCode = unchecked((hashCode ^ data[i]) * Hash.FnvPrime); } return hashCode; } /// <summary> /// Compute the hashcode of a sub-string using FNV-1a /// See http://en.wikipedia.org/wiki/Fowler%E2%80%93Noll%E2%80%93Vo_hash_function /// Note: FNV-1a was developed and tuned for 8-bit sequences. We're using it here /// for 16-bit Unicode chars on the understanding that the majority of chars will /// fit into 8-bits and, therefore, the algorithm will retain its desirable traits /// for generating hash codes. 
/// </summary> /// <param name="text">The input string</param> /// <param name="start">The start index of the first character to hash</param> /// <param name="length">The number of characters, beginning with <paramref name="start"/> to hash</param> /// <returns>The FNV-1a hash code of the substring beginning at <paramref name="start"/> and ending after <paramref name="length"/> characters.</returns> internal static int GetFNVHashCode(string text, int start, int length) { int hashCode = Hash.FnvOffsetBias; int end = start + length; for (int i = start; i < end; i++) { hashCode = unchecked((hashCode ^ text[i]) * Hash.FnvPrime); } return hashCode; } internal static int GetCaseInsensitiveFNVHashCode(string text) { return GetCaseInsensitiveFNVHashCode(text, 0, text.Length); } internal static int GetCaseInsensitiveFNVHashCode(string text, int start, int length) { int hashCode = Hash.FnvOffsetBias; int end = start + length; for (int i = start; i < end; i++) { hashCode = unchecked((hashCode ^ CaseInsensitiveComparison.ToLower(text[i])) * Hash.FnvPrime); } return hashCode; } /// <summary> /// Compute the hashcode of a sub-string using FNV-1a /// See http://en.wikipedia.org/wiki/Fowler%E2%80%93Noll%E2%80%93Vo_hash_function /// </summary> /// <param name="text">The input string</param> /// <param name="start">The start index of the first character to hash</param> /// <returns>The FNV-1a hash code of the substring beginning at <paramref name="start"/> and ending at the end of the string.</returns> internal static int GetFNVHashCode(string text, int start) { return GetFNVHashCode(text, start, length: text.Length - start); } /// <summary> /// Compute the hashcode of a string using FNV-1a /// See http://en.wikipedia.org/wiki/Fowler%E2%80%93Noll%E2%80%93Vo_hash_function /// </summary> /// <param name="text">The input string</param> /// <returns>The FNV-1a hash code of <paramref name="text"/></returns> internal static int GetFNVHashCode(string text) { return 
CombineFNVHash(Hash.FnvOffsetBias, text); } /// <summary> /// Compute the hashcode of a string using FNV-1a /// See http://en.wikipedia.org/wiki/Fowler%E2%80%93Noll%E2%80%93Vo_hash_function /// </summary> /// <param name="text">The input string</param> /// <returns>The FNV-1a hash code of <paramref name="text"/></returns> internal static int GetFNVHashCode(System.Text.StringBuilder text) { int hashCode = Hash.FnvOffsetBias; int end = text.Length; for (int i = 0; i < end; i++) { hashCode = unchecked((hashCode ^ text[i]) * Hash.FnvPrime); } return hashCode; } /// <summary> /// Compute the hashcode of a sub string using FNV-1a /// See http://en.wikipedia.org/wiki/Fowler%E2%80%93Noll%E2%80%93Vo_hash_function /// </summary> /// <param name="text">The input string as a char array</param> /// <param name="start">The start index of the first character to hash</param> /// <param name="length">The number of characters, beginning with <paramref name="start"/> to hash</param> /// <returns>The FNV-1a hash code of the substring beginning at <paramref name="start"/> and ending after <paramref name="length"/> characters.</returns> internal static int GetFNVHashCode(char[] text, int start, int length) { int hashCode = Hash.FnvOffsetBias; int end = start + length; for (int i = start; i < end; i++) { hashCode = unchecked((hashCode ^ text[i]) * Hash.FnvPrime); } return hashCode; } /// <summary> /// Compute the hashcode of a single character using the FNV-1a algorithm /// See http://en.wikipedia.org/wiki/Fowler%E2%80%93Noll%E2%80%93Vo_hash_function /// Note: In general, this isn't any more useful than "char.GetHashCode". However, /// it may be needed if you need to generate the same hash code as a string or /// substring with just a single character. 
/// </summary> /// <param name="ch">The character to hash</param> /// <returns>The FNV-1a hash code of the character.</returns> internal static int GetFNVHashCode(char ch) { return Hash.CombineFNVHash(Hash.FnvOffsetBias, ch); } /// <summary> /// Combine a string with an existing FNV-1a hash code /// See http://en.wikipedia.org/wiki/Fowler%E2%80%93Noll%E2%80%93Vo_hash_function /// </summary> /// <param name="hashCode">The accumulated hash code</param> /// <param name="text">The string to combine</param> /// <returns>The result of combining <paramref name="hashCode"/> with <paramref name="text"/> using the FNV-1a algorithm</returns> internal static int CombineFNVHash(int hashCode, string text) { foreach (char ch in text) { hashCode = unchecked((hashCode ^ ch) * Hash.FnvPrime); } return hashCode; } /// <summary> /// Combine a char with an existing FNV-1a hash code /// See http://en.wikipedia.org/wiki/Fowler%E2%80%93Noll%E2%80%93Vo_hash_function /// </summary> /// <param name="hashCode">The accumulated hash code</param> /// <param name="ch">The new character to combine</param> /// <returns>The result of combining <paramref name="hashCode"/> with <paramref name="ch"/> using the FNV-1a algorithm</returns> internal static int CombineFNVHash(int hashCode, char ch) { return unchecked((hashCode ^ ch) * Hash.FnvPrime); } } }
37.975543
161
0.554275
[ "Apache-2.0" ]
Ashera138/roslyn
src/Compilers/Core/Portable/InternalUtilities/Hash.cs
13,977
C#
using Amazon.JSII.Runtime.Deputy; [assembly: JsiiAssembly("@alicloud/ros-cdk-memcache", "1.0.4", "alicloud-ros-cdk-memcache-1.0.4.tgz")]
34.5
102
0.731884
[ "Apache-2.0" ]
piotr-kalanski/Resource-Orchestration-Service-Cloud-Development-Kit
multiple-languages/dotnet/AlibabaCloud.SDK.ROS.CDK.Memcache/AssemblyInfo.cs
138
C#
using System.Reflection; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; // General Information about an assembly is controlled through the following // set of attributes. Change these attribute values to modify the information // associated with an assembly. [assembly: AssemblyTitle("InductiveSens")] [assembly: AssemblyDescription("")] [assembly: AssemblyConfiguration("")] [assembly: AssemblyCompany("")] [assembly: AssemblyProduct("InductiveSens")] [assembly: AssemblyCopyright("Copyright © 2020")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] // Setting ComVisible to false makes the types in this assembly not visible // to COM components. If you need to access a type in this assembly from // COM, set the ComVisible attribute to true on that type. [assembly: ComVisible(false)] // The following GUID is for the ID of the typelib if this project is exposed to COM [assembly: Guid("fa2f5edf-4dd2-42e1-98b0-7860acda79a1")] // Version information for an assembly consists of the following four values: // // Major Version // Minor Version // Build Number // Revision // // You can specify all the values or you can default the Build and Revision Numbers // by using the '*' as shown below: // [assembly: AssemblyVersion("1.0.*")] [assembly: AssemblyVersion("1.0.0.0")] [assembly: AssemblyFileVersion("1.0.0.0")]
37.675676
84
0.748207
[ "MIT" ]
mzsoltmolnar/Measurement-Modeling
src/CapacitiveSens/CapacitiveSens/Properties/AssemblyInfo.cs
1,397
C#
 using Nop.Core.Configuration; namespace Nop.Core.Domain.Media { public class MediaSettings : ISettings { public int AvatarPictureSize { get; set; } public int ProductThumbPictureSize { get; set; } public int ProductDetailsPictureSize { get; set; } public int ProductThumbPictureSizeOnProductDetailsPage { get; set; } public int AssociatedProductPictureSize { get; set; } public int CategoryThumbPictureSize { get; set; } public int ManufacturerThumbPictureSize { get; set; } public int CartThumbPictureSize { get; set; } public int MiniCartThumbPictureSize { get; set; } public int AutoCompleteSearchThumbPictureSize { get; set; } public bool DefaultPictureZoomEnabled { get; set; } public int MaximumImageSize { get; set; } /// <summary> /// Geta or sets a default quality used for image generation /// </summary> public int DefaultImageQuality { get; set; } /// <summary> /// Geta or sets a vaue indicating whether single (/content/images/thumbs/) or multiple (/content/images/thumbs/001/ and /content/images/thumbs/002/) directories will used for picture thumbs /// </summary> public bool MultipleThumbDirectories { get; set; } } }
40.030303
198
0.659349
[ "MIT" ]
tebyaniyan/internet-Shop
Libraries/Nop.Core/Domain/Media/MediaSettings.cs
1,323
C#
using System; /* * Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH * under one or more contributor license agreements. See the NOTICE file * distributed with this work for additional information regarding copyright * ownership. Camunda licenses this file to you under the Apache License, * Version 2.0; you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ namespace org.camunda.bpm.engine.impl.cmd { using IdentityOperationResult = org.camunda.bpm.engine.impl.identity.IdentityOperationResult; using Command = org.camunda.bpm.engine.impl.interceptor.Command; using CommandContext = org.camunda.bpm.engine.impl.interceptor.CommandContext; /// <summary> /// Allows to unlock users. Only CAMUNDA_ADMIN is authorised to do it. /// /// @author Yana Vasileva /// /// </summary> [Serializable] public class UnlockUserCmd : Command<object> { private const long serialVersionUID = 1L; internal string userId; public UnlockUserCmd(string userId) { this.userId = userId; } public virtual object execute(CommandContext commandContext) { commandContext.AuthorizationManager.checkCamundaAdmin(); IdentityOperationResult operationResult = commandContext.WritableIdentityProvider.unlockUser(userId); commandContext.OperationLogManager.logUserOperation(operationResult, userId); return null; } } }
31.701754
103
0.76425
[ "Apache-2.0" ]
luizfbicalho/Camunda.NET
camunda-bpm-platform-net/engine/src/main/java/org/camunda/bpm/engine/impl/cmd/UnlockUserCmd.cs
1,809
C#
using UnityEngine; using UnityEditor; using System.IO; namespace AssetBundles { /// Preserve code in Asset Bundles. See https://docs.unity3d.com/Manual/BuildingAssetBundles.html public class BuildScript { public static void BuildPlayer () { BuildPlayerOptions buildPlayerOptions = new BuildPlayerOptions (); string manifestPath = Path.Combine (CreateAssetBundles.buildPath, "AssetBundles.manifest"); // Manifest path buildPlayerOptions.assetBundleManifestPath = manifestPath; // Build the Player ensuring engine code is included for AssetBundles in the manifest. BuildPipeline.BuildPlayer (buildPlayerOptions); Debug.Log ("BuildPlayer using Asset Bundle manifest: " + manifestPath); } } }
25.137931
98
0.765432
[ "MIT" ]
HaraldHeide/Metaverse-Marketing
Assets/StorageServicesDemo/Editor/PreserveCode.cs
731
C#
// <auto-generated> // Code generated by Microsoft (R) AutoRest Code Generator. // Changes may cause incorrect behavior and will be lost if the code is // regenerated. // </auto-generated> namespace Slack.Api.CSharp.WebApi.Models { using Microsoft.Rest; /// <summary> /// Exception thrown for an invalid response with ListErrorModel6 /// information. /// </summary> public partial class ListErrorModel6Exception : RestException { /// <summary> /// Gets information about the associated HTTP request. /// </summary> public HttpRequestMessageWrapper Request { get; set; } /// <summary> /// Gets information about the associated HTTP response. /// </summary> public HttpResponseMessageWrapper Response { get; set; } /// <summary> /// Gets or sets the body object. /// </summary> public ListErrorModel6 Body { get; set; } /// <summary> /// Initializes a new instance of the ListErrorModel6Exception class. /// </summary> public ListErrorModel6Exception() { } /// <summary> /// Initializes a new instance of the ListErrorModel6Exception class. /// </summary> /// <param name="message">The exception message.</param> public ListErrorModel6Exception(string message) : this(message, null) { } /// <summary> /// Initializes a new instance of the ListErrorModel6Exception class. /// </summary> /// <param name="message">The exception message.</param> /// <param name="innerException">Inner exception.</param> public ListErrorModel6Exception(string message, System.Exception innerException) : base(message, innerException) { } } }
31.305085
88
0.60693
[ "MIT" ]
JamesMarcogliese/slack-api-csharp
src/Slack.Api.CSharp/WebApi/Models/ListErrorModel6Exception.cs
1,847
C#
using System.Windows.Controls; namespace SandBeige.MealRecipes.Views.Settings { /// <summary> /// NetworkSettingsPage.xaml の相互作用ロジック /// </summary> public partial class NetworkSettingsPage : UserControl { public NetworkSettingsPage() { InitializeComponent(); } } }
21.384615
57
0.741007
[ "MIT" ]
southernwind/Gohan
MealRecipes/Views/Settings/NetworkSettingsPage.xaml.cs
296
C#
using MTCG; using MTCG.Entity; using MTCG.Model; using NUnit.Framework; namespace UnitTest { [TestFixture] public class CalculateDamageTests { [SetUp] public void Setup() { } [Test] public void Dragon_Attack_FireElf() { //Arrange var dragonEntity = new CardEntity {Damage = 10, Race = Race.Dragon, CardType = CardType.MonsterCard}; var elfEntity = new CardEntity {Damage = 10, Race = Race.FireElf, CardType = CardType.MonsterCard}; //Act var result = GameModell.CalculateDamge(dragonEntity, elfEntity); //Assert Assert.That(result <= 0); } [Test] public void Goblin_Attack_Dragon() { //Arrange var card1Entity = new CardEntity {Damage = 10, Race = Race.Goblin, CardType = CardType.MonsterCard}; var card2Entity = new CardEntity {Damage = 10, Race = Race.Dragon, CardType = CardType.MonsterCard}; //Act var result = GameModell.CalculateDamge(card1Entity, card2Entity); //Assert Assert.That(result <= 0); } [Test] public void Orc_Attack_Wizard() { //Arrange var card1Entity = new CardEntity {Damage = 10, Race = Race.Orc, CardType = CardType.MonsterCard}; var card2Entity = new CardEntity {Damage = 10, Race = Race.Wizard, CardType = CardType.MonsterCard}; //Act var result = GameModell.CalculateDamge(card1Entity, card2Entity); //Assert Assert.That(result <= 0); } [Test] public void FireSpell_Attack_Kraken() { //Arrange var card1Entity = new CardEntity {Damage = 10, CardType = CardType.SpellCard}; var card2Entity = new CardEntity {Damage = 10, Race = Race.Kraken, CardType = CardType.MonsterCard}; //Act var result = GameModell.CalculateDamge(card1Entity, card2Entity); //Assert Assert.That(result <= 0); } [Test] public void NormalSpell_Attack_Kraken() { //Arrange var card1Entity = new CardEntity {Damage = 10, CardType = CardType.SpellCard, ElementType = ElementType.Normal}; var card2Entity = new CardEntity {Damage = 10, Race = Race.Kraken, CardType = CardType.MonsterCard}; //Act var result = GameModell.CalculateDamge(card1Entity, card2Entity); //Assert Assert.That(result <= 0); } [Test] 
public void WaterSpell_Attack_Knight() { //Arrange var card1Entity = new CardEntity {Damage = 10, CardType = CardType.SpellCard, ElementType = ElementType.Water}; var card2Entity = new CardEntity {Damage = 10, Race = Race.Knight, CardType = CardType.MonsterCard}; //Act var result = GameModell.CalculateDamge(card1Entity, card2Entity); //Assert Assert.That(result >= 9999); } [Test] public void WaterSpell_Attack_Kraken() { //Arrange var card1Entity = new CardEntity {Damage = 10, CardType = CardType.SpellCard, ElementType = ElementType.Water}; var card2Entity = new CardEntity {Damage = 10, Race = Race.Kraken, CardType = CardType.MonsterCard}; //Act var result = GameModell.CalculateDamge(card1Entity, card2Entity); //Assert Assert.That(result <= 0); } [Test] public void WaterSpell_Attack_WeakOrc() { //Arrange var card1Entity = new CardEntity {Damage = 10, CardType = CardType.SpellCard, ElementType = ElementType.Water}; var card2Entity = new CardEntity {Damage = 10, Race = Race.Orc, CardType = CardType.MonsterCard, ElementType = ElementType.Fire}; //Act var result = GameModell.CalculateDamge(card1Entity, card2Entity); //Assert Assert.That(result >= 20); } [Test] public void WaterSpell_Attack_Orc() { //Arrange var card1Entity = new CardEntity {Damage = 20, CardType = CardType.SpellCard, ElementType = ElementType.Water}; var card2Entity = new CardEntity {Damage = 10, Race = Race.Orc, CardType = CardType.MonsterCard, ElementType = ElementType.Normal}; //Act var result = GameModell.CalculateDamge(card1Entity, card2Entity); //Assert Assert.That(result >= 10); } [Test] public void FireSpell_Attack_WeakOrc() { //Arrange var card1Entity = new CardEntity {Damage = 10, CardType = CardType.SpellCard, ElementType = ElementType.Fire}; var card2Entity = new CardEntity {Damage = 10, Race = Race.Orc, CardType = CardType.MonsterCard, ElementType = ElementType.Normal}; //Act var result = GameModell.CalculateDamge(card1Entity, card2Entity); //Assert Assert.That(result >= 20); } [Test] 
public void NormalSpell_Attack_Orc() { //Arrange var card1Entity = new CardEntity {Damage = 10, CardType = CardType.SpellCard, ElementType = ElementType.Normal}; var card2Entity = new CardEntity {Damage = 10, Race = Race.Orc, CardType = CardType.MonsterCard, ElementType = ElementType.Normal}; //Act var result = GameModell.CalculateDamge(card1Entity, card2Entity); //Assert Assert.That(result >= 10); } [Test] public void NormalOrc_Attack_WeakWaterSpell() { //Arrange var card2Entity = new CardEntity {Damage = 10, CardType = CardType.SpellCard, ElementType = ElementType.Water}; var card1Entity = new CardEntity {Damage = 10, Race = Race.Orc, CardType = CardType.MonsterCard, ElementType = ElementType.Normal}; //Act var result = GameModell.CalculateDamge(card1Entity, card2Entity); //Assert Assert.That(result >= 20); } [Test] public void FireOrc_Attack_WaterSpell() { //Arrange var card2Entity = new CardEntity {Damage = 10, CardType = CardType.SpellCard, ElementType = ElementType.Water}; var card1Entity = new CardEntity {Damage = 10, Race = Race.Orc, CardType = CardType.MonsterCard, ElementType = ElementType.Fire}; //Act var result = GameModell.CalculateDamge(card1Entity, card2Entity); //Assert Assert.That(result >= 5); } [Test] public void NormalOrc_Attack_FireSpell() { //Arrange var card1Entity = new CardEntity {Damage = 10, CardType = CardType.SpellCard, ElementType = ElementType.Fire}; var card2Entity = new CardEntity {Damage = 10, Race = Race.Orc, CardType = CardType.MonsterCard, ElementType = ElementType.Normal}; //Act var result = GameModell.CalculateDamge(card2Entity,card1Entity); //Assert Assert.That(result >= 5); } } }
37.460784
114
0.561241
[ "Apache-2.0" ]
Ephaltes/MTCG
UnitTest/CalculateDamageTests.cs
7,642
C#
using Newtonsoft.Json.Converters; namespace Alexa.NET.Management { internal class Iso8601Converter:IsoDateTimeConverter { public Iso8601Converter() { DateTimeFormat = "yyyy-MM-ddTHH:mm:ss.fffZ"; } } }
20.833333
56
0.64
[ "MIT" ]
dennis-kuypers-gcx/Alexa.NET.Management
Alexa.NET.Management/Iso8601Converter.cs
252
C#
using System; using System.IO; using BizHawk.Common.BufferExtensions; using BizHawk.Emulation.Common; using BizHawk.Emulation.Cores.Consoles.Nintendo.Gameboy; using BizHawk.Emulation.Cores.Properties; namespace BizHawk.Emulation.Cores.Nintendo.Gameboy { /// <summary> /// a gameboy/gameboy color emulator wrapped around native C++ libgambatte /// </summary> [Core( CoreNames.Gambatte, "", isPorted: true, isReleased: true, portedVersion: "SVN 344", portedUrl: "http://gambatte.sourceforge.net/", singleInstance: false)] [ServiceNotApplicable(new[] { typeof(IDriveLight) })] public partial class Gameboy : IEmulator, IVideoProvider, ISoundProvider, ISaveRam, IStatable, IInputPollable, ICodeDataLogger, IBoardInfo, IRomInfo, IDebuggable, ISettable<Gameboy.GambatteSettings, Gameboy.GambatteSyncSettings>, IGameboyCommon, ICycleTiming, ILinkable { [CoreConstructor("GB")] [CoreConstructor("GBC")] public Gameboy(CoreComm comm, GameInfo game, byte[] file, Gameboy.GambatteSettings settings, Gameboy.GambatteSyncSettings syncSettings, bool deterministic) { var ser = new BasicServiceProvider(this); ser.Register<IDisassemblable>(new GBDisassembler()); ServiceProvider = ser; Tracer = new TraceBuffer { Header = "Z80: PC, opcode, registers (A, B, C, D, E, F, H, L, LY, SP, CY)" }; ser.Register<ITraceable>(Tracer); InitMemoryCallbacks(); ThrowExceptionForBadRom(file); BoardName = MapperName(file); DeterministicEmulation = deterministic; GambatteState = LibGambatte.gambatte_create(); if (GambatteState == IntPtr.Zero) { throw new InvalidOperationException($"{nameof(LibGambatte.gambatte_create)}() returned null???"); } Console.WriteLine(game.System); try { _syncSettings = (GambatteSyncSettings)syncSettings ?? 
new GambatteSyncSettings(); LibGambatte.LoadFlags flags = 0; switch (_syncSettings.ConsoleMode) { case GambatteSyncSettings.ConsoleModeType.GB: flags |= LibGambatte.LoadFlags.FORCE_DMG; break; case GambatteSyncSettings.ConsoleModeType.GBC: break; case GambatteSyncSettings.ConsoleModeType.GBA: flags |= LibGambatte.LoadFlags.GBA_CGB; break; default: if (game.System == "GB") flags |= LibGambatte.LoadFlags.FORCE_DMG; break; } if (_syncSettings.MulticartCompat) { flags |= LibGambatte.LoadFlags.MULTICART_COMPAT; } if (LibGambatte.gambatte_load(GambatteState, file, (uint)file.Length, flags) != 0) { throw new InvalidOperationException($"{nameof(LibGambatte.gambatte_load)}() returned non-zero (is this not a gb or gbc rom?)"); } byte[] bios; string biosSystemId; string biosId; if ((flags & LibGambatte.LoadFlags.FORCE_DMG) == LibGambatte.LoadFlags.FORCE_DMG) { biosSystemId = "GB"; biosId = "World"; IsCgb = false; } else { biosSystemId = "GBC"; biosId = _syncSettings.ConsoleMode == GambatteSyncSettings.ConsoleModeType.GBA ? "AGB" : "World"; IsCgb = true; } if (_syncSettings.EnableBIOS) { bios = comm.CoreFileProvider.GetFirmware(biosSystemId, biosId, true, "BIOS Not Found, Cannot Load. Change SyncSettings to run without BIOS."); } else { var builtinBios = (biosSystemId, biosId) switch { ("GB", "World") => Resources.FastDmgBoot, ("GBC", "World") => Resources.FastCgbBoot, ("GBC", "AGB") => Resources.FastAgbBoot, (_, _) => throw new Exception("Internal GB Error (BIOS??)"), }; bios = BizHawk.Common.Util.DecompressGzipFile(new MemoryStream(builtinBios.Value, false)); } if (LibGambatte.gambatte_loadbios(GambatteState, bios, (uint)bios.Length) != 0) { throw new InvalidOperationException($"{nameof(LibGambatte.gambatte_loadbios)}() returned non-zero (bios error)"); } // set real default colors (before anyone mucks with them at all) PutSettings((GambatteSettings)settings ?? 
new GambatteSettings()); InitSound(); Frame = 0; LagCount = 0; IsLagFrame = false; InputCallback = new LibGambatte.InputGetter(ControllerCallback); LibGambatte.gambatte_setinputgetter(GambatteState, InputCallback); InitMemoryDomains(); RomDetails = $"{game.Name}\r\nSHA1:{file.HashSHA1()}\r\nMD5:{file.HashMD5()}\r\n"; byte[] buff = new byte[32]; LibGambatte.gambatte_romtitle(GambatteState, buff); string romname = System.Text.Encoding.ASCII.GetString(buff); Console.WriteLine("Core reported rom name: {0}", romname); if (!DeterministicEmulation && _syncSettings.RealTimeRTC) { LibGambatte.gambatte_settimemode(GambatteState, false); } LibGambatte.gambatte_setrtcdivisoroffset(GambatteState, _syncSettings.RTCDivisorOffset); _cdCallback = new LibGambatte.CDCallback(CDCallbackProc); NewSaveCoreSetBuff(); } catch { Dispose(); throw; } } public string RomDetails { get; } /// <summary> /// the nominal length of one frame /// </summary> private const uint TICKSINFRAME = 35112; /// <summary> /// number of ticks per second /// </summary> private const uint TICKSPERSECOND = 2097152; /// <summary> /// keep a copy of the input callback delegate so it doesn't get GCed /// </summary> private LibGambatte.InputGetter InputCallback; /// <summary> /// whatever keys are currently depressed /// </summary> private LibGambatte.Buttons CurrentButtons = 0; /// <summary> /// internal gambatte state /// </summary> internal IntPtr GambatteState { get; private set; } = IntPtr.Zero; public int LagCount { get; set; } public bool IsLagFrame { get; set; } public bool IsCgb { get; set; } // all cycle counts are relative to a 2*1024*1024 mhz refclock /// <summary> /// total cycles actually executed /// </summary> private ulong _cycleCount = 0; private ulong callbackCycleCount = 0; /// <summary> /// number of extra cycles we overran in the last frame /// </summary> private uint frameOverflow = 0; public long CycleCount => (long)_cycleCount; public double ClockRate => TICKSPERSECOND; public static readonly 
ControllerDefinition GbController = new ControllerDefinition { Name = "Gameboy Controller", BoolButtons = { "Up", "Down", "Left", "Right", "Start", "Select", "B", "A", "Power" } }; private LibGambatte.Buttons ControllerCallback() { InputCallbacks.Call(); IsLagFrame = false; return CurrentButtons; } /// <summary> /// true if the emulator is currently emulating CGB /// </summary> public bool IsCGBMode() { //return LibGambatte.gambatte_iscgb(GambatteState); return IsCgb; } private InputCallbackSystem _inputCallbacks = new InputCallbackSystem(); // low priority TODO: due to certain aspects of the core implementation, // we don't smartly use the ActiveChanged event here. public IInputCallbackSystem InputCallbacks => _inputCallbacks; /// <summary> /// for use in dual core /// </summary> public void ConnectInputCallbackSystem(InputCallbackSystem ics) { _inputCallbacks = ics; } internal void FrameAdvancePrep(IController controller) { Frame++; // update our local copy of the controller data CurrentButtons = 0; if (controller.IsPressed("Up")) CurrentButtons |= LibGambatte.Buttons.UP; if (controller.IsPressed("Down")) CurrentButtons |= LibGambatte.Buttons.DOWN; if (controller.IsPressed("Left")) CurrentButtons |= LibGambatte.Buttons.LEFT; if (controller.IsPressed("Right")) CurrentButtons |= LibGambatte.Buttons.RIGHT; if (controller.IsPressed("A")) CurrentButtons |= LibGambatte.Buttons.A; if (controller.IsPressed("B")) CurrentButtons |= LibGambatte.Buttons.B; if (controller.IsPressed("Select")) CurrentButtons |= LibGambatte.Buttons.SELECT; if (controller.IsPressed("Start")) CurrentButtons |= LibGambatte.Buttons.START; // the controller callback will set this to false if it actually gets called during the frame IsLagFrame = true; if (controller.IsPressed("Power")) { LibGambatte.gambatte_reset(GambatteState); } if (Tracer.Enabled) { _tracecb = MakeTrace; } else { _tracecb = null; } LibGambatte.gambatte_settracecallback(GambatteState, _tracecb); 
LibGambatte.gambatte_setlayers(GambatteState, (_settings.DisplayBG ? 1 : 0) | (_settings.DisplayOBJ ? 2 : 0) | (_settings.DisplayWindow ? 4 : 0)); } internal void FrameAdvancePost() { if (IsLagFrame) { LagCount++; } endofframecallback?.Invoke(LibGambatte.gambatte_cpuread(GambatteState, 0xff40)); } private static string MapperName(byte[] romdata) { switch (romdata[0x147]) { case 0x00: return "Plain ROM"; // = PLAIN; break; case 0x01: return "MBC1 ROM"; // = MBC1; break; case 0x02: return "MBC1 ROM+RAM"; // = MBC1; break; case 0x03: return "MBC1 ROM+RAM+BATTERY"; // = MBC1; break; case 0x05: return "MBC2 ROM"; // = MBC2; break; case 0x06: return "MBC2 ROM+BATTERY"; // = MBC2; break; case 0x08: return "Plain ROM+RAM"; // = PLAIN; break; case 0x09: return "Plain ROM+RAM+BATTERY"; // = PLAIN; break; case 0x0F: return "MBC3 ROM+TIMER+BATTERY"; // = MBC3; break; case 0x10: return "MBC3 ROM+TIMER+RAM+BATTERY"; // = MBC3; break; case 0x11: return "MBC3 ROM"; // = MBC3; break; case 0x12: return "MBC3 ROM+RAM"; // = MBC3; break; case 0x13: return "MBC3 ROM+RAM+BATTERY"; // = MBC3; break; case 0x19: return "MBC5 ROM"; // = MBC5; break; case 0x1A: return "MBC5 ROM+RAM"; // = MBC5; break; case 0x1B: return "MBC5 ROM+RAM+BATTERY"; // = MBC5; break; case 0x1C: return "MBC5 ROM+RUMBLE"; // = MBC5; break; case 0x1D: return "MBC5 ROM+RUMBLE+RAM"; // = MBC5; break; case 0x1E: return "MBC5 ROM+RUMBLE+RAM+BATTERY"; // = MBC5; break; case 0xFF: return "HuC1 ROM+RAM+BATTERY"; // = HUC1; break; case 0xFE: return "HuC3 ROM+RAM+BATTERY"; default: return "UNKNOWN"; } } /// <summary> /// throw exception with intelligible message on some kinds of bad rom /// </summary> private static void ThrowExceptionForBadRom(byte[] romdata) { if (romdata.Length < 0x148) { throw new ArgumentException("ROM is far too small to be a valid GB\\GBC rom!"); } switch (romdata[0x147]) { case 0x00: break; case 0x01: break; case 0x02: break; case 0x03: break; case 0x05: break; case 0x06: break; case 0x08: break; case 
0x09: break; case 0x0b: throw new UnsupportedGameException("\"MM01\" Mapper not supported!"); case 0x0c: throw new UnsupportedGameException("\"MM01\" Mapper not supported!"); case 0x0d: throw new UnsupportedGameException("\"MM01\" Mapper not supported!"); case 0x0f: break; case 0x10: break; case 0x11: break; case 0x12: break; case 0x13: break; case 0x15: throw new UnsupportedGameException("\"MBC4\" Mapper not supported!"); case 0x16: throw new UnsupportedGameException("\"MBC4\" Mapper not supported!"); case 0x17: throw new UnsupportedGameException("\"MBC4\" Mapper not supported!"); case 0x19: break; case 0x1a: break; case 0x1b: break; case 0x1c: break; // rumble case 0x1d: break; // rumble case 0x1e: break; // rumble case 0x20: throw new UnsupportedGameException("\"MBC6\" Mapper not supported!"); case 0x22: throw new UnsupportedGameException("\"MBC7\" Mapper not supported!"); case 0xfc: throw new UnsupportedGameException("\"Pocket Camera\" Mapper not supported!"); case 0xfd: throw new UnsupportedGameException("\"Bandai TAMA5\" Mapper not supported!"); case 0xfe: break; case 0xff: break; default: throw new UnsupportedGameException($"Unknown mapper: {romdata[0x147]:x2}"); } } public IGPUMemoryAreas LockGPU() { var _vram = IntPtr.Zero; var _bgpal = IntPtr.Zero; var _sppal = IntPtr.Zero; var _oam = IntPtr.Zero; int unused = 0; if (!LibGambatte.gambatte_getmemoryarea(GambatteState, LibGambatte.MemoryAreas.vram, ref _vram, ref unused) || !LibGambatte.gambatte_getmemoryarea(GambatteState, LibGambatte.MemoryAreas.bgpal, ref _bgpal, ref unused) || !LibGambatte.gambatte_getmemoryarea(GambatteState, LibGambatte.MemoryAreas.sppal, ref _sppal, ref unused) || !LibGambatte.gambatte_getmemoryarea(GambatteState, LibGambatte.MemoryAreas.oam, ref _oam, ref unused)) { throw new InvalidOperationException("Unexpected error in gambatte_getmemoryarea"); } return new GPUMemoryAreas { Vram = _vram, Oam = _oam, Sppal = _sppal, Bgpal = _bgpal, }; } private class GPUMemoryAreas : 
IGPUMemoryAreas { public IntPtr Vram { get; init; } public IntPtr Oam { get; init; } public IntPtr Sppal { get; init; } public IntPtr Bgpal { get; init; } public void Dispose() {} } /// <summary> /// set up callback /// </summary> /// <param name="line">scanline. -1 = end of frame, -2 = RIGHT NOW</param> public void SetScanlineCallback(ScanlineCallback callback, int line) { if (GambatteState == IntPtr.Zero) { return; // not sure how this is being reached. tried the debugger... } endofframecallback = null; if (callback == null || line == -1 || line == -2) { scanlinecb = null; LibGambatte.gambatte_setscanlinecallback(GambatteState, null, 0); if (line == -1) { endofframecallback = callback; } else if (line == -2) { callback(LibGambatte.gambatte_cpuread(GambatteState, 0xff40)); } } else if (line >= 0 && line <= 153) { scanlinecb = () => callback(LibGambatte.gambatte_cpuread(GambatteState, 0xff40)); LibGambatte.gambatte_setscanlinecallback(GambatteState, scanlinecb, line); } else { throw new ArgumentOutOfRangeException(nameof(line), "line must be in [0, 153]"); } } private GambattePrinter printer; /// <summary> /// set up Printer callback /// </summary> public void SetPrinterCallback(PrinterCallback callback) { // Copying SetScanlineCallback for this check, I assume this is still a bug somewhere if (GambatteState == IntPtr.Zero) { return; // not sure how this is being reached. tried the debugger... 
} if (callback != null) { printer = new GambattePrinter(this, callback); LinkConnected = true; } else { LinkConnected = false; printer.Disconnect(); printer = null; } } private LibGambatte.ScanlineCallback scanlinecb; private ScanlineCallback endofframecallback; /// <summary> /// update gambatte core's internal colors /// </summary> public void ChangeDMGColors(int[] colors) { for (int i = 0; i < 12; i++) { LibGambatte.gambatte_setdmgpalettecolor(GambatteState, (LibGambatte.PalType)(i / 4), (uint)i % 4, (uint)colors[i]); } } public void SetCGBColors(GBColors.ColorType type) { int[] lut = GBColors.GetLut(type); LibGambatte.gambatte_setcgbpalette(GambatteState, lut); } } }
30.888889
158
0.655832
[ "MIT" ]
david-a-perez/BizHawk
src/BizHawk.Emulation.Cores/Consoles/Nintendo/Gameboy/Gambatte.cs
15,570
C#
 namespace Shared { public enum ClientPacketTypes { CMSG_INIT_ENCRYPTED_RSA = 1, CMSG_INIT_ENCRYPTED_AES } }
13.8
36
0.644928
[ "MIT" ]
Arcidev/Arci.Networking
Shared/ClientPacketTypes.cs
140
C#
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. using System; using System.Collections.Concurrent; using System.Collections.Generic; using System.Diagnostics.ContractsLight; using System.Linq; using System.Threading; using System.Threading.Tasks; using BuildXL.Engine.Cache.Fingerprints; using BuildXL.Ipc; using BuildXL.Ipc.Interfaces; using BuildXL.Pips; using BuildXL.Pips.Builders; using BuildXL.Pips.Operations; using BuildXL.Scheduler.Fingerprints; using BuildXL.Scheduler.Performance; using BuildXL.Tracing; using BuildXL.Utilities; using BuildXL.Utilities.Collections; using BuildXL.Utilities.Configuration; using BuildXL.Utilities.Instrumentation.Common; using BuildXL.Utilities.Tracing; using static BuildXL.Utilities.FormattableStringEx; using Logger = BuildXL.Scheduler.Tracing.Logger; #pragma warning disable 1591 // disabling warning about missing API documentation; TODO: Remove this line and write documentation! namespace BuildXL.Scheduler.Graph { /// <summary> /// Defines graph of pips and allows adding Pips with validation. /// </summary> public sealed partial class PipGraph { public class Builder : PipGraphBase, IPipGraphBuilder { /// <summary> /// Lazily initialized BuildXL server IPC moniker. /// </summary> private readonly Lazy<IIpcMoniker> m_lazyApiServerMoniker; /// <summary> /// Creates a new moniker if it hasn't already been created; otherwise returns the previously created one. 
/// </summary> public IIpcMoniker GetApiServerMoniker() => m_lazyApiServerMoniker.Value; private static readonly SortedReadOnlyArray<FileArtifact, OrdinalFileArtifactComparer> s_emptySealContents = SortedReadOnlyArray<FileArtifact, OrdinalFileArtifactComparer>.CloneAndSort( CollectionUtilities.EmptyArray<FileArtifact>(), OrdinalFileArtifactComparer.Instance); private IScheduleConfiguration ScheduleConfiguration => m_configuration.Schedule; private readonly NodeId m_dummyHashSourceFileNode; private readonly IConfiguration m_configuration; private WindowsOsDefaults m_windowsOsDefaults; private MacOsDefaults m_macOsDefaults; private readonly object m_osDefaultLock = new object(); #region State /// <summary> /// Set of those file artifacts (where <see cref="FileArtifact.IsOutputFile" />) for which there exists a consuming pip. /// Typically an artifact can be used as input multiple times, but this is disallowed for artifacts which are re-written /// (only the re-writer can consume the artifacts). /// </summary> /// <remarks> /// TODO: This may go away entirely when we support swapping in prior versions of an artifact (with optimistic locks) as in /// CloudMake. /// Maintained by <see cref="AddInput" /> and <see cref="AddOutput" /> /// </remarks> private readonly ConcurrentBigSet<FileArtifact> m_outputFileArtifactsUsedAsInputs; /// <summary> /// A multi-value map from service PipId to its client PipIds. /// </summary> private readonly ConcurrentBigMap<PipId, ConcurrentBigSet<PipId>> m_servicePipClients; /// <summary> /// A mapping of Service PipId to corresponding Shutdown PipId (<see cref="BuildXL.Pips.Operations.Process.ShutdownProcessPipId"/>). /// </summary> private readonly ConcurrentBigMap<PipId, ServiceInfo> m_servicePipToServiceInfoMap; /// <summary> /// Set of temporary outputs. 
/// </summary> private readonly ConcurrentBigSet<FileArtifact> m_temporaryOutputFiles; /// <summary> /// A mapping of untracked paths or scopes to corresponding PipIds /// </summary> private readonly ConcurrentBigMap<AbsolutePath, PipId> m_untrackedPathsAndScopes; /// <summary> /// A mapping of source file to a corresponding PipId. /// A source file can be declared by multiple process pips, so the first producing pip processed is arbitrarily saved. /// </summary> private readonly ConcurrentBigMap<AbsolutePath, PipId> m_sourceFiles; /// <summary> /// Logging context used for error reporting during PipGraph construction. /// </summary> private LoggingContext LoggingContext { get; } /// <summary> /// Logger instance /// </summary> private Logger Logger { get; } /// <summary> /// Mutable version of the Dataflow graph. This will be null when the graph is cached. /// </summary> public readonly MutableDirectedGraph MutableDataflowGraph; /// <summary> /// Manages locking of paths and pips for pip graph and scheduler /// </summary> internal readonly LockManager LockManager; /// <summary> /// The names of temp environment variables /// </summary> private readonly HashSet<StringId> m_tempEnvironmentVariables; /// <summary> /// The immutable pip graph. This is stored to allow Build() to be called multiple times and return the same instance /// </summary> private PipGraph m_immutablePipGraph; /// <summary> /// Value indicating if the constructed graph is valid. /// </summary> private bool m_isValidConstructedGraph = true; /// <inheritdoc /> [Pure] public bool IsImmutable => m_immutablePipGraph != null || !m_isValidConstructedGraph; private readonly PipGraphStaticFingerprints m_pipStaticFingerprints = new PipGraphStaticFingerprints(); #endregion State /// <summary> /// Mapping of path names to <see cref="SealedDirectoryTable" />s representing the full / partial /// <see cref="SealDirectory" /> /// pips rooted at particular paths. 
/// </summary> /// <remarks> /// Enumerating these mappings can be used to find seals containing particular files. /// Internaly exposes the seal directory table which is used by <see cref="PatchablePipGraph"/> to /// mark 'start' and 'finish' of graph patching. /// </remarks> internal SealedDirectoryTable SealDirectoryTable { get; } private readonly CounterCollection<PipGraphCounter> m_counters = new CounterCollection<PipGraphCounter>(); private bool ShouldComputePipStaticFingerprints => ScheduleConfiguration.ComputePipStaticFingerprints; private readonly PipStaticFingerprinter m_pipStaticFingerprinter; /// <summary> /// Class constructor /// </summary> public Builder( PipTable pipTable, PipExecutionContext context, Logger logger, LoggingContext loggingContext, IConfiguration configuration, SemanticPathExpander semanticPathExpander, string fingerprintSalt = null, DirectoryMembershipFingerprinterRuleSet directoryMembershipFingerprinterRules = null) : base(pipTable, context, semanticPathExpander, new MutableDirectedGraph()) { MutableDataflowGraph = (MutableDirectedGraph)DataflowGraph; Logger = logger; LoggingContext = loggingContext; m_tempEnvironmentVariables = new HashSet<StringId>( BuildParameters .DisallowedTempVariables .Select(tmpVar => StringId.Create(Context.StringTable, tmpVar))); SealDirectoryTable = new SealedDirectoryTable(Context.PathTable); m_outputFileArtifactsUsedAsInputs = new ConcurrentBigSet<FileArtifact>(); m_servicePipClients = new ConcurrentBigMap<PipId, ConcurrentBigSet<PipId>>(); m_servicePipToServiceInfoMap = new ConcurrentBigMap<PipId, ServiceInfo>(); m_temporaryOutputFiles = new ConcurrentBigSet<FileArtifact>(); m_untrackedPathsAndScopes = new ConcurrentBigMap<AbsolutePath, PipId>(); m_sourceFiles = new ConcurrentBigMap<AbsolutePath, PipId>(); m_lazyApiServerMoniker = configuration.Schedule.UseFixedApiServerMoniker ? 
Lazy.Create(() => IpcFactory.GetFixedMoniker()) : Lazy.Create(() => IpcFactory.GetProvider().CreateNewMoniker()); LockManager = new LockManager(); // Prime the dummy provenance since its creation requires adding a string to the TokenText table, which gets frozen after scheduling // is complete. GetDummyProvenance may be called during execution (after the schedule phase) GetDummyProvenance(); m_configuration = configuration; var extraFingerprintSalts = new ExtraFingerprintSalts( configuration, PipFingerprintingVersion.TwoPhaseV2, fingerprintSalt ?? string.Empty, searchPathToolsHash: directoryMembershipFingerprinterRules?.ComputeSearchPathToolsHash()); m_pipStaticFingerprinter = new PipStaticFingerprinter( context.PathTable, GetSealDirectoryFingerprint, GetDirectoryProducerFingerprint, extraFingerprintSalts, semanticPathExpander) { FingerprintTextEnabled = configuration.Schedule.LogPipStaticFingerprintTexts }; m_dummyHashSourceFileNode = PipId.DummyHashSourceFilePipId.ToNodeId(); } /// <summary> /// Marks the pip graph as complete and subsequently immutable. /// Following this, <see cref="IsImmutable"/> is set. /// </summary> public PipGraph Build() { using (LockManager.AcquireGlobalExclusiveLock()) { MutableDataflowGraph.Seal(); if (!IsImmutable) { StringId apiServerMonikerId = m_lazyApiServerMoniker.IsValueCreated || m_servicePipToServiceInfoMap.Count > 0 ? 
StringId.Create(Context.StringTable, m_lazyApiServerMoniker.Value.Id) : StringId.Invalid; var semistableProcessFingerprint = PerformanceDataUtilities.ComputeGraphSemistableFingerprint(LoggingContext, PipTable, Context.PathTable); var pipGraphState = new SerializedState( values: Values, specFiles: SpecFiles, modules: Modules, pipProducers: PipProducers, opaqueDirectoryProducers: OutputDirectoryProducers, outputDirectoryRoots: OutputDirectoryRoots, compositeSharedOpaqueProducers: CompositeOutputDirectoryProducers, sourceSealedDirectoryRoots: SourceSealedDirectoryRoots, temporaryPaths: TemporaryPaths, sealDirectoryNodes: SealDirectoryTable.FinishAndMarkReadOnly(), rewritingPips: RewritingPips, rewrittenPips: RewrittenPips, latestWriteCountsByPath: LatestWriteCountsByPath, servicePipClients: m_servicePipClients, apiServerMoniker: apiServerMonikerId, // If there are N paths in the path table (including AbsolutePath.Invalid), the path table count will be N and the value // of the last added absolute path will be N - 1. Therefore, the max absolute path should be N - 1. // Capture this here so we know that all paths < PathTable.Count are valid to use with serialized pip graph. 
maxAbsolutePath: Context.PathTable.Count - 1, semistableProcessFingerprint: semistableProcessFingerprint, pipStaticFingerprints: m_pipStaticFingerprints); m_immutablePipGraph = new PipGraph( pipGraphState, MutableDataflowGraph, PipTable, Context, SemanticPathExpander); if (!ScheduleConfiguration.UnsafeDisableGraphPostValidation && !IsValidGraph()) { m_isValidConstructedGraph = false; return null; } m_counters.LogAsStatistics("PipGraph.Builder", LoggingContext); } } return m_immutablePipGraph; } /// <inheritdoc /> public override NodeId GetSealedDirectoryNode(DirectoryArtifact directoryArtifact) { SealDirectoryTable.TryGetSealForDirectoryArtifact(directoryArtifact, out PipId pipId); return pipId.ToNodeId(); } /// <inheritdoc /> public bool ApplyCurrentOsDefaults(ProcessBuilder processBuilder) { if (OperatingSystemHelper.IsUnixOS) { if (m_macOsDefaults == null) { lock(m_osDefaultLock) { if (m_macOsDefaults == null) { m_macOsDefaults = new MacOsDefaults(Context.PathTable, this); } } } return m_macOsDefaults.ProcessDefaults(processBuilder); } else { if (m_windowsOsDefaults == null) { lock(m_osDefaultLock) { if (m_windowsOsDefaults == null) { m_windowsOsDefaults = new WindowsOsDefaults(Context.PathTable); } } } return m_windowsOsDefaults.ProcessDefaults(processBuilder); } } #region Validation private bool IsValidGraph() { Contract.Requires(LockManager.HasGlobalExclusiveAccess); using (m_counters.StartStopwatch(PipGraphCounter.GraphPostValidation)) { return ValidateSealDirectoryConstruction() && ValidateTempPaths(); } } /// <summary> /// Validates no pip has declared artifacts in any declared temp directories. /// Temp directories are guaranteed cleaned before pips run, and non-deterministically cleaned after runs, so it is not safe to place anything required within temps. 
/// </summary>
private bool ValidateTempPaths()
{
    // Make sure no items will be added to graph so it's safe to iterate over concurrent set
    Contract.Requires(IsImmutable);

    var success = true;
    if (!ValidateTempPathsHelper(out var invalidTempPath, out var invalidTempProducerNode, out var invalidArtifactPath, out var invalidArtifactProducerNode))
    {
        success = false;
        LogTempValidationError(invalidTempPath, invalidTempProducerNode, invalidArtifactPath, invalidArtifactProducerNode);
    }

    return success;
}

/// <summary>
/// Scans every declared temp directory for overlapping declared artifacts.
/// Returns true (all out-params set to Invalid) when no overlap exists; on the first overlap found,
/// returns false with the offending temp path/producer and the conflicting artifact path/producer.
/// </summary>
private bool ValidateTempPathsHelper(out AbsolutePath invalidTempPath, out NodeId invalidTempProducerNode, out AbsolutePath invalidArtifactPath, out NodeId invalidArtifactProducerNode)
{
    foreach (var kvp in TemporaryPaths)
    {
        invalidTempPath = kvp.Key;
        invalidTempProducerNode = kvp.Value.ToNodeId();

        // Search for declared artifacts from the temp path down to make sure no pip declared an artifact within a temp directory.
        // The temp path itself is concatenated in because it must be checked too, not just its descendants.
        foreach (var childPathId in Context.PathTable.EnumerateHierarchyTopDown(invalidTempPath.Value).Concat(new HierarchicalNameId[] { invalidTempPath.Value }))
        {
            var childPath = new AbsolutePath(childPathId);

            // Source files are tracked independently from other build artifacts to make sure the process pip that declared a dependency on the source file is correctly associated
            // Otherwise, it's possible that an intermediate HashSourceFile pip is reported back as the associated pip
            if (m_sourceFiles.ContainsKey(childPath))
            {
                invalidArtifactPath = childPath;
                var declaringNodeFound = m_sourceFiles.TryGetValue(childPath, out var invalidArtifactProducerPipId);
                invalidArtifactProducerNode = declaringNodeFound ? invalidArtifactProducerPipId.ToNodeId() : NodeId.Invalid;
                return false;
            }
            // If there is a producer for the temp's child path, then a pip in the build expects that path to exist as a build artifact, which makes this an invalid temp path
            else if (TryFindProducerForPath(childPath, out invalidArtifactProducerNode) && !m_temporaryOutputFiles.Contains(FileArtifact.CreateOutputFile(childPath)) /* Temp files paths will return a producing node, so if the child path itself is a temp file, just skip it */)
            {
                invalidArtifactPath = childPath;
                return false;
            }
        }
    }

    // No overlap found anywhere: report "nothing invalid".
    invalidTempPath = AbsolutePath.Invalid;
    invalidTempProducerNode = NodeId.Invalid;
    invalidArtifactPath = AbsolutePath.Invalid;
    invalidArtifactProducerNode = NodeId.Invalid;
    return true;
}

/// <summary>
/// Logs a descriptive error when temp validation fails.
/// Producer descriptions/locations degrade gracefully to "Node not found" / default when a node is invalid.
/// </summary>
private void LogTempValidationError(AbsolutePath invalidTempPath, NodeId invalidTempProducerNode, AbsolutePath invalidArtifactPath, NodeId invalidArtifactProducerNode)
{
    // Overlapping paths found, log a descriptive error
    string artifactProducerNode = "Node not found";
    Location artifactProducerLocation = default;
    if (invalidArtifactProducerNode != NodeId.Invalid)
    {
        var artifactProducerPipId = invalidArtifactProducerNode.ToPipId();
        artifactProducerNode = m_immutablePipGraph.GetPipFromPipId(artifactProducerPipId).GetDescription(Context);
        artifactProducerLocation = m_immutablePipGraph.GetPipFromPipId(artifactProducerPipId).Provenance.Token.ToLogLocation(Context.PathTable);
    }

    var fullArtifactPath = invalidArtifactPath.ToString(Context.PathTable);

    string tempProducerNode = "Node not found";
    Location tempProducerLocation = default;
    if (invalidTempProducerNode != NodeId.Invalid)
    {
        var tempProducerPipId = invalidTempProducerNode.ToPipId();
        tempProducerNode = m_immutablePipGraph.GetPipFromPipId(tempProducerPipId).GetDescription(Context);
        tempProducerLocation = m_immutablePipGraph.GetPipFromPipId(tempProducerPipId).Provenance.Token.ToLogLocation(Context.PathTable);
    }

    var fullTempPath = invalidTempPath.ToString(Context.PathTable);

    Logger.Log.InvalidGraphSinceArtifactPathOverlapsTempPath(LoggingContext, tempProducerLocation, fullTempPath, tempProducerNode, artifactProducerLocation, fullArtifactPath, artifactProducerNode);
}

/// <summary>
/// Given a path, tries to find a node in the build that produces that path as a build artifact.
/// </summary>
/// <param name="path">
/// <see cref="AbsolutePath"/> of the path.
/// </param>
/// <param name="producerNode">
/// If a producer node is found, the producer's Node ID; otherwise, <see cref="NodeId.Invalid"/>.
/// </param>
/// <returns>
/// If a producer node is found, true; otherwise, false.
/// </returns>
private bool TryFindProducerForPath(AbsolutePath path, out NodeId producerNode)
{
    producerNode = TryGetOriginalProducerForPath(path);

    // Try to look for shared opaque directory producer which are tracked separately.
    // NOTE(review): if multiple sealed directories match, the last enumerated one wins — confirm intentional.
    if (producerNode == NodeId.Invalid)
    {
        foreach (var kvp in SealDirectoryTable.GetSealedDirectories(path))
        {
            producerNode = kvp.Value.ToNodeId();
        }
    }

    return producerNode != NodeId.Invalid;
}

/// <summary>
/// Validates every seal directory in the graph, dispatching per seal kind, in parallel
/// (bounded by ScheduleConfiguration.MaxProcesses). Returns true only if no check failed.
/// </summary>
private bool ValidateSealDirectoryConstruction()
{
    Contract.Requires(LockManager.HasGlobalExclusiveAccess);

    // Visited sets are shared across parallel workers so overlapping subtrees are only walked once per category.
    var visitedOutputDirectories = new ConcurrentDictionary<HierarchicalNameId, bool>();
    var visitedSourceSealDirectories = new ConcurrentDictionary<HierarchicalNameId, bool>();
    var visitedFullSealDirectories = new ConcurrentDictionary<HierarchicalNameId, bool>();
    var queuePool = new ObjectPool<Queue<HierarchicalNameId>>(
        () => new Queue<HierarchicalNameId>(),
        queue => queue.Clear());
    int errorCount = 0;

    Parallel.ForEach(
        m_immutablePipGraph.GetSealDirectoriesByKind(PipQueryContext.PipGraphPostValidation, kind => true).ToList(),
        new ParallelOptions
        {
            MaxDegreeOfParallelism = ScheduleConfiguration.MaxProcesses,
        },
        sealDirectory =>
        {
            bool isValid;
            switch (sealDirectory.Kind)
            {
                case SealDirectoryKind.Opaque:
                    isValid = EnsureOutputDirectoryDoesNotClashWithOtherArtifactsAndDoesNotHaveChild(queuePool, visitedOutputDirectories, sealDirectory);
                    break;
                case SealDirectoryKind.SharedOpaque:
                    isValid = EnsureSharedOpaqueDirectoriesHaveNoDisallowedChildren(queuePool, visitedOutputDirectories, sealDirectory);
                    break;
                case SealDirectoryKind.Full:
                    isValid = EnsureFullSealDirectoriesCoverAllPathsUnderneath(queuePool, visitedFullSealDirectories, sealDirectory);
                    break;
                case SealDirectoryKind.SourceAllDirectories:
                case SealDirectoryKind.SourceTopDirectoryOnly:
                    isValid = EnsureSourceSealDirectoryHasNoOutputs(queuePool, visitedSourceSealDirectories, sealDirectory);
                    break;
                default:
                    // Other seal kinds (e.g. partial) have no post-validation.
                    return;
            }

            if (!isValid)
            {
                // Interlocked because workers run concurrently.
                Interlocked.Increment(ref errorCount);
            }
        });

    return errorCount == 0;
}

/// <summary>
/// A shared opaque directory is not allowed to contain:
/// - exclusive (non-shared) opaque directories
/// - fully sealed directories
/// </summary>
private bool EnsureSharedOpaqueDirectoriesHaveNoDisallowedChildren(
    ObjectPool<Queue<HierarchicalNameId>> queuePool,
    ConcurrentDictionary<HierarchicalNameId, bool> visited,
    SealDirectory sealDirectory)
{
    Contract.Requires(queuePool != null);
    Contract.Requires(visited != null);
    Contract.Requires(sealDirectory != null);
    Contract.Requires(sealDirectory.Kind == SealDirectoryKind.SharedOpaque);

    // In case of a composite shared opaque, restrictions are checked for its elements
    // already
    if (sealDirectory.IsComposite)
    {
        return true;
    }

    int errorCount = 0;
    var directory = sealDirectory.Directory;
    var outputDirectoryProducerNode = OutputDirectoryProducers[directory];
    var outputDirectoryProducer = PipTable.HydratePip(
        outputDirectoryProducerNode.ToPipId(),
        PipQueryContext.PipGraphPostValidation);
    var outputDirectoryProducerProvenance = outputDirectoryProducer.Provenance ?? GetDummyProvenance();

    // NOTE(review): directoryPath appears unused below — candidate for removal.
    AbsolutePath directoryPath = directory.Path;

    // Another worker already covered this root; its result is accounted for there.
    if (!visited.TryAdd(directory.Path.Value, true))
    {
        return true;
    }

    errorCount = EnsureSharedOpaqueDirectoriesHaveNoExclusiveOpaquesNorFullySealedDirectories(
        queuePool,
        visited,
        directory,
        outputDirectoryProducerProvenance,
        outputDirectoryProducer,
        errorCount);

    return errorCount == 0;
}

/// <summary>
/// Under a shared opaque we don't allow:
/// - exclusive opaque directory (their delete-all-before-run semantics doesn't play well with sharing)
/// - fully sealed directories (nothing under a fully sealed is supposed to change)
/// </summary>
/// <returns>Number of disallowed artifacts found under the given directory</returns>
private int EnsureSharedOpaqueDirectoriesHaveNoExclusiveOpaquesNorFullySealedDirectories(
    ObjectPool<Queue<HierarchicalNameId>> queuePool,
    ConcurrentDictionary<HierarchicalNameId, bool> visited,
    DirectoryArtifact directory,
    PipProvenance outputDirectoryProducerProvenance,
    Pip outputDirectoryProducer,
    int errorCount)
{
    // The root itself counts; a violating root short-circuits the whole walk.
    if (PathIsExclusiveOpaqueOrFullySealed(directory.Path, directory, outputDirectoryProducerProvenance, outputDirectoryProducer))
    {
        return 1;
    }

    // Breadth-first walk of the path-table subtree rooted at the shared opaque.
    using (var wrappedQueue = queuePool.GetInstance())
    {
        var queue = wrappedQueue.Instance;
        queue.Enqueue(directory.Path.Value);

        while (queue.Count > 0)
        {
            var current = queue.Dequeue();

            foreach (var child in Context.PathTable.EnumerateImmediateChildren(current))
            {
                var childAsPath = new AbsolutePath(child);
                var childError = PathIsExclusiveOpaqueOrFullySealed(childAsPath, directory, outputDirectoryProducerProvenance, outputDirectoryProducer);

                // Only descend below children that are themselves clean and not yet visited.
                if (!childError && visited.TryAdd(child, true))
                {
                    queue.Enqueue(child);
                }

                if (childError)
                {
                    ++errorCount;
                }
            }
        }
    }

    return errorCount;
}

/// <summary>
/// Returns true (and logs the corresponding graph-construction error) when <paramref name="path"/> is
/// an exclusive opaque directory or a fully sealed directory; partial seals are allowed.
/// </summary>
private bool PathIsExclusiveOpaqueOrFullySealed(AbsolutePath path, DirectoryArtifact directory, PipProvenance outputDirectoryProducerProvenance, Pip outputDirectoryProducer)
{
    foreach (var sealedDirectoryAndProducer in SealDirectoryTable.GetSealedDirectories(path))
    {
        var directoryArtifact = sealedDirectoryAndProducer.Key;
        // Exclusive opaque directories are blocked
        if (directoryArtifact.IsOutputDirectory() && !directoryArtifact.IsSharedOpaque)
        {
            LogInvalidGraphSinceSharedOpaqueDirectoryContainsExclusiveOpaqueDirectory(sealedDirectoryAndProducer, directory, outputDirectoryProducerProvenance, outputDirectoryProducer, path);
            return true;
        }

        // Fully sealed directories are blocked (partial sealed are ok)
        if (PipTable.GetSealDirectoryKind(sealedDirectoryAndProducer.Value) == SealDirectoryKind.Full)
        {
            LogInvalidGraphSinceOutputDirectoryContainsOrCoincidesSealedDirectory(sealedDirectoryAndProducer, directory, outputDirectoryProducerProvenance, outputDirectoryProducer);
            return true;
        }
    }

    return false;
}

/// <summary>
/// Logs the "output directory coincides with sealed directory" or "contains sealed directory" error,
/// choosing the event by whether the two paths are identical.
/// </summary>
private void LogInvalidGraphSinceOutputDirectoryContainsOrCoincidesSealedDirectory(
    KeyValuePair<DirectoryArtifact, PipId> sealedDirectoryAndProducer,
    DirectoryArtifact outputDirectory,
    PipProvenance outputDirectoryProducerProvenance,
    Pip outputDirectoryProducer)
{
    // Prefer the output-directory producer if one is registered; otherwise fall back to the seal pip itself.
    if (!OutputDirectoryProducers.TryGetValue(sealedDirectoryAndProducer.Key, out var producerChildNode))
    {
        producerChildNode = sealedDirectoryAndProducer.Value.ToNodeId();
    }

    var sealedDirectoryProducer = PipTable.HydratePip(
        producerChildNode.ToPipId(),
        PipQueryContext.PipGraphPostValidation);

    if (sealedDirectoryAndProducer.Key.Path == outputDirectory.Path)
    {
        Logger.Log.ScheduleFailInvalidGraphSinceOutputDirectoryCoincidesSealedDirectory(
            LoggingContext,
            outputDirectoryProducerProvenance.Token.Path.ToString(Context.PathTable),
            outputDirectoryProducerProvenance.Token.Line,
            outputDirectoryProducerProvenance.Token.Position,
            outputDirectory.Path.ToString(Context.PathTable),
            outputDirectoryProducer.GetDescription(Context),
            sealedDirectoryAndProducer.Key.Path.ToString(Context.PathTable),
            sealedDirectoryProducer.GetDescription(Context));
    }
    else
    {
        Logger.Log.ScheduleFailInvalidGraphSinceOutputDirectoryContainsSealedDirectory(
            LoggingContext,
            outputDirectoryProducerProvenance.Token.Path.ToString(Context.PathTable),
            outputDirectoryProducerProvenance.Token.Line,
            outputDirectoryProducerProvenance.Token.Position,
            outputDirectory.Path.ToString(Context.PathTable),
            outputDirectoryProducer.GetDescription(Context),
            sealedDirectoryAndProducer.Key.Path.ToString(Context.PathTable),
            sealedDirectoryProducer.GetDescription(Context));
    }
}

/// <summary>
/// Logs the error for a shared opaque directory containing an exclusive opaque directory.
/// </summary>
private void LogInvalidGraphSinceSharedOpaqueDirectoryContainsExclusiveOpaqueDirectory(
    KeyValuePair<DirectoryArtifact, PipId> exclusiveOpaqueDirectoryAndProducer,
    DirectoryArtifact directory,
    PipProvenance sharedOpaqueProducerProvenance,
    Pip sharedOpaqueProducer,
    AbsolutePath childAsPath)
{
    // Error because the shared opaque directory contains an exclusive opaque directory.
    if (!OutputDirectoryProducers.TryGetValue(exclusiveOpaqueDirectoryAndProducer.Key, out var exclusiveOpaqueProducerChildNode))
    {
        exclusiveOpaqueProducerChildNode = exclusiveOpaqueDirectoryAndProducer.Value.ToNodeId();
    }

    var exclusiveOpaqueProducer = PipTable.HydratePip(
        exclusiveOpaqueProducerChildNode.ToPipId(),
        PipQueryContext.PipGraphPostValidation);

    Logger.Log.ScheduleFailInvalidGraphSinceSharedOpaqueDirectoryContainsExclusiveOpaqueDirectory(
        LoggingContext,
        sharedOpaqueProducerProvenance.Token.Path.ToString(Context.PathTable),
        sharedOpaqueProducerProvenance.Token.Line,
        sharedOpaqueProducerProvenance.Token.Position,
        directory.Path.ToString(Context.PathTable),
        sharedOpaqueProducer.GetDescription(Context),
        childAsPath.ToString(Context.PathTable),
        exclusiveOpaqueProducer.GetDescription(Context));
}

/// <summary>
/// Validates an exclusive opaque directory: it must not coincide with or contain any declared
/// file artifact (source or output of another pip) or any other sealed directory.
/// </summary>
private bool EnsureOutputDirectoryDoesNotClashWithOtherArtifactsAndDoesNotHaveChild(
    ObjectPool<Queue<HierarchicalNameId>> queuePool,
    ConcurrentDictionary<HierarchicalNameId, bool> visited,
    SealDirectory sealDirectory)
{
    Contract.Requires(queuePool != null);
    Contract.Requires(visited != null);
    Contract.Requires(sealDirectory != null);
    Contract.Requires(sealDirectory.Kind == SealDirectoryKind.Opaque);

    int errorCount = 0;
    var directory = sealDirectory.Directory;
    var outputDirectoryProducerNode = OutputDirectoryProducers[directory];
    var outputDirectoryProducer = PipTable.HydratePip(
        outputDirectoryProducerNode.ToPipId(),
        PipQueryContext.PipGraphPostValidation);
    var outputDirectoryProducerProvenance = outputDirectoryProducer.Provenance ?? GetDummyProvenance();

    // Breadth-first walk over the path-table subtree rooted at the opaque directory.
    using (var wrappedQueue = queuePool.GetInstance())
    {
        var queue = wrappedQueue.Instance;

        if (visited.TryAdd(directory.Path.Value, true))
        {
            queue.Enqueue(directory.Path.Value);
        }

        while (queue.Count > 0)
        {
            var current = queue.Dequeue();
            bool currentError = false;
            var currentAsPath = new AbsolutePath(current);
            NodeId producerChildNode;
            int latestRewriteCount;

            if (LatestWriteCountsByPath.TryGetValue(currentAsPath, out latestRewriteCount))
            {
                for (int i = 0; i <= latestRewriteCount; ++i)
                {
                    // Validate against all possible rewrite counts because outputs with different rewrite counts
                    // are generated by different pips.
                    var currentAsFile = new FileArtifact(currentAsPath, i);

                    if (PipProducers.TryGetValue(currentAsFile, out producerChildNode))
                    {
                        if (currentAsFile.Path == directory.Path)
                        {
                            // Error because output directory coincides with a file.
                            if (currentAsFile.IsSourceFile)
                            {
                                Logger.Log.ScheduleFailInvalidGraphSinceOutputDirectoryCoincidesSourceFile(
                                    LoggingContext,
                                    outputDirectoryProducerProvenance.Token.Path.ToString(Context.PathTable),
                                    outputDirectoryProducerProvenance.Token.Line,
                                    outputDirectoryProducerProvenance.Token.Position,
                                    directory.Path.ToString(Context.PathTable),
                                    outputDirectoryProducer.GetDescription(Context),
                                    currentAsPath.ToString(Context.PathTable));
                            }
                            else
                            {
                                // Error because the output directory coincides with an output file produced by some pip.
                                var outputFileProducer = PipTable.HydratePip(
                                    producerChildNode.ToPipId(),
                                    PipQueryContext.PipGraphPostValidation);

                                Logger.Log.ScheduleFailInvalidGraphSinceOutputDirectoryCoincidesOutputFile(
                                    LoggingContext,
                                    outputDirectoryProducerProvenance.Token.Path.ToString(Context.PathTable),
                                    outputDirectoryProducerProvenance.Token.Line,
                                    outputDirectoryProducerProvenance.Token.Position,
                                    directory.Path.ToString(Context.PathTable),
                                    outputDirectoryProducer.GetDescription(Context),
                                    currentAsPath.ToString(Context.PathTable),
                                    outputFileProducer.GetDescription(Context));
                            }

                            currentError = true;
                            break;
                        }

                        if (currentAsFile.IsSourceFile)
                        {
                            // Error because the output directory contains a source file.
                            Logger.Log.ScheduleFailInvalidGraphSinceOutputDirectoryContainsSourceFile(
                                LoggingContext,
                                outputDirectoryProducerProvenance.Token.Path.ToString(Context.PathTable),
                                outputDirectoryProducerProvenance.Token.Line,
                                outputDirectoryProducerProvenance.Token.Position,
                                directory.Path.ToString(Context.PathTable),
                                outputDirectoryProducer.GetDescription(Context),
                                currentAsPath.ToString(Context.PathTable));
                            currentError = true;
                        }
                        else if (producerChildNode != outputDirectoryProducerNode)
                        {
                            // Error because the output directory contains an output, but the output file has a different
                            // producer from the producer of the output directory itself.
                            var outputFileProducer = PipTable.HydratePip(
                                producerChildNode.ToPipId(),
                                PipQueryContext.PipGraphPostValidation);

                            Logger.Log.ScheduleFailInvalidGraphSinceOutputDirectoryContainsOutputFile(
                                LoggingContext,
                                outputDirectoryProducerProvenance.Token.Path.ToString(Context.PathTable),
                                outputDirectoryProducerProvenance.Token.Line,
                                outputDirectoryProducerProvenance.Token.Position,
                                directory.Path.ToString(Context.PathTable),
                                outputDirectoryProducer.GetDescription(Context),
                                currentAsPath.ToString(Context.PathTable),
                                outputFileProducer.GetDescription(Context));
                            currentError = true;
                        }
                    }
                }
            }

            // Any other sealed directory coinciding with or nested in this opaque is an error too.
            foreach (var sealedDirectoryAndProducer in SealDirectoryTable.GetSealedDirectories(currentAsPath))
            {
                if (sealedDirectoryAndProducer.Key != directory)
                {
                    LogInvalidGraphSinceOutputDirectoryContainsOrCoincidesSealedDirectory(sealedDirectoryAndProducer, directory, outputDirectoryProducerProvenance, outputDirectoryProducer);
                    currentError = true;
                }
            }

            // Only descend below clean paths; an erroneous path is counted and its subtree skipped.
            if (!currentError)
            {
                foreach (var child in Context.PathTable.EnumerateImmediateChildren(current))
                {
                    if (visited.TryAdd(child, true))
                    {
                        queue.Enqueue(child);
                    }
                }
            }

            if (currentError)
            {
                ++errorCount;
            }
        }
    }

    return errorCount == 0;
}

/// <summary>
/// Validates a source seal directory: it must not coincide with a source file, and must not
/// contain (or coincide with) any output file or output directory.
/// </summary>
private bool EnsureSourceSealDirectoryHasNoOutputs(
    ObjectPool<Queue<HierarchicalNameId>> queuePool,
    ConcurrentDictionary<HierarchicalNameId, bool> visited,
    SealDirectory sealDirectory)
{
    Contract.Requires(queuePool != null);
    Contract.Requires(visited != null);
    Contract.Requires(sealDirectory != null);
    Contract.Requires(sealDirectory.IsSealSourceDirectory);

    int errorCount = 0;
    var directory = sealDirectory.Directory;
    var sealDirectoryProvenance = sealDirectory.Provenance ?? GetDummyProvenance();

    using (var wrappedQueue = queuePool.GetInstance())
    {
        var queue = wrappedQueue.Instance;

        if (visited.TryAdd(directory.Path.Value, true))
        {
            queue.Enqueue(directory.Path.Value);
        }

        while (queue.Count > 0)
        {
            var current = queue.Dequeue();
            bool currentError = false;
            var currentAsPath = new AbsolutePath(current);
            NodeId producerChildNode;
            int latestRewriteCount;

            if (LatestWriteCountsByPath.TryGetValue(currentAsPath, out latestRewriteCount))
            {
                var currentAsFile = new FileArtifact(currentAsPath, latestRewriteCount);

                if (currentAsFile.IsSourceFile)
                {
                    // A source file exactly at the seal root is an error; source files below it are fine.
                    if (currentAsFile.Path == directory.Path)
                    {
                        Logger.Log.ScheduleFailInvalidGraphSinceSourceSealedDirectoryCoincidesSourceFile(
                            LoggingContext,
                            sealDirectoryProvenance.Token.Path.ToString(Context.PathTable),
                            sealDirectoryProvenance.Token.Line,
                            sealDirectoryProvenance.Token.Position,
                            directory.Path.ToString(Context.PathTable),
                            currentAsPath.ToString(Context.PathTable));
                        currentError = true;
                    }
                }
                else
                {
                    // Any output file under (or at) a source seal is an error.
                    LogScheduleFailInvalidGraphSinceSourceSealedDirectoryCoincidesOrContainsOutputFile(directory, sealDirectoryProvenance, currentAsFile);
                    currentError = true;
                }
            }

            foreach (var sealedDirectoryAndProducer in SealDirectoryTable.GetSealedDirectories(currentAsPath))
            {
                // Error because the output directory contains a sealed directory, that can potentially be another output directory.
                if (OutputDirectoryProducers.TryGetValue(sealedDirectoryAndProducer.Key, out producerChildNode))
                {
                    var sealedDirectoryProducer = PipTable.HydratePip(
                        producerChildNode.ToPipId(),
                        PipQueryContext.PipGraphPostValidation);

                    if (sealedDirectoryAndProducer.Key.Path == directory.Path)
                    {
                        Logger.Log.ScheduleFailInvalidGraphSinceOutputDirectoryCoincidesSealedDirectory(
                            LoggingContext,
                            sealedDirectoryProducer.Provenance.Token.Path.ToString(Context.PathTable),
                            sealedDirectoryProducer.Provenance.Token.Line,
                            sealedDirectoryProducer.Provenance.Token.Position,
                            sealedDirectoryAndProducer.Key.Path.ToString(Context.PathTable),
                            sealedDirectoryProducer.GetDescription(Context),
                            directory.Path.ToString(Context.PathTable),
                            sealDirectory.GetDescription(Context));
                    }
                    else
                    {
                        Logger.Log.ScheduleFailInvalidGraphSinceSourceSealedDirectoryContainsOutputDirectory(
                            LoggingContext,
                            sealDirectoryProvenance.Token.Path.ToString(Context.PathTable),
                            sealDirectoryProvenance.Token.Line,
                            sealDirectoryProvenance.Token.Position,
                            directory.Path.ToString(Context.PathTable),
                            currentAsPath.ToString(Context.PathTable),
                            sealedDirectoryProducer.GetDescription(Context));
                    }

                    currentError = true;
                }
            }

            if (!currentError)
            {
                foreach (var child in Context.PathTable.EnumerateImmediateChildren(current))
                {
                    if (visited.TryAdd(child, true))
                    {
                        queue.Enqueue(child);
                    }
                }
            }

            if (currentError)
            {
                ++errorCount;
            }
        }
    }

    return errorCount == 0;
}

/// <summary>
/// Logs the "source sealed directory coincides with / contains an output file" error,
/// choosing the event by whether the paths are identical. Returns the producing node of the output file.
/// </summary>
private NodeId LogScheduleFailInvalidGraphSinceSourceSealedDirectoryCoincidesOrContainsOutputFile(DirectoryArtifact directory, PipProvenance sealDirectoryProvenance, FileArtifact outputFile)
{
    NodeId producerChildNode;
    bool getProducer = PipProducers.TryGetValue(outputFile, out producerChildNode);
    Contract.Assert(getProducer);

    var outputFileProducer = PipTable.HydratePip(
        producerChildNode.ToPipId(),
        PipQueryContext.PipGraphPostValidation);

    if (outputFile.Path == directory.Path)
    {
        Logger.Log.ScheduleFailInvalidGraphSinceSourceSealedDirectoryCoincidesOutputFile(
            LoggingContext,
            sealDirectoryProvenance.Token.Path.ToString(Context.PathTable),
            sealDirectoryProvenance.Token.Line,
            sealDirectoryProvenance.Token.Position,
            directory.Path.ToString(Context.PathTable),
            outputFile.Path.ToString(Context.PathTable),
            outputFileProducer.GetDescription(Context));
    }
    else
    {
        Logger.Log.ScheduleFailInvalidGraphSinceSourceSealedDirectoryContainsOutputFile(
            LoggingContext,
            sealDirectoryProvenance.Token.Path.ToString(Context.PathTable),
            sealDirectoryProvenance.Token.Line,
            sealDirectoryProvenance.Token.Position,
            directory.Path.ToString(Context.PathTable),
            outputFile.Path.ToString(Context.PathTable),
            outputFileProducer.GetDescription(Context));
    }

    return producerChildNode;
}

/// <summary>
/// Validates a fully sealed directory: every produced file underneath it must be part of the
/// seal's declared contents (HashSourceFile-produced source files are exempt).
/// </summary>
private bool EnsureFullSealDirectoriesCoverAllPathsUnderneath(
    ObjectPool<Queue<HierarchicalNameId>> queuePool,
    ConcurrentDictionary<HierarchicalNameId, bool> visited,
    SealDirectory sealDirectory)
{
    Contract.Requires(queuePool != null);
    Contract.Requires(visited != null);
    Contract.Requires(sealDirectory != null);
    Contract.Requires(sealDirectory.Kind == SealDirectoryKind.Full);

    int errorCount = 0;
    var directory = sealDirectory.Directory;
    var sealDirectoryProvenance = sealDirectory.Provenance ?? GetDummyProvenance();

    using (var wrappedQueue = queuePool.GetInstance())
    using (var wrappedSet = Pools.GetFileArtifactSet())
    {
        var queue = wrappedQueue.Instance;

        if (visited.TryAdd(directory.Path.Value, true))
        {
            queue.Enqueue(directory.Path.Value);
        }

        // Materialize the seal's contents into a set for O(1) membership tests during the walk.
        HashSet<FileArtifact> fullSealContents = wrappedSet.Instance;
        foreach (var item in sealDirectory.Contents)
        {
            fullSealContents.Add(item);
        }

        while (queue.Count > 0)
        {
            var current = queue.Dequeue();

            foreach (var child in Context.PathTable.EnumerateImmediateChildren(current))
            {
                bool childError = false;
                var childAsPath = new AbsolutePath(child);
                int latestRewriteCount;
                if (!LatestWriteCountsByPath.TryGetValue(childAsPath, out latestRewriteCount))
                {
                    // Since paths that get validated are created by enumerating the path table, intermediate directory paths
                    // will be included in addition to file paths. Those intermediate directories won't have any producing
                    // pip and should therefore be omitted from the validation, but we need to keep following down to their children.
                    //
                    // For example:
                    //   SealedDirectory: c:\foo
                    //   Files: c:\foo\1.txt, c:\foo\bar\2.txt
                    // c:\foo\bar will be a path that gets checked since it exists in the PathTable. But that's a directory
                    // so it shouldn't flag a warning about not having a fully specified sealed directory.
                    if (visited.TryAdd(child, true))
                    {
                        queue.Enqueue(child);
                    }

                    continue;
                }

                var childAsLatestFileVersion = new FileArtifact(childAsPath, latestRewriteCount);
                if (!fullSealContents.Contains(childAsLatestFileVersion))
                {
                    NodeId pipReferencingUnsealedFile;
                    if (!PipProducers.TryGetValue(childAsLatestFileVersion, out pipReferencingUnsealedFile))
                    {
                        Contract.Assume(false, "Should have found a producer for the referenced path.");
                    }

                    if (pipReferencingUnsealedFile.IsValid
                        // Ignore this for Source files, they should be okay.
                        && PipTable.GetPipType(pipReferencingUnsealedFile.ToPipId()) != PipType.HashSourceFile)
                    {
                        var pip = PipTable.HydratePip(
                            pipReferencingUnsealedFile.ToPipId(),
                            PipQueryContext.PipGraphPostValidation);

                        Logger.Log.InvalidGraphSinceFullySealedDirectoryIncomplete(
                            LoggingContext,
                            sealDirectoryProvenance.Token.Path.ToString(Context.PathTable),
                            sealDirectoryProvenance.Token.Line,
                            sealDirectoryProvenance.Token.Position,
                            directory.Path.ToString(Context.PathTable),
                            pip.GetDescription(Context),
                            childAsPath.ToString(Context.PathTable));
                        childError = true;
                    }
                }

                if (!childError && visited.TryAdd(child, true))
                {
                    queue.Enqueue(child);
                }

                if (childError)
                {
                    ++errorCount;
                }
            }
        }
    }

    return errorCount == 0;
}

/// <summary>
/// Validates an IPC pip: all file/directory dependencies must be valid inputs and all
/// service pip dependencies must reference already-added service pips.
/// </summary>
private bool IsValidIpc(IpcPip ipcPip, LockManager.PathAccessGroupLock pathAccessLock)
{
    var semanticPathExpander = SemanticPathExpander.GetModuleExpander(ipcPip.Provenance.ModuleId);
    if (ipcPip.FileDependencies.Any(f => !IsValidInputFileArtifact(pathAccessLock, f, ipcPip, semanticPathExpander)))
    {
        return false;
    }

    if (ipcPip.DirectoryDependencies.Any(d => !IsValidInputDirectoryArtifact(pathAccessLock, d, ipcPip)))
    {
        return false;
    }

    if (!CheckServicePipDependencies(ipcPip.ServicePipDependencies))
    {
        // IPC pips have no provenance token here, so location fields are empty/zero.
        Logger.Log.ScheduleFailAddPipDueToInvalidServicePipDependency(
            LoggingContext,
            file: string.Empty,
            line: 0,
            column: 0,
            pipSemiStableHash: ipcPip.MessageBody.GetHashCode(),
            pipDescription: ipcPip.GetDescription(Context),
            pipValueId: ipcPip.PipId.ToString());
        return false;
    }

    return true;
}

/// <summary>
/// Validates process pip.
/// </summary>
/// <param name="process">The process pip to validate; must not be null.</param>
/// <param name="pathAccessLock">Access lock acquired by the enclosing add-pip operation.</param>
/// <param name="dependenciesByPath">Out: the process' file dependencies keyed by path (populated even on some failure paths).</param>
/// <param name="outputsByPath">Out: the process' file outputs keyed by path.</param>
/// <returns>True when every input/output declaration of the process is consistent with the graph so far.</returns>
private bool IsValidProcess(
    Process process,
    LockManager.PathAccessGroupLock pathAccessLock,
    out Dictionary<AbsolutePath, FileArtifact> dependenciesByPath,
    out Dictionary<AbsolutePath, FileArtifact> outputsByPath)
{
    Contract.Requires(process != null, "Argument process cannot be null");

    var semanticPathExpander = SemanticPathExpander.GetModuleExpander(process.Provenance.ModuleId);
    dependenciesByPath = new Dictionary<AbsolutePath, FileArtifact>(process.Dependencies.Length);
    outputsByPath = new Dictionary<AbsolutePath, FileArtifact>(process.FileOutputs.Length);
    var outputDirectorySet = new HashSet<AbsolutePath>();

    // Process dependencies.
    foreach (FileArtifact dependency in process.Dependencies)
    {
        if (!dependenciesByPath.TryGetValue(dependency.Path, out FileArtifact existingDependencyOnPath))
        {
            if (!IsValidInputFileArtifact(pathAccessLock, dependency, process, semanticPathExpander))
            {
                return false;
            }

            dependenciesByPath.Add(dependency.Path, dependency);
        }
        else
        {
            // Same path declared twice with different rewrite counts.
            Contract.Assume(existingDependencyOnPath != dependency, "Should not contain duplicates");
            LogEventWithPipProvenance(Logger.ScheduleFailAddPipInvalidInputDueToMultipleConflictingRewriteCounts, process, dependency);
            return false;
        }
    }

    if (process.DirectoryDependencies.Any(d => !IsValidInputDirectoryArtifact(pathAccessLock, d, process)))
    {
        return false;
    }

    Contract.Assert(dependenciesByPath.ContainsKey(process.Executable.Path), "Dependency set must contain the executable.");
    Contract.Assert(
        !process.StandardInput.IsFile || dependenciesByPath.ContainsKey(process.StandardInput.File.Path),
        "Dependency set must contain the standard input.");

    foreach (PipId pipId in process.OrderDependencies)
    {
        if (!PipTable.IsValid(pipId))
        {
            Contract.Assume(false, "Invalid pip id");
            return false;
        }
    }

    // Process outputs
    // Every pip must have at least one output artifact
    if (process.FileOutputs.Length == 0 && process.DirectoryOutputs.Length == 0)
    {
        LogEventWithPipProvenance(Logger.ScheduleFailAddProcessPipProcessDueToNoOutputArtifacts, process);
        return false;
    }

    // Temp file outputs are included in FileOutputs (whereas temp directories are excluded from DirectoryOutputs),
    // so its possible that the length of FileOutputs only reflects temp files.
    // If there are no DirectoryOutputs, make sure there is at least one FileOutput that is NOT temporary.
    bool hasOneRequiredOutput = process.DirectoryOutputs.Length > 0;
    foreach (FileArtifactWithAttributes outputWithAttributes in process.FileOutputs)
    {
        FileArtifact output = outputWithAttributes.ToFileArtifact();
        if (!outputsByPath.TryGetValue(output.Path, out FileArtifact existingOutputToPath))
        {
            // An output at the same path as a dependency is an in-place rewrite; pass the
            // corresponding input (or Invalid) so the output validation can check it.
            if (!dependenciesByPath.TryGetValue(output.Path, out FileArtifact correspondingInput))
            {
                correspondingInput = FileArtifact.Invalid;
            }

            if (!IsValidOutputFileArtifact(pathAccessLock, output, correspondingInput, process, semanticPathExpander))
            {
                return false;
            }

            outputsByPath.Add(output.Path, output);
        }
        else
        {
            Contract.Assume(existingOutputToPath != output, "Should not contain duplicates");
            LogEventWithPipProvenance(Logger.ScheduleFailAddPipInvalidOutputDueToMultipleConflictingRewriteCounts, process, output);
            return false;
        }

        hasOneRequiredOutput |= !outputWithAttributes.IsTemporaryOutputFile;
    }

    if (!hasOneRequiredOutput)
    {
        LogEventWithPipProvenance(Logger.ScheduleFailAddProcessPipProcessDueToNoOutputArtifacts, process);
        return false;
    }

    foreach (var directory in process.DirectoryOutputs)
    {
        // A path cannot be declared both as a file output and a directory output.
        if (outputsByPath.ContainsKey(directory.Path))
        {
            LogEventWithPipProvenance(Logger.ScheduleFailAddPipInvalidOutputSinceOutputIsBothSpecifiedAsFileAndDirectory, process, directory.Path);
            return false;
        }

        if (!IsValidOutputDirectory(directory, process, semanticPathExpander))
        {
            return false;
        }

        outputDirectorySet.Add(directory.Path);
    }

    // TODO: no explicit inputs are allowed in OD dependencies.

    // Validate temp directory environment variables
    if (process.EnvironmentVariables.IsValid)
    {
        foreach (EnvironmentVariable environmentVariable in process.EnvironmentVariables)
        {
            if (m_tempEnvironmentVariables.Contains(environmentVariable.Name))
            {
                if (!ValidateTempDirectory(process, environmentVariable, semanticPathExpander))
                {
                    return false;
                }
            }
        }
    }

    if (!CheckServicePipDependencies(process.ServicePipDependencies))
    {
        LogEventWithPipProvenance(Logger.ScheduleFailAddPipDueToInvalidServicePipDependency, process);
        return false;
    }

    // The preserve-outputs whitelist only makes sense when preserve-outputs is allowed, and
    // every whitelisted path must be one of the pip's declared outputs (file or directory).
    if (process.PreserveOutputWhitelist.IsValid && process.PreserveOutputWhitelist.Length > 0)
    {
        if (!process.AllowPreserveOutputs)
        {
            LogEventWithPipProvenance(Logger.ScheduleFailAddPipDueToInvalidAllowPreserveOutputsFlag, process);
            return false;
        }

        foreach (var whitelistPath in process.PreserveOutputWhitelist)
        {
            if (!outputsByPath.ContainsKey(whitelistPath) && !outputDirectorySet.Contains(whitelistPath))
            {
                LogEventWithPipProvenance(Logger.ScheduleFailAddPipDueToInvalidPreserveOutputWhitelist, process);
                return false;
            }
        }
    }

    Contract.Assert(
        !process.StandardOutput.IsValid || outputsByPath.ContainsKey(process.StandardOutput.Path),
        "Output set must contain the standard output file, if specified.");
    Contract.Assert(
        !process.StandardError.IsValid || outputsByPath.ContainsKey(process.StandardError.Path),
        "Output set must contain the standard error file, if specified.");

    return true;
}

/// <summary>
/// Returns true when every referenced service pip has already been registered as a service.
/// </summary>
private bool CheckServicePipDependencies(ReadOnlyArray<PipId> servicePipDependencies)
{
    Contract.Requires(servicePipDependencies.IsValid);

    // check service pip dependencies are service pips (and have already been added)
    foreach (PipId servicePipId in servicePipDependencies)
    {
        if (!m_servicePipToServiceInfoMap.ContainsKey(servicePipId))
        {
            return false;
        }
    }

    return true;
}

/// <summary>
/// Checks if copy-file pip is valid.
/// </summary> private bool IsValidCopyFile(CopyFile copyFile, LockManager.PathAccessGroupLock pathAccessLock) { Contract.Requires(copyFile != null, "Argument copyFile cannot be null"); var semanticPathExpander = SemanticPathExpander.GetModuleExpander(copyFile.Provenance.ModuleId); if (copyFile.Source.Path == copyFile.Destination.Path) { LogEventWithPipProvenance(Logger.ScheduleFailAddCopyFilePipDueToSameSourceAndDestinationPath, copyFile, copyFile.Destination); return false; } if (!IsValidInputFileArtifact(pathAccessLock, copyFile.Source, copyFile, semanticPathExpander)) { return false; } if (!IsValidOutputFileArtifact(pathAccessLock, copyFile.Destination, FileArtifact.Invalid, copyFile, semanticPathExpander)) { return false; } return true; } /// <summary> /// Checks if write-file pip is valid. /// </summary> private bool IsValidWriteFile(WriteFile writeFile, LockManager.PathAccessGroupLock pathAccessLock) { Contract.Requires(writeFile != null, "Argument writeFile cannot be null"); var semanticPathExpander = SemanticPathExpander.GetModuleExpander(writeFile.Provenance.ModuleId); if (!IsValidOutputFileArtifact(pathAccessLock, writeFile.Destination, FileArtifact.Invalid, writeFile, semanticPathExpander)) { return false; } // It doesn't make much sense to allow WriteFile to rewrite things. WriteFile pips have no dependencies, so there's no // way to constrain their scheduling (a rewriting WriteFile would run as soon as the previous version was written, // so why write the previous version at all?). Note that this is not true for CopyFile, since it has an input edge. if (writeFile.Destination.RewriteCount != 1) { LogEventWithPipProvenance(Logger.ScheduleFailAddWriteFilePipSinceOutputIsRewritten, writeFile, writeFile.Destination); return false; } return true; } /// <summary> /// Checks if a given file artifact is a valid source file artifact. 
/// </summary>
/// <param name="pathAccessLock">The access lock acquired by the enclosing operation for read access to the file</param>
/// <param name="input">Artifact that has been specified as an input of the pip</param>
/// <param name="pip">The pip which has specified the given output</param>
/// <param name="semanticPathExpander">The semantic path expander for the pip</param>
/// <remarks>
/// The path read lock must be held when calling this method.
/// All failure paths log their own diagnostics.
/// </remarks>
private bool IsValidInputFileArtifact(LockManager.PathAccessGroupLock pathAccessLock, FileArtifact input, Pip pip, SemanticPathExpander semanticPathExpander)
{
    Contract.Requires(pathAccessLock.HasReadAccess(input.Path));
    Contract.Requires(input.IsValid, "Argument input must be a valid file artifact");
    Contract.Requires(pip != null, "Argument pip cannot be null");

    // Inputs under a mount that is explicitly non-readable are rejected.
    // (An input outside any known mount — semanticPathInfo invalid — is allowed here.)
    SemanticPathInfo semanticPathInfo = semanticPathExpander.GetSemanticPathInfo(input);
    if (semanticPathInfo.IsValid && !semanticPathInfo.IsReadable)
    {
        LogEventWithPipProvenance(Logger.ScheduleFailAddPipInvalidInputUnderNonReadableRoot, pip, input, semanticPathInfo.Root);
        return false;
    }

    /* TODO:410334 - Current builds have lots of source files being incorrectly registered under output mounts.
    if (semanticPathInfo.IsValid && semanticPathInfo.IsScrubbable && input.IsSourceFile)
    {
        LogEventWithPipProvenance(Events.Log.ScheduleFailAddPipInvalidSourceInputUnderScrubbableRoot, pip, input, semanticPathInfo.Root);
        return false;
    }*/

    // Compare against the latest artifact known for this path; invalid means the path has never been used yet.
    FileArtifact latestExistingArtifact = TryGetLatestFileArtifactForPath(input.Path);
    bool hasBeenUsed = latestExistingArtifact.IsValid;
    if (hasBeenUsed)
    {
        // Declared as a source file, but some pip already writes this path — it is not a source.
        if (input.IsSourceFile && !latestExistingArtifact.IsSourceFile)
        {
            PipId latestProducerId = PipProducers[latestExistingArtifact].ToPipId();
            Pip latestProducer = PipTable.HydratePip(latestProducerId, PipQueryContext.PipGraphIsValidInputFileArtifact1);
            LogEventWithPipProvenance(Logger.ScheduleFailAddPipInvalidInputSincePathIsWrittenAndThusNotSource, pip, latestProducer, input);
            return false;
        }

        if (IsTemporaryOutput(latestExistingArtifact))
        {
            // Output artifact should not be temporary output of the pip
            PipId latestProducerId = PipProducers[latestExistingArtifact].ToPipId();
            Pip latestProducer = PipTable.HydratePip(latestProducerId, PipQueryContext.PipGraphIsValidInputFileArtifact2);
            LogEventWithPipProvenance(Logger.ScheduleFailAddPipInvalidInputSinceCorespondingOutputIsTemporary, pip, latestProducer, input);
            return false;
        }

        // Inputs must reference the latest version of the path (same rewrite count); an older version is rejected.
        if (latestExistingArtifact != input)
        {
            PipId latestProducerId = PipProducers[latestExistingArtifact].ToPipId();
            Pip latestProducer = PipTable.HydratePip(latestProducerId, PipQueryContext.PipGraphIsValidInputFileArtifact3);
            LogEventWithPipProvenance(Logger.ScheduleFailAddPipInvalidInputSinceInputIsRewritten, pip, latestProducer, input);
            return false;
        }
    }
    else
    {
        // Declared as an output file but no producer for the path has been added.
        if (input.IsOutputFile)
        {
            LogEventWithPipProvenance(Logger.ScheduleFailAddPipInvalidInputSinceInputIsOutputWithNoProducer, pip, input);
            return false;
        }
    }

    return true;
}

/// <summary>
/// Checks if input directory artifact is valid.
/// </summary>
/// <param name="pathAccessLock">The access lock acquired by the enclosing operation for read access to the file</param>
/// <param name="inputDirectory">Artifact that has been specified as an input of the pip</param>
/// <param name="pip">The pip which has specified the given output</param>
/// <returns>True iff the directory artifact has a registered seal; otherwise logs and returns false.</returns>
private bool IsValidInputDirectoryArtifact(LockManager.PathAccessGroupLock pathAccessLock, DirectoryArtifact inputDirectory, Pip pip)
{
    Contract.Requires(pathAccessLock.HasReadAccess(inputDirectory.Path));

    // A directory dependency must reference a sealed directory known to the SealDirectoryTable.
    if (!SealDirectoryTable.TryGetSealForDirectoryArtifact(inputDirectory, out _))
    {
        LogEventWithPipProvenance(
            Logger.SourceDirectoryUsedAsDependency,
            pip,
            inputDirectory.Path);
        return false;
    }

    return true;
}

/// <summary>
/// Returns true if specified <paramref name="fileArtifact" /> was used as a temporary output artifact by
/// one of the pips.
/// </summary>
private bool IsTemporaryOutput(FileArtifact fileArtifact)
{
    return fileArtifact.IsValid && m_temporaryOutputFiles.Contains(fileArtifact);
}

/// <summary>
/// Verifies that the given path is under a writable root
/// </summary>
/// <remarks>
/// A path outside any known mount (invalid semantic path info) is treated as writable.
/// </remarks>
private static bool IsWritablePath(AbsolutePath path, SemanticPathExpander semanticPathExpander, out SemanticPathInfo semanticPathInfo)
{
    semanticPathInfo = semanticPathExpander.GetSemanticPathInfo(path);
    return !semanticPathInfo.IsValid || semanticPathInfo.IsWritable;
}

/// <summary>
/// Validates that a temp directory environment variable value is a valid path and is under a writable root
/// </summary>
private bool ValidateTempDirectory(Pip pip, in EnvironmentVariable tempEnvironmentVariable, SemanticPathExpander semanticPathExpander)
{
    AbsolutePath path;
    // The variable's value must parse as an absolute path.
    string pathString = tempEnvironmentVariable.Value.ToString(Context.PathTable);
    if (!AbsolutePath.TryCreate(Context.PathTable, pathString, out path))
    {
        LogEventWithPipProvenance(Logger.ScheduleFailAddPipInvalidTempDirectoryInvalidPath, pip, pathString, tempEnvironmentVariable.Name);
        return false;
    }

    // ... and the resulting path must be under a writable root.
    SemanticPathInfo semanticPathInfo;
    if (!IsWritablePath(path, semanticPathExpander, out semanticPathInfo))
    {
        LogEventWithPipProvenance(
            Logger.ScheduleFailAddPipInvalidTempDirectoryUnderNonWritableRoot,
            pip,
            path,
            semanticPathInfo.Root,
            tempEnvironmentVariable.Name);
        return false;
    }

    return true;
}

/// <summary>
/// Checks if a given file artifact is a valid output file artifact.
/// </summary>
/// <param name="pathAccessLock">the access lock acquired by the enclosing operation for write access to the file</param>
/// <param name="output">Artifact that has been specified as an output of the pip</param>
/// <param name="correspondingInput">An artifact with the same path that is used as input to the pip (if present)</param>
/// <param name="pip">The pip which has specified the given output</param>
/// <param name="semanticPathExpander">the semantic path information for the pip</param>
/// <remarks>
/// The path write lock must be held when calling this method.
/// Enforces: output is not a source file, is under a writable root, is not inside a fully sealed directory,
/// and — when the path was written before — forms a valid rewrite chain (consecutive rewrite counts,
/// rewriting only the latest version, no double writes, previous version not already consumed).
/// </remarks>
private bool IsValidOutputFileArtifact(
    LockManager.PathAccessGroupLock pathAccessLock,
    FileArtifact output,
    FileArtifact correspondingInput,
    Pip pip,
    SemanticPathExpander semanticPathExpander)
{
    Contract.Requires(pathAccessLock.HasWriteAccess(output.Path));
    Contract.Requires(output.IsValid, "Argument output must be a valid file artifact");
    Contract.Requires(pip != null, "Argument pip cannot be null");

    if (!output.IsOutputFile)
    {
        Contract.Assume(output.IsSourceFile);
        LogEventWithPipProvenance(Logger.ScheduleFailAddPipInvalidOutputSinceOutputIsSource, pip, output);
        return false;
    }

    SemanticPathInfo semanticPathInfo;
    if (!IsWritablePath(output.Path, semanticPathExpander, out semanticPathInfo))
    {
        LogEventWithPipProvenance(Logger.ScheduleFailAddPipInvalidOutputUnderNonWritableRoot, pip, output, semanticPathInfo.Root);
        return false;
    }

    // We cannot schedule new writes to a directory which has been fully sealed (this is dual to requiring an exhaustive set
    // of contents when sealing the directory in the first place).
    // This doesn't mean that an opaque directory can't have an explicit output. Such an explicit output is allowed so long as
    // both the opaque directory and the explicit output is produced by the same pip.
    // Note that we don't allow an explicit output under any kind of source sealed directory. Producing such an output
    // can alter the membership of the directory.
    // Shared opaque directories are an exception: any pip can write declared outputs under any shared opaque directory. This
    // is mainly because shared opaque directories are not really 'sealed' beyond dynamically observed writes that constitutes
    // a version of the content of the directory attributed to a given pip
    if (IsPathInsideFullySealedDirectory(output, pip))
    {
        // IsPathInsideFullySealedDirectory logs the failure itself.
        return false;
    }

    FileArtifact latestExistingArtifact = TryGetLatestFileArtifactForPath(output.Path);
    bool hasBeenUsed = latestExistingArtifact.IsValid;

    if ((hasBeenUsed && latestExistingArtifact.IsSourceFile) || (correspondingInput.IsValid && correspondingInput.IsSourceFile))
    {
        // Can't rewrite a source file.
        // TODO:[3089]: We should instead detect this error by enforcing that source files never occur under the output root,
        // and that outputs only occur under the output root. Though note that that would
        // break the QuickBuild + MSBuild + CoreXT case where most teams at MSFT
        // build outputs into $(OutDir) within the enlistment, at least for local
        // builds. For cloud builds the outdir is virtualized and breaking on
        // writes to the enlistment make sense.
        LogEventWithPipProvenance(Logger.ScheduleFailAddPipInvalidOutputSinceOutputIsSource, pip, output);
        return false;
    }

    if (hasBeenUsed && latestExistingArtifact.IsOutputFile)
    {
        // The output artifact refers to a path which has already been written (possibly with a different rewrite count).
        // The following cases are for validating rewrite chains, to the extent we support them.
        NodeId producingNodeId = PipProducers[latestExistingArtifact];
        PipId producingPipId = producingNodeId.ToPipId();

        if (correspondingInput.IsValid)
        {
            if (correspondingInput.RewriteCount + 1 != output.RewriteCount)
            {
                // We don't allow time-travel when rewriting (e.g. using version N as input to generate version N + 2).
                LogEventWithPipProvenance(Logger.ScheduleFailAddPipRewrittenOutputMismatchedWithInput, pip, output);
                return false;
            }
        }

        if (latestExistingArtifact.RewriteCount == 1 && output.RewriteCount == 1)
        {
            // Simple double write.
            Pip producingPip = PipTable.HydratePip(producingPipId, PipQueryContext.PipGraphIsValidOutputFileArtifactRewrite1);
            LogEventWithPipProvenance(Logger.ScheduleFailAddPipInvalidOutputDueToSimpleDoubleWrite, pip, producingPip, output);
            return false;
        }

        if (latestExistingArtifact.RewriteCount >= output.RewriteCount)
        {
            // Can only rewrite the latest version.
            Pip producingPip = PipTable.HydratePip(producingPipId, PipQueryContext.PipGraphIsValidOutputFileArtifactRewrite2);
            LogEventWithPipProvenance(Logger.ScheduleFailAddPipInvalidOutputSinceRewritingOldVersion, pip, producingPip, output);
            return false;
        }

        if ((output.RewriteCount - latestExistingArtifact.RewriteCount) > 1)
        {
            // We skipped a version, so the created pip invented a write count somehow.
            LogEventWithPipProvenance(Logger.ScheduleFailAddPipInvalidOutputSinceOutputHasUnexpectedlyHighWriteCount, pip, output);
            return false;
        }

        if (m_outputFileArtifactsUsedAsInputs.Contains(latestExistingArtifact))
        {
            // Only the final output of a rewrite chain can be used as an input to arbitrary pips (can't swap in old versions).
            PipId sealingPipId = SealDirectoryTable.TryFindSealDirectoryPipContainingFileArtifact(PipTable, latestExistingArtifact);
            if (!sealingPipId.IsValid)
            {
                // TODO: Would be nice to indicate the related consumer here
                LogEventWithPipProvenance(Logger.ScheduleFailAddPipInvalidOutputSincePreviousVersionUsedAsInput, pip, output);
            }
            else
            {
                // The previous version was consumed by sealing it into a directory; report the sealing pip.
                var sealingPip = (SealDirectory)PipTable.HydratePip(sealingPipId, PipQueryContext.PipGraphIsValidOutputFileArtifactSealing2);
                LogEventWithPipProvenance(
                    Logger.ScheduleFailAddPipInvalidOutputSinceFileHasBeenPartiallySealed,
                    pip,
                    sealingPip,
                    output,
                    sealingPip.Directory);
            }

            return false;
        }

        Pip latestProducingPip = PipTable.HydratePip(producingPipId, PipQueryContext.PipGraphIsValidOutputFileArtifactRewrite3);
        Process latestProducingProcess = latestProducingPip as Process;
        Process currentProducingProcess = pip as Process;

        // Rewriting a preserved output is logged (warning-style), but does not fail validation.
        if ((latestProducingProcess != null && latestProducingProcess.AllowPreserveOutputs) ||
            (currentProducingProcess != null && currentProducingProcess.AllowPreserveOutputs))
        {
            // Log for rewriting preserved output.
            LogEventWithPipProvenance(Logger.ScheduleAddPipInvalidOutputDueToRewritingPreservedOutput, pip, latestProducingPip, output);
        }
    }
    else
    {
        // Here, we've established that this artifact has not been used before at all, so we need only perform validation for single-writes.
        if (output.RewriteCount > 1)
        {
            // This path has not been seen before, yet we see a write-count greater than one. Versions are missing.
            LogEventWithPipProvenance(Logger.ScheduleFailAddPipInvalidOutputSinceOutputHasUnexpectedlyHighWriteCount, pip, output);
            return false;
        }
    }

    Contract.Assert(latestExistingArtifact.RewriteCount + 1 == output.RewriteCount);
    return true;
}

/// <summary>
/// Checks that no reads/writes are performed inside a fully sealed directory.
/// </summary>
/// <remarks>
/// Shared opaque directories are not considered fully sealed directory
/// </remarks>
/// <param name="path">File/Directory artifact to check.</param>
/// <param name="pip">Producing pip (used for error reporting only).</param>
/// <returns>True (and logs an error) iff the path is inside a non-shared-opaque fully sealed directory.</returns>
private bool IsPathInsideFullySealedDirectory(AbsolutePath path, Pip pip)
{
    DirectoryArtifact fullSealArtifact = SealDirectoryTable.TryFindFullySealedDirectoryArtifactForFile(path);
    if (fullSealArtifact.IsValid && !fullSealArtifact.IsSharedOpaque)
    {
        SealDirectoryTable.TryGetSealForDirectoryArtifact(fullSealArtifact, out PipId sealingPipId);
        Contract.Assert(sealingPipId.IsValid);

        Pip sealingPip = PipTable.HydratePip(sealingPipId, PipQueryContext.PipGraphIsValidOutputFileArtifactSealing1);
        LogEventWithPipProvenance(
            Logger.ScheduleFailAddPipInvalidOutputSinceDirectoryHasBeenSealed,
            pip,
            sealingPip,
            FileArtifact.CreateSourceFile(path),
            fullSealArtifact.Path);
        return true;
    }

    return false;
}

/// <summary>
/// Checks to see if this is a valid output directory.
/// </summary>
/// <remarks>
/// - An output directory must have not been produced by another pip.
/// - An output directory must be under a writable mount
/// </remarks>
private bool IsValidOutputDirectory(DirectoryArtifact directory, Pip pip, SemanticPathExpander semanticPathExpander)
{
    NodeId producingPipNode;

    // An output directory must have not been produced by another pip.
    if (OutputDirectoryProducers.TryGetValue(directory, out producingPipNode))
    {
        Pip producingPip = PipTable.HydratePip(producingPipNode.ToPipId(), PipQueryContext.PipGraphIsValidOutputDirectory1);
        LogEventWithPipProvenance(
            Logger.ScheduleFailAddPipInvalidOutputSinceDirectoryHasBeenProducedByAnotherPip,
            pip,
            producingPip,
            directory.Path);
        return false;
    }

    // An output directory must be under a writable mount
    if (!IsWritablePath(directory.Path, semanticPathExpander, out var semanticPathInfo))
    {
        LogEventWithPipProvenance(Logger.ScheduleFailAddPipInvalidOutputUnderNonWritableRoot, pip, directory.Path, semanticPathInfo.Root);
        return false;
    }

    return true;
}

/// <summary>
/// Checks to see if this is a valid dynamic seal directory.
/// </summary>
/// <remarks>
/// A directory sealed dynamically must not exist within another fully sealed or output directory. They may not overlap with another dynamically sealed directory.
/// </remarks>
private bool IsValidSealedOutputDirectory(DirectoryArtifact directory, Pip pip)
{
    DirectoryArtifact fullSealArtifact = SealDirectoryTable.TryFindFullySealedContainingDirectoryArtifact(directory);
    if (fullSealArtifact.IsValid)
    {
        SealDirectoryTable.TryGetSealForDirectoryArtifact(fullSealArtifact, out PipId sealingPipId);
        Contract.Assert(sealingPipId.IsValid);

        Pip sealingPip = PipTable.HydratePip(sealingPipId, PipQueryContext.PipGraphIsValidOutputFileArtifactSealing1);
        LogEventWithPipProvenance(
            Logger.ScheduleFailAddPipInvalidOutputSinceDirectoryHasBeenSealed,
            pip,
            sealingPip,
            FileArtifact.CreateSourceFile(directory.Path),
            fullSealArtifact.Path);
        return false;
    }

    return true;
}

#endregion Validation

#region Pip Addition

/// <summary>
/// Creates a dataflow-graph node for the pip and adds the pip to the pip table.
/// The resulting node id and the assigned pip id are asserted to correspond.
/// </summary>
private NodeId CreateNodeForPip(Pip pip)
{
    Contract.Requires(!IsImmutable);

    NodeId node = MutableDataflowGraph.CreateNode();
    PipId pipId = PipTable.Add(node.Value, pip);
    Contract.Assert(pipId.ToNodeId() == node);

    return node;
}

/// <summary>
/// Adds a copy-file pip into this schedule.
/// </summary>
/// <param name="copyFile">The copy-file pip to add; must be non-null.</param>
/// <param name="valuePipId">Optional value pip to link to; when invalid, the pip's provenance is used instead.</param>
/// <returns>True if the pip was added (or already existed); false if validation failed.</returns>
public bool AddCopyFile(CopyFile copyFile, PipId valuePipId = default)
{
    Contract.Requires(copyFile != null, "Argument copyFile cannot be null");
    Contract.Assert(!IsImmutable);

    using (LockManager.PathAccessGroupLock accessLock = LockManager.AcquirePathAccessLock(copyFile))
    {
        // Adding the same pip twice is treated as success.
        if (PipExists(copyFile))
        {
            return true;
        }

        // Validation logs its own diagnostics on failure.
        if (!IsValidCopyFile(copyFile, accessLock))
        {
            return false;
        }

        // Possibly create a source pip (note that all nodes are created in a topological order).
        if (copyFile.Source.IsSourceFile)
        {
            EnsurePipExistsForSourceArtifact(accessLock, copyFile.Source);
        }

        NodeId node = CreateNodeForPip(copyFile);

        AddOutput(accessLock, node, copyFile.Destination.WithAttributes(FileExistence.Required));

        // The single input edge is added under an exclusive incoming-edge scope for the new node.
        using (var edgeScope = MutableDataflowGraph.AcquireExclusiveIncomingEdgeScope(node))
        {
            AddInput(accessLock, node, copyFile.Source, edgeScope: edgeScope);
        }

        // Link to value pip
        if (valuePipId.IsValid)
        {
            AddPipProducerConsumerDependency(node, valuePipId.ToNodeId(), ignoreTopologicalCheck: true);
        }
        else
        {
            AddValueDependency(node, copyFile.Provenance);
        }
    }

    // Fingerprint once the pip is fully wired into the graph (outside the path-access lock).
    ComputeAndStorePipStaticFingerprint(copyFile);

    return true;
}

/// <summary>
/// Adds a write-file pip into this schedule.
/// </summary>
/// <returns>True if the pip was added (or already existed); false if validation failed.</returns>
public bool AddWriteFile(WriteFile writeFile, PipId valuePipId = default)
{
    Contract.Requires(writeFile != null, "Argument writeFile cannot be null");
    Contract.Assert(!IsImmutable);

    using (LockManager.PathAccessGroupLock pathAccessLock = LockManager.AcquirePathAccessLock(writeFile))
    {
        // Adding the same pip twice is treated as success.
        if (PipExists(writeFile))
        {
            return true;
        }

        if (!IsValidWriteFile(writeFile, pathAccessLock))
        {
            return false;
        }

        NodeId writeFileNode = CreateNodeForPip(writeFile);

        AddOutput(pathAccessLock, writeFileNode, writeFile.Destination.WithAttributes(FileExistence.Required));

        // Link to value pip
        if (valuePipId.IsValid)
        {
            AddPipProducerConsumerDependency(writeFileNode, valuePipId.ToNodeId(), ignoreTopologicalCheck: true);
        }
        else
        {
            AddValueDependency(writeFileNode, writeFile.Provenance);
        }
    }

    // Fingerprint after the pip is wired in; done outside the path-access lock.
    ComputeAndStorePipStaticFingerprint(writeFile);

    return true;
}

/// <summary>
/// Add a process pip into this schedule.
/// </summary>
/// <returns>True if the pip was added (or already existed); false if any validation failed.</returns>
public bool AddProcess(Process process, PipId valuePipId = default)
{
    Contract.Requires(process != null, "Argument process cannot be null");
    Contract.Assert(!IsImmutable);

    using (LockManager.PathAccessGroupLock pathAccessLock = LockManager.AcquirePathAccessLock(process))
    {
        // Adding the same pip twice is treated as success.
        if (PipExists(process))
        {
            return true;
        }

        Dictionary<AbsolutePath, FileArtifact> dependenciesByPath;
        Dictionary<AbsolutePath, FileArtifact> outputsByPath;
        if (!IsValidProcess(process, pathAccessLock, out dependenciesByPath, out outputsByPath))
        {
            // IsValidProcess logs its own diagnostics.
            return false;
        }

        // CreateSourceFile all needed source pips (note that all nodes are created in a topological order).
        foreach (FileArtifact dependency in process.Dependencies)
        {
            if (dependency.IsSourceFile)
            {
                EnsurePipExistsForSourceArtifact(pathAccessLock, dependency);
            }
        }

        // Sets the PipId, references to process.PipId should not be made before here
        NodeId processNode = CreateNodeForPip(process);

        // Service start/shutdown and service-finalization pips do not take an exclusive incoming-edge scope (edgeScope is null for them).
        var edgeScope = process.IsStartOrShutdownKind || process.ServiceInfo?.Kind == ServicePipKind.ServiceFinalization ?
null : MutableDataflowGraph.AcquireExclusiveIncomingEdgeScope(processNode);

using (edgeScope)
{
    // Process dependencies.
    foreach (FileArtifact dependency in process.Dependencies)
    {
        if (dependency.IsSourceFile)
        {
            // Remember the first pip (by add order) referencing each source file path.
            m_sourceFiles.TryAdd(dependency.Path, process.PipId);
        }

        AddInput(pathAccessLock, processNode, dependency, edgeScope: edgeScope);
    }

    // Process order dependencies.
    foreach (PipId orderDependency in process.OrderDependencies)
    {
        AddPipToPipDependency(processNode, orderDependency, edgeScope);
    }

    // Process service dependencies.
    foreach (PipId serviceDependency in process.ServicePipDependencies)
    {
        ProcessServicePipDependency(process.PipId, processNode, serviceDependency, edgeScope);
    }

    // Process directory dependencies.
    foreach (DirectoryArtifact directoryDependency in process.DirectoryDependencies)
    {
        AddDirectoryInput(pathAccessLock, processNode, directoryDependency, edgeScope);
    }

    // Process outputs.
    foreach (FileArtifactWithAttributes output in process.FileOutputs)
    {
        AddOutput(pathAccessLock, processNode, output, edgeScope);

        // An output whose path is also one of this pip's input paths makes this a rewriting pip,
        // and marks the producer of the rewritten input as a rewritten pip.
        FileArtifact rewrittenInput;
        if (dependenciesByPath.TryGetValue(output.Path, out rewrittenInput))
        {
            RewritingPips.Add(process.PipId);

            NodeId rewrittenProducer;
            if (PipProducers.TryGetValue(rewrittenInput, out rewrittenProducer))
            {
                RewrittenPips.Add(rewrittenProducer.ToPipId());
            }
        }
    }

    // Process temp directories.
    if (process.TempDirectory.IsValid)
    {
        TemporaryPaths.TryAdd(process.TempDirectory, process.PipId);
    }

    foreach (var tempDirectory in process.AdditionalTempDirectories)
    {
        TemporaryPaths.TryAdd(tempDirectory, process.PipId);
    }
}

// Register this pip as the producer of each of its output directories, and record
// (per root path) whether any producer declared the root as a shared opaque.
foreach (var directory in process.DirectoryOutputs)
{
    OutputDirectoryProducers.Add(directory, processNode);

    OutputDirectoryRoots.AddOrUpdate(
        key: directory.Path,
        data: directory.IsSharedOpaque,
        addValueFactory: (p, isShared) => isShared,
        updateValueFactory: (p, isShared, oldValue) => oldValue || isShared);
}

// Link to value pip
if (valuePipId.IsValid)
{
    AddPipProducerConsumerDependency(processNode, valuePipId.ToNodeId(), ignoreTopologicalCheck: true);
}
else
{
    AddValueDependency(processNode, process.Provenance);
}

// If this is a service pip, remember its ServiceInfo
if (process.IsService)
{
    m_servicePipToServiceInfoMap[process.PipId] = process.ServiceInfo;
}

// Collect all untracked paths and scopes
foreach (var untrackedPath in process.UntrackedPaths)
{
    m_untrackedPathsAndScopes[untrackedPath] = process.PipId;
}

foreach (var untrackedScope in process.UntrackedScopes)
{
    m_untrackedPathsAndScopes[untrackedScope] = process.PipId;
}
}

ComputeAndStorePipStaticFingerprint(process);

// Seal output directories unless we are patching graph (in which case
// the patching procedure will add those SealDirectory pips)
if (!SealDirectoryTable.IsPatching)
{
    for (int i = 0; i < process.DirectoryOutputs.Length; i++)
    {
        var directory = process.DirectoryOutputs[i];
        if (!SealDirectoryOutput(process, directory, directoryOutputsIndex: i))
        {
            return false;
        }
    }

    // Re-validate to ensure that sealed directory outputs are not within each other.
    foreach (var directory in process.DirectoryOutputs)
    {
        if (!IsValidSealedOutputDirectory(directory, process))
        {
            return false;
        }
    }

    // Shared opaque validations
    if (process.DirectoryOutputs.Length != 0)
    {
        var sharedOpaqueDirectories = process.DirectoryOutputs
            .Where(directoryArtifact => directoryArtifact.IsSharedOpaque)
            .Select(directoryArtifact => directoryArtifact.Path)
            .ToReadOnlySet();

        // Validate that shared directories coming from the same pip are not pairwise nested
        foreach (var sharedOpaqueDirectory in sharedOpaqueDirectories)
        {
            if (!IsValidSharedOpaqueDirectory(sharedOpaqueDirectory, process, sharedOpaqueDirectories))
            {
                return false;
            }
        }

        // Warn (do not fail) when shared opaques are used together with preserved outputs.
        if (sharedOpaqueDirectories.Count > 0 &&
            m_configuration.Sandbox.UnsafeSandboxConfiguration.PreserveOutputs != PreserveOutputsMode.Disabled)
        {
            LogEventWithPipProvenance(
                Logger.PreserveOutputsDoNotApplyToSharedOpaques,
                process);
        }
    }
}

return true;
}

/// <summary>
/// Validates that there are no pair of shared opaque directories declared in the same pip that are nested within in each other.
/// </summary>
/// <returns>True iff no ancestor of <paramref name="sharedOpaqueDirectory"/> is also in <paramref name="sharedOpaqueDirectories"/>.</returns>
private bool IsValidSharedOpaqueDirectory(AbsolutePath sharedOpaqueDirectory, Process processPip, IReadOnlySet<AbsolutePath> sharedOpaqueDirectories)
{
    // We start the search from the parent of the shared opaque directory, if that exists
    var parentSharedOpaqueDirectory = sharedOpaqueDirectory.GetParent(Context.PathTable);
    if (!parentSharedOpaqueDirectory.IsValid)
    {
        // No parent (e.g. a root) — nothing can contain this directory.
        return true;
    }

    // Walk each ancestor path and fail if any of them is itself a declared shared opaque of this pip.
    foreach (var current in Context.PathTable.EnumerateHierarchyBottomUp(parentSharedOpaqueDirectory.Value))
    {
        var parentAsPath = new AbsolutePath(current);
        if (sharedOpaqueDirectories.Contains(parentAsPath))
        {
            LogEventWithPipProvenance(
                Logger.ScheduleFailAddPipInvalidSharedOpaqueDirectoryDueToOverlap,
                processPip,
                sharedOpaqueDirectory,
                parentAsPath);
            return false;
        }
    }

    return true;
}

/// <summary>
/// Adds an IPC pip into this schedule.
/// </summary>
/// <returns>True if the pip was added (or already existed); false if validation failed.</returns>
public bool AddIpcPip(IpcPip ipcPip, PipId valuePipId = default)
{
    Contract.Requires(ipcPip != null, "Argument pip cannot be null");

    using (LockManager.PathAccessGroupLock pathAccessLock = LockManager.AcquirePathAccessLock(ipcPip))
    {
        // Adding the same pip twice is treated as success.
        if (PipExists(ipcPip))
        {
            return true;
        }

        if (!IsValidIpc(ipcPip, pathAccessLock))
        {
            return false;
        }

        // ensure HashSourceFile pips exists for source dependencies
        foreach (FileArtifact dependency in ipcPip.FileDependencies)
        {
            if (dependency.IsSourceFile)
            {
                EnsurePipExistsForSourceArtifact(pathAccessLock, dependency);
            }
        }

        NodeId node = CreateNodeForPip(ipcPip);

        // Service-finalization IPC pips do not take an exclusive incoming-edge scope.
        var edgeScope = ipcPip.IsServiceFinalization ? null : MutableDataflowGraph.AcquireExclusiveIncomingEdgeScope(node);

        using (edgeScope)
        {
            // process file dependencies
            foreach (FileArtifact dependency in ipcPip.FileDependencies)
            {
                AddInput(pathAccessLock, node, dependency, edgeScope: edgeScope);
            }

            // process service dependencies.
            foreach (PipId serviceDependency in ipcPip.ServicePipDependencies)
            {
                ProcessServicePipDependency(ipcPip.PipId, node, serviceDependency, edgeScope: edgeScope);
            }

            // process directory dependencies
            foreach (DirectoryArtifact directoryDependency in ipcPip.DirectoryDependencies)
            {
                AddDirectoryInput(pathAccessLock, node, directoryDependency, edgeScope);
            }

            // process output file
            AddOutput(pathAccessLock, node, FileArtifactWithAttributes.FromFileArtifact(ipcPip.OutputFile, FileExistence.Required), edgeScope: edgeScope);
        }

        // Link to value pip
        if (valuePipId.IsValid)
        {
            AddPipProducerConsumerDependency(node, valuePipId.ToNodeId(), ignoreTopologicalCheck: true);
        }
        else
        {
            AddValueDependency(node, ipcPip.Provenance);
        }
    }

    return true;
}

/// <summary>
/// Records the edge between the pip and the value associated with the provenance
/// </summary>
private void AddValueDependency(NodeId node, PipProvenance provenance)
{
    Contract.Requires(!IsImmutable);
    Contract.Requires(node.IsValid);
    Contract.Requires(provenance != null);

    // Value nodes are keyed by (symbol, qualifier, spec path).
    var tupleKey = (provenance.OutputValueSymbol, provenance.QualifierId, provenance.Token.Path);
    NodeId valueNode;
    bool nodeFound = Values.TryGetValue(tupleKey, out valueNode);

    // TODO: Reconcile this. I cannot enable this contract assumption.
    // Contract.Assume(nodeFound, "Must have had the value node already registered for this pip. This should have been done when the PipConstructionHelper was created.");
    if (nodeFound)
    {
        // If no value node is registered, the dependency is silently skipped (see TODO above).
        AddPipProducerConsumerDependency(node, valueNode, ignoreTopologicalCheck: true);
    }
}

/// <inheritdoc />
public bool AddOutputValue(ValuePip value)
{
    Contract.Requires(value != null, "Argument outputValue cannot be null");
    Contract.Assert(!IsImmutable);

    using (LockManager.AcquireGlobalSharedLockIfApplicable())
    {
        if (PipExists(value))
        {
            Contract.Assume(
                false,
                "Output values should only be evaluated once per qualifier and be added " +
                "before any value to value dependencies. Therefore adding a ValuePip for an output should never collide");
        }

        NodeId valueNode = Values.GetOrAdd(value.Key, (value, this), (key, data) => CreateValuePip(data)).Item.Value;

        // Find parent specfile node
        NodeId specFileNode;
        bool specFileNodeFound = SpecFiles.TryGetValue(value.SpecFile, out specFileNode);
        if (!specFileNodeFound)
        {
            string valueFullName = value.Symbol.ToString(Context.SymbolTable);
            string owningSpecFile = value.SpecFile.Path.ToString(Context.PathTable);
            Contract.Assert(
                false,
                I($"Missing owning specfile node '{owningSpecFile}' for this value '{valueFullName}'. Did you call AddSpecFile properly?"));
        }

        AddPipProducerConsumerDependency(valueNode, specFileNode, ignoreTopologicalCheck: true);
    }

    return true;
}

/// <inheritdoc />
public bool AddValueValueDependency(in ValuePip.ValueDependency valueDependency)
{
    Contract.Requires(valueDependency.ParentIdentifier.IsValid);
    Contract.Requires(valueDependency.ChildIdentifier.IsValid);
    Contract.Assert(!IsImmutable);

    using (LockManager.AcquireGlobalSharedLockIfApplicable())
    {
        var parentTuple = (
            valueDependency.ParentIdentifier,
            valueDependency.ParentQualifier,
            valueDependency.ParentLocation.Path);
        var childTuple = (
            valueDependency.ChildIdentifier,
            valueDependency.ChildQualifier,
            valueDependency.ChildLocation.Path);

        // Either side's value pip is created on demand if it has not been registered yet.
        NodeId parentNode = Values.GetOrAdd(
            parentTuple,
            (valueDependency.ParentLocation, this),
            (key, data) => CreateValuePip(key, data)).Item.Value;
        NodeId childNode = Values.GetOrAdd(
            childTuple,
            (valueDependency.ChildLocation, this),
            (key, data) => CreateValuePip(key, data)).Item.Value;

        AddPipProducerConsumerDependency(childNode, parentNode, ignoreTopologicalCheck: true);
    }

    return true;
}

// Creates the graph node for an already-constructed ValuePip.
private static NodeId CreateValuePip((ValuePip valuePip, Builder builder) data)
{
    return data.builder.CreateNodeForPip(data.valuePip);
}

// Constructs a ValuePip from its key and location, then creates its graph node.
private static NodeId CreateValuePip((FullSymbol symbol, QualifierId qualifierId, AbsolutePath path) key, in (LocationData location, Builder builder) data)
{
    return
        CreateValuePip((new ValuePip(key.symbol, key.qualifierId, data.location), data.builder));
}

/// <inheritdoc />
public bool AddSpecFile(SpecFilePip specFile)
{
    Contract.Requires(specFile != null, "Argument specFile cannot be null");
    Contract.Assert(!IsImmutable);

    using (LockManager.AcquireGlobalSharedLockIfApplicable())
    {
        // Adding the same pip twice is treated as success.
        if (PipExists(specFile))
        {
            return true;
        }

        if (SpecFiles.ContainsKey(specFile.SpecFile))
        {
            // Caller is responsible for handling and reporting this failure.
            return false;
        }

        NodeId specFileNode = CreateNodeForPip(specFile);
        SpecFiles.Add(specFile.SpecFile, specFileNode);

        // Find the parent module
        NodeId owningModuleNode;
        bool parentModuleFound = Modules.TryGetValue(specFile.OwningModule, out owningModuleNode);
        if (!parentModuleFound)
        {
            var specFilePath = specFile.SpecFile.Path.ToString(Context.PathTable);
            Contract.Assert(
                false,
                I($"Missing owning module for this specfile '{specFilePath}'. Did you call AddModule properly?"));
        }

        AddPipProducerConsumerDependency(specFileNode, owningModuleNode, ignoreTopologicalCheck: true);
    }

    return true;
}

/// <inheritdoc />
public bool AddModule(ModulePip module)
{
    Contract.Requires(module != null, "Argument module cannot be null");
    Contract.Assert(!IsImmutable);

    using (LockManager.AcquireGlobalSharedLockIfApplicable())
    {
        // Adding the same pip twice is treated as success.
        if (PipExists(module))
        {
            return true;
        }

        if (Modules.ContainsKey(module.Module))
        {
            // Caller is responsible for handling and reporting this failure.
            return false;
        }

        NodeId moduleNode = CreateNodeForPip(module);
        Modules.Add(module.Module, moduleNode);
    }

    return true;
}

/// <inheritdoc />
public bool AddModuleModuleDependency(ModuleId moduleId, ModuleId dependency)
{
    Contract.Assert(!IsImmutable);

    using (LockManager.AcquireGlobalSharedLockIfApplicable())
    {
        // Both modules must have been added (via AddModule) before a dependency can be recorded.
        NodeId moduleNode;
        if (!Modules.TryGetValue(moduleId, out moduleNode))
        {
            // Caller is responsible for handling and reporting this failure.
            return false;
        }

        NodeId dependencyNode;
        if (!Modules.TryGetValue(dependency, out dependencyNode))
        {
            // Caller is responsible for handling and reporting this failure.
            return false;
        }

        AddPipProducerConsumerDependency(dependencyNode, moduleNode, ignoreTopologicalCheck: true);
    }

    return true;
}

/// <inheritdoc/>
public DirectoryArtifact ReserveSharedOpaqueDirectory(AbsolutePath directoryArtifactRoot)
{
    return SealDirectoryTable.CreateSharedOpaqueDirectoryWithNewSealId(directoryArtifactRoot);
}

/// <inheritdoc/>
public bool TryGetSealDirectoryKind(DirectoryArtifact directoryArtifact, out SealDirectoryKind kind)
{
    Contract.Requires(directoryArtifact.IsValid);

    if (!SealDirectoryTable.TryGetSealForDirectoryArtifact(directoryArtifact, out var pipId))
    {
        kind = default(SealDirectoryKind);
        return false;
    }

    kind = PipTable.GetSealDirectoryKind(pipId);
    return true;
}

/// <inheritdoc />
public DirectoryArtifact AddSealDirectory(SealDirectory sealDirectory, PipId valuePipId = default)
{
    Contract.Requires(sealDirectory != null);
    Contract.Assert(!IsImmutable);

    var semanticPathExpander = SemanticPathExpander.GetModuleExpander(sealDirectory.Provenance.ModuleId);
    AbsolutePath root = sealDirectory.DirectoryRoot;

    // Source seals must be rooted under a known, readable mount.
    if (sealDirectory.Kind.IsSourceSeal())
    {
        var semanticPathInfo = semanticPathExpander.GetSemanticPathInfo(root);
        if (!semanticPathInfo.IsValid)
        {
            LogEventWithPipProvenance(
                Logger.ScheduleFailAddPipInvalidSealDirectorySourceNotUnderMount,
                sealDirectory,
                root);
            return DirectoryArtifact.Invalid;
        }

        if (!semanticPathInfo.IsReadable)
        {
            LogEventWithPipProvenance(
                Logger.ScheduleFailAddPipInvalidSealDirectorySourceNotUnderReadableMount,
                sealDirectory,
                root,
                semanticPathInfo.Root,
                semanticPathInfo.RootName.StringId);
            return DirectoryArtifact.Invalid;
        }
    }

    // Every sealed content artifact must be valid and located under the seal root.
    foreach (FileArtifact artifact in sealDirectory.Contents)
    {
        if (!artifact.IsValid || !artifact.Path.IsWithin(Context.PathTable, root))
        {
            LogEventWithPipProvenance(
                Logger.ScheduleFailAddPipInvalidSealDirectoryContentSinceNotUnderRoot,
                sealDirectory,
                artifact,
                root);
            return DirectoryArtifact.Invalid;
        }
    }

    DirectoryArtifact artifactForNewSeal;

    using (LockManager.PathAccessGroupLock pathAccessLock = LockManager.AcquirePathAccessLock(sealDirectory))
    {
        Contract.Assume(!PipExists(sealDirectory), "Attempted to schedule a pip twice");
        Contract.Assume(
            (sealDirectory.Kind == SealDirectoryKind.SharedOpaque && sealDirectory.IsInitialized) ||
            (sealDirectory.Kind != SealDirectoryKind.SharedOpaque && SealDirectoryTable.IsPatching == sealDirectory.IsInitialized),
            "A shared opaque directory is always initialized. Otherwise, if patching -> sealDirectory must already be initialized; otherwise, it must not be initialized");

        foreach (FileArtifact artifact in sealDirectory.Contents)
        {
            // Note that IsValidInputFileArtifact logs its own errors.
            if (!IsValidInputFileArtifact(pathAccessLock, artifact, sealDirectory, semanticPathExpander))
            {
                return DirectoryArtifact.Invalid;
            }
        }

        // We're now committed to sealing the directory.

        // CreateSourceFile all needed source pips (note that all nodes are created in a topological order).
        foreach (FileArtifact artifact in sealDirectory.Contents)
        {
            if (artifact.IsSourceFile)
            {
                // Lazy source hashing: We often defer hashing sealed source files until the first access. See TryQuerySealedPathContentHash.
                // Note that an eagerly-running pip may already exist for this artifact, in which case it remains eager.
                EnsurePipExistsForSourceArtifact(pathAccessLock, artifact);
            }
        }

        // The directory being sealed possibly already has one or more seals. We create a view collection if not.
        // We're registering a new view in the collection; this generates a new ID.
        // The DirectoryArtifact ends up unique since it combines the directory and a unique-within-directory seal id.
        // Note that we do not collapse identical views into one artifact / ID.

        // For the shared dynamic case, the seal directory has
        // already been initialized
        if (sealDirectory.Kind == SealDirectoryKind.SharedOpaque)
        {
            Contract.Assume(sealDirectory.Directory.IsSharedOpaque);
            artifactForNewSeal = sealDirectory.Directory;
        }
        else
        {
            // For the regular dynamic case, the directory artifact is always
            // created with sealId 0. For other cases, we reserve it
            artifactForNewSeal = sealDirectory.Kind == SealDirectoryKind.Opaque
                ? OutputDirectory.Create(sealDirectory.DirectoryRoot)
                : SealDirectoryTable.ReserveDirectoryArtifact(sealDirectory);
            sealDirectory.SetDirectoryArtifact(artifactForNewSeal);
        }

        Contract.Assume(
            sealDirectory.IsInitialized,
            "Pip must be fully initialized (by assigning a seal ID and artifact) before creating a node for it (and adding it to the pip table)");

        // CreateSourceFile a node for the pip and add it to the pip table. This assigns a PipId.
        NodeId sealDirectoryNode = CreateNodeForPip(sealDirectory);
        Contract.Assume(sealDirectory.PipId.IsValid);

        // Now that we have assigned a PipId and a DirectoryArtifact, sealDirectory is complete and immutable.
        // We can now establish the directory artifact -> pip ID mapping.
        SealDirectoryTable.AddSeal(sealDirectory);

        // For the case of composite directories, there is no process pip that produces them, so
        // we keep the equivalent of OutputDirectoryProducers in CompositeOutputDirectoryProducers.
        // So we update it here, once the pip id has been assigned
        if (sealDirectory.IsComposite)
        {
            CompositeOutputDirectoryProducers.Add(sealDirectory.Directory, sealDirectoryNode);
        }

        using (var edgeScope = MutableDataflowGraph.AcquireExclusiveIncomingEdgeScope(sealDirectoryNode))
        {
            if (!sealDirectory.Kind.IsDynamicKind())
            {
                // Static seals: depend directly on every sealed content artifact.
                foreach (FileArtifact artifact in sealDirectory.Contents)
                {
                    // Lazy source hashing: Maybe we created a lazy source pip. This edge should not make it eager. See above.
                    AddInput(
                        pathAccessLock,
                        sealDirectoryNode,
                        artifact,
                        edgeScope: edgeScope);
                }
            }
            else
            {
                // If the seal directory is a composite one, then there is no process producing it
                if (!sealDirectory.IsComposite)
                {
                    NodeId producerNode;
                    if (!OutputDirectoryProducers.TryGetValue(artifactForNewSeal, out producerNode))
                    {
                        Contract.Assert(false, I($"Producer of output directory '{artifactForNewSeal.Path.ToString(Context.PathTable)}' must have been added"));
                    }

                    if (!MutableDataflowGraph.ContainsNode(producerNode))
                    {
                        Contract.Assert(
                            false,
                            I($"Producer of output directory '{artifactForNewSeal.Path.ToString(Context.PathTable)}' must have been added to the mutable data flow graph"));
                    }

                    AddPipProducerConsumerDependency(producerNode, sealDirectoryNode, ignoreTopologicalCheck: false, edgeScope: edgeScope);
                }
                else
                {
                    // If the seal directory is composed of other seal directories, we add a producer-consumer edge for each of them
                    foreach (var directoryElement in sealDirectory.ComposedDirectories)
                    {
                        // The directory to compose should be a shared opaque. This is the only
                        // kind of composite directory we support for now
                        if (!directoryElement.IsSharedOpaque)
                        {
                            LogEventWithPipProvenance(
                                Logger.ScheduleFailAddPipInvalidComposedSealDirectoryIsNotSharedOpaque,
                                sealDirectory,
                                root,
                                directoryElement.Path);
                            return DirectoryArtifact.Invalid;
                        }

                        // The directory to compose should be within the proposed root
                        if (!directoryElement.Path.IsWithin(Context.PathTable, artifactForNewSeal.Path))
                        {
                            LogEventWithPipProvenance(
                                Logger.ScheduleFailAddPipInvalidComposedSealDirectoryNotUnderRoot,
                                sealDirectory,
                                root,
                                directoryElement.Path);
                            return DirectoryArtifact.Invalid;
                        }

                        // First check if the element is a regular shared opaque, i.e.
it is part of the directory outputs // populated by process pips if (!OutputDirectoryProducers.TryGetValue(directoryElement, out NodeId directoryElementProducer)) { // Otherwise, the element has to be a composite shared opaque if (!CompositeOutputDirectoryProducers.TryGetValue(directoryElement, out directoryElementProducer)) { Contract.Assert(false, I($"Producer of output directory '{directoryElement.Path.ToString(Context.PathTable)}' must have been added")); } } AddPipProducerConsumerDependency(directoryElementProducer, sealDirectoryNode, ignoreTopologicalCheck: false, edgeScope: edgeScope); } } } } // Link to value pip if (valuePipId.IsValid) { AddPipProducerConsumerDependency(sealDirectoryNode, valuePipId.ToNodeId(), ignoreTopologicalCheck: true); } else { AddValueDependency(sealDirectoryNode, sealDirectory.Provenance); } // Update the source sealed directory root map // If a directory artifact for the corresponding root was already added, then we // don't try to store it again since we don't care about keeping all of them, just // a directory artifact is enough for user-facing reporting purposes if (sealDirectory.Kind.IsSourceSeal()) { SourceSealedDirectoryRoots.TryAdd(root, artifactForNewSeal); } } ComputeAndStorePipStaticFingerprint(sealDirectory); return artifactForNewSeal; } /// <summary> /// Ensures that a <see cref="HashSourceFile" /> pip exists for the given source artifact. /// </summary> /// <remarks> /// Source file artifacts are 'produced' (actually, hashed) by internal HashSourceFile pips. /// We do not know about source files until they are first found as an input, so we JIT up a corresponding pip /// if needed (or re-use the existing one). 
/// </remarks> private void EnsurePipExistsForSourceArtifact(LockManager.PathAccessGroupLock pathAccessLock, FileArtifact artifact) { Contract.Requires(!IsImmutable); Contract.Requires(pathAccessLock.HasReadAccess(artifact.Path)); Contract.Requires(artifact.IsSourceFile); if (PipProducers.ContainsKey(artifact)) { return; } using (pathAccessLock.AcquirePathInnerExclusiveLock(artifact.Path)) { if (PipProducers.ContainsKey(artifact)) { return; } // Note that this will fail if this path has already been used as an input (but that should have failed validation already). SetLatestFileArtifactForPath(artifact, expectFirstVersion: true); NodeId node; if (ScheduleConfiguration.SkipHashSourceFile) { node = m_dummyHashSourceFileNode; } else { var sourceFileArtifactPip = new HashSourceFile(artifact); node = CreateNodeForPip(sourceFileArtifactPip); } PipProducers.Add(artifact, node); } } private void AddPipToPipDependency(NodeId pipNodeAfter, PipId pipIdBefore, MutableDirectedGraph.EdgeScope edgeScope) { Contract.Requires(!IsImmutable); Contract.Requires(MutableDataflowGraph.ContainsNode(pipNodeAfter)); Contract.Requires(PipTable.IsValid(pipIdBefore)); // TODO: This doesn't change any state for rewrite validation. Oops. 
Pip pipBefore = PipTable.HydratePip(pipIdBefore, PipQueryContext.PipGraphAddPipToPipDependency); AddPipProducerConsumerDependency(pipBefore.PipId.ToNodeId(), pipNodeAfter, edgeScope: edgeScope); } private void ProcessServicePipDependency(PipId serviceClientPipId, NodeId serviceClientNode, PipId servicePipId, MutableDirectedGraph.EdgeScope edgeScope) { Contract.Requires(!IsImmutable); Contract.Requires(PipTable.IsValid(servicePipId)); // remember service client var getOrAddResult = m_servicePipClients.GetOrAdd(servicePipId, new ConcurrentBigSet<PipId>()); getOrAddResult.Item.Value.Add(serviceClientPipId); AddPipProducerConsumerDependency(servicePipId.ToNodeId(), serviceClientNode, edgeScope: edgeScope); // add edges to finalization pips foreach (var finalizationPipId in m_servicePipToServiceInfoMap[servicePipId].FinalizationPipIds) { AddPipProducerConsumerDependency(serviceClientNode, finalizationPipId.ToNodeId(), ignoreTopologicalCheck: true); } } /// <summary> /// Adds producer-consumer dependency between pips. /// </summary> /// <remarks> /// This method conceptually adds a directed edge from the consumer to producer. 
/// </remarks> private void AddPipProducerConsumerDependency( NodeId producerNode, NodeId consumerNode, bool isLightEdge = false, bool ignoreTopologicalCheck = false, MutableDirectedGraph.EdgeScope edgeScope = null) { Contract.Requires(!IsImmutable); Contract.Requires(MutableDataflowGraph != null, "There should be a reference to the MutableDirectedGraph in order to add a dependency."); Contract.Requires( MutableDataflowGraph.ContainsNode(producerNode), "Argument producerNode must exist in the supporting dependency graph"); Contract.Requires( MutableDataflowGraph.ContainsNode(consumerNode), "Argument consumerNode must exist in the supporting dependency graph"); Contract.Assume(ignoreTopologicalCheck || consumerNode.Value >= producerNode.Value, "Node IDs must form a topological order for some graph traversals."); if (edgeScope != null) { edgeScope.AddEdge(producerNode, isLight: isLightEdge); } else { MutableDataflowGraph.AddEdge(producerNode, consumerNode, isLight: isLightEdge); } } /// <summary> /// Adds an input to a pip node. This registers a dependency edge if necessary and updates related bookkeeping. /// </summary> /// <param name="pathAccessLock">the access lock acquired by the enclosing operation for read access to the file</param> /// <param name="consumerNode">The node consuming <paramref name="inputArtifact" /></param> /// <param name="inputArtifact">The input dependency</param> /// <param name="edgeScope">Optional. The edge scope for adding a dependency edge to the producer.</param> /// <remarks> /// The path read lock must be held when calling this method. It is assumed (but not verified) /// that the input relation is valid; see <see cref="IsValidInputFileArtifact" />. 
/// </remarks> private void AddInput( LockManager.PathAccessGroupLock pathAccessLock, NodeId consumerNode, FileArtifact inputArtifact, MutableDirectedGraph.EdgeScope edgeScope = null) { Contract.Requires(!IsImmutable); Contract.Requires(pathAccessLock.HasReadAccess(inputArtifact.Path)); Contract.Requires(inputArtifact.IsValid); Contract.Requires(MutableDataflowGraph.ContainsNode(consumerNode), "Argument consumerNode must exist in the supporting dependency graph"); // In these branches we ensure that producerNode is set to the pip node that produces inputArtifact. NodeId producerNode; if (inputArtifact.IsSourceFile) { if (ScheduleConfiguration.SkipHashSourceFile) { return; } bool sourceProducerFound = PipProducers.TryGetValue(inputArtifact, out producerNode); Contract.Assume(sourceProducerFound, "Missing HashSourceFile producer; forgot to call EnsurePipExistsForSourceArtifact?"); PipId sourceProducerId = producerNode.ToPipId(); Contract.Assume(PipTable.GetPipType(sourceProducerId) == PipType.HashSourceFile); } else if (inputArtifact.IsOutputFile) { m_outputFileArtifactsUsedAsInputs.Add(inputArtifact); producerNode = PipProducers[inputArtifact]; } else { throw Contract.AssertFailure("Unexpected artifact type"); } Contract.Assert(producerNode != NodeId.Invalid); Contract.Assume(MutableDataflowGraph.ContainsNode(producerNode)); AddPipProducerConsumerDependency(producerNode, consumerNode, isLightEdge: inputArtifact.IsSourceFile, edgeScope: edgeScope); } /// <summary> /// Adds a directory input to a pip node. This registers a dependency edge if necessary and updates related bookkeeping. /// </summary> /// <param name="pathAccessLock">the access lock acquired by the enclosing operation for read access to the directory</param> /// <param name="consumerNode">The node consuming <paramref name="directory" /></param> /// <param name="directory">The input dependency</param> /// <param name="edgeScope">Optional. 
The edge scope for adding a dependency edge to the producer.</param> /// <remarks> /// The path read lock must be held when calling this method. /// </remarks> private void AddDirectoryInput( LockManager.PathAccessGroupLock pathAccessLock, NodeId consumerNode, DirectoryArtifact directory, MutableDirectedGraph.EdgeScope edgeScope) { Contract.Requires(!IsImmutable); Contract.Requires(pathAccessLock.HasReadAccess(directory.Path)); // Each sealed directory gets its own node. That means we don't care here what the contents are (and maybe we don't know them yet). SealDirectoryTable.TryGetSealForDirectoryArtifact(directory, out PipId producerId); Contract.Assert(producerId.IsValid); NodeId producerNode = producerId.ToNodeId(); Contract.Assume(MutableDataflowGraph.ContainsNode(producerNode)); AddPipProducerConsumerDependency(producerNode, consumerNode, edgeScope: edgeScope); } /// <summary> /// Seals output directory. /// </summary> private bool SealDirectoryOutput(Process producer, DirectoryArtifact directory, int directoryOutputsIndex) { Contract.Requires(!IsImmutable); Contract.Requires(OutputDirectory.IsOutputDirectory(directory)); var dynamicKind = directory.IsSharedOpaque ? SealDirectoryKind.SharedOpaque : SealDirectoryKind.Opaque; var provenance = producer.Provenance?.CloneWithSaltedSemiStableHash(HashCodeHelper.Combine( directoryOutputsIndex, (int)PipType.SealDirectory, (int)dynamicKind)); var sealedDirectory = new SealDirectory(directory.Path, s_emptySealContents, dynamicKind, provenance, producer.Tags, patterns: ReadOnlyArray<StringId>.Empty); // For the case of shared dynamic directory, the directory artifact already // has the proper seal id, so the seal directory can be initialized here if (dynamicKind == SealDirectoryKind.SharedOpaque) { sealedDirectory.SetDirectoryArtifact(directory); } var directoryArtifact = AddSealDirectory(sealedDirectory); return directoryArtifact.IsValid; } /// <summary> /// Adds an output to a pip node. 
This registers a dependency edge if necessary (from prior versions of a rewritten output) /// and updates related bookkeeping. Note that all outputs of a pip must be added before its inputs. /// </summary> /// <param name="pathAccessLock">the access lock acquired by the enclosing operation for write access to the file</param> /// <param name="producerNode">The node consuming <paramref name="outputArtifact" /></param> /// <param name="outputArtifact">The output produced by the node</param> /// <param name="edgeScope">Optional. The edge scope for adding a dependency edge to the producer of the rewritten file</param> /// <remarks> /// The path write lock must be held when calling this method. It is assumed (but not verified) /// that the input relation is valid; see <see cref="IsValidOutputFileArtifact" />. /// </remarks> private void AddOutput( LockManager.PathAccessGroupLock pathAccessLock, NodeId producerNode, FileArtifactWithAttributes outputArtifact, MutableDirectedGraph.EdgeScope edgeScope = null) { Contract.Requires(!IsImmutable); Contract.Requires(pathAccessLock.HasWriteAccess(outputArtifact.Path)); Contract.Requires(outputArtifact.IsValid); Contract.Requires( MutableDataflowGraph.ContainsNode(producerNode), "Argument producerNode must exist in the supporting dependency graph"); Contract.Assume(!outputArtifact.IsSourceFile); FileArtifact existingLatestVersion = TryGetLatestFileArtifactForPath(outputArtifact.Path); bool hasExistingVersion = existingLatestVersion.IsValid; Contract.Assume( !hasExistingVersion || existingLatestVersion.RewriteCount == outputArtifact.RewriteCount - 1, "Output artifact should have failed validation (incorrect rewrite count)."); // This node is rewriting an existing output. We must ensure that this node is not scheduled until the prior version is written. // To do so, we ensure that an edge exists from the last version's producer to this new producer (it may already exist due to an input dependency // or another output dependency). 
Note that adding this edge cannot possibly introduce a cycle, since this node is presumably in the process of being // added (thus no other nodes can yet depend on its outputs; in other words, nodes are added in a topological order). if (outputArtifact.RewriteCount > 1) { Contract.Assume(hasExistingVersion); Contract.Assume( PipProducers[existingLatestVersion] != producerNode, "Shouldn't already have registered an output dependency from this node to this path"); // TODO: I disabled the following assertion because // TODO: (1) this check should be part of validation, and // TODO: (2) to allow rewriting source, I need to add input before output (and this sounds more natural). // Outputs must be added before inputs only to support the following assertion. // Contract.Assume( // !m_outputFileArtifactsUsedAsInputs.Contains(existingLatestVersion), // "Previous version is already an input (rewrite chains can't fork); should have failed output validation"); AddInput(pathAccessLock, producerNode, existingLatestVersion, edgeScope: edgeScope); } FileArtifact simpleOutputArtifact = outputArtifact.ToFileArtifact(); SetLatestFileArtifactForPath(simpleOutputArtifact); PipProducers.Add(simpleOutputArtifact, producerNode); if (outputArtifact.IsTemporaryOutputFile) { // Storing temporary files separately to be able to validate inputs properly m_temporaryOutputFiles.Add(outputArtifact.ToFileArtifact()); TemporaryPaths.Add(outputArtifact.Path, producerNode.ToPipId()); } } /// <summary> /// Sets the latest file artifact (version) for a path. /// </summary> /// <remarks> /// The graph lock need not be held when calling this method. 
/// </remarks> private void SetLatestFileArtifactForPath(FileArtifact artifact, bool expectFirstVersion = false) { Contract.Requires(artifact.IsValid); Contract.Requires(artifact.RewriteCount >= 0); AbsolutePath path = artifact.Path; int rewriteCount = artifact.RewriteCount; if (expectFirstVersion) { LatestWriteCountsByPath.Add(path, rewriteCount); } else { LatestWriteCountsByPath[path] = rewriteCount; } } private void ComputeAndStorePipStaticFingerprint(Pip pip) { Contract.Requires(pip != null); if (!ShouldComputePipStaticFingerprints) { return; } string fingerprintText = null; ContentFingerprint fingerprint = m_pipStaticFingerprinter.FingerprintTextEnabled ? m_pipStaticFingerprinter.ComputeWeakFingerprint(pip, out fingerprintText) : m_pipStaticFingerprinter.ComputeWeakFingerprint(pip); m_pipStaticFingerprints.AddFingerprint(pip, fingerprint); if (fingerprintText != null) { Logger.Log.PipStaticFingerprint(LoggingContext, pip.GetDescription(Context), fingerprint.ToString(), fingerprintText); } } private ContentFingerprint GetSealDirectoryFingerprint(DirectoryArtifact directory) { Contract.Requires(directory.IsValid); return SealDirectoryTable.TryGetSealForDirectoryArtifact(directory, out PipId pipId) && PipTable.HydratePip(pipId, PipQueryContext.GetSealDirectoryFingerprint) is SealDirectory sealDirectory && m_pipStaticFingerprints.TryGetFingerprint(sealDirectory, out ContentFingerprint fingerprint) ? fingerprint : ContentFingerprint.Zero; } private ContentFingerprint GetDirectoryProducerFingerprint(DirectoryArtifact directory) { Contract.Requires(directory.IsValid); return OutputDirectoryProducers.TryGetValue(directory, out NodeId nodeId) && m_pipStaticFingerprints.TryGetFingerprint(nodeId.ToPipId(), out ContentFingerprint fingerprint) ? 
fingerprint : ContentFingerprint.Zero; } #endregion Pip Addition #region Event Logging private delegate void PipProvenanceEvent( LoggingContext loggingContext, string file, int line, int column, long pipSemiStableHash, string pipDesc, string pipValueId); private delegate void PipProvenanceEventWithFilePath( LoggingContext loggingContext, string file, int line, int column, long pipSemiStableHash, string pipDesc, string pipValueId, string filePath); // Handy for errors related to sealed directories, since there is a directory root associated with the file. private delegate void PipProvenanceEventWithFilePathAndDirectoryPath( LoggingContext loggingContext, string file, int line, int column, long pipSemiStableHash, string pipDesc, string pipValueId, string filePath, string directoryPath); private delegate void PipProvenanceEventWithDirectoryPath( LoggingContext loggingContext, string file, int line, int column, long pipSemiStableHash, string pipDesc, string pipValueId, string directoryPath); private delegate void PipProvenanceEventWithTwoDirectoryPaths( LoggingContext loggingContext, string file, int line, int column, long pipSemiStableHash, string pipDesc, string pipValueId, string directoryPath, string anotherDirectoryPath); private delegate void PipProvenanceEventWithDirectoryPathAndName( LoggingContext loggingContext, string file, int line, int column, long pipSemiStableHash, string pipDesc, string pipValueId, string directoryPath, string name); private delegate void PipProvenanceEventWithDirectoryPathAndRootPathAndName( LoggingContext loggingContext, string file, int line, int column, long pipSemiStableHash, string pipDesc, string pipValueId, string directoryPath, string rootPath, string name); private delegate void PipProvenanceEventWithDirectoryPathAndRootPath( LoggingContext loggingContext, string file, int line, int column, long pipSemiStableHash, string pipDesc, string pipValueId, string directoryPath, string rootPath); private delegate void 
PipProvenanceEventWithFilePathAndRelatedPip( LoggingContext loggingContext, string file, int line, int column, long pipSemiStableHash, string pipDesc, string pipValueId, string outputFile, long producingPipSemiStableHash, string producingPipDesc, string producingPipValueId); // Handy for errors related to sealed directories, since there is a directory root associated with the file. private delegate void PipProvenanceEventWithFilePathAndDirectoryPathAndRelatedPip( LoggingContext loggingContext, string file, int line, int column, long pipSemiStableHash, string pipDesc, string pipValueId, string outputFile, string directoryPath, long producingPipSemiStableHash, string producingPipDesc, string producingPipValueId); private PipProvenance m_dummyProvenance; private PipProvenance GetDummyProvenance() { Contract.Ensures(Contract.Result<PipProvenance>() != null); return m_dummyProvenance = m_dummyProvenance ?? PipProvenance.CreateDummy(Context); } private void LogEventWithPipProvenance(PipProvenanceEvent pipEvent, Pip pip) { Contract.Requires(pipEvent != null); Contract.Requires(pip != null); PipProvenance provenance = pip.Provenance ?? GetDummyProvenance(); pipEvent( LoggingContext, provenance.Token.Path.ToString(Context.PathTable), provenance.Token.Line, provenance.Token.Position, provenance.SemiStableHash, pip.GetDescription(Context), provenance.OutputValueSymbol.ToString(Context.SymbolTable)); } private void LogEventWithPipProvenance(PipProvenanceEventWithFilePath pipEvent, Pip pip, FileArtifact relatedArtifact) { Contract.Requires(pipEvent != null); Contract.Requires(pip != null); Contract.Requires(relatedArtifact.IsValid); PipProvenance provenance = pip.Provenance ?? 
GetDummyProvenance(); pipEvent( LoggingContext, provenance.Token.Path.ToString(Context.PathTable), provenance.Token.Line, provenance.Token.Position, provenance.SemiStableHash, pip.GetDescription(Context), provenance.OutputValueSymbol.ToString(Context.SymbolTable), relatedArtifact.Path.ToString(Context.PathTable)); } private void LogEventWithPipProvenance( PipProvenanceEventWithFilePathAndDirectoryPath pipEvent, Pip pip, FileArtifact relatedArtifact, AbsolutePath directoryPath) { Contract.Requires(pipEvent != null); Contract.Requires(pip != null); Contract.Requires(relatedArtifact.IsValid); Contract.Requires(directoryPath.IsValid); PipProvenance provenance = pip.Provenance ?? GetDummyProvenance(); pipEvent( LoggingContext, provenance.Token.Path.ToString(Context.PathTable), provenance.Token.Line, provenance.Token.Position, provenance.SemiStableHash, pip.GetDescription(Context), provenance.OutputValueSymbol.ToString(Context.SymbolTable), relatedArtifact.Path.ToString(Context.PathTable), directoryPath.ToString(Context.PathTable)); } private void LogEventWithPipProvenance( PipProvenanceEventWithDirectoryPath pipEvent, Pip pip, AbsolutePath relatedPath) { Contract.Requires(pipEvent != null); Contract.Requires(pip != null); Contract.Requires(relatedPath.IsValid); PipProvenance provenance = pip.Provenance ?? GetDummyProvenance(); pipEvent( LoggingContext, provenance.Token.Path.ToString(Context.PathTable), provenance.Token.Line, provenance.Token.Position, provenance.SemiStableHash, pip.GetDescription(Context), provenance.OutputValueSymbol.ToString(Context.SymbolTable), relatedPath.ToString(Context.PathTable)); } private void LogEventWithPipProvenance( PipProvenanceEventWithTwoDirectoryPaths pipEvent, Pip pip, AbsolutePath relatedPath, AbsolutePath anotherRelatedPath) { Contract.Requires(pipEvent != null); Contract.Requires(pip != null); Contract.Requires(relatedPath.IsValid); PipProvenance provenance = pip.Provenance ?? 
GetDummyProvenance(); pipEvent( LoggingContext, provenance.Token.Path.ToString(Context.PathTable), provenance.Token.Line, provenance.Token.Position, provenance.SemiStableHash, pip.GetDescription(Context), provenance.OutputValueSymbol.ToString(Context.SymbolTable), relatedPath.ToString(Context.PathTable), anotherRelatedPath.ToString(Context.PathTable)); } private void LogEventWithPipProvenance( PipProvenanceEventWithDirectoryPathAndName pipEvent, Pip pip, string directoryPath, StringId name) { Contract.Requires(pipEvent != null); Contract.Requires(pip != null); Contract.Requires(!string.IsNullOrEmpty(directoryPath)); Contract.Requires(name.IsValid); PipProvenance provenance = pip.Provenance ?? GetDummyProvenance(); pipEvent( LoggingContext, provenance.Token.Path.ToString(Context.PathTable), provenance.Token.Line, provenance.Token.Position, provenance.SemiStableHash, pip.GetDescription(Context), provenance.OutputValueSymbol.ToString(Context.SymbolTable), directoryPath, Context.StringTable.GetString(name)); } private void LogEventWithPipProvenance( PipProvenanceEventWithDirectoryPathAndRootPathAndName pipEvent, Pip pip, AbsolutePath relatedPath, AbsolutePath rootPath, StringId name) { Contract.Requires(pipEvent != null); Contract.Requires(pip != null); Contract.Requires(rootPath.IsValid); Contract.Requires(relatedPath.IsValid); Contract.Requires(name.IsValid); PipProvenance provenance = pip.Provenance ?? 
GetDummyProvenance(); pipEvent( LoggingContext, provenance.Token.Path.ToString(Context.PathTable), provenance.Token.Line, provenance.Token.Position, provenance.SemiStableHash, pip.GetDescription(Context), provenance.OutputValueSymbol.ToString(Context.SymbolTable), relatedPath.ToString(Context.PathTable), rootPath.ToString(Context.PathTable), Context.StringTable.GetString(name)); } private void LogEventWithPipProvenance( PipProvenanceEventWithFilePathAndRelatedPip pipEvent, Pip pip, Pip relatedPip, FileArtifact relatedArtifact) { Contract.Requires(pipEvent != null); Contract.Requires(pip != null); Contract.Requires(relatedPip != null); Contract.Requires(relatedArtifact.IsValid); PipProvenance provenance = pip.Provenance ?? GetDummyProvenance(); PipProvenance provenanceForRelated = relatedPip.Provenance ?? GetDummyProvenance(); pipEvent( LoggingContext, provenance.Token.Path.ToString(Context.PathTable), provenance.Token.Line, provenance.Token.Position, provenance.SemiStableHash, pip.GetDescription(Context), provenance.OutputValueSymbol.ToString(Context.SymbolTable), relatedArtifact.Path.ToString(Context.PathTable), provenanceForRelated.SemiStableHash, relatedPip.GetDescription(Context), relatedPip.Provenance.OutputValueSymbol.ToString(Context.SymbolTable)); } private void LogEventWithPipProvenance( PipProvenanceEventWithFilePathAndRelatedPip pipEvent, Pip pip, Pip relatedPip, AbsolutePath relatedPath) { Contract.Requires(pipEvent != null); Contract.Requires(pip != null); Contract.Requires(relatedPip != null); Contract.Requires(relatedPath.IsValid); PipProvenance provenance = pip.Provenance ?? GetDummyProvenance(); PipProvenance provenanceForRelated = relatedPip.Provenance ?? 
GetDummyProvenance(); pipEvent( LoggingContext, provenance.Token.Path.ToString(Context.PathTable), provenance.Token.Line, provenance.Token.Position, provenance.SemiStableHash, pip.GetDescription(Context), provenance.OutputValueSymbol.ToString(Context.SymbolTable), relatedPath.ToString(Context.PathTable), provenanceForRelated.SemiStableHash, relatedPip.GetDescription(Context), relatedPip.Provenance.OutputValueSymbol.ToString(Context.SymbolTable)); } private void LogEventWithPipProvenance( PipProvenanceEventWithFilePathAndDirectoryPathAndRelatedPip pipEvent, Pip pip, Pip relatedPip, FileArtifact relatedArtifact, AbsolutePath directoryPath) { Contract.Requires(pipEvent != null); Contract.Requires(pip != null); Contract.Requires(relatedPip != null); Contract.Requires(relatedArtifact.IsValid); Contract.Requires(directoryPath.IsValid); PipProvenance provenance = pip.Provenance ?? GetDummyProvenance(); PipProvenance provenanceForRelated = relatedPip.Provenance ?? GetDummyProvenance(); pipEvent( LoggingContext, provenance.Token.Path.ToString(Context.PathTable), provenance.Token.Line, provenance.Token.Position, provenance.SemiStableHash, pip.GetDescription(Context), provenance.OutputValueSymbol.ToString(Context.SymbolTable), relatedArtifact.Path.ToString(Context.PathTable), directoryPath.ToString(Context.PathTable), provenanceForRelated.SemiStableHash, relatedPip.GetDescription(Context), provenanceForRelated.OutputValueSymbol.ToString(Context.SymbolTable)); } #endregion Event Logging #region IPipScheduleTraversal Members public override IEnumerable<Pip> RetrievePipImmediateDependencies(Pip pip) { // TODO: This lock would no longer be required if pips must be added in dependency first order // since the set of dependencies would not change after adding a pip using (LockManager.AcquireLock(pip.PipId)) { return base.RetrievePipImmediateDependencies(pip); } } public override IEnumerable<Pip> RetrievePipImmediateDependents(Pip pip) { // TODO: This lock would no longer be 
required if pips must be added in dependency first order // since the set of dependencies would not change after adding a pip using (LockManager.AcquireLock(pip.PipId)) { return base.RetrievePipImmediateDependents(pip).ToArray(); } } /// <inheritdoc /> public GraphPatchingStatistics PartiallyReloadGraph(HashSet<AbsolutePath> affectedSpecs) { Contract.Requires(affectedSpecs != null); throw new InvalidOperationException("This graph builder does not support graph patching"); } /// <inheritdoc /> public void SetSpecsToIgnore(IEnumerable<AbsolutePath> specsToIgnore) { throw new InvalidOperationException("This graph builder does not support graph patching"); } #endregion } } }
50.431953
217
0.526816
[ "MIT" ]
jbayardo/BuildXL
Public/Src/Engine/Scheduler/Graph/PipGraph.Builder.cs
170,460
C#
// *** WARNING: this file was generated by the Pulumi SDK Generator. *** // *** Do not edit by hand unless you're certain you know what you are doing! *** using System; using System.Collections.Generic; using System.Collections.Immutable; using System.Threading.Tasks; using Pulumi.Serialization; namespace Pulumi.AzureNextGen.Batch.V20200301 { public static class GetBatchAccount { public static Task<GetBatchAccountResult> InvokeAsync(GetBatchAccountArgs args, InvokeOptions? options = null) => Pulumi.Deployment.Instance.InvokeAsync<GetBatchAccountResult>("azure-nextgen:batch/v20200301:getBatchAccount", args ?? new GetBatchAccountArgs(), options.WithVersion()); } public sealed class GetBatchAccountArgs : Pulumi.InvokeArgs { /// <summary> /// The name of the Batch account. /// </summary> [Input("accountName", required: true)] public string AccountName { get; set; } = null!; /// <summary> /// The name of the resource group that contains the Batch account. /// </summary> [Input("resourceGroupName", required: true)] public string ResourceGroupName { get; set; } = null!; public GetBatchAccountArgs() { } } [OutputType] public sealed class GetBatchAccountResult { /// <summary> /// The account endpoint used to interact with the Batch service. /// </summary> public readonly string AccountEndpoint; public readonly int ActiveJobAndJobScheduleQuota; /// <summary> /// Contains information about the auto-storage account associated with a Batch account. /// </summary> public readonly Outputs.AutoStoragePropertiesResponse AutoStorage; /// <summary> /// For accounts with PoolAllocationMode set to UserSubscription, quota is managed on the subscription so this value is not returned. /// </summary> public readonly int DedicatedCoreQuota; /// <summary> /// A list of the dedicated core quota per Virtual Machine family for the Batch account. For accounts with PoolAllocationMode set to UserSubscription, quota is managed on the subscription so this value is not returned. 
/// </summary> public readonly ImmutableArray<Outputs.VirtualMachineFamilyCoreQuotaResponse> DedicatedCoreQuotaPerVMFamily; /// <summary> /// Batch is transitioning its core quota system for dedicated cores to be enforced per Virtual Machine family. During this transitional phase, the dedicated core quota per Virtual Machine family may not yet be enforced. If this flag is false, dedicated core quota is enforced via the old dedicatedCoreQuota property on the account and does not consider Virtual Machine family. If this flag is true, dedicated core quota is enforced via the dedicatedCoreQuotaPerVMFamily property on the account, and the old dedicatedCoreQuota does not apply. /// </summary> public readonly bool DedicatedCoreQuotaPerVMFamilyEnforced; public readonly Outputs.EncryptionPropertiesResponse Encryption; /// <summary> /// Identifies the Azure key vault associated with a Batch account. /// </summary> public readonly Outputs.KeyVaultReferenceResponse KeyVaultReference; /// <summary> /// The location of the resource. /// </summary> public readonly string Location; /// <summary> /// For accounts with PoolAllocationMode set to UserSubscription, quota is managed on the subscription so this value is not returned. /// </summary> public readonly int LowPriorityCoreQuota; /// <summary> /// The name of the resource. /// </summary> public readonly string Name; /// <summary> /// The allocation mode for creating pools in the Batch account. /// </summary> public readonly string PoolAllocationMode; public readonly int PoolQuota; /// <summary> /// List of private endpoint connections associated with the Batch account /// </summary> public readonly ImmutableArray<Outputs.PrivateEndpointConnectionResponse> PrivateEndpointConnections; /// <summary> /// The provisioned state of the resource /// </summary> public readonly string ProvisioningState; /// <summary> /// If not specified, the default value is 'enabled'. 
/// </summary> public readonly string PublicNetworkAccess; /// <summary> /// The tags of the resource. /// </summary> public readonly ImmutableDictionary<string, string> Tags; /// <summary> /// The type of the resource. /// </summary> public readonly string Type; [OutputConstructor] private GetBatchAccountResult( string accountEndpoint, int activeJobAndJobScheduleQuota, Outputs.AutoStoragePropertiesResponse autoStorage, int dedicatedCoreQuota, ImmutableArray<Outputs.VirtualMachineFamilyCoreQuotaResponse> dedicatedCoreQuotaPerVMFamily, bool dedicatedCoreQuotaPerVMFamilyEnforced, Outputs.EncryptionPropertiesResponse encryption, Outputs.KeyVaultReferenceResponse keyVaultReference, string location, int lowPriorityCoreQuota, string name, string poolAllocationMode, int poolQuota, ImmutableArray<Outputs.PrivateEndpointConnectionResponse> privateEndpointConnections, string provisioningState, string publicNetworkAccess, ImmutableDictionary<string, string> tags, string type) { AccountEndpoint = accountEndpoint; ActiveJobAndJobScheduleQuota = activeJobAndJobScheduleQuota; AutoStorage = autoStorage; DedicatedCoreQuota = dedicatedCoreQuota; DedicatedCoreQuotaPerVMFamily = dedicatedCoreQuotaPerVMFamily; DedicatedCoreQuotaPerVMFamilyEnforced = dedicatedCoreQuotaPerVMFamilyEnforced; Encryption = encryption; KeyVaultReference = keyVaultReference; Location = location; LowPriorityCoreQuota = lowPriorityCoreQuota; Name = name; PoolAllocationMode = poolAllocationMode; PoolQuota = poolQuota; PrivateEndpointConnections = privateEndpointConnections; ProvisioningState = provisioningState; PublicNetworkAccess = publicNetworkAccess; Tags = tags; Type = type; } } }
40.806061
550
0.66434
[ "Apache-2.0" ]
test-wiz-sec/pulumi-azure-nextgen
sdk/dotnet/Batch/V20200301/GetBatchAccount.cs
6,733
C#
#pragma checksum "D:\Windows Universal\Weather\Weather\HourelyPage.xaml" "{406ea660-64cf-4c82-b6f0-42d48172a799}" "8CA16204CCCD2C4ADA0C126B6DDD0829" //------------------------------------------------------------------------------ // <auto-generated> // This code was generated by a tool. // // Changes to this file may cause incorrect behavior and will be lost if // the code is regenerated. // </auto-generated> //------------------------------------------------------------------------------ namespace Weather { partial class HourelyPage : global::Windows.UI.Xaml.Controls.Page { [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.Image ResultImage9; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock DescriptionTextBlock9; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock WindSpeedTextBlock9; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock WindDirectionTextBlock9; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.Image WindDirectionImage9; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock TempTextBlock9; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock TimeTextBlock9; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.Image 
ResultImage8; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock DescriptionTextBlock8; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock WindSpeedTextBlock8; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock WindDirectionTextBlock8; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.Image WindDirectionImage8; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock TempTextBlock8; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock TimeTextBlock8; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.Image ResultImage7; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock DescriptionTextBlock7; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock WindSpeedTextBlock7; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock WindDirectionTextBlock7; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.Image WindDirectionImage7; 
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock TempTextBlock7; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock TimeTextBlock7; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.Image ResultImage6; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock DescriptionTextBlock6; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock WindSpeedTextBlock6; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock WindDirectionTextBlock6; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.Image WindDirectionImage6; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock TempTextBlock6; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock TimeTextBlock6; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.Image ResultImage5; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock DescriptionTextBlock5; 
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock WindSpeedTextBlock5; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock WindDirectionTextBlock5; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.Image WindDirectionImage5; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock TempTextBlock5; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock TimeTextBlock5; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.Image ResultImage4; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock DescriptionTextBlock4; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock WindSpeedTextBlock4; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock WindDirectionTextBlock4; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.Image WindDirectionImage4; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock TempTextBlock4; 
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock TimeTextBlock4; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.Image ResultImage3; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock DescriptionTextBlock3; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock WindSpeedTextBlock3; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock WindDirectionTextBlock3; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.Image WindDirectionImage3; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock TempTextBlock3; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock TimeTextBlock3; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.Image ResultImage2; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock DescriptionTextBlock2; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock WindSpeedTextBlock2; 
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock WindDirectionTextBlock2; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.Image WindDirectionImage2; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock TempTextBlock2; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock TimeTextBlock2; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.Image ResultImage1; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock DescriptionTextBlock1; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock WindSpeedTextBlock1; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock WindDirectionTextBlock1; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.Image WindDirectionImage1; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock TempTextBlock1; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock TimeTextBlock1; 
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.Image ResultImage0; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock DescriptionTextBlock0; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock WindSpeedTextBlock0; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock WindDirectionTextBlock0; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.Image WindDirectionImage0; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock TempTextBlock0; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.TextBlock TimeTextBlock0; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.Button CurrentWeatherButton; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.Button HourelyWeatherkButton; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.Button SevenDaysWeatherButton; [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private global::Windows.UI.Xaml.Controls.Button ClockButton; 
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] private bool _contentLoaded; /// <summary> /// InitializeComponent() /// </summary> [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 14.0.0.0")] [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] public void InitializeComponent() { if (_contentLoaded) return; _contentLoaded = true; global::System.Uri resourceLocator = new global::System.Uri("ms-appx:///HourelyPage.xaml"); global::Windows.UI.Xaml.Application.LoadComponent(this, resourceLocator, global::Windows.UI.Xaml.Controls.Primitives.ComponentResourceLocation.Application); } } }
85.042553
168
0.72717
[ "MIT" ]
saharamini/Weather-Clock-UWP
source_code/Weather/obj/ARM/Release/HourelyPage.g.i.cs
15,990
C#
// Copyright (c) .NET Foundation. All rights reserved. // Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. using System; using System.Collections.Concurrent; using System.Collections.Generic; using System.Globalization; using System.IO; using System.Linq; using System.Threading; using System.Threading.Tasks; using System.Xml.Linq; using Moq; using Newtonsoft.Json.Linq; using NuGet.Commands; using NuGet.Common; using NuGet.Configuration; using NuGet.Frameworks; using NuGet.PackageManagement; using NuGet.Packaging; using NuGet.Packaging.Core; using NuGet.ProjectManagement; using NuGet.ProjectManagement.Projects; using NuGet.Protocol; using NuGet.Protocol.Core.Types; using NuGet.Resolver; using NuGet.Test.Utility; using NuGet.Versioning; using NuGet.VisualStudio; using Test.Utility; using Xunit; using Xunit.Abstractions; namespace NuGet.Test { public class NuGetPackageManagerTests { // Following are the various sets of packages that are small in size. 
To be used by the functional tests private readonly List<PackageIdentity> _noDependencyLibPackages = new List<PackageIdentity> { new PackageIdentity("Microsoft.AspNet.Razor", new NuGetVersion("2.0.30506")), new PackageIdentity("Microsoft.AspNet.Razor", new NuGetVersion("3.0.0")), new PackageIdentity("Microsoft.AspNet.Razor", new NuGetVersion("3.2.0-rc")), new PackageIdentity("Antlr", new NuGetVersion("3.5.0.2")) }; private readonly List<PackageIdentity> _packageWithDependents = new List<PackageIdentity> { new PackageIdentity("jQuery", new NuGetVersion("1.4.4")), new PackageIdentity("jQuery", new NuGetVersion("1.6.4")), new PackageIdentity("jQuery.Validation", new NuGetVersion("1.13.1")), new PackageIdentity("jQuery.UI.Combined", new NuGetVersion("1.11.2")) }; private readonly List<PackageIdentity> _packageWithDeepDependency = new List<PackageIdentity> { new PackageIdentity("Microsoft.Data.Edm", new NuGetVersion("5.6.2")), new PackageIdentity("Microsoft.WindowsAzure.ConfigurationManager", new NuGetVersion("1.8.0.0")), new PackageIdentity("Newtonsoft.Json", new NuGetVersion("5.0.8")), new PackageIdentity("System.Spatial", new NuGetVersion("5.6.2")), new PackageIdentity("Microsoft.Data.OData", new NuGetVersion("5.6.2")), new PackageIdentity("Microsoft.Data.Services.Client", new NuGetVersion("5.6.2")), new PackageIdentity("WindowsAzure.Storage", new NuGetVersion("4.3.0")) }; private readonly List<PackageIdentity> _morePackageWithDependents = new List<PackageIdentity> { new PackageIdentity("Microsoft.Bcl.Build", new NuGetVersion("1.0.14")), new PackageIdentity("Microsoft.Bcl.Build", new NuGetVersion("1.0.21")), new PackageIdentity("Microsoft.Bcl", new NuGetVersion("1.1.9")), new PackageIdentity("Microsoft.Net.Http", new NuGetVersion("2.2.22")), new PackageIdentity("Microsoft.Net.Http", new NuGetVersion("2.2.28")) }; private readonly List<PackageIdentity> _latestAspNetPackages = new List<PackageIdentity> { new PackageIdentity("Microsoft.AspNet.Mvc", new 
NuGetVersion("6.0.0-beta3")), new PackageIdentity("Microsoft.AspNet.Mvc.Razor", new NuGetVersion("6.0.0-beta3")), new PackageIdentity("Microsoft.AspNet.Mvc.Core", new NuGetVersion("6.0.0-beta3")) }; private readonly XunitLogger _logger; public NuGetPackageManagerTests(ITestOutputHelper output) { _logger = new XunitLogger(output); } // Install and uninstall a package while calling get installed on another thread [Fact] public async Task TestPacManInstallAndRequestInstalledPackages() { using (var packageSource = TestDirectory.Create()) { // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateSourceRepositoryProvider( new List<Configuration.PackageSource>() { new Configuration.PackageSource(packageSource.Path) }); using (var testSolutionManager = new TestSolutionManager(true)) { var testSettings = NullSettings.Instance; var token = CancellationToken.None; var resolutionContext = new ResolutionContext(); var testNuGetProjectContext = new TestNuGetProjectContext(); var deleteOnRestartManager = new TestDeleteOnRestartManager(); var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, testSettings, testSolutionManager, deleteOnRestartManager); var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings); var packagePathResolver = new PackagePathResolver(packagesFolderPath); var projectA = testSolutionManager.AddNewMSBuildProject(); var packageContext = new SimpleTestPackageContext("packageA"); packageContext.AddFile("lib/net45/a.dll"); SimpleTestPackageUtility.CreateOPCPackage(packageContext, packageSource); var run = true; var getInstalledTask = Task.Run(async () => { // Get the list of installed packages while (run) { var projectAInstalled = (await projectA.GetInstalledPackagesAsync(token)).ToList(); } }); // Act // Install and Uninstall 50 times while polling for installed packages for (var i = 0; i < 50; i++) { // Install await nuGetPackageManager.InstallPackageAsync(projectA, 
"packageA", resolutionContext, testNuGetProjectContext, sourceRepositoryProvider.GetRepositories().First(), null, token); // Uninstall await nuGetPackageManager.UninstallPackageAsync( projectA, "packageA", new UninstallationContext(removeDependencies: false, forceRemove: true), testNuGetProjectContext, token); } // Check for exceptions thrown by the get installed task run = false; await getInstalledTask; var installed = (await projectA.GetInstalledPackagesAsync(token)).ToList(); // Assert // Verify no exceptions and that the final package was removed Assert.Equal(0, installed.Count); } } } [Fact] public async Task TestPacManInstallPackage() { // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider(); using (var testSolutionManager = new TestSolutionManager(true)) { var testSettings = NullSettings.Instance; var token = CancellationToken.None; var deleteOnRestartManager = new TestDeleteOnRestartManager(); var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, testSettings, testSolutionManager, deleteOnRestartManager); var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings); var installationCompatibility = new Mock<IInstallationCompatibility>(); nuGetPackageManager.InstallationCompatibility = installationCompatibility.Object; var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject(); var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem; var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath; var packageIdentity = _noDependencyLibPackages[0]; // Pre-Assert // Check that the packages.config file does not exist Assert.False(File.Exists(packagesConfigPath)); // Check that there are no packages returned by PackagesConfigProject var packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList(); 
Assert.Equal(0, packagesInPackagesConfig.Count); Assert.Equal(0, msBuildNuGetProjectSystem.References.Count); // Act await nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, packageIdentity, new ResolutionContext(), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories().First(), null, token); // Assert // Check that the packages.config file exists after the installation Assert.True(File.Exists(packagesConfigPath)); // Check the number of packages and packages returned by PackagesConfigProject after the installation packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(1, packagesInPackagesConfig.Count); Assert.Equal(packageIdentity, packagesInPackagesConfig[0].PackageIdentity); Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[0].TargetFramework); // Ensure that installation compatibility was checked. installationCompatibility.Verify( x => x.EnsurePackageCompatibilityAsync( msBuildNuGetProject, packageIdentity, It.IsAny<DownloadResourceResult>(), It.IsAny<CancellationToken>()), Times.Once); installationCompatibility.Verify( x => x.EnsurePackageCompatibility( It.IsAny<NuGetProject>(), It.IsAny<INuGetPathContext>(), It.IsAny<IEnumerable<NuGetProjectAction>>(), It.IsAny<RestoreResult>()), Times.Never); } } [Fact] public async Task PackagesConfigNuGetProjectGetInstalledPackagesListInvalidXml() { // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider(); using (var testSolutionManager = new TestSolutionManager(true)) { var testSettings = NullSettings.Instance; var token = CancellationToken.None; var deleteOnRestartManager = new TestDeleteOnRestartManager(); var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, testSettings, testSolutionManager, deleteOnRestartManager); var packagesFolderPath = 
PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings); var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject(); var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem; var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath; var packageIdentity = _noDependencyLibPackages[0]; // Create pacakges.config that is an invalid xml using (var w = new StreamWriter(File.Create(packagesConfigPath))) { w.Write("abc"); } // Act and Assert var ex = await Assert.ThrowsAsync<InvalidOperationException>(async () => { await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token); }); Assert.True(ex.Message.StartsWith("An error occurred while reading file")); } } [Fact] public async Task TestPacManInstallPackageAlreadyInstalledException() { // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider(); using (var testSolutionManager = new TestSolutionManager(true)) { var testSettings = NullSettings.Instance; var token = CancellationToken.None; var deleteOnRestartManager = new TestDeleteOnRestartManager(); var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, testSettings, testSolutionManager, deleteOnRestartManager); var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings); var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject(); var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem; var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath; var packageIdentity = _noDependencyLibPackages[0]; // Pre-Assert // Check that the packages.config file does not exist Assert.False(File.Exists(packagesConfigPath)); // Check that there are no packages returned by PackagesConfigProject var packagesInPackagesConfig = (await 
msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(0, packagesInPackagesConfig.Count); Assert.Equal(0, msBuildNuGetProjectSystem.References.Count); // Act await nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, packageIdentity, new ResolutionContext(), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories().First(), null, token); // Assert // Check that the packages.config file exists after the installation Assert.True(File.Exists(packagesConfigPath)); // Check the number of packages and packages returned by PackagesConfigProject after the installation packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(1, packagesInPackagesConfig.Count); Assert.Equal(packageIdentity, packagesInPackagesConfig[0].PackageIdentity); Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[0].TargetFramework); InvalidOperationException alreadyInstalledException = null; try { await nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, packageIdentity, new ResolutionContext(), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories().First(), null, token); } catch (InvalidOperationException ex) { alreadyInstalledException = ex; } Assert.NotNull(alreadyInstalledException); Assert.Equal(string.Format("Package '{0}' already exists in project '{1}'", packageIdentity, msBuildNuGetProjectSystem.ProjectName), alreadyInstalledException.Message); Assert.Equal(alreadyInstalledException.InnerException.GetType(), typeof(PackageAlreadyInstalledException)); } } [Fact] public async Task TestPacManInstallDifferentPackageAfterInstall() { // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider(); using (var testSolutionManager = new TestSolutionManager(true)) { var testSettings = NullSettings.Instance; var token = 
CancellationToken.None; var deleteOnRestartManager = new TestDeleteOnRestartManager(); var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, testSettings, testSolutionManager, deleteOnRestartManager); var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings); var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject(); var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem; var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath; var firstPackageIdentity = _noDependencyLibPackages[0]; // Pre-Assert // Check that the packages.config file does not exist Assert.False(File.Exists(packagesConfigPath)); // Check that there are no packages returned by PackagesConfigProject var packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(0, packagesInPackagesConfig.Count); Assert.Equal(0, msBuildNuGetProjectSystem.References.Count); // Act await nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, firstPackageIdentity, new ResolutionContext(), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories().First(), null, token); var secondPackageIdentity = _noDependencyLibPackages[3]; await nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, secondPackageIdentity, new ResolutionContext(), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories().First(), null, token); // Assert // Check that the packages.config file exists after the installation Assert.True(File.Exists(packagesConfigPath)); // Check the number of packages and packages returned by PackagesConfigProject after the installation packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(2, packagesInPackagesConfig.Count); Assert.Equal(firstPackageIdentity, 
packagesInPackagesConfig[1].PackageIdentity);
                Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[1].TargetFramework);
                Assert.Equal(secondPackageIdentity, packagesInPackagesConfig[0].PackageIdentity);
                Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[0].TargetFramework);
            }
        }

        /// <summary>
        /// Installs a package, then installs another identity of the same package id
        /// (_noDependencyLibPackages[1]); afterwards packages.config must contain exactly
        /// one entry — the second identity — i.e. the first install was replaced.
        /// </summary>
        [Fact]
        public async Task TestPacManInstallSamePackageAfterInstall()
        {
            // Arrange
            var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider();
            using (var testSolutionManager = new TestSolutionManager(true))
            {
                var testSettings = NullSettings.Instance;
                var token = CancellationToken.None;
                var deleteOnRestartManager = new TestDeleteOnRestartManager();
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    testSettings,
                    testSolutionManager,
                    deleteOnRestartManager);
                var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings);

                var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject();
                var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem;
                var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath;
                var firstPackageIdentity = _noDependencyLibPackages[0];

                // Pre-Assert
                // Check that the packages.config file does not exist
                Assert.False(File.Exists(packagesConfigPath));
                // Check that there are no packages returned by PackagesConfigProject
                var packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(0, packagesInPackagesConfig.Count);
                Assert.Equal(0, msBuildNuGetProjectSystem.References.Count);

                // Act
                await nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, firstPackageIdentity, new ResolutionContext(),
                    new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories().First(), null, token);

                var secondPackageIdentity = _noDependencyLibPackages[1];
                await nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, secondPackageIdentity, new ResolutionContext(),
                    new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories().First(), null, token);

                // Assert
                // Check that the packages.config file exists after the installation
                Assert.True(File.Exists(packagesConfigPath));
                // Check the number of packages and packages returned by PackagesConfigProject after the installation
                packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(1, packagesInPackagesConfig.Count);
                Assert.Equal(secondPackageIdentity, packagesInPackagesConfig[0].PackageIdentity);
                Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[0].TargetFramework);
            }
        }

        /// <summary>
        /// Installs a package that has a dependency (_packageWithDependents[2]); both the
        /// requested package and its dependency must be written to packages.config.
        /// </summary>
        [Fact]
        public async Task TestPacManInstallPackageWithDependents()
        {
            // Arrange
            var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider();
            using (var testSolutionManager = new TestSolutionManager(true))
            {
                var testSettings = NullSettings.Instance;
                var token = CancellationToken.None;
                var deleteOnRestartManager = new TestDeleteOnRestartManager();
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    testSettings,
                    testSolutionManager,
                    deleteOnRestartManager);
                var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings);

                var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject();
                var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem;
                var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath;
                var packageIdentity = _packageWithDependents[2];

                // Pre-Assert
                // Check that the packages.config file does not exist
                Assert.False(File.Exists(packagesConfigPath));
                // Check that there are no packages returned by PackagesConfigProject
                var packagesInPackagesConfig = (await
msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(0, packagesInPackagesConfig.Count); Assert.Equal(0, msBuildNuGetProjectSystem.References.Count); // Act await nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, packageIdentity, new ResolutionContext(), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories().First(), null, token); // Assert // Check that the packages.config file exists after the installation Assert.True(File.Exists(packagesConfigPath)); // Check the number of packages and packages returned by PackagesConfigProject after the installation packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(2, packagesInPackagesConfig.Count); Assert.Equal(packageIdentity, packagesInPackagesConfig[1].PackageIdentity); Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[1].TargetFramework); Assert.Equal(_packageWithDependents[0], packagesInPackagesConfig[0].PackageIdentity); Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[0].TargetFramework); } } [Fact] public async Task TestPacManPreviewInstallOrderOfDependencies() { // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV2OnlySourceRepositoryProvider(); using (var testSolutionManager = new TestSolutionManager(true)) { var testSettings = NullSettings.Instance; var token = CancellationToken.None; var deleteOnRestartManager = new TestDeleteOnRestartManager(); var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, testSettings, testSolutionManager, deleteOnRestartManager); var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings); var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject(); var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as 
TestMSBuildNuGetProjectSystem; var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath; var packageIdentity = _morePackageWithDependents[3]; // Pre-Assert // Check that the packages.config file does not exist Assert.False(File.Exists(packagesConfigPath)); // Check that there are no packages returned by PackagesConfigProject var packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(0, packagesInPackagesConfig.Count); Assert.Equal(0, msBuildNuGetProjectSystem.References.Count); // Act var packageActions = (await nuGetPackageManager.PreviewInstallPackageAsync(msBuildNuGetProject, packageIdentity, new ResolutionContext(), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories().First(), null, token)).ToList(); // Assert Assert.Equal(3, packageActions.Count); Assert.True(_morePackageWithDependents[0].Equals(packageActions[0].PackageIdentity)); Assert.Equal(NuGetProjectActionType.Install, packageActions[0].NuGetProjectActionType); Assert.Equal(sourceRepositoryProvider.GetRepositories().Single().PackageSource.Source, packageActions[0].SourceRepository.PackageSource.Source); Assert.True(_morePackageWithDependents[2].Equals(packageActions[1].PackageIdentity)); Assert.Equal(NuGetProjectActionType.Install, packageActions[1].NuGetProjectActionType); Assert.Equal(sourceRepositoryProvider.GetRepositories().Single().PackageSource.Source, packageActions[0].SourceRepository.PackageSource.Source); Assert.True(_morePackageWithDependents[3].Equals(packageActions[2].PackageIdentity)); Assert.Equal(NuGetProjectActionType.Install, packageActions[2].NuGetProjectActionType); Assert.Equal(sourceRepositoryProvider.GetRepositories().Single().PackageSource.Source, packageActions[0].SourceRepository.PackageSource.Source); } } [Fact] public async Task TestPacManPreviewInstallMvcPackageWithPrereleaseFlagFalse() { // Arrange var sourceRepositoryProvider = 
TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider();
            using (var testSolutionManager = new TestSolutionManager(true))
            {
                var testSettings = NullSettings.Instance;
                var token = CancellationToken.None;
                var deleteOnRestartManager = new TestDeleteOnRestartManager();
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    testSettings,
                    testSolutionManager,
                    deleteOnRestartManager);
                var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings);

                var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject();
                var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem;
                var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath;
                var packageIdentity = _latestAspNetPackages[0]; // Microsoft.AspNet.Mvc.6.0.0-beta3

                // Pre-Assert
                // Check that the packages.config file does not exist
                Assert.False(File.Exists(packagesConfigPath));
                // Check that there are no packages returned by PackagesConfigProject
                var packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(0, packagesInPackagesConfig.Count);
                Assert.Equal(0, msBuildNuGetProjectSystem.References.Count);

                // Prerelease resolution is disabled; the explicitly requested prerelease
                // identity must nevertheless be the single resulting action.
                var resolutionContext = new ResolutionContext(DependencyBehavior.Lowest, includePrelease: false, includeUnlisted: true, versionConstraints: VersionConstraints.None);

                // Act
                var packageActions = (await nuGetPackageManager.PreviewInstallPackageAsync(msBuildNuGetProject, packageIdentity,
                    resolutionContext, new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories().First(), null, token)).ToList();

                // Assert
                Assert.Equal(1, packageActions.Count);
                Assert.True(_latestAspNetPackages[0].Equals(packageActions[0].PackageIdentity));
                Assert.Equal(NuGetProjectActionType.Install, packageActions[0].NuGetProjectActionType);
                Assert.Equal(sourceRepositoryProvider.GetRepositories().Single().PackageSource.Source,
                    packageActions[0].SourceRepository.PackageSource.Source);
            }
        }

        /// <summary>
        /// Installs a package and then uninstalls it; packages.config must be removed and
        /// the package file deleted from the packages folder.
        /// </summary>
        [Fact]
        public async Task TestPacManUninstallPackage()
        {
            // Arrange
            var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider();
            using (var testSolutionManager = new TestSolutionManager(true))
            {
                var testSettings = NullSettings.Instance;
                var token = CancellationToken.None;
                var resolutionContext = new ResolutionContext();
                var testNuGetProjectContext = new TestNuGetProjectContext();
                var deleteOnRestartManager = new TestDeleteOnRestartManager();
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    testSettings,
                    testSolutionManager,
                    deleteOnRestartManager);
                var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings);
                var packagePathResolver = new PackagePathResolver(packagesFolderPath);

                var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject();
                var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem;
                var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath;
                var packageIdentity = _noDependencyLibPackages[0];

                // Pre-Assert
                // Check that the packages.config file does not exist
                Assert.False(File.Exists(packagesConfigPath));
                // Check that there are no packages returned by PackagesConfigProject
                var packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(0, packagesInPackagesConfig.Count);
                Assert.Equal(0, msBuildNuGetProjectSystem.References.Count);

                // Act
                await nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, packageIdentity, resolutionContext,
                    testNuGetProjectContext, sourceRepositoryProvider.GetRepositories().First(), null, token);

                // Assert
                // Check that the packages.config file exists after the installation
                Assert.True(File.Exists(packagesConfigPath));
                // Check the number of packages and packages returned by PackagesConfigProject after the installation
                packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(1, packagesInPackagesConfig.Count);
                Assert.Equal(packageIdentity, packagesInPackagesConfig[0].PackageIdentity);
                Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[0].TargetFramework);

                // Main Act
                var uninstallationContext = new UninstallationContext();
                await nuGetPackageManager.UninstallPackageAsync(msBuildNuGetProject, packageIdentity.Id, uninstallationContext,
                    testNuGetProjectContext, token);

                // Assert
                // Check that the packages.config file no longer exists after the uninstallation
                Assert.True(!File.Exists(packagesConfigPath));
                // Check the number of packages and packages returned by PackagesConfigProject after the uninstallation
                packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(0, packagesInPackagesConfig.Count);
                Assert.False(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity)));
            }
        }

        /// <summary>
        /// Installing jQuery.Validation brings in jQuery; uninstalling jQuery directly must
        /// fail because jQuery.Validation still depends on it.
        /// </summary>
        [Fact]
        public async Task TestPacManUninstallDependencyPackage()
        {
            // Arrange
            var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider();
            using (var testSolutionManager = new TestSolutionManager(true))
            {
                var testSettings = NullSettings.Instance;
                var token = CancellationToken.None;
                var resolutionContext = new ResolutionContext();
                var testNuGetProjectContext = new TestNuGetProjectContext();
                var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings);
                var deleteOnRestartManager = new TestDeleteOnRestartManager();
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    testSettings,
                    testSolutionManager,
                    deleteOnRestartManager);

                var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject();
                var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem;
                var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath;
                var packageIdentity = _packageWithDependents[2];

                // Pre-Assert
                // Check that the packages.config file does not exist
                Assert.False(File.Exists(packagesConfigPath));
                // Check that there are no packages returned by PackagesConfigProject
                var packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(0, packagesInPackagesConfig.Count);
                Assert.Equal(0, msBuildNuGetProjectSystem.References.Count);

                // Act
                await nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, packageIdentity, resolutionContext,
                    testNuGetProjectContext, sourceRepositoryProvider.GetRepositories().First(), null, token);

                // Assert
                // Check that the packages.config file exists after the installation
                Assert.True(File.Exists(packagesConfigPath));
                // Check the number of packages and packages returned by PackagesConfigProject after the installation
                packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(2, packagesInPackagesConfig.Count);
                Assert.Equal(packageIdentity, packagesInPackagesConfig[1].PackageIdentity);
                Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[1].TargetFramework);
                Assert.Equal(_packageWithDependents[0], packagesInPackagesConfig[0].PackageIdentity);
                Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[0].TargetFramework);

                // Main Act
                // Attempt to uninstall the dependency while its dependent is still installed.
                Exception exception = null;
                try
                {
                    var uninstallationContext = new UninstallationContext();
                    await nuGetPackageManager.UninstallPackageAsync(msBuildNuGetProject, "jQuery", uninstallationContext,
                        testNuGetProjectContext, token);
                }
                catch (InvalidOperationException ex)
                {
                    exception = ex;
                }
                catch (AggregateException ex)
                {
                    exception = ExceptionUtility.Unwrap(ex);
                }

                Assert.NotNull(exception);
                Assert.True(exception is InvalidOperationException);
Assert.Equal("Unable to uninstall 'jQuery.1.4.4' because 'jQuery.Validation.1.13.1' depends on it.", exception.Message);
            }
        }

        /// <summary>
        /// Same scenario as TestPacManUninstallDependencyPackage, but via
        /// PreviewUninstallPackageAsync: previewing the uninstall of a dependency that is
        /// still depended upon must throw the same InvalidOperationException.
        /// </summary>
        [Fact]
        public async Task TestPacManPreviewUninstallDependencyPackage()
        {
            // Arrange
            var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider();
            using (var testSolutionManager = new TestSolutionManager(true))
            {
                var testSettings = NullSettings.Instance;
                var token = CancellationToken.None;
                var resolutionContext = new ResolutionContext();
                var testNuGetProjectContext = new TestNuGetProjectContext();
                var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings);
                var deleteOnRestartManager = new TestDeleteOnRestartManager();
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    testSettings,
                    testSolutionManager,
                    deleteOnRestartManager);

                var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject();
                var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem;
                var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath;
                var packageIdentity = _packageWithDependents[2];

                // Pre-Assert
                // Check that the packages.config file does not exist
                Assert.False(File.Exists(packagesConfigPath));
                // Check that there are no packages returned by PackagesConfigProject
                var packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(0, packagesInPackagesConfig.Count);
                Assert.Equal(0, msBuildNuGetProjectSystem.References.Count);

                // Act
                await nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, packageIdentity, resolutionContext,
                    testNuGetProjectContext, sourceRepositoryProvider.GetRepositories().First(), null, token);

                // Assert
                // Check that the packages.config file exists after the installation
                Assert.True(File.Exists(packagesConfigPath));
                // Check the number of packages and packages returned by PackagesConfigProject after the installation
                packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(2, packagesInPackagesConfig.Count);
                Assert.Equal(packageIdentity, packagesInPackagesConfig[1].PackageIdentity);
                Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[1].TargetFramework);
                Assert.Equal(_packageWithDependents[0], packagesInPackagesConfig[0].PackageIdentity);
                Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[0].TargetFramework);

                // Main Act
                Exception exception = null;
                try
                {
                    var uninstallationContext = new UninstallationContext();
                    var packageActions = await nuGetPackageManager.PreviewUninstallPackageAsync(msBuildNuGetProject, "jQuery",
                        uninstallationContext, testNuGetProjectContext, token);
                }
                catch (InvalidOperationException ex)
                {
                    exception = ex;
                }
                catch (AggregateException ex)
                {
                    exception = ExceptionUtility.Unwrap(ex);
                }

                Assert.NotNull(exception);
                Assert.True(exception is InvalidOperationException);
                Assert.Equal("Unable to uninstall 'jQuery.1.4.4' because 'jQuery.Validation.1.13.1' depends on it.", exception.Message);
            }
        }

        /// <summary>
        /// Installs the same package into two projects and uninstalls it from one; the other
        /// project keeps its reference and the package file stays in the shared packages folder.
        /// </summary>
        [Fact]
        public async Task TestPacManUninstallPackageOnMultipleProjects()
        {
            // Arrange
            var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider();
            using (var testSolutionManager = new TestSolutionManager(true))
            {
                var testSettings = NullSettings.Instance;
                var token = CancellationToken.None;
                var resolutionContext = new ResolutionContext();
                var testNuGetProjectContext = new TestNuGetProjectContext();
                var deleteOnRestartManager = new TestDeleteOnRestartManager();
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    testSettings,
                    testSolutionManager,
                    deleteOnRestartManager);
                var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings);
                var packagePathResolver = new
PackagePathResolver(packagesFolderPath);

                var projectA = testSolutionManager.AddNewMSBuildProject();
                var projectB = testSolutionManager.AddNewMSBuildProject();
                var packageIdentity = _noDependencyLibPackages[0];

                // Act
                await nuGetPackageManager.InstallPackageAsync(projectA, packageIdentity, resolutionContext,
                    testNuGetProjectContext, sourceRepositoryProvider.GetRepositories().First(), null, token);
                await nuGetPackageManager.InstallPackageAsync(projectB, packageIdentity, resolutionContext,
                    testNuGetProjectContext, sourceRepositoryProvider.GetRepositories().First(), null, token);

                // Assert
                var projectAInstalled = (await projectA.GetInstalledPackagesAsync(token)).ToList();
                var projectBInstalled = (await projectB.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(1, projectAInstalled.Count);
                Assert.Equal(1, projectBInstalled.Count);

                // Main Act
                var uninstallationContext = new UninstallationContext();
                await nuGetPackageManager.UninstallPackageAsync(projectA, packageIdentity.Id, uninstallationContext,
                    testNuGetProjectContext, token);

                // Assert
                // projectB still references the package, so the file must remain in the packages folder.
                projectAInstalled = (await projectA.GetInstalledPackagesAsync(token)).ToList();
                projectBInstalled = (await projectB.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(0, projectAInstalled.Count);
                Assert.Equal(1, projectBInstalled.Count);
                Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity)));
            }
        }

        /// <summary>
        /// Installs a specific version and then a higher specific version of the same package;
        /// the higher version must replace the lower one in both packages.config and the
        /// packages folder.
        /// </summary>
        [Fact]
        public async Task TestPacManInstallHigherSpecificVersion()
        {
            // Arrange
            var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider();
            using (var testSolutionManager = new TestSolutionManager(true))
            {
                var testSettings = NullSettings.Instance;
                var token = CancellationToken.None;
                var resolutionContext = new ResolutionContext();
                var testNuGetProjectContext = new TestNuGetProjectContext();
                var deleteOnRestartManager = new TestDeleteOnRestartManager();
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    testSettings,
                    testSolutionManager,
                    deleteOnRestartManager);
                var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings);
                var packagePathResolver = new PackagePathResolver(packagesFolderPath);

                var projectA = testSolutionManager.AddNewMSBuildProject();
                var packageIdentity0 = _packageWithDependents[0];
                var packageIdentity1 = _packageWithDependents[1];

                // Act
                await nuGetPackageManager.InstallPackageAsync(projectA, packageIdentity0, resolutionContext,
                    testNuGetProjectContext, sourceRepositoryProvider.GetRepositories().First(), null, token);

                // Assert
                var projectAInstalled = (await projectA.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(1, projectAInstalled.Count);
                Assert.Equal(packageIdentity0, projectAInstalled[0].PackageIdentity);
                Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity0)));
                Assert.False(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity1)));

                // Main Act
                await nuGetPackageManager.InstallPackageAsync(projectA, packageIdentity1, resolutionContext,
                    testNuGetProjectContext, sourceRepositoryProvider.GetRepositories().First(), null, token);

                // Assert
                projectAInstalled = (await projectA.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(1, projectAInstalled.Count);
                Assert.Equal(packageIdentity1, projectAInstalled[0].PackageIdentity);
                Assert.False(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity0)));
                Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity1)));
            }
        }

        /// <summary>
        /// Installs a specific version and then a lower specific version of the same package;
        /// the lower version must replace the higher one (explicit downgrade).
        /// </summary>
        [Fact]
        public async Task TestPacManInstallLowerSpecificVersion()
        {
            // Arrange
            var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider();
            using (var testSolutionManager = new TestSolutionManager(true))
            {
                var testSettings = NullSettings.Instance;
                var token = CancellationToken.None;
                var resolutionContext = new ResolutionContext();
                var testNuGetProjectContext = new TestNuGetProjectContext();
                var
deleteOnRestartManager = new TestDeleteOnRestartManager();
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    testSettings,
                    testSolutionManager,
                    deleteOnRestartManager);
                var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings);
                var packagePathResolver = new PackagePathResolver(packagesFolderPath);

                var projectA = testSolutionManager.AddNewMSBuildProject();
                var packageIdentity0 = _packageWithDependents[0];
                var packageIdentity1 = _packageWithDependents[1];

                // Act
                await nuGetPackageManager.InstallPackageAsync(projectA, packageIdentity1, resolutionContext,
                    testNuGetProjectContext, sourceRepositoryProvider.GetRepositories().First(), null, token);

                // Assert
                var projectAInstalled = (await projectA.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(1, projectAInstalled.Count);
                Assert.Equal(packageIdentity1, projectAInstalled[0].PackageIdentity);
                Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity1)));
                Assert.False(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity0)));

                // Main Act
                // Explicitly installing the lower version must downgrade, replacing the higher one.
                await nuGetPackageManager.InstallPackageAsync(projectA, packageIdentity0, resolutionContext,
                    testNuGetProjectContext, sourceRepositoryProvider.GetRepositories().First(), null, token);

                // Assert
                projectAInstalled = (await projectA.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(1, projectAInstalled.Count);
                Assert.Equal(packageIdentity0, projectAInstalled[0].PackageIdentity);
                Assert.False(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity1)));
                Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity0)));
            }
        }

        /// <summary>
        /// Installs a specific version, then installs by package id only; installing by id must
        /// resolve to and upgrade to the latest available version.
        /// </summary>
        [Fact]
        public async Task TestPacManInstallLatestVersion()
        {
            // Arrange
            var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider();
            using (var testSolutionManager = new TestSolutionManager(true))
            {
                var testSettings = NullSettings.Instance;
                var token = CancellationToken.None;
                var resolutionContext = new ResolutionContext();
                var testNuGetProjectContext = new TestNuGetProjectContext();
                var deleteOnRestartManager = new TestDeleteOnRestartManager();
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    testSettings,
                    testSolutionManager,
                    deleteOnRestartManager);
                var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings);
                var packagePathResolver = new PackagePathResolver(packagesFolderPath);

                var projectA = testSolutionManager.AddNewMSBuildProject();
                var packageIdentity0 = _packageWithDependents[0];

                // Resolve what the latest version of this package id is, to assert against later.
                var resolvedPackage = await NuGetPackageManager.GetLatestVersionAsync(
                    packageIdentity0.Id,
                    projectA,
                    resolutionContext,
                    sourceRepositoryProvider.GetRepositories().First(),
                    Common.NullLogger.Instance,
                    token);
                var packageLatest = new PackageIdentity(packageIdentity0.Id, resolvedPackage.LatestVersion);

                // Act
                await nuGetPackageManager.InstallPackageAsync(projectA, packageIdentity0, resolutionContext,
                    testNuGetProjectContext, sourceRepositoryProvider.GetRepositories().First(), null, token);

                // Assert
                var projectAInstalled = (await projectA.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(1, projectAInstalled.Count);
                Assert.Equal(packageIdentity0, projectAInstalled[0].PackageIdentity);
                Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity0)));
                Assert.False(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageLatest)));

                // Main Act
                // Installing by id (no version) should upgrade to the latest version.
                await nuGetPackageManager.InstallPackageAsync(projectA, packageIdentity0.Id, resolutionContext,
                    testNuGetProjectContext, sourceRepositoryProvider.GetRepositories().First(), null, token);

                // Assert
                projectAInstalled = (await projectA.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(1, projectAInstalled.Count);
                Assert.Equal(packageLatest, projectAInstalled[0].PackageIdentity);
                Assert.False(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity0)));
Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageLatest))); } } [Fact] public async Task TestPacManInstallLatestVersionForPackageReference() { // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider(); var testSettings = NullSettings.Instance; var token = CancellationToken.None; var resolutionContext = new ResolutionContext(); var packageIdentity0 = _packageWithDependents[0]; // Act var resolvedPackage = await NuGetPackageManager.GetLatestVersionAsync( new PackageReference(packageIdentity0, NuGetFramework.AnyFramework), NuGetFramework.AnyFramework, resolutionContext, sourceRepositoryProvider.GetRepositories(), NullLogger.Instance, token); // Assert Assert.NotNull(resolvedPackage.LatestVersion); } [Fact] public async Task TestPacManInstallLatestVersionOfDependencyPackage() { // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider(); using (var testSolutionManager = new TestSolutionManager(true)) { var testSettings = NullSettings.Instance; var token = CancellationToken.None; var resolutionContext = new ResolutionContext(); var testNuGetProjectContext = new TestNuGetProjectContext(); var deleteOnRestartManager = new TestDeleteOnRestartManager(); var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, testSettings, testSolutionManager, deleteOnRestartManager); var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings); var packagePathResolver = new PackagePathResolver(packagesFolderPath); var projectA = testSolutionManager.AddNewMSBuildProject(); var packageIdentity0 = _packageWithDependents[0]; var dependentPackage = _packageWithDependents[2]; var resolvedPackage = await NuGetPackageManager.GetLatestVersionAsync( packageIdentity0.Id, projectA, resolutionContext, sourceRepositoryProvider.GetRepositories().First(), Common.NullLogger.Instance, token); var packageLatest = 
new PackageIdentity(packageIdentity0.Id, resolvedPackage.LatestVersion);

                // Act
                await nuGetPackageManager.InstallPackageAsync(projectA, dependentPackage, resolutionContext,
                    testNuGetProjectContext, sourceRepositoryProvider.GetRepositories().First(), null, token);

                // Assert
                var projectAInstalled = (await projectA.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(2, projectAInstalled.Count);
                Assert.Equal(packageIdentity0, projectAInstalled[0].PackageIdentity);
                Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity0)));
                Assert.Equal(dependentPackage, projectAInstalled[1].PackageIdentity);
                Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(dependentPackage)));
                Assert.False(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageLatest)));

                // Main Act
                // Installing the dependency by id only should upgrade it to the latest version.
                await nuGetPackageManager.InstallPackageAsync(projectA, packageIdentity0.Id, resolutionContext,
                    testNuGetProjectContext, sourceRepositoryProvider.GetRepositories().First(), null, token);

                // Assert
                projectAInstalled = (await projectA.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(2, projectAInstalled.Count);
                Assert.Equal(packageLatest, projectAInstalled[0].PackageIdentity);
                Assert.Equal(dependentPackage, projectAInstalled[1].PackageIdentity);
                Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(dependentPackage)));
                Assert.False(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity0)));
                Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageLatest)));
            }
        }

        /// <summary>
        /// Installs a dependent package (pulling in its dependency), then installs a higher
        /// specific version of that dependency; the dependency must be upgraded in place
        /// while the dependent stays installed.
        /// </summary>
        [Fact]
        public async Task TestPacManInstallHigherSpecificVersionOfDependencyPackage()
        {
            // Arrange
            var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider();
            using (var testSolutionManager = new TestSolutionManager(true))
            {
                var testSettings = NullSettings.Instance;
                var token = CancellationToken.None;
                var resolutionContext = new ResolutionContext();
                var testNuGetProjectContext = new TestNuGetProjectContext();
                var deleteOnRestartManager = new TestDeleteOnRestartManager();
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    testSettings,
                    testSolutionManager,
                    deleteOnRestartManager);
                var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings);
                var packagePathResolver = new PackagePathResolver(packagesFolderPath);

                var projectA = testSolutionManager.AddNewMSBuildProject();
                var packageIdentity0 = _packageWithDependents[0];
                var packageIdentity1 = _packageWithDependents[1];
                var dependentPackage = _packageWithDependents[2];

                // Act
                await nuGetPackageManager.InstallPackageAsync(projectA, dependentPackage, resolutionContext,
                    testNuGetProjectContext, sourceRepositoryProvider.GetRepositories().First(), null, token);

                // Assert
                var projectAInstalled = (await projectA.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(2, projectAInstalled.Count);
                Assert.Equal(packageIdentity0, projectAInstalled[0].PackageIdentity);
                Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity0)));
                Assert.Equal(dependentPackage, projectAInstalled[1].PackageIdentity);
                Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(dependentPackage)));
                Assert.False(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity1)));

                // Main Act
                await nuGetPackageManager.InstallPackageAsync(projectA, packageIdentity1, resolutionContext,
                    testNuGetProjectContext, sourceRepositoryProvider.GetRepositories().First(), null, token);

                // Assert
                projectAInstalled = (await projectA.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(2, projectAInstalled.Count);
                Assert.Equal(packageIdentity1, projectAInstalled[0].PackageIdentity);
                Assert.Equal(dependentPackage, projectAInstalled[1].PackageIdentity);
                Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(dependentPackage)));
Assert.False(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity0))); Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity1))); } } [Fact] public async Task TestPacManInstallLowerSpecificVersionOfDependencyPackage() { // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider(); using (var testSolutionManager = new TestSolutionManager(true)) { var testSettings = NullSettings.Instance; var token = CancellationToken.None; var resolutionContext = new ResolutionContext(DependencyBehavior.Highest, false, true, VersionConstraints.None); var testNuGetProjectContext = new TestNuGetProjectContext(); var deleteOnRestartManager = new TestDeleteOnRestartManager(); var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, testSettings, testSolutionManager, deleteOnRestartManager); var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings); var packagePathResolver = new PackagePathResolver(packagesFolderPath); var projectA = testSolutionManager.AddNewMSBuildProject(); var packageIdentity0 = _packageWithDependents[0]; var dependentPackage = _packageWithDependents[2]; var resolvedPackage = await NuGetPackageManager.GetLatestVersionAsync( packageIdentity0.Id, projectA, resolutionContext, sourceRepositoryProvider.GetRepositories().First(), Common.NullLogger.Instance, token); var packageLatest = new PackageIdentity(packageIdentity0.Id, resolvedPackage.LatestVersion); // Act await nuGetPackageManager.InstallPackageAsync(projectA, dependentPackage, resolutionContext, testNuGetProjectContext, sourceRepositoryProvider.GetRepositories().First(), null, token); // Assert var projectAInstalled = (await projectA.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(2, projectAInstalled.Count); Assert.Equal(packageLatest, projectAInstalled[0].PackageIdentity); 
Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageLatest))); Assert.Equal(dependentPackage, projectAInstalled[1].PackageIdentity); Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(dependentPackage))); Assert.False(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity0))); // Main Act await nuGetPackageManager.InstallPackageAsync(projectA, packageIdentity0, resolutionContext, testNuGetProjectContext, sourceRepositoryProvider.GetRepositories().First(), null, token); // Assert projectAInstalled = (await projectA.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(2, projectAInstalled.Count); Assert.Equal(packageIdentity0, projectAInstalled[0].PackageIdentity); Assert.Equal(dependentPackage, projectAInstalled[1].PackageIdentity); Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(dependentPackage))); Assert.False(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageLatest))); Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity0))); } } [Fact] public async Task TestPacManInstallPackageWhichUpdatesParent() { // https://github.com/NuGet/Home/issues/127 // Repro step: // 1.Install-Package jquery.validation -Version 1.8 // 2.Update-package jquery -version 2.0.3 // Expected: jquery.validation was updated to 1.8.0.1 // jquery 1.8 is unique because it allows only a single version of jquery // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider(); using (var testSolutionManager = new TestSolutionManager(true)) { var testSettings = NullSettings.Instance; var token = CancellationToken.None; var resolutionContext = new ResolutionContext(); var testNuGetProjectContext = new TestNuGetProjectContext(); var deleteOnRestartManager = new TestDeleteOnRestartManager(); var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, testSettings, testSolutionManager, 
                    deleteOnRestartManager);
                var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings);
                var packagePathResolver = new PackagePathResolver(packagesFolderPath);

                var projectA = testSolutionManager.AddNewMSBuildProject();
                // jquery.validation 1.8 is the parent; installing jquery 2.0.3 later should
                // force this parent to be updated (to 1.8.0.1), per the repro steps above.
                var jqueryValidation18 = new PackageIdentity("jquery.validation", NuGetVersion.Parse("1.8"));
                var jquery203 = new PackageIdentity("jquery", NuGetVersion.Parse("2.0.3"));

                // Act
                await nuGetPackageManager.InstallPackageAsync(projectA, jqueryValidation18, resolutionContext,
                    testNuGetProjectContext, sourceRepositoryProvider.GetRepositories().First(), null, token);

                // Assert: jquery.validation 1.8 plus one dependency (2 packages total) are installed,
                // and the parent's nupkg is present in the packages folder.
                var projectAInstalled = (await projectA.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(2, projectAInstalled.Count);
                Assert.Equal(jqueryValidation18, projectAInstalled[1].PackageIdentity);
                Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(jqueryValidation18)));

                // Main Act: installing the newer jquery dependency must pull the parent
                // jquery.validation forward as well.
                await nuGetPackageManager.InstallPackageAsync(projectA, jquery203, resolutionContext,
                    testNuGetProjectContext, sourceRepositoryProvider.GetRepositories().First(), null, token);

                // Assert: parent was updated to 1.8.0.1 and the old 1.8 nupkg was removed.
                projectAInstalled = (await projectA.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(2, projectAInstalled.Count);
                Assert.Equal(new PackageIdentity("jquery.validation", NuGetVersion.Parse("1.8.0.1")), projectAInstalled[1].PackageIdentity);
                Assert.False(File.Exists(packagePathResolver.GetInstalledPackageFilePath(jqueryValidation18)));
            }
        }

        [Fact]
        public async Task TestPacManInstallPackageWhichUpdatesDependency()
        {
            // Arrange
            var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider();
            using (var testSolutionManager = new TestSolutionManager(true))
            {
                var testSettings = NullSettings.Instance;
                var token = CancellationToken.None;
                var resolutionContext = new ResolutionContext();
                var testNuGetProjectContext = new TestNuGetProjectContext();
                var deleteOnRestartManager = new TestDeleteOnRestartManager();
                var
nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, testSettings, testSolutionManager, deleteOnRestartManager); var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings); var packagePathResolver = new PackagePathResolver(packagesFolderPath); var projectA = testSolutionManager.AddNewMSBuildProject(); var packageIdentity0 = _packageWithDependents[0]; var packageIdentity1 = _packageWithDependents[1]; var packageIdentity2 = _packageWithDependents[2]; var packageIdentity3 = _packageWithDependents[3]; // Act await nuGetPackageManager.InstallPackageAsync(projectA, packageIdentity2, resolutionContext, testNuGetProjectContext, sourceRepositoryProvider.GetRepositories().First(), null, token); // Assert var projectAInstalled = (await projectA.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(2, projectAInstalled.Count); Assert.Equal(packageIdentity0, projectAInstalled[0].PackageIdentity); Assert.Equal(packageIdentity2, projectAInstalled[1].PackageIdentity); Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity0))); Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity2))); // Main Act await nuGetPackageManager.InstallPackageAsync(projectA, packageIdentity3, resolutionContext, testNuGetProjectContext, sourceRepositoryProvider.GetRepositories().First(), null, token); // Assert projectAInstalled = (await projectA.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(3, projectAInstalled.Count); Assert.Equal(packageIdentity1, projectAInstalled[0].PackageIdentity); Assert.Equal(packageIdentity2, projectAInstalled[2].PackageIdentity); Assert.Equal(packageIdentity3, projectAInstalled[1].PackageIdentity); Assert.False(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity0))); Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity1))); 
Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity2))); Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity3))); } } [Fact] public async Task TestPacManPreviewUpdatePackageFollowingForceUninstall() { // Arrange // Set up Package Source var packages = new List<SourcePackageDependencyInfo> { new SourcePackageDependencyInfo("a", new NuGetVersion(1, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("a", new NuGetVersion(2, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("a", new NuGetVersion(3, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("b", new NuGetVersion(1, 0, 0), new[] { new Packaging.Core.PackageDependency("a", new VersionRange(new NuGetVersion(1, 0, 0))) }, true, null), new SourcePackageDependencyInfo("b", new NuGetVersion(2, 0, 0), new[] { new Packaging.Core.PackageDependency("a", new VersionRange(new NuGetVersion(2, 0, 0))) }, true, null), new SourcePackageDependencyInfo("b", new NuGetVersion(3, 0, 0), new[] { new Packaging.Core.PackageDependency("a", new VersionRange(new NuGetVersion(2, 0, 0))) }, true, null), new SourcePackageDependencyInfo("c", new NuGetVersion(1, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("c", new NuGetVersion(2, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("c", new NuGetVersion(3, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("d", new NuGetVersion(1, 0, 0), new[] { new Packaging.Core.PackageDependency("e", new VersionRange(new NuGetVersion(1, 0, 0))) }, true, null), new SourcePackageDependencyInfo("d", new NuGetVersion(2, 0, 0), new[] { new Packaging.Core.PackageDependency("e", new VersionRange(new NuGetVersion(1, 0, 0))) }, true, null), new 
SourcePackageDependencyInfo("d", new NuGetVersion(3, 0, 0), new[] { new Packaging.Core.PackageDependency("e", new VersionRange(new NuGetVersion(1, 0, 0))) }, true, null), new SourcePackageDependencyInfo("d", new NuGetVersion(4, 0, 0), new[] { new Packaging.Core.PackageDependency("e", new VersionRange(new NuGetVersion(1, 0, 0))) }, true, null), new SourcePackageDependencyInfo("e", new NuGetVersion(1, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("e", new NuGetVersion(2, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("f", new NuGetVersion(1, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("f", new NuGetVersion(2, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("f", new NuGetVersion(3, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("f", new NuGetVersion(4, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), }; var sourceRepositoryProvider = CreateSource(packages); // Set up NuGetProject var fwk45 = NuGetFramework.Parse("net45"); var installedPackages = new List<NuGet.Packaging.PackageReference> { new NuGet.Packaging.PackageReference(new PackageIdentity("a", new NuGetVersion(1, 0, 0)), fwk45, true), new NuGet.Packaging.PackageReference(new PackageIdentity("b", new NuGetVersion(1, 0, 0)), fwk45, true), new NuGet.Packaging.PackageReference(new PackageIdentity("c", new NuGetVersion(2, 0, 0)), fwk45, true), new NuGet.Packaging.PackageReference(new PackageIdentity("d", new NuGetVersion(2, 0, 0)), fwk45, true), // No package "e" even though "d" depends on it (the user must have done an uninstall-package with a -force option) }; var nuGetProject = new TestNuGetProject(installedPackages); // Create Package Manager using (var solutionManager = new TestSolutionManager(true)) { var nuGetPackageManager = new NuGetPackageManager( 
                    sourceRepositoryProvider,
                    NullSettings.Instance,
                    solutionManager,
                    new TestDeleteOnRestartManager());

                // Main Act: preview an update of "b" and "c" together. Package "e" is
                // deliberately missing from the project even though installed "d" depends
                // on it (simulating a prior force uninstall); the preview must still succeed.
                var targets = new List<PackageIdentity>
                {
                    new PackageIdentity("b", new NuGetVersion(2, 0, 0)),
                    new PackageIdentity("c", new NuGetVersion(3, 0, 0)),
                };

                var result = await nuGetPackageManager.PreviewUpdatePackagesAsync(
                    targets,
                    new List<NuGetProject> { nuGetProject },
                    new ResolutionContext(),
                    new TestNuGetProjectContext(),
                    sourceRepositoryProvider.GetRepositories(),
                    sourceRepositoryProvider.GetRepositories(),
                    CancellationToken.None);

                // Assert: "b" 2.0.0 requires "a" 2.0.0, so "a" is updated alongside the
                // two explicit targets. Expected/Compare are test helpers defined elsewhere
                // in this class that build and match (identity, action-type) pairs.
                var resulting = result.Select(a => Tuple.Create(a.PackageIdentity, a.NuGetProjectActionType)).ToArray();

                var expected = new List<Tuple<PackageIdentity, NuGetProjectActionType>>();
                Expected(expected, "a", new NuGetVersion(1, 0, 0), new NuGetVersion(2, 0, 0));
                Expected(expected, "b", new NuGetVersion(1, 0, 0), new NuGetVersion(2, 0, 0));
                Expected(expected, "c", new NuGetVersion(2, 0, 0), new NuGetVersion(3, 0, 0));

                Assert.True(Compare(resulting, expected));
            }
        }

        [Fact]
        public async Task TestPacManInstallPackageWhichUsesExistingDependency()
        {
            // Arrange
            var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider();
            using (var testSolutionManager = new TestSolutionManager(true))
            {
                var testSettings = NullSettings.Instance;
                var token = CancellationToken.None;
                var resolutionContext = new ResolutionContext();
                var testNuGetProjectContext = new TestNuGetProjectContext();
                var deleteOnRestartManager = new TestDeleteOnRestartManager();
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    testSettings,
                    testSolutionManager,
                    deleteOnRestartManager);
                var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings);
                var packagePathResolver = new PackagePathResolver(packagesFolderPath);

                var projectA = testSolutionManager.AddNewMSBuildProject();
                var packageIdentity0 = _packageWithDependents[0];
                var packageIdentity1 =
_packageWithDependents[1]; var packageIdentity2 = _packageWithDependents[2]; var packageIdentity3 = _packageWithDependents[3]; // Act await nuGetPackageManager.InstallPackageAsync(projectA, packageIdentity3, resolutionContext, testNuGetProjectContext, sourceRepositoryProvider.GetRepositories().First(), null, token); // Assert var projectAInstalled = (await projectA.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(2, projectAInstalled.Count); Assert.Equal(packageIdentity1, projectAInstalled[0].PackageIdentity); Assert.Equal(packageIdentity3, projectAInstalled[1].PackageIdentity); Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity1))); Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity3))); // Main Act await nuGetPackageManager.InstallPackageAsync(projectA, packageIdentity2, resolutionContext, testNuGetProjectContext, sourceRepositoryProvider.GetRepositories().First(), null, token); // Assert projectAInstalled = (await projectA.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(3, projectAInstalled.Count); Assert.Equal(packageIdentity1, projectAInstalled[0].PackageIdentity); Assert.Equal(packageIdentity2, projectAInstalled[2].PackageIdentity); Assert.Equal(packageIdentity3, projectAInstalled[1].PackageIdentity); Assert.False(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity0))); Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity1))); Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity2))); Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity3))); } } [Fact] public async Task TestPacManInstallPackageWhichUpdatesExistingDependencyDueToDependencyBehavior() { // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider(); using (var testSolutionManager = new TestSolutionManager(true)) { var testSettings = 
NullSettings.Instance; var token = CancellationToken.None; var resolutionContext = new ResolutionContext(DependencyBehavior.Highest, false, true, VersionConstraints.None); var testNuGetProjectContext = new TestNuGetProjectContext(); var deleteOnRestartManager = new TestDeleteOnRestartManager(); var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, testSettings, testSolutionManager, deleteOnRestartManager); var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings); var packagePathResolver = new PackagePathResolver(packagesFolderPath); var projectA = testSolutionManager.AddNewMSBuildProject(); var packageIdentity0 = _packageWithDependents[0]; var packageIdentity1 = _packageWithDependents[1]; var packageIdentity2 = _packageWithDependents[2]; var packageIdentity3 = _packageWithDependents[3]; var resolvedPackage = await NuGetPackageManager.GetLatestVersionAsync( packageIdentity0.Id, projectA, resolutionContext, sourceRepositoryProvider.GetRepositories().First(), Common.NullLogger.Instance, token); var packageLatest = new PackageIdentity(packageIdentity0.Id, resolvedPackage.LatestVersion); // Act await nuGetPackageManager.InstallPackageAsync(projectA, packageIdentity3, resolutionContext, testNuGetProjectContext, sourceRepositoryProvider.GetRepositories().First(), null, token); // Assert var projectAInstalled = (await projectA.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(2, projectAInstalled.Count); Assert.Equal(packageLatest, projectAInstalled[0].PackageIdentity); Assert.Equal(packageIdentity3, projectAInstalled[1].PackageIdentity); Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageLatest))); Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity3))); // Main Act await nuGetPackageManager.InstallPackageAsync(projectA, packageIdentity2, resolutionContext, testNuGetProjectContext, 
sourceRepositoryProvider.GetRepositories().First(), null, token); // Assert projectAInstalled = (await projectA.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(3, projectAInstalled.Count); Assert.Equal(packageLatest, projectAInstalled[0].PackageIdentity); Assert.Equal(packageIdentity2, projectAInstalled[2].PackageIdentity); Assert.Equal(packageIdentity3, projectAInstalled[1].PackageIdentity); Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageLatest))); Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity2))); Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity3))); } } [Fact] public async Task TestPacManPreviewUninstallWithRemoveDependencies() { // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider(); using (var testSolutionManager = new TestSolutionManager(true)) { var testSettings = NullSettings.Instance; var token = CancellationToken.None; var resolutionContext = new ResolutionContext(); var testNuGetProjectContext = new TestNuGetProjectContext(); var deleteOnRestartManager = new TestDeleteOnRestartManager(); var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, testSettings, testSolutionManager, deleteOnRestartManager); var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings); var packagePathResolver = new PackagePathResolver(packagesFolderPath); var projectA = testSolutionManager.AddNewMSBuildProject(); var packageIdentity0 = _packageWithDependents[0]; var packageIdentity1 = _packageWithDependents[1]; var packageIdentity2 = _packageWithDependents[2]; var packageIdentity3 = _packageWithDependents[3]; // Act await nuGetPackageManager.InstallPackageAsync(projectA, packageIdentity2, resolutionContext, testNuGetProjectContext, sourceRepositoryProvider.GetRepositories().First(), null, token); // Assert var projectAInstalled = (await 
projectA.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(2, projectAInstalled.Count); Assert.Equal(packageIdentity0, projectAInstalled[0].PackageIdentity); Assert.Equal(packageIdentity2, projectAInstalled[1].PackageIdentity); Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity0))); Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity2))); // Main Act var uninstallationContext = new UninstallationContext(removeDependencies: true); var packageActions = (await nuGetPackageManager.PreviewUninstallPackageAsync(projectA, packageIdentity2.Id, uninstallationContext, testNuGetProjectContext, token)).ToList(); Assert.Equal(2, packageActions.Count); Assert.Equal(packageIdentity2, packageActions[0].PackageIdentity); Assert.Equal(NuGetProjectActionType.Uninstall, packageActions[0].NuGetProjectActionType); Assert.Null(packageActions[0].SourceRepository); Assert.Equal(packageIdentity0, packageActions[1].PackageIdentity); Assert.Equal(NuGetProjectActionType.Uninstall, packageActions[1].NuGetProjectActionType); Assert.Null(packageActions[1].SourceRepository); } } [Fact] public async Task TestPacManUninstallWithRemoveDependenciesWithVDependency() { // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider(); using (var testSolutionManager = new TestSolutionManager(true)) { var testSettings = NullSettings.Instance; var token = CancellationToken.None; var resolutionContext = new ResolutionContext(); var testNuGetProjectContext = new TestNuGetProjectContext(); var deleteOnRestartManager = new TestDeleteOnRestartManager(); var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, testSettings, testSolutionManager, deleteOnRestartManager); var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings); var packagePathResolver = new PackagePathResolver(packagesFolderPath); var projectA = 
testSolutionManager.AddNewMSBuildProject(); var packageIdentity0 = _packageWithDependents[0]; var packageIdentity1 = _packageWithDependents[1]; var packageIdentity2 = _packageWithDependents[2]; var packageIdentity3 = _packageWithDependents[3]; // Act await nuGetPackageManager.InstallPackageAsync(projectA, packageIdentity2, resolutionContext, testNuGetProjectContext, sourceRepositoryProvider.GetRepositories().First(), null, token); // Assert var projectAInstalled = (await projectA.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(2, projectAInstalled.Count); Assert.Equal(packageIdentity0, projectAInstalled[0].PackageIdentity); Assert.Equal(packageIdentity2, projectAInstalled[1].PackageIdentity); Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity0))); Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity2))); // Main Act await nuGetPackageManager.InstallPackageAsync(projectA, packageIdentity3, resolutionContext, testNuGetProjectContext, sourceRepositoryProvider.GetRepositories().First(), null, token); // Assert projectAInstalled = (await projectA.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(3, projectAInstalled.Count); Assert.Equal(packageIdentity1, projectAInstalled[0].PackageIdentity); Assert.Equal(packageIdentity2, projectAInstalled[2].PackageIdentity); Assert.Equal(packageIdentity3, projectAInstalled[1].PackageIdentity); Assert.False(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity0))); Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity1))); Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity2))); Assert.True(File.Exists(packagePathResolver.GetInstalledPackageFilePath(packageIdentity3))); // Main Act Exception exception = null; try { var uninstallationContext = new UninstallationContext(removeDependencies: true); await nuGetPackageManager.UninstallPackageAsync(projectA, 
                        packageIdentity2.Id, uninstallationContext, testNuGetProjectContext, token);
                }
                catch (InvalidOperationException ex)
                {
                    exception = ex;
                }
                catch (AggregateException ex)
                {
                    // Some code paths surface the failure wrapped in an AggregateException;
                    // unwrap it so the assertions below see the inner InvalidOperationException.
                    exception = ExceptionUtility.Unwrap(ex);
                }

                // Assert: uninstalling with removeDependencies must fail, because another
                // installed package still depends on the shared dependency.
                Assert.NotNull(exception);
                Assert.True(exception is InvalidOperationException);
                Assert.Equal("Unable to uninstall 'jQuery.1.6.4' because 'jQuery.UI.Combined.1.11.2' depends on it.", exception.Message);
            }
        }

        [Fact]
        public async Task TestPacManUninstallWithForceRemove()
        {
            // Arrange
            var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider();
            using (var testSolutionManager = new TestSolutionManager(true))
            {
                var testSettings = NullSettings.Instance;
                var token = CancellationToken.None;
                var resolutionContext = new ResolutionContext();
                var testNuGetProjectContext = new TestNuGetProjectContext();
                var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings);
                var deleteOnRestartManager = new TestDeleteOnRestartManager();
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    testSettings,
                    testSolutionManager,
                    deleteOnRestartManager);

                var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject();
                var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem;
                var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath;
                // _packageWithDependents[2] depends on _packageWithDependents[0], so the
                // install below brings in two packages.
                var packageIdentity = _packageWithDependents[2];

                // Pre-Assert
                // Check that the packages.config file does not exist
                Assert.False(File.Exists(packagesConfigPath));
                // Check that there are no packages returned by PackagesConfigProject
                var packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(0, packagesInPackagesConfig.Count);
                Assert.Equal(0, msBuildNuGetProjectSystem.References.Count);

                // Act
                await nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, packageIdentity, resolutionContext,
testNuGetProjectContext, sourceRepositoryProvider.GetRepositories().First(), null, token); // Assert // Check that the packages.config file exists after the installation Assert.True(File.Exists(packagesConfigPath)); // Check the number of packages and packages returned by PackagesConfigProject after the installation packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(2, packagesInPackagesConfig.Count); Assert.Equal(packageIdentity, packagesInPackagesConfig[1].PackageIdentity); Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[1].TargetFramework); Assert.Equal(_packageWithDependents[0], packagesInPackagesConfig[0].PackageIdentity); Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[0].TargetFramework); // Main Act var uninstallationContext = new UninstallationContext(removeDependencies: false, forceRemove: true); await nuGetPackageManager.UninstallPackageAsync(msBuildNuGetProject, "jQuery", uninstallationContext, testNuGetProjectContext, token); // Assert // Check that the packages.config file exists after the installation Assert.True(File.Exists(packagesConfigPath)); // Check the number of packages and packages returned by PackagesConfigProject after the installation packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(1, packagesInPackagesConfig.Count); Assert.Equal(packageIdentity, packagesInPackagesConfig[0].PackageIdentity); Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[0].TargetFramework); } } [Fact] public async Task TestPacManInstallWithIgnoreDependencies() { // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider(); using (var testSolutionManager = new TestSolutionManager(true)) { var testSettings = NullSettings.Instance; 
var token = CancellationToken.None; var deleteOnRestartManager = new TestDeleteOnRestartManager(); var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, testSettings, testSolutionManager, deleteOnRestartManager); var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings); var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject(); var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem; var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath; var packageIdentity = _packageWithDependents[2]; // Pre-Assert // Check that the packages.config file does not exist Assert.False(File.Exists(packagesConfigPath)); // Check that there are no packages returned by PackagesConfigProject var packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(0, packagesInPackagesConfig.Count); Assert.Equal(0, msBuildNuGetProjectSystem.References.Count); // Act await nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, packageIdentity, new ResolutionContext(DependencyBehavior.Ignore, false, true, VersionConstraints.None), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories().First(), null, token); // Assert // Check that the packages.config file exists after the installation Assert.True(File.Exists(packagesConfigPath)); // Check the number of packages and packages returned by PackagesConfigProject after the installation packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(1, packagesInPackagesConfig.Count); Assert.Equal(packageIdentity, packagesInPackagesConfig[0].PackageIdentity); Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[0].TargetFramework); } } [Fact] public async Task TestPacManThrowsPackageNotFound() { 
// Arrange
            var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider();
            using (var testSolutionManager = new TestSolutionManager(true))
            {
                var testSettings = NullSettings.Instance;
                var token = CancellationToken.None;
                var deleteOnRestartManager = new TestDeleteOnRestartManager();
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    testSettings,
                    testSolutionManager,
                    deleteOnRestartManager);
                var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings);

                var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject();
                var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem;
                var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath;
                // Deliberately non-existent package id + version
                var packageIdentity = new PackageIdentity("DoesNotExist", new NuGetVersion("1.0.0"));

                // Pre-Assert
                // Check that the packages.config file does not exist
                Assert.False(File.Exists(packagesConfigPath));
                // Check that there are no packages returned by PackagesConfigProject
                var packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(0, packagesInPackagesConfig.Count);
                Assert.Equal(0, msBuildNuGetProjectSystem.References.Count);

                // Act
                Exception exception = null;
                try
                {
                    await nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, packageIdentity,
                        new ResolutionContext(), new TestNuGetProjectContext(),
                        sourceRepositoryProvider.GetRepositories().First(), null, token);
                }
                catch (Exception ex)
                {
                    exception = ex;
                }

                // The install must fail with a message identifying the missing package
                Assert.NotNull(exception);
                Assert.True(exception is InvalidOperationException);
                Assert.Contains("Package 'DoesNotExist 1.0.0' is not found", exception.Message);
            }
        }

        // Verifies that resolving the latest version of a non-existent package id
        // fails with the expected "No latest version found" message.
        [Fact]
        public async Task TestPacManThrowsLatestVersionNotFound()
        {
            // Arrange
            var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider();
            using (var testSolutionManager = new
TestSolutionManager(true))
            {
                var testSettings = NullSettings.Instance;
                var token = CancellationToken.None;
                var deleteOnRestartManager = new TestDeleteOnRestartManager();
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    testSettings,
                    testSolutionManager,
                    deleteOnRestartManager);
                var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings);

                var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject();
                var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem;
                var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath;
                // A plain id string (not a PackageIdentity): exercises the latest-version lookup path
                var packageIdentity = "DoesNotExist";

                // Pre-Assert
                // Check that the packages.config file does not exist
                Assert.False(File.Exists(packagesConfigPath));
                // Check that there are no packages returned by PackagesConfigProject
                var packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(0, packagesInPackagesConfig.Count);
                Assert.Equal(0, msBuildNuGetProjectSystem.References.Count);

                // Act
                Exception exception = null;
                try
                {
                    await nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, packageIdentity,
                        new ResolutionContext(), new TestNuGetProjectContext(),
                        sourceRepositoryProvider.GetRepositories().First(), null, token);
                }
                catch (Exception ex)
                {
                    exception = ex;
                }

                Assert.NotNull(exception);
                Assert.True(exception is InvalidOperationException);
                Assert.Equal("No latest version found for 'DoesNotExist' for the given source repositories and resolution context", exception.Message);
            }
        }

        // Verifies that installing a package with a deep dependency chain installs
        // the whole closure (7 packages) into packages.config.
        [Fact]
        public async Task TestPacManInstallPackageWithDeepDependency()
        {
            // Arrange
            var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider();
            using (var testSolutionManager = new TestSolutionManager(true))
            {
                var testSettings = NullSettings.Instance;
                var token = CancellationToken.None;
                var deleteOnRestartManager = new
TestDeleteOnRestartManager();
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    testSettings,
                    testSolutionManager,
                    deleteOnRestartManager);
                var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings);

                var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject();
                var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem;
                var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath;
                var packageIdentity = _packageWithDeepDependency[6]; // WindowsAzure.Storage.4.3.0

                // Pre-Assert
                // Check that the packages.config file does not exist
                Assert.False(File.Exists(packagesConfigPath));
                // Check that there are no packages returned by PackagesConfigProject
                var packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(0, packagesInPackagesConfig.Count);
                Assert.Equal(0, msBuildNuGetProjectSystem.References.Count);

                // Act
                await nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, packageIdentity,
                    new ResolutionContext(), new TestNuGetProjectContext(),
                    sourceRepositoryProvider.GetRepositories().First(), null, token);

                // Assert
                // Check that the packages.config file exists after the installation
                Assert.True(File.Exists(packagesConfigPath));
                // Check the number of packages and packages returned by PackagesConfigProject after the installation
                packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(7, packagesInPackagesConfig.Count);
                // packages.config entries are sorted by package id, so compare against the id-ordered fixture
                var installedPackages = _packageWithDeepDependency.OrderBy(f => f.Id).ToList();
                for (var i = 0; i < 7; i++)
                {
                    Assert.Equal(installedPackages[i], packagesInPackagesConfig[i].PackageIdentity);
                    Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[i].TargetFramework);
                }
            }
        }

        [Fact]
        public async Task
TestPacManInstallPackageBindingRedirectsWithDeepDependency()
        {
            // Arrange
            var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider();
            using (var testSolutionManager = new TestSolutionManager(true))
            {
                var testSettings = NullSettings.Instance;
                var token = CancellationToken.None;
                var deleteOnRestartManager = new TestDeleteOnRestartManager();
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    testSettings,
                    testSolutionManager,
                    deleteOnRestartManager);
                var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings);

                var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject();
                var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem;
                var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath;
                var packageIdentity = _packageWithDeepDependency[6]; // WindowsAzure.Storage.4.3.0

                // Pre-Assert
                // Check that the packages.config file does not exist
                Assert.False(File.Exists(packagesConfigPath));
                // Check that there are no packages returned by PackagesConfigProject
                var packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(0, packagesInPackagesConfig.Count);
                Assert.Equal(0, msBuildNuGetProjectSystem.References.Count);

                // Act
                await nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, packageIdentity,
                    new ResolutionContext(), new TestNuGetProjectContext(),
                    sourceRepositoryProvider.GetRepositories().First(), null, token);

                // Assert
                // Check that the packages.config file exists after the installation
                Assert.True(File.Exists(packagesConfigPath));
                // Check the number of packages and packages returned by PackagesConfigProject after the installation
                packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(7, packagesInPackagesConfig.Count);
                // Binding redirects should be generated exactly once for the whole install batch
                Assert.Equal(1, msBuildNuGetProjectSystem.BindingRedirectsCallCount);
            }
        }

        // Same as the test above, but with BindingRedirectsDisabled on the project context:
        // no binding-redirect generation should occur.
        [Fact]
        public async Task TestPacManInstallPackageBindingRedirectsDisabledWithDeepDependency()
        {
            // Arrange
            var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider();
            using (var testSolutionManager = new TestSolutionManager(true))
            {
                var testSettings = NullSettings.Instance;
                var token = CancellationToken.None;
                var deleteOnRestartManager = new TestDeleteOnRestartManager();
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    testSettings,
                    testSolutionManager,
                    deleteOnRestartManager);
                var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings);

                var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject();
                var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem;
                var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath;
                var packageIdentity = _packageWithDeepDependency[6]; // WindowsAzure.Storage.4.3.0

                // Pre-Assert
                // Check that the packages.config file does not exist
                Assert.False(File.Exists(packagesConfigPath));
                // Check that there are no packages returned by PackagesConfigProject
                var packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(0, packagesInPackagesConfig.Count);
                Assert.Equal(0, msBuildNuGetProjectSystem.References.Count);

                // Act
                await nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, packageIdentity,
                    new ResolutionContext(),
                    new TestNuGetProjectContext { BindingRedirectsDisabled = true },
                    sourceRepositoryProvider.GetRepositories().First(), null, token);

                // Assert
                // Check that the packages.config file exists after the installation
                Assert.True(File.Exists(packagesConfigPath));
                // Check the number of packages and packages returned by PackagesConfigProject after the installation
                packagesInPackagesConfig = (await
msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(7, packagesInPackagesConfig.Count);
                // With redirects disabled, binding-redirect generation must never have run
                Assert.Equal(0, msBuildNuGetProjectSystem.BindingRedirectsCallCount);
            }
        }

        [Fact]
        public Task TestPacManGetInstalledPackagesByDependencyOrder()
        {
            return TestPacManGetInstalledPackagesByDependencyOrderInternal(deletePackages: false);
        }

        [Fact]
        public Task TestPacManGetUnrestoredPackagesByDependencyOrderDeleteTrue()
        {
            return TestPacManGetInstalledPackagesByDependencyOrderInternal(deletePackages: true);
        }

        // Shared body for the two [Fact]s above. When deletePackages is true the packages
        // folder is wiped after install, so dependency-order resolution should find nothing.
        private async Task TestPacManGetInstalledPackagesByDependencyOrderInternal(bool deletePackages)
        {
            // Arrange
            var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider();
            using (var testSolutionManager = new TestSolutionManager(true))
            {
                var testSettings = NullSettings.Instance;
                var token = CancellationToken.None;
                var deleteOnRestartManager = new TestDeleteOnRestartManager();
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    testSettings,
                    testSolutionManager,
                    deleteOnRestartManager);
                var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings);

                var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject();
                var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem;
                var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath;
                var packageIdentity = _packageWithDeepDependency[6]; // WindowsAzure.Storage.4.3.0

                // Pre-Assert
                // Check that the packages.config file does not exist
                Assert.False(File.Exists(packagesConfigPath));
                // Check that there are no packages returned by PackagesConfigProject
                var packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(0, packagesInPackagesConfig.Count);
                Assert.Equal(0, msBuildNuGetProjectSystem.References.Count);

                // Act
                await
nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, packageIdentity,
                    new ResolutionContext(), new TestNuGetProjectContext(),
                    sourceRepositoryProvider.GetRepositories().First(), null, token);

                if (deletePackages)
                {
                    // Simulate an unrestored state: packages.config lists the packages,
                    // but the packages folder has been deleted.
                    TestFileSystemUtility.DeleteRandomTestFolder(packagesFolderPath);
                }

                // Assert
                // Check that the packages.config file exists after the installation
                Assert.True(File.Exists(packagesConfigPath));
                // Check the number of packages and packages returned by PackagesConfigProject after the installation
                packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(7, packagesInPackagesConfig.Count);
                var installedPackages = _packageWithDeepDependency.OrderBy(f => f.Id).ToList();
                for (var i = 0; i < 7; i++)
                {
                    Assert.True(installedPackages[i].Equals(packagesInPackagesConfig[i].PackageIdentity));
                    Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[i].TargetFramework);
                }

                // Main Assert
                var installedPackagesInDependencyOrder = (await nuGetPackageManager.GetInstalledPackagesInDependencyOrder
                    (msBuildNuGetProject, token)).ToList();
                if (deletePackages)
                {
                    // Nothing restored on disk -> dependency order cannot be computed
                    Assert.Equal(0, installedPackagesInDependencyOrder.Count);
                }
                else
                {
                    Assert.Equal(7, installedPackagesInDependencyOrder.Count);
                    for (var i = 0; i < 7; i++)
                    {
                        // _packageWithDeepDependency is declared in dependency order
                        Assert.Equal(_packageWithDeepDependency[i], installedPackagesInDependencyOrder[i], PackageIdentity.Comparer);
                    }
                }
            }
        }

        // Verifies that previewing an install of a deep-dependency package produces one
        // Install action per package in the closure, each attributed to the sole source repository.
        [Fact]
        public async Task TestPacManPreviewInstallPackageWithDeepDependency()
        {
            // Arrange
            var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider();
            using (var testSolutionManager = new TestSolutionManager(true))
            {
                var testSettings = NullSettings.Instance;
                var deleteOnRestartManager = new TestDeleteOnRestartManager();
                var token = CancellationToken.None;
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    testSettings,
                    testSolutionManager,
                    deleteOnRestartManager);
                var
packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings);

                var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject();
                var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem;
                var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath;
                var packageIdentity = _packageWithDeepDependency[6]; // WindowsAzure.Storage.4.3.0

                // Pre-Assert
                // Check that the packages.config file does not exist
                Assert.False(File.Exists(packagesConfigPath));
                // Check that there are no packages returned by PackagesConfigProject
                var packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(0, packagesInPackagesConfig.Count);
                Assert.Equal(0, msBuildNuGetProjectSystem.References.Count);

                // Act
                // Preview only: no packages are actually installed
                var packageActions = (await nuGetPackageManager.PreviewInstallPackageAsync(msBuildNuGetProject, packageIdentity,
                    new ResolutionContext(), new TestNuGetProjectContext(),
                    sourceRepositoryProvider.GetRepositories().First(), null, token)).ToList();

                // Assert
                Assert.Equal(7, packageActions.Count);
                var soleSourceRepository = sourceRepositoryProvider.GetRepositories().Single();
                for (var i = 0; i < 7; i++)
                {
                    // Actions come back in dependency order, all Install, all from the single source
                    Assert.Equal(_packageWithDeepDependency[i], packageActions[i].PackageIdentity, PackageIdentity.Comparer);
                    Assert.Equal(NuGetProjectActionType.Install, packageActions[i].NuGetProjectActionType);
                    Assert.Equal(soleSourceRepository.PackageSource.Source,
                        packageActions[i].SourceRepository.PackageSource.Source);
                }
            }
        }

        // Verifies that previewing an uninstall with removeDependencies: true yields
        // Uninstall actions for the whole dependency closure, dependents before dependencies.
        [Fact]
        public async Task TestPacManPreviewUninstallPackageWithDeepDependency()
        {
            // Arrange
            var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider();
            using (var testSolutionManager = new TestSolutionManager(true))
            {
                var testSettings = NullSettings.Instance;
                var deleteOnRestartManager = new TestDeleteOnRestartManager();
                var token = CancellationToken.None;
                var
nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    testSettings,
                    testSolutionManager,
                    deleteOnRestartManager);
                var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings);

                var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject();
                var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem;
                var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath;
                var packageIdentity = _packageWithDeepDependency[6]; // WindowsAzure.Storage.4.3.0

                // Pre-Assert
                // Check that the packages.config file does not exist
                Assert.False(File.Exists(packagesConfigPath));
                // Check that there are no packages returned by PackagesConfigProject
                var packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(0, packagesInPackagesConfig.Count);
                Assert.Equal(0, msBuildNuGetProjectSystem.References.Count);

                // Act
                // Real install first so there is something to preview-uninstall
                await nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, packageIdentity,
                    new ResolutionContext(), new TestNuGetProjectContext(),
                    sourceRepositoryProvider.GetRepositories().First(), null, token);

                // Assert
                // Check that the packages.config file exists after the installation
                Assert.True(File.Exists(packagesConfigPath));
                // Check the number of packages and packages returned by PackagesConfigProject after the installation
                packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(7, packagesInPackagesConfig.Count);
                Assert.Equal(packageIdentity, packagesInPackagesConfig[6].PackageIdentity);
                Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[6].TargetFramework);

                // Main Act
                var packageActions = (await nuGetPackageManager.PreviewUninstallPackageAsync(msBuildNuGetProject,
                    _packageWithDeepDependency[6], new UninstallationContext(removeDependencies: true), new
TestNuGetProjectContext(), token)).ToList();

                // Every package in the closure is uninstalled, dependents before their dependencies
                Assert.Equal(7, packageActions.Count);
                var soleSourceRepository = sourceRepositoryProvider.GetRepositories().Single();
                Assert.Equal(_packageWithDeepDependency[6], packageActions[0].PackageIdentity);
                Assert.Equal(NuGetProjectActionType.Uninstall, packageActions[0].NuGetProjectActionType);
                Assert.Equal(_packageWithDeepDependency[2], packageActions[1].PackageIdentity);
                Assert.Equal(NuGetProjectActionType.Uninstall, packageActions[1].NuGetProjectActionType);
                Assert.Equal(_packageWithDeepDependency[5], packageActions[2].PackageIdentity);
                Assert.Equal(NuGetProjectActionType.Uninstall, packageActions[2].NuGetProjectActionType);
                Assert.Equal(_packageWithDeepDependency[4], packageActions[3].PackageIdentity);
                Assert.Equal(NuGetProjectActionType.Uninstall, packageActions[3].NuGetProjectActionType);
                Assert.Equal(_packageWithDeepDependency[1], packageActions[4].PackageIdentity);
                Assert.Equal(NuGetProjectActionType.Uninstall, packageActions[4].NuGetProjectActionType);
                Assert.Equal(_packageWithDeepDependency[3], packageActions[5].PackageIdentity);
                Assert.Equal(NuGetProjectActionType.Uninstall, packageActions[5].NuGetProjectActionType);
                Assert.Equal(_packageWithDeepDependency[0], packageActions[6].PackageIdentity);
                Assert.Equal(NuGetProjectActionType.Uninstall, packageActions[6].NuGetProjectActionType);
            }
        }

        // NOTE(review): this test is disabled — the [Fact] attribute is commented out below.
        //[Fact]
        public async Task TestPacManInstallPackageTargetingASPNetCore50()
        {
            // Arrange
            var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider();
            using (var testSolutionManager = new TestSolutionManager(true))
            {
                var testSettings = NullSettings.Instance;
                var deleteOnRestartManager = new TestDeleteOnRestartManager();
                var token = CancellationToken.None;
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    testSettings,
                    testSolutionManager,
                    deleteOnRestartManager);
                var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings);
var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject("projectName", NuGetFramework.Parse("aspenetcore50")); var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem; var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath; var packageIdentity = _latestAspNetPackages[0]; // Microsoft.AspNet.Mvc.6.0.0-beta3 // Pre-Assert // Check that the packages.config file does not exist Assert.False(File.Exists(packagesConfigPath)); // Check that there are no packages returned by PackagesConfigProject var packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(0, packagesInPackagesConfig.Count); Assert.Equal(0, msBuildNuGetProjectSystem.References.Count); var resolutionContext = new ResolutionContext(DependencyBehavior.Lowest, includePrelease: true, includeUnlisted: true, versionConstraints: VersionConstraints.None); // Act await nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, packageIdentity, resolutionContext, new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories().First(), null, token); // Assert // Check that the packages.config file exists after the installation Assert.True(File.Exists(packagesConfigPath)); // Check the number of packages and packages returned by PackagesConfigProject after the installation packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(1, packagesInPackagesConfig.Count); Assert.Equal(packageIdentity, packagesInPackagesConfig[0].PackageIdentity); Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[0].TargetFramework); } } [Fact] public async Task TestPacManInstallMvcTargetingNet45() { // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider(); using (var testSolutionManager = new 
TestSolutionManager(true))
            {
                var testSettings = NullSettings.Instance;
                var token = CancellationToken.None;
                var deleteOnRestartManager = new TestDeleteOnRestartManager();
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    testSettings,
                    testSolutionManager,
                    deleteOnRestartManager);
                var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings);
                // Default test project — no explicit target framework passed here.
                var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject();
                var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem;
                var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath;
                var packageIdentity = _latestAspNetPackages[0]; // Microsoft.AspNet.Mvc.6.0.0-beta3

                // Pre-Assert
                // Check that the packages.config file does not exist
                Assert.False(File.Exists(packagesConfigPath));
                // Check that there are no packages returned by PackagesConfigProject
                var packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(0, packagesInPackagesConfig.Count);
                Assert.Equal(0, msBuildNuGetProjectSystem.References.Count);

                var resolutionContext = new ResolutionContext(DependencyBehavior.Lowest, includePrelease: true, includeUnlisted: true, versionConstraints: VersionConstraints.None);

                // Act: the install must fail because the package contains no assets
                // compatible with the project's target framework.
                var exception = await Assert.ThrowsAsync<InvalidOperationException>(
                    async () => await nuGetPackageManager.InstallPackageAsync(
                        msBuildNuGetProject,
                        packageIdentity,
                        resolutionContext,
                        new TestNuGetProjectContext(),
                        sourceRepositoryProvider.GetRepositories().First(),
                        null,
                        token));

                // The exact user-facing message is pinned, including the package
                // "id version" pair and the project's target framework.
                var errorMessage = string.Format(CultureInfo.CurrentCulture,
                    "Could not install package '{0}'. You are trying to install this package into a project that targets '{1}', but the package does not contain any assembly references or content files that are compatible with that framework. For more information, contact the package author.",
                    packageIdentity.Id + " " + packageIdentity.Version.ToNormalizedString(), msBuildNuGetProject.ProjectSystem.TargetFramework);
                Assert.Equal(errorMessage, exception.Message);
            }
        }

        // Installs jQuery 1.4.4, then previews an update of all packages in the
        // project: expects exactly one Uninstall (old version) and one Install
        // (latest resolved version) — assertions continue on the following lines.
        [Fact]
        public async Task TestPacManPreviewUpdatePackagesSimple()
        {
            // Arrange
            var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV2OnlySourceRepositoryProvider();
            using (var testSolutionManager = new TestSolutionManager(true))
            {
                var testSettings = NullSettings.Instance;
                var token = CancellationToken.None;
                var deleteOnRestartManager = new TestDeleteOnRestartManager();
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    testSettings,
                    testSolutionManager,
                    deleteOnRestartManager);
                var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings);
                var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject();
                var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem;
                var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath;
                var packageIdentity0 = _packageWithDependents[0]; // jQuery.1.4.4
                var resolutionContext = new ResolutionContext();
                // Resolve the latest available version of the same package up front so
                // the assertions below can compare against it.
                var resolvedPackage = await NuGetPackageManager.GetLatestVersionAsync(
                    packageIdentity0.Id,
                    msBuildNuGetProject,
                    new ResolutionContext(),
                    sourceRepositoryProvider.GetRepositories().First(),
                    Common.NullLogger.Instance,
                    token);
                var packageLatest = new PackageIdentity(packageIdentity0.Id, resolvedPackage.LatestVersion);

                // Pre-Assert
                // Check that the packages.config file does not exist
                Assert.False(File.Exists(packagesConfigPath));
                // Check that there are no packages returned by PackagesConfigProject
                var packagesInPackagesConfig = (await msBuildNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(0, packagesInPackagesConfig.Count);
                Assert.Equal(0, msBuildNuGetProjectSystem.References.Count);

                // Act
                await
nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, packageIdentity0, resolutionContext,
                    new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories().First(), null, token);

                // Assert
                // Check that the packages.config file exists after the installation
                Assert.True(File.Exists(packagesConfigPath));
                // Check the number of packages and packages returned by PackagesConfigProject after the installation
                packagesInPackagesConfig = (await msBuildNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(1, packagesInPackagesConfig.Count);
                Assert.Equal(packageIdentity0, packagesInPackagesConfig[0].PackageIdentity);
                Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[0].TargetFramework);

                // Main Act: preview an update of every package in the project using
                // Highest dependency behavior.
                var packageActions = (await nuGetPackageManager.PreviewUpdatePackagesAsync(
                    new List<NuGetProject> { msBuildNuGetProject },
                    new ResolutionContext(DependencyBehavior.Highest, false, true, VersionConstraints.None),
                    new TestNuGetProjectContext(),
                    sourceRepositoryProvider.GetRepositories(),
                    sourceRepositoryProvider.GetRepositories(),
                    token)).ToList();

                // Assert: old version uninstalled, latest installed, and the Install
                // action records the originating source repository.
                Assert.Equal(2, packageActions.Count);
                Assert.True(packageIdentity0.Equals(packageActions[0].PackageIdentity));
                Assert.Equal(NuGetProjectActionType.Uninstall, packageActions[0].NuGetProjectActionType);
                Assert.True(packageLatest.Equals(packageActions[1].PackageIdentity));
                Assert.Equal(NuGetProjectActionType.Install, packageActions[1].NuGetProjectActionType);
                Assert.Equal(sourceRepositoryProvider.GetRepositories().Single().PackageSource.Source,
                    packageActions[1].SourceRepository.PackageSource.Source);
            }
        }

        // Updating only package "a" while the project also holds a prerelease "b"
        // (which depends on a >= 1.0.0): expects a single update a 1.0.0 -> 3.0.0
        // and no action for "b".
        [Fact]
        public async Task TestPacManPreviewUpdatePackageWithTargetPrereleaseInProject()
        {
            // Arrange

            // Set up Package Source: a 1.0.0/2.0.0/3.0.0 (no dependencies) and
            // b 1.0.0-beta / 2.0.0-beta, each depending on a >= 1.0.0.
            var packages = new List<SourcePackageDependencyInfo>
            {
                new SourcePackageDependencyInfo("a", new NuGetVersion(1, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
                new SourcePackageDependencyInfo("a", new NuGetVersion(2, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
                new SourcePackageDependencyInfo("a", new NuGetVersion(3, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
                new SourcePackageDependencyInfo("b", new NuGetVersion(1, 0, 0, "beta"), new[] { new Packaging.Core.PackageDependency("a", new VersionRange(new NuGetVersion(1, 0, 0))) }, true, null),
                new SourcePackageDependencyInfo("b", new NuGetVersion(2, 0, 0, "beta"), new[] { new Packaging.Core.PackageDependency("a", new VersionRange(new NuGetVersion(1, 0, 0))) }, true, null),
            };
            var sourceRepositoryProvider = CreateSource(packages);

            // Set up NuGetProject: a 1.0.0 and b 1.0.0-beta already installed.
            var fwk45 = NuGetFramework.Parse("net45");
            var installedPackages = new List<NuGet.Packaging.PackageReference>
            {
                new NuGet.Packaging.PackageReference(new PackageIdentity("a", new NuGetVersion(1, 0, 0)), fwk45, true),
                new NuGet.Packaging.PackageReference(new PackageIdentity("b", new NuGetVersion(1, 0, 0, "beta")), fwk45, true),
            };
            var nuGetProject = new TestNuGetProject(installedPackages);

            // Create Package Manager
            using (var solutionManager = new TestSolutionManager(true))
            {
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    NullSettings.Instance,
                    solutionManager,
                    new TestDeleteOnRestartManager());

                // Main Act
                var result = await nuGetPackageManager.PreviewUpdatePackagesAsync(
                    "a",
                    new List<NuGetProject> { nuGetProject },
                    new ResolutionContext(),
                    new TestNuGetProjectContext(),
                    sourceRepositoryProvider.GetRepositories(),
                    sourceRepositoryProvider.GetRepositories(),
                    CancellationToken.None);

                // Assert: only "a" moves, 1.0.0 -> 3.0.0.
                var resulting = result.Select(a => Tuple.Create(a.PackageIdentity, a.NuGetProjectActionType)).ToArray();
                var expected = new List<Tuple<PackageIdentity, NuGetProjectActionType>>();
                Expected(expected, "a", new NuGetVersion(1, 0, 0), new NuGetVersion(3, 0, 0));
                Assert.True(Compare(resulting, expected));
            }
        }

        // Previewing an update for a package id that is not installed in the project
        // must produce no actions — body continues on the following lines.
        [Fact]
        public async Task TestPacManPreviewUpdatePackageNotExistsInProject()
        {
            // Arrange

            // Set up Package Source
            var packages =
new List<SourcePackageDependencyInfo> { new SourcePackageDependencyInfo("a", new NuGetVersion(1, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("a", new NuGetVersion(2, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("a", new NuGetVersion(3, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("b", new NuGetVersion(1, 0, 0, "beta"), new[] { new Packaging.Core.PackageDependency("a", new VersionRange(new NuGetVersion(1, 0, 0))) }, true, null), new SourcePackageDependencyInfo("b", new NuGetVersion(2, 0, 0, "beta"), new[] { new Packaging.Core.PackageDependency("a", new VersionRange(new NuGetVersion(1, 0, 0))) }, true, null), }; var sourceRepositoryProvider = CreateSource(packages); // Set up NuGetProject var fwk45 = NuGetFramework.Parse("net45"); var installedPackages = new List<NuGet.Packaging.PackageReference> { new NuGet.Packaging.PackageReference(new PackageIdentity("a", new NuGetVersion(1, 0, 0)), fwk45, true), new NuGet.Packaging.PackageReference(new PackageIdentity("b", new NuGetVersion(1, 0, 0, "beta")), fwk45, true), }; var nuGetProject = new TestNuGetProject(installedPackages); // Create Package Manager using (var solutionManager = new TestSolutionManager(true)) { var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, NullSettings.Instance, solutionManager, new TestDeleteOnRestartManager()); // Main Act var result = await nuGetPackageManager.PreviewUpdatePackagesAsync( "c", new List<NuGetProject> { nuGetProject }, new ResolutionContext( dependencyBehavior: DependencyBehavior.Lowest, includePrelease: false, includeUnlisted: false, versionConstraints: VersionConstraints.ExactMajor | VersionConstraints.ExactMinor | VersionConstraints.ExactPatch | VersionConstraints.ExactRelease, gatherCache: new GatherCache(), sourceCacheContext: NullSourceCacheContext.Instance), new TestNuGetProjectContext(), 
sourceRepositoryProvider.GetRepositories(), sourceRepositoryProvider.GetRepositories(), CancellationToken.None); // Assert Assert.True(!result.Any()); } } [Fact] public async Task TestPacManPreviewUpdatePackageALLPrereleaseInProject() { // Arrange // Set up Package Source var packages = new List<SourcePackageDependencyInfo> { new SourcePackageDependencyInfo("a", new NuGetVersion(1, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("a", new NuGetVersion(2, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("a", new NuGetVersion(3, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("b", new NuGetVersion(1, 0, 0, "beta"), new[] { new Packaging.Core.PackageDependency("a", new VersionRange(new NuGetVersion(1, 0, 0))) }, true, null), new SourcePackageDependencyInfo("b", new NuGetVersion(2, 0, 0, "beta"), new[] { new Packaging.Core.PackageDependency("a", new VersionRange(new NuGetVersion(1, 0, 0))) }, true, null), }; var sourceRepositoryProvider = CreateSource(packages); // Set up NuGetProject var fwk45 = NuGetFramework.Parse("net45"); var installedPackages = new List<NuGet.Packaging.PackageReference> { new NuGet.Packaging.PackageReference(new PackageIdentity("a", new NuGetVersion(1, 0, 0)), fwk45, true), new NuGet.Packaging.PackageReference(new PackageIdentity("b", new NuGetVersion(1, 0, 0, "beta")), fwk45, true), }; var nuGetProject = new TestNuGetProject(installedPackages); // Create Package Manager using (var solutionManager = new TestSolutionManager(true)) { var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, NullSettings.Instance, solutionManager, new TestDeleteOnRestartManager()); // Main Act var result = await nuGetPackageManager.PreviewUpdatePackagesAsync( new List<NuGetProject> { nuGetProject }, new ResolutionContext(), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories(), 
sourceRepositoryProvider.GetRepositories(), CancellationToken.None); // Assert var resulting = result.Select(a => Tuple.Create(a.PackageIdentity, a.NuGetProjectActionType)).ToArray(); var expected = new List<Tuple<PackageIdentity, NuGetProjectActionType>>(); Expected(expected, "a", new NuGetVersion(1, 0, 0), new NuGetVersion(3, 0, 0)); Expected(expected, "b", new NuGetVersion(1, 0, 0, "beta"), new NuGetVersion(2, 0, 0, "beta")); Assert.True(Compare(resulting, expected)); } } [Fact] public async Task TestPacManPreviewUpdatePrereleasePackageNoPreFlagSpecified() { // Arrange // Set up Package Source var packages = new List<SourcePackageDependencyInfo> { new SourcePackageDependencyInfo("a", new NuGetVersion(1, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("a", new NuGetVersion(2, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("a", new NuGetVersion(3, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("b", new NuGetVersion(1, 0, 0, "beta"), new[] { new Packaging.Core.PackageDependency("a", new VersionRange(new NuGetVersion(1, 0, 0))) }, true, null), new SourcePackageDependencyInfo("b", new NuGetVersion(2, 0, 0, "beta"), new[] { new Packaging.Core.PackageDependency("a", new VersionRange(new NuGetVersion(1, 0, 0))) }, true, null), }; var sourceRepositoryProvider = CreateSource(packages); // Set up NuGetProject var fwk45 = NuGetFramework.Parse("net45"); var installedPackages = new List<NuGet.Packaging.PackageReference> { new NuGet.Packaging.PackageReference(new PackageIdentity("a", new NuGetVersion(1, 0, 0)), fwk45, true), new NuGet.Packaging.PackageReference(new PackageIdentity("b", new NuGetVersion(1, 0, 0, "beta")), fwk45, true), }; var nuGetProject = new TestNuGetProject(installedPackages); // Create Package Manager using (var solutionManager = new TestSolutionManager(true)) { var nuGetPackageManager = new NuGetPackageManager( 
sourceRepositoryProvider, NullSettings.Instance, solutionManager, new TestDeleteOnRestartManager()); // Main Act var result = await nuGetPackageManager.PreviewUpdatePackagesAsync( "b", new List<NuGetProject> { nuGetProject }, new ResolutionContext(), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories(), sourceRepositoryProvider.GetRepositories(), CancellationToken.None); // Assert var resulting = result.Select(a => Tuple.Create(a.PackageIdentity, a.NuGetProjectActionType)).ToArray(); var expected = new List<Tuple<PackageIdentity, NuGetProjectActionType>>(); Assert.True(Compare(resulting, expected)); } } [Fact] public async Task TestPacManPreviewUpdateMulti() { // Arrange // Set up Package Source var packages = new List<SourcePackageDependencyInfo> { new SourcePackageDependencyInfo("a", new NuGetVersion(1, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("a", new NuGetVersion(2, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("a", new NuGetVersion(3, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("b", new NuGetVersion(1, 0, 0), new[] { new Packaging.Core.PackageDependency("a", new VersionRange(new NuGetVersion(1, 0, 0))) }, true, null), new SourcePackageDependencyInfo("b", new NuGetVersion(2, 0, 0), new[] { new Packaging.Core.PackageDependency("a", new VersionRange(new NuGetVersion(2, 0, 0))) }, true, null), new SourcePackageDependencyInfo("b", new NuGetVersion(3, 0, 0), new[] { new Packaging.Core.PackageDependency("a", new VersionRange(new NuGetVersion(2, 0, 0))) }, true, null), new SourcePackageDependencyInfo("c", new NuGetVersion(1, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("c", new NuGetVersion(2, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("c", new NuGetVersion(3, 0, 0), new 
Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("d", new NuGetVersion(1, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("d", new NuGetVersion(2, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("d", new NuGetVersion(3, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("d", new NuGetVersion(4, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("e", new NuGetVersion(1, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("e", new NuGetVersion(2, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("f", new NuGetVersion(1, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("f", new NuGetVersion(2, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("f", new NuGetVersion(3, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("f", new NuGetVersion(4, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), }; var sourceRepositoryProvider = CreateSource(packages); // Set up NuGetProject var fwk45 = NuGetFramework.Parse("net45"); var installedPackages = new List<NuGet.Packaging.PackageReference> { new NuGet.Packaging.PackageReference(new PackageIdentity("a", new NuGetVersion(1, 0, 0)), fwk45, true), new NuGet.Packaging.PackageReference(new PackageIdentity("b", new NuGetVersion(1, 0, 0)), fwk45, true), new NuGet.Packaging.PackageReference(new PackageIdentity("c", new NuGetVersion(2, 0, 0)), fwk45, true), new NuGet.Packaging.PackageReference(new PackageIdentity("d", new NuGetVersion(2, 0, 0)), fwk45, true), new NuGet.Packaging.PackageReference(new PackageIdentity("e", new NuGetVersion(1, 0, 0)), fwk45, true), }; var nuGetProject = 
new TestNuGetProject(installedPackages); // Create Package Manager using (var solutionManager = new TestSolutionManager(true)) { var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, NullSettings.Instance, solutionManager, new TestDeleteOnRestartManager()); // Main Act var targets = new List<PackageIdentity> { new PackageIdentity("b", new NuGetVersion(2, 0, 0)), new PackageIdentity("c", new NuGetVersion(3, 0, 0)), new PackageIdentity("d", new NuGetVersion(3, 0, 0)), }; var result = await nuGetPackageManager.PreviewUpdatePackagesAsync( targets, new List<NuGetProject> { nuGetProject }, new ResolutionContext(), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories(), sourceRepositoryProvider.GetRepositories(), CancellationToken.None); // Assert var resulting = result.Select(a => Tuple.Create(a.PackageIdentity, a.NuGetProjectActionType)).ToArray(); var expected = new List<Tuple<PackageIdentity, NuGetProjectActionType>>(); Expected(expected, "a", new NuGetVersion(1, 0, 0), new NuGetVersion(2, 0, 0)); Expected(expected, "b", new NuGetVersion(1, 0, 0), new NuGetVersion(2, 0, 0)); Expected(expected, "c", new NuGetVersion(2, 0, 0), new NuGetVersion(3, 0, 0)); Expected(expected, "d", new NuGetVersion(2, 0, 0), new NuGetVersion(3, 0, 0)); Assert.True(Compare(resulting, expected)); } } [Fact] public async Task TestPacMan_PreviewUpdatePackagesAsync_MultiProjects() { // Arrange // Set up Package Source var packages = new List<SourcePackageDependencyInfo> { new SourcePackageDependencyInfo("a", new NuGetVersion(1, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("a", new NuGetVersion(2, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("b", new NuGetVersion(1, 0, 0), new[] { new Packaging.Core.PackageDependency("a", new VersionRange(new NuGetVersion(1, 0, 0))) }, true, null), new SourcePackageDependencyInfo("b", new NuGetVersion(2, 0, 0), new[] { 
new Packaging.Core.PackageDependency("a", new VersionRange(new NuGetVersion(2, 0, 0))) }, true, null), new SourcePackageDependencyInfo("c", new NuGetVersion(1, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("c", new NuGetVersion(2, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("d", new NuGetVersion(1, 0, 0), new[] { new Packaging.Core.PackageDependency("c", new VersionRange(new NuGetVersion(1, 0, 0))) }, true, null), new SourcePackageDependencyInfo("d", new NuGetVersion(2, 0, 0), new[] { new Packaging.Core.PackageDependency("c", new VersionRange(new NuGetVersion(2, 0, 0))) }, true, null), }; var sourceRepositoryProvider = CreateSource(packages); // Set up NuGetProject var fwk45 = NuGetFramework.Parse("net45"); var installedPackagesA = new List<NuGet.Packaging.PackageReference> { new NuGet.Packaging.PackageReference(new PackageIdentity("a", new NuGetVersion(1, 0, 0)), fwk45, true), new NuGet.Packaging.PackageReference(new PackageIdentity("b", new NuGetVersion(1, 0, 0)), fwk45, true), }; var installedPackagesB = new List<NuGet.Packaging.PackageReference> { new NuGet.Packaging.PackageReference(new PackageIdentity("d", new NuGetVersion(1, 0, 0)), fwk45, true), }; var nuGetProjectA = new TestNuGetProject(installedPackagesA); var nuGetProjectB = new TestNuGetProject(installedPackagesB); // Create Package Manager using (var solutionManager = new TestSolutionManager(true)) { var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, NullSettings.Instance, solutionManager, new TestDeleteOnRestartManager()); // Main Act var targets = new List<PackageIdentity> { new PackageIdentity("b", new NuGetVersion(2, 0, 0)) }; var result = await nuGetPackageManager.PreviewUpdatePackagesAsync( targets, new List<NuGetProject> { nuGetProjectA, nuGetProjectB }, new ResolutionContext(), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories(), 
sourceRepositoryProvider.GetRepositories(), CancellationToken.None); // Assert var resulting = result.Select(a => Tuple.Create(a.PackageIdentity, a.NuGetProjectActionType)).ToArray(); var expected = new List<Tuple<PackageIdentity, NuGetProjectActionType>>(); Expected(expected, "a", new NuGetVersion(1, 0, 0), new NuGetVersion(2, 0, 0)); Expected(expected, "b", new NuGetVersion(1, 0, 0), new NuGetVersion(2, 0, 0)); Assert.True(Compare(resulting, expected)); } } [Fact] public async Task TestPacMan_PreviewUpdatePackagesAsync_MultiProjects_MultiDependencies() { // Arrange // Set up Package Source var packages = new List<SourcePackageDependencyInfo> { new SourcePackageDependencyInfo("a", new NuGetVersion(1, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("a", new NuGetVersion(2, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("b", new NuGetVersion(1, 0, 0), new[] { new Packaging.Core.PackageDependency("a", new VersionRange(new NuGetVersion(1, 0, 0))) }, true, null), new SourcePackageDependencyInfo("b", new NuGetVersion(2, 0, 0), new[] { new Packaging.Core.PackageDependency("a", new VersionRange(new NuGetVersion(2, 0, 0))) }, true, null), new SourcePackageDependencyInfo("c", new NuGetVersion(1, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("c", new NuGetVersion(2, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("d", new NuGetVersion(1, 0, 0), new[] { new Packaging.Core.PackageDependency("c", new VersionRange(new NuGetVersion(1, 0, 0))) }, true, null), new SourcePackageDependencyInfo("d", new NuGetVersion(2, 0, 0), new[] { new Packaging.Core.PackageDependency("c", new VersionRange(new NuGetVersion(2, 0, 0))) }, true, null), }; var sourceRepositoryProvider = CreateSource(packages); // Set up NuGetProject var fwk45 = NuGetFramework.Parse("net45"); var installedPackagesA = new 
List<NuGet.Packaging.PackageReference> { new NuGet.Packaging.PackageReference(new PackageIdentity("a", new NuGetVersion(1, 0, 0)), fwk45, true), new NuGet.Packaging.PackageReference(new PackageIdentity("b", new NuGetVersion(1, 0, 0)), fwk45, true), }; var installedPackagesB = new List<NuGet.Packaging.PackageReference> { new NuGet.Packaging.PackageReference(new PackageIdentity("c", new NuGetVersion(1, 0, 0)), fwk45, true), new NuGet.Packaging.PackageReference(new PackageIdentity("d", new NuGetVersion(1, 0, 0)), fwk45, true), }; var nuGetProjectA = new TestNuGetProject("projectA", installedPackagesA); var nuGetProjectB = new TestNuGetProject("projectB", installedPackagesB); // Create Package Manager using (var solutionManager = new TestSolutionManager(true)) { var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, NullSettings.Instance, solutionManager, new TestDeleteOnRestartManager()); // Main Act var targets = new List<PackageIdentity> { new PackageIdentity("b", new NuGetVersion(2, 0, 0)), new PackageIdentity("d", new NuGetVersion(2, 0, 0)) }; var result = await nuGetPackageManager.PreviewUpdatePackagesAsync( targets, new List<NuGetProject> { nuGetProjectA, nuGetProjectB }, new ResolutionContext(), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories(), sourceRepositoryProvider.GetRepositories(), CancellationToken.None); // Assert var resulting = result.Where(a => a.NuGetProjectActionType == NuGetProjectActionType.Install) .Select(a => Tuple.Create(a.Project as TestNuGetProject, a.PackageIdentity)).ToArray(); var expected = new List<Tuple<TestNuGetProject, PackageIdentity>>(); expected.Add(Tuple.Create(nuGetProjectA, new PackageIdentity("a", new NuGetVersion(2, 0, 0)))); expected.Add(Tuple.Create(nuGetProjectA, new PackageIdentity("b", new NuGetVersion(2, 0, 0)))); expected.Add(Tuple.Create(nuGetProjectB, new PackageIdentity("c", new NuGetVersion(2, 0, 0)))); expected.Add(Tuple.Create(nuGetProjectB, new PackageIdentity("d", 
new NuGetVersion(2, 0, 0)))); Assert.Equal(4, resulting.Length); Assert.True(PreviewResultsCompare(resulting, expected)); } } [Fact] public async Task TestPacManPreviewInstallPackageFollowingForceUninstall() { // Arrange // Set up Package Source var packages = new List<SourcePackageDependencyInfo> { new SourcePackageDependencyInfo("a", new NuGetVersion(1, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("a", new NuGetVersion(2, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("a", new NuGetVersion(3, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("b", new NuGetVersion(1, 0, 0), new[] { new Packaging.Core.PackageDependency("a", new VersionRange(new NuGetVersion(1, 0, 0))) }, true, null), new SourcePackageDependencyInfo("b", new NuGetVersion(2, 0, 0), new[] { new Packaging.Core.PackageDependency("a", new VersionRange(new NuGetVersion(2, 0, 0))) }, true, null), new SourcePackageDependencyInfo("b", new NuGetVersion(3, 0, 0), new[] { new Packaging.Core.PackageDependency("a", new VersionRange(new NuGetVersion(2, 0, 0))) }, true, null), new SourcePackageDependencyInfo("c", new NuGetVersion(1, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("c", new NuGetVersion(2, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("c", new NuGetVersion(3, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("d", new NuGetVersion(1, 0, 0), new[] { new Packaging.Core.PackageDependency("e", new VersionRange(new NuGetVersion(1, 0, 0))) }, true, null), new SourcePackageDependencyInfo("d", new NuGetVersion(2, 0, 0), new[] { new Packaging.Core.PackageDependency("e", new VersionRange(new NuGetVersion(1, 0, 0))) }, true, null), new SourcePackageDependencyInfo("d", new NuGetVersion(3, 0, 0), new[] { new 
Packaging.Core.PackageDependency("e", new VersionRange(new NuGetVersion(1, 0, 0))) }, true, null),
                new SourcePackageDependencyInfo("d", new NuGetVersion(4, 0, 0), new[] { new Packaging.Core.PackageDependency("e", new VersionRange(new NuGetVersion(1, 0, 0))) }, true, null),
                new SourcePackageDependencyInfo("e", new NuGetVersion(1, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
                new SourcePackageDependencyInfo("e", new NuGetVersion(2, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
                new SourcePackageDependencyInfo("f", new NuGetVersion(1, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
                new SourcePackageDependencyInfo("f", new NuGetVersion(2, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
                new SourcePackageDependencyInfo("f", new NuGetVersion(3, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
                new SourcePackageDependencyInfo("f", new NuGetVersion(4, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
            };

            var sourceRepositoryProvider = CreateSource(packages);

            // Set up NuGetProject
            var fwk45 = NuGetFramework.Parse("net45");

            var installedPackages = new List<NuGet.Packaging.PackageReference>
            {
                new NuGet.Packaging.PackageReference(new PackageIdentity("a", new NuGetVersion(1, 0, 0)), fwk45, true),
                new NuGet.Packaging.PackageReference(new PackageIdentity("b", new NuGetVersion(1, 0, 0)), fwk45, true),
                new NuGet.Packaging.PackageReference(new PackageIdentity("c", new NuGetVersion(2, 0, 0)), fwk45, true),
                new NuGet.Packaging.PackageReference(new PackageIdentity("d", new NuGetVersion(2, 0, 0)), fwk45, true),
                // No package "e" even though "d" depends on it (the user must have done an uninstall-package with a -force option)
            };

            var nuGetProject = new TestNuGetProject(installedPackages);

            // Create Package Manager
            using (var solutionManager = new TestSolutionManager(true))
            {
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    NullSettings.Instance,
                    solutionManager,
                    new TestDeleteOnRestartManager());

                // Main Act
                var target = new PackageIdentity("f", new NuGetVersion(3, 0, 0));

                var result = await nuGetPackageManager.PreviewInstallPackageAsync(
                    nuGetProject,
                    target,
                    new ResolutionContext(),
                    new TestNuGetProjectContext(),
                    sourceRepositoryProvider.GetRepositories(),
                    sourceRepositoryProvider.GetRepositories(),
                    CancellationToken.None);

                // Assert
                // Only the dependency-free target itself should be installed; the inconsistent
                // installed set (missing "e") must not be repaired as a side effect.
                var resulting = result.Select(a => Tuple.Create(a.PackageIdentity, a.NuGetProjectActionType)).ToArray();

                var expected = new List<Tuple<PackageIdentity, NuGetProjectActionType>>();
                Expected(expected, target.Id, target.Version);

                Assert.True(Compare(resulting, expected));
            }
        }

        /// <summary>
        /// Installing "d" 2.0.0 must pull its transitive dependency chain up to the versions the
        /// new "d" requires, while leaving the unrelated installed package "f" untouched.
        /// </summary>
        [Fact]
        public async Task TestPacManPreviewInstallPackageWithNonTargetDependency()
        {
            // Arrange

            // Set up Package Source
            var packages = new List<SourcePackageDependencyInfo>
            {
                new SourcePackageDependencyInfo("a", new NuGetVersion(1, 0, 0), new[] { new Packaging.Core.PackageDependency("b", new VersionRange(new NuGetVersion(1, 0, 0), true, new NuGetVersion(1, 0, 0), true)) }, true, null),
                new SourcePackageDependencyInfo("a", new NuGetVersion(2, 0, 0), new[] { new Packaging.Core.PackageDependency("b", new VersionRange(new NuGetVersion(2, 0, 0), true, new NuGetVersion(2, 0, 0), true)), new Packaging.Core.PackageDependency("c", new VersionRange(new NuGetVersion(2, 0, 0), true, new NuGetVersion(2, 0, 0), true))}, true, null),
                new SourcePackageDependencyInfo("b", new NuGetVersion(1, 0, 0), new[] { new Packaging.Core.PackageDependency("d", new VersionRange(new NuGetVersion(1, 0, 0), true, new NuGetVersion(1, 0, 0), true)) }, true, null),
                new SourcePackageDependencyInfo("b", new NuGetVersion(2, 0, 0), new[] { new Packaging.Core.PackageDependency("d", new VersionRange(new NuGetVersion(2, 0, 0), true, new NuGetVersion(2, 0, 0), true)) }, true, null),
                new SourcePackageDependencyInfo("c", new NuGetVersion(1, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
                new SourcePackageDependencyInfo("c", new NuGetVersion(2, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
                new SourcePackageDependencyInfo("d", new NuGetVersion(1, 0, 0), new[] { new Packaging.Core.PackageDependency("e", new VersionRange(new NuGetVersion(1, 0, 0), true, new NuGetVersion(1, 0, 0), true)) }, true, null),
                new SourcePackageDependencyInfo("d", new NuGetVersion(2, 0, 0), new[] { new Packaging.Core.PackageDependency("e", new VersionRange(new NuGetVersion(2, 0, 0), true, new NuGetVersion(2, 0, 0), true)) }, true, null),
                new SourcePackageDependencyInfo("e", new NuGetVersion(1, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
                new SourcePackageDependencyInfo("e", new NuGetVersion(2, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
                new SourcePackageDependencyInfo("f", new NuGetVersion(1, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
                new SourcePackageDependencyInfo("f", new NuGetVersion(2, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
                new SourcePackageDependencyInfo("f", new NuGetVersion(3, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
                new SourcePackageDependencyInfo("f", new NuGetVersion(4, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
            };

            var sourceRepositoryProvider = CreateSource(packages);

            // Set up NuGetProject
            var fwk45 = NuGetFramework.Parse("net45");

            var installedPackages = new List<NuGet.Packaging.PackageReference>
            {
                new NuGet.Packaging.PackageReference(new PackageIdentity("a", new NuGetVersion(1, 0, 0)), fwk45, true),
                new NuGet.Packaging.PackageReference(new PackageIdentity("b", new NuGetVersion(1, 0, 0)), fwk45, true),
                new NuGet.Packaging.PackageReference(new PackageIdentity("d", new NuGetVersion(1, 0, 0)), fwk45, true),
                new NuGet.Packaging.PackageReference(new PackageIdentity("e", new NuGetVersion(1, 0, 0)), fwk45, true),
                new NuGet.Packaging.PackageReference(new PackageIdentity("f", new NuGetVersion(1, 0, 0)), fwk45, true),
            };

            var nuGetProject = new TestNuGetProject(installedPackages);

            // Create Package Manager
            using (var solutionManager = new TestSolutionManager(true))
            {
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    NullSettings.Instance,
                    solutionManager,
                    new TestDeleteOnRestartManager());

                // Main Act
                var target = new PackageIdentity("d", new NuGetVersion(2, 0, 0));

                var result = await nuGetPackageManager.PreviewInstallPackageAsync(
                    nuGetProject,
                    target,
                    new ResolutionContext(),
                    new TestNuGetProjectContext(),
                    sourceRepositoryProvider.GetRepositories(),
                    sourceRepositoryProvider.GetRepositories(),
                    CancellationToken.None);

                // Assert
                var resulting = result.Select(a => Tuple.Create(a.PackageIdentity, a.NuGetProjectActionType)).ToList();

                var expected = new List<Tuple<PackageIdentity, NuGetProjectActionType>>();
                Expected(expected, target.Id, new NuGetVersion(1, 0, 0), target.Version);
                Expected(expected, "e", new NuGetVersion(1, 0, 0), new NuGetVersion(2, 0, 0));
                Expected(expected, "b", new NuGetVersion(1, 0, 0), new NuGetVersion(2, 0, 0));
                Expected(expected, "a", new NuGetVersion(1, 0, 0), new NuGetVersion(2, 0, 0));
                Expected(expected, "c", new NuGetVersion(2, 0, 0));

                Assert.True(Compare(resulting, expected));
            }
        }

        /// <summary>
        /// Requesting a multi-package update to versions that cannot coexist
        /// (b 3.0.0 requires exactly a 3.0.0, but a 2.0.0 is requested) must fail the preview.
        /// </summary>
        [Fact]
        public async Task TestPacManPreviewUpdateMultiWithConflict()
        {
            // Arrange

            // Set up Package Source
            var packages = new List<SourcePackageDependencyInfo>
            {
                new SourcePackageDependencyInfo("a", new NuGetVersion(1, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
                new SourcePackageDependencyInfo("a", new NuGetVersion(2, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
                new SourcePackageDependencyInfo("a", new NuGetVersion(3, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
                new SourcePackageDependencyInfo("b", new NuGetVersion(1, 0, 0), new[] { new Packaging.Core.PackageDependency("a", new VersionRange(new NuGetVersion(1, 0, 0), true, new NuGetVersion(1, 0, 0), true)) }, true, null),
                new SourcePackageDependencyInfo("b", new NuGetVersion(2, 0, 0), new[] { new Packaging.Core.PackageDependency("a", new VersionRange(new NuGetVersion(2, 0, 0), true, new NuGetVersion(2, 0, 0), true)) }, true, null),
                new SourcePackageDependencyInfo("b", new NuGetVersion(3, 0, 0), new[] { new Packaging.Core.PackageDependency("a", new VersionRange(new NuGetVersion(3, 0, 0), true, new NuGetVersion(3, 0, 0), true)) }, true, null),
                new SourcePackageDependencyInfo("c", new NuGetVersion(1, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
                new SourcePackageDependencyInfo("c", new NuGetVersion(2, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
                new SourcePackageDependencyInfo("c", new NuGetVersion(3, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
            };

            var sourceRepositoryProvider = CreateSource(packages);

            // Set up NuGetProject
            var fwk45 = NuGetFramework.Parse("net45");

            var installedPackages = new List<NuGet.Packaging.PackageReference>
            {
                new NuGet.Packaging.PackageReference(new PackageIdentity("a", new NuGetVersion(1, 0, 0)), fwk45, true),
                new NuGet.Packaging.PackageReference(new PackageIdentity("b", new NuGetVersion(1, 0, 0)), fwk45, true),
                new NuGet.Packaging.PackageReference(new PackageIdentity("c", new NuGetVersion(2, 0, 0)), fwk45, true),
            };

            var nuGetProject = new TestNuGetProject(installedPackages);

            // Create Package Manager
            using (var solutionManager = new TestSolutionManager(true))
            {
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    NullSettings.Instance,
                    solutionManager,
                    new TestDeleteOnRestartManager());

                // Main Act
                var targets = new List<PackageIdentity>
                {
                    new PackageIdentity("a", new NuGetVersion(2, 0, 0)),
                    new PackageIdentity("b", new NuGetVersion(3, 0, 0)),
                };

                // Assert (formerly try/catch + Assert.True(false) + non-generic Assert.IsType):
                // Assert.ThrowsAsync gives a precise failure message when no exception is thrown
                // and still verifies the exact exception type.
                await Assert.ThrowsAsync<InvalidOperationException>(() => nuGetPackageManager.PreviewUpdatePackagesAsync(
                    targets,
                    new List<NuGetProject> { nuGetProject },
                    new ResolutionContext(),
                    new TestNuGetProjectContext(),
                    sourceRepositoryProvider.GetRepositories(),
                    sourceRepositoryProvider.GetRepositories(),
                    CancellationToken.None));
            }
        }

        /// <summary>
        /// Updating "a" to 3.0.0 conflicts with the installed b 3.0.0, whose dependency range
        /// [2.0.0, 3.0.0) excludes a 3.0.0 — the preview must fail rather than downgrade.
        /// </summary>
        [Fact]
        public async Task TestPacManPreviewUpdateMultiWithDowngradeConflict()
        {
            // Arrange

            // Set up Package Source
            var packages = new List<SourcePackageDependencyInfo>
            {
                new SourcePackageDependencyInfo("a", new NuGetVersion(1, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
                new SourcePackageDependencyInfo("a", new NuGetVersion(2, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
                new SourcePackageDependencyInfo("a", new NuGetVersion(3, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
                new SourcePackageDependencyInfo("b", new NuGetVersion(1, 0, 0), new[] { new Packaging.Core.PackageDependency("a", new VersionRange(new NuGetVersion(1, 0, 0), true, new NuGetVersion(1, 0, 0), true)) }, true, null),
                new SourcePackageDependencyInfo("b", new NuGetVersion(2, 0, 0), new[] { new Packaging.Core.PackageDependency("a", new VersionRange(new NuGetVersion(2, 0, 0), true, new NuGetVersion(3, 0, 0), true)) }, true, null),
                new SourcePackageDependencyInfo("b", new NuGetVersion(3, 0, 0), new[] { new Packaging.Core.PackageDependency("a", new VersionRange(new NuGetVersion(2, 0, 0), true, new NuGetVersion(3, 0, 0), false)) }, true, null),
                new SourcePackageDependencyInfo("c", new NuGetVersion(1, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
                new SourcePackageDependencyInfo("c", new NuGetVersion(2, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
                new SourcePackageDependencyInfo("c", new NuGetVersion(3, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
            };

            var sourceRepositoryProvider = CreateSource(packages);

            // Set up NuGetProject
            var fwk45 = NuGetFramework.Parse("net45");

            var installedPackages = new List<NuGet.Packaging.PackageReference>
            {
                new NuGet.Packaging.PackageReference(new PackageIdentity("a", new NuGetVersion(2, 0, 0)), fwk45, true),
                new NuGet.Packaging.PackageReference(new PackageIdentity("b", new NuGetVersion(3, 0,
0)), fwk45, true),
                new NuGet.Packaging.PackageReference(new PackageIdentity("c", new NuGetVersion(2, 0, 0)), fwk45, true),
            };

            var nuGetProject = new TestNuGetProject(installedPackages);

            // Create Package Manager
            using (var solutionManager = new TestSolutionManager(true))
            {
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    NullSettings.Instance,
                    solutionManager,
                    new TestDeleteOnRestartManager());

                // Main Act
                var targets = new List<PackageIdentity>
                {
                    new PackageIdentity("a", new NuGetVersion(3, 0, 0)),
                    new PackageIdentity("c", new NuGetVersion(3, 0, 0)),
                };

                // Assert (formerly try/catch + Assert.True(false) + non-generic Assert.IsType):
                // Assert.ThrowsAsync reports a clear failure when nothing is thrown and checks
                // the exact exception type in one step.
                await Assert.ThrowsAsync<InvalidOperationException>(() => nuGetPackageManager.PreviewUpdatePackagesAsync(
                    targets,
                    new List<NuGetProject> { nuGetProject },
                    new ResolutionContext(),
                    new TestNuGetProjectContext(),
                    sourceRepositoryProvider.GetRepositories(),
                    sourceRepositoryProvider.GetRepositories(),
                    CancellationToken.None));
            }
        }

        // [Fact] -- This test performs update but verifies for a specific version
        // This is not going to work as newer versions are uploaded
        public async Task TestPacManPreviewUpdatePackages()
        {
            // Arrange
            var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV2OnlySourceRepositoryProvider();
            using (var testSolutionManager = new TestSolutionManager(true))
            {
                var testSettings = NullSettings.Instance;
                var token = CancellationToken.None;
                var deleteOnRestartManager = new TestDeleteOnRestartManager();
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    testSettings,
                    testSolutionManager,
                    deleteOnRestartManager);
                var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings);

                var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject();
                var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem;
                var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath;
                var packageIdentity = _morePackageWithDependents[3]; // Microsoft.Net.Http.2.2.22

                // Pre-Assert
                // Check that the packages.config file does not exist
                Assert.False(File.Exists(packagesConfigPath));
                // Check that there are no packages returned by PackagesConfigProject
                var packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(0, packagesInPackagesConfig.Count);
                Assert.Equal(0, msBuildNuGetProjectSystem.References.Count);

                // Act
                await nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, packageIdentity,
                    new ResolutionContext(), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories().First(), null, token);

                // Assert
                // Check that the packages.config file exists after the installation
                Assert.True(File.Exists(packagesConfigPath));
                // Check the number of packages and packages returned by PackagesConfigProject after the installation
                packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(3, packagesInPackagesConfig.Count);
                Assert.Equal(packageIdentity, packagesInPackagesConfig[2].PackageIdentity);
                Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[2].TargetFramework);
                Assert.Equal(_morePackageWithDependents[0], packagesInPackagesConfig[1].PackageIdentity);
                Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[1].TargetFramework);
                Assert.Equal(_morePackageWithDependents[2], packagesInPackagesConfig[0].PackageIdentity);
                Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[0].TargetFramework);

                // Main Act
                var packageActions = (await nuGetPackageManager.PreviewUpdatePackagesAsync(
                    new List<NuGetProject> { msBuildNuGetProject },
                    new ResolutionContext(DependencyBehavior.Highest, false, true, VersionConstraints.None),
                    new TestNuGetProjectContext(),
                    sourceRepositoryProvider.GetRepositories(),
                    sourceRepositoryProvider.GetRepositories(),
                    token)).ToList();

                // Assert
                Assert.Equal(4, packageActions.Count);
                Assert.True(_morePackageWithDependents[0].Equals(packageActions[0].PackageIdentity));
                Assert.Equal(NuGetProjectActionType.Uninstall, packageActions[0].NuGetProjectActionType);
                Assert.True(_morePackageWithDependents[3].Equals(packageActions[1].PackageIdentity));
                Assert.Equal(NuGetProjectActionType.Uninstall, packageActions[1].NuGetProjectActionType);
                Assert.True(_morePackageWithDependents[1].Equals(packageActions[2].PackageIdentity));
                Assert.Equal(NuGetProjectActionType.Install, packageActions[2].NuGetProjectActionType);
                Assert.Equal(sourceRepositoryProvider.GetRepositories().Single().PackageSource.Source,
                    packageActions[2].SourceRepository.PackageSource.Source);
                Assert.True(_morePackageWithDependents[4].Equals(packageActions[3].PackageIdentity));
                Assert.Equal(NuGetProjectActionType.Install, packageActions[3].NuGetProjectActionType);
                Assert.Equal(sourceRepositoryProvider.GetRepositories().Single().PackageSource.Source,
                    packageActions[3].SourceRepository.PackageSource.Source);
            }
        }

        /// <summary>
        /// A reinstall (update pinned to the exact installed versions) must preview as
        /// uninstall + install pairs for every installed package, in dependency order.
        /// </summary>
        [Fact]
        public async Task TestPacManPreviewReinstallPackages()
        {
            // Arrange
            var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV2OnlySourceRepositoryProvider();
            using (var testSolutionManager = new TestSolutionManager(true))
            {
                var testSettings = NullSettings.Instance;
                var token = CancellationToken.None;
                var deleteOnRestartManager = new TestDeleteOnRestartManager();
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    testSettings,
                    testSolutionManager,
                    deleteOnRestartManager);
                var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings);

                var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject();
                var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem;
                var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath;
                var packageIdentity = _morePackageWithDependents[3]; // Microsoft.Net.Http.2.2.22

                // Pre-Assert
                // Check that the packages.config file does not exist
                Assert.False(File.Exists(packagesConfigPath));
                // Check that there are no packages returned by PackagesConfigProject
                var packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(0, packagesInPackagesConfig.Count);
                Assert.Equal(0, msBuildNuGetProjectSystem.References.Count);

                // Act
                await nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, packageIdentity,
                    new ResolutionContext(), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories().First(), null, token);

                // Assert
                // Check that the packages.config file exists after the installation
                Assert.True(File.Exists(packagesConfigPath));
                // Check the number of packages and packages returned by PackagesConfigProject after the installation
                packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(3, packagesInPackagesConfig.Count);
                Assert.Equal(packageIdentity, packagesInPackagesConfig[2].PackageIdentity);
                Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[2].TargetFramework);
                Assert.Equal(_morePackageWithDependents[0], packagesInPackagesConfig[1].PackageIdentity);
                Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[1].TargetFramework);
                Assert.Equal(_morePackageWithDependents[2], packagesInPackagesConfig[0].PackageIdentity);
                Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[0].TargetFramework);

                // Pinning every version component forces a reinstall rather than an upgrade.
                var resolutionContext = new ResolutionContext(
                    DependencyBehavior.Highest,
                    false,
                    true,
                    VersionConstraints.ExactMajor | VersionConstraints.ExactMinor | VersionConstraints.ExactPatch | VersionConstraints.ExactRelease);

                // Main Act
                var packageActions = (await nuGetPackageManager.PreviewUpdatePackagesAsync(
                    new List<NuGetProject> { msBuildNuGetProject },
                    resolutionContext,
                    new TestNuGetProjectContext(),
                    sourceRepositoryProvider.GetRepositories(),
                    sourceRepositoryProvider.GetRepositories(),
                    token)).ToList();

                // Assert
                var singlePackageSource = sourceRepositoryProvider.GetRepositories().Single().PackageSource.Source;
                Assert.Equal(6, packageActions.Count);
                Assert.True(_morePackageWithDependents[3].Equals(packageActions[0].PackageIdentity));
                Assert.Equal(NuGetProjectActionType.Uninstall, packageActions[0].NuGetProjectActionType);
                Assert.True(_morePackageWithDependents[2].Equals(packageActions[1].PackageIdentity));
                Assert.Equal(NuGetProjectActionType.Uninstall, packageActions[1].NuGetProjectActionType);
                Assert.True(_morePackageWithDependents[0].Equals(packageActions[2].PackageIdentity));
                Assert.Equal(NuGetProjectActionType.Uninstall, packageActions[2].NuGetProjectActionType);
                Assert.True(_morePackageWithDependents[0].Equals(packageActions[3].PackageIdentity));
                Assert.Equal(NuGetProjectActionType.Install, packageActions[3].NuGetProjectActionType);
                Assert.Equal(singlePackageSource, packageActions[3].SourceRepository.PackageSource.Source);
                Assert.True(_morePackageWithDependents[2].Equals(packageActions[4].PackageIdentity));
                Assert.Equal(NuGetProjectActionType.Install, packageActions[4].NuGetProjectActionType);
                Assert.Equal(singlePackageSource, packageActions[4].SourceRepository.PackageSource.Source);
                Assert.True(_morePackageWithDependents[3].Equals(packageActions[5].PackageIdentity));
                Assert.Equal(NuGetProjectActionType.Install, packageActions[5].NuGetProjectActionType);
                Assert.Equal(singlePackageSource, packageActions[5].SourceRepository.PackageSource.Source);
            }
        }

        /// <summary>
        /// End-to-end reinstall: preview the uninstall/install pairs, execute them, and verify
        /// the project ends up with the same packages installed on disk.
        /// </summary>
        [Fact]
        public async Task TestPacManReinstallPackages()
        {
            // Arrange
            var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV2OnlySourceRepositoryProvider();
            using (var testSolutionManager = new TestSolutionManager(true))
            {
                var testSettings =
NullSettings.Instance;
                var token = CancellationToken.None;
                var deleteOnRestartManager = new TestDeleteOnRestartManager();
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    testSettings,
                    testSolutionManager,
                    deleteOnRestartManager);
                var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings);
                var packagePathResolver = new PackagePathResolver(packagesFolderPath);

                var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject();
                var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem;
                var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath;
                var folderNuGetProject = msBuildNuGetProject.FolderNuGetProject;
                var packageIdentity = _morePackageWithDependents[3]; // Microsoft.Net.Http.2.2.22

                // Pre-Assert
                // packages.config must not exist yet and the project must be empty
                Assert.False(File.Exists(packagesConfigPath));
                var packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(0, packagesInPackagesConfig.Count);
                Assert.Equal(0, msBuildNuGetProjectSystem.References.Count);

                // Act
                await nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, packageIdentity,
                    new ResolutionContext(), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories().First(), null, token);

                // Assert
                // packages.config must exist and list the target plus its two dependencies
                Assert.True(File.Exists(packagesConfigPath));
                packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(3, packagesInPackagesConfig.Count);
                Assert.Equal(packageIdentity, packagesInPackagesConfig[2].PackageIdentity);
                Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[2].TargetFramework);
                Assert.Equal(_morePackageWithDependents[0], packagesInPackagesConfig[1].PackageIdentity);
                Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[1].TargetFramework);
                Assert.Equal(_morePackageWithDependents[2], packagesInPackagesConfig[0].PackageIdentity);
                Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[0].TargetFramework);

                var installedPackageIdentities = (await msBuildNuGetProject.GetInstalledPackagesAsync(token))
                    .Select(pr => pr.PackageIdentity);

                // Pin every version component so the "update" is a pure reinstall.
                var resolutionContext = new ResolutionContext(
                    DependencyBehavior.Highest,
                    false,
                    true,
                    VersionConstraints.ExactMajor | VersionConstraints.ExactMinor | VersionConstraints.ExactPatch | VersionConstraints.ExactRelease);

                // Act
                var packageActions = (await nuGetPackageManager.PreviewUpdatePackagesAsync(
                    new List<NuGetProject> { msBuildNuGetProject },
                    resolutionContext,
                    new TestNuGetProjectContext(),
                    sourceRepositoryProvider.GetRepositories(),
                    sourceRepositoryProvider.GetRepositories(),
                    token)).ToList();

                // Assert: three uninstalls (reverse dependency order) followed by three installs
                var singlePackageSource = sourceRepositoryProvider.GetRepositories().Single().PackageSource.Source;
                Assert.Equal(6, packageActions.Count);
                Assert.True(_morePackageWithDependents[3].Equals(packageActions[0].PackageIdentity));
                Assert.Equal(NuGetProjectActionType.Uninstall, packageActions[0].NuGetProjectActionType);
                Assert.True(_morePackageWithDependents[2].Equals(packageActions[1].PackageIdentity));
                Assert.Equal(NuGetProjectActionType.Uninstall, packageActions[1].NuGetProjectActionType);
                Assert.True(_morePackageWithDependents[0].Equals(packageActions[2].PackageIdentity));
                Assert.Equal(NuGetProjectActionType.Uninstall, packageActions[2].NuGetProjectActionType);
                Assert.True(_morePackageWithDependents[0].Equals(packageActions[3].PackageIdentity));
                Assert.Equal(NuGetProjectActionType.Install, packageActions[3].NuGetProjectActionType);
                Assert.Equal(singlePackageSource, packageActions[3].SourceRepository.PackageSource.Source);
                Assert.True(_morePackageWithDependents[2].Equals(packageActions[4].PackageIdentity));
                Assert.Equal(NuGetProjectActionType.Install, packageActions[4].NuGetProjectActionType);
                Assert.Equal(singlePackageSource, packageActions[4].SourceRepository.PackageSource.Source);
                Assert.True(_morePackageWithDependents[3].Equals(packageActions[5].PackageIdentity));
                Assert.Equal(NuGetProjectActionType.Install, packageActions[5].NuGetProjectActionType);
                Assert.Equal(singlePackageSource, packageActions[5].SourceRepository.PackageSource.Source);

                // Main Act
                await nuGetPackageManager.ExecuteNuGetProjectActionsAsync(msBuildNuGetProject, packageActions,
                    new TestNuGetProjectContext(), NullSourceCacheContext.Instance, token);

                // packages.config must still exist with the same three entries after the reinstall
                Assert.True(File.Exists(packagesConfigPath));
                packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(3, packagesInPackagesConfig.Count);
                Assert.Equal(packageIdentity, packagesInPackagesConfig[2].PackageIdentity);
                Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[2].TargetFramework);
                Assert.Equal(_morePackageWithDependents[0], packagesInPackagesConfig[1].PackageIdentity);
                Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[1].TargetFramework);
                Assert.Equal(_morePackageWithDependents[2], packagesInPackagesConfig[0].PackageIdentity);
                Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[0].TargetFramework);

                // The nupkgs must be present in the packages folder after execution
                Assert.True(File.Exists(folderNuGetProject.GetInstalledPackageFilePath(packageIdentity)));
                Assert.True(File.Exists(folderNuGetProject.GetInstalledPackageFilePath(_morePackageWithDependents[0])));
                Assert.True(File.Exists(folderNuGetProject.GetInstalledPackageFilePath(_morePackageWithDependents[2])));
            }
        }

        /// <summary>
        /// Reinstalling a single package ("b") must reinstall it together with its dependency
        /// closure at the exact installed versions, leaving unrelated packages untouched.
        /// </summary>
        [Fact]
        public async Task TestPacManReinstallSpecificPackage()
        {
            // Arrange

            // Set up Package Source
            var packages = new List<SourcePackageDependencyInfo>
            {
                new SourcePackageDependencyInfo("a", new NuGetVersion(1, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
                new SourcePackageDependencyInfo("a", new NuGetVersion(2, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
                new SourcePackageDependencyInfo("a", new NuGetVersion(3, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
                new SourcePackageDependencyInfo("b", new NuGetVersion(1, 0, 0), new[] { new Packaging.Core.PackageDependency("a", new VersionRange(new NuGetVersion(1, 0, 0))), new Packaging.Core.PackageDependency("c", new VersionRange(new NuGetVersion(2, 0, 0))), }, true, null),
                new SourcePackageDependencyInfo("b", new NuGetVersion(2, 0, 0), new[] { new Packaging.Core.PackageDependency("a", new VersionRange(new NuGetVersion(2, 0, 0))), new Packaging.Core.PackageDependency("c", new VersionRange(new NuGetVersion(2, 0, 0))), }, true, null),
                new SourcePackageDependencyInfo("b", new NuGetVersion(3, 0, 0), new[] { new Packaging.Core.PackageDependency("a", new VersionRange(new NuGetVersion(3, 0, 0))), }, true, null),
                new SourcePackageDependencyInfo("c", new NuGetVersion(1, 0, 0), new[] { new Packaging.Core.PackageDependency("d", new VersionRange(new NuGetVersion(2, 0, 0))), }, true, null),
                new SourcePackageDependencyInfo("c", new NuGetVersion(2, 0, 0), new[] { new Packaging.Core.PackageDependency("d", new VersionRange(new NuGetVersion(2, 0, 0))), }, true, null),
                new SourcePackageDependencyInfo("c", new NuGetVersion(3, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
                new SourcePackageDependencyInfo("d", new NuGetVersion(1, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
                new SourcePackageDependencyInfo("d", new NuGetVersion(2, 0, 0), new
Packaging.Core.PackageDependency[] { }, true, null),
                new SourcePackageDependencyInfo("d", new NuGetVersion(3, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
                new SourcePackageDependencyInfo("d", new NuGetVersion(4, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
                new SourcePackageDependencyInfo("e", new NuGetVersion(1, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
                new SourcePackageDependencyInfo("e", new NuGetVersion(2, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
                new SourcePackageDependencyInfo("f", new NuGetVersion(1, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
                new SourcePackageDependencyInfo("f", new NuGetVersion(2, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
                new SourcePackageDependencyInfo("f", new NuGetVersion(3, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
                new SourcePackageDependencyInfo("f", new NuGetVersion(4, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null),
            };

            var sourceRepositoryProvider = CreateSource(packages);

            // Set up NuGetProject
            var fwk45 = NuGetFramework.Parse("net45");

            var installedPackages = new List<NuGet.Packaging.PackageReference>
            {
                new NuGet.Packaging.PackageReference(new PackageIdentity("a", new NuGetVersion(1, 0, 0)), fwk45, true),
                new NuGet.Packaging.PackageReference(new PackageIdentity("b", new NuGetVersion(1, 0, 0)), fwk45, true),
                new NuGet.Packaging.PackageReference(new PackageIdentity("c", new NuGetVersion(2, 0, 0)), fwk45, true),
                new NuGet.Packaging.PackageReference(new PackageIdentity("d", new NuGetVersion(2, 0, 0)), fwk45, true),
                new NuGet.Packaging.PackageReference(new PackageIdentity("e", new NuGetVersion(1, 0, 0)), fwk45, true),
                new NuGet.Packaging.PackageReference(new PackageIdentity("f", new NuGetVersion(3, 0, 0)), fwk45, true),
            };

            var nuGetProject = new TestNuGetProject(installedPackages);

            // Create Package Manager
            using (var solutionManager = new TestSolutionManager(true))
            {
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    NullSettings.Instance,
                    solutionManager,
                    new TestDeleteOnRestartManager());

                // Main Act
                // Pin all version components so updating "b" becomes a pure reinstall.
                var resolutionContext = new ResolutionContext(
                    DependencyBehavior.Highest,
                    false,
                    true,
                    VersionConstraints.ExactMajor | VersionConstraints.ExactMinor | VersionConstraints.ExactPatch | VersionConstraints.ExactRelease);

                var result = await nuGetPackageManager.PreviewUpdatePackagesAsync(
                    "b",
                    new List<NuGetProject> { nuGetProject },
                    resolutionContext,
                    new TestNuGetProjectContext(),
                    sourceRepositoryProvider.GetRepositories(),
                    sourceRepositoryProvider.GetRepositories(),
                    CancellationToken.None);

                // Assert
                var resulting = result.Select(a => Tuple.Create(a.PackageIdentity, a.NuGetProjectActionType)).ToArray();

                var expected = new List<Tuple<PackageIdentity, NuGetProjectActionType>>();
                Expected(expected, "a", new NuGetVersion(1, 0, 0), new NuGetVersion(1, 0, 0));
                Expected(expected, "b", new NuGetVersion(1, 0, 0), new NuGetVersion(1, 0, 0));
                Expected(expected, "c", new NuGetVersion(2, 0, 0), new NuGetVersion(2, 0, 0));
                Expected(expected, "d", new NuGetVersion(2, 0, 0), new NuGetVersion(2, 0, 0));
                // note e and f are not touched

                Assert.True(Compare(resulting, expected));
            }
        }

        /// <summary>
        /// Installing a package whose nupkg carries a ReadMe.txt must open that file through the
        /// execution context when the package is the direct install target.
        /// </summary>
        [Fact]
        public async Task TestPacManOpenReadmeFile()
        {
            // Arrange
            var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider();
            using (var testSolutionManager = new TestSolutionManager(true))
            {
                var testSettings = NullSettings.Instance;
                var token = CancellationToken.None;
                var deleteOnRestartManager = new TestDeleteOnRestartManager();
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    testSettings,
                    testSolutionManager,
                    deleteOnRestartManager);
                var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings);
                var packagePathResolver = new PackagePathResolver(packagesFolderPath);

                var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject();
                var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem;
                var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath;
                var packageIdentity = new PackageIdentity("elmah", new NuGetVersion("1.2.2"));

                // Pre-Assert
                // packages.config must not exist yet and the project must be empty
                Assert.False(File.Exists(packagesConfigPath));
                var packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(0, packagesInPackagesConfig.Count);
                Assert.Equal(0, msBuildNuGetProjectSystem.References.Count);

                // Act
                // Set the direct install on the execution context of INuGetProjectContext before installing a package
                var testNuGetProjectContext = new TestNuGetProjectContext();
                testNuGetProjectContext.TestExecutionContext = new TestExecutionContext(packageIdentity);
                await nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, packageIdentity,
                    new ResolutionContext(), testNuGetProjectContext, sourceRepositoryProvider.GetRepositories().First(), null, token);

                // Assert
                // packages.config must exist with the target package and its dependency
                Assert.True(File.Exists(packagesConfigPath));
                packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(2, packagesInPackagesConfig.Count);
                Assert.Equal(packageIdentity, packagesInPackagesConfig[0].PackageIdentity);
                Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[0].TargetFramework);

                // Exactly one file — the package's ReadMe.txt — must have been opened
                Assert.Equal(1, testNuGetProjectContext.TestExecutionContext.FilesOpened.Count);
                Assert.True(string.Equals(Path.Combine(packagePathResolver.GetInstallPath(packageIdentity), "ReadMe.txt"),
                    testNuGetProjectContext.TestExecutionContext.FilesOpened.First(),
StringComparison.OrdinalIgnoreCase)); } } [Fact] public async Task TestPacManPreviewInstallPackageIdUnexpectedDowngrade() { // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV2OnlySourceRepositoryProvider(); using (var testSolutionManager = new TestSolutionManager(true)) { var testSettings = NullSettings.Instance; var token = CancellationToken.None; var deleteOnRestartManager = new TestDeleteOnRestartManager(); var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, testSettings, testSolutionManager, deleteOnRestartManager); var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings); var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject("TestProjectName"); var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem; var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath; var packageId = "Newtonsoft.Json"; var testNuGetProjectContext = new TestNuGetProjectContext(); var primarySourceRepository = sourceRepositoryProvider.GetRepositories().First(); // Pre-Assert // Check that the packages.config file does not exist Assert.False(File.Exists(packagesConfigPath)); // Check that there are no packages returned by PackagesConfigProject var packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(0, packagesInPackagesConfig.Count); Assert.Equal(0, msBuildNuGetProjectSystem.References.Count); var resolutionContext = new ResolutionContext( DependencyBehavior.Lowest, includePrelease: false, includeUnlisted: false, versionConstraints: VersionConstraints.None); // Act await nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, packageId, resolutionContext, testNuGetProjectContext, primarySourceRepository, null, token); // Check that the packages.config file does not exist Assert.True(File.Exists(packagesConfigPath)); // 
Check that there are no packages returned by PackagesConfigProject packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(1, packagesInPackagesConfig.Count); Assert.Equal(1, msBuildNuGetProjectSystem.References.Count); Exception exception = null; try { var packageActions = (await nuGetPackageManager.PreviewInstallPackageAsync(msBuildNuGetProject, packageId, resolutionContext, testNuGetProjectContext, primarySourceRepository, null, token)).ToList(); } catch (Exception ex) { exception = ex; } Assert.NotNull(exception); Assert.True(exception is InvalidOperationException); Assert.Contains("Package 'Newtonsoft.Json.", exception.Message); Assert.Contains("already exists in project 'TestProjectName'", exception.Message); } } [Fact] public async Task TestPacManPreviewInstallPackageThrowsDependencyDowngrade() { // Arrange var packageIdentityA = new PackageIdentity("DotNetOpenAuth.OAuth.Core", new NuGetVersion("4.3.2.13293")); var packageIdentityB1 = new PackageIdentity("DotNetOpenAuth.Core", new NuGetVersion("4.3.2.13293")); var packageIdentityB2 = new PackageIdentity("DotNetOpenAuth.Core", new NuGetVersion("4.3.4.13329")); var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV2OnlySourceRepositoryProvider(); using (var testSolutionManager = new TestSolutionManager(true)) { var testSettings = NullSettings.Instance; var token = CancellationToken.None; var deleteOnRestartManager = new TestDeleteOnRestartManager(); var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, testSettings, testSolutionManager, deleteOnRestartManager); var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings); var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject(); var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem; var packagesConfigPath = 
                    msBuildNuGetProject.PackagesConfigNuGetProject.FullPath;
                var testNuGetProjectContext = new TestNuGetProjectContext();
                var primarySourceRepository = sourceRepositoryProvider.GetRepositories().First();

                // Pre-Assert
                // Check that the packages.config file does not exist
                Assert.False(File.Exists(packagesConfigPath));
                // Check that there are no packages returned by PackagesConfigProject
                var packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(0, packagesInPackagesConfig.Count);
                Assert.Equal(0, msBuildNuGetProjectSystem.References.Count);

                // Act
                // Install the newer DotNetOpenAuth.Core (B2) first.
                await nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, packageIdentityB2,
                    new ResolutionContext(DependencyBehavior.Lowest, includePrelease: true, includeUnlisted: false, versionConstraints: VersionConstraints.None),
                    testNuGetProjectContext, primarySourceRepository, null, token);

                // Check that the packages.config file exists after the installation
                Assert.True(File.Exists(packagesConfigPath));
                // Check the number of packages returned by PackagesConfigProject after the installation
                packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(2, packagesInPackagesConfig.Count);

                // Installing A (which pins B at the older B1 version) must fail instead of downgrading B2.
                Exception exception = null;
                try
                {
                    var packageActions = (await nuGetPackageManager.PreviewInstallPackageAsync(msBuildNuGetProject, packageIdentityA,
                        new ResolutionContext(), testNuGetProjectContext, primarySourceRepository, null, token)).ToList();
                }
                catch (Exception ex)
                {
                    exception = ex;
                }

                Assert.NotNull(exception);
                Assert.True(exception is InvalidOperationException);
                Assert.Equal(
                    string.Format("Unable to resolve dependencies. '{0} {1}' is not compatible with '{2} {3} constraint: {4} (= {5})'.",
                        packageIdentityB2.Id,
                        packageIdentityB2.Version,
                        packageIdentityA.Id,
                        packageIdentityA.Version,
                        packageIdentityB1.Id,
                        packageIdentityB1.Version),
                    exception.Message);
            }
        }

        // With DependencyBehavior.Highest and prerelease allowed, a preview install should pull
        // the highest available version of each dependency (here: latest newtonsoft.json).
        [Fact]
        public async Task TestPacManPreviewInstallDependencyVersionHighestAndPrerelease()
        {
            // Arrange
            var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider();
            using (var testSolutionManager = new TestSolutionManager(true))
            {
                var testSettings = NullSettings.Instance;
                var deleteOnRestartManager = new TestDeleteOnRestartManager();
                var token = CancellationToken.None;
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    testSettings,
                    testSolutionManager,
                    deleteOnRestartManager);
                var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings);
                var primarySourceRepository = sourceRepositoryProvider.GetRepositories().First();
                var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject();
                var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem;
                var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath;
                var dotnetrdfPackageIdentity = new PackageIdentity("dotnetrdf", new NuGetVersion("1.0.8-prerelease1"));
                var resolutionContext = new ResolutionContext(DependencyBehavior.Highest, includePrelease: true, includeUnlisted: false, versionConstraints: VersionConstraints.None);
                var newtonsoftJsonPackageId = "newtonsoft.json";

                // Act
                // Resolve the latest newtonsoft.json version first so the assertion below can
                // check that the preview picked exactly that version.
                var resolvedPackage = await NuGetPackageManager.GetLatestVersionAsync(
                    newtonsoftJsonPackageId,
                    msBuildNuGetProject,
                    resolutionContext,
                    primarySourceRepository,
                    Common.NullLogger.Instance,
                    CancellationToken.None);
                var newtonsoftJsonPackageIdentity = new PackageIdentity(newtonsoftJsonPackageId, resolvedPackage.LatestVersion);

                var nuGetProjectActions = (await nuGetPackageManager.PreviewInstallPackageAsync(msBuildNuGetProject,
                    dotnetrdfPackageIdentity, resolutionContext, new TestNuGetProjectContext(),
                    primarySourceRepository, null, CancellationToken.None)).ToList();

                // Assert
                Assert.Equal(4, nuGetProjectActions.Count);
                var newtonsoftJsonAction = nuGetProjectActions.Where(a => a.PackageIdentity.Equals(newtonsoftJsonPackageIdentity)).FirstOrDefault();
                Assert.NotNull(newtonsoftJsonAction);
            }
        }

        // Installing a package that depends on newtonsoft.json, then updating that dependency to
        // its latest prerelease, should succeed without errors.
        [Fact]
        public async Task TestPacManUpdateDependencyToPrereleaseVersion()
        {
            // Arrange
            var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider();
            using (var testSolutionManager = new TestSolutionManager(true))
            {
                var testSettings = NullSettings.Instance;
                var deleteOnRestartManager = new TestDeleteOnRestartManager();
                var token = CancellationToken.None;
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    testSettings,
                    testSolutionManager,
                    deleteOnRestartManager);
                var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings);
                var primarySourceRepository = sourceRepositoryProvider.GetRepositories().First();
                var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject();
                var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem;
                var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath;
                var webgreasePackageIdentity = new PackageIdentity("WebGrease", new NuGetVersion("1.6.0"));
                var resolutionContext = new ResolutionContext(DependencyBehavior.Lowest, includePrelease: true, includeUnlisted: true, versionConstraints: VersionConstraints.None);
                var newtonsoftJsonPackageId = "newtonsoft.json";

                // Act
                var resolvedPackage = await NuGetPackageManager.GetLatestVersionAsync(
                    newtonsoftJsonPackageId,
                    msBuildNuGetProject,
                    resolutionContext,
                    primarySourceRepository,
                    Common.NullLogger.Instance,
                    CancellationToken.None);
                var newtonsoftJsonLatestPrereleasePackageIdentity = new PackageIdentity(newtonsoftJsonPackageId, resolvedPackage.LatestVersion);

                await nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, webgreasePackageIdentity, resolutionContext,
                    new TestNuGetProjectContext(), primarySourceRepository, null, CancellationToken.None);

                // Assert
                // Check that the packages.config file exists after the installation
                Assert.True(File.Exists(packagesConfigPath));
                // Check the number of packages and packages returned by PackagesConfigProject after the installation
                var packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(3, packagesInPackagesConfig.Count);

                // Main Act - Update newtonsoft.json to latest pre-release
                await nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, newtonsoftJsonLatestPrereleasePackageIdentity, resolutionContext,
                    new TestNuGetProjectContext(), primarySourceRepository, null, CancellationToken.None);
            }
        }

        // An allowedVersions range in packages.config should make a repeat preview install of the
        // same package id fail rather than pick a version outside the range.
        [Fact]
        public async Task TestPacManPreviewInstallWithAllowedVersionsConstraint()
        {
            // Arrange
            var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV2OnlySourceRepositoryProvider();
            using (var testSolutionManager = new TestSolutionManager(true))
            {
                var testSettings = NullSettings.Instance;
                var deleteOnRestartManager = new TestDeleteOnRestartManager();
                var token = CancellationToken.None;
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    testSettings,
                    testSolutionManager,
                    deleteOnRestartManager);
                var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings);
                var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject();
                var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem;
                var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath;
                var newtonsoftJsonPackageId = "newtonsoft.json";
                var newtonsoftJsonPackageIdentity = new PackageIdentity(newtonsoftJsonPackageId, NuGetVersion.Parse("4.5.11"));
                var primarySourceRepository =
                    sourceRepositoryProvider.GetRepositories().Single();
                var resolutionContext = new ResolutionContext();
                var testNuGetProjectContext = new TestNuGetProjectContext();

                // Act
                await nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, newtonsoftJsonPackageIdentity, resolutionContext,
                    testNuGetProjectContext, primarySourceRepository, null, token);

                // Assert
                // Check that the packages.config file exists after the installation
                Assert.True(File.Exists(packagesConfigPath));
                // Check the number of packages and packages returned by PackagesConfigProject after the installation
                var packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(1, packagesInPackagesConfig.Count);
                Assert.Equal(newtonsoftJsonPackageIdentity, packagesInPackagesConfig[0].PackageIdentity);
                Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[0].TargetFramework);
                var installedPackages = await msBuildNuGetProject.GetInstalledPackagesAsync(token);
                var newtonsoftJsonPackageReference = installedPackages.Where(pr => pr.PackageIdentity.Equals(newtonsoftJsonPackageIdentity)).FirstOrDefault();
                // No allowedVersions constraint exists yet.
                Assert.Null(newtonsoftJsonPackageReference.AllowedVersions);

                // Rewrite packages.config by hand to add an allowedVersions range for Newtonsoft.Json.
                const string newPackagesConfig = @"<?xml version='1.0' encoding='utf-8'?>
  <packages>
    <package id='Newtonsoft.Json' version='4.5.11' allowedVersions='[4.0,5.0)' targetFramework='net45' />
  </packages> ";

                File.WriteAllText(packagesConfigPath, newPackagesConfig);

                // Check that the packages.config file exists after the installation
                Assert.True(File.Exists(packagesConfigPath));
                // Check the number of packages and packages returned by PackagesConfigProject after the installation
                packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(1, packagesInPackagesConfig.Count);
                Assert.Equal(newtonsoftJsonPackageIdentity, packagesInPackagesConfig[0].PackageIdentity);
                Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[0].TargetFramework);
                installedPackages = await msBuildNuGetProject.GetInstalledPackagesAsync(token);
                newtonsoftJsonPackageReference = installedPackages.Where(pr => pr.PackageIdentity.Equals(newtonsoftJsonPackageIdentity)).FirstOrDefault();
                // The hand-written constraint is now visible on the package reference.
                Assert.NotNull(newtonsoftJsonPackageReference.AllowedVersions);

                Exception exception = null;
                try
                {
                    // Main Act
                    await nuGetPackageManager.PreviewInstallPackageAsync(msBuildNuGetProject, newtonsoftJsonPackageId, resolutionContext,
                        testNuGetProjectContext, primarySourceRepository, null, token);
                }
                catch (Exception ex)
                {
                    exception = ex;
                }

                Assert.NotNull(exception);
            }
        }

        // Update-all with an allowedVersions range that excludes every available update should
        // produce no project actions.
        [Fact]
        public async Task TestPacManPreviewUpdateWithAllowedVersionsConstraint()
        {
            // Arrange
            var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV2OnlySourceRepositoryProvider();
            using (var testSolutionManager = new TestSolutionManager(true))
            {
                var testSettings = NullSettings.Instance;
                var deleteOnRestartManager = new TestDeleteOnRestartManager();
                var token = CancellationToken.None;
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    testSettings,
                    testSolutionManager,
                    deleteOnRestartManager);
                var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings);
                var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject();
                var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem;
                var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath;
                var newtonsoftJsonPackageId = "newtonsoft.json";
                var newtonsoftJsonPackageIdentity = new PackageIdentity(newtonsoftJsonPackageId, NuGetVersion.Parse("4.5.11"));
                var primarySourceRepository = sourceRepositoryProvider.GetRepositories().Single();
                var resolutionContext = new ResolutionContext(DependencyBehavior.Highest, false, true, VersionConstraints.None);
                var testNuGetProjectContext = new TestNuGetProjectContext();

                // Act
                await nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, newtonsoftJsonPackageIdentity, resolutionContext,
                    testNuGetProjectContext, primarySourceRepository, null, token);
                await nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, new PackageIdentity("Microsoft.Web.Infrastructure", new NuGetVersion("1.0.0.0")), resolutionContext,
                    testNuGetProjectContext, primarySourceRepository, null, token);

                // Assert
                // Check that the packages.config file exists after the installation
                Assert.True(File.Exists(packagesConfigPath));
                // Check the number of packages and packages returned by PackagesConfigProject after the installation
                var packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(2, packagesInPackagesConfig.Count);
                Assert.Equal(newtonsoftJsonPackageIdentity, packagesInPackagesConfig[1].PackageIdentity);
                Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[1].TargetFramework);
                var installedPackages = await msBuildNuGetProject.GetInstalledPackagesAsync(token);
                var newtonsoftJsonPackageReference = installedPackages.Where(pr => pr.PackageIdentity.Equals(newtonsoftJsonPackageIdentity)).FirstOrDefault();
                Assert.Null(newtonsoftJsonPackageReference.AllowedVersions);

                // Rewrite packages.config to constrain Newtonsoft.Json to [4.0,5.0) — a range that
                // excludes all newer available versions.
                const string newPackagesConfig = @"<?xml version='1.0' encoding='utf-8'?>
  <packages>
    <package id='Microsoft.Web.Infrastructure' version='1.0.0.0' targetFramework='net45' />
    <package id='Newtonsoft.Json' version='4.5.11' allowedVersions='[4.0,5.0)' targetFramework='net45' />
  </packages> ";

                File.WriteAllText(packagesConfigPath, newPackagesConfig);

                // Check that the packages.config file exists after the installation
                Assert.True(File.Exists(packagesConfigPath));
                // Check the number of packages and packages returned by PackagesConfigProject after the installation
                packagesInPackagesConfig = (await
                    msBuildNuGetProject.PackagesConfigNuGetProject.FullPath;
                var newtonsoftJsonPackageId = "newtonsoft.json";
                var newtonsoftJsonPackageIdentity = new PackageIdentity(newtonsoftJsonPackageId, NuGetVersion.Parse("4.5.11"));
                var primarySourceRepository = sourceRepositoryProvider.GetRepositories().Single();
                var resolutionContext = new ResolutionContext(DependencyBehavior.Lowest, false, true, VersionConstraints.None);
                var testNuGetProjectContext = new TestNuGetProjectContext();

                // Act
                await nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, newtonsoftJsonPackageIdentity, resolutionContext,
                    testNuGetProjectContext, primarySourceRepository, null, token);
                await nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, new PackageIdentity("Microsoft.Web.Infrastructure", new NuGetVersion("1.0.0.0")), resolutionContext,
                    testNuGetProjectContext, primarySourceRepository, null, token);

                // Assert
                // Check that the packages.config file exists after the installation
                Assert.True(File.Exists(packagesConfigPath));
                // Check the number of packages and packages returned by PackagesConfigProject after the installation
                var packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(2, packagesInPackagesConfig.Count);
                Assert.Equal(newtonsoftJsonPackageIdentity, packagesInPackagesConfig[1].PackageIdentity);
                Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[1].TargetFramework);
                var installedPackages = await msBuildNuGetProject.GetInstalledPackagesAsync(token);
                var newtonsoftJsonPackageReference = installedPackages.Where(pr => pr.PackageIdentity.Equals(newtonsoftJsonPackageIdentity)).FirstOrDefault();
                Assert.Null(newtonsoftJsonPackageReference.AllowedVersions);

                // Rewrite packages.config to constrain Newtonsoft.Json to [4.0,6.0): updates are
                // allowed, but only up to (not including) 6.0.
                const string newPackagesConfig = @"<?xml version='1.0' encoding='utf-8'?>
  <packages>
    <package id='Microsoft.Web.Infrastructure' version='1.0.0.0' targetFramework='net45' />
    <package id='Newtonsoft.Json' version='4.5.11' allowedVersions='[4.0,6.0)' targetFramework='net45' />
  </packages> ";

                File.WriteAllText(packagesConfigPath, newPackagesConfig);

                // Check that the packages.config file exists after the installation
                Assert.True(File.Exists(packagesConfigPath));
                // Check the number of packages and packages returned by PackagesConfigProject after the installation
                packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(2, packagesInPackagesConfig.Count);
                Assert.Equal(newtonsoftJsonPackageIdentity, packagesInPackagesConfig[1].PackageIdentity);
                Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[1].TargetFramework);
                installedPackages = await msBuildNuGetProject.GetInstalledPackagesAsync(token);
                newtonsoftJsonPackageReference = installedPackages.Where(pr => pr.PackageIdentity.Equals(newtonsoftJsonPackageIdentity)).FirstOrDefault();
                Assert.NotNull(newtonsoftJsonPackageReference.AllowedVersions);

                var newtonsoftJsonPackageIdentityAfterUpdate = new PackageIdentity(newtonsoftJsonPackageId, NuGetVersion.Parse("5.0.8"));

                // Main Act
                var nuGetProjectActions = (await nuGetPackageManager.PreviewUpdatePackagesAsync(
                    new List<NuGetProject> { msBuildNuGetProject },
                    resolutionContext,
                    testNuGetProjectContext,
                    sourceRepositoryProvider.GetRepositories(),
                    sourceRepositoryProvider.GetRepositories(),
                    token)).ToList();

                // Microsoft.Web.Infrastructure has no updates. newtonsoft.json has updates, and the
                // highest version allowed by the constraint should be picked.
                // Hence, 4.5.11 will be uninstalled and 5.0.8 will be installed
                Assert.Equal(2, nuGetProjectActions.Count);
                var newtonsoftJsonAction = nuGetProjectActions.Where(a => a.PackageIdentity.Equals(newtonsoftJsonPackageIdentityAfterUpdate)).FirstOrDefault();
                Assert.NotNull(newtonsoftJsonAction);
            }
        }

        // Update-all with no package sources configured should be a no-op, not an error.
        [Fact]
        public async Task TestPacManPreviewUpdateWithNoSource()
        {
            // Arrange
            var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateSourceRepositoryProvider(new List<NuGet.Configuration.PackageSource>());
            using (var testSolutionManager = new TestSolutionManager(true))
            {
                var testSettings = NullSettings.Instance;
                var deleteOnRestartManager = new TestDeleteOnRestartManager();
                var token = CancellationToken.None;
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    testSettings,
                    testSolutionManager,
                    deleteOnRestartManager);
                var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings);
                var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject();
                var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem;
                var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath;
                var newtonsoftJsonPackageId = "newtonsoft.json";
                var newtonsoftJsonPackageIdentity = new PackageIdentity(newtonsoftJsonPackageId, NuGetVersion.Parse("4.5.11"));
                var resolutionContext = new ResolutionContext(DependencyBehavior.Highest, false, true, VersionConstraints.None);
                var testNuGetProjectContext = new TestNuGetProjectContext();

                // Act
                // Update ALL - this should not fail - it should no-op
                var nuGetProjectActions = (await nuGetPackageManager.PreviewUpdatePackagesAsync(
                    new List<NuGetProject> { msBuildNuGetProject },
                    resolutionContext,
                    testNuGetProjectContext,
                    Enumerable.Empty<SourceRepository>(),
                    Enumerable.Empty<SourceRepository>(),
                    token)).ToList();

                // Hence, no nuget project actions to perform
                Assert.Equal(0, nuGetProjectActions.Count);
            }
        }

        // Installing a localized satellite package (Microsoft.AspNet.Razor.ja) should install the
        // runtime package plus the satellite package.
        [Fact]
        public async Task TestPacManInstallAspNetRazorJa()
        {
            // Arrange
            var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider();
            using (var testSolutionManager = new TestSolutionManager(true))
            {
                var testSettings = NullSettings.Instance;
                var deleteOnRestartManager = new TestDeleteOnRestartManager();
                var token = CancellationToken.None;
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    testSettings,
                    testSolutionManager,
                    deleteOnRestartManager);
                var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings);
                var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject();
                var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem;
                var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath;
                var aspnetrazorjaPackageIdentity = new PackageIdentity("Microsoft.AspNet.Razor.ja", new NuGetVersion("3.2.3"));

                // Pre-Assert
                // Check that the packages.config file does not exist
                Assert.False(File.Exists(packagesConfigPath));
                // Check that there are no packages returned by PackagesConfigProject
                var packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(0, packagesInPackagesConfig.Count);
                Assert.Equal(0, msBuildNuGetProjectSystem.References.Count);

                // Act
                await nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, aspnetrazorjaPackageIdentity,
                    new ResolutionContext(), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories().First(), null, token);

                // Assert
                // Check that the packages.config file exists after the installation
                Assert.True(File.Exists(packagesConfigPath));
                // Check the number of packages and packages returned by PackagesConfigProject after the installation
                packagesInPackagesConfig = (await
msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(2, packagesInPackagesConfig.Count); Assert.Equal(aspnetrazorjaPackageIdentity, packagesInPackagesConfig[1].PackageIdentity); Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[1].TargetFramework); } } [Fact] public async Task TestPacManInstallMicrosoftWebInfrastructure1000FromV2() { // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV2OnlySourceRepositoryProvider(); using (var testSolutionManager = new TestSolutionManager(true)) { var testSettings = NullSettings.Instance; var deleteOnRestartManager = new TestDeleteOnRestartManager(); var token = CancellationToken.None; var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, testSettings, testSolutionManager, deleteOnRestartManager); var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings); var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject(); var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem; var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath; var version = new NuGetVersion("1.0.0.0"); var microsoftWebInfrastructurePackageIdentity = new PackageIdentity("Microsoft.Web.Infrastructure", version); // Pre-Assert // Check that the packages.config file does not exist Assert.False(File.Exists(packagesConfigPath)); // Check that there are no packages returned by PackagesConfigProject var packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(0, packagesInPackagesConfig.Count); Assert.Equal(0, msBuildNuGetProjectSystem.References.Count); // Act await nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, microsoftWebInfrastructurePackageIdentity, new ResolutionContext(), new TestNuGetProjectContext(), 
sourceRepositoryProvider.GetRepositories().First(), null, token); // Assert // Check that the packages.config file exists after the installation Assert.True(File.Exists(packagesConfigPath)); // Check the number of packages and packages returned by PackagesConfigProject after the installation packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(1, packagesInPackagesConfig.Count); Assert.Equal(microsoftWebInfrastructurePackageIdentity, packagesInPackagesConfig[0].PackageIdentity); Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[0].TargetFramework); var microsoftWebInfrastructure1000FolderPath = Path.Combine(packagesFolderPath, "Microsoft.Web.Infrastructure.1.0.0.0"); Assert.True(Directory.Exists(microsoftWebInfrastructure1000FolderPath)); } } [Fact] public async Task TestPacManInstallMicrosoftWebInfrastructure1000FromV3() { // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider(); using (var testSolutionManager = new TestSolutionManager(true)) { var testSettings = NullSettings.Instance; var deleteOnRestartManager = new TestDeleteOnRestartManager(); var token = CancellationToken.None; var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, testSettings, testSolutionManager, deleteOnRestartManager); var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings); var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject(); var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem; var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath; var version = new NuGetVersion("1.0.0.0"); var microsoftWebInfrastructurePackageIdentity = new PackageIdentity("Microsoft.Web.Infrastructure", version); // Pre-Assert // Check that the packages.config file does not exist 
Assert.False(File.Exists(packagesConfigPath)); // Check that there are no packages returned by PackagesConfigProject var packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(0, packagesInPackagesConfig.Count); Assert.Equal(0, msBuildNuGetProjectSystem.References.Count); // Act await nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, microsoftWebInfrastructurePackageIdentity, new ResolutionContext(), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories().First(), null, token); // Assert // Check that the packages.config file exists after the installation Assert.True(File.Exists(packagesConfigPath)); // Check the number of packages and packages returned by PackagesConfigProject after the installation packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(1, packagesInPackagesConfig.Count); Assert.Equal(microsoftWebInfrastructurePackageIdentity, packagesInPackagesConfig[0].PackageIdentity); Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[0].TargetFramework); var microsoftWebInfrastructure1000FolderPath = Path.Combine(packagesFolderPath, "Microsoft.Web.Infrastructure.1.0.0.0"); Assert.True(Directory.Exists(microsoftWebInfrastructure1000FolderPath)); } } [Fact] public async Task TestPacManInstallElmah11FromV2() { // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV2OnlySourceRepositoryProvider(); using (var testSolutionManager = new TestSolutionManager(true)) { var testSettings = NullSettings.Instance; var deleteOnRestartManager = new TestDeleteOnRestartManager(); var token = CancellationToken.None; var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, testSettings, testSolutionManager, deleteOnRestartManager); var packagesFolderPath = 
PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings);
                var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject();
                var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem;
                var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath;
                var version = new NuGetVersion("1.1");
                var elmahPackageIdentity = new PackageIdentity("elmah", version);

                // Pre-Assert
                // Check that the packages.config file does not exist
                Assert.False(File.Exists(packagesConfigPath));
                // Check that there are no packages returned by PackagesConfigProject
                var packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(0, packagesInPackagesConfig.Count);
                Assert.Equal(0, msBuildNuGetProjectSystem.References.Count);

                // Act
                await nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, elmahPackageIdentity,
                    new ResolutionContext(), new TestNuGetProjectContext(),
                    sourceRepositoryProvider.GetRepositories().First(), null, token);

                // Assert
                // Check that the packages.config file exists after the installation
                Assert.True(File.Exists(packagesConfigPath));
                // Check the number of packages and packages returned by PackagesConfigProject after the installation
                packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList();
                Assert.Equal(1, packagesInPackagesConfig.Count);
                Assert.Equal(elmahPackageIdentity, packagesInPackagesConfig[0].PackageIdentity);
                Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[0].TargetFramework);

                // FIX: renamed 'microsoftWebInfrastructure1000FolderPath' — a copy-paste
                // leftover from the Microsoft.Web.Infrastructure tests above. This test
                // installs elmah 1.1, so the packages-folder path variable says so.
                var elmahFolderPath = Path.Combine(packagesFolderPath, "elmah.1.1");
                Assert.True(Directory.Exists(elmahFolderPath));
            }
        }

        [Fact]
        public async Task TestPacManInstallElmah11FromV3()
        {
            // Arrange
            var sourceRepositoryProvider =
TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider(); using (var testSolutionManager = new TestSolutionManager(true)) { var testSettings = NullSettings.Instance; var deleteOnRestartManager = new TestDeleteOnRestartManager(); var token = CancellationToken.None; var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, testSettings, testSolutionManager, deleteOnRestartManager); var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings); var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject(); var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem; var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath; var version = new NuGetVersion("1.1"); var elmahPackageIdentity = new PackageIdentity("elmah", version); // Pre-Assert // Check that the packages.config file does not exist Assert.False(File.Exists(packagesConfigPath)); // Check that there are no packages returned by PackagesConfigProject var packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(0, packagesInPackagesConfig.Count); Assert.Equal(0, msBuildNuGetProjectSystem.References.Count); // Act await nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, elmahPackageIdentity, new ResolutionContext(), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories().First(), null, token); // Assert // Check that the packages.config file exists after the installation Assert.True(File.Exists(packagesConfigPath)); // Check the number of packages and packages returned by PackagesConfigProject after the installation packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(1, packagesInPackagesConfig.Count); Assert.Equal(elmahPackageIdentity, 
packagesInPackagesConfig[0].PackageIdentity); Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[0].TargetFramework); var microsoftWebInfrastructure1000FolderPath = Path.Combine(packagesFolderPath, "elmah.1.1"); Assert.True(Directory.Exists(microsoftWebInfrastructure1000FolderPath)); } } [Fact] public async Task TestPacManInstall_SharpDX_DXGI_v263_WithNonReferencesInLibFolder() { // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider(); using (var testSolutionManager = new TestSolutionManager(true)) { var testSettings = NullSettings.Instance; var deleteOnRestartManager = new TestDeleteOnRestartManager(); var token = CancellationToken.None; var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, testSettings, testSolutionManager, deleteOnRestartManager); var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings); var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject(); var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem; var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath; var version = new NuGetVersion("2.6.3"); var sharpDXDXGIv263Package = new PackageIdentity("SharpDX.DXGI", version); // Pre-Assert // Check that the packages.config file does not exist Assert.False(File.Exists(packagesConfigPath)); // Check that there are no packages returned by PackagesConfigProject var packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(0, packagesInPackagesConfig.Count); Assert.Equal(0, msBuildNuGetProjectSystem.References.Count); // Act await nuGetPackageManager.InstallPackageAsync(msBuildNuGetProject, sharpDXDXGIv263Package, new ResolutionContext(), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories().First(), null, token); // 
Assert // Check that the packages.config file exists after the installation Assert.True(File.Exists(packagesConfigPath)); // Check the number of packages and packages returned by PackagesConfigProject after the installation packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(2, packagesInPackagesConfig.Count); Assert.True(packagesInPackagesConfig.Where(p => p.PackageIdentity.Equals(sharpDXDXGIv263Package)).Any()); } } [Fact] public async Task TestPacManInstallPackageUnlistedFromV3() { // Arrange var packages = new List<SourcePackageDependencyInfo> { new SourcePackageDependencyInfo("a", new NuGetVersion(1, 0, 0), new[] { new Packaging.Core.PackageDependency("b", new VersionRange(new NuGetVersion(1, 0, 0))) }, true, null), new SourcePackageDependencyInfo("b", new NuGetVersion(1, 0, 0), new Packaging.Core.PackageDependency[] { }, false, null), new SourcePackageDependencyInfo("b", new NuGetVersion(2, 0, 0), new Packaging.Core.PackageDependency[] { }, false, null), new SourcePackageDependencyInfo("b", new NuGetVersion(3, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), }; var resourceProviders = new List<Lazy<INuGetResourceProvider>>(); resourceProviders.Add(new Lazy<INuGetResourceProvider>(() => new TestDependencyInfoProvider(packages))); resourceProviders.Add(new Lazy<INuGetResourceProvider>(() => new TestMetadataProvider(packages))); var packageSource = new Configuration.PackageSource("http://a"); var packageSourceProvider = new TestPackageSourceProvider(new[] { packageSource }); var sourceRepositoryProvider = new SourceRepositoryProvider(packageSourceProvider, resourceProviders); using (var testSolutionManager = new TestSolutionManager(true)) { var testSettings = NullSettings.Instance; var deleteOnRestartManager = new TestDeleteOnRestartManager(); var token = CancellationToken.None; var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, 
testSettings, testSolutionManager, deleteOnRestartManager); var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings); var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject(); var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem; var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath; var target = "a"; // Pre-Assert // Check that the packages.config file does not exist Assert.False(File.Exists(packagesConfigPath)); // Check that there are no packages returned by PackagesConfigProject var packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(0, packagesInPackagesConfig.Count); Assert.Equal(0, msBuildNuGetProjectSystem.References.Count); // Act var nugetProjectActions = await nuGetPackageManager.PreviewInstallPackageAsync(msBuildNuGetProject, target, new ResolutionContext(), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories().First(), null, token); var result = nugetProjectActions.ToList(); var resultIdentities = result.Select(p => p.PackageIdentity); Assert.True(resultIdentities.Contains(new PackageIdentity("a", new NuGetVersion(1, 0, 0)))); Assert.True(resultIdentities.Contains(new PackageIdentity("b", new NuGetVersion(3, 0, 0)))); // and all the actions are Install foreach (var nugetProjectAction in result) { Assert.Equal(nugetProjectAction.NuGetProjectActionType, NuGetProjectActionType.Install); } } } [Fact] public async Task TestPacManInstallPackageListedFromV3() { // Arrange var packages = new List<SourcePackageDependencyInfo> { new SourcePackageDependencyInfo("a", new NuGetVersion(1, 0, 0), new[] { new Packaging.Core.PackageDependency("b", new VersionRange(new NuGetVersion(1, 0, 0))) }, true, null), new SourcePackageDependencyInfo("a", new NuGetVersion(2, 0, 0), new[] { new Packaging.Core.PackageDependency("b", new 
VersionRange(new NuGetVersion(1, 0, 0))) }, true, null), new SourcePackageDependencyInfo("b", new NuGetVersion(1, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("b", new NuGetVersion(2, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("b", new NuGetVersion(3, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), }; var resourceProviders = new List<Lazy<INuGetResourceProvider>>(); resourceProviders.Add(new Lazy<INuGetResourceProvider>(() => new TestDependencyInfoProvider(packages))); resourceProviders.Add(new Lazy<INuGetResourceProvider>(() => new TestMetadataProvider(packages))); var packageSource = new Configuration.PackageSource("http://a"); var packageSourceProvider = new TestPackageSourceProvider(new[] { packageSource }); var sourceRepositoryProvider = new SourceRepositoryProvider(packageSourceProvider, resourceProviders); using (var testSolutionManager = new TestSolutionManager(true)) { var testSettings = NullSettings.Instance; var deleteOnRestartManager = new TestDeleteOnRestartManager(); var token = CancellationToken.None; var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, testSettings, testSolutionManager, deleteOnRestartManager); var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings); var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject(); var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem; var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath; var target = "a"; // Act var nugetProjectActions = await nuGetPackageManager.PreviewInstallPackageAsync(msBuildNuGetProject, target, new ResolutionContext(), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories().First(), null, token); var result = nugetProjectActions.ToList(); var resultIdentities = result.Select(p => 
p.PackageIdentity); Assert.True(resultIdentities.Contains(new PackageIdentity("a", new NuGetVersion(2, 0, 0)))); Assert.True(resultIdentities.Contains(new PackageIdentity("b", new NuGetVersion(1, 0, 0)))); // and all the actions are Install foreach (var nugetProjectAction in result) { Assert.Equal(nugetProjectAction.NuGetProjectActionType, NuGetProjectActionType.Install); } } } [Fact] public async Task TestPacManInstallPackage571FromV3() { // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider(); using (var testSolutionManager = new TestSolutionManager(true)) { var testSettings = NullSettings.Instance; var deleteOnRestartManager = new TestDeleteOnRestartManager(); var token = CancellationToken.None; var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, testSettings, testSolutionManager, deleteOnRestartManager); var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings); var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject(); var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem; var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath; var target = new PackageIdentity("Umbraco", NuGetVersion.Parse("5.1.0.175")); // Act var nugetProjectActions = await nuGetPackageManager.PreviewInstallPackageAsync(msBuildNuGetProject, target, new ResolutionContext(), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories().First(), null, token); var result = nugetProjectActions.ToList(); var resultIdentities = result.Select(p => p.PackageIdentity); Assert.True(resultIdentities.Contains(new PackageIdentity("Umbraco", new NuGetVersion("5.1.0.175")))); // and all the actions are Install foreach (var nugetProjectAction in result) { Assert.Equal(nugetProjectAction.NuGetProjectActionType, NuGetProjectActionType.Install); } } } [Fact] public async Task 
TestPacManInstallPackageEFFromV3() { // Arrange //var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider(); var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateSourceRepositoryProvider(new[] { TestSourceRepositoryUtility.V3PackageSource, new NuGet.Configuration.PackageSource("https://www.myget.org/F/aspnetvnext/api/v2/"), }); using (var testSolutionManager = new TestSolutionManager(true)) { var testSettings = NullSettings.Instance; var deleteOnRestartManager = new TestDeleteOnRestartManager(); var token = CancellationToken.None; var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, testSettings, testSolutionManager, deleteOnRestartManager); var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings); var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject("TestProject", NuGetFramework.Parse("net452")); var target = new PackageIdentity("EntityFramework", NuGetVersion.Parse("7.0.0-beta4")); // Act var nugetProjectActions = await nuGetPackageManager.PreviewInstallPackageAsync( msBuildNuGetProject, target, new ResolutionContext(DependencyBehavior.Lowest, true, false, VersionConstraints.None), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories().First(), sourceRepositoryProvider.GetRepositories(), token); var result = nugetProjectActions.ToList(); var resultIdentities = result.Select(p => p.PackageIdentity); Assert.True(resultIdentities.Contains(target)); // and all the actions are Install foreach (var nugetProjectAction in result) { Assert.Equal(nugetProjectAction.NuGetProjectActionType, NuGetProjectActionType.Install); } } } [Fact] public async Task TestPacManInstallPackagePrereleaseDependenciesFromV2() { // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV2OnlySourceRepositoryProvider(); using (var testSolutionManager = new TestSolutionManager(true)) { var testSettings = 
NullSettings.Instance; var deleteOnRestartManager = new TestDeleteOnRestartManager(); var token = CancellationToken.None; var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, testSettings, testSolutionManager, deleteOnRestartManager); var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings); var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject(); var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem; var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath; var target = new PackageIdentity("DependencyTestA", NuGetVersion.Parse("1.0.0")); // Act var nugetProjectActions = await nuGetPackageManager.PreviewInstallPackageAsync( msBuildNuGetProject, target, new ResolutionContext(DependencyBehavior.Lowest, false, false, VersionConstraints.None), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories().First(), null, token); var result = nugetProjectActions.ToList(); var resultIdentities = result.Select(p => p.PackageIdentity); Assert.True(resultIdentities.Contains(target)); Assert.True(resultIdentities.Contains(new PackageIdentity("DependencyTestB", NuGetVersion.Parse("1.0.0")))); // and all the actions are Install foreach (var nugetProjectAction in result) { Assert.Equal(nugetProjectAction.NuGetProjectActionType, NuGetProjectActionType.Install); } } } [Fact] public async Task TestPacManInstallPackagePrereleaseDependenciesFromV2IncludePrerelease() { // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV2OnlySourceRepositoryProvider(); using (var testSolutionManager = new TestSolutionManager(true)) { var testSettings = NullSettings.Instance; var deleteOnRestartManager = new TestDeleteOnRestartManager(); var token = CancellationToken.None; var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, testSettings, testSolutionManager, deleteOnRestartManager); var 
packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings);
                var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject();
                var msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem;
                var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath;
                var target = new PackageIdentity("DependencyTestA", NuGetVersion.Parse("1.0.0"));

                // Act
                var nugetProjectActions = await nuGetPackageManager.PreviewInstallPackageAsync(
                    msBuildNuGetProject,
                    target,
                    new ResolutionContext(DependencyBehavior.Lowest, true, false, VersionConstraints.None),
                    new TestNuGetProjectContext(),
                    sourceRepositoryProvider.GetRepositories().First(),
                    null,
                    token);

                var result = nugetProjectActions.ToList();
                var resultIdentities = result.Select(p => p.PackageIdentity);

                Assert.True(resultIdentities.Contains(target));
                // Prerelease was allowed in the ResolutionContext above, so the
                // prerelease version of the dependency is expected to be resolved.
                Assert.True(resultIdentities.Contains(new PackageIdentity("DependencyTestB", NuGetVersion.Parse("1.0.0-a"))));

                // and all the actions are Install
                foreach (var nugetProjectAction in result)
                {
                    // FIX: xUnit's Assert.Equal takes the expected value first; the
                    // arguments were swapped, which garbles the failure message.
                    Assert.Equal(NuGetProjectActionType.Install, nugetProjectAction.NuGetProjectActionType);
                }
            }
        }

        [Fact]
        public async Task TestPacManInstallPackagePrerelease()
        {
            // Arrange
            var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV2OnlySourceRepositoryProvider();

            using (var testSolutionManager = new TestSolutionManager(true))
            {
                var testSettings = NullSettings.Instance;
                var deleteOnRestartManager = new TestDeleteOnRestartManager();
                var token = CancellationToken.None;
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    testSettings,
                    testSolutionManager,
                    deleteOnRestartManager);
                // FIX: removed a stray duplicated 'new NuGetPackageManager(...)' expression
                // statement that constructed a second manager and immediately discarded it.
                var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings);
                var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject();
                var
msBuildNuGetProjectSystem = msBuildNuGetProject.ProjectSystem as TestMSBuildNuGetProjectSystem; var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath; var target = new PackageIdentity("Microsoft.ApplicationInsights.Web", NuGetVersion.Parse("0.16.1-build00418")); // Act var nugetProjectActions = await nuGetPackageManager.PreviewInstallPackageAsync( msBuildNuGetProject, target, new ResolutionContext(DependencyBehavior.Lowest, false, false, VersionConstraints.None), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories().First(), null, token); var result = nugetProjectActions.ToList(); var resultIdentities = result.Select(p => p.PackageIdentity); Assert.True(resultIdentities.Contains(target)); // and all the actions are Install foreach (var nugetProjectAction in result) { Assert.Equal(nugetProjectAction.NuGetProjectActionType, NuGetProjectActionType.Install); } } } [Fact] public async Task TestPacManInstallPackageOverExisting() { // Arrange var fwk46 = NuGetFramework.Parse("net46"); var fwk45 = NuGetFramework.Parse("net45"); var fwk4 = NuGetFramework.Parse("net4"); var installedPackages = new List<NuGet.Packaging.PackageReference> { new NuGet.Packaging.PackageReference(new PackageIdentity("51Degrees.mobi", NuGetVersion.Parse("2.1.15.1")), fwk4, true), new NuGet.Packaging.PackageReference(new PackageIdentity("AspNetMvc", NuGetVersion.Parse("4.0.20710.0")), fwk45, true), new NuGet.Packaging.PackageReference(new PackageIdentity("AttributeRouting", NuGetVersion.Parse("3.5.6")), fwk4, true), new NuGet.Packaging.PackageReference(new PackageIdentity("AttributeRouting.Core", NuGetVersion.Parse("3.5.6")), fwk4, true), new NuGet.Packaging.PackageReference(new PackageIdentity("AttributeRouting.Core.Web", NuGetVersion.Parse("3.5.6")), fwk4, true), new NuGet.Packaging.PackageReference(new PackageIdentity("AutoMapper", NuGetVersion.Parse("3.3.1")), fwk45, true), new NuGet.Packaging.PackageReference(new PackageIdentity("Castle.Core", 
NuGetVersion.Parse("1.1.0")), fwk4, true), new NuGet.Packaging.PackageReference(new PackageIdentity("Castle.DynamicProxy", NuGetVersion.Parse("2.1.0")), fwk4, true), new NuGet.Packaging.PackageReference(new PackageIdentity("Clay", NuGetVersion.Parse("1.0")), fwk4, true), new NuGet.Packaging.PackageReference(new PackageIdentity("colorbox", NuGetVersion.Parse("1.4.29")), fwk45, true), new NuGet.Packaging.PackageReference(new PackageIdentity("elmah", NuGetVersion.Parse("1.2.0.1")), fwk4, true), new NuGet.Packaging.PackageReference(new PackageIdentity("elmah.corelibrary", NuGetVersion.Parse("1.2")), fwk4, true), new NuGet.Packaging.PackageReference(new PackageIdentity("EntityFramework", NuGetVersion.Parse("6.1.3")), fwk45, true), new NuGet.Packaging.PackageReference(new PackageIdentity("fasterflect", NuGetVersion.Parse("2.1.0")), fwk4, true), new NuGet.Packaging.PackageReference(new PackageIdentity("foolproof", NuGetVersion.Parse("0.9.4517")), fwk45, true), new NuGet.Packaging.PackageReference(new PackageIdentity("Glimpse", NuGetVersion.Parse("0.87")), fwk4, true), new NuGet.Packaging.PackageReference(new PackageIdentity("Glimpse.Elmah", NuGetVersion.Parse("0.9.3")), fwk4, true), new NuGet.Packaging.PackageReference(new PackageIdentity("Glimpse.Mvc3", NuGetVersion.Parse("0.87")), fwk4, true), new NuGet.Packaging.PackageReference(new PackageIdentity("jQuery", NuGetVersion.Parse("1.4.1")), fwk45, true), new NuGet.Packaging.PackageReference(new PackageIdentity("knockout.mapper.TypeScript.DefinitelyTyped", NuGetVersion.Parse("0.0.4")), fwk45, true), new NuGet.Packaging.PackageReference(new PackageIdentity("Knockout.Mapping", NuGetVersion.Parse("2.4.0")), fwk45, true), new NuGet.Packaging.PackageReference(new PackageIdentity("knockout.mapping.TypeScript.DefinitelyTyped", NuGetVersion.Parse("0.0.9")), fwk45, true), new NuGet.Packaging.PackageReference(new PackageIdentity("knockout.TypeScript.DefinitelyTyped", NuGetVersion.Parse("0.5.1")), fwk45, true), new 
NuGet.Packaging.PackageReference(new PackageIdentity("Knockout.Validation", NuGetVersion.Parse("1.0.1")), fwk45, true), new NuGet.Packaging.PackageReference(new PackageIdentity("knockoutjs", NuGetVersion.Parse("2.0.0")), fwk45, true), new NuGet.Packaging.PackageReference(new PackageIdentity("LINQtoCSV", NuGetVersion.Parse("1.2.0.0")), fwk4, true), new NuGet.Packaging.PackageReference(new PackageIdentity("log4net", NuGetVersion.Parse("2.0.3")), fwk45, true), new NuGet.Packaging.PackageReference(new PackageIdentity("Microsoft.AspNet.Mvc", NuGetVersion.Parse("4.0.40804.0")), fwk45, true), new NuGet.Packaging.PackageReference(new PackageIdentity("Microsoft.AspNet.Razor", NuGetVersion.Parse("2.0.30506.0")), fwk45, true), new NuGet.Packaging.PackageReference(new PackageIdentity("Microsoft.AspNet.WebPages", NuGetVersion.Parse("2.0.30506.0")), fwk45, true), new NuGet.Packaging.PackageReference(new PackageIdentity("Microsoft.Web.Infrastructure", NuGetVersion.Parse("1.0.0.0")), fwk4, true), new NuGet.Packaging.PackageReference(new PackageIdentity("MiniProfiler", NuGetVersion.Parse("3.1.1.140")), fwk45, true), new NuGet.Packaging.PackageReference(new PackageIdentity("MiniProfiler.EF6", NuGetVersion.Parse("3.0.11")), fwk45, true), new NuGet.Packaging.PackageReference(new PackageIdentity("MiniProfiler.MVC4", NuGetVersion.Parse("3.0.11")), fwk45, true), new NuGet.Packaging.PackageReference(new PackageIdentity("Mvc3CodeTemplatesCSharp", NuGetVersion.Parse("3.0.11214.0")), fwk4, true), new NuGet.Packaging.PackageReference(new PackageIdentity("MvcDiagnostics", NuGetVersion.Parse("3.0.10714.0")), fwk4, true), new NuGet.Packaging.PackageReference(new PackageIdentity("Newtonsoft.Json", NuGetVersion.Parse("6.0.8")), fwk45, true), new NuGet.Packaging.PackageReference(new PackageIdentity("Ninject", NuGetVersion.Parse("3.2.2.0")), fwk45, true), new NuGet.Packaging.PackageReference(new PackageIdentity("Ninject.Web.Common", NuGetVersion.Parse("3.2.3.0")), fwk45, true), new 
NuGet.Packaging.PackageReference(new PackageIdentity("OpenPop.NET", NuGetVersion.Parse("2.0.5.1063")), fwk45, true), new NuGet.Packaging.PackageReference(new PackageIdentity("PreMailer.Net", NuGetVersion.Parse("1.1.2")), fwk4, true), new NuGet.Packaging.PackageReference(new PackageIdentity("Rejuicer", NuGetVersion.Parse("1.3.0")), fwk45, true), new NuGet.Packaging.PackageReference(new PackageIdentity("T4MVCExtensions", NuGetVersion.Parse("3.15.2")), fwk46, true), new NuGet.Packaging.PackageReference(new PackageIdentity("T4MvcJs", NuGetVersion.Parse("1.0.13")), fwk45, true), new NuGet.Packaging.PackageReference(new PackageIdentity("Twia.ReSharper", NuGetVersion.Parse("9.0.0")), fwk45, true), new NuGet.Packaging.PackageReference(new PackageIdentity("valueinjecter", NuGetVersion.Parse("2.3.3")), fwk45, true), new NuGet.Packaging.PackageReference(new PackageIdentity("WebActivator", NuGetVersion.Parse("1.5")), fwk4, true), new NuGet.Packaging.PackageReference(new PackageIdentity("YUICompressor.NET", NuGetVersion.Parse("1.6.0.2")), fwk45, true), }; var nuGetProject = new TestNuGetProject(installedPackages); var target = "t4mvc"; // Act var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV2OnlySourceRepositoryProvider(); var testSettings = NullSettings.Instance; using (var testSolutionManager = new TestSolutionManager(true)) { var deleteOnRestartManager = new TestDeleteOnRestartManager(); var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, testSettings, testSolutionManager, deleteOnRestartManager); var nugetProjectActions = await nuGetPackageManager.PreviewInstallPackageAsync( nuGetProject, new PackageIdentity(target, new NuGetVersion(3, 17, 5)), new ResolutionContext(DependencyBehavior.Lowest, false, false, VersionConstraints.None), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories().First(), null, CancellationToken.None); Assert.True(nugetProjectActions.Select(pa => pa.PackageIdentity.Id).Contains(target, 
StringComparer.OrdinalIgnoreCase)); } } [Fact(Skip = "Test was skipped as part of 475ad399 and is currently broken.")] public async Task TestPacManInstallPackageDowngrade() { // Arrange var fwk46 = NuGetFramework.Parse("net46"); var fwk45 = NuGetFramework.Parse("net45"); var fwk4 = NuGetFramework.Parse("net4"); var installedPackages = new List<NuGet.Packaging.PackageReference> { new NuGet.Packaging.PackageReference(new PackageIdentity("ResolverTestA", NuGetVersion.Parse("3.0.0")), fwk45, true), new NuGet.Packaging.PackageReference(new PackageIdentity("ResolverTestB", NuGetVersion.Parse("3.0.0")), fwk45, true), }; var nuGetProject = new TestNuGetProject(installedPackages); var target = "FixedTestA"; // Act var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV2OnlySourceRepositoryProvider(); var testSettings = NullSettings.Instance; using (var testSolutionManager = new TestSolutionManager(true)) { var deleteOnRestartManager = new TestDeleteOnRestartManager(); var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, testSettings, testSolutionManager, deleteOnRestartManager); var nugetProjectActions = await nuGetPackageManager.PreviewInstallPackageAsync( nuGetProject, target, new ResolutionContext(DependencyBehavior.Lowest, false, false, VersionConstraints.None), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories().First(), null, CancellationToken.None); Assert.True(nugetProjectActions.Select(pa => pa.PackageIdentity.Id).Contains(target, StringComparer.OrdinalIgnoreCase)); } } // [Fact] public async Task TestPacManUpdatePackagePreservePackagesConfigAttributes() { // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider(); using (var testSolutionManager = new TestSolutionManager(true)) { var testSettings = NullSettings.Instance; var token = CancellationToken.None; var deleteOnRestartManager = new TestDeleteOnRestartManager(); var nuGetPackageManager = new 
NuGetPackageManager( sourceRepositoryProvider, testSettings, testSolutionManager, deleteOnRestartManager); var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings); var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject(); var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath; using (var writer = new StreamWriter(packagesConfigPath)) { writer.WriteLine(@"<packages> <package id=""NuGet.Versioning"" version=""1.0.1"" targetFramework=""net45"" allowedVersions=""[1.0.0, 2.0.0]"" developmentDependency=""true"" future=""abc"" /> </packages>"); } var packageIdentity = new PackageIdentity("nuget.versioning", NuGetVersion.Parse("1.0.5")); var packageOld = new PackageIdentity("nuget.versioning", NuGetVersion.Parse("1.0.1")); // Act using (var cacheContext = new SourceCacheContext()) { await nuGetPackageManager.RestorePackageAsync( packageOld, new TestNuGetProjectContext(), new PackageDownloadContext(cacheContext), sourceRepositoryProvider.GetRepositories(), token); var actions = await nuGetPackageManager.PreviewInstallPackageAsync( msBuildNuGetProject, packageIdentity, new ResolutionContext(), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories().First(), null, token); await nuGetPackageManager.InstallPackageAsync( msBuildNuGetProject, packageIdentity, new ResolutionContext(), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories(), sourceRepositoryProvider.GetRepositories(), token); var packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject .GetInstalledPackagesAsync(token)) .ToList(); var packagesConfigXML = XDocument.Load(packagesConfigPath); var entry = packagesConfigXML.Element(XName.Get("packages")).Elements(XName.Get("package")).Single(); // Assert Assert.Equal(2, actions.Count()); Assert.Equal(1, packagesInPackagesConfig.Count); Assert.Equal(packageIdentity, packagesInPackagesConfig[0].PackageIdentity); 
Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[0].TargetFramework); Assert.Equal("[1.0.0, 2.0.0]", entry.Attribute(XName.Get("allowedVersions")).Value); Assert.Equal("true", entry.Attribute(XName.Get("developmentDependency")).Value); Assert.Equal("abc", entry.Attribute(XName.Get("future")).Value); } } } [Fact] public async Task TestPacManUpdatePackagePreservePackagesConfigAttributesMultiplePackages() { // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider(); using (var testSolutionManager = new TestSolutionManager(true)) { var testSettings = NullSettings.Instance; var token = CancellationToken.None; var deleteOnRestartManager = new TestDeleteOnRestartManager(); var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, testSettings, testSolutionManager, deleteOnRestartManager); var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings); var msBuildNuGetProject = testSolutionManager.AddNewMSBuildProject(); var packagesConfigPath = msBuildNuGetProject.PackagesConfigNuGetProject.FullPath; using (var writer = new StreamWriter(packagesConfigPath)) { writer.WriteLine(@"<packages> <package id=""NuGet.Versioning"" version=""1.0.1"" targetFramework=""net45"" allowedVersions=""[1.0.0, 2.0.0]"" developmentDependency=""true"" future=""abc"" /> <package id=""newtonsoft.json"" version=""6.0.8"" targetFramework=""net45"" /> </packages>"); } var packageIdentity = new PackageIdentity("nuget.versioning", NuGetVersion.Parse("1.0.5")); var packageOld = new PackageIdentity("nuget.versioning", NuGetVersion.Parse("1.0.1")); // Act using (var cacheContext = new SourceCacheContext()) { var packageDownloadContext = new PackageDownloadContext(cacheContext); await nuGetPackageManager.RestorePackageAsync( packageOld, new TestNuGetProjectContext(), packageDownloadContext, sourceRepositoryProvider.GetRepositories(), token); await 
nuGetPackageManager.RestorePackageAsync( new PackageIdentity("newtonsoft.json", NuGetVersion.Parse("6.0.8")), new TestNuGetProjectContext(), packageDownloadContext, sourceRepositoryProvider.GetRepositories(), token); var actions = await nuGetPackageManager.PreviewInstallPackageAsync( msBuildNuGetProject, packageIdentity, new ResolutionContext(), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories().First(), null, token); await nuGetPackageManager.InstallPackageAsync( msBuildNuGetProject, packageIdentity, new ResolutionContext(), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories(), sourceRepositoryProvider.GetRepositories(), token); var packagesInPackagesConfig = (await msBuildNuGetProject.PackagesConfigNuGetProject .GetInstalledPackagesAsync(token)) .OrderBy(package => package.PackageIdentity.Id) .ToList(); var packagesConfigXML = XDocument.Load(packagesConfigPath); var entry = packagesConfigXML.Element(XName.Get("packages")) .Elements(XName.Get("package")) .Single(package => package.Attribute(XName.Get("id")).Value .Equals("nuget.versioning", StringComparison.OrdinalIgnoreCase)); // Assert Assert.Equal(2, actions.Count()); Assert.Equal(2, packagesInPackagesConfig.Count); Assert.Equal(packageIdentity, packagesInPackagesConfig[1].PackageIdentity); Assert.Equal(msBuildNuGetProject.ProjectSystem.TargetFramework, packagesInPackagesConfig[1].TargetFramework); Assert.Equal("[1.0.0, 2.0.0]", entry.Attribute(XName.Get("allowedVersions")).Value); Assert.Equal("true", entry.Attribute(XName.Get("developmentDependency")).Value); Assert.Equal("abc", entry.Attribute(XName.Get("future")).Value); } } } [Fact] public async Task TestPacManGetLatestVersion_GatherCache() { // Arrange var packageIdentity = new PackageIdentity("a", new NuGetVersion(1, 0, 0)); var bVersionRange = VersionRange.Parse("[0.5.0, 2.0.0)"); var packages = new List<SourcePackageDependencyInfo> { new SourcePackageDependencyInfo( packageIdentity.Id, 
packageIdentity.Version, new[] { new Packaging.Core.PackageDependency("b", bVersionRange) }, listed: true, source: null), }; var resourceProviders = new List<Lazy<INuGetResourceProvider>>(); resourceProviders.Add(new Lazy<INuGetResourceProvider>(() => new TestDependencyInfoProvider(packages))); resourceProviders.Add(new Lazy<INuGetResourceProvider>(() => new TestMetadataProvider(packages))); var packageSource = new Configuration.PackageSource("http://a"); var packageSourceProvider = new TestPackageSourceProvider(new[] { packageSource }); var sourceRepositoryProvider = new SourceRepositoryProvider(packageSourceProvider, resourceProviders); var resolutionContext = new ResolutionContext(); // Act var latestVersion = await NuGetPackageManager.GetLatestVersionAsync( "a", NuGetFramework.AnyFramework, resolutionContext, sourceRepositoryProvider.GetRepositories().First(), Common.NullLogger.Instance, CancellationToken.None); // Assert var gatherCache = resolutionContext.GatherCache; var gatherCacheResult = gatherCache.GetPackage(packageSource, packageIdentity, NuGetFramework.AnyFramework); Assert.Single(gatherCacheResult.Packages); var packageInfo = gatherCacheResult.Packages.Single(); Assert.Single(packageInfo.Dependencies); var packageDependency = packageInfo.Dependencies.Single(); Assert.Equal("b", packageDependency.Id); Assert.Equal(bVersionRange.ToString(), packageDependency.VersionRange.ToString()); } [Fact] public async Task TestDirectDownloadByPackagesConfig() { // Arrange using (var testFolderPath = TestDirectory.Create()) using (var directDownloadDirectory = TestDirectory.Create()) { // Create a nuget.config file with a test global packages folder var globalPackageFolderPath = Path.Combine(testFolderPath, "GlobalPackagesFolder"); File.WriteAllText( Path.Combine(testFolderPath, "nuget.config"), @"<?xml version=""1.0"" encoding=""utf-8""?> <configuration> <config> <add key=""globalPackagesFolder"" value=""" + globalPackageFolderPath + @""" /> </config > 
</configuration>"); // Create a packages.config var packagesConfigPath = Path.Combine(testFolderPath, "packages.config"); using (var writer = new StreamWriter(packagesConfigPath)) { writer.WriteLine(@"<packages><package id=""Newtonsoft.Json"" version=""6.0.8"" /></packages>"); } var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider(); var settings = new Settings(testFolderPath); var packagesFolderPath = Path.Combine(testFolderPath, "packages"); var token = CancellationToken.None; var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, settings, packagesFolderPath); var packageIdentity = new PackageIdentity("Newtonsoft.Json", NuGetVersion.Parse("6.0.8")); // Act using (var cacheContext = new SourceCacheContext()) { var downloadContext = new PackageDownloadContext( cacheContext, directDownloadDirectory, directDownload: true); await nuGetPackageManager.RestorePackageAsync( packageIdentity, new TestNuGetProjectContext(), downloadContext, sourceRepositoryProvider.GetRepositories(), token); } // Assert // Verify that the package was not cached in the Global Packages Folder var globalPackage = GlobalPackagesFolderUtility.GetPackage(packageIdentity, globalPackageFolderPath); Assert.Null(globalPackage); } } [Fact] public async Task TestPacMan_InstallPackage_BatchEvent_Raised() { using (var packageSource = TestDirectory.Create()) { // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateSourceRepositoryProvider( new List<PackageSource>() { new PackageSource(packageSource.Path) }); using (var testSolutionManager = new TestSolutionManager(true)) { var testSettings = NullSettings.Instance; var token = CancellationToken.None; var deleteOnRestartManager = new TestDeleteOnRestartManager(); var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, testSettings, testSolutionManager, deleteOnRestartManager); var projectA = testSolutionManager.AddNewMSBuildProject("testA"); // Add 
package var target = new PackageIdentity("packageA", NuGetVersion.Parse("1.0.0")); AddToPackagesFolder(target, packageSource); // batch handlers var batchStartIds = new List<string>(); var batchEndIds = new List<string>(); var projectName = string.Empty; // add batch events handler nuGetPackageManager.BatchStart += (o, args) => { batchStartIds.Add(args.Id); projectName = args.Name; }; nuGetPackageManager.BatchEnd += (o, args) => { batchEndIds.Add(args.Id); }; // Act await nuGetPackageManager.InstallPackageAsync(projectA, target, new ResolutionContext(), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories().First(), null, token); // Assert // Check that the packages.config file exists after the installation Assert.True(File.Exists(projectA.PackagesConfigNuGetProject.FullPath)); // Check the number of packages and packages returned by PackagesConfigProject after the installation var packagesInPackagesConfig = (await projectA.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(1, packagesInPackagesConfig.Count); // Check batch events data Assert.True(batchStartIds.Count == 1); Assert.True(batchEndIds.Count == 1); Assert.Equal(batchStartIds[0], batchEndIds[0]); Assert.Equal("testA", projectName); } } } [Fact] public async Task TestPacMan_UpdatePackage_BatchEvent_Raised() { // Arrange using (var packageSource = TestDirectory.Create()) { // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateSourceRepositoryProvider( new List<PackageSource>() { new PackageSource(packageSource.Path) }); using (var testSolutionManager = new TestSolutionManager(true)) { var testSettings = NullSettings.Instance; var token = CancellationToken.None; var deleteOnRestartManager = new TestDeleteOnRestartManager(); var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, testSettings, testSolutionManager, deleteOnRestartManager); var projectA = testSolutionManager.AddNewMSBuildProject("testA"); // Add 
package var target = new PackageIdentity("packageA", NuGetVersion.Parse("1.0.0")); AddToPackagesFolder(target, packageSource); // batch handlers var batchStartIds = new List<string>(); var batchEndIds = new List<string>(); // add batch events handler nuGetPackageManager.BatchStart += (o, args) => { batchStartIds.Add(args.Id); }; nuGetPackageManager.BatchEnd += (o, args) => { batchEndIds.Add(args.Id); }; // Act await nuGetPackageManager.InstallPackageAsync(projectA, target, new ResolutionContext(), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories().First(), null, token); // Assert // Check that the packages.config file exists after the installation Assert.True(File.Exists(projectA.PackagesConfigNuGetProject.FullPath)); // Check the number of packages and packages returned by PackagesConfigProject after the installation var packagesInPackagesConfig = (await projectA.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(1, packagesInPackagesConfig.Count); // Check batch events data Assert.True(batchStartIds.Count == 1); Assert.True(batchEndIds.Count == 1); Assert.Equal(batchStartIds[0], batchEndIds[0]); // Update var updatePackage = new PackageIdentity("packageA", NuGetVersion.Parse("2.0.0")); AddToPackagesFolder(updatePackage, packageSource); // Act await nuGetPackageManager.InstallPackageAsync(projectA, updatePackage, new ResolutionContext(), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories().First(), null, token); // Check the number of packages and packages returned by PackagesConfigProject after the installation packagesInPackagesConfig = (await projectA.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(1, packagesInPackagesConfig.Count); // Check batch events data Assert.True(batchStartIds.Count == 2); Assert.True(batchEndIds.Count == 2); Assert.Equal(batchStartIds[1], batchEndIds[1]); Assert.NotEqual(batchStartIds[0], batchStartIds[1]); } } } [Fact] 
public async Task TestPacMan_UninstallPackage_BatchEvent_Raised() { // Arrange using (var packageSource = TestDirectory.Create()) { // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateSourceRepositoryProvider( new List<Configuration.PackageSource>() { new Configuration.PackageSource(packageSource.Path) }); using (var testSolutionManager = new TestSolutionManager(true)) { var testSettings = NullSettings.Instance; var token = CancellationToken.None; var deleteOnRestartManager = new TestDeleteOnRestartManager(); var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, testSettings, testSolutionManager, deleteOnRestartManager); var projectA = testSolutionManager.AddNewMSBuildProject("testA"); // Add package var target = new PackageIdentity("packageA", NuGetVersion.Parse("1.0.0")); AddToPackagesFolder(target, packageSource); // batch handlers var batchStartIds = new List<string>(); var batchEndIds = new List<string>(); // add batch events handler nuGetPackageManager.BatchStart += (o, args) => { batchStartIds.Add(args.Id); }; nuGetPackageManager.BatchEnd += (o, args) => { batchEndIds.Add(args.Id); }; // Act await nuGetPackageManager.InstallPackageAsync(projectA, target, new ResolutionContext(), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories().First(), null, token); // Main Act var uninstallationContext = new UninstallationContext(); await nuGetPackageManager.UninstallPackageAsync(projectA, target.Id, uninstallationContext, new TestNuGetProjectContext(), token); // Assert // Check batch events data Assert.True(batchStartIds.Count == 2); Assert.True(batchEndIds.Count == 2); Assert.Equal(batchStartIds[0], batchEndIds[0]); Assert.Equal(batchStartIds[1], batchEndIds[1]); Assert.NotEqual(batchStartIds[0], batchStartIds[1]); } } } [Fact] public async Task TestPacMan_ExecuteMultipleNugetActions_BatchEvent_Raised() { // Arrange using (var packageSource = TestDirectory.Create()) { // Arrange var 
sourceRepositoryProvider = TestSourceRepositoryUtility.CreateSourceRepositoryProvider( new List<Configuration.PackageSource>() { new Configuration.PackageSource(packageSource.Path) }); using (var testSolutionManager = new TestSolutionManager(true)) { var actions = new List<NuGetProjectAction>(); var testSettings = NullSettings.Instance; var token = CancellationToken.None; var deleteOnRestartManager = new TestDeleteOnRestartManager(); var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, testSettings, testSolutionManager, deleteOnRestartManager); var projectA = testSolutionManager.AddNewMSBuildProject("testA"); // Add package var packageA1 = new PackageIdentity("packageA", NuGetVersion.Parse("1.0.0")); var packageA2 = new PackageIdentity("packageA", NuGetVersion.Parse("2.0.0")); var packageB1 = new PackageIdentity("packageB", NuGetVersion.Parse("1.0.0")); AddToPackagesFolder(packageA1, packageSource); AddToPackagesFolder(packageA2, packageSource); AddToPackagesFolder(packageB1, packageSource); // batch handlers var batchStartIds = new List<string>(); var batchEndIds = new List<string>(); var projectName = string.Empty; await nuGetPackageManager.InstallPackageAsync(projectA, packageA1, new ResolutionContext(), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories().First(), null, token); // add batch events handler nuGetPackageManager.BatchStart += (o, args) => { batchStartIds.Add(args.Id); projectName = args.Name; }; nuGetPackageManager.BatchEnd += (o, args) => { batchEndIds.Add(args.Id); }; // nuget actions actions.Add(NuGetProjectAction.CreateInstallProjectAction(packageA2, sourceRepositoryProvider.GetRepositories().First(), projectA)); actions.Add(NuGetProjectAction.CreateUninstallProjectAction(packageB1, projectA)); // Main Act await nuGetPackageManager.ExecuteNuGetProjectActionsAsync(projectA, actions, new TestNuGetProjectContext(), NullSourceCacheContext.Instance, token); //Assert // Check batch events data 
Assert.True(batchStartIds.Count == 1); Assert.True(batchEndIds.Count == 1); Assert.Equal(batchStartIds[0], batchEndIds[0]); Assert.Equal("testA", projectName); } } } [Fact] public async Task TestPacMan_InstallPackagesInMultipleProjects_BatchEvent_Raised() { // Arrange using (var packageSource = TestDirectory.Create()) { // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateSourceRepositoryProvider( new List<Configuration.PackageSource>() { new Configuration.PackageSource(packageSource.Path) }); using (var testSolutionManager = new TestSolutionManager(true)) { var testSettings = NullSettings.Instance; var token = CancellationToken.None; var deleteOnRestartManager = new TestDeleteOnRestartManager(); var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, testSettings, testSolutionManager, deleteOnRestartManager); var projectA = testSolutionManager.AddNewMSBuildProject("testA"); var projectB = testSolutionManager.AddNewMSBuildProject("testB"); // Add package var target = new PackageIdentity("packageA", NuGetVersion.Parse("1.0.0")); AddToPackagesFolder(target, packageSource); // batch handlers var batchStartIds = new List<string>(); var batchEndIds = new List<string>(); var projectNames = new List<string>(); // add batch events handler nuGetPackageManager.BatchStart += (o, args) => { batchStartIds.Add(args.Id); projectNames.Add(args.Name); }; nuGetPackageManager.BatchEnd += (o, args) => { batchEndIds.Add(args.Id); }; // Act await nuGetPackageManager.InstallPackageAsync(projectA, target, new ResolutionContext(), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories().First(), null, token); await nuGetPackageManager.InstallPackageAsync(projectB, target, new ResolutionContext(), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories().First(), null, token); // Assert Project1 // Check that the packages.config file exists after the installation 
Assert.True(File.Exists(projectA.PackagesConfigNuGetProject.FullPath)); Assert.True(File.Exists(projectB.PackagesConfigNuGetProject.FullPath)); // Check the number of packages and packages returned by PackagesConfigProject after the installation var packagesInPackagesConfig = (await projectA.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(1, packagesInPackagesConfig.Count); var packagesInPackagesConfigB = (await projectB.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(1, packagesInPackagesConfigB.Count); // Check batch events data Assert.True(batchStartIds.Count == 2); Assert.True(batchEndIds.Count == 2); Assert.Equal(batchStartIds[0], batchEndIds[0]); Assert.Equal(batchStartIds[1], batchEndIds[1]); Assert.NotEqual(batchStartIds[0], batchStartIds[1]); Assert.True(projectNames.Count == 2); Assert.Equal("testA", projectNames[0]); Assert.Equal("testB", projectNames[1]); } } } [Fact] public async Task TestPacMan_ExecuteNugetActions_NoOP_BatchEvent() { // Arrange using (var packageSource = TestDirectory.Create()) { // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateSourceRepositoryProvider( new List<Configuration.PackageSource>() { new Configuration.PackageSource(packageSource.Path) }); using (var testSolutionManager = new TestSolutionManager(true)) { var testSettings = NullSettings.Instance; var token = CancellationToken.None; var deleteOnRestartManager = new TestDeleteOnRestartManager(); var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, testSettings, testSolutionManager, deleteOnRestartManager); var projectA = testSolutionManager.AddNewMSBuildProject("testA"); // batch handlers var batchStartIds = new List<string>(); var batchEndIds = new List<string>(); // add batch events handler nuGetPackageManager.BatchStart += (o, args) => { batchStartIds.Add(args.Id); }; nuGetPackageManager.BatchEnd += (o, args) => { batchEndIds.Add(args.Id); }; // Main Act 
await nuGetPackageManager.ExecuteNuGetProjectActionsAsync(projectA, new List<NuGetProjectAction>(), new TestNuGetProjectContext(), NullSourceCacheContext.Instance, token); // Check that the packages.config file exists after the installation Assert.False(File.Exists(projectA.PackagesConfigNuGetProject.FullPath)); // Check that there are no packages returned by PackagesConfigProject var packagesInPackagesConfig = (await projectA.PackagesConfigNuGetProject.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(0, packagesInPackagesConfig.Count); // Check batch events data Assert.True(batchStartIds.Count == 1); Assert.True(batchEndIds.Count == 1); Assert.Equal(batchStartIds[0], batchEndIds[0]); } } } [Fact] public async Task TestPacMan_InstallPackage_Fail_BatchEvent_Raised() { // Arrange using (var packageSource = TestDirectory.Create()) { // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateSourceRepositoryProvider( new List<Configuration.PackageSource>() { new Configuration.PackageSource(packageSource.Path) }); using (var testSolutionManager = new TestSolutionManager(true)) { var testSettings = NullSettings.Instance; var token = CancellationToken.None; var deleteOnRestartManager = new TestDeleteOnRestartManager(); var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, testSettings, testSolutionManager, deleteOnRestartManager); var projectA = testSolutionManager.AddNewMSBuildProject("testA", NuGetFramework.Parse("netcoreapp10")); // Add package var target = new PackageIdentity("packageA", NuGetVersion.Parse("1.0.0")); AddToPackagesFolder(target, packageSource); // batch handlers var batchStartIds = new List<string>(); var batchEndIds = new List<string>(); // add batch events handler nuGetPackageManager.BatchStart += (o, args) => { batchStartIds.Add(args.Id); }; nuGetPackageManager.BatchEnd += (o, args) => { batchEndIds.Add(args.Id); }; Exception exception = null; try { // Act await 
nuGetPackageManager.InstallPackageAsync(projectA, target, new ResolutionContext(), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories().First(), null, token); } catch (Exception ex) { exception = ex; } // Assert Assert.NotNull(exception); // Check batch events data Assert.True(batchStartIds.Count == 1); Assert.True(batchEndIds.Count == 1); Assert.Equal(batchStartIds[0], batchEndIds[0]); } } } [Fact] public async Task TestPacMan_DownloadPackageTask_Fail_BatchEvent_NotRaised() { // Arrange using (var packageSource = TestDirectory.Create()) { // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateSourceRepositoryProvider( new List<Configuration.PackageSource>() { new Configuration.PackageSource(packageSource.Path) }); using (var testSolutionManager = new TestSolutionManager(true)) { var testSettings = NullSettings.Instance; var token = CancellationToken.None; var deleteOnRestartManager = new TestDeleteOnRestartManager(); var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, testSettings, testSolutionManager, deleteOnRestartManager); var projectA = testSolutionManager.AddNewMSBuildProject("testA"); // Add package var target = new PackageIdentity("packageA", NuGetVersion.Parse("1.0.0")); AddToPackagesFolder(target, packageSource); var projectActions = new List<NuGetProjectAction>(); projectActions.Add( NuGetProjectAction.CreateInstallProjectAction(target, null, projectA)); // batch handlers var batchStartIds = new List<string>(); var batchEndIds = new List<string>(); // add batch events handler nuGetPackageManager.BatchStart += (o, args) => { batchStartIds.Add(args.Id); }; nuGetPackageManager.BatchEnd += (o, args) => { batchEndIds.Add(args.Id); }; Exception exception = null; try { // Act await nuGetPackageManager.ExecuteNuGetProjectActionsAsync(projectA, projectActions, new TestNuGetProjectContext(), NullSourceCacheContext.Instance, token); } catch (Exception ex) { exception = ex; } // Assert 
Assert.NotNull(exception);
                    // Check batch events data: download-task failure happens before the batch
                    // is opened, so neither BatchStart nor BatchEnd may have fired.
                    Assert.True(batchStartIds.Count == 0);
                    Assert.True(batchEndIds.Count == 0);
                }
            }
        }

        /// <summary>
        /// When the package download *result* fails (the source is valid but the package id does
        /// not exist), the batch has already been opened — so BatchStart and BatchEnd must each
        /// fire exactly once, with matching ids, even though the action throws.
        /// </summary>
        [Fact]
        public async Task TestPacMan_DownloadPackageResult_Fail_BatchEvent_Raised()
        {
            // Arrange
            using (var packageSource = TestDirectory.Create())
            {
                // Arrange
                var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateSourceRepositoryProvider(
                    new List<Configuration.PackageSource>()
                    {
                        new Configuration.PackageSource(packageSource.Path)
                    });

                using (var testSolutionManager = new TestSolutionManager(true))
                {
                    var testSettings = NullSettings.Instance;
                    var token = CancellationToken.None;
                    var deleteOnRestartManager = new TestDeleteOnRestartManager();
                    var nuGetPackageManager = new NuGetPackageManager(
                        sourceRepositoryProvider,
                        testSettings,
                        testSolutionManager,
                        deleteOnRestartManager);
                    var projectA = testSolutionManager.AddNewMSBuildProject("testA");

                    // The package id deliberately does not exist in the source, forcing the
                    // download result (not the download task creation) to fail.
                    var projectActions = new List<NuGetProjectAction>();
                    projectActions.Add(
                        NuGetProjectAction.CreateInstallProjectAction(
                            new PackageIdentity("inValidPackageA", new NuGetVersion("1.0.0")),
                            sourceRepositoryProvider.GetRepositories().First(),
                            projectA));

                    // batch handlers
                    var batchStartIds = new List<string>();
                    var batchEndIds = new List<string>();

                    // add batch events handler
                    nuGetPackageManager.BatchStart += (o, args) => { batchStartIds.Add(args.Id); };
                    nuGetPackageManager.BatchEnd += (o, args) => { batchEndIds.Add(args.Id); };

                    Exception exception = null;
                    try
                    {
                        // Act
                        await nuGetPackageManager.ExecuteNuGetProjectActionsAsync(projectA, projectActions,
                            new TestNuGetProjectContext(), NullSourceCacheContext.Instance, token);
                    }
                    catch (Exception ex)
                    {
                        exception = ex;
                    }

                    // Assert
                    Assert.NotNull(exception);
                    // Check batch events data: exactly one start/end pair, with matching ids.
                    Assert.True(batchStartIds.Count == 1);
                    Assert.True(batchEndIds.Count == 1);
                    Assert.Equal(batchStartIds[0], batchEndIds[0]);
                }
            }
        }

        /// <summary>
        /// Build-integrated (project.json/PackageReference) installs do not go through the
        /// packages.config batch pipeline, so no batch events should be raised at all.
        /// </summary>
        [Fact]
        public async Task TestPacMan_InstallPackage_BuildIntegratedProject_BatchEvent_NotRaised()
        {
            // Arrange
            var sourceRepositoryProvider
= TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider(); using (var settingsdir = TestDirectory.Create()) using (var testSolutionManager = new TestSolutionManager(true)) { var Settings = new Settings(settingsdir); foreach (var source in sourceRepositoryProvider.GetRepositories()) { Settings.AddOrUpdate(ConfigurationConstants.PackageSources, source.PackageSource.AsSourceItem()); } var token = CancellationToken.None; var deleteOnRestartManager = new TestDeleteOnRestartManager(); var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, Settings, testSolutionManager, deleteOnRestartManager); var installationCompatibility = new Mock<IInstallationCompatibility>(); nuGetPackageManager.InstallationCompatibility = installationCompatibility.Object; var buildIntegratedProject = testSolutionManager.AddBuildIntegratedProject(); var packageIdentity = _packageWithDependents[0]; // batch handlers var batchStartIds = new List<string>(); var batchEndIds = new List<string>(); // add batch events handler nuGetPackageManager.BatchStart += (o, args) => { batchStartIds.Add(args.Id); }; nuGetPackageManager.BatchEnd += (o, args) => { batchEndIds.Add(args.Id); }; // Act await nuGetPackageManager.InstallPackageAsync(buildIntegratedProject, packageIdentity, new ResolutionContext(), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories().First(), null, token); // Assert // Check batch events data Assert.True(batchStartIds.Count == 0); Assert.True(batchEndIds.Count == 0); } } [Fact] public async Task TestPacMan_PreviewUpdatePackage_DeepDependencies() { // Arrange // Set up Package Dependencies var dependencies = new List<PackageDependency>(); for (var j = 1; j < 3; j++) { for (var i = 2; i <= 30; i++) { dependencies.Add(new PackageDependency($"Package{i}", new VersionRange(new NuGetVersion(j, 0, 0)))); } } // Set up Package Source var packages = new List<SourcePackageDependencyInfo>(); var next = 1; for (var i = 1; i < 3; i++) { for (var j = 1; 
j < 30; j++) { next = j + 1; packages.Add(new SourcePackageDependencyInfo($"Package{j}", new NuGetVersion(i, 0, 0), dependencies.Where( dep => dep.Id.CompareTo($"Package{j}") > 0 && dep.VersionRange.MinVersion.Equals(new NuGetVersion(i, 0, 0))), true, null)); } packages.Add(new SourcePackageDependencyInfo($"Package{next}", new NuGetVersion(i, 0, 0), new PackageDependency[] { }, true, null)); } var sourceRepositoryProvider = CreateSource(packages); // Set up NuGetProject var fwk45 = NuGetFramework.Parse("net45"); var installedPackages = new List<PackageReference>(); for (var i = 1; i <= 30; i++) { installedPackages.Add(new PackageReference( new PackageIdentity($"Package{i}", new NuGetVersion(1, 0, 0)), fwk45, true)); } var nuGetProject = new TestNuGetProject(installedPackages); // Create Package Manager using (var solutionManager = new TestSolutionManager(true)) { var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, NullSettings.Instance, solutionManager, new TestDeleteOnRestartManager()); // Main Act var targets = new List<PackageIdentity> { new PackageIdentity("Package1", new NuGetVersion(2, 0, 0)), new PackageIdentity("Package2", new NuGetVersion(2, 0, 0)), new PackageIdentity("Package3", new NuGetVersion(2, 0, 0)), new PackageIdentity("Package4", new NuGetVersion(2, 0, 0)), new PackageIdentity("Package5", new NuGetVersion(2, 0, 0)), new PackageIdentity("Package6", new NuGetVersion(2, 0, 0)), new PackageIdentity("Package7", new NuGetVersion(2, 0, 0)), new PackageIdentity("Package8", new NuGetVersion(2, 0, 0)), }; var result = await nuGetPackageManager.PreviewUpdatePackagesAsync( targets, new[] { nuGetProject }, new ResolutionContext(), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories(), sourceRepositoryProvider.GetRepositories(), CancellationToken.None); // Assert var resulting = result.Select(a => Tuple.Create(a.PackageIdentity, a.NuGetProjectActionType)).ToArray(); var expected = new List<Tuple<PackageIdentity, 
NuGetProjectActionType>>();
                for (var i = 1; i <= 30; i++)
                {
                    // Every one of the 30 installed packages is expected to move 1.0.0 -> 2.0.0.
                    Expected(expected, $"Package{i}", new NuGetVersion(1, 0, 0), new NuGetVersion(2, 0, 0));
                }

                Assert.True(Compare(resulting, expected));
            }
        }

        /// <summary>
        /// Executing install actions that target two of three build-integrated projects must
        /// update only those two projects' installed packages; the third stays untouched.
        /// </summary>
        // FIX(review): this method had no [Fact] attribute, so xUnit never discovered or ran it —
        // the test was silently dead. Restored the attribute.
        [Fact]
        public async Task TestPacMan_ExecuteNuGetProjectActionsAsync_MultipleBuildIntegratedProjects()
        {
            // Arrange
            var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider();
            using (var testSolutionManager = new TestSolutionManager(true))
            {
                var testSettings = NullSettings.Instance;
                var token = CancellationToken.None;
                var deleteOnRestartManager = new TestDeleteOnRestartManager();
                var nuGetPackageManager = new NuGetPackageManager(
                    sourceRepositoryProvider,
                    testSettings,
                    testSolutionManager,
                    deleteOnRestartManager);
                var installationCompatibility = new Mock<IInstallationCompatibility>();
                nuGetPackageManager.InstallationCompatibility = installationCompatibility.Object;

                var buildIntegratedProjectA = testSolutionManager.AddBuildIntegratedProject("projectA") as BuildIntegratedNuGetProject;
                var buildIntegratedProjectB = testSolutionManager.AddBuildIntegratedProject("projectB") as BuildIntegratedNuGetProject;
                // projectC intentionally receives no actions — it is the control project.
                var buildIntegratedProjectC = testSolutionManager.AddBuildIntegratedProject("projectC") as BuildIntegratedNuGetProject;

                var packageIdentity = _packageWithDependents[0];

                var projectActions = new List<NuGetProjectAction>();
                projectActions.Add(
                    NuGetProjectAction.CreateInstallProjectAction(
                        packageIdentity,
                        sourceRepositoryProvider.GetRepositories().First(),
                        buildIntegratedProjectA));
                projectActions.Add(
                    NuGetProjectAction.CreateInstallProjectAction(
                        packageIdentity,
                        sourceRepositoryProvider.GetRepositories().First(),
                        buildIntegratedProjectB));

                // Act
                await nuGetPackageManager.ExecuteNuGetProjectActionsAsync(
                    new List<NuGetProject>() { buildIntegratedProjectA, buildIntegratedProjectB },
                    projectActions,
                    new TestNuGetProjectContext(),
                    NullSourceCacheContext.Instance,
                    token);

                // Assert
                var projectAPackages = (await
buildIntegratedProjectA.GetInstalledPackagesAsync(token)).ToList(); var projectBPackages = (await buildIntegratedProjectB.GetInstalledPackagesAsync(token)).ToList(); var projectCPackages = (await buildIntegratedProjectC.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(2, projectAPackages.Count); Assert.Equal(2, projectBPackages.Count); Assert.Equal(1, projectCPackages.Count); } } [Fact] public async Task TestPacMan_ExecuteNuGetProjectActionsAsync_MixedProjects() { // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider(); using (var settingsdir = TestDirectory.Create()) using (var testSolutionManager = new TestSolutionManager(true)) { var Settings = new Settings(settingsdir); foreach (var source in sourceRepositoryProvider.GetRepositories()) { Settings.AddOrUpdate(ConfigurationConstants.PackageSources, source.PackageSource.AsSourceItem()); } var token = CancellationToken.None; var deleteOnRestartManager = new TestDeleteOnRestartManager(); var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, Settings, testSolutionManager, deleteOnRestartManager); var installationCompatibility = new Mock<IInstallationCompatibility>(); nuGetPackageManager.InstallationCompatibility = installationCompatibility.Object; var projectA = testSolutionManager.AddBuildIntegratedProject("projectA") as BuildIntegratedNuGetProject; var projectB = testSolutionManager.AddNewMSBuildProject("projectB"); var projectC = testSolutionManager.AddBuildIntegratedProject("projectC") as BuildIntegratedNuGetProject; var packageIdentity = _packageWithDependents[0]; var projectActions = new List<NuGetProjectAction>(); projectActions.Add( NuGetProjectAction.CreateInstallProjectAction( packageIdentity, sourceRepositoryProvider.GetRepositories().First(), projectA)); projectActions.Add( NuGetProjectAction.CreateInstallProjectAction( packageIdentity, sourceRepositoryProvider.GetRepositories().First(), projectB)); projectActions.Add( 
NuGetProjectAction.CreateInstallProjectAction( packageIdentity, sourceRepositoryProvider.GetRepositories().First(), projectC)); // Act await nuGetPackageManager.ExecuteNuGetProjectActionsAsync( new List<NuGetProject>() { projectA, projectB, projectC }, projectActions, new TestNuGetProjectContext(), NullSourceCacheContext.Instance, token); // Assert var projectAPackages = (await projectA.GetInstalledPackagesAsync(token)).ToList(); var projectBPackages = (await projectB.GetInstalledPackagesAsync(token)).ToList(); var projectCPackages = (await projectC.GetInstalledPackagesAsync(token)).ToList(); Assert.Equal(2, projectAPackages.Count); Assert.Equal(1, projectBPackages.Count); Assert.Equal(2, projectCPackages.Count); } } [Fact] public async Task TestPacMan_PreviewUpdatePackage_IgnoreDependency() { // Arrange // Set up Package Source var packages = new List<SourcePackageDependencyInfo> { new SourcePackageDependencyInfo("a", new NuGetVersion(1, 0, 0), new[] { new Packaging.Core.PackageDependency("b", new VersionRange(new NuGetVersion(1, 0, 0))) }, true, null), new SourcePackageDependencyInfo("a", new NuGetVersion(2, 0, 0), new[] { new Packaging.Core.PackageDependency("b", new VersionRange(new NuGetVersion(2, 0, 0))), new Packaging.Core.PackageDependency("c", new VersionRange(new NuGetVersion(1, 0, 0)))}, true, null), new SourcePackageDependencyInfo("b", new NuGetVersion(1, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null) }; var sourceRepositoryProvider = CreateSource(packages); // Set up NuGetProject var fwk45 = NuGetFramework.Parse("net45"); var installedPackage1 = new PackageIdentity("a", new NuGetVersion(1, 0, 0)); var installedPackage2 = new PackageIdentity("b", new NuGetVersion(1, 0, 0)); var installedPackages = new List<NuGet.Packaging.PackageReference> { new NuGet.Packaging.PackageReference(installedPackage1, fwk45, true), new NuGet.Packaging.PackageReference(installedPackage2, fwk45, true) }; var nuGetProject = new 
TestNuGetProject(installedPackages); // Create Package Manager using (var solutionManager = new TestSolutionManager(true)) { var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, NullSettings.Instance, solutionManager, new TestDeleteOnRestartManager()); // Main Act var targetPackage = new PackageIdentity("a", new NuGetVersion(2, 0, 0)); var result = (await nuGetPackageManager.PreviewUpdatePackagesAsync( new List<PackageIdentity> { targetPackage }, new List<NuGetProject> { nuGetProject }, new ResolutionContext(DependencyBehavior.Ignore, false, true, VersionConstraints.None), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories(), sourceRepositoryProvider.GetRepositories(), CancellationToken.None)).ToList(); // Assert Assert.Equal(2, result.Count); Assert.True(installedPackage1.Equals(result[0].PackageIdentity)); Assert.Equal(NuGetProjectActionType.Uninstall, result[0].NuGetProjectActionType); Assert.True(targetPackage.Equals(result[1].PackageIdentity)); Assert.Equal(NuGetProjectActionType.Install, result[1].NuGetProjectActionType); } } [Fact] public async Task TestPacMan_PreviewInstallPackage_PackagesConfig_RaiseTelemetryEvents() { // Arrange // Set up Package Source var packages = new List<SourcePackageDependencyInfo> { new SourcePackageDependencyInfo("a", new NuGetVersion(1, 0, 0), new[] { new Packaging.Core.PackageDependency("b", new VersionRange(new NuGetVersion(1, 0, 0))) }, true, null), new SourcePackageDependencyInfo("b", new NuGetVersion(1, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null) }; var sourceRepositoryProvider = CreateSource(packages); // set up telemetry service var telemetrySession = new Mock<ITelemetrySession>(); var telemetryEvents = new ConcurrentQueue<TelemetryEvent>(); telemetrySession .Setup(x => x.PostEvent(It.IsAny<TelemetryEvent>())) .Callback<TelemetryEvent>(x => telemetryEvents.Enqueue(x)); var nugetProjectContext = new TestNuGetProjectContext(); var telemetryService = new 
NuGetVSTelemetryService(telemetrySession.Object); TelemetryActivity.NuGetTelemetryService = telemetryService; // Create Package Manager using (var solutionManager = new TestSolutionManager(true)) { var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, NullSettings.Instance, solutionManager, new TestDeleteOnRestartManager()); var nugetProject = solutionManager.AddNewMSBuildProject(); // Main Act var target = new PackageIdentity("a", new NuGetVersion(1, 0, 0)); await nuGetPackageManager.PreviewInstallPackageAsync( nugetProject, target, new ResolutionContext(), nugetProjectContext, sourceRepositoryProvider.GetRepositories(), sourceRepositoryProvider.GetRepositories(), CancellationToken.None); // Assert Assert.Equal(3, telemetryEvents.Count); VerifyPreviewActionsTelemetryEvents_PackagesConfig(telemetryEvents.Select(p => (string)p["SubStepName"])); } } [Fact] public async Task TestPacMan_PreviewInstallPackage_BuildIntegrated_RaiseTelemetryEvents() { // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider(); // set up telemetry service var telemetrySession = new Mock<ITelemetrySession>(); var telemetryEvents = new ConcurrentQueue<TelemetryEvent>(); telemetrySession .Setup(x => x.PostEvent(It.IsAny<TelemetryEvent>())) .Callback<TelemetryEvent>(x => telemetryEvents.Enqueue(x)); var nugetProjectContext = new TestNuGetProjectContext(); var telemetryService = new NuGetVSTelemetryService(telemetrySession.Object); TelemetryActivity.NuGetTelemetryService = telemetryService; // Create Package Manager using (var solutionManager = new TestSolutionManager(true)) { var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, NullSettings.Instance, solutionManager, new TestDeleteOnRestartManager()); var buildIntegratedProject = solutionManager.AddBuildIntegratedProject(); // Main Act var target = _packageWithDependents[0]; await nuGetPackageManager.PreviewInstallPackageAsync( 
buildIntegratedProject, target, new ResolutionContext(), nugetProjectContext, sourceRepositoryProvider.GetRepositories(), sourceRepositoryProvider.GetRepositories(), CancellationToken.None); // Assert Assert.Equal(17, telemetryEvents.Count); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "ProjectRestoreInformation").Count()); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "GenerateRestoreGraph").Count()); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "GenerateAssetsFile").Count()); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "ValidateRestoreGraphs").Count()); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "CreateRestoreResult").Count()); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "RestoreNoOpInformation").Count()); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "CreateRestoreTargetGraph").Count()); Assert.Equal(1, telemetryEvents.Where(p => p.Name == "NugetActionSteps").Count()); Assert.True(telemetryEvents.Where(p => p.Name == "NugetActionSteps"). 
Any(p => (string)p["SubStepName"] == TelemetryConstants.PreviewBuildIntegratedStepName)); } } [Fact] public async Task TestPacMan_PreviewInstallPackage_BuildIntegrated_RaiseTelemetryEventsWithErrorCode() { // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider(); // set up telemetry service var telemetrySession = new Mock<ITelemetrySession>(); var telemetryEvents = new ConcurrentQueue<TelemetryEvent>(); telemetrySession .Setup(x => x.PostEvent(It.IsAny<TelemetryEvent>())) .Callback<TelemetryEvent>(x => telemetryEvents.Enqueue(x)); var nugetProjectContext = new TestNuGetProjectContext(); var telemetryService = new NuGetVSTelemetryService(telemetrySession.Object); TelemetryActivity.NuGetTelemetryService = telemetryService; // Create Package Manager using (var solutionManager = new TestSolutionManager(true)) { var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, Settings.LoadSpecificSettings(solutionManager.SolutionDirectory, "NuGet.Config"), solutionManager, new TestDeleteOnRestartManager()); var json = new JObject { ["dependencies"] = new JObject(), ["frameworks"] = new JObject { ["net46"] = new JObject() } }; var buildIntegratedProject = solutionManager.AddBuildIntegratedProject(json: json); // Act var target = new PackageIdentity("NuGet.Versioning", new NuGetVersion("99.9.9")); await nuGetPackageManager.PreviewInstallPackageAsync( buildIntegratedProject, target, new ResolutionContext(), nugetProjectContext, sourceRepositoryProvider.GetRepositories(), sourceRepositoryProvider.GetRepositories(), CancellationToken.None); // Assert Assert.Equal(17, telemetryEvents.Count); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "ProjectRestoreInformation").Count()); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "GenerateRestoreGraph").Count()); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "GenerateAssetsFile").Count()); Assert.Equal(2, telemetryEvents.Where(p => p.Name == 
"ValidateRestoreGraphs").Count()); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "CreateRestoreResult").Count()); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "RestoreNoOpInformation").Count()); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "CreateRestoreTargetGraph").Count()); Assert.Equal(1, telemetryEvents.Where(p => p.Name == "NugetActionSteps").Count()); Assert.True(telemetryEvents.Where(p => p.Name == "NugetActionSteps"). Any(p => (string)p["SubStepName"] == TelemetryConstants.PreviewBuildIntegratedStepName)); Assert.True((string)telemetryEvents .Where(p => p.Name == "ProjectRestoreInformation"). Last()["ErrorCodes"] == NuGetLogCode.NU1102.ToString()); } } [Fact] public async Task TestPacMan_PreviewInstallPackage_BuildIntegrated_RaiseTelemetryEventsWithDupedErrorCodes() { // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider(); // set up telemetry service var telemetrySession = new Mock<ITelemetrySession>(); var telemetryEvents = new ConcurrentQueue<TelemetryEvent>(); telemetrySession .Setup(x => x.PostEvent(It.IsAny<TelemetryEvent>())) .Callback<TelemetryEvent>(x => telemetryEvents.Enqueue(x)); var nugetProjectContext = new TestNuGetProjectContext(); var telemetryService = new NuGetVSTelemetryService(telemetrySession.Object); TelemetryActivity.NuGetTelemetryService = telemetryService; // Create Package Manager using (var solutionManager = new TestSolutionManager(true)) { var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, Settings.LoadSpecificSettings(solutionManager.SolutionDirectory, "NuGet.Config"), solutionManager, new TestDeleteOnRestartManager()); var json = new JObject { ["dependencies"] = new JObject() { new JProperty("NuGet.Frameworks", "99.0.0") }, ["frameworks"] = new JObject { ["net46"] = new JObject() } }; var buildIntegratedProject = solutionManager.AddBuildIntegratedProject(json: json); // Act var target = new PackageIdentity("NuGet.Versioning", 
new NuGetVersion("99.9.9")); await nuGetPackageManager.PreviewInstallPackageAsync( buildIntegratedProject, target, new ResolutionContext(), nugetProjectContext, sourceRepositoryProvider.GetRepositories(), sourceRepositoryProvider.GetRepositories(), CancellationToken.None); // Assert Assert.Equal(17, telemetryEvents.Count); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "ProjectRestoreInformation").Count()); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "GenerateRestoreGraph").Count()); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "GenerateAssetsFile").Count()); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "ValidateRestoreGraphs").Count()); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "CreateRestoreResult").Count()); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "RestoreNoOpInformation").Count()); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "CreateRestoreTargetGraph").Count()); Assert.Equal(1, telemetryEvents.Where(p => p.Name == "NugetActionSteps").Count()); Assert.True(telemetryEvents.Where(p => p.Name == "NugetActionSteps"). Any(p => (string)p["SubStepName"] == TelemetryConstants.PreviewBuildIntegratedStepName)); Assert.True((string)telemetryEvents .Where(p => p.Name == "ProjectRestoreInformation"). 
Last()["ErrorCodes"] == NuGetLogCode.NU1102.ToString()); } } [Fact] public async Task TestPacMan_PreviewInstallPackage_BuildIntegrated_RaiseTelemetryEventsWithWarningCode() { // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider(); // set up telemetry service var telemetrySession = new Mock<ITelemetrySession>(); var telemetryEvents = new ConcurrentQueue<TelemetryEvent>(); telemetrySession .Setup(x => x.PostEvent(It.IsAny<TelemetryEvent>())) .Callback<TelemetryEvent>(x => telemetryEvents.Enqueue(x)); var nugetProjectContext = new TestNuGetProjectContext(); var telemetryService = new TestNuGetVSTelemetryService(telemetrySession.Object, _logger); TelemetryActivity.NuGetTelemetryService = telemetryService; // Create Package Manager using (var solutionManager = new TestSolutionManager(true)) { var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, Settings.LoadSpecificSettings(solutionManager.SolutionDirectory, "NuGet.Config"), solutionManager, new TestDeleteOnRestartManager()); var json = new JObject { ["dependencies"] = new JObject(), ["frameworks"] = new JObject { ["net46"] = new JObject() } }; var buildIntegratedProject = solutionManager.AddBuildIntegratedProject(json: json); // Act var target = new PackageIdentity("NuGet.Versioning", new NuGetVersion("4.6.9")); lock (_logger) { // telemetry count has been flaky, these xunit logs should help track the extra source of events on CI // for issue https://github.com/NuGet/Home/issues/7105 _logger.LogInformation("Begin PreviewInstallPackageAsync"); } await nuGetPackageManager.PreviewInstallPackageAsync( buildIntegratedProject, target, new ResolutionContext(), nugetProjectContext, sourceRepositoryProvider.GetRepositories(), sourceRepositoryProvider.GetRepositories(), CancellationToken.None); lock (_logger) { _logger.LogInformation("End PreviewInstallPackageAsync"); } // Assert Assert.Equal(19, telemetryEvents.Count); Assert.Equal(2, 
telemetryEvents.Where(p => p.Name == "ProjectRestoreInformation").Count()); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "GenerateRestoreGraph").Count()); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "GenerateAssetsFile").Count()); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "ValidateRestoreGraphs").Count()); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "CreateRestoreResult").Count()); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "RestoreNoOpInformation").Count()); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "CreateRestoreTargetGraph").Count()); Assert.Equal(1, telemetryEvents.Where(p => p.Name == "NugetActionSteps").Count()); Assert.True(telemetryEvents.Where(p => p.Name == "NugetActionSteps"). Any(p => (string)p["SubStepName"] == TelemetryConstants.PreviewBuildIntegratedStepName)); Assert.True((string)telemetryEvents .Where(p => p.Name == "ProjectRestoreInformation"). Last()["WarningCodes"] == NuGetLogCode.NU1603.ToString()); } } [Fact] public async Task TestPacMan_PreviewInstallPackage_BuildIntegrated_RaiseTelemetryEventsWithDupedWarningCodes() { // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider(); // set up telemetry service var telemetrySession = new Mock<ITelemetrySession>(); var telemetryEvents = new ConcurrentQueue<TelemetryEvent>(); telemetrySession .Setup(x => x.PostEvent(It.IsAny<TelemetryEvent>())) .Callback<TelemetryEvent>(x => telemetryEvents.Enqueue(x)); var nugetProjectContext = new TestNuGetProjectContext(); var telemetryService = new TestNuGetVSTelemetryService(telemetrySession.Object, _logger); TelemetryActivity.NuGetTelemetryService = telemetryService; // Create Package Manager using (var solutionManager = new TestSolutionManager(true)) { var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, Settings.LoadSpecificSettings(solutionManager.SolutionDirectory, "NuGet.Config"), solutionManager, new 
TestDeleteOnRestartManager()); var json = new JObject { ["dependencies"] = new JObject() { new JProperty("NuGet.Frameworks", "4.6.9") }, ["frameworks"] = new JObject { ["net46"] = new JObject() } }; var buildIntegratedProject = solutionManager.AddBuildIntegratedProject(json: json); // Act var target = new PackageIdentity("NuGet.Versioning", new NuGetVersion("4.6.9")); await nuGetPackageManager.PreviewInstallPackageAsync( buildIntegratedProject, target, new ResolutionContext(), nugetProjectContext, sourceRepositoryProvider.GetRepositories(), sourceRepositoryProvider.GetRepositories(), CancellationToken.None); // Assert Assert.Equal(21, telemetryEvents.Count); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "ProjectRestoreInformation").Count()); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "GenerateRestoreGraph").Count()); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "GenerateAssetsFile").Count()); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "ValidateRestoreGraphs").Count()); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "CreateRestoreResult").Count()); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "RestoreNoOpInformation").Count()); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "CreateRestoreTargetGraph").Count()); Assert.Equal(1, telemetryEvents.Where(p => p.Name == "NugetActionSteps").Count()); Assert.True(telemetryEvents.Where(p => p.Name == "NugetActionSteps"). Any(p => (string)p["SubStepName"] == TelemetryConstants.PreviewBuildIntegratedStepName)); Assert.True((string)telemetryEvents .Where(p => p.Name == "ProjectRestoreInformation"). 
Last()["WarningCodes"] == NuGetLogCode.NU1603.ToString()); } } [Fact] public async Task TestPacMan_PreviewUpdatePackage_PackagesConfig_RaiseTelemetryEvents() { // Set up Package Source var packages = new List<SourcePackageDependencyInfo> { new SourcePackageDependencyInfo("a", new NuGetVersion(1, 0, 0), new[] { new Packaging.Core.PackageDependency("b", new VersionRange(new NuGetVersion(1, 0, 0))) }, true, null), new SourcePackageDependencyInfo("a", new NuGetVersion(2, 0, 0), new[] { new Packaging.Core.PackageDependency("b", new VersionRange(new NuGetVersion(2, 0, 0))) }, true, null), new SourcePackageDependencyInfo("b", new NuGetVersion(1, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("b", new NuGetVersion(2, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null) }; var sourceRepositoryProvider = CreateSource(packages); // Set up NuGetProject var fwk45 = NuGetFramework.Parse("net45"); var installedPackage1 = new PackageIdentity("a", new NuGetVersion(1, 0, 0)); var installedPackage2 = new PackageIdentity("b", new NuGetVersion(1, 0, 0)); var installedPackages = new List<NuGet.Packaging.PackageReference> { new NuGet.Packaging.PackageReference(installedPackage1, fwk45, true), new NuGet.Packaging.PackageReference(installedPackage2, fwk45, true) }; var nuGetProject = new TestNuGetProject(installedPackages); // set up telemetry service var telemetrySession = new Mock<ITelemetrySession>(); var telemetryEvents = new ConcurrentQueue<TelemetryEvent>(); telemetrySession .Setup(x => x.PostEvent(It.IsAny<TelemetryEvent>())) .Callback<TelemetryEvent>(x => telemetryEvents.Enqueue(x)); var nugetProjectContext = new TestNuGetProjectContext(); var telemetryService = new NuGetVSTelemetryService(telemetrySession.Object); TelemetryActivity.NuGetTelemetryService = telemetryService; // Create Package Manager using (var solutionManager = new TestSolutionManager(true)) { var nuGetPackageManager = new NuGetPackageManager( 
sourceRepositoryProvider, Settings.LoadSpecificSettings(solutionManager.SolutionDirectory, "NuGet.Config"), solutionManager, new TestDeleteOnRestartManager()); // Main Act var target = new PackageIdentity("a", new NuGetVersion(2, 0, 0)); await nuGetPackageManager.PreviewUpdatePackagesAsync( new List<PackageIdentity> { target }, new List<NuGetProject> { nuGetProject }, new ResolutionContext(), nugetProjectContext, sourceRepositoryProvider.GetRepositories(), sourceRepositoryProvider.GetRepositories(), CancellationToken.None); // Assert Assert.Equal(3, telemetryEvents.Count); VerifyPreviewActionsTelemetryEvents_PackagesConfig(telemetryEvents.Select(p => (string)p["SubStepName"])); } } [Fact] public async Task TestPacMan_ExecuteNuGetProjectActions_PackagesConfig_RaiseTelemetryEvents() { // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider(); // set up telemetry service var telemetrySession = new Mock<ITelemetrySession>(); var telemetryEvents = new ConcurrentQueue<TelemetryEvent>(); telemetrySession .Setup(x => x.PostEvent(It.IsAny<TelemetryEvent>())) .Callback<TelemetryEvent>(x => telemetryEvents.Enqueue(x)); var nugetProjectContext = new TestNuGetProjectContext(); var telemetryService = new NuGetVSTelemetryService(telemetrySession.Object); TelemetryActivity.NuGetTelemetryService = telemetryService; // Create Package Manager using (var solutionManager = new TestSolutionManager(true)) { var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, Settings.LoadSpecificSettings(solutionManager.SolutionDirectory, "NuGet.Config"), solutionManager, new TestDeleteOnRestartManager()); var nugetProject = solutionManager.AddNewMSBuildProject(); var target = _packageWithDependents[0]; var projectActions = new List<NuGetProjectAction>(); projectActions.Add( NuGetProjectAction.CreateInstallProjectAction( target, sourceRepositoryProvider.GetRepositories().First(), nugetProject)); // Act await 
nuGetPackageManager.ExecuteNuGetProjectActionsAsync( new List<NuGetProject>() { nugetProject }, projectActions, nugetProjectContext, NullSourceCacheContext.Instance, CancellationToken.None); // Assert Assert.Equal(5, telemetryEvents.Count); Assert.Equal(1, telemetryEvents.Where(p => p.Name == "PackagePreFetcherInformation").Count()); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "PackageExtractionInformation").Count()); Assert.Equal(1, telemetryEvents.Where(p => p.Name == "NugetActionSteps").Count()); Assert.True(telemetryEvents.Where(p => p.Name == "NugetActionSteps"). Any(p => (string)p["SubStepName"] == TelemetryConstants.ExecuteActionStepName)); } } [Fact] public async Task TestPacMan_ExecuteNuGetProjectActions_BuildIntegrated_RaiseTelemetryEvents() { // Arrange var sourceRepositoryProvider = TestSourceRepositoryUtility.CreateV3OnlySourceRepositoryProvider(); // set up telemetry service var telemetrySession = new Mock<ITelemetrySession>(); var telemetryEvents = new ConcurrentQueue<TelemetryEvent>(); telemetrySession .Setup(x => x.PostEvent(It.IsAny<TelemetryEvent>())) .Callback<TelemetryEvent>(x => telemetryEvents.Enqueue(x)); var nugetProjectContext = new TestNuGetProjectContext(); var telemetryService = new NuGetVSTelemetryService(telemetrySession.Object); TelemetryActivity.NuGetTelemetryService = telemetryService; using (var settingsdir = TestDirectory.Create()) using (var testSolutionManager = new TestSolutionManager(true)) { var settings = Settings.LoadSpecificSettings(testSolutionManager.SolutionDirectory, "NuGet.Config"); foreach (var source in sourceRepositoryProvider.GetRepositories()) { settings.AddOrUpdate(ConfigurationConstants.PackageSources, source.PackageSource.AsSourceItem()); } var token = CancellationToken.None; var deleteOnRestartManager = new TestDeleteOnRestartManager(); var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, settings, testSolutionManager, deleteOnRestartManager); var installationCompatibility = 
new Mock<IInstallationCompatibility>(); nuGetPackageManager.InstallationCompatibility = installationCompatibility.Object; var buildIntegratedProject = testSolutionManager.AddBuildIntegratedProject(); var packageIdentity = _packageWithDependents[0]; var projectActions = new List<NuGetProjectAction>(); projectActions.Add( NuGetProjectAction.CreateInstallProjectAction( packageIdentity, sourceRepositoryProvider.GetRepositories().First(), buildIntegratedProject)); // Act await nuGetPackageManager.ExecuteNuGetProjectActionsAsync( new List<NuGetProject>() { buildIntegratedProject }, projectActions, nugetProjectContext, NullSourceCacheContext.Instance, token); // Assert Assert.Equal(38, telemetryEvents.Count); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "ProjectRestoreInformation").Count()); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "GenerateRestoreGraph").Count()); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "GenerateAssetsFile").Count()); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "ValidateRestoreGraphs").Count()); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "CreateRestoreResult").Count()); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "RestoreNoOpInformation").Count()); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "CreateRestoreTargetGraph").Count()); Assert.Equal(2, telemetryEvents.Where(p => p.Name == "NugetActionSteps").Count()); Assert.True(telemetryEvents.Where(p => p.Name == "NugetActionSteps"). Any(p => (string)p["SubStepName"] == TelemetryConstants.PreviewBuildIntegratedStepName)); Assert.True(telemetryEvents.Where(p => p.Name == "NugetActionSteps"). 
Any(p => (string)p["SubStepName"] == TelemetryConstants.ExecuteActionStepName)); } } [Fact] public async Task TestPacManPreviewInstallPackage_WithGlobalPackageFolder() { using ( var packageSource1 = TestDirectory.Create()) { // Arrange var sourceRepositoryProvider1 = TestSourceRepositoryUtility.CreateSourceRepositoryProvider( new List<Configuration.PackageSource>() { new Configuration.PackageSource(packageSource1.Path) }); using (var testSolutionManager = new TestSolutionManager(true)) { var testSettings = NullSettings.Instance; var token = CancellationToken.None; var resolutionContext = new ResolutionContext(); var testNuGetProjectContext = new TestNuGetProjectContext(); var deleteOnRestartManager = new TestDeleteOnRestartManager(); var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider1, testSettings, testSolutionManager, deleteOnRestartManager); var packagesFolderPath = PackagesFolderPathUtility.GetPackagesFolderPath(testSolutionManager, testSettings); var projectA = testSolutionManager.AddBuildIntegratedProject(); var target = new PackageIdentity("packageA", NuGetVersion.Parse("1.0.0")); var packageAContext = new SimpleTestPackageContext() { Id = "packageA", Version = "1.0.0" }; var saveMode = PackageSaveMode.Nuspec | PackageSaveMode.Files | PackageSaveMode.Nupkg; await SimpleTestPackageUtility.CreateFolderFeedV3Async( packagesFolderPath, saveMode, packageAContext); // ACT var result = await nuGetPackageManager.PreviewInstallPackageAsync( projectA, target, new ResolutionContext(), new TestNuGetProjectContext(), sourceRepositoryProvider1.GetRepositories(), sourceRepositoryProvider1.GetRepositories(), token); // Assert var resulting = result.Select(a => Tuple.Create(a.PackageIdentity, a.NuGetProjectActionType)).ToArray(); var expected = new List<Tuple<PackageIdentity, NuGetProjectActionType>>(); Expected(expected, target.Id, target.Version); Assert.True(Compare(resulting, expected)); } } } [Fact] public async Task 
TestPacMan_PreviewUpdatePackage_UnlistedPackage() { // Arrange // Set up Package Source var packages = new List<SourcePackageDependencyInfo> { new SourcePackageDependencyInfo("a", new NuGetVersion(1, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("a", new NuGetVersion(2, 0, 0), new Packaging.Core.PackageDependency[] { }, false, null) }; var sourceRepositoryProvider = CreateSource(packages); // Set up NuGetProject var fwk45 = NuGetFramework.Parse("net45"); var installedPackage1 = new PackageIdentity("a", new NuGetVersion(1, 0, 0)); var installedPackages = new List<NuGet.Packaging.PackageReference> { new NuGet.Packaging.PackageReference(installedPackage1, fwk45, true) }; var nuGetProject = new TestNuGetProject(installedPackages); // Create Package Manager using (var solutionManager = new TestSolutionManager(true)) { var nuGetPackageManager = new NuGetPackageManager( sourceRepositoryProvider, NullSettings.Instance, solutionManager, new TestDeleteOnRestartManager()); // Main Act var targetPackageId = "a"; var result = (await nuGetPackageManager.PreviewUpdatePackagesAsync( targetPackageId, new List<NuGetProject> { nuGetProject }, new ResolutionContext(DependencyBehavior.Lowest, false, false, VersionConstraints.None), new TestNuGetProjectContext(), sourceRepositoryProvider.GetRepositories(), sourceRepositoryProvider.GetRepositories(), CancellationToken.None)).ToList(); // Assert Assert.Equal(0, result.Count); } } [Fact] public async Task TestPacMan_BuildIntegratedProject_PreviewUpdatePackage() { // Arrange // Set up Package Source var packages = new List<SourcePackageDependencyInfo> { new SourcePackageDependencyInfo("a", new NuGetVersion(1, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null), new SourcePackageDependencyInfo("a", new NuGetVersion(2, 0, 0), new Packaging.Core.PackageDependency[] { }, true, null) }; var sourceRepositoryProvider = CreateSource(packages); var json = new JObject { ["dependencies"] = 
// (continuation of TestPacMan_BuildIntegratedProject_PreviewUpdatePackage, cut by the chunk boundary)
// project.json payload: dependency on "a" 1.0.0, targeting net45.
new JObject() { new JProperty("a", "1.0.0") },
    ["frameworks"] = new JObject
    {
        ["net45"] = new JObject()
    }
};

// Create Package Manager
using (var solutionManager = new TestSolutionManager(true))
{
    var nuGetPackageManager = new NuGetPackageManager(
        sourceRepositoryProvider,
        NullSettings.Instance,
        solutionManager,
        new TestDeleteOnRestartManager());

    var buildIntegratedProject = solutionManager.AddBuildIntegratedProject(json: json);

    // Main Act
    var targetPackageId = "a";
    var result = (await nuGetPackageManager.PreviewUpdatePackagesAsync(
        targetPackageId,
        new List<NuGetProject> { buildIntegratedProject },
        new ResolutionContext(DependencyBehavior.Lowest, false, false, VersionConstraints.None),
        new TestNuGetProjectContext(),
        sourceRepositoryProvider.GetRepositories(),
        sourceRepositoryProvider.GetRepositories(),
        CancellationToken.None)).ToList();

    // Assert
    // Updating "a" on a build-integrated project yields exactly one Install action for a 2.0.0.
    Assert.Equal(1, result.Count);
    Assert.Equal(NuGetProjectActionType.Install, result[0].NuGetProjectActionType);
    Assert.Equal(new PackageIdentity("a", new NuGetVersion(2, 0, 0)), result[0].PackageIdentity);
}
}

// Asserts that the preview-action telemetry sub-step names include the gather,
// resolve, and resolved-actions steps; order and extra entries are not checked.
private void VerifyPreviewActionsTelemetryEvents_PackagesConfig(IEnumerable<string> actual)
{
    Assert.True(actual.Contains(TelemetryConstants.GatherDependencyStepName));
    Assert.True(actual.Contains(TelemetryConstants.ResolveDependencyStepName));
    Assert.True(actual.Contains(TelemetryConstants.ResolvedActionsStepName));
}

// Creates a test OPC package (id/version taken from "package") containing a single
// lib/net45/a.dll file, under the folder "<root>/<id>.<version>".
private static void AddToPackagesFolder(PackageIdentity package, string root)
{
    var dir = Path.Combine(root, $"{package.Id}.{package.Version.ToString()}");
    Directory.CreateDirectory(dir);

    var context = new SimpleTestPackageContext()
    {
        Id = package.Id,
        Version = package.Version.ToString()
    };

    context.AddFile("lib/net45/a.dll");
    SimpleTestPackageUtility.CreateOPCPackage(context, dir);
}

// Builds a SourceRepositoryProvider whose dependency-info and metadata resources are
// served from the supplied in-memory package list (no network involved).
private SourceRepositoryProvider CreateSource(List<SourcePackageDependencyInfo> packages)
{
    var resourceProviders = new List<Lazy<INuGetResourceProvider>>();
    resourceProviders.Add(new Lazy<INuGetResourceProvider>(() =>
// (continuation of CreateSource, cut by the chunk boundary)
new TestDependencyInfoProvider(packages)));
resourceProviders.Add(new Lazy<INuGetResourceProvider>(() => new TestMetadataProvider(packages)));

// Placeholder source URL; every resource is answered by the test providers above.
var packageSource = new Configuration.PackageSource("http://temp");
var packageSourceProvider = new TestPackageSourceProvider(new[] { packageSource });

return new SourceRepositoryProvider(packageSourceProvider, resourceProviders);
}

// Appends the Uninstall(old)/Install(new) action pair expected when "id" is updated.
private static void Expected(List<Tuple<PackageIdentity, NuGetProjectActionType>> expected, string id, NuGetVersion oldVersion, NuGetVersion newVersion)
{
    expected.Add(Tuple.Create(new PackageIdentity(id, oldVersion), NuGetProjectActionType.Uninstall));
    expected.Add(Tuple.Create(new PackageIdentity(id, newVersion), NuGetProjectActionType.Install));
}

// Appends the single Install action expected when "id" is freshly installed.
private static void Expected(List<Tuple<PackageIdentity, NuGetProjectActionType>> expected, string id, NuGetVersion newVersion)
{
    expected.Add(Tuple.Create(new PackageIdentity(id, newVersion), NuGetProjectActionType.Install));
}

// Order-insensitive equality of two action sequences: each side must contain every
// element of the other (per ActionComparer).
private static bool Compare(
    IEnumerable<Tuple<PackageIdentity, NuGetProjectActionType>> lhs,
    IEnumerable<Tuple<PackageIdentity, NuGetProjectActionType>> rhs)
{
    var ok = true;
    ok &= RhsContainsAllLhs(lhs, rhs);
    ok &= RhsContainsAllLhs(rhs, lhs);
    return ok;
}

// True when every element of lhs occurs in rhs, using ActionComparer equality.
private static bool RhsContainsAllLhs(
    IEnumerable<Tuple<PackageIdentity, NuGetProjectActionType>> lhs,
    IEnumerable<Tuple<PackageIdentity, NuGetProjectActionType>> rhs)
{
    foreach (var item in lhs)
    {
        if (!rhs.Contains(item, new ActionComparer()))
        {
            return false;
        }
    }
    return true;
}

// Order-insensitive equality of two (project, package) preview-result sequences
// (per PreviewResultComparer).
private static bool PreviewResultsCompare(
    IEnumerable<Tuple<TestNuGetProject, PackageIdentity>> lhs,
    IEnumerable<Tuple<TestNuGetProject, PackageIdentity>> rhs)
{
    var ok = true;
    ok &= RhsContainsAllLhs(lhs, rhs);
    ok &= RhsContainsAllLhs(rhs, lhs);
    return ok;
}

// True when every element of lhs occurs in rhs, using PreviewResultComparer equality.
private static bool RhsContainsAllLhs(
    IEnumerable<Tuple<TestNuGetProject, PackageIdentity>> lhs,
    IEnumerable<Tuple<TestNuGetProject, PackageIdentity>> rhs)
{
    foreach (var item in lhs)
    {
        if (!rhs.Contains(item, new PreviewResultComparer()))
{ return false; } } return true; } private class ActionComparer : IEqualityComparer<Tuple<PackageIdentity, NuGetProjectActionType>> { public bool Equals(Tuple<PackageIdentity, NuGetProjectActionType> x, Tuple<PackageIdentity, NuGetProjectActionType> y) { var f1 = x.Item1.Equals(y.Item1); var f2 = x.Item2 == y.Item2; return f1 && f2; } public int GetHashCode(Tuple<PackageIdentity, NuGetProjectActionType> obj) { return obj.GetHashCode(); } } private class PreviewResultComparer : IEqualityComparer<Tuple<TestNuGetProject, PackageIdentity>> { public bool Equals(Tuple<TestNuGetProject, PackageIdentity> x, Tuple<TestNuGetProject, PackageIdentity> y) { var f1 = x.Item1.Metadata[NuGetProjectMetadataKeys.Name].ToString().Equals( y.Item1.Metadata[NuGetProjectMetadataKeys.Name].ToString()); var f2 = x.Item2.Equals(y.Item2); return f1 && f2; } public int GetHashCode(Tuple<TestNuGetProject, PackageIdentity> obj) { return obj.GetHashCode(); } } private class TestNuGetVSTelemetryService : NuGetVSTelemetryService { private ITelemetrySession _telemetrySession; private XunitLogger _logger; public TestNuGetVSTelemetryService(ITelemetrySession telemetrySession, XunitLogger logger) { _telemetrySession = telemetrySession ?? throw new ArgumentNullException(nameof(telemetrySession)); _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); } public override void EmitTelemetryEvent(TelemetryEvent telemetryData) { if (telemetryData == null) { throw new ArgumentNullException(nameof(telemetryData)); } lock (_logger) { var operationId = telemetryData["OperationId"]; var parentId = telemetryData["ParentId"]; _logger.LogInformation("--------------------------"); _logger.LogInformation($"Name: {telemetryData.Name}"); _logger.LogInformation($"OperationId: {operationId}"); _logger.LogInformation($"ParentId: {parentId}"); _logger.LogInformation($"Json: {telemetryData.ToJson()}"); _logger.LogInformation($"Stack: {Environment.StackTrace}"); _logger.LogInformation("--------------------------"); } _telemetrySession.PostEvent(telemetryData); } } } }
56.247611
294
0.620557
[ "Apache-2.0" ]
PositiveTechnologies/NuGet.Client
test/NuGet.Core.Tests/NuGet.PackageManagement.Test/NuGetPackageManagerTests.cs
394,352
C#
using OpenQA.Selenium;
using OpenQA.Selenium.Support.UI;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Text;

namespace DemoKule
{
    /// <summary>
    /// Static helper wrappers around common Selenium WebDriver actions,
    /// operating directly on already-located <see cref="IWebElement"/> instances.
    /// </summary>
    class SeleniumSetMethod
    {
        /// <summary>
        /// Returns a list of elements related to <paramref name="element"/>.
        /// </summary>
        /// <remarks>
        /// NOTE(review): the current implementation never populates the list and
        /// always returns an empty collection — the By-locator lookup logic was
        /// commented out when the API moved to <see cref="IWebElement"/> parameters.
        /// Presumably this should call element.FindElements(...); confirm against
        /// callers before changing the behavior. Behavior is preserved here.
        /// </remarks>
        /// <param name="element">The element the lookup would be based on (currently unused).</param>
        /// <returns>An empty list of <see cref="IWebElement"/>.</returns>
        public static IList<IWebElement> Liste(IWebElement element)
        {
            IList<IWebElement> list = new List<IWebElement>();
            return list;
        }

        /// <summary>Types <paramref name="value"/> into <paramref name="element"/> via SendKeys.</summary>
        /// <param name="element">The target input element.</param>
        /// <param name="value">The text to type.</param>
        public static void EnterText(IWebElement element, string value)
        {
            element.SendKeys(value);
        }

        /// <summary>Clicks <paramref name="element"/>.</summary>
        /// <param name="element">The element to click.</param>
        public static void Click(IWebElement element)
        {
            element.Click();
        }

        /// <summary>
        /// Returns a random integer in the half-open range
        /// [<paramref name="minValue"/>, <paramref name="maxValue"/>) using the
        /// supplied generator (see <see cref="System.Random.Next(int, int)"/>).
        /// </summary>
        /// <param name="rndm">The random number generator to draw from.</param>
        /// <param name="minValue">Inclusive lower bound.</param>
        /// <param name="maxValue">Exclusive upper bound.</param>
        /// <returns>The generated value.</returns>
        public static int Random(Random rndm, int minValue, int maxValue)
        {
            return rndm.Next(minValue, maxValue);
        }

        /// <summary>
        /// Selects the option at the given zero-based <paramref name="value"/>
        /// index in a &lt;select&gt; element.
        /// </summary>
        /// <param name="element">The &lt;select&gt; element to operate on.</param>
        /// <param name="value">Zero-based index of the option to select.</param>
        public static void SelectDropdown(IWebElement element, int value)
        {
            new SelectElement(element).SelectByIndex(value);
        }
    }
}
40.021053
189
0.591794
[ "MIT" ]
aslhnbl/DemoKule
SeleniumSetMethod.cs
3,804
C#
using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; using System.Xml.Serialization; namespace DogeKit_Serializer.Models { public class TWRP { [XmlElement] public string Version { get; set; } } }
18.0625
43
0.709343
[ "Unlicense" ]
squabbi/DogeKit-Pixel2
DogeKit Serializer/Models/TWRP.cs
291
C#
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. // <auto-generated/> #nullable disable using System; namespace Azure.ResourceManager.Resources.Models { /// <summary> The managed application provider authorization. </summary> public partial class ApplicationProviderAuthorization { /// <summary> Initializes a new instance of ApplicationProviderAuthorization. </summary> /// <param name="principalId"> The provider&apos;s principal identifier. This is the identity that the provider will use to call ARM to manage the managed application resources. </param> /// <param name="roleDefinitionId"> The provider&apos;s role definition identifier. This role will define all the permissions that the provider must have on the managed application&apos;s container resource group. This role definition cannot have permission to delete the resource group. </param> /// <exception cref="ArgumentNullException"> <paramref name="principalId"/> or <paramref name="roleDefinitionId"/> is null. </exception> public ApplicationProviderAuthorization(string principalId, string roleDefinitionId) { if (principalId == null) { throw new ArgumentNullException(nameof(principalId)); } if (roleDefinitionId == null) { throw new ArgumentNullException(nameof(roleDefinitionId)); } PrincipalId = principalId; RoleDefinitionId = roleDefinitionId; } /// <summary> The provider&apos;s principal identifier. This is the identity that the provider will use to call ARM to manage the managed application resources. </summary> public string PrincipalId { get; set; } /// <summary> The provider&apos;s role definition identifier. This role will define all the permissions that the provider must have on the managed application&apos;s container resource group. This role definition cannot have permission to delete the resource group. </summary> public string RoleDefinitionId { get; set; } } }
53.6
304
0.704291
[ "MIT" ]
0rland0Wats0n/azure-sdk-for-net
sdk/resources/Azure.ResourceManager.Resources/src/Generated/Models/ApplicationProviderAuthorization.cs
2,144
C#
// Copyright (c) 2019 Jennifer Messerly // This code is licensed under MIT license (see LICENSE for details) using System; using System.Collections.Generic; using Kingmaker; using Kingmaker.Blueprints; using Kingmaker.Blueprints.Classes; using Kingmaker.Blueprints.Classes.Selection; using Kingmaker.Blueprints.Classes.Spells; using Kingmaker.Blueprints.Facts; using Kingmaker.Blueprints.Items.Components; using Kingmaker.Blueprints.Items.Equipment; using Kingmaker.Blueprints.Root; using Kingmaker.Controllers; using Kingmaker.Designers.Mechanics.Buffs; using Kingmaker.Designers.Mechanics.Facts; using Kingmaker.EntitySystem.Entities; using Kingmaker.EntitySystem.Stats; using Kingmaker.Enums; using Kingmaker.Items; using Kingmaker.Items.Slots; using Kingmaker.PubSubSystem; using Kingmaker.RuleSystem.Rules; using Kingmaker.UI.Common; using Kingmaker.UnitLogic; using Kingmaker.UnitLogic.Abilities; using Kingmaker.UnitLogic.Abilities.Blueprints; using Kingmaker.UnitLogic.Buffs; using Kingmaker.UnitLogic.Buffs.Blueprints; using Kingmaker.UnitLogic.FactLogic; using Kingmaker.UnitLogic.Mechanics; using Kingmaker.UnitLogic.Mechanics.Components; using Kingmaker.UnitLogic.Parts; using Kingmaker.Utility; using Kingmaker.Visual.Sound; using Newtonsoft.Json; namespace EldritchArcana { static class OracleCurses { static LibraryScriptableObject library => Main.library; static BlueprintCharacterClass oracle => OracleClass.oracle; static BlueprintCharacterClass[] oracleArray => OracleClass.oracleArray; internal static BlueprintFeatureSelection CreateSelection() { var selection = Helpers.CreateFeatureSelection("OracleCurseSelection", "Curse", "Each oracle is cursed, but this curse comes with a benefit as well as a hindrance.This choice is made at 1st level, and once made, it cannot be changed.The oracle’s curse cannot be removed or dispelled without the aid of a deity.An oracle’s curse is based on her oracle level plus one for every two levels or Hit Dice other than oracle.Each oracle 
must choose one of the following curses.", "b4c9164ec94a47589eeb2a6688b24320", null, UpdateLevelUpDeterminatorText.Group); // Note: most curses can't be implemented as written, so they've undergone some // adaptation to work in PK:K, attempting to capture the sprit and RP flavor. // // Powerful (3rd party) ones we could add: // - Branded, Frenetic (Cha to Fortitude/Reflex respectively) var curses = new List<BlueprintProgression>(); curses.Add(CreateBlackenedCurse()); curses.Add(CreateCloudedVision()); curses.Add(CreateCovetousCurse()); curses.Add(CreateDeafCurse()); curses.Add(CreateHauntedCurse()); curses.Add(CreateLameCurse()); curses.Add(CreateTonguesCurse()); curses.Add(CreateWastingCurse()); // Note: BlueprintProgression.CalcLevels is patched to handle curse progression. // (Curses should advance by +1/2 per character level in other classes.) foreach (var curse in curses) { BlueprintProgression_CalcLevel_Patch.onCalcLevel.Add(curse, CalculateCurseLevel); } selection.SetFeatures(curses); return selection; } internal static int CalculateCurseLevel(UnitDescriptor unit) { // Calculate curse level: Oracle levels + 1/2 other levels. int oracleLevel = unit.Progression.GetClassLevel(OracleClass.oracle); int characterLevel = unit.Progression.CharacterLevel; return oracleLevel + (characterLevel - oracleLevel) / 2; } static BlueprintProgression CreateCloudedVision() { // Clouded Vision: targets greater than max sight distance are treated as having concealment // (similar to the fog spell). Can't target spells at creatures beyond max range. // // Distances are adjusted to make sense in game, which generally operates in closer quarters. // The game significantly reduces spell range: close/medium/long is 30/40/50ft, with no caster // level increase, but in PnP it starts at 25/100/400ft and those increase with level. // // For that reason, clouded vision is reduced from 30/60ft to 20/30ft. 
That way it still has // an effect (otherwise, 60ft would effectively remove the penalty completely). // // Note: I tried altering fog of war settings, but this seems to break some of the game's // cutscene scripts, so those patches are removed. var curse = Helpers.CreateProgression("OracleCurseCloudedVision", "Clouded Vision", "Your eyes are obscured, making it difficult for you to see.\nYou cannot see anything beyond 20 feet. " + "Targets beyond this range have concealment, and you cannot target any point past that range.", "a4556beb36e742db9361c50587de9514", Helpers.GetIcon("46fd02ad56c35224c9c91c88cd457791"), // blindness FeatureGroup.None, Helpers.Create<CloudedVisionLogic>()); var level5 = Helpers.CreateFeature($"{curse.name}Level5", curse.Name, "At 5th level, your vision distance increases to 30 feet.", "9ee32f1d54984aa7b635891fa778205d", curse.Icon, FeatureGroup.None); var level10 = Helpers.CreateFeature($"{curse.name}Blindsense", "Blindsense", "At 10th level, you gain blindsense out to a range of 30 feet.", "b92a0776b8984f19b6ae0a83c4b90579", Helpers.GetIcon("30e5dc243f937fc4b95d2f8f4e1b7ff3"), // see invisible FeatureGroup.None, Helpers.Create<Blindsense>(b => b.Range = 30.Feet())); var level15 = Helpers.CreateFeature($"{curse.name}Blindsight", "Blindsight", "At 15th level, you gain blindsight out to a range of 15 feet.", "69c483cbe48647f2af576275c2a30b59", Helpers.GetIcon("4cf3d0fae3239ec478f51e86f49161cb"), // true seeing FeatureGroup.None, Helpers.Create<Blindsense>(b => { b.Range = 15.Feet(); b.Blindsight = true; })); curse.LevelEntries = new LevelEntry[] { Helpers.LevelEntry(5, level5), Helpers.LevelEntry(10, level10), Helpers.LevelEntry(15, level15) }; curse.UIGroups = Helpers.CreateUIGroups(level5, level10, level15); curse.Classes = oracleArray; return curse; } static BlueprintProgression CreateDeafCurse() { var pcVoiceNone = library.Get<BlueprintUnitAsksList>("e7b22776ba8e2b84eaaff98e439639a7"); var curse = 
Helpers.CreateProgression("OracleCurseDeaf", "Deaf", "You cannot hear and suffer all of the usual penalties for being deafened: -4 penalty on initiative and -4 perception. You cast all of your spells as if they were modified by the Silent Spell feat. This does not increase their level or casting time.", "a69e00e4787d4f4c9bf38540c88fce13", Helpers.GetIcon("c3893092a333b93499fd0a21845aa265"), // sound burst FeatureGroup.None, AddStatBonusOnCurseLevel.Create(StatType.Initiative, -4, ModifierDescriptor.Penalty, maxLevel: 4), AddStatBonusOnCurseLevel.Create(StatType.SkillPerception, -4, ModifierDescriptor.Penalty, maxLevel: 4), Helpers.Create<ReplaceAsksList>(r => r.Asks = pcVoiceNone), Helpers.Create<OracleCurseLogic>(o => o.Curse = OracleCurse.Deaf)); curse.Classes = oracleArray; Main.ApplyPatch(typeof(AbilityData_VoiceIntensity_Patch), "Oracle Deaf curse, cast using Silent Spell"); var level5 = Helpers.CreateFeature($"{curse.name}Level5", curse.Name, "At 5th level, you no longer receive a penalty on Perception checks, and the initiative penalty for being deaf is reduced to –2.", "373c4a9b4d304cbfa77472613010a367", curse.Icon, FeatureGroup.None, AddStatBonusOnCurseLevel.Create(StatType.Initiative, -2, ModifierDescriptor.Penalty, minLevel: 5, maxLevel: 9)); var level10 = Helpers.CreateFeature($"{curse.name}Level10", curse.Name, "At 10th level, you receive a +3 competence bonus on Perception checks, and you do not suffer any penalty on initiative checks due to being deaf.", "649e4b7f719b4a5d93c322d12ed4ae5b", Helpers.GetIcon("c927a8b0cd3f5174f8c0b67cdbfde539"), // remove blindness FeatureGroup.None, AddStatBonusOnCurseLevel.Create(StatType.SkillPerception, 3, ModifierDescriptor.Competence, minLevel: 10)); var tremorsense = Helpers.CreateFeature($"{curse.name}Tremorsense", "Tremorsense", "At 15th level, you gain tremorsense out to a range of 30 feet.", "26c9d319adb04110b4ee687a3d573190", Helpers.GetIcon("30e5dc243f937fc4b95d2f8f4e1b7ff3"), // see invisible 
FeatureGroup.None, Helpers.Create<Blindsense>(b => b.Range = 30.Feet())); curse.LevelEntries = new LevelEntry[] { Helpers.LevelEntry(5, level5), Helpers.LevelEntry(10, level10), Helpers.LevelEntry(15, tremorsense) }; curse.UIGroups = Helpers.CreateUIGroups(level5, level10, tremorsense); return curse; } static BlueprintProgression CreateBlackenedCurse() { var burningHands = library.Get<BlueprintAbility>("4783c3709a74a794dbe7c8e7e0b1b038"); var curse = Helpers.CreateProgression("OracleCurseBlackened", "Blackened", "Your hands and forearms are shriveled and blackened, as if you had plunged your arms into a blazing fire, and your thin, papery skin is sensitive to the touch.\n" + "You take a –4 penalty on weapon attack rolls, but you add burning hands to your list of spells known.", "753f68b73c73472db713c06057a6009f", burningHands.Icon, FeatureGroup.None); curse.Classes = oracleArray; var attackPenalty1 = Helpers.CreateFeature($"{curse.name}Level1", curse.Name, curse.Description, "a32003ed18444246bb6a92a79bb478b9", curse.Icon, FeatureGroup.None, Helpers.Create<AttackTypeAttackBonus>(a => { a.Type = AttackTypeAttackBonus.WeaponRangeType.Normal; a.Descriptor = ModifierDescriptor.Penalty; a.Value = 1; // Value and AttackBonus are multiplied a.AttackBonus = -4; })); var scorchingRay = library.Get<BlueprintAbility>("cdb106d53c65bbc4086183d54c3b97c7"); var burningArc = library.Get<BlueprintAbility>("eaac3d36e0336cb479209a6f65e25e7c"); var level5 = Helpers.CreateFeature($"{curse.name}Level5", curse.Name, "At 5th level, add scorching ray and burning arc to your list of spells known.", "a27670ecc84f4b1d9dd9d434eeb1e782", scorchingRay.Icon, FeatureGroup.None, scorchingRay.CreateAddKnownSpell(oracle, 2), burningArc.CreateAddKnownSpell(oracle, 2)); var wallOfFire = FireSpells.wallOfFire; var level10 = Helpers.CreateFeature($"{curse.name}Level10", curse.Name, "At 10th level, add wall of fire to your list of spells known and your penalty on weapon attack rolls is reduced to –2.", 
"3fb920932967478687bae1d71ffe5c97", wallOfFire.Icon, FeatureGroup.None, wallOfFire.CreateAddKnownSpell(oracle, 4), Helpers.Create<RemoveFeatureOnApply>(r => r.Feature = attackPenalty1), Helpers.Create<AttackTypeAttackBonus>(a => { a.Type = AttackTypeAttackBonus.WeaponRangeType.Normal; a.Descriptor = ModifierDescriptor.Penalty; a.Value = 1; // Value and AttackBonus are multiplied a.AttackBonus = -2; })); curse.SetComponents( burningHands.CreateAddKnownSpell(oracle, 1), Helpers.Create<AddFeatureIfHasFact>(a => { a.Not = true; a.CheckedFact = level10; a.Feature = attackPenalty1; })); var delayedBlastFireball = FireSpells.delayedBlastFireball; var level15 = Helpers.CreateFeature($"{curse.name}Level15", curse.Name, "At 15th level, add delayed blast fireball to your list of spells known.", "330d3fca05884799aef73b546dd27aa5", delayedBlastFireball.Icon, FeatureGroup.None, delayedBlastFireball.CreateAddKnownSpell(oracle, 7)); burningHands.AddRecommendNoFeature(curse); scorchingRay.AddRecommendNoFeature(curse); burningArc.AddRecommendNoFeature(curse); delayedBlastFireball.AddRecommendNoFeature(curse); curse.LevelEntries = new LevelEntry[] { Helpers.LevelEntry(5, level5), Helpers.LevelEntry(10, level10), Helpers.LevelEntry(15, level15), }; curse.UIGroups = Helpers.CreateUIGroups(level5, level10, level15); return curse; } static BlueprintProgression CreateCovetousCurse() { // Note: this was reworked to be based on wealth (instead of fancy clothes). // Also the value was increased 2x because wealth doesn't require spending it as clothes would, // and gold is shared by the party. // var debuff = library.CopyAndAdd<BlueprintBuff>("4e42460798665fd4cb9173ffa7ada323", "OracleCurseCovetousSickened", "be50bd73d0fd4c22be3c26954e097c8c"); var curse = Helpers.CreateProgression("OracleCurseCovetous", "Covetous", "You find yourself drawn to the luster of wealthy living.\nYou must have a gold reserve worth at least 100 gp + 200 gp per character level you have beyond 1st. 
If you do not have sufficient wealth, you feel a strong desire (but are not compelled) to sell existing items or steal from others to obtain it. You are sickened whenever you do not meet this requirement. Use Magic Device becomes a class skill for you.", "e42c5119978c438b9c445a90198632b0", library.Get<BlueprintItemEquipmentRing>("ba4276197d204314d9b4a69a4366b2a3").Icon, // Gold ring FeatureGroup.None, Helpers.Create<AddClassSkill>(a => a.Skill = StatType.SkillUseMagicDevice), CovetousCurseLogic.Create(debuff)); curse.Classes = oracleArray; debuff.SetDescription($"{debuff.Description}\n{curse.Name}: {curse.Description}"); var level5 = Helpers.CreateFeature($"{curse.name}Level5", curse.Name, $"At 5th level, you gain a +4 insight bonus on {UIUtility.GetStatText(StatType.SkillUseMagicDevice)} checks.", "04d79bcdcf7d44ea97fd5f09763bb7bc", Helpers.GetSkillFocus(StatType.SkillUseMagicDevice).Icon, FeatureGroup.None, Helpers.CreateAddStatBonus(StatType.SkillUseMagicDevice, 4, ModifierDescriptor.Insight)); // Note: reworked; "Fabricate" spell is not in game. // It's now the ability to use UMD to identify items (based on the identify bonus from level 5). var level10 = Helpers.CreateFeature($"{curse.name}Level10", curse.Name, $"At 10th level, you can use your {UIUtility.GetStatText(StatType.SkillUseMagicDevice)} skill to identify items.", "2a32af175975459b9a960b79cfcaaf64", Helpers.GetSkillFocus(StatType.SkillUseMagicDevice).Icon, FeatureGroup.None, Helpers.Create<IdentifySkillReplacement>(i => Helpers.SetField(i, "m_SkillType", (int)StatType.SkillUseMagicDevice))); // Note: reworked to Thievery since there's no steal checks against PC. 
var level15 = Helpers.CreateFeature($"{curse.name}Level15", curse.Name, $"At 15th level, you gain a +4 insight bonus on {UIUtility.GetStatText(StatType.SkillThievery)} checks.", "c761a8e5ac6e40c087678a3ede5d9bdd", Helpers.GetSkillFocus(StatType.SkillThievery).Icon, FeatureGroup.None, Helpers.CreateAddStatBonus(StatType.SkillThievery, 4, ModifierDescriptor.Insight)); curse.LevelEntries = new LevelEntry[] { Helpers.LevelEntry(5, level5), Helpers.LevelEntry(10, level10), Helpers.LevelEntry(15, level15) }; curse.UIGroups = Helpers.CreateUIGroups(level5, level10, level15); return curse; } static BlueprintProgression CreateHauntedCurse() { // Note: bonus spells reworked, as none of them exist in game. New theme: invisibility spells. // (Most of these are situational in PF:K, except Greater Invisibility, which goes into a level 5 slot.) // Alternate ideas: Blur or Mirror Image, Phatasmal Web, ... ? // Should be: mage hand/ghost sound var vanish = library.Get<BlueprintAbility>("f001c73999fb5a543a199f890108d936"); // Should be: minor image var invisibility = library.Get<BlueprintAbility>("89940cde01689fb46946b2f8cd7b66b7"); // Should be: telekinesis var invisibilityGreater = library.Get<BlueprintAbility>("ecaa0def35b38f949bd1976a6c9539e0"); // Should be: reverse gravity var invisibilityMass = library.Get<BlueprintAbility>("98310a099009bbd4dbdf66bcef58b4cd"); var curse = Helpers.CreateProgression("OracleCurseHaunted", "Haunted", "Malevolent spirits follow you wherever you go, causing minor mishaps and strange occurrences (such as unexpected breezes, small objects moving on their own, and faint noises).\n" + "Retrieving any stored item from your gear requires a standard action, unless it would normally take longer.Any item you drop lands 10 feet away from you in a random direction.\n" + $"Add {vanish.Name} to your list of spells known.", "e2aa739f54c94f7199f550d7a499a2a0", Helpers.GetIcon("c83447189aabc72489164dfc246f3a36"), // frigid touch FeatureGroup.None, 
vanish.CreateAddKnownSpell(oracle, 1), Helpers.Create<HauntedCurseLogic>()); curse.Classes = oracleArray; var level5 = Helpers.CreateFeature($"{curse.name}Level5", invisibility.Name, $"At 5th level, add {invisibility.Name} to your list of spells known.", "84247c143a9b4d478f4ac3241cce32ab", invisibility.Icon, FeatureGroup.None, invisibility.CreateAddKnownSpell(oracle, 2)); var level10 = Helpers.CreateFeature($"{curse.name}Level10", invisibilityGreater.Name, $"At 10th level, add {invisibilityGreater.Name} to your list of spells known.", "bd62288494144997b3c32cbaa04b25ab", invisibilityGreater.Icon, FeatureGroup.None, invisibilityGreater.CreateAddKnownSpell(oracle, 5)); var level15 = Helpers.CreateFeature($"{curse.name}Level15", invisibilityMass.Name, $"At 15th level, add {invisibilityMass.Name} to your list of spells known.", "90d84dca2e06494cae92566ede0ca6f0", invisibilityMass.Icon, FeatureGroup.None, invisibilityMass.CreateAddKnownSpell(oracle, 7)); curse.LevelEntries = new LevelEntry[] { Helpers.LevelEntry(5, level5), Helpers.LevelEntry(10, level10), Helpers.LevelEntry(15, level15) }; curse.UIGroups = Helpers.CreateUIGroups(level5, level10, level15); Main.ApplyPatch(typeof(ItemsCollection_DropItem_Patch), "Haunted curse (moving items away)"); return curse; } static BlueprintProgression CreateLameCurse() { Main.ApplyPatch(typeof(PartyEncumbranceController_UpdatePartyEncumbrance_Patch), "Lame curse (party speed not reduced by encumbrance"); Main.ApplyPatch(typeof(UnitPartEncumbrance_GetSpeedPenalty_Patch), "Lame curse (speed not reduced by encumbrance)"); var curse = Helpers.CreateProgression("OracleCurseLame", "Lame", "One of your legs is permanently wounded, reducing your base land speed by 10 feet if your base speed is 30 feet or more. If your base speed is less than 30 feet, your speed is reduced by 5 feet. 
Your speed is never reduced due to encumbrance.", "08f1f729406a43f5ab9fece5e92579b6", Helpers.GetIcon("f492622e473d34747806bdb39356eb89"), // slow FeatureGroup.None, Helpers.Create<OracleCurseLameSpeedPenalty>()); curse.Classes = oracleArray; var fatigueImmunity = Helpers.CreateFeature("OracleCurseLameFatigueImmunity", "Immune to Fatigue", "At 5th level, you are immune to the fatigued condition (but not exhaustion).", "b2b9ef97c1b54faeb552247e731d7270", Helpers.GetIcon("e5aa306af9b91974a9b2f2cbe702f562"), // mercy fatigue FeatureGroup.None, UnitCondition.Fatigued.CreateImmunity(), SpellDescriptor.Fatigue.CreateBuffImmunity()); var effortlessArmor = Helpers.CreateFeature("OracleCurseLameEffortlessArmor", "Effortless Armor", "At 10th level, your speed is never reduced by armor.", "fbe8560cf3f14cd58f380a8dc630b1c7", Helpers.GetIcon("e1291272c8f48c14ab212a599ad17aac"), // effortless armor FeatureGroup.None, // Conceptually similar to ArmorSpeedPenaltyRemoval, but doesn't need 2 ranks in the feat to work. 
AddMechanicsFeature.MechanicsFeatureType.ImmunToMediumArmorSpeedPenalty.CreateAddMechanics(), AddMechanicsFeature.MechanicsFeatureType.ImmunToArmorSpeedPenalty.CreateAddMechanics()); var exhaustionImmunity = Helpers.CreateFeature("OracleCurseLameExhaustionImmunity", "Immune to Exhausted", "At 15th level, you are immune to the exhausted condition.", "be45e9251c134ac9baee97e1e3ffc30a", Helpers.GetIcon("25641bda25467224e930e8c70eaf9a83"), // mercy exhausted FeatureGroup.None, UnitCondition.Exhausted.CreateImmunity(), SpellDescriptor.Exhausted.CreateBuffImmunity()); curse.LevelEntries = new LevelEntry[] { Helpers.LevelEntry(5, fatigueImmunity), Helpers.LevelEntry(10, effortlessArmor), Helpers.LevelEntry(15, exhaustionImmunity) }; curse.UIGroups = Helpers.CreateUIGroups(fatigueImmunity, effortlessArmor, exhaustionImmunity); return lameCurse = curse; } static BlueprintProgression CreateTonguesCurse() { // Tongues: // - the PC can't order party members in combat, unless the NPC has 1 rank in // Knowledge: World (linguistics). // - once NPC levels up, this will be a non-issue // - disable the restriction until level 2 (e.g. so prologue isn't affected). // // Basically, it's only an issue until each new NPC is leveled up once. // // Since languages aren't implemented, this instead gives a bonus to knowledge skills. var linguistics = UIUtility.GetStatText(StatType.SkillKnowledgeWorld); var curse = Helpers.CreateProgression("OracleCurseTongues", "Tongues", "In times of stress or unease, you speak in tongues.\n" + "Pick one of the following languages: Abyssal, Aklo, Aquan, Auran, Celestial, Ignan, Infernal, or Terran.\n" + $"Whenever you are in combat, you can only speak and understand the selected language. This does not interfere with spellcasting, but it does apply to spells that are language dependent. 
You know the selected language, and gain a +2 bonus to {linguistics} representing your knowledge of otherworldly languages.\n" + $"If your party members have at least 1 rank in {linguistics} they can communicate with you in combat, allowing you to issue orders to them or vice versa.", "983b66fc844a496da24acbcbdceebede", Helpers.GetIcon("f09453607e683784c8fca646eec49162"), // shout FeatureGroup.None, Helpers.CreateAddStatBonus(StatType.SkillKnowledgeWorld, 2, ModifierDescriptor.UntypedStackable), Helpers.Create<OracleCurseLogic>(o => o.Curse = OracleCurse.Tongues)); curse.Classes = oracleArray; var level5 = Helpers.CreateFeature($"{curse.name}Level5", "Bonus Language", $"At 5th level, you learn a new language, gaining an additional +2 {linguistics} representing this knowledge.", "7b08ed37b3034c94b5e00c7f507f1000", curse.Icon, FeatureGroup.None, Helpers.CreateAddStatBonus(StatType.SkillKnowledgeWorld, 2, ModifierDescriptor.UntypedStackable)); var level10 = Helpers.CreateFeature($"{curse.name}Level10", "Understand All Languages", $"At 10th level, you can understand any spoken language, as if under the effects of tongues, even during combat.\nYou gain an additional +4 {linguistics} representing this knowledge.", "9a38bf8a757e4980b4d07298d7cdad52", curse.Icon, FeatureGroup.None, Helpers.CreateAddStatBonus(StatType.SkillKnowledgeWorld, 4, ModifierDescriptor.UntypedStackable)); var level15 = Helpers.CreateFeature($"{curse.name}Level15", "Speak All Languages", $"At 15th level, you can speak and understand any language, but your speech is still restricted during combat.\nYou gain an additional +4 {linguistics} representing this knowledge.", "40ef931c66c94183a3a6b34454e6cde1", curse.Icon, FeatureGroup.None, Helpers.CreateAddStatBonus(StatType.SkillKnowledgeWorld, 4, ModifierDescriptor.UntypedStackable)); curse.LevelEntries = new LevelEntry[] { Helpers.LevelEntry(5, level5), Helpers.LevelEntry(10, level10), Helpers.LevelEntry(15, level15) }; curse.UIGroups = 
Helpers.CreateUIGroups(level5, level10, level15); Main.ApplyPatch(typeof(UnitEntityData_IsDirectlyControllable_Patch), "Tongues curse (party members not controllable without 1 rank Knowledge: world)"); return curse; } static BlueprintProgression CreateWastingCurse() { var curse = Helpers.CreateProgression("OracleCurseWasting", "Wasting", "Your body is slowly rotting away.\nYou take a –4 penalty on Charisma-based skill checks, except for Intimidate. You gain a +4 competence bonus on saves made against disease.", "12fcf38c71064c9a8e9a79e5d7c115bc", Helpers.GetIcon("4e42460798665fd4cb9173ffa7ada323"), // sickened FeatureGroup.None, Helpers.CreateAddStatBonus(StatType.CheckDiplomacy, -4, ModifierDescriptor.Penalty), Helpers.CreateAddStatBonus(StatType.CheckBluff, -4, ModifierDescriptor.Penalty), Helpers.CreateAddStatBonus(StatType.SkillUseMagicDevice, -4, ModifierDescriptor.Penalty), Helpers.Create<SavingThrowBonusAgainstDescriptor>(s => { s.Value = 4; s.SpellDescriptor = SpellDescriptor.Disease; s.ModifierDescriptor = ModifierDescriptor.Competence; })); curse.Classes = oracleArray; var level5 = Helpers.CreateFeature($"{curse.name}SickenImmunity", "Immune to Sickened", "At 5th level, you are immune to the sickened condition (but not nauseated).", "a325e582ba97456784cb3c0e206de8e0", Helpers.GetIcon("7ee2ef06226a4884f80b7647a2aa2dee"), // mercy sickened FeatureGroup.None, UnitCondition.Sickened.CreateImmunity(), SpellDescriptor.Sickened.CreateBuffImmunity()); var level10 = Helpers.CreateFeature($"{curse.name}DiseaseImmunity", "Immune to Disease", "At 10th level, you gain immunity to disease.", "ffebfb47717246c58304a01223c26086", Helpers.GetIcon("3990a92ce97efa3439e55c160412ce14"), // mercy diseased FeatureGroup.None, SpellDescriptor.Disease.CreateSpellImmunity(), SpellDescriptor.Disease.CreateBuffImmunity()); var level15 = Helpers.CreateFeature($"{curse.name}NauseatedImmunity", "Immune to Nauseated", "At 15th level, you are immune to the nauseated condition.", 
"9fb165ed9340414085930eb72b0661b6", Helpers.GetIcon("a0cacf71d872d2a42ae3deb6bf977962"), // mercy nauseated FeatureGroup.None, UnitCondition.Nauseated.CreateImmunity(), SpellDescriptor.Nauseated.CreateBuffImmunity()); curse.LevelEntries = new LevelEntry[] { Helpers.LevelEntry(5, level5), Helpers.LevelEntry(10, level10), Helpers.LevelEntry(15, level15) }; curse.UIGroups = Helpers.CreateUIGroups(level5, level10, level15); curse.Classes = oracleArray; return curse; } internal static BindAbilitiesToClass CreateBindToOracle(params BlueprintAbility[] abilities) { return Helpers.Create<BindAbilitiesToClass>(b => { b.Stat = StatType.Charisma; b.Abilites = abilities; b.CharacterClass = oracle; b.AdditionalClasses = Array.Empty<BlueprintCharacterClass>(); b.Archetypes = Array.Empty<BlueprintArchetype>(); }); } // Used by Flame Mystery Cinder Dance (to mark it incompatible). internal static BlueprintProgression lameCurse; } class CovetousCurseLogic : OwnedGameLogicComponent<UnitDescriptor>, IUnitGainLevelHandler, IItemsCollectionHandler { public BlueprintBuff Debuff; [JsonProperty] Buff appliedBuff; [JsonProperty] long lastMoney = 0; public static CovetousCurseLogic Create(BlueprintBuff debuff) { var c = Helpers.Create<CovetousCurseLogic>(); c.Debuff = debuff; return c; } public override void OnTurnOn() { CheckCovetous(false); base.OnTurnOn(); } public override void OnTurnOff() { appliedBuff?.Remove(); appliedBuff = null; base.OnTurnOff(); } public void HandleUnitGainLevel(UnitDescriptor unit, BlueprintCharacterClass @class) { if (unit == Owner) CheckCovetous(false); } public void HandleItemsAdded(ItemsCollection collection, ItemEntity item, int count) => CheckCovetous(true); public void HandleItemsRemoved(ItemsCollection collection, ItemEntity item, int count) => CheckCovetous(true); void CheckCovetous(bool checkMoneyChanged) { try { long money; if (Owner.IsPlayerFaction) { money = Game.Instance.Player.Money; } else { money = 
Owner.Inventory.Count(BlueprintRoot.Instance.SystemMechanics.GoldCoin); } if (checkMoneyChanged && money == lastMoney) return; lastMoney = money; var requiredMoney = Owner.Progression.CharacterLevel * 200 + 100; Log.Append($"Covetous curse: check {Owner.CharacterName}, money {money}, requires {requiredMoney}"); if (money < requiredMoney) { if (appliedBuff == null) { appliedBuff = Owner.AddBuff(Debuff, Owner.Unit); if (appliedBuff == null) return; appliedBuff.IsNotDispelable = true; appliedBuff.IsFromSpell = false; } } else { appliedBuff?.Remove(); appliedBuff = null; } } catch (Exception e) { Log.Error(e); } } } [AllowedOn(typeof(BlueprintUnitFact))] [AllowedOn(typeof(BlueprintUnit))] [AllowMultipleComponents] public class OracleCurseLogic : OwnedGameLogicComponent<UnitDescriptor> { public OracleCurse Curse; public override void OnTurnOn() => Owner.Ensure<UnitPartOracleCurse>().Curses |= Curse; public override void OnTurnOff() => Owner.Ensure<UnitPartOracleCurse>().Curses &= ~Curse; } [AllowedOn(typeof(BlueprintUnitFact))] [AllowMultipleComponents] public class OracleCurseLameSpeedPenalty : OracleCurseLogic { [JsonProperty] private ModifiableValue.Modifier m_Modifier; public OracleCurseLameSpeedPenalty() { Curse = OracleCurse.Lame; } public override void OnTurnOn() { var speed = Owner.Stats.Speed; var penalty = speed.Racial >= 30 ? 
-10 : -5; m_Modifier = speed.AddModifier(penalty, this, ModifierDescriptor.Penalty); base.OnTurnOn(); } public override void OnTurnOff() { m_Modifier?.Remove(); m_Modifier = null; base.OnTurnOff(); } } [Harmony12.HarmonyPatch(typeof(UnitPartEncumbrance), "GetSpeedPenalty", typeof(UnitDescriptor), typeof(Encumbrance))] static class UnitPartEncumbrance_GetSpeedPenalty_Patch { static void Postfix(UnitDescriptor owner, Encumbrance encumbrance, ref int __result) { if (__result < 0 && owner.Get<UnitPartOracleCurse>()?.HasLame == true) { __result = 0; } } } // Kingmaker.Controllers.PartyEncumbranceController [Harmony12.HarmonyPatch(typeof(PartyEncumbranceController), "UpdatePartyEncumbrance", new Type[0])] static class PartyEncumbranceController_UpdatePartyEncumbrance_Patch { static bool Prefix() { try { var player = Game.Instance.Player; if (player.Party.Any(u => u.Get<UnitPartOracleCurse>()?.HasLame == true)) { if (player.Encumbrance != Encumbrance.Light) { player.Encumbrance = Encumbrance.Light; EventBus.RaiseEvent((IPartyEncumbranceHandler p) => p.ChangePartyEncumbrance()); } return false; } } catch (Exception e) { Log.Error(e); } return true; } } [Harmony12.HarmonyPatch(typeof(UnitEntityData), "get_IsDirectlyControllable", new Type[0])] static class UnitEntityData_IsDirectlyControllable_Patch { static void Postfix(UnitEntityData __instance, ref bool __result) { try { if (!__result) return; if (Main.settings?.RelaxTonguesCurse == true) return; // Tongues only has effect in combat. var self = __instance; if (!self.IsInCombat) return; // PC and PC's pet are always controllable. var mainChar = Game.Instance.Player.MainCharacter; var npc = self.Descriptor; if (self == mainChar || npc.Master == mainChar) return; // Don't apply the penalty until we've had an opportunity to level up. 
var pc = mainChar.Value.Descriptor; if (pc.Progression.CharacterLevel < 2) return; // If either PC or NPC has the Tongues curse, and the other party // doesn't have 1 rank in linguistics (Knowledge: World), then they // can't be communicated with in combat (i.e. ordered around). if (pc.Stats.SkillKnowledgeWorld.BaseValue == 0 && npc.Get<UnitPartOracleCurse>()?.HasTongues == true || npc.Stats.SkillKnowledgeWorld.BaseValue == 0 && pc.Get<UnitPartOracleCurse>()?.HasTongues == true) { // Tongues curse: can't talk to this party member in combat. __result = false; return; } } catch (Exception e) { Log.Error(e); } } } // Note: this patch could be avoided if curses were BlueprintFeatures, with a component that // knows how to compute the curse level. But that doesn't look as nice in the UI as progressions. // // Another option is to have a Progression just for the UI (no functionality), but that seems // rather complex and might lead to issues (e.g. mismatch between UI and implementation.) [Harmony12.HarmonyPatch(typeof(BlueprintProgression), "CalcLevel", new Type[] { typeof(UnitDescriptor) })] static class BlueprintProgression_CalcLevel_Patch { internal static readonly Dictionary<BlueprintProgression, Func<UnitDescriptor, int>> onCalcLevel = new Dictionary<BlueprintProgression, Func<UnitDescriptor, int>>(); static BlueprintProgression_CalcLevel_Patch() => Main.ApplyPatch(typeof(BlueprintProgression_CalcLevel_Patch), "Oracle curse advancement for non-oracle levels"); static bool Prefix(BlueprintProgression __instance, UnitDescriptor unit, ref int __result) { try { Func<UnitDescriptor, int> calcLevel; if (onCalcLevel.TryGetValue(__instance, out calcLevel)) { __result = calcLevel(unit); return false; } } catch (Exception e) { Log.Error(e); } return true; } } [Flags] public enum OracleCurse { Tongues = 0x1, Haunted = 0x2, Lame = 0x4, Deaf = 0x8 } // Used for curses where we need to be able to quickly look up information on the unit. 
// We don't track all curses this way, only those that need method patching (or that // need fast lookups for some other reason.) This is similar to UnitMechanicsFeatures. public class UnitPartOracleCurse : UnitPart { [JsonProperty] public OracleCurse Curses; [JsonProperty] public float CloudedVisionDistance; public bool HasCloudedVision => CloudedVisionDistance != 0; public bool HasTongues => (Curses & OracleCurse.Tongues) != 0; public bool HasHaunted => (Curses & OracleCurse.Haunted) != 0; public bool HasLame => (Curses & OracleCurse.Lame) != 0; public bool HasDeaf => (Curses & OracleCurse.Deaf) != 0; } [AllowedOn(typeof(BlueprintProgression))] public class CloudedVisionLogic : RuleInitiatorLogicComponent<RuleConcealmentCheck>, ILevelUpCompleteUIHandler { static CloudedVisionLogic() { var description = "Oracle Clouded Vision curse reduced range"; Main.ApplyPatch(typeof(AbilityData_GetVisualDistance_Patch), description); Main.ApplyPatch(typeof(AbilityData_GetApproachDistance_Patch), description); } public override void OnTurnOn() { Log.Write($"{GetType().Name}::OnTurnOn"); UpdateRange(); } public override void OnTurnOff() { Log.Write($"{GetType().Name}::OnTurnOff"); Owner.Ensure<UnitPartOracleCurse>().CloudedVisionDistance = 0; } public override void OnEventAboutToTrigger(RuleConcealmentCheck evt) { } public override void OnEventDidTrigger(RuleConcealmentCheck evt) { try { var initiator = evt.Initiator; var target = evt.Target; var part = initiator.Get<UnitPartOracleCurse>(); if (part == null) return; var distance = initiator.DistanceTo(target); var sightDistance = part.CloudedVisionDistance; sightDistance += (initiator.View?.Corpulence ?? 0.5f) + (target?.View.Corpulence ?? 0.5f); if (distance > sightDistance) { var isFar = distance > (sightDistance + 5.Feet().Meters); set_Concealment(evt, isFar ? 
Concealment.Total : Concealment.Partial); } } catch (Exception e) { Log.Error(e); } } void ILevelUpCompleteUIHandler.HandleLevelUpComplete(UnitEntityData unit, bool isChargen) { if (unit.Descriptor == Owner) UpdateRange(); } void UpdateRange() { try { int level = ((BlueprintProgression)Fact.Blueprint).CalcLevel(Owner); var range = (level >= 5 ? 30 : 20).Feet().Meters; Owner.Ensure<UnitPartOracleCurse>().CloudedVisionDistance = range; } catch (Exception e) { Log.Error(e); } } static FastSetter set_Concealment = Helpers.CreateSetter<RuleConcealmentCheck>("Concealment"); } [Harmony12.HarmonyPatch(typeof(AbilityData), "GetVisualDistance")] static class AbilityData_GetVisualDistance_Patch { static void Postfix(AbilityData __instance, ref float __result) { AbilityData_GetApproachDistance_Patch.Postfix(__instance, null, ref __result); } } [Harmony12.HarmonyPatch(typeof(AbilityData), "GetApproachDistance")] static class AbilityData_GetApproachDistance_Patch { internal static void Postfix(AbilityData __instance, UnitEntityData target, ref float __result) { try { var caster = __instance.Caster; var part = caster.Get<UnitPartOracleCurse>(); if (part?.HasCloudedVision == true) { var maxRange = part.CloudedVisionDistance + (caster.Unit.View?.Corpulence ?? 0.5f) + (target?.View.Corpulence ?? 0.5f); var original = __result; __result = Math.Min(maxRange, original); Log.Write($"Clouded Vision: adjust range from {original} to {__result} (max range: {maxRange})"); } } catch (Exception e) { Log.Error(e); } } } // This handles the "retrieving an item is a standard action". // "Moving dropped item randomly" is handled by ItemsCollection_DropItem_Patch, below. 
public class HauntedCurseLogic : OracleCurseLogic, IUnitEquipmentHandler { public HauntedCurseLogic() { Curse = OracleCurse.Haunted; } public void HandleEquipmentSlotUpdated(ItemSlot slot, ItemEntity previousItem) { if (slot.Owner == Owner && Owner.Unit.IsInCombat && slot.HasItem && (bool)Helpers.GetField(typeof(ItemsCollection), null, "s_RaiseEvents")) { Log.Write($"Haunted curse: used standard action to retrieve item \"{slot.Item.Name}\""); Owner.Unit.CombatState.Cooldown.StandardAction += 6; } } } [Harmony12.HarmonyPatch(typeof(ItemsCollection), "DropItem", typeof(ItemEntity))] static class ItemsCollection_DropItem_Patch { static bool Prefix(ItemsCollection __instance, ItemEntity item) { try { var self = __instance; if (item.Collection != self || !item.Collection.IsPlayerInventory) return true; // We don't know which character dropped it, so we need to check everyone. // TODO: cache this check and update on level up? if (!Game.Instance.Player.Party.Any(p => p.Descriptor.Get<UnitPartOracleCurse>()?.HasHaunted == true)) { return true; } Log.Write("Haunted curse: moving dropped item randomly 10ft away."); var player = Game.Instance.Player.MainCharacter.Value; var position = player.Position + GeometryUtils.To3D(UnityEngine.Random.insideUnitCircle * 10.Feet().Meters); var drop = Game.Instance.EntityCreator.SpawnEntityView(BlueprintRoot.Instance.Prefabs.DroppedLootBag, position, player.View.transform.rotation, Game.Instance.State.LoadedAreaState.MainState); drop.Loot = new ItemsCollection(); drop.IsDroppedByPlayer = true; self.Transfer(item, drop.Loot); return false; } catch (Exception e) { Log.Error(e); return true; } } } [Harmony12.HarmonyPatch(typeof(AbilityData), "get_VoiceIntensity")] static class AbilityData_VoiceIntensity_Patch { static void Postfix(AbilityData __instance, ref AbilityData.VoiceIntensityType __result) { var self = __instance; try { if (__result != AbilityData.VoiceIntensityType.None && self.Caster.Get<UnitPartOracleCurse>()?.HasDeaf == true) { 
Log.Write("Deaf curse: cast spells using Silent Spell"); __result = AbilityData.VoiceIntensityType.None; } } catch (Exception e) { Log.Error(e); } } } [ComponentName("Add stat bonus based on character level")] [AllowedOn(typeof(BlueprintUnitFact))] [AllowedOn(typeof(BlueprintUnit))] [AllowMultipleComponents] public class AddStatBonusOnCurseLevel : AddStatBonusOnLevel { public static AddStatBonusOnCurseLevel Create(StatType stat, int value, ModifierDescriptor descriptor, int minLevel = 1, int maxLevel = 20) { var addStat = Helpers.Create<AddStatBonusOnCurseLevel>(); addStat.Stat = stat; addStat.Value = value; addStat.Descriptor = descriptor; addStat.MinLevel = minLevel; addStat.MaxLevelInclusive = maxLevel; return addStat; } protected override bool CheckLevel(UnitDescriptor unit) { int level = OracleCurses.CalculateCurseLevel(unit); return level >= MinLevel && level <= MaxLevelInclusive; } } }
47.500982
430
0.617731
[ "MIT" ]
OldIronEyes/pathfinder-mods
EldritchArcana/Oracle/OracleCurses.cs
48,368
C#
// // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // namespace Microsoft.PackageManagement.Internal.Utility.Plugin { using System; using System.Collections.Generic; using System.Linq; using System.Reflection; using Collections; using Extensions; public static class DynamicInterfaceExtensions { private static readonly Type[] _emptyTypes = { }; private static readonly Dictionary<Type, MethodInfo[]> _methodCache = new Dictionary<Type, MethodInfo[]>(); private static readonly Dictionary<Type[], MethodInfo[]> _methodCacheForTypes = new Dictionary<Type[], MethodInfo[]>(); private static readonly Dictionary<Type, FieldInfo[]> _delegateFieldsCache = new Dictionary<Type, FieldInfo[]>(); private static readonly Dictionary<Type, PropertyInfo[]> _delegatePropertiesCache = new Dictionary<Type, PropertyInfo[]>(); private static readonly Dictionary<Type, MethodInfo[]> _requiredMethodsCache = new Dictionary<Type, MethodInfo[]>(); private static readonly Dictionary<Type, MethodInfo[]> _virtualMethodsCache = new Dictionary<Type, MethodInfo[]>(); private static readonly Dictionary<Assembly, Type[]> _creatableTypesCache = new Dictionary<Assembly, Type[]>(); public static MethodInfo FindMethod(this MethodInfo[] methods, MethodInfo methodSignature) { // this currently returns the first thing that matches acceptably. // we'd really like to find the *best* match, but still be able to have the earlier ones override the later ones. 
// which is return methods.FirstOrDefault(candidate => DoNamesMatchAcceptably(methodSignature.Name, candidate.Name) && DoSignaturesMatchAcceptably(methodSignature, candidate)); } public static MethodInfo FindMethod(this MethodInfo[] methods, Type delegateType) { return methods.FirstOrDefault(candidate => DoNamesMatchAcceptably(delegateType.Name, candidate.Name) && delegateType.IsDelegateAssignableFromMethod(candidate)); } public static Delegate FindDelegate(this FieldInfo[] fields, object actualInstance, MethodInfo signature) { return (from field in fields let value = field.GetValue(actualInstance) as Delegate where DoNamesMatchAcceptably(signature.Name, field.Name) && field.FieldType.IsDelegateAssignableFromMethod(signature) && value != null select value).FirstOrDefault(); } public static Delegate FindDelegate(this PropertyInfo[] properties, object actualInstance, MethodInfo signature) { return (from property in properties let value = property.GetValue(actualInstance, null) as Delegate where DoNamesMatchAcceptably(signature.Name, property.Name) && property.PropertyType.IsDelegateAssignableFromMethod(signature) && value != null select value).FirstOrDefault(); } public static Delegate FindDelegate(this FieldInfo[] fields, object actualInstance, Type delegateType) { return (from candidate in fields let value = candidate.GetValue(actualInstance) as Delegate where value != null && DoNamesMatchAcceptably(delegateType.Name, candidate.Name) && delegateType.IsDelegateAssignableFromDelegate(value.GetType()) select value).FirstOrDefault(); } public static Delegate FindDelegate(this PropertyInfo[] properties, object actualInstance, Type delegateType) { return (from candidate in properties let value = candidate.GetValue(actualInstance, null) as Delegate where value != null && DoNamesMatchAcceptably(delegateType.Name, candidate.Name) && delegateType.IsDelegateAssignableFromDelegate(value.GetType()) select value).FirstOrDefault(); } private static bool 
DoNamesMatchAcceptably(string originalName, string candidateName) { if (string.IsNullOrWhiteSpace(originalName) || string.IsNullOrWhiteSpace(candidateName) || originalName[0] == '_' || candidateName[0] == '_') { // names that start with underscores are considered to be private and not supported. return false; } if (originalName.EqualsIgnoreCase(candidateName)) { return true; } // transform non-leading underscores to nothing. candidateName = candidateName.Replace("_", ""); originalName = originalName.Replace("_", ""); // this allows GetSomething to be the same as Get_Some_thing() or get_Something ... if (originalName.EqualsIgnoreCase(candidateName)) { return true; } return false; } private static bool DoSignaturesMatchAcceptably(MethodInfo member, MethodInfo candidate) { return candidate.GetParameterTypes().SequenceEqual(member.GetParameterTypes(), AssignableTypeComparer.Instance) && (AssignableTypeComparer.IsAssignableOrCompatible(member.ReturnType, candidate.ReturnType) || member.ReturnType == typeof(void)); } internal static MethodInfo[] GetPublicMethods(this Type type) { return _methodCache.GetOrAdd(type, () => type != null ? type.GetMethods(BindingFlags.FlattenHierarchy | BindingFlags.Public | BindingFlags.Instance) : new MethodInfo[0]); } internal static MethodInfo[] GetPublicMethods(this Type[] types) { return _methodCacheForTypes.GetOrAdd(types, () => types.SelectMany(each => each.GetPublicMethods()).ToArray()); } internal static IEnumerable<FieldInfo> GetPublicFields(this Type type) { return type != null ? 
type.GetFields(BindingFlags.FlattenHierarchy | BindingFlags.Instance | BindingFlags.Public) : Enumerable.Empty<FieldInfo>(); } internal static FieldInfo[] GetPublicDelegateFields(this Type type) { return _delegateFieldsCache.GetOrAdd(type, () => type.GetPublicFields().Where(each => each.FieldType.GetTypeInfo().BaseType == typeof(MulticastDelegate)).ToArray()); } internal static PropertyInfo[] GetPublicDelegateProperties(this Type type) { return _delegatePropertiesCache.GetOrAdd(type, () => type.GetPublicProperties().Where(each => each.PropertyType.GetTypeInfo().BaseType == typeof(MulticastDelegate)).ToArray()); } internal static IEnumerable<PropertyInfo> GetPublicProperties(this Type type) { return type != null ? type.GetProperties(BindingFlags.FlattenHierarchy | BindingFlags.Instance | BindingFlags.Public) : Enumerable.Empty<PropertyInfo>(); } private static IEnumerable<MethodInfo> DisambiguateMethodsBySignature(params IEnumerable<MethodInfo>[] setsOfMethods) { var unique = new HashSet<string>(); return setsOfMethods.SelectMany(methodSet => methodSet.ReEnumerable()).Where(method => { var sig = method.ToSignatureString(); if (!unique.Contains(sig)) { unique.Add(sig); return true; } return false; }); } internal static MethodInfo[] GetVirtualMethods(this Type type) { return _virtualMethodsCache.GetOrAdd(type, () => { var methods = (type.GetTypeInfo().IsInterface ? 
(IEnumerable<MethodInfo>)type.GetMethods(BindingFlags.FlattenHierarchy | BindingFlags.Public | BindingFlags.Instance) : (IEnumerable<MethodInfo>)type.GetMethods(BindingFlags.FlattenHierarchy | BindingFlags.Public | BindingFlags.Instance).Where(each => each.IsAbstract || each.IsVirtual)); methods = methods.Where(each => each.Name != "Dispose"); // option 1: // if the target type is a class, and implements an interface -- and the implementation of that interface is already present (ie, abstract class Foo : IDisposable { public void Dispose() {} } ) then // the generated type should not try to create a method for that interface // option 2: // I think we're just talking about IDisposable here. maybe we shouldn't try to ducktype IDisposable at all. // try option2 : #if DEEP_DEBUG var ifaces = type.GetInterfaces().ToArray(); if (ifaces.Any()) { Console.WriteLine("Interface: {0}",ifaces.Select(each=>each.Name).JoinWithComma()); if (ifaces.Any(each => each == typeof (IDisposable))) { Debugger.Break(); } } #endif var interfaceMethods = type.GetInterfaces().Where(each => each != typeof(IDisposable)).SelectMany(GetVirtualMethods); return DisambiguateMethodsBySignature(methods, interfaceMethods).ToArray(); }); } internal static MethodInfo[] GetRequiredMethods(this Type type) { return _requiredMethodsCache.GetOrAdd(type, () => { var i = type.GetVirtualMethods().Where(each => each.GetCustomAttributes(true).Any(attr => attr.GetType().Name.Equals("RequiredAttribute", StringComparison.OrdinalIgnoreCase))).ToArray(); return i; }); } internal static ConstructorInfo GetDefaultConstructor(this Type t) { try { return t.GetConstructor(_emptyTypes); } catch { } return null; } internal static string ToSignatureString(this MethodInfo method) { return "{0} {1}({2})".format(method.ReturnType.Name, method.Name, method.GetParameters().Select(each => "{0} {1}".format(each.ParameterType.NiceName(), each.Name)).JoinWithComma()); } public static string NiceName(this Type type) { if 
(!type.GetTypeInfo().IsGenericType) { return type.Name; } var typeName = type.GetGenericTypeDefinition().Name; typeName = typeName.Substring(0, typeName.IndexOf('`')); return typeName + "<" + string.Join(",", type.GetGenericArguments().Select(NiceName).ToArray()) + ">"; } public static string FullNiceName(this Type type) { if (!type.GetTypeInfo().IsGenericType) { return type.FullName; } var typeName = type.GetGenericTypeDefinition().FullName; typeName = typeName.Substring(0, typeName.IndexOf('`')); return typeName + "<" + string.Join(",", type.GetGenericArguments().Select(NiceName).ToArray()) + ">"; } internal static Func<string, bool> GenerateInstancesSupportsMethod(object[] actualInstance) { var ism = actualInstance.Select(GenerateInstanceSupportsMethod).ToArray(); return (s) => ism.Any(each => each(s)); } internal static Func<string, bool> GenerateInstanceSupportsMethod(object actualInstance) { // if the object implements an IsMethodImplemented Method, we'll be using that // to see if the method is actually supposed to be used. // this enables an implementor to physically implement the function in the class // yet treat it as if it didn't. (see the PowerShellPackageProvider) var imiMethodInfo = actualInstance.GetType().GetMethod("IsMethodImplemented", new[] { typeof (string) }); return imiMethodInfo == null ? (s) => true : actualInstance.CreateProxiedDelegate<Func<string, bool>>(imiMethodInfo); } /// <summary> /// This extension uses the DuckTyper to transform an object into a given interface or type. /// </summary> /// <typeparam name="TInterface"></typeparam> /// <param name="instance"></param> /// <returns></returns> public static TInterface As<TInterface>(this object instance) { if (typeof(TInterface).IsDelegate()) { // find a function in this object that matches the delegate that we are given // and return that instead. 
if (instance.GetType().IsDelegate()) { if (typeof(TInterface).IsDelegateAssignableFromDelegate(instance.GetType())) { return ((Delegate)instance).CreateProxiedDelegate<TInterface>(); } throw new Exception("Delegate '{0}' can not be created from Delegate '{1}'.".format(typeof(TInterface).NiceName(), instance.GetType().NiceName())); } var instanceSupportsMethod = GenerateInstanceSupportsMethod(instance); var instanceType = instance.GetType(); var instanceMethods = instanceType.GetPublicMethods(); var instanceFields = instanceType.GetPublicDelegateFields(); var instanceProperties = instanceType.GetPublicDelegateProperties(); if (!instanceSupportsMethod(typeof(TInterface).Name)) { throw new Exception("Generation of Delegate '{0}' not supported from object.".format(typeof(TInterface).NiceName())); } var method = instanceMethods.FindMethod(typeof(TInterface)); if (method != null) { return instance.CreateProxiedDelegate<TInterface>(method); } var instanceDelegate = instanceFields.FindDelegate(instance, typeof(TInterface)) ?? 
instanceProperties.FindDelegate(instance, typeof(TInterface)); if (instanceDelegate != null) { if (instanceDelegate is TInterface) { return (TInterface)(object)instanceDelegate; } return instanceDelegate.CreateProxiedDelegate<TInterface>(); } return (TInterface)(object)typeof(TInterface).CreateEmptyDelegate(); // throw new Exception("Delegate '{0}' not matched in object.".format(typeof (TInterface).NiceName())); } return DynamicInterface.DynamicCast<TInterface>(instance); } public static TInterface Extend<TInterface>(this object obj, params object[] objects) { return DynamicInterface.DynamicCast<TInterface>(objects, obj); } public static bool IsDelegate(this Type t) { return t.GetTypeInfo().BaseType == typeof(MulticastDelegate); } public static bool IsIEnumerableT(this Type t) { #if FRAMEWORKv45 return t.IsConstructedGenericType && t.GetGenericTypeDefinition() == typeof(IEnumerable<>); #else try { return t.GetGenericTypeDefinition() == typeof (IEnumerable<>); } catch { } return false; #endif } public static IEnumerable<Type> CreatableTypes(this Assembly assembly) { #if DEEPDEBUG var x = _creatableTypesCache.GetOrAdd(assembly, () => assembly.GetTypes().Where(each => each.GetTypeInfo().GetTypeInfo().IsPublic && !each.GetTypeInfo().IsEnum && !each.GetTypeInfo().IsInterface && !each.GetTypeInfo().IsAbstract && each.GetDefaultConstructor() != null && each.GetTypeInfo().BaseType != typeof(MulticastDelegate)).ToArray()); foreach (var i in x) { Debug.WriteLine("Creatable Type in assembly {0} - {1}", assembly.GetName(), i.Name); } #endif #if CORECLR return _creatableTypesCache.GetOrAdd<Assembly, Type[]>(assembly, () => assembly.DefinedTypes.Where(each => each.IsPublic && !each.IsEnum && !each.IsInterface && !each.IsAbstract && each.AsType().GetDefaultConstructor() != null && each.BaseType != typeof (MulticastDelegate)).Select(item => item.AsType()).ToArray()); #else return _creatableTypesCache.GetOrAdd(assembly, () => assembly.GetTypes().Where(each => each.IsPublic && 
!each.IsEnum && !each.IsInterface && !each.IsAbstract && each.GetDefaultConstructor() != null && each.BaseType != typeof (MulticastDelegate)).ToArray()); #endif } } }
50.340909
354
0.611061
[ "MIT" ]
DalavanCloud/oneget
src/Microsoft.PackageManagement/Utility/Plugin/DynamicInterfaceExtensions.cs
17,720
C#
// *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. *** // *** Do not edit by hand unless you're certain you know what you are doing! *** using System; using System.Collections.Generic; using System.Collections.Immutable; using System.Threading.Tasks; using Pulumi.Serialization; namespace Pulumi.Azure.Storage.Outputs { [OutputType] public sealed class AccountIdentity { /// <summary> /// A list of IDs for User Assigned Managed Identity resources to be assigned. /// </summary> public readonly ImmutableArray<string> IdentityIds; /// <summary> /// The Principal ID for the Service Principal associated with the Identity of this Storage Account. /// </summary> public readonly string? PrincipalId; /// <summary> /// The Tenant ID for the Service Principal associated with the Identity of this Storage Account. /// </summary> public readonly string? TenantId; /// <summary> /// Specifies the identity type of the Storage Account. Possible values are `SystemAssigned`, `UserAssigned`, `SystemAssigned,UserAssigned` (to enable both). /// </summary> public readonly string Type; [OutputConstructor] private AccountIdentity( ImmutableArray<string> identityIds, string? principalId, string? tenantId, string type) { IdentityIds = identityIds; PrincipalId = principalId; TenantId = tenantId; Type = type; } } }
32.34
165
0.630798
[ "ECL-2.0", "Apache-2.0" ]
ScriptBox99/pulumi-azure
sdk/dotnet/Storage/Outputs/AccountIdentity.cs
1,617
C#
//using System; //using System.Linq; //using AgileBrowser.WinForms.Security; //using DotNetAuth.OAuth1a; //using DotNetAuth.OAuth1a.Framework; //using RestSharp; //using RestSharp.Authenticators; //public class OAuth1aAuthenticator : IAuthenticator //{ // private OAuth1aProviderDefinition definition; // private ApplicationCredentials credentials; // private string access_token; // private string access_token_secret; // private string method; // public OAuth1aAuthenticator(OAuth1aProviderDefinition oAuthDefinition, string consumerKey, string privateKey, // string accessToken, string accessTokenSecret) // { // var consumerSecret = privateKey.Replace("-----BEGIN PRIVATE KEY-----", "").Replace("-----END PRIVATE KEY-----", "").Replace("\r\n", "").Replace("\n", ""); // var keyInfo = opensslkey.DecodePrivateKeyInfo(Convert.FromBase64String(consumerSecret)); // var applicationCredentials = new ApplicationCredentials // { // ConsumerKey = consumerKey, // ConsumerSecret = keyInfo.ToXmlString(true) // }; // this.definition = oAuthDefinition; // this.credentials = applicationCredentials; // this.access_token = accessToken; // this.access_token_secret = accessTokenSecret; // } // public void Authenticate(IRestClient client, IRestRequest request) // { // method = request.Method.ToString(); // string url = client.BuildUri(request) // .ToString(); // int queryStringStart = url.IndexOf('?'); // string leftPart = url; // if (queryStringStart != -1) // { // leftPart = url.Substring(0, queryStringStart); // } // var contentType = request.Parameters.Where(x => x.ContentType == "application/x-www-form-urlencoded"); // ParameterSet parameterSet = ParameterSet.FromResponseBody(request.Resource); // ParameterSet parameterSet2 = (contentType.Any()) ? 
ParameterSet.FromResponseBody(contentType.FirstOrDefault().Value.ToString()) : new ParameterSet(); // ParameterSet authorizationParameters = definition.GetAuthorizationParameters(credentials, access_token); // string signature = definition.GetSignature(credentials.ConsumerSecret, access_token_secret, leftPart, method, new ParameterSet[] // { // authorizationParameters, // parameterSet, // parameterSet2 // }); // authorizationParameters.Add("oauth_signature", signature, null); // string authorizationHeader = definition.GetAuthorizationHeader(authorizationParameters); // request.AddHeader("Authorization", authorizationHeader); // request.AddHeader("Accept", "application/json"); // } //}
40.985294
164
0.667743
[ "MIT" ]
lurume84/agile-browser
AgileBrowser/OAuth/OAuth1aAuthenticator.cs
2,787
C#
using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; // Sockets using System.Net.Sockets; using System.Net; // debug using System.Diagnostics; // threading using System.Threading; // byte data serialization using System.Runtime.Serialization.Formatters.Binary; // memory streams using System.IO; namespace SampleUDPPeer { partial class Model { // make GameData serializeable. Otherwise, we can't send it over // a byte stream (e.g. socket) [Serializable] struct GameData { public int data1, data2; public String message; public GameData(int p, int s, String msg) { data1 = p; data2 = s; message = msg; } } // this is the UDP socket that will be used to communicate // over the network UdpClient _dataSocket; // some data that keeps track of ports and addresses private static UInt32 _localPort; private static String _localIPAddress; private static UInt32 _remotePort; private static String _remoteIPAddress; // this is the thread that will run in the background // waiting for incomming data private Thread _receiveDataThread; // this thread is used to synchronize the startup of // two UDP peers private Thread _synchWithOtherPlayerThread; private Random _randomNumber; public Model() { _randomNumber = new Random(); Data1 = _randomNumber.Next(100).ToString(); Data2 = _randomNumber.Next(100).ToString(); // this disables the Send button initially SendEnabled = false; // initialize the help test HelpText = "Select Socket Setup button to being."; } /// <summary> /// this method is called to set this UDP peer's local port and address /// </summary> /// <param name="port"></param> /// <param name="ipAddress"></param> public void SetLocalNetworkSettings(UInt32 port, String ipAddress) { _localPort = port; _localIPAddress = ipAddress; } /// <summary> /// this method is called to set the remote UDP peer's port and address /// </summary> /// <param name="port"></param> /// <param name="ipAddress"></param> public void 
SetRemoteNetworkSettings(UInt32 port, String ipAddress) { _remotePort = port; _remoteIPAddress = ipAddress; } /// <summary> /// initialize the necessary data, and start the synchronization /// thread to wait for the other peer to join /// </summary> /// <returns></returns> public bool InitModel() { try { // set up generic UDP socket and bind to local port // _dataSocket = new UdpClient((int)_localPort); } catch (Exception ex) { Debug.Write(ex.ToString()); return false; } ThreadStart threadFunction; threadFunction = new ThreadStart(SynchWithOtherPlayer); _synchWithOtherPlayerThread = new Thread(threadFunction); StatusTextBox = StatusTextBox + DateTime.Now + ":" + " Waiting for other UDP peer to join.\n"; _synchWithOtherPlayerThread.Start(); // reset help text HelpText = ""; return true; } /// <summary> /// called to send some data to the other UDP peer /// </summary> public void SendMessage() { // data structure used to communicate data with the other player GameData gameData; // formatter used for serialization of data BinaryFormatter formatter = new BinaryFormatter(); // stream needed for serialization MemoryStream stream = new MemoryStream(); // Byte array needed to send data over a socket Byte[] sendBytes; // check to make sure boxes have something in them to send if (MeBox == "" || Data1 == "" || Data2 == "") { StatusTextBox = StatusTextBox + DateTime.Now + " Empty boxes! Try again.\n"; return; } // we make sure that the data in the boxes is in the correct format try { gameData.data1 = int.Parse(Data1); gameData.data2 = int.Parse(Data2); gameData.message = MeBox; } catch (System.Exception) { // we get here if the format of teh data in the boxes was incorrect. Most likely the boxes we assumed // had integers in them had characters as well StatusTextBox = StatusTextBox + DateTime.Now + " Data not in correct format! 
Try again.\n"; return; } // serialize the gameData structure to a stream formatter.Serialize(stream, gameData); // retrieve a Byte array from the stream sendBytes = stream.ToArray(); // send the serialized data IPEndPoint remoteHost = new IPEndPoint(IPAddress.Parse(_remoteIPAddress), (int)_remotePort); try { _dataSocket.Send(sendBytes, sendBytes.Length, remoteHost); } catch (SocketException) { StatusTextBox = StatusTextBox + DateTime.Now + ":" + " ERROR: Message not sent!\n"; return; } StatusTextBox = StatusTextBox + DateTime.Now + ":" + " Message sent successfully.\n"; } /// <summary> /// called when the view is closing to ensure we clean up our socket /// if we don't, the application may hang on exit /// </summary> public void Model_Cleanup() { // important. Close socket or application will not exit correctly. if (_dataSocket != null) _dataSocket.Close(); if (_receiveDataThread != null) _receiveDataThread.Abort(); } // this is the thread that waits for incoming messages private void ReceiveThreadFunction() { IPEndPoint endPoint = new IPEndPoint(IPAddress.Any, 0); while (true) { try { // wait for data Byte[] receiveData = _dataSocket.Receive(ref endPoint); // check to see if this is synchronization data // ignore it. we should not recieve any sychronization // data here, because synchronization data should have // been consumed by the SynchWithOtherPlayer thread. 
but, // it is possible to get 1 last synchronization byte, which we // want to ignore if (receiveData.Length < 2) continue; // process and display data GameData gameData; BinaryFormatter formatter = new BinaryFormatter(); MemoryStream stream = new MemoryStream(); // deserialize data back into our GameData structure stream = new System.IO.MemoryStream(receiveData); gameData = (GameData)formatter.Deserialize(stream); // update view data through our bound properties MyFriendBox = gameData.message; Data1 = gameData.data1.ToString(); Data2 = gameData.data2.ToString(); // update status window StatusTextBox = StatusTextBox + DateTime.Now + ":" + " New message received.\n"; } catch (SocketException ex) { // got here because either the Receive failed, or more // or more likely the socket was destroyed by // exiting from the JoystickPositionWindow form Console.WriteLine(ex.ToString()); return; } catch (Exception ex) { Console.Write(ex.ToString()); } } } /// <summary> /// this thread is used at initialization to synchronize with the other /// UDP peer /// </summary> private void SynchWithOtherPlayer() { // set up socket for sending synch byte to UDP peer // we can't use the same socket (i.e. 
_dataSocket) in the same thread context in this manner // so we need to set up a separate socket here Byte[] data = new Byte[1]; IPEndPoint endPointSend = new IPEndPoint(IPAddress.Parse(_remoteIPAddress), (int)_remotePort); IPEndPoint endPointRecieve = new IPEndPoint(IPAddress.Any,0); UdpClient synchSocket = new UdpClient((int)_localPort + 10); // set timeout of receive to 1 second _dataSocket.Client.ReceiveTimeout = 1000; while (true) { try { synchSocket.Send(data, data.Length, endPointSend); _dataSocket.Receive(ref endPointRecieve); // got something, so break out of loop break; } catch (SocketException ex) { // we get an exception if there was a timeout // if we timed out, we just go back and try again if (ex.ErrorCode == (int)SocketError.TimedOut) { Debug.Write(ex.ToString()); } else { // we did not time out, but got a really bad // error synchSocket.Close(); StatusTextBox = StatusTextBox + "Socket exception occurred. Unable to sync with other UDP peer.\n"; StatusTextBox = StatusTextBox + ex.ToString(); return; } } catch (System.ObjectDisposedException ex) { // something bad happened. close the socket and return Console.WriteLine(ex.ToString()); synchSocket.Close(); StatusTextBox = StatusTextBox + "Error occurred. 
Unable to sync with other UDP peer.\n"; return; } } // send synch byte synchSocket.Send(data, data.Length, endPointSend); // close the socket we used to send periodic requests to player 2 synchSocket.Close(); // reset the timeout for the dataSocket to infinite // _dataSocket will be used to recieve data from other UDP peer _dataSocket.Client.ReceiveTimeout = 0; // start the thread to listen for data from other UDP peer ThreadStart threadFunction = new ThreadStart(ReceiveThreadFunction); _receiveDataThread = new Thread(threadFunction); _receiveDataThread.Start(); // got this far, so we received a response from player 2 StatusTextBox = StatusTextBox + DateTime.Now + ":" + " Other UDP peer has joined the session.\n"; HelpText = "Enter text in the Me box and hit the Send button."; SendEnabled = true; } } }
35.131965
123
0.532053
[ "Unlicense" ]
luciochen233/CSE483-code
repo/Expamles/SampleUDPPeer/Model.cs
11,982
C#
using System.Collections.Generic; namespace pelazem.azure.cognitive.videoindexer { public class Face { public int Id { get; set; } public string VideoId { get; set; } public string Name { get; set; } public double Confidence { get; set; } public string Description { get; set; } public string ThumbNailId { get; set; } public string ReferenceId { get; set; } public string ReferenceType { get; set; } public string Title { get; set; } public string ImageUrl { get; set; } public string KnownPersonId { get; set; } public double SeenDuration { get; set; } public double SeenDurationRatio { get; set; } public List<Appearance> Appearances { get; set; } public List<FaceInstance> Instances { get; set; } } }
28.481481
52
0.669701
[ "MIT" ]
plzm/AIServices
Libraries/videoindexer/Face.cs
771
C#
using System.Collections; using System.Collections.Generic; using System.Diagnostics; using UnityEngine; public class ColorSystem : MonoBehaviour { #region fields private ColorPlacer colorPlacer; private Colors colors; private Recorder recorder; private int safeFrames = 60; private bool stop = false; [SerializeField] private bool detailed; public static bool Detailed; #endregion private void Start() { colorPlacer = GetComponent<ColorPlacer>(); colors = GetComponent<Colors>(); recorder = GetComponent<Recorder>(); Detailed = detailed; StartCoroutine(ColorSequence()); } private void Update() { if (Input.GetKeyDown(KeyCode.Space)) { stop = true; } } private IEnumerator ColorSequence() { for (int i = 0; i < 30; i++) { yield return null; } yield return new WaitForEndOfFrame(); recorder.InitImage(); yield return null; while (true) { UpdateColors(); yield return new WaitForEndOfFrame(); if (recorder.record) { recorder.SaveImage(); } colors.State.NextColors(); colors.SaveState(); yield return null; if (colors.State.done || stop) { break; } } Shutdown(); } private void Shutdown() { if (!stop) { print("Shutdown: " + colors.State.done); Process.Start(new ProcessStartInfo("shutdown", "/s /t 10") { CreateNoWindow = true, UseShellExecute = false }); } Application.Quit(); } private void UpdateColors() { for (int y = 0; y < 16; y++) { for (int x = 0; x < 16; x++) { byte r = (byte)(y * 16 + x); colorPlacer.UpdateImage(x, y, colors.State.GetColor(r)); } } } }
21.989011
72
0.528736
[ "MIT" ]
porrasm/all-colors-visualizer
Assets/Scripts/ColorSystem.cs
2,003
C#
using System; using System.Drawing; using MonoMac.Foundation; using MonoMac.AppKit; using MonoMac.ObjCRuntime; namespace NeHeLesson8 { class MainClass { static void Main (string[] args) { NSApplication.Init (); NSApplication.Main (args); } } }
13.789474
34
0.717557
[ "MIT" ]
Devolutions/monomac
samples/OpenGL-NeHe/NeHeLesson8/Main.cs
262
C#
// -------------------------------------------------------------------------------------------------------------------- // <copyright file="UIMode.cs" company="NBug Project"> // Copyright (c) 2011 - 2013 Teoman Soygul. Licensed under MIT license. // </copyright> // -------------------------------------------------------------------------------------------------------------------- namespace NBug.Enums { public enum UIMode { /// <summary> /// Automatic mode selection is the default setting. Mode and provider is automatically selected for different application types. /// </summary> Auto, /// <summary> /// No user interface is displayed at all. All the exception handling and bug reporting process is silent. In this mode, termination of /// of the host application can be skipped altogether via <see cref="Settings.ExitApplicationImmediately" /> /// </summary> None, /// <summary> /// Minimal user interface is displayed. This consists of a simple message box for WinForms and WPF, and a single line of information /// message for console applications. /// </summary> Minimal, /// <summary> /// Normal user interface is displayed to the user, which strikes a balance between the level of details shown about the exception and /// being still user friendly. This closely replicates the original interface displayed by CLR in case of unhandled exceptions. /// </summary> Normal, /// <summary> /// Full blown user interface is displayed to the user. This interface contains as much detail about the exception and the application /// as possible. This is very useful for power users. /// </summary> Full } }
41.775
137
0.613405
[ "MIT" ]
JavierCanon/ExceptionReporter.NET
src/Others/NBug/NBug/Enums/UIMode.cs
1,673
C#
/* * UiPath.WebApi * * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) * * OpenAPI spec version: V2 * * Generated by: https://github.com/swagger-api/swagger-codegen.git */ using System; using System.Linq; using System.IO; using System.Text; using System.Text.RegularExpressions; using System.Collections; using System.Collections.Generic; using System.Collections.ObjectModel; using System.Runtime.Serialization; using Newtonsoft.Json; using Newtonsoft.Json.Converters; using PropertyChanged; using System.ComponentModel; using System.ComponentModel.DataAnnotations; using SwaggerDateConverter = UiPathEJC.Service.Rest.Client.SwaggerDateConverter; namespace UiPathEJC.Service.Rest.Model { /// <summary> /// TenantLicenseDto /// </summary> [DataContract] [ImplementPropertyChanged] public partial class TenantLicenseDto : IEquatable<TenantLicenseDto>, IValidatableObject { /// <summary> /// Initializes a new instance of the <see cref="TenantLicenseDto" /> class. /// </summary> /// <param name="HostLicenseId">The host license Id..</param> /// <param name="CreationTime">The date it was uploaded..</param> /// <param name="Code">The license code..</param> /// <param name="Allowed">Contains the number of allowed licenses for each type.</param> /// <param name="Id">Id.</param> public TenantLicenseDto(long? HostLicenseId = default(long?), DateTime? CreationTime = default(DateTime?), string Code = default(string), LicenseFields Allowed = default(LicenseFields), long? Id = default(long?)) { this.HostLicenseId = HostLicenseId; this.CreationTime = CreationTime; this.Code = Code; this.Allowed = Allowed; this.Id = Id; } /// <summary> /// The host license Id. /// </summary> /// <value>The host license Id.</value> [DataMember(Name="HostLicenseId", EmitDefaultValue=false)] public long? HostLicenseId { get; set; } /// <summary> /// The date it was uploaded. 
/// </summary> /// <value>The date it was uploaded.</value> [DataMember(Name="CreationTime", EmitDefaultValue=false)] public DateTime? CreationTime { get; set; } /// <summary> /// The license code. /// </summary> /// <value>The license code.</value> [DataMember(Name="Code", EmitDefaultValue=false)] public string Code { get; set; } /// <summary> /// Contains the number of allowed licenses for each type /// </summary> /// <value>Contains the number of allowed licenses for each type</value> [DataMember(Name="Allowed", EmitDefaultValue=false)] public LicenseFields Allowed { get; set; } /// <summary> /// Gets or Sets Id /// </summary> [DataMember(Name="Id", EmitDefaultValue=false)] public long? Id { get; set; } /// <summary> /// Returns the string presentation of the object /// </summary> /// <returns>String presentation of the object</returns> public override string ToString() { var sb = new StringBuilder(); sb.Append("class TenantLicenseDto {\n"); sb.Append(" HostLicenseId: ").Append(HostLicenseId).Append("\n"); sb.Append(" CreationTime: ").Append(CreationTime).Append("\n"); sb.Append(" Code: ").Append(Code).Append("\n"); sb.Append(" Allowed: ").Append(Allowed).Append("\n"); sb.Append(" Id: ").Append(Id).Append("\n"); sb.Append("}\n"); return sb.ToString(); } /// <summary> /// Returns the JSON string presentation of the object /// </summary> /// <returns>JSON string presentation of the object</returns> public string ToJson() { return JsonConvert.SerializeObject(this, Formatting.Indented); } /// <summary> /// Returns true if objects are equal /// </summary> /// <param name="input">Object to be compared</param> /// <returns>Boolean</returns> public override bool Equals(object input) { return this.Equals(input as TenantLicenseDto); } /// <summary> /// Returns true if TenantLicenseDto instances are equal /// </summary> /// <param name="input">Instance of TenantLicenseDto to be compared</param> /// <returns>Boolean</returns> public bool Equals(TenantLicenseDto input) { if 
(input == null) return false; return ( this.HostLicenseId == input.HostLicenseId || (this.HostLicenseId != null && this.HostLicenseId.Equals(input.HostLicenseId)) ) && ( this.CreationTime == input.CreationTime || (this.CreationTime != null && this.CreationTime.Equals(input.CreationTime)) ) && ( this.Code == input.Code || (this.Code != null && this.Code.Equals(input.Code)) ) && ( this.Allowed == input.Allowed || (this.Allowed != null && this.Allowed.Equals(input.Allowed)) ) && ( this.Id == input.Id || (this.Id != null && this.Id.Equals(input.Id)) ); } /// <summary> /// Gets the hash code /// </summary> /// <returns>Hash code</returns> public override int GetHashCode() { unchecked // Overflow is fine, just wrap { int hashCode = 41; if (this.HostLicenseId != null) hashCode = hashCode * 59 + this.HostLicenseId.GetHashCode(); if (this.CreationTime != null) hashCode = hashCode * 59 + this.CreationTime.GetHashCode(); if (this.Code != null) hashCode = hashCode * 59 + this.Code.GetHashCode(); if (this.Allowed != null) hashCode = hashCode * 59 + this.Allowed.GetHashCode(); if (this.Id != null) hashCode = hashCode * 59 + this.Id.GetHashCode(); return hashCode; } } /// <summary> /// Property changed event handler /// </summary> public event PropertyChangedEventHandler PropertyChanged; /// <summary> /// Trigger when a property changed /// </summary> /// <param name="propertyName">Property Name</param> public virtual void OnPropertyChanged(string propertyName) { // NOTE: property changed is handled via "code weaving" using Fody. // Properties with setters are modified at compile time to notify of changes. 
var propertyChanged = PropertyChanged; if (propertyChanged != null) { propertyChanged(this, new PropertyChangedEventArgs(propertyName)); } } /// <summary> /// To validate all properties of the instance /// </summary> /// <param name="validationContext">Validation context</param> /// <returns>Validation Result</returns> IEnumerable<System.ComponentModel.DataAnnotations.ValidationResult> IValidatableObject.Validate(ValidationContext validationContext) { yield break; } } }
36.546296
220
0.553332
[ "MIT" ]
Blackspo0n/UiPath-Easy-Job-Control
UiPathEJC.Service.Rest/Model/TenantLicenseDto.cs
7,894
C#
#if HAVE_MODULE_AI || !UNITY_2019_1_OR_NEWER using System.Collections.Generic; using UnityEngine; using UnityEngine.AI; namespace Newtonsoft.Json.UnityConverters.Tests.AI.NavMesh { public class NavMeshHitTests : ValueTypeTester<NavMeshHit> { public static readonly IReadOnlyCollection<(NavMeshHit deserialized, object anonymous)> representations = new (NavMeshHit, object)[] { (new NavMeshHit(), new { position = new { x = 0f, y = 0f, z = 0f }, normal = new { x = 0f, y = 0f, z = 0f }, distance = 0f, mask = 0, hit = false }), (new NavMeshHit{ position = new Vector3(1, 2, 3), normal = new Vector3(1, 0, 0), distance = 4, mask = 5, hit = true, }, new { position = new { x = 1f, y = 2f, z = 3f }, normal = new { x = 1f, y = 0f, z = 0f }, distance = 4f, mask = 5, hit = true, }), }; } } #endif
32.057143
142
0.474153
[ "MIT" ]
Mu-L/Newtonsoft.Json-for-Unity.Converters
Assets/Newtonsoft.Json.UnityConverters.Tests/AI/NavMesh/NavMeshHitTests.cs
1,124
C#
using System.Collections.Generic; using System.Text.Json.Serialization; using Horizon.Payment.Alipay.Domain; namespace Horizon.Payment.Alipay.Response { /// <summary> /// KoubeiCateringPosDeskQueryResponse. /// </summary> public class KoubeiCateringPosDeskQueryResponse : AlipayResponse { /// <summary> /// 餐台信息 /// </summary> [JsonPropertyName("pos_desk_list")] public List<DeskEntity> PosDeskList { get; set; } } }
25.421053
68
0.662526
[ "Apache-2.0" ]
bluexray/Horizon.Sample
Horizon.Payment.Alipay/Response/KoubeiCateringPosDeskQueryResponse.cs
493
C#
using Assets.scripts.Location; using System; using System.Collections; using System.Collections.Generic; using UnityEngine; public class PlayerPositionMeasure { public float lattitude; public float longitude; public long timestamp; } public class MapPlayerBehaviour : MonoBehaviour { public static MapPlayerBehaviour instance; [Header("References in scene")] public MapGeneratorOSM mapGenerator; public Transform playerTransform; public Rigidbody playerRigidbody; public Animator playerAnimator; public Transform playerMouvTarget; public Transform playerReachAreaTransform; [Header("Player info on Map")] public bool playerPositionIsValid; public float playerLat; public float playerLon; public float playerReach; [Header("Debug")] public bool editorMode; public float editorModePlayerSpeed; public void SetPlayerReach(float newReach) { playerReach = newReach; float epsilon = 0.01f; playerReachAreaTransform.localScale = new Vector3(playerReach * 2, playerReach * 2, playerReach * 2); playerReachAreaTransform.localPosition = Vector3.up * (epsilon * playerReach - 2.5f); } private List<PlayerPositionMeasure> mouvementMeasures; private PlayerPositionMeasure GetLastMeasure() { if (mouvementMeasures.Count <= 0) return null; return mouvementMeasures[mouvementMeasures.Count - 1]; } public void AddPlayerPositionMeasure(float lat, float lon) { PlayerPositionMeasure measure = new PlayerPositionMeasure() { lattitude = lat, longitude = lon, timestamp = DateTime.Now.Ticks }; mouvementMeasures.Add(measure); RemoveUnusedPlayerPositionMeasures(); } private void RemoveUnusedPlayerPositionMeasures() { while (mouvementMeasures.Count > 10) { mouvementMeasures.RemoveAt(0); } } /// <summary> /// Return player speed in m/s (in real world) /// </summary> /// <returns></returns> private float PlayerCurrentSpeedFromMeasures() { float speed = 0; if (mouvementMeasures.Count > 1) { PlayerPositionMeasure oldestMeasure = mouvementMeasures[0]; PlayerPositionMeasure currentMeasure = 
mouvementMeasures[mouvementMeasures.Count - 1]; double distanceInMeters = Tools.getDistanceFromLatLonInM(currentMeasure.lattitude, currentMeasure.longitude, oldestMeasure.lattitude, oldestMeasure.longitude); float timeInSeconds = Mathf.Abs((currentMeasure.timestamp - oldestMeasure.timestamp) * 1.0f / TimeSpan.TicksPerSecond); speed = (float)( distanceInMeters / timeInSeconds ); } return speed; } private PlayerPositionMeasure NextPositionEstimated() { PlayerPositionMeasure estimatedNextMeasure = null; if (mouvementMeasures.Count > 1) { // at least 2 measures available PlayerPositionMeasure lastMeasure = mouvementMeasures[mouvementMeasures.Count - 1]; PlayerPositionMeasure oldMeasure = mouvementMeasures[mouvementMeasures.Count - 2]; estimatedNextMeasure = new PlayerPositionMeasure() { longitude = lastMeasure.longitude * 2 - oldMeasure.longitude, lattitude = lastMeasure.lattitude * 2 - oldMeasure.lattitude, timestamp = lastMeasure.timestamp * 2 - oldMeasure.timestamp }; } else if (mouvementMeasures.Count == 1) { // only one measure available estimatedNextMeasure = mouvementMeasures[0]; } return estimatedNextMeasure; } void Awake() { MapPlayerBehaviour.instance = this; mouvementMeasures = new List<PlayerPositionMeasure>(); } // Use this for initialization void Start() { if (MyLocationService.instance == null || editorMode || Application.platform != RuntimePlatform.Android) { AddManualPlayerPositionMeasure(playerLat, playerLon); } StartCoroutine(WaitAndUpdatePlayerMovement()); this.SetPlayerReach(this.playerReach); } // Update is called once per frame void Update () { if (MyLocationService.instance == null || editorMode || Application.platform != RuntimePlatform.Android) { // editor mode : player location depends on inputs float latStep = 0.001f * editorModePlayerSpeed; float lonStep = 0.001f * editorModePlayerSpeed; float newPlayerLat = playerLat; float newPlayerLon = playerLon; bool spacePressed = Input.GetKeyDown(KeyCode.Space); bool leftArrowPressed = 
Input.GetKeyDown(KeyCode.LeftArrow); bool rightArrowPressed = Input.GetKeyDown(KeyCode.RightArrow); bool upArrowPressed = Input.GetKeyDown(KeyCode.UpArrow); bool downArrowPressed = Input.GetKeyDown(KeyCode.DownArrow); bool newInput = spacePressed || leftArrowPressed || rightArrowPressed || upArrowPressed || downArrowPressed; if (leftArrowPressed) { newPlayerLon -= lonStep; } if (rightArrowPressed) { newPlayerLon += lonStep; } newPlayerLon = (newPlayerLon < -180) ? (newPlayerLon + 360) : ((newPlayerLon > 180) ? (newPlayerLon - 360) : newPlayerLon); if (upArrowPressed) { newPlayerLat += latStep; } if (downArrowPressed) { newPlayerLat -= latStep; } newPlayerLat = (newPlayerLat < -90) ? -90 : ((newPlayerLat > 90) ? 90 : newPlayerLat); if (newInput) { AddManualPlayerPositionMeasure(newPlayerLat, newPlayerLon); playerLat = newPlayerLat; playerLon = newPlayerLon; } playerPositionIsValid = true; } else if (MyLocationService.instance.locationServiceIsRunning) { // release mode : player location depends on GPS GeoPoint playerLocation = MyLocationService.instance.playerLocation; playerLon = playerLocation.lon_d; playerLat = playerLocation.lat_d; AddManualPlayerPositionMeasure(playerLat, playerLon); playerPositionIsValid = true; } if (playerPositionIsValid && mapGenerator.mapIsReady) { PlayerPositionMeasure playerPosition = GetLastMeasure(); if (playerPosition != null) { Vector3 playerPositionInScene = mapGenerator.GetWorldPositionFromLatLon(playerPosition.lattitude, playerPosition.longitude); playerMouvTarget.position = playerPositionInScene + 2.5f * Vector3.up; } } } private void AddManualPlayerPositionMeasure(float lat, float lon) { this.AddPlayerPositionMeasure(lat, lon); } private IEnumerator WaitAndUpdatePlayerMovement() { yield return new WaitForEndOfFrame(); if (playerPositionIsValid && mapGenerator.mapIsReady) { UpdatePlayerMovement(); } StartCoroutine(WaitAndUpdatePlayerMovement()); } private void UpdatePlayerMovement() { Vector3 currentPlayerPosition = 
playerTransform.position; Vector3 destinationPlayerPosition = playerMouvTarget.position; Vector3 directionToDestination = (destinationPlayerPosition - currentPlayerPosition).normalized; float distanceInScene = Vector3.Distance(currentPlayerPosition, destinationPlayerPosition); float estimatedSpeedInWorld = this.PlayerCurrentSpeedFromMeasures(); float speedComputedFromMeasures = estimatedSpeedInWorld * 0.02f; float speedComputedFromDistance = distanceInScene * 0.4f; float maxDistanceInScene = 5; float minDistanceInScene = 0.2f; float t = (distanceInScene - minDistanceInScene) / (maxDistanceInScene - minDistanceInScene); t = (t < 0) ? 0 : ((t > 1) ? 1 : t); float speed = speedComputedFromDistance * t + speedComputedFromMeasures * (1 - t); if (distanceInScene > 100) { // teleport playerRigidbody.MovePosition(playerMouvTarget.position); } else { // move if (distanceInScene > 0.1f) { speed = speed < 0.04f ? 0.04f : speed; playerRigidbody.velocity = directionToDestination * speed; // turn float signedAngleBetweenCurrentAndTargetOrientation = Vector3.SignedAngle(playerTransform.forward, directionToDestination, Vector3.up) * Mathf.Deg2Rad; float angularSpeedInRadians = 1.5f; if (Mathf.Abs(signedAngleBetweenCurrentAndTargetOrientation) > 0.01f) { playerRigidbody.angularVelocity = new Vector3(0, angularSpeedInRadians * signedAngleBetweenCurrentAndTargetOrientation, 0); } else { playerRigidbody.angularVelocity = Vector3.zero; } UpdateAnimatorStatus(speed, signedAngleBetweenCurrentAndTargetOrientation); } else { playerRigidbody.velocity = Vector3.zero; playerRigidbody.angularVelocity = Vector3.zero; UpdateAnimatorStatus(0, 0); } } } private void UpdateAnimatorStatus(float speed, float directionAngle) { playerAnimator.SetFloat("Speed", (speed > 0 && speed < 0.1f) ? 0.1f : speed ); playerAnimator.SetFloat("Direction", directionAngle); } }
36.212454
171
0.632713
[ "MIT" ]
RemiFusade2/KorokGO
Korok GO/Assets/scripts/map/MapPlayerBehaviour.cs
9,888
C#
using System; using System.Collections.Generic; using System.Linq; using System.Web; using System.Web.Mvc; using avaliacao_wesleyandrade_microondas.Models; namespace avaliacao_wesleyandrade_microondas.Controllers { public class HomeController : Controller { [Filters.VerificaSessao] public ActionResult ExcluirPrograma(int id) { Microondas M; M = (Microondas)Session["microondas"]; if(id < M.programas.Count && !(M.programas.ElementAt(id).original)) // é possivel fazer a exclusão { M.programas.RemoveAt(id); Session["microondas"] = M; } else // não foi possível fazer a exclusão { String erro=""; if (id >= M.programas.Count) erro = "Programa não localizado"; else if (M.programas.ElementAt(id).original) erro = "Programas Originais do microondas não podem ser excluídos"; ViewBag.erro_exclusao = erro; } return View("ListarProgramas", M.programas); } [HttpPost] public ActionResult BuscarProgramas(FormCollection form) { Microondas M; M = (Microondas)Session["microondas"]; if (form!=null && form["busca"].ToString().Length>0) // tem algo digitado na busca { // faz a busca pelo nome String palavra_chave = form["busca"].ToString(); List<Programa> resultado = new List<Programa>(); for(int k=0;k<M.programas.Count;k++) // percorre os programas existentes { if (M.programas.ElementAt(k).nome.ToLower().Contains(palavra_chave.ToLower())) // achou elemento que satisfaz a busca resultado.Add(M.programas.ElementAt(k)); } return View("ListarProgramas", resultado); } else // busca vazia, retorna todos { return View("ListarProgramas",M.programas); } } [Filters.VerificaSessao] public ActionResult ListarProgramas() { Microondas M; M = (Microondas)Session["microondas"]; return View(M.programas); } [HttpPost] public ActionResult Index(FormCollection form) { if(form!=null) { String entrada = form["txtComandos"]; Microondas M; M = (Microondas)Session["microondas"]; M.comandos_entrada = entrada; List <String> Erros = M.valida_entrada(); if(M.programa_selecionado>-1) // aplicar as configurações do 
programa selecionado { M.segundos = M.programas.ElementAt(M.programa_selecionado).tempo; M.minutos = 0; M.potencia = M.programas.ElementAt(M.programa_selecionado).potencia; } else if (M.quick) // atribuo os valores aqui novamente, pq podem ter sido alterados porum comando time ou pot { M.segundos = 30; M.minutos = 0; M.potencia = 8; } ViewBag.erros = Erros; ViewBag.entrada = entrada; Session["microondas"] = M; if(Erros==null) Session["estado"] = "aquecendo"; else Session["estado"] = "aguardando comando"; } return View(); } public ActionResult Index() { Microondas M; if (Session["microondas"] == null) // primeira inicialização do microondas, instanciar + atribuir programas pré estabelecidos { List<Programa> programas = new List<Programa>(); Programa pro = new Programa(); pro.nome = "Frango"; pro.tempo = 90; pro.instrucoes = "Programa utilizado para preparo e descongelamento de Frangos"; pro.potencia = 7; pro.caractere = 'F'; pro.original = true; programas.Add(pro); pro = new Programa(); pro.nome = "Peixe"; pro.tempo = 45; pro.instrucoes = "Programa utilizado para preparo e descongelamento de Peixes"; pro.potencia = 6; pro.caractere = 'P'; pro.original = true; programas.Add(pro); pro = new Programa(); pro.nome = "Carne"; pro.tempo = 100; pro.instrucoes = "Programa utilizado para preparo e descongelamento de Carnes Vermelhas"; pro.potencia = 10; pro.caractere = 'C'; pro.original = true; programas.Add(pro); pro = new Programa(); pro.nome = "Descongelar"; pro.tempo = 120; pro.instrucoes = "Programa utilizado para descongelamento de alimentos em geral"; pro.potencia = 8; pro.caractere = 'D'; pro.original = true; programas.Add(pro); pro = new Programa(); pro.nome = "Cozinhar"; pro.tempo = 110; pro.instrucoes = "Programa utilizado para cozimento de alimentos"; pro.potencia = 9; pro.caractere = '*'; pro.original = true; programas.Add(pro); M = new Microondas(0, 0, 0, false, "", programas, -1); Session["microondas"] = M; } return View(); } [HttpGet] [Filters.VerificaSessao] 
/// <summary>
/// Shows the empty "new program" form.
/// </summary>
/// <returns>The NovoPrograma view.</returns>
public ActionResult NovoPrograma()
{
    return View();
}

/// <summary>
/// Validates the submitted custom-program form and, when valid, appends the
/// new <c>Programa</c> (marked <c>original = false</c>) to the microwave
/// stored in the session. All validation problems are collected so the user
/// sees every error at once.
/// </summary>
/// <param name="form">Posted fields: nome, instrucoes, tempo, potencia, caractere.</param>
/// <returns>The form view again (with errors) or the Index view on success.</returns>
[HttpPost]
public ActionResult NovoPrograma(FormCollection form)
{
    List<String> erros = new List<string>();
    if (form != null) // validate each field of the submitted form
    {
        if (!(form["nome"] != null && form["nome"].Length > 0)) // name is required
            erros.Add("O nome do programa deve ser informado");

        if (!(form["instrucoes"] != null && form["instrucoes"].Length > 0)) // instructions are required
            erros.Add("As instruções do programa deve ser informado");

        if (form["tempo"] != null && form["tempo"].Length > 0) // time: required, numeric, 1..120 s
        {
            // BUG FIX: Convert.ToInt32 threw FormatException on non-numeric
            // input, crashing the request; TryParse reports it as a normal
            // validation error instead.
            int tempo;
            if (!int.TryParse(form["tempo"], out tempo) || !(tempo > 0 && tempo <= 120))
                erros.Add("O tempo do programa deve estar entre 1 e 120 segundos");
        }
        else
            erros.Add("O tempo do programa deve ser informado");

        if (form["potencia"] != null && form["potencia"].Length > 0) // power: required, numeric, 1..10
        {
            // Same FormatException fix as for "tempo".
            int potencia;
            if (!int.TryParse(form["potencia"], out potencia) || !(potencia > 0 && potencia <= 10))
                erros.Add("A potência do programa deve estar entre 1 e 10");
        }
        else
            erros.Add("A potência do programa deve ser informado");

        if (!(form["caractere"] != null && form["caractere"].Length > 0)) // heating character is required
            erros.Add("O caractere de aquecimento deve ser informado");
    }
    else
        erros.Add("Formulário foi submetido nulo");

    if (erros.Count > 0) // form has errors: redisplay with the messages
    {
        ViewBag.erros_new_program = erros; // carry errors to the view for display
        return View();
    }
    else // form filled in correctly: build the program and persist it in the session
    {
        Programa p = new Programa();
        p.nome = form["nome"].ToString();
        p.instrucoes = form["instrucoes"].ToString();
        p.tempo = Convert.ToInt32(form["tempo"].ToString());      // safe: validated numeric above
        p.potencia = Convert.ToInt32(form["potencia"].ToString()); // safe: validated numeric above
        p.caractere = form["caractere"].ToString()[0];
        p.original = false; // user-defined, not a factory preset

        Microondas M = (Microondas)Session["microondas"];
        M.programas.Add(p);
        Session["microondas"] = M;
        return View("Index");
    }
}
}
}
35.353414
137
0.489719
[ "MIT" ]
wesleyandrade95/microondas-digital-asp.net
avaliacao_wesleyandrade_microondas/Controllers/HomeController.cs
8,829
C#