language
stringclasses
1 value
repo
stringclasses
133 values
path
stringlengths
13
229
class_span
dict
source
stringlengths
14
2.92M
target
stringlengths
1
153
csharp
unoplatform__uno
src/Uno.Foundation/Uno.Core.Extensions/Uno.Core.Extensions.Compatibility/Extensions/StringBuilderExtensions.cs
{ "start": 857, "end": 1096 }
internal static class ____ { public static void AppendFormatInvariant(this StringBuilder builder, string format, params object[] args) { builder.AppendFormat(CultureInfo.InvariantCulture, format, args); } } }
StringBuilderExtensions
csharp
MonoGame__MonoGame
MonoGame.Framework.Content.Pipeline/ProcessorParameterCollection.cs
{ "start": 568, "end": 1035 }
public sealed class ____ : ReadOnlyCollection<ProcessorParameter> { /// <summary> /// Constructs a new ProcessorParameterCollection instance. /// </summary> /// <param name="parameters">The parameters in the collection.</param> internal ProcessorParameterCollection(IEnumerable<ProcessorParameter> parameters) : base(new List<ProcessorParameter>(parameters)) { } } }
ProcessorParameterCollection
csharp
smartstore__Smartstore
src/Smartstore.Core/Migrations/20230918153000_CartCalculationRounding.cs
{ "start": 288, "end": 8505 }
internal class ____ : Migration, ILocaleResourcesProvider, IDataSeeder<SmartDbContext> { const string CurrencyTable = nameof(Currency); const string MidpointRoundingColumn = nameof(Currency.MidpointRounding); const string RoundOrderItemsEnabledColumn = nameof(Currency.RoundOrderItemsEnabled); const string RoundNetPricesColumn = nameof(Currency.RoundNetPrices); const string RoundUnitPricesColumn = nameof(Currency.RoundUnitPrices); public override void Up() { // Make nullable. Alter.Column(RoundOrderItemsEnabledColumn).OnTable(CurrencyTable).AsBoolean().Nullable(); if (!Schema.Table(CurrencyTable).Column(MidpointRoundingColumn).Exists()) { Create.Column(MidpointRoundingColumn).OnTable(CurrencyTable).AsInt32().NotNullable().WithDefaultValue((int)CurrencyMidpointRounding.AwayFromZero); } if (!Schema.Table(CurrencyTable).Column(RoundNetPricesColumn).Exists()) { Create.Column(RoundNetPricesColumn).OnTable(CurrencyTable).AsBoolean().Nullable(); } if (!Schema.Table(CurrencyTable).Column(RoundUnitPricesColumn).Exists()) { Create.Column(RoundUnitPricesColumn).OnTable(CurrencyTable).AsBoolean().Nullable(); } } public override void Down() { if (Schema.Table(CurrencyTable).Column(MidpointRoundingColumn).Exists()) { Delete.Column(MidpointRoundingColumn).FromTable(CurrencyTable); } if (Schema.Table(CurrencyTable).Column(RoundNetPricesColumn).Exists()) { Delete.Column(RoundNetPricesColumn).FromTable(CurrencyTable); } if (Schema.Table(CurrencyTable).Column(RoundUnitPricesColumn).Exists()) { Delete.Column(RoundUnitPricesColumn).FromTable(CurrencyTable); } } public DataSeederStage Stage => DataSeederStage.Early; public bool AbortOnFailure => false; public async Task SeedAsync(SmartDbContext context, CancellationToken cancelToken = default) { // Apply default for backward compatibility. 
await context.Currencies .Where(x => x.RoundOrderItemsEnabled != null && x.RoundOrderItemsEnabled.Value == true) .ExecuteUpdateAsync(setter => setter .SetProperty(c => c.RoundNetPrices, p => true) .SetProperty(c => c.RoundUnitPrices, p => true), cancelToken); // Set RoundOrderItemsEnabled to "null" to more easily apply CurrencySettings.RoundOrderItemsEnabled (default is "false"). await context.Currencies .Where(x => x.RoundOrderItemsEnabled != null && x.RoundOrderItemsEnabled.Value == false) .ExecuteUpdateAsync(setter => setter.SetProperty(c => c.RoundOrderItemsEnabled, p => null), cancelToken); await context.MigrateLocaleResourcesAsync(MigrateLocaleResources); } public void MigrateLocaleResources(LocaleResourcesBuilder builder) { #region Currency builder.AddOrUpdate("Enums.CurrencyMidpointRounding.AwayFromZero", "Commercial rounding (recommended)", "Kaufmännisches Runden (empfohlen)"); builder.AddOrUpdate("Enums.CurrencyMidpointRounding.ToEven", "Banker's rounding", "Mathematisches Runden"); builder.AddOrUpdate("Enums.CurrencyMidpointRounding.AwayFromZero.Example", "1.225 is rounded up to 1.23. 1.224 is rounded down to 1.22.", "1,225 wird auf 1,23 aufgerundet. 1,224 wird auf 1,22 abgerundet."); builder.AddOrUpdate("Enums.CurrencyMidpointRounding.ToEven.Example", "1.225 is rounded down to 1.22. 1.235 is rounded up to 1.24.", "1,225 wird auf 1,22 abgerundet. 1,235 wird auf 1,24 aufgerundet."); builder.AddOrUpdate("Admin.Configuration.Currencies.Fields.MidpointRounding", "Midpoint rounding", "Mittelwertrundung", "Specifies the rounding strategy of the midway between two amounts. Default is comercial rounding (round midpoint to the nearest amount that is away from zero).", "Legt die Rundungsstrategie für die Mitte zwischen zwei Beträgen fest. 
Standard ist kaufmännisches Runden (Mittelwert auf den nächstgelegenen, von Null entfernten Betrag runden)."); builder.AddOrUpdate("Admin.Configuration.Currencies.Fields.RoundNetPrices", "Round when net prices are displayed", "Bei Nettopreisanzeige runden", "Specifies whether to round during shopping cart calculation even if net prices are displayed to the customer.", "Legt fest, ob bei der Warenkorbberechnung auch dann gerundet werden soll, wenn dem Kunden Nettopreise angezeigt werden."); builder.AddOrUpdate("Admin.Configuration.Currencies.Fields.RoundUnitPrices", "Round unit price", "Einzelpreis runden", "Specifies whether the product price should be rounded before or after quantity multiplication during shopping cart calculation. If enabled, the unit price is rounded and then multiplied by the quantity. If disabled, the unit price is multiplied by the quantity and then rounded.", "Legt fest, ob der Produktpreis bei der Warenkorbberechnung vor oder nach der Mengenmultiplikation gerundet werden soll. Falls aktiviert wird der Einzelpreis gerundet und dann mit der Menge multipliziert. Falls deaktiviert wird der Einzelpreis mit der Menge multipliziert und erst danach gerundet."); builder.AddOrUpdate("Admin.Configuration.Currencies.Fields.RoundOrderItemsEnabled", "Round all order item amounts", "Beträge aller Bestellpositionen runden", "Specifies whether to round all order item amounts (prices, tax, fees etc.). The general rounding settings are applied, if not specified here.", "Legt fest, ob die Beträge aller Bestellpositionen gerundet werden sollen (Preise, Steuern, Gebühren etc.). 
Es gelten die allgemeinen Rundungseinstellungen, sofern hier nicht festgelegt."); #endregion #region Currency settings builder.Delete( "Admin.Configuration.Currencies.Fields.ExchangeRateProvider", "Admin.Configuration.Currencies.Fields.ExchangeRateProvider.Hint", "Admin.Configuration.Currencies.Fields.CurrencyRateAutoUpdateEnabled", "Admin.Configuration.Currencies.Fields.CurrencyRateAutoUpdateEnabled.Hint", "Admin.Configuration.Settings.Tax" ); builder.AddOrUpdate("Common.Finance", "Finance", "Finanzen"); builder.AddOrUpdate("Admin.Configuration.Settings.Currency.ExchangeRateProvider", "Online exchange rate service", "Online Wechselkursdienst"); builder.AddOrUpdate("Admin.Configuration.Settings.Currency.AutoUpdateEnabled", "Automatically update exchange rates", "Wechselkurse automatisch aktualisieren", "Specifies whether exchange rates should be automatically updated via the associated scheduled task.", "Legt fest, ob Wechselkurse über die zugehörige geplante Aufgabe automatisch aktualisiert werden sollen."); builder.AddOrUpdate("Admin.Configuration.Settings.Currency.RoundOrderItemsEnabled", "Round all order item amounts", "Beträge aller Bestellpositionen runden", "Specifies whether to round all order item amounts (prices, tax, fees etc.). Rounding settings can optionally also be specified for the respective currency.", "Legt fest, ob die Beträge aller Bestellpositionen gerundet werden sollen (Preise, Steuern, Gebühren etc.). Rundungseinstellungen können optional auch bei der jeweiligen Währung festgelegt werden."); #endregion } } }
CartCalculationRounding
csharp
ServiceStack__ServiceStack
ServiceStack/src/ServiceStack/Host/Handlers/Soap12Handlers.cs
{ "start": 445, "end": 1123 }
public class ____ : Soap12Handler { public Soap12MessageReplyHttpHandler() : base(RequestAttributes.Soap12) { } public override async Task ProcessRequestAsync(IRequest httpReq, IResponse httpRes, string operationName) { if (httpReq.Verb == HttpMethods.Get) { var wsdl = new Soap12WsdlMetadataHandler(); await wsdl.Execute(httpReq, httpRes); return; } var responseMessage = await Send(null, httpReq, httpRes); if (httpRes.IsClosed) return; HostContext.AppHost.WriteSoapMessage(httpReq, responseMessage, httpRes.OutputStream); } } #endif
Soap12MessageReplyHttpHandler
csharp
dotnet__extensions
test/Generators/Microsoft.Gen.Logging/Unit/AttributeParserTests.cs
{ "start": 4534, "end": 4914 }
partial class ____ { [LoggerMessage(0, LogLevel.Debug, ""M {p0}"")] static partial void M(ILogger logger, [PublicData] string p0); } "); Assert.Empty(diagnostics); } [Fact] public async Task MethodNotStatic() { var diagnostics = await RunGenerator(@$"
C
csharp
MassTransit__MassTransit
tests/MassTransit.Tests/InterfaceSubscription_Specs.cs
{ "start": 1258, "end": 1389 }
public interface ____ { string Name { get; } int Age { get; } }
SecondMessageContract
csharp
dotnet__efcore
src/EFCore/Diagnostics/UpdatingIdentityResolutionInterceptor.cs
{ "start": 377, "end": 3290 }
public class ____ : IIdentityResolutionInterceptor { private readonly bool _preserveModifiedValues; private readonly bool _updateOriginalValues; /// <summary> /// Creates a new instance of the interceptor. /// </summary> /// <param name="preserveModifiedValues"> /// If <see langword="true" />, then values for properties marked as modified in the current instance will /// not be updated by values from the new instance. /// </param> /// <param name="updateOriginalValues"> /// If <see langword="true" />, then both current and original values of the current instance are updated to /// current values from the new instance. /// </param> public UpdatingIdentityResolutionInterceptor( bool preserveModifiedValues = false, bool updateOriginalValues = false) { _preserveModifiedValues = preserveModifiedValues; _updateOriginalValues = updateOriginalValues; } /// <summary> /// Called when a <see cref="DbContext" /> attempts to track a new instance of an entity with the same primary key value as /// an already tracked instance. This implementation copies property values from the new entity instance into the /// tracked entity instance. 
/// </summary> /// <param name="interceptionData">Contextual information about the identity resolution.</param> /// <param name="existingEntry">The entry for the existing tracked entity instance.</param> /// <param name="newEntity">The new entity instance, which will be discarded after this call.</param> public virtual void UpdateTrackedInstance( IdentityResolutionInterceptionData interceptionData, EntityEntry existingEntry, object newEntity) { var tempEntry = interceptionData.Context.Entry(newEntity); if (existingEntry.State == EntityState.Added) { foreach (var propertyEntry in tempEntry.Properties.Where(e => e.Metadata.GetBeforeSaveBehavior() != PropertySaveBehavior.Throw)) { existingEntry.Property(propertyEntry.Metadata.Name).CurrentValue = propertyEntry.CurrentValue; } } else { foreach (var propertyEntry in tempEntry.Properties.Where(e => e.Metadata.GetAfterSaveBehavior() != PropertySaveBehavior.Throw)) { var existingPropertyEntry = existingEntry.Property(propertyEntry.Metadata.Name); if (!_preserveModifiedValues || !existingPropertyEntry.IsModified) { existingPropertyEntry.CurrentValue = propertyEntry.CurrentValue; } if (_updateOriginalValues) { existingPropertyEntry.OriginalValue = propertyEntry.CurrentValue; } } } } }
UpdatingIdentityResolutionInterceptor
csharp
HangfireIO__Hangfire
src/Hangfire.Core/Storage/Monitoring/DeletedJobDto.cs
{ "start": 832, "end": 1281 }
public class ____ { public DeletedJobDto() { InDeletedState = true; } public Job Job { get; set; } public JobLoadException LoadException { get; set; } public InvocationData InvocationData { get; set; } public DateTime? DeletedAt { get; set; } public bool InDeletedState { get; set; } public IDictionary<string, string> StateData { get; set; } } }
DeletedJobDto
csharp
dotnet__maui
src/Essentials/samples/Samples/View/WebAuthenticatorPage.xaml.cs
{ "start": 233, "end": 360 }
public partial class ____ : BasePage { public WebAuthenticatorPage() { InitializeComponent(); } } }
WebAuthenticatorPage
csharp
grandnode__grandnode2
src/Core/Grand.Infrastructure/TypeSearch/TypeSearcher.cs
{ "start": 155, "end": 269 }
class ____ finds types needed by looping assemblies in the /// currently executing AppDomain. /// </summary>
that
csharp
dotnet__orleans
test/DefaultCluster.Tests/GrainReferenceCastTests.cs
{ "start": 5935, "end": 6732 }
interface ____ the initial reference's exactly. IAddressable cast = grain.Cast<ISimpleGrain>(); Assert.Same(grain, cast); Assert.IsAssignableFrom<ISimpleGrain>(cast); } /// <summary> /// Tests upcasting from derived to base grain interfaces. /// Validates that grain references can be cast from derived interfaces /// to their base interfaces, following normal inheritance rules. /// </summary> [Fact, TestCategory("BVT"), TestCategory("Cast")] public void CastInternalCastUpFromChild() { GrainReference grain = (GrainReference)this.GrainFactory.GetGrain<IGeneratorTestDerivedGrain1>(GetRandomGrainId()); // This cast should be a no-op, since the
matches
csharp
nopSolutions__nopCommerce
src/Presentation/Nop.Web/Areas/Admin/Factories/QueuedEmailModelFactory.cs
{ "start": 402, "end": 7168 }
public partial class ____ : IQueuedEmailModelFactory { #region Fields protected readonly IDateTimeHelper _dateTimeHelper; protected readonly IEmailAccountService _emailAccountService; protected readonly ILocalizationService _localizationService; protected readonly IQueuedEmailService _queuedEmailService; #endregion #region Ctor public QueuedEmailModelFactory(IDateTimeHelper dateTimeHelper, IEmailAccountService emailAccountService, ILocalizationService localizationService, IQueuedEmailService queuedEmailService) { _dateTimeHelper = dateTimeHelper; _emailAccountService = emailAccountService; _localizationService = localizationService; _queuedEmailService = queuedEmailService; } #endregion #region Utilities /// <summary> /// Gets a friendly email account name /// </summary> protected virtual string GetEmailAccountName(EmailAccount emailAccount) { if (emailAccount == null) return string.Empty; if (!string.IsNullOrWhiteSpace(emailAccount.DisplayName)) return emailAccount.Email + " (" + emailAccount.DisplayName + ")"; return emailAccount.Email; } #endregion #region Methods /// <summary> /// Prepare queued email search model /// </summary> /// <param name="searchModel">Queued email search model</param> /// <returns> /// A task that represents the asynchronous operation /// The task result contains the queued email search model /// </returns> public virtual Task<QueuedEmailSearchModel> PrepareQueuedEmailSearchModelAsync(QueuedEmailSearchModel searchModel) { ArgumentNullException.ThrowIfNull(searchModel); //prepare default search values searchModel.SearchMaxSentTries = 10; //prepare page parameters searchModel.SetGridPageSize(); return Task.FromResult(searchModel); } /// <summary> /// Prepare paged queued email list model /// </summary> /// <param name="searchModel">Queued email search model</param> /// <returns> /// A task that represents the asynchronous operation /// The task result contains the queued email list model /// </returns> public virtual async 
Task<QueuedEmailListModel> PrepareQueuedEmailListModelAsync(QueuedEmailSearchModel searchModel) { ArgumentNullException.ThrowIfNull(searchModel); //get parameters to filter emails var startDateValue = !searchModel.SearchStartDate.HasValue ? null : (DateTime?)_dateTimeHelper.ConvertToUtcTime(searchModel.SearchStartDate.Value, await _dateTimeHelper.GetCurrentTimeZoneAsync()); var endDateValue = !searchModel.SearchEndDate.HasValue ? null : (DateTime?)_dateTimeHelper.ConvertToUtcTime(searchModel.SearchEndDate.Value, await _dateTimeHelper.GetCurrentTimeZoneAsync()).AddDays(1); //get queued emails var queuedEmails = await _queuedEmailService.SearchEmailsAsync(fromEmail: searchModel.SearchFromEmail, toEmail: searchModel.SearchToEmail, createdFromUtc: startDateValue, createdToUtc: endDateValue, loadNotSentItemsOnly: searchModel.SearchLoadNotSent, loadOnlyItemsToBeSent: false, maxSendTries: searchModel.SearchMaxSentTries, loadNewest: true, pageIndex: searchModel.Page - 1, pageSize: searchModel.PageSize); //prepare list model var model = await new QueuedEmailListModel().PrepareToGridAsync(searchModel, queuedEmails, () => { return queuedEmails.SelectAwait(async queuedEmail => { //fill in model values from the entity var queuedEmailModel = queuedEmail.ToModel<QueuedEmailModel>(); //little performance optimization: ensure that "Body" is not returned queuedEmailModel.Body = string.Empty; //convert dates to the user time queuedEmailModel.CreatedOn = await _dateTimeHelper.ConvertToUserTimeAsync(queuedEmail.CreatedOnUtc, DateTimeKind.Utc); //fill in additional values (not existing in the entity) var emailAccount = await _emailAccountService.GetEmailAccountByIdAsync(queuedEmail.EmailAccountId); queuedEmailModel.EmailAccountName = GetEmailAccountName(emailAccount); queuedEmailModel.PriorityName = await _localizationService.GetLocalizedEnumAsync(queuedEmail.Priority); if (queuedEmail.DontSendBeforeDateUtc.HasValue) { queuedEmailModel.DontSendBeforeDate = await _dateTimeHelper 
.ConvertToUserTimeAsync(queuedEmail.DontSendBeforeDateUtc.Value, DateTimeKind.Utc); } if (queuedEmail.SentOnUtc.HasValue) queuedEmailModel.SentOn = await _dateTimeHelper.ConvertToUserTimeAsync(queuedEmail.SentOnUtc.Value, DateTimeKind.Utc); return queuedEmailModel; }); }); return model; } /// <summary> /// Prepare queued email model /// </summary> /// <param name="model">Queued email model</param> /// <param name="queuedEmail">Queued email</param> /// <param name="excludeProperties">Whether to exclude populating of some properties of model</param> /// <returns> /// A task that represents the asynchronous operation /// The task result contains the queued email model /// </returns> public virtual async Task<QueuedEmailModel> PrepareQueuedEmailModelAsync(QueuedEmailModel model, QueuedEmail queuedEmail, bool excludeProperties = false) { if (queuedEmail == null) return model; //fill in model values from the entity model ??= queuedEmail.ToModel<QueuedEmailModel>(); model.EmailAccountName = GetEmailAccountName(await _emailAccountService.GetEmailAccountByIdAsync(queuedEmail.EmailAccountId)); model.PriorityName = await _localizationService.GetLocalizedEnumAsync(queuedEmail.Priority); model.CreatedOn = await _dateTimeHelper.ConvertToUserTimeAsync(queuedEmail.CreatedOnUtc, DateTimeKind.Utc); if (queuedEmail.SentOnUtc.HasValue) model.SentOn = await _dateTimeHelper.ConvertToUserTimeAsync(queuedEmail.SentOnUtc.Value, DateTimeKind.Utc); if (queuedEmail.DontSendBeforeDateUtc.HasValue) model.DontSendBeforeDate = await _dateTimeHelper.ConvertToUserTimeAsync(queuedEmail.DontSendBeforeDateUtc.Value, DateTimeKind.Utc); else model.SendImmediately = true; return model; } #endregion }
QueuedEmailModelFactory
csharp
icsharpcode__ILSpy
ICSharpCode.Decompiler.Tests/TestCases/Pretty/Records.cs
{ "start": 8603, "end": 8916 }
public record ____ RecordCtorChain(int A, string B) { #if EXPECTED_OUTPUT public double C = 0.0; #else public double C; #endif public RecordCtorChain(int A) : this(A, "default") { C = 3.14; } public RecordCtorChain(string B) : this(42, B) { C = 1.41; } } } #endif }
struct
csharp
ServiceStack__ServiceStack
ServiceStack.Redis/tests/ServiceStack.Redis.Tests/RedisClientTestsBase.Async.cs
{ "start": 3054, "end": 4223 }
public abstract class ____ : RedisClientTestsBase { protected IRedisClientAsync RedisAsync => base.Redis; protected IRedisNativeClientAsync NativeAsync => base.Redis; [Obsolete("This should use RedisAsync or RedisRaw", true)] protected new RedisClient Redis => base.Redis; protected RedisClient RedisRaw { get => base.Redis; set => base.Redis = value; } public override void OnBeforeEachTest() { base.OnBeforeEachTest(); _ = RedisRaw.ForAsyncOnly(); } public override void OnAfterEachTest() { #if DEBUG if(RedisRaw is object) RedisRaw.DebugAllowSync = true; #endif base.OnAfterEachTest(); } protected static async ValueTask<List<T>> ToListAsync<T>(IAsyncEnumerable<T> source, CancellationToken token = default) { var list = new List<T>(); await foreach (var value in source.ConfigureAwait(false).WithCancellation(token)) { list.Add(value); } return list; } } }
RedisClientTestsBaseAsync
csharp
AvaloniaUI__Avalonia
src/Avalonia.Base/Media/PreciseEllipticArcHelper.cs
{ "start": 2648, "end": 3025 }
class ____ adapted for use with WPF StreamGeometryContext, and needs to be created explicitly /// for each particular arc. /// /// Some helpers /// /// It can handle ellipses which are not aligned with the x and y reference axes of the plane, /// as well as their parts. /// /// Another improvement is that this
is
csharp
mongodb__mongo-csharp-driver
src/MongoDB.Driver/Core/Events/ClusterOpenedEvent.cs
{ "start": 2470, "end": 2563 }
interface ____ EventType IEvent.Type => EventType.ClusterOpened; } }
implementations
csharp
dotnetcore__FreeSql
Providers/FreeSql.Provider.MySql/Curd/MySqlSelect.cs
{ "start": 18305, "end": 18869 }
class ____ T8 : class { public MySqlSelect(IFreeSql orm, CommonUtils commonUtils, CommonExpression commonExpression, object dywhere) : base(orm, commonUtils, commonExpression, dywhere) { } public override string ToSql(string field = null) => MySqlSelect<T1>.ToSqlStatic(_commonUtils, _commonExpression, _select, _distinct, field ?? this.GetAllFieldExpressionTreeLevel2().Field, _join, _where, _groupby, _having, _orderby, _skip, _limit, _tables, this.GetTableRuleUnions(), _aliasRule, _tosqlAppendContent, _whereGlobalFilter, _orm); }
where
csharp
dotnet__maui
src/Controls/samples/Controls.Sample/ViewModels/LayoutsViewModel.cs
{ "start": 194, "end": 2034 }
public class ____ : BaseGalleryViewModel { protected override IEnumerable<SectionModel> CreateItems() => new[] { new SectionModel(typeof(AbsoluteLayoutPage), "AbsoluteLayout", "An AbsoluteLayout is used to position and size children using explicit values. The position is specified by the upper-left corner of the child relative to the upper-left corner of the AbsoluteLayout, in device-independent units."), new SectionModel(typeof(ContentViewPage), "ContentView", "ContentView contains a single child that is set with the Content property. The Content property can be set to any View derivative, including other Layout derivatives. ContentView is mostly used as a structural element."), new SectionModel(typeof(FlexLayoutPage), "FlexLayout", "FlexLayout is also capable of wrapping its children if there are too many to fit in a single row or column, and also has many options for orientation, alignment, and adapting to various screen sizes."), new SectionModel(typeof(GridPage), "Grid", "The Grid is a layout that organizes its children into rows and columns, which can have proportional or absolute sizes."), new SectionModel(typeof(RelativeLayoutPage), "RelativeLayout", "A RelativeLayout is used to position and size children relative to properties of the layout or sibling elements. This allows UIs to be created that scale proportionally across device sizes."), new SectionModel(typeof(ScrollViewPage), "ScrollView", "ScrollView is capable of scrolling its contents."), new SectionModel(typeof(StackLayoutPage), "StackLayout", "A StackLayout organizes child views in a one-dimensional horizontal or vertical stack."), new SectionModel(typeof(TemplatedViewPage), "TemplatedView", "TemplatedView displays content with a control template, and is the base
LayoutsViewModel
csharp
dotnet__maui
src/Compatibility/Core/src/MacOS/Controls/FormsBoxView.cs
{ "start": 124, "end": 2082 }
internal class ____ : NSView { NSColor _colorToRenderer; NSColor _brushToRenderer; nfloat _topLeft; nfloat _topRight; nfloat _bottomLeft; nfloat _bottomRight; public override void DrawRect(CGRect dirtyRect) { if (_brushToRenderer != null) _brushToRenderer.SetFill(); else _colorToRenderer.SetFill(); var innerRect = NSBezierPath.FromRoundedRect(Bounds, 0, 0); NSBezierPath bezierPath = new NSBezierPath(); bezierPath.MoveTo(new CGPoint(innerRect.Bounds.X, innerRect.Bounds.Y + _bottomLeft)); // Bottom left (origin): bezierPath.AppendPathWithArc(new CGPoint(innerRect.Bounds.X + _bottomLeft, innerRect.Bounds.Y + _bottomLeft), _bottomLeft, (float)180.0, (float)270.0); // Bottom right: bezierPath.AppendPathWithArc(new CGPoint(innerRect.Bounds.X + innerRect.Bounds.Width - _bottomRight, innerRect.Bounds.Y + _bottomRight), _bottomRight, (float)270.0, (float)360.0); // Top right: bezierPath.AppendPathWithArc(new CGPoint(innerRect.Bounds.X + innerRect.Bounds.Width - _topRight, innerRect.Bounds.Y + innerRect.Bounds.Height - _topRight), _topRight, (float)0.0, (float)90.0); // Top left: bezierPath.AppendPathWithArc(new CGPoint(innerRect.Bounds.X + _topLeft, innerRect.Bounds.Y + innerRect.Bounds.Height - _topLeft), _topLeft, (float)90.0, (float)180.0); // Implicitly creates left edge. bezierPath.Fill(); base.DrawRect(dirtyRect); } public void SetColor(NSColor color) { _colorToRenderer = color; _brushToRenderer = null; SetNeedsDisplayInRect(Bounds); } public void SetBrush(NSColor brush) { _brushToRenderer = brush; _colorToRenderer = null; SetNeedsDisplayInRect(Bounds); } public void SetCornerRadius(float topLeft, float topRight, float bottomLeft, float bottomRight) { _topLeft = topLeft; _topRight = topRight; _bottomLeft = bottomLeft; _bottomRight = bottomRight; SetNeedsDisplayInRect(Bounds); } } }
FormsBoxView
csharp
dotnet__aspnetcore
src/Security/Authentication/Facebook/src/FacebookOptions.cs
{ "start": 398, "end": 3705 }
public class ____ : OAuthOptions { /// <summary> /// Initializes a new <see cref="FacebookOptions"/>. /// </summary> public FacebookOptions() { CallbackPath = new PathString("/signin-facebook"); SendAppSecretProof = true; AuthorizationEndpoint = FacebookDefaults.AuthorizationEndpoint; TokenEndpoint = FacebookDefaults.TokenEndpoint; UserInformationEndpoint = FacebookDefaults.UserInformationEndpoint; UsePkce = true; Scope.Add("email"); Fields.Add("name"); Fields.Add("email"); Fields.Add("first_name"); Fields.Add("last_name"); ClaimActions.MapJsonKey(ClaimTypes.NameIdentifier, "id"); ClaimActions.MapJsonSubKey("urn:facebook:age_range_min", "age_range", "min"); ClaimActions.MapJsonSubKey("urn:facebook:age_range_max", "age_range", "max"); ClaimActions.MapJsonKey(ClaimTypes.DateOfBirth, "birthday"); ClaimActions.MapJsonKey(ClaimTypes.Email, "email"); ClaimActions.MapJsonKey(ClaimTypes.Name, "name"); ClaimActions.MapJsonKey(ClaimTypes.GivenName, "first_name"); ClaimActions.MapJsonKey("urn:facebook:middle_name", "middle_name"); ClaimActions.MapJsonKey(ClaimTypes.Surname, "last_name"); ClaimActions.MapJsonKey(ClaimTypes.Gender, "gender"); ClaimActions.MapJsonKey("urn:facebook:link", "link"); ClaimActions.MapJsonSubKey("urn:facebook:location", "location", "name"); ClaimActions.MapJsonKey(ClaimTypes.Locality, "locale"); ClaimActions.MapJsonKey("urn:facebook:timezone", "timezone"); } /// <summary> /// Check that the options are valid. Should throw an exception if things are not ok. /// </summary> public override void Validate() { ArgumentException.ThrowIfNullOrEmpty(AppId); ArgumentException.ThrowIfNullOrEmpty(AppSecret); base.Validate(); } // Facebook uses a non-standard term for this field. /// <summary> /// Gets or sets the Facebook-assigned App ID. /// </summary> public string AppId { get { return ClientId; } set { ClientId = value; } } // Facebook uses a non-standard term for this field. /// <summary> /// Gets or sets the Facebook-assigned app secret. 
/// </summary> public string AppSecret { get { return ClientSecret; } set { ClientSecret = value; } } /// <summary> /// Gets or sets if the <c>appsecret_proof</c> should be generated and sent with Facebook API calls. /// </summary> /// <remarks>See <see href="https://developers.facebook.com/docs/graph-api/security#appsecret_proof"/> for more details.</remarks> /// <value>Defaults to <see langword="true"/>.</value> public bool SendAppSecretProof { get; set; } /// <summary> /// The list of fields to retrieve from the UserInformationEndpoint. /// </summary> /// <remarks>See <see href="https://developers.facebook.com/docs/graph-api/reference/user"/> for more details.</remarks> /// <value> /// Defaults to include the following fields if none are specified: "name", "email", "first_name", and "last_name". /// </value> public ICollection<string> Fields { get; } = new HashSet<string>(); }
FacebookOptions
csharp
microsoft__FASTER
cs/src/core/Index/Interfaces/CallbackInfos.cs
{ "start": 5059, "end": 5913 }
record ____ operated on /// </summary> public long Address { get; internal set; } /// <summary> /// Hash code of key being operated on /// </summary> public long KeyHash { get; internal set; } /// <summary> /// The ID of session context executing the operation /// </summary> public int SessionID { get; internal set; } /// <summary> /// The header of the record. /// </summary> public RecordInfo RecordInfo { get; internal set; } /// <summary> /// What actions FASTER should perform on a false return from the IFunctions method /// </summary> public DeleteAction Action { get; set; } } /// <summary> /// What actions to take following the RMW IFunctions method call, such as cancellation or
being
csharp
mongodb__mongo-csharp-driver
src/MongoDB.Driver/Core/Events/ConnectionPoolClearedEvent.cs
{ "start": 5048, "end": 5149 }
interface ____ EventType IEvent.Type => EventType.ConnectionPoolCleared; } }
implementations
csharp
dotnet__machinelearning
src/Microsoft.ML.Data/DataView/Transposer.cs
{ "start": 52320, "end": 54384 }
private sealed class ____ : RowBase<NoSplitter<T>> { private readonly bool _isActive; public RowImpl(NoSplitter<T> parent, DataViewRow input, bool isActive) : base(parent, input) { Contracts.Assert(Parent.ColumnCount == 1); _isActive = isActive; } /// <summary> /// Returns whether the given column is active in this row. /// </summary> public override bool IsColumnActive(DataViewSchema.Column column) { Contracts.CheckParam(column.Index < Parent.ColumnCount, nameof(column)); return _isActive; } /// <summary> /// Returns a value getter delegate to fetch the value of column with the given columnIndex, from the row. /// This throws if the column is not active in this row, or if the type /// <typeparamref name="TValue"/> differs from this column's type. /// </summary> /// <typeparam name="TValue"> is the column's content type.</typeparam> /// <param name="column"> is the output column whose getter should be returned.</param> public override ValueGetter<TValue> GetGetter<TValue>(DataViewSchema.Column column) { Contracts.Check(IsColumnActive(column)); return Input.GetGetter<TValue>(Input.Schema[Parent.SrcCol]); } } } /// <summary> /// This splitter enables the partition of a single column into two or more /// columns. /// </summary>
RowImpl
csharp
microsoft__garnet
test/Garnet.test.cluster/ClusterConfigTests.cs
{ "start": 399, "end": 6902 }
internal class ____ { ClusterTestContext context; readonly Dictionary<string, LogLevel> monitorTests = []; [SetUp] public void Setup() { context = new ClusterTestContext(); context.Setup(monitorTests); } [TearDown] public void TearDown() { context.TearDown(); } [Test, Order(1)] [Category("CLUSTER-CONFIG"), CancelAfter(1000)] public void ClusterConfigInitializesUnassignedWorkerTest() { var config = new ClusterConfig().InitializeLocalWorker( Generator.CreateHexId(), "127.0.0.1", 7001, configEpoch: 0, Garnet.cluster.NodeRole.PRIMARY, null, ""); (string address, int port) = config.GetWorkerAddress(0); Assert.That(address == "unassigned"); Assert.That(port == 0); Assert.That(Garnet.cluster.NodeRole.UNASSIGNED == config.GetNodeRoleFromNodeId("asdasdqwe")); var configBytes = config.ToByteArray(); var restoredConfig = ClusterConfig.FromByteArray(configBytes); (address, port) = restoredConfig.GetWorkerAddress(0); Assert.That(address == "unassigned"); Assert.That(port == 0); Assert.That(Garnet.cluster.NodeRole.UNASSIGNED == restoredConfig.GetNodeRoleFromNodeId("asdasdqwe")); } [Test, Order(2)] [Category("CLUSTER-CONFIG"), CancelAfter(1000)] public void ClusterForgetAfterNodeRestartTest() { int nbInstances = 4; context.CreateInstances(nbInstances); context.CreateConnection(); var (shards, slots) = context.clusterTestUtils.SimpleSetupCluster(logger: context.logger); // Restart node with new ACL file context.nodes[0].Dispose(false); context.nodes[0] = context.CreateInstance(context.clusterTestUtils.GetEndPoint(0), useAcl: true, cleanClusterConfig: false); context.nodes[0].Start(); context.CreateConnection(); var firstNode = context.nodes[0]; var nodesResult = context.clusterTestUtils.ClusterNodes(0); Assert.That(nodesResult.Nodes.Count == nbInstances); var server = context.clusterTestUtils.GetServer(context.endpoints[0].ToIPEndPoint()); var args = new List<object>() { "forget", Encoding.ASCII.GetBytes("1ip23j89123no"), Encoding.ASCII.GetBytes("0") }; var ex = 
Assert.Throws<RedisServerException>(() => server.Execute("cluster", args), "Cluster forget call shouldn't have succeeded for an invalid node id."); Assert.That(ex.Message, Is.EqualTo("ERR I don't know about node 1ip23j89123no.")); nodesResult = context.clusterTestUtils.ClusterNodes(0); Assert.That(nodesResult.Nodes.Count == nbInstances, "No node should've been removed from the cluster after an invalid id was passed."); Assert.That(nodesResult.Nodes.ElementAt(0).IsMyself); Assert.That(nodesResult.Nodes.ElementAt(0).EndPoint.ToIPEndPoint().Port == 7000, "Expected the node to be replying to be the one with port 7000."); context.clusterTestUtils.ClusterForget(0, nodesResult.Nodes.Last().NodeId, 0); nodesResult = context.clusterTestUtils.ClusterNodes(0); Assert.That(nodesResult.Nodes.Count == nbInstances - 1, "A node should've been removed from the cluster."); Assert.That(nodesResult.Nodes.ElementAt(0).IsMyself); Assert.That(nodesResult.Nodes.ElementAt(0).EndPoint.ToIPEndPoint().Port == 7000, "Expected the node to be replying to be the one with port 7000."); } [Test, Order(2)] [Category("CLUSTER-CONFIG"), CancelAfter(1000)] public void ClusterAnnounceRecoverTest() { context.CreateInstances(1); context.CreateConnection(); var config = context.clusterTestUtils.ClusterNodes(0, logger: context.logger); var origin = config.Origin; var clusterNodesEndpoint = origin.ToIPEndPoint(); ClassicAssert.AreEqual("127.0.0.1", clusterNodesEndpoint.Address.ToString()); ClassicAssert.AreEqual(ClusterTestContext.Port, clusterNodesEndpoint.Port); ClassicAssert.IsTrue(IPAddress.TryParse("127.0.0.2", out var ipAddress)); var announcePort = clusterNodesEndpoint.Port + 10000; var clusterAnnounceEndpoint = new IPEndPoint(ipAddress, announcePort); context.nodes[0].Dispose(false); context.nodes[0] = context.CreateInstance(context.clusterTestUtils.GetEndPoint(0), cleanClusterConfig: false, tryRecover: true, clusterAnnounceEndpoint: clusterAnnounceEndpoint); context.nodes[0].Start(); 
context.CreateConnection(); config = context.clusterTestUtils.ClusterNodes(0, logger: context.logger); origin = config.Origin; clusterNodesEndpoint = origin.ToIPEndPoint(); ClassicAssert.AreEqual(clusterAnnounceEndpoint.Address.ToString(), clusterNodesEndpoint.Address.ToString()); ClassicAssert.AreEqual(clusterAnnounceEndpoint.Port, clusterNodesEndpoint.Port); } [Test, Order(3)] [Category("CLUSTER-CONFIG"), CancelAfter(1000)] public void ClusterAnyIPAnnounce() { context.nodes = new GarnetServer[1]; context.nodes[0] = context.CreateInstance(new IPEndPoint(IPAddress.Any, 7000)); context.nodes[0].Start(); context.endpoints = TestUtils.GetShardEndPoints(1, IPAddress.Loopback, 7000); context.CreateConnection(); var config = context.clusterTestUtils.ClusterNodes(0, logger: context.logger); var origin = config.Origin; var endpoint = origin.ToIPEndPoint(); ClassicAssert.AreEqual(7000, endpoint.Port); using var client = TestUtils.GetGarnetClient(config.Origin); client.Connect(); var resp = client.PingAsync().GetAwaiter().GetResult(); ClassicAssert.AreEqual("PONG", resp); resp = client.QuitAsync().GetAwaiter().GetResult(); ClassicAssert.AreEqual("OK", resp); } } }
ClusterConfigTests
csharp
ChilliCream__graphql-platform
src/HotChocolate/AspNetCore/test/AspNetCore.Authorization.Tests/AuthorizationTestData.cs
{ "start": 199, "end": 1844 }
public class ____ : IEnumerable<object[]> { private readonly string _schemaCode = // lang=graphql """ type Query { default: String @authorize(apply: BEFORE_RESOLVER) age: String @authorize(policy: "HasAgeDefined", apply: BEFORE_RESOLVER) roles: String @authorize(roles: ["a"], apply: BEFORE_RESOLVER) roles_ab: String @authorize(roles: ["a", "b"], apply: BEFORE_RESOLVER) rolesAndPolicy: String @authorize(roles: ["a", "b"], policy: "HasAgeDefined", apply: BEFORE_RESOLVER) piped: String @authorize(policy: "a", apply: BEFORE_RESOLVER) @authorize(policy: "b", apply: BEFORE_RESOLVER) afterResolver: String @authorize(policy: "a", apply: AFTER_RESOLVER) } """; private readonly FieldMiddleware _schemaMiddleware = next => context => { context.Result = "foo"; return next.Invoke(context); }; private Action<IRequestExecutorBuilder> CreateSchema() => sb => sb .AddDocumentFromString(_schemaCode) .AddAuthorization() .UseField(_schemaMiddleware); private Action<IRequestExecutorBuilder> CreateSchemaWithBuilder() => sb => sb .AddDocumentFromString(_schemaCode) .AddAuthorization() .UseField(_schemaMiddleware); public IEnumerator<object[]> GetEnumerator() { yield return [CreateSchema()]; yield return [CreateSchemaWithBuilder()]; } IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); }
AuthorizationTestData
csharp
DuendeSoftware__IdentityServer
identity-server/src/IdentityServer/Validation/Default/DefaultCustomAuthorizeRequestValidator.cs
{ "start": 228, "end": 570 }
internal class ____ : ICustomAuthorizeRequestValidator { /// <summary> /// Custom validation logic for the authorize request. /// </summary> /// <param name="context">The context.</param> public Task ValidateAsync(CustomAuthorizeRequestValidationContext context) => Task.CompletedTask; }
DefaultCustomAuthorizeRequestValidator
csharp
spectreconsole__spectre.console
src/Spectre.Console/Internal/TypeConverterHelper.cs
{ "start": 28, "end": 6370 }
internal static class ____ { internal const DynamicallyAccessedMemberTypes ConverterAnnotation = DynamicallyAccessedMemberTypes.PublicParameterlessConstructor | DynamicallyAccessedMemberTypes.PublicFields; internal static bool IsGetConverterSupported => !AppContext.TryGetSwitch("Spectre.Console.TypeConverterHelper.IsGetConverterSupported ", out var enabled) || enabled; public static string ConvertToString<T>(T input) { var result = GetTypeConverter<T>().ConvertToInvariantString(input); if (result == null) { throw new InvalidOperationException("Could not convert input to a string"); } return result; } public static bool TryConvertFromString<T>(string input, [MaybeNull] out T? result) { try { result = (T?)GetTypeConverter<T>().ConvertFromInvariantString(input); return true; } catch { result = default; return false; } } public static bool TryConvertFromStringWithCulture<T>(string input, CultureInfo? info, [MaybeNull] out T? result) { try { if (info == null) { return TryConvertFromString<T>(input, out result); } else { result = (T?)GetTypeConverter<T>().ConvertFromString(null!, info, input); } return true; } catch { result = default; return false; } } public static TypeConverter GetTypeConverter<T>() { var converter = GetConverter(); if (converter != null) { return converter; } var attribute = typeof(T).GetCustomAttribute<TypeConverterAttribute>(); if (attribute != null) { var type = Type.GetType(attribute.ConverterTypeName, false, false); if (type != null) { converter = Activator.CreateInstance(type) as TypeConverter; if (converter != null) { return converter; } } } throw new InvalidOperationException("Could not find type converter"); [UnconditionalSuppressMessage("ReflectionAnalysis", "IL2087", Justification = "Feature switches are not currently supported in the analyzer")] [UnconditionalSuppressMessage("ReflectionAnalysis", "IL2026", Justification = "Feature switches are not currently supported in the analyzer")] static TypeConverter? 
GetConverter() { if (IsGetConverterSupported) { // Spectre.Console.TypeConverterHelper.IsGetConverterSupported has been set so // fallback to original behavior return TypeDescriptor.GetConverter(typeof(T)); } // otherwise try and use the intrinsic converter. if we can't find one, then // try and use GetConverter. var intrinsicConverter = GetIntrinsicConverter(typeof(T)); return intrinsicConverter ?? TypeDescriptor.GetConverter(typeof(T)); } } private delegate TypeConverter FuncWithDam([DynamicallyAccessedMembers(ConverterAnnotation)] Type type); private static readonly Dictionary<Type, FuncWithDam> _intrinsicConverters; static TypeConverterHelper() { _intrinsicConverters = new() { [typeof(bool)] = _ => new BooleanConverter(), [typeof(byte)] = _ => new ByteConverter(), [typeof(sbyte)] = _ => new SByteConverter(), [typeof(char)] = _ => new CharConverter(), [typeof(double)] = _ => new DoubleConverter(), [typeof(string)] = _ => new StringConverter(), [typeof(int)] = _ => new Int32Converter(), [typeof(short)] = _ => new Int16Converter(), [typeof(long)] = _ => new Int64Converter(), [typeof(float)] = _ => new SingleConverter(), [typeof(ushort)] = _ => new UInt16Converter(), [typeof(uint)] = _ => new UInt32Converter(), [typeof(ulong)] = _ => new UInt64Converter(), [typeof(object)] = _ => new TypeConverter(), [typeof(CultureInfo)] = _ => new CultureInfoConverter(), [typeof(DateTime)] = _ => new DateTimeConverter(), [typeof(DateTimeOffset)] = _ => new DateTimeOffsetConverter(), [typeof(decimal)] = _ => new DecimalConverter(), [typeof(TimeSpan)] = _ => new TimeSpanConverter(), [typeof(Guid)] = _ => new GuidConverter(), [typeof(Uri)] = _ => new UriTypeConverter(), [typeof(Array)] = _ => new ArrayConverter(), [typeof(ICollection)] = _ => new CollectionConverter(), [typeof(Enum)] = CreateEnumConverter(), #if !NETSTANDARD2_0 [typeof(Int128)] = _ => new Int128Converter(), [typeof(Half)] = _ => new HalfConverter(), [typeof(UInt128)] = _ => new UInt128Converter(), [typeof(DateOnly)] 
= _ => new DateOnlyConverter(), [typeof(TimeOnly)] = _ => new TimeOnlyConverter(), [typeof(Version)] = _ => new VersionConverter(), #endif }; } [UnconditionalSuppressMessage("ReflectionAnalysis", "IL2111", Justification = "Delegate reflection is safe for all usages in this type.")] private static FuncWithDam CreateEnumConverter() => ([DynamicallyAccessedMembers(ConverterAnnotation)] Type type) => new EnumConverter(type); /// <summary> /// A highly-constrained version of <see cref="TypeDescriptor.GetConverter(Type)" /> that only returns intrinsic converters. /// </summary> private static TypeConverter? GetIntrinsicConverter([DynamicallyAccessedMembers(ConverterAnnotation)] Type type) { if (type.IsArray) { type = typeof(Array); } if (typeof(ICollection).IsAssignableFrom(type)) { type = typeof(ICollection); } if (type.IsEnum) { type = typeof(Enum); } if (_intrinsicConverters.TryGetValue(type, out var factory)) { return factory(type); } return null; } }
TypeConverterHelper
csharp
abpframework__abp
framework/src/Volo.Abp.AspNetCore.Serilog/Volo/Abp/AspNetCore/Serilog/AbpAspNetCoreSerilogOptions.cs
{ "start": 189, "end": 825 }
public class ____ { /// <summary> /// Default value: "TenantId". /// </summary> public string TenantId { get; set; } = "TenantId"; /// <summary> /// Default value: "UserId". /// </summary> public string UserId { get; set; } = "UserId"; /// <summary> /// Default value: "ClientId". /// </summary> public string ClientId { get; set; } = "ClientId"; /// <summary> /// Default value: "CorrelationId". /// </summary> public string CorrelationId { get; set; } = "CorrelationId"; } }
AllEnricherPropertyNames
csharp
dotnet__aspire
src/Aspire.Hosting.Kubernetes/Resources/HorizontalPodAutoscalerBehaviorV2.cs
{ "start": 383, "end": 637 }
class ____ how the scaling process should behave, /// including rules for scaling up and scaling down. This can include settings such as /// stabilization windows and scaling policies to ensure smooth transitions. /// </remarks> [YamlSerializable]
specifies
csharp
dotnet__maui
src/Core/tests/UnitTests/Layouts/GridLayoutManagerTests.cs
{ "start": 527, "end": 112828 }
public class ____ { const string GridSpacing = "GridSpacing"; const string GridAutoSizing = "GridAutoSizing"; const string GridStarSizing = "GridStarSizing"; const string GridAbsoluteSizing = "GridAbsoluteSizing"; const string GridSpan = "GridSpan"; IGridLayout CreateGridLayout(int rowSpacing = 0, int colSpacing = 0, string rows = null, string columns = null, IList<IView> children = null) { IEnumerable<IGridRowDefinition> rowDefs = null; IEnumerable<IGridColumnDefinition> colDefs = null; if (rows != null) { rowDefs = CreateTestRows(rows.Split(",")); } if (columns != null) { colDefs = CreateTestColumns(columns.Split(",")); } var grid = Substitute.For<IGridLayout>(); grid.Height.Returns(Dimension.Unset); grid.Width.Returns(Dimension.Unset); grid.MinimumHeight.Returns(Dimension.Minimum); grid.MinimumWidth.Returns(Dimension.Minimum); grid.MaximumHeight.Returns(Dimension.Maximum); grid.MaximumWidth.Returns(Dimension.Maximum); grid.RowSpacing.Returns(rowSpacing); grid.ColumnSpacing.Returns(colSpacing); SubRowDefs(grid, rowDefs); SubColDefs(grid, colDefs); if (children != null) { SubstituteChildren(grid, children); } return grid; } void SubRowDefs(IGridLayout grid, IEnumerable<IGridRowDefinition> rows = null) { if (rows == null) { var rowDefs = new List<IGridRowDefinition>(); grid.RowDefinitions.Returns(rowDefs); } else { grid.RowDefinitions.Returns(rows); } } void SubColDefs(IGridLayout grid, IEnumerable<IGridColumnDefinition> cols = null) { if (cols == null) { var colDefs = new List<IGridColumnDefinition>(); grid.ColumnDefinitions.Returns(colDefs); } else { grid.ColumnDefinitions.Returns(cols); } } static GridLength GridLengthFromString(string gridLength) { CultureInfo usCulture = new CultureInfo("en-US"); // Ensure we're using a period as the decimal separator gridLength = gridLength.Trim(); if (gridLength.EndsWith("*")) { gridLength = gridLength.Substring(0, gridLength.Length - 1); if (gridLength.Length == 0) { return GridLength.Star; } return new 
GridLength(double.Parse(gridLength, usCulture), GridUnitType.Star); } if (gridLength.ToLowerInvariant() == "auto") { return GridLength.Auto; } return new GridLength(double.Parse(gridLength, CultureInfo.InvariantCulture)); } List<IGridColumnDefinition> CreateTestColumns(params string[] columnWidths) { var colDefs = new List<IGridColumnDefinition>(); foreach (var width in columnWidths) { var gridLength = GridLengthFromString(width); var colDef = Substitute.For<IGridColumnDefinition>(); colDef.Width.Returns(gridLength); colDefs.Add(colDef); } return colDefs; } List<IGridRowDefinition> CreateTestRows(params string[] rowHeights) { var rowDefs = new List<IGridRowDefinition>(); foreach (var height in rowHeights) { var gridLength = GridLengthFromString(height); var rowDef = Substitute.For<IGridRowDefinition>(); rowDef.Height.Returns(gridLength); rowDefs.Add(rowDef); } return rowDefs; } void SetLocation(IGridLayout grid, IView view, int row = 0, int col = 0, int rowSpan = 1, int colSpan = 1) { grid.GetRow(view).Returns(row); grid.GetRowSpan(view).Returns(rowSpan); grid.GetColumn(view).Returns(col); grid.GetColumnSpan(view).Returns(colSpan); } static Size MeasureAndArrangeFixed(IGridLayout grid, double widthConstraint, double heightConstraint, double left = 0, double top = 0) { var manager = new GridLayoutManager(grid); var measuredSize = manager.Measure(widthConstraint, heightConstraint); var arrangeSize = new Size(widthConstraint, heightConstraint); manager.ArrangeChildren(new Rect(new Point(left, top), arrangeSize)); return measuredSize; } static Size MeasureAndArrange(IGridLayout grid, double widthConstraint = double.PositiveInfinity, double heightConstraint = double.PositiveInfinity, double left = 0, double top = 0) { var manager = new GridLayoutManager(grid); var measuredSize = manager.Measure(widthConstraint, heightConstraint); manager.ArrangeChildren(new Rect(new Point(left, top), measuredSize)); return measuredSize; } static Size MeasureAndArrangeAuto(IGridLayout 
grid) { return MeasureAndArrange(grid, double.PositiveInfinity, double.PositiveInfinity, 0, 0); } [Category(GridAutoSizing)] [Fact] public void OneAutoRowOneAutoColumn() { // A one-row, one-column grid var grid = CreateGridLayout(); // A 100x100 IView var view = CreateTestView(new Size(100, 100)); // Set up the grid to have a single child SubstituteChildren(grid, view); // Set up the row/column values and spans SetLocation(grid, view); MeasureAndArrangeAuto(grid); // No rows/columns were specified, so the implied */* is used; we're measuring with infinity, so // we expect that the view will be arranged at its measured size AssertArranged(view, 0, 0, 100, 100); } [Category(GridAbsoluteSizing)] [Fact] public void TwoAbsoluteColumnsOneAbsoluteRow() { var grid = CreateGridLayout(columns: "100, 100", rows: "10"); var viewSize = new Size(10, 10); var view0 = CreateTestView(viewSize); var view1 = CreateTestView(viewSize); SubstituteChildren(grid, view0, view1); SetLocation(grid, view0); SetLocation(grid, view1, col: 1); // Assuming no constraints on space MeasureAndArrangeAuto(grid); // Column width is 100, viewSize is less than that, so it should be able to layout out at full size AssertArranged(view0, 0, 0, 100, 10); // Since the first column is 100 wide, we expect the view in the second column to start at x = 100 AssertArranged(view1, 100, 0, 100, 10); } [Category(GridAbsoluteSizing)] [Fact] public void TwoAbsoluteRowsAndColumns() { var grid = CreateGridLayout(columns: "100, 100", rows: "10, 30"); var viewSize = new Size(10, 10); var view0 = CreateTestView(viewSize); var view1 = CreateTestView(viewSize); var view2 = CreateTestView(viewSize); var view3 = CreateTestView(viewSize); SubstituteChildren(grid, view0, view1, view2, view3); SetLocation(grid, view0); SetLocation(grid, view1, col: 1); SetLocation(grid, view2, row: 1); SetLocation(grid, view3, row: 1, col: 1); // Assuming no constraints on space MeasureAndArrangeAuto(grid); // Verify that the views are getting 
measured at all, and that they're being measured at // the appropriate sizes view0.Received().Measure(Arg.Is<double>(100), Arg.Is<double>(10)); view1.Received().Measure(Arg.Is<double>(100), Arg.Is<double>(10)); view2.Received().Measure(Arg.Is<double>(100), Arg.Is<double>(30)); view3.Received().Measure(Arg.Is<double>(100), Arg.Is<double>(30)); AssertArranged(view0, 0, 0, 100, 10); // Since the first column is 100 wide, we expect the view in the second column to start at x = 100 AssertArranged(view1, 100, 0, 100, 10); // First column, second row, so y should be 10 AssertArranged(view2, 0, 10, 100, 30); // Second column, second row, so 100, 10 AssertArranged(view3, 100, 10, 100, 30); } [Category(GridAbsoluteSizing), Category(GridAutoSizing)] [Fact] public void TwoAbsoluteColumnsOneAutoRow() { var grid = CreateGridLayout(columns: "100, 100"); var viewSize = new Size(10, 10); var view0 = CreateTestView(viewSize); var view1 = CreateTestView(viewSize); SubstituteChildren(grid, view0, view1); SetLocation(grid, view0); SetLocation(grid, view1, col: 1); // Assuming no constraints on space MeasureAndArrangeAuto(grid); // Column width is 100, viewSize is less, so it should be able to layout at full size AssertArranged(view0, 0, 0, 100, viewSize.Height); // Since the first column is 100 wide, we expect the view in the second column to start at x = 100 AssertArranged(view1, 100, 0, 100, viewSize.Height); } [Category(GridAbsoluteSizing), Category(GridAutoSizing)] [Fact] public void TwoAbsoluteRowsOneAutoColumn() { var grid = CreateGridLayout(rows: "100, 100"); var viewSize = new Size(10, 10); var view0 = CreateTestView(viewSize); var view1 = CreateTestView(viewSize); SubstituteChildren(grid, view0, view1); SetLocation(grid, view0); SetLocation(grid, view1, row: 1); // Assuming no constraints on space MeasureAndArrangeAuto(grid); // Row height is 100, so full view should fit AssertArranged(view0, 0, 0, viewSize.Width, 100); // Since the first row is 100 tall, we expect the view in 
the second row to start at y = 100 AssertArranged(view1, 0, 100, viewSize.Width, 100); } [Category(GridSpacing)] [Fact(DisplayName = "Row spacing shouldn't affect a single-row grid")] public void SingleRowIgnoresRowSpacing() { var grid = CreateGridLayout(rowSpacing: 10); var view = CreateTestView(new Size(100, 100)); SubstituteChildren(grid, view); SetLocation(grid, view); MeasureAndArrangeAuto(grid); AssertArranged(view, 0, 0, 100, 100); } [Category(GridSpacing)] [Fact(DisplayName = "Two rows should include the row spacing once")] public void TwoRowsWithSpacing() { var grid = CreateGridLayout(rows: "100, 100", rowSpacing: 10); var view0 = CreateTestView(new Size(100, 100)); var view1 = CreateTestView(new Size(100, 100)); SubstituteChildren(grid, view0, view1); SetLocation(grid, view0); SetLocation(grid, view1, row: 1); MeasureAndArrangeAuto(grid); AssertArranged(view0, 0, 0, 100, 100); // With column width 100 and spacing of 10, we expect the second column to start at 110 AssertArranged(view1, 0, 110, 100, 100); } [Category(GridSpacing)] [Fact(DisplayName = "Measure should include row spacing")] public void MeasureTwoRowsWithSpacing() { var grid = CreateGridLayout(rows: "100, 100", rowSpacing: 10); var view0 = CreateTestView(new Size(100, 100)); var view1 = CreateTestView(new Size(100, 100)); SubstituteChildren(grid, view0, view1); SetLocation(grid, view0); SetLocation(grid, view1, row: 1); var manager = new GridLayoutManager(grid); var measure = manager.Measure(double.PositiveInfinity, double.PositiveInfinity); Assert.Equal(100 + 100 + 10, measure.Height); } [Category(GridAutoSizing)] [Fact(DisplayName = "Auto rows without content have height zero")] public void EmptyAutoRowsHaveNoHeight() { var grid = CreateGridLayout(rows: "100, auto, 100"); var view0 = CreateTestView(new Size(100, 100)); var view2 = CreateTestView(new Size(100, 100)); SubstituteChildren(grid, view0, view2); SetLocation(grid, view0); SetLocation(grid, view2, row: 2); var manager = new 
GridLayoutManager(grid); var measure = manager.Measure(double.PositiveInfinity, double.PositiveInfinity); manager.ArrangeChildren(new Rect(0, 0, measure.Width, measure.Height)); // Because the auto row has no content, we expect it to have height zero Assert.Equal(100 + 100, measure.Height); // Verify the offset for the third row AssertArranged(view2, 0, 100, 100, 100); } [Category(GridSpacing, GridAutoSizing)] [Fact(DisplayName = "Empty rows should still count for row spacing")] public void RowSpacingForEmptyRows() { var grid = CreateGridLayout(rows: "100, auto, 100", rowSpacing: 10); var view0 = CreateTestView(new Size(100, 100)); var view2 = CreateTestView(new Size(100, 100)); SubstituteChildren(grid, view0, view2); SetLocation(grid, view0); SetLocation(grid, view2, row: 2); var measure = MeasureAndArrange(grid, double.PositiveInfinity, double.PositiveInfinity); Assert.Equal(100 + 100 + 10 + 10, measure.Height); AssertArranged(view2, new Rect(0, 100 + 10 + 10, 100, 100)); } [Category(GridSpacing, GridAutoSizing)] [Fact(DisplayName = "Auto rows with collapsed views should still count for row spacing")] public void RowSpacingForAutoRowsWithCollapsedViews() { var grid = CreateGridLayout(rows: "100, auto, 100", rowSpacing: 10); var view0 = CreateTestView(new Size(100, 100)); var view1 = CreateTestView(new Size(100, 100)); var view2 = CreateTestView(new Size(100, 100)); SubstituteChildren(grid, view0, view2); SetLocation(grid, view0); SetLocation(grid, view1, row: 1); SetLocation(grid, view2, row: 2); view1.Visibility.Returns(Visibility.Collapsed); var measure = MeasureAndArrange(grid, double.PositiveInfinity, double.PositiveInfinity); Assert.Equal(100 + 100 + 10 + 10, measure.Height); AssertArranged(view2, new Rect(0, 100 + 10 + 10, 100, 100)); } [Category(GridSpacing)] [Fact(DisplayName = "Column spacing shouldn't affect a single-column grid")] public void SingleColumnIgnoresColumnSpacing() { var grid = CreateGridLayout(colSpacing: 10); var view = CreateTestView(new 
Size(100, 100)); SubstituteChildren(grid, view); SetLocation(grid, view); MeasureAndArrangeAuto(grid); AssertArranged(view, 0, 0, 100, 100); } [Category(GridSpacing)] [Fact(DisplayName = "Two columns should include the column spacing once")] public void TwoColumnsWithSpacing() { var grid = CreateGridLayout(columns: "100, 100", colSpacing: 10); var view0 = CreateTestView(new Size(100, 100)); var view1 = CreateTestView(new Size(100, 100)); SubstituteChildren(grid, view0, view1); SetLocation(grid, view0); SetLocation(grid, view1, col: 1); MeasureAndArrangeAuto(grid); AssertArranged(view0, 0, 0, 100, 100); // With column width 100 and spacing of 10, we expect the second column to start at 110 AssertArranged(view1, 110, 0, 100, 100); } [Category(GridSpacing)] [Fact(DisplayName = "Measure should include column spacing")] public void MeasureTwoColumnsWithSpacing() { var grid = CreateGridLayout(columns: "100, 100", colSpacing: 10); var view0 = CreateTestView(new Size(100, 100)); var view1 = CreateTestView(new Size(100, 100)); SubstituteChildren(grid, view0, view1); SetLocation(grid, view0); SetLocation(grid, view1, col: 1); var manager = new GridLayoutManager(grid); var measure = manager.Measure(double.PositiveInfinity, double.PositiveInfinity); Assert.Equal(100 + 100 + 10, measure.Width); } [Category(GridAutoSizing)] [Fact(DisplayName = "Auto columns without content have width zero")] public void EmptyAutoColumnsHaveNoWidth() { var grid = CreateGridLayout(columns: "100, auto, 100"); var view0 = CreateTestView(new Size(100, 100)); var view2 = CreateTestView(new Size(100, 100)); SubstituteChildren(grid, view0, view2); SetLocation(grid, view0); SetLocation(grid, view2, col: 2); var manager = new GridLayoutManager(grid); var measure = manager.Measure(double.PositiveInfinity, double.PositiveInfinity); manager.ArrangeChildren(new Rect(0, 0, measure.Width, measure.Height)); // Because the auto column has no content, we expect it to have width zero Assert.Equal(100 + 100, 
measure.Width); // Verify the offset for the third column AssertArranged(view2, 100, 0, 100, 100); } [Category(GridSpacing, GridAutoSizing)] [Fact(DisplayName = "Empty columns still count for column spacing")] public void ColumnSpacingForEmptyColumns() { var grid = CreateGridLayout(columns: "auto, auto, auto", colSpacing: 10); var view0 = CreateTestView(new Size(100, 100)); var view2 = CreateTestView(new Size(100, 100)); SubstituteChildren(grid, view0, view2); SetLocation(grid, view0); SetLocation(grid, view2, col: 2); var measure = MeasureAndArrange(grid, double.PositiveInfinity, double.PositiveInfinity); Assert.Equal(100 + 100 + 10 + 10, measure.Width); AssertArranged(view2, new Rect(100 + 10 + 10, 0, 100, 100)); } [Category(GridSpacing, GridAutoSizing)] [Fact(DisplayName = "Auto columns with collapsed views should still count for column spacing")] public void AutoColumnsWithCollapsedViews() { var grid = CreateGridLayout(columns: "100, auto, 100", colSpacing: 10); var view0 = CreateTestView(new Size(100, 100)); var view1 = CreateTestView(new Size(100, 100)); var view2 = CreateTestView(new Size(100, 100)); SubstituteChildren(grid, view0, view2); SetLocation(grid, view0); SetLocation(grid, view1, col: 1); SetLocation(grid, view2, col: 2); view1.Visibility.Returns(Visibility.Collapsed); var measure = MeasureAndArrange(grid, double.PositiveInfinity, double.PositiveInfinity); Assert.Equal(100 + 100 + 10 + 10, measure.Width); AssertArranged(view2, new Rect(100 + 10 + 10, 0, 100, 100)); } [Category(GridSpan)] [Fact(DisplayName = "Simple row spanning")] public void ViewSpansRows() { var grid = CreateGridLayout(rows: "auto, auto"); var view0 = CreateTestView(new Size(100, 100)); SubstituteChildren(grid, view0); SetLocation(grid, view0, rowSpan: 2); var measuredSize = MeasureAndArrangeAuto(grid); AssertArranged(view0, 0, 0, 100, 100); Assert.Equal(100, measuredSize.Width); // We expect the rows to each get half the view height Assert.Equal(100, measuredSize.Height); } 
// Row-spanning view alongside a view confined to the second row/column.
[Category(GridSpan)]
[Fact(DisplayName = "Simple row spanning with multiple views")]
public void ViewSpansRowsWhenOtherViewsPresent()
{
	var grid = CreateGridLayout(rows: "auto, auto", columns: "auto, auto");
	var view0 = CreateTestView(new Size(100, 100));
	var view1 = CreateTestView(new Size(50, 50));
	SubstituteChildren(grid, view0, view1);
	SetLocation(grid, view0, rowSpan: 2);
	SetLocation(grid, view1, row: 1, col: 1);

	var measuredSize = MeasureAndArrangeAuto(grid);

	Assert.Equal(100 + 50, measuredSize.Width);
	Assert.Equal(100, measuredSize.Height);

	AssertArranged(view0, 0, 0, 100, 100);
	AssertArranged(view1, 100, 25, 50, 75);
}

// The height of a row-spanning view includes the row spacing between the rows it crosses.
[Category(GridSpacing, GridSpan)]
[Fact(DisplayName = "Row spanning with row spacing")]
public void RowSpanningShouldAccountForSpacing()
{
	var grid = CreateGridLayout(rows: "auto, auto", columns: "auto, auto", rowSpacing: 5);
	var view0 = CreateTestView(new Size(100, 100));
	var view1 = CreateTestView(new Size(50, 50));
	var view2 = CreateTestView(new Size(50, 50));
	SubstituteChildren(grid, view0, view1, view2);
	SetLocation(grid, view0, rowSpan: 2);
	SetLocation(grid, view1, row: 0, col: 1);
	SetLocation(grid, view2, row: 1, col: 1);

	var measuredSize = MeasureAndArrangeAuto(grid);

	Assert.Equal(150, measuredSize.Width);
	Assert.Equal(50 + 50 + 5, measuredSize.Height);

	// Starts at Y = 0
	AssertArranged(view1, 100, 0, 50, 50);
	// Starts at the first row's height + the row spacing value, so Y = 50 + 5 = 55
	AssertArranged(view2, 100, 55, 50, 50);
	// We expect the height for the view spanning the rows to include the space between the rows,
	// so 50 + 5 + 50 = 105
	AssertArranged(view0, 0, 0, 100, 105);
}

// Column-spanning view alongside a view confined to the second row/column.
[Category(GridSpan)]
[Fact(DisplayName = "Simple column spanning with multiple views")]
public void ViewSpansColumnsWhenOtherViewsPresent()
{
	var grid = CreateGridLayout(rows: "auto, auto", columns: "auto, auto");
	var view0 = CreateTestView(new Size(100, 100));
	var view1 = CreateTestView(new Size(50, 50));
	SubstituteChildren(grid, view0, view1);
	SetLocation(grid, view0, colSpan: 2);
	SetLocation(grid, view1, row: 1, col: 1);

	var measuredSize = MeasureAndArrangeAuto(grid);

	Assert.Equal(100, measuredSize.Width);
	Assert.Equal(100 + 50, measuredSize.Height);

	AssertArranged(view0, 0, 0, 100, 100);
	AssertArranged(view1, 25, 100, 75, 50);
}

// The width of a column-spanning view includes the column spacing between the columns it crosses.
[Category(GridSpan)]
[Category(GridSpacing)]
[Fact(DisplayName = "Column spanning with column spacing")]
public void ColumnSpanningShouldAccountForSpacing()
{
	var grid = CreateGridLayout(rows: "auto, auto", columns: "auto, auto", colSpacing: 5);
	var view0 = CreateTestView(new Size(100, 100));
	var view1 = CreateTestView(new Size(50, 50));
	var view2 = CreateTestView(new Size(50, 50));
	SubstituteChildren(grid, view0, view1, view2);
	SetLocation(grid, view0, colSpan: 2);
	SetLocation(grid, view1, row: 1, col: 0);
	SetLocation(grid, view2, row: 1, col: 1);

	var measuredSize = MeasureAndArrangeAuto(grid);

	Assert.Equal(50 + 50 + 5, measuredSize.Width);
	Assert.Equal(100 + 50, measuredSize.Height);

	// Starts at X = 0
	AssertArranged(view1, 0, 100, 50, 50);
	// Starts at the first column's width + the column spacing, so X = 50 + 5 = 55
	AssertArranged(view2, 55, 100, 50, 50);
	// We expect the width for the view spanning the columns to include the space between the columns,
	// so 50 + 5 + 50 = 105
	AssertArranged(view0, 0, 0, 105, 100);
}

[Category(GridSpan)]
[Fact(DisplayName = "Row-spanning views smaller than the views confined to the row should not affect row size")]
public void SmallerSpanningViewsShouldNotAffectRowSize()
{
	var grid = CreateGridLayout(rows: "auto, auto", columns: "auto, auto");
	var view0 = CreateTestView(new Size(30, 30));
	var view1 = CreateTestView(new Size(50, 50));
	SubstituteChildren(grid, view0, view1);
	SetLocation(grid, view0, rowSpan: 2);
	SetLocation(grid, view1, row: 0, col: 1);

	var measuredSize = MeasureAndArrangeAuto(grid);

	Assert.Equal(30 + 50, measuredSize.Width);
	Assert.Equal(50, measuredSize.Height);

	AssertArranged(view0, 0, 0, 30, 50);
	AssertArranged(view1, 30, 0, 50, 50);
}

[Category(GridSpan)]
[Fact(DisplayName = "Column-spanning views smaller than the views confined to the column should not affect column size")]
public void SmallerSpanningViewsShouldNotAffectColumnSize()
{
	var grid = CreateGridLayout(rows: "auto, auto", columns: "auto, auto");
	var view0 = CreateTestView(new Size(30, 30));
	var view1 = CreateTestView(new Size(50, 50));
	SubstituteChildren(grid, view0, view1);
	SetLocation(grid, view0, colSpan: 2);
	SetLocation(grid, view1, row: 1, col: 0);

	var measuredSize = MeasureAndArrangeAuto(grid);

	Assert.Equal(50, measuredSize.Width);
	Assert.Equal(30 + 50, measuredSize.Height);

	AssertArranged(view0, 0, 0, 50, 30);
	AssertArranged(view1, 0, 30, 50, 50);
}

// Absolute rows/columns reserve space even when no view occupies them.
[Category(GridAbsoluteSizing)]
[Fact(DisplayName = "Empty absolute rows/columns still affect Grid size")]
public void EmptyAbsoluteRowsAndColumnsAffectSize()
{
	var grid = CreateGridLayout(rows: "10, 40", columns: "15, 85");
	var view0 = CreateTestView(new Size(30, 30));
	SubstituteChildren(grid, view0);
	SetLocation(grid, view0, row: 1, col: 1);

	var measuredSize = MeasureAndArrangeAuto(grid);

	Assert.Equal(15 + 85, measuredSize.Width);
	Assert.Equal(10 + 40, measuredSize.Height);

	AssertArranged(view0, 15, 10, 85, 40);
}

// A column-spanning view and a row-spanning view coexisting in the same 2x2 auto grid.
[Category(GridSpan)]
[Fact(DisplayName = "Row and column spans should be able to mix")]
public void MixedRowAndColumnSpans()
{
	var grid = CreateGridLayout(rows: "auto, auto", columns: "auto, auto");
	var view0 = CreateTestView(new Size(60, 30));
	var view1 = CreateTestView(new Size(30, 60));
	SubstituteChildren(grid, view0, view1);
	SetLocation(grid, view0, row: 0, col: 0, colSpan: 2);
	SetLocation(grid, view1, row: 0, col: 1, rowSpan: 2);

	var measuredSize = MeasureAndArrangeAuto(grid);

	Assert.Equal(60, measuredSize.Width);
	Assert.Equal(60, measuredSize.Height);

	AssertArranged(view0, 0, 0, 60, 45);
	AssertArranged(view1, 15, 0, 45, 60);
}

[Category(GridSpan)]
[Fact(DisplayName = "Row span including absolute row should not modify absolute size")]
public void
RowSpanShouldNotModifyAbsoluteRowSize()
{
	var grid = CreateGridLayout(rows: "auto, 20", columns: "auto, auto");
	var view0 = CreateTestView(new Size(100, 100));
	var view1 = CreateTestView(new Size(50, 10));
	SubstituteChildren(grid, view0, view1);
	SetLocation(grid, view0, rowSpan: 2);
	SetLocation(grid, view1, row: 1, col: 1);

	var measuredSize = MeasureAndArrangeAuto(grid);

	Assert.Equal(100 + 50, measuredSize.Width);
	Assert.Equal(100, measuredSize.Height);

	AssertArranged(view0, 0, 0, 100, 100);

	// The item in the second row starts at y = 80 because the auto row above had to distribute
	// all the extra space into row 0; row 1 is absolute, so no tinkering with it to make stuff fit
	AssertArranged(view1, 100, 80, 50, 20);
}

[Category(GridSpan)]
[Fact(DisplayName = "Column span including absolute column should not modify absolute size")]
public void ColumnSpanShouldNotModifyAbsoluteColumnSize()
{
	var grid = CreateGridLayout(rows: "auto, auto", columns: "auto, 20");
	var view0 = CreateTestView(new Size(100, 100));
	var view1 = CreateTestView(new Size(50, 10));
	SubstituteChildren(grid, view0, view1);
	SetLocation(grid, view0, colSpan: 2);
	SetLocation(grid, view1, row: 1, col: 1);

	var measuredSize = MeasureAndArrangeAuto(grid);

	Assert.Equal(100, measuredSize.Width);
	Assert.Equal(100 + 10, measuredSize.Height);

	AssertArranged(view0, 0, 0, 100, 100);

	// The item in the second row starts at x = 80 because the auto column before it had to distribute
	// all the extra space into column 0; column 1 is absolute, so no tinkering with it to make stuff fit
	AssertArranged(view1, 80, 100, 20, 10);
}

// Spanning two absolute columns yields the sum of their widths.
[Category(GridSpan)]
[Fact]
public void CanSpanAbsoluteColumns()
{
	var grid = CreateGridLayout(rows: "auto", columns: "100,100");
	var view0 = CreateTestView(new Size(150, 100));
	SubstituteChildren(grid, view0);
	SetLocation(grid, view0, colSpan: 2);

	var manager = new GridLayoutManager(grid);
	manager.Measure(200, 100);
	manager.ArrangeChildren(new Rect(0, 0, 200, 100));

	// View should be arranged to span both columns (200 points)
	AssertArranged(view0, 0, 0, 200, 100);
}

// Spanning two absolute rows yields the sum of their heights.
[Category(GridSpan)]
[Fact]
public void CanSpanAbsoluteRows()
{
	var grid = CreateGridLayout(rows: "100,100", columns: "auto");
	var view0 = CreateTestView(new Size(100, 150));
	SubstituteChildren(grid, view0);
	SetLocation(grid, view0, rowSpan: 2);

	var manager = new GridLayoutManager(grid);
	manager.Measure(100, 200);
	manager.ArrangeChildren(new Rect(0, 0, 100, 200));

	// View should be arranged to span both rows (200 points)
	AssertArranged(view0, 0, 0, 100, 200);
}

[Category(GridStarSizing)]
[Fact(DisplayName = "Single star column consumes all horizontal space")]
public void SingleStarColumn()
{
	var screenWidth = 400;
	var screenHeight = 600;

	var grid = CreateGridLayout(rows: "auto", columns: $"*");
	var view0 = CreateTestView(new Size(100, 100));
	SubstituteChildren(grid, view0);
	SetLocation(grid, view0);

	MeasureAndArrangeFixed(grid, screenWidth, screenHeight);

	// Row height is auto, so it gets the height of the view
	// Column is *, so it should get the whole width
	AssertArranged(view0, 0, 0, screenWidth, 100);
}

// A single weighted star column behaves like a plain star column (weights only matter relative to each other).
[Category(GridStarSizing)]
[Fact]
public void SingleWeightedStarColumn()
{
	var screenWidth = 400;
	var screenHeight = 600;

	var grid = CreateGridLayout(rows: "auto", columns: $"3*");
	var view0 = CreateTestView(new Size(100, 100));
	SubstituteChildren(grid, view0);
	SetLocation(grid, view0);

	MeasureAndArrangeFixed(grid, screenWidth, screenHeight);

	// Row height is auto, so it gets the height of the view
	// The column is 3*, but it's the only column, so it should get the full width
	AssertArranged(view0, 0, 0, screenWidth, 100);
}

[Category(GridStarSizing)]
[Fact(DisplayName = "Multiple star columns consume equal space")]
public void MultipleStarColumns()
{
	var screenWidth = 300;
	var screenHeight = 600;
	var viewSize = new Size(50, 50);

	var grid = CreateGridLayout(rows: "auto", columns: $"*,*,*");
	var view0 = CreateTestView(viewSize);
	var view1 = CreateTestView(viewSize);
	var view2 = CreateTestView(viewSize);
	SubstituteChildren(grid, view0, view1, view2);
	SetLocation(grid, view0);
	SetLocation(grid, view1, col: 1);
	SetLocation(grid, view2, col: 2);

	MeasureAndArrangeFixed(grid, screenWidth, screenHeight);

	// Row height is auto, so it gets the height of the view
	// Columns are *,*,*, so each view should be arranged at 1/3 the width
	var expectedWidth = screenWidth / 3;
	var expectedHeight = viewSize.Height;

	// Make sure that the views in the columns are actually getting measured at the column width,
	// and not just at the width of the whole grid
	view1.Received().Measure(Arg.Is<double>(expectedWidth), Arg.Any<double>());
	view2.Received().Measure(Arg.Is<double>(expectedWidth), Arg.Any<double>());

	AssertArranged(view0, 0, 0, expectedWidth, expectedHeight);
	AssertArranged(view1, expectedWidth, 0, expectedWidth, expectedHeight);
	AssertArranged(view2, expectedWidth * 2, 0, expectedWidth, expectedHeight);
}

[Category(GridStarSizing)]
[Fact(DisplayName = "Weighted star column gets proportional space")]
public void WeightedStarColumn()
{
	var screenWidth = 300;
	var screenHeight = 600;
	var viewSize = new Size(50, 50);

	var grid = CreateGridLayout(rows: "auto", columns: $"*,2*");
	var view0 = CreateTestView(viewSize);
	var view1 = CreateTestView(viewSize);
	SubstituteChildren(grid, view0, view1);
	SetLocation(grid, view0);
	SetLocation(grid, view1, col: 1);

	MeasureAndArrangeFixed(grid, screenWidth, screenHeight);

	// Row height is auto, so it gets the height of the view
	// First column should get 1/3 of the width, second should get 2/3
	var expectedWidth0 = screenWidth / 3;
	var expectedWidth1 = expectedWidth0 * 2;
	var expectedHeight = viewSize.Height;

	AssertArranged(view0, 0, 0, expectedWidth0, expectedHeight);
	AssertArranged(view1, expectedWidth0, 0, expectedWidth1, expectedHeight);
}

[Category(GridStarSizing)]
[Fact(DisplayName = "Totally empty star columns measured at infinite width have zero width")]
public void EmptyStarColumnInfiniteWidthMeasure()
{
	var grid = CreateGridLayout(rows: "auto", columns: $"*");

	var manager = new GridLayoutManager(grid);
	var measuredSize = manager.Measure(double.PositiveInfinity, double.PositiveInfinity);

	Assert.Equal(0, measuredSize.Width);
}

[Category(GridStarSizing)]
[Fact(DisplayName = "Single star column with a view measured at infinite width gets width of the view")]
public void StarColumnWithViewInfiniteWidthMeasure()
{
	var grid = CreateGridLayout(rows: "auto", columns: $"*");
	var view0 = CreateTestView(new Size(100, 50));
	SubstituteChildren(grid, view0);
	SetLocation(grid, view0);

	var manager = new GridLayoutManager(grid);
	var measuredSize = manager.Measure(double.PositiveInfinity, double.PositiveInfinity);

	Assert.Equal(100, measuredSize.Width);
	Assert.Equal(50, measuredSize.Height);
}

[Category(GridStarSizing)]
[Fact(DisplayName = "Single star row consumes all vertical space")]
public void SingleStarRow()
{
	var screenWidth = 400;
	var screenHeight = 600;

	var grid = CreateGridLayout(rows: "*", columns: "auto");
	var view0 = CreateTestView(new Size(100, 100));
	SubstituteChildren(grid, view0);
	SetLocation(grid, view0);

	MeasureAndArrangeFixed(grid, screenWidth, screenHeight);

	// Column width is auto, so it gets the width of the view
	// Row is *, so it should get the whole height
	AssertArranged(view0, 0, 0, 100, screenHeight);
}

// A single weighted star row behaves like a plain star row.
[Category(GridStarSizing)]
[Fact]
public void SingleWeightedStarRow()
{
	var screenWidth = 400;
	var screenHeight = 600;

	var grid = CreateGridLayout(rows: "3*", columns: "auto");
	var view0 = CreateTestView(new Size(100, 100));
	SubstituteChildren(grid, view0);
	SetLocation(grid, view0);

	MeasureAndArrangeFixed(grid, screenWidth, screenHeight);

	// Column width is auto, so it gets the width of the view
	// The row is 3*, but it's the only row, so it should get the full height
	AssertArranged(view0, 0, 0, 100, screenHeight);
}

[Category(GridStarSizing)]
[Fact(DisplayName = "Multiple star rows consume equal space")]
public void MultipleStarRows()
{
	var screenWidth = 300;
	var screenHeight = 600;
	var viewSize = new Size(50, 50);

	var grid = CreateGridLayout(rows: "*,*,*", columns: "auto");
	var view0 = CreateTestView(viewSize);
	var view1 = CreateTestView(viewSize);
	var view2 = CreateTestView(viewSize);
	SubstituteChildren(grid, view0, view1, view2);
	SetLocation(grid, view0);
	SetLocation(grid, view1, row: 1);
	SetLocation(grid, view2, row: 2);

	MeasureAndArrangeFixed(grid, screenWidth, screenHeight);

	// Column width is auto, so it gets the width of the view
	// Rows are *,*,*, so each view should be arranged at 1/3 the height
	var expectedHeight = screenHeight / 3;
	var expectedWidth = viewSize.Width;

	AssertArranged(view0, 0, 0, expectedWidth, expectedHeight);
	AssertArranged(view1, 0, expectedHeight, expectedWidth, expectedHeight);
	AssertArranged(view2, 0, expectedHeight * 2, expectedWidth, expectedHeight);
}

[Category(GridStarSizing)]
[Fact(DisplayName = "Weighted star row gets proportional space")]
public void WeightedStarRow()
{
	var screenWidth = 300;
	var screenHeight = 600;
	var viewSize = new Size(50, 50);

	var grid = CreateGridLayout(rows: "*,2*", columns: "auto");
	var view0 = CreateTestView(viewSize);
	var view1 = CreateTestView(viewSize);
	SubstituteChildren(grid, view0, view1);
	SetLocation(grid, view0);
	SetLocation(grid, view1, row: 1);

	MeasureAndArrangeFixed(grid, screenWidth, screenHeight);

	// Column width is auto, so it gets the width of the view
	// First row should get 1/3 of the height, second should get 2/3
	var expectedHeight0 = screenHeight / 3;
	var expectedHeight1 = expectedHeight0 * 2;
	var expectedWidth = viewSize.Width;

	AssertArranged(view0, 0, 0, expectedWidth, expectedHeight0);
	AssertArranged(view1, 0, expectedHeight0, expectedWidth, expectedHeight1);
}

[Category(GridStarSizing)]
[Fact(DisplayName = "Totally empty star rows measured at infinite height have zero height")]
public void EmptyStarRowInfiniteHeightMeasure()
{
	var grid = CreateGridLayout(rows: "*", columns: $"auto");

	var manager = new GridLayoutManager(grid);
	var measuredSize = manager.Measure(double.PositiveInfinity, double.PositiveInfinity);

	Assert.Equal(0, measuredSize.Height);
}

[Category(GridStarSizing)]
[Fact(DisplayName = "Single star row with a view measured at infinite height gets height of the view")]
public void StarRowWithViewInfiniteHeightMeasure()
{
	var grid = CreateGridLayout(rows: "*", columns: $"auto");
	var view0 = CreateTestView(new Size(100, 50));
	SubstituteChildren(grid, view0);
	SetLocation(grid, view0);

	var manager = new GridLayoutManager(grid);
	var measuredSize = manager.Measure(double.PositiveInfinity, double.PositiveInfinity);

	Assert.Equal(100, measuredSize.Width);
	Assert.Equal(50, measuredSize.Height);
}

// Stars divide only the space left over after absolute columns are allocated.
[Category(GridAbsoluteSizing)]
[Category(GridStarSizing)]
[Fact]
public void MixStarsAndExplicitSizes()
{
	var screenWidth = 300;
	var screenHeight = 600;
	var viewSize = new Size(50, 50);

	var grid = CreateGridLayout(rows: "auto", columns: $"3*,100,*");
	var view0 = CreateTestView(viewSize);
	var view1 = CreateTestView(viewSize);
	var view2 = CreateTestView(viewSize);
	SubstituteChildren(grid, view0, view1, view2);
	SetLocation(grid, view0);
	SetLocation(grid, view1, col: 1);
	SetLocation(grid, view2, col: 2);

	MeasureAndArrangeFixed(grid, screenWidth, screenHeight);

	// Row height is auto, so it gets the height of the view
	// Columns are 3*,100,*
	// So we expect the center column to be 100, leaving 200 for the stars
	// 3/4 of that goes to the first column, so 150; the remaining 50 is the last column
	var expectedStarWidth = (screenWidth - 100) / 4;
	var expectedHeight = viewSize.Height;

	AssertArranged(view0, 0, 0, expectedStarWidth * 3, expectedHeight);
	AssertArranged(view1, expectedStarWidth * 3, 0, 100, expectedHeight);
	AssertArranged(view2, (expectedStarWidth * 3) + 100, 0, expectedStarWidth, expectedHeight);
}

// A grid with no explicit rows/columns gets an implied star row and star column.
[Fact]
public void UsesImpliedRowAndColumnIfNothingDefined()
{
	var grid = CreateGridLayout();
	var view0 = CreateTestView(new Size(100, 100));
	SubstituteChildren(grid, view0);
	SetLocation(grid, view0);
	// Using 300,300 - the implied row/column are GridLength.Star
	MeasureAndArrangeFixed(grid, 300, 300);

	// Since it's using GridLength.Star, we expect the view to be arranged at the full size of the grid
	AssertArranged(view0, 0, 0, 300, 300);
}

// Collapsed views are skipped entirely: never measured, never arranged.
[Fact]
public void IgnoresCollapsedViews()
{
	var view = LayoutTestHelpers.CreateTestView(new Size(100, 100));
	var collapsedView = LayoutTestHelpers.CreateTestView(new Size(100, 100));
	collapsedView.Visibility.Returns(Visibility.Collapsed);

	var grid = CreateGridLayout(children: new List<IView>() { view, collapsedView });

	var manager = new GridLayoutManager(grid);
	var measure = manager.Measure(100, double.PositiveInfinity);
	manager.ArrangeChildren(new Rect(Point.Zero, measure));

	// View is visible, so we expect it to be measured and arranged
	view.Received().Measure(Arg.Any<double>(), Arg.Any<double>());
	view.Received().Arrange(Arg.Any<Rect>());

	// View is collapsed, so we expect it not to be measured or arranged
	collapsedView.DidNotReceive().Measure(Arg.Any<double>(), Arg.Any<double>());
	collapsedView.DidNotReceive().Arrange(Arg.Any<Rect>());
}

// Hidden (not collapsed) views still participate in layout so they keep their space.
[Fact]
public void DoesNotIgnoreHiddenViews()
{
	var view = LayoutTestHelpers.CreateTestView(new Size(100, 100));
	var hiddenView = LayoutTestHelpers.CreateTestView(new Size(100, 100));
	hiddenView.Visibility.Returns(Visibility.Hidden);

	var grid = CreateGridLayout(children: new List<IView>() { view, hiddenView });

	var manager = new GridLayoutManager(grid);
	var measure = manager.Measure(100, double.PositiveInfinity);
	manager.ArrangeChildren(new Rect(Point.Zero, measure));

	// View is visible, so we expect it to be measured and arranged
	view.Received().Measure(Arg.Any<double>(), Arg.Any<double>());
	view.Received().Arrange(Arg.Any<Rect>());

	// View is hidden, so we expect it to be measured and arranged (since it'll need to take up space)
	hiddenView.Received().Measure(Arg.Any<double>(), Arg.Any<double>());
	hiddenView.Received().Arrange(Arg.Any<Rect>());
}

// Helper: a single-cell grid with the given padding and one child of the given size.
IGridLayout BuildPaddedGrid(Thickness padding, double viewWidth, double viewHeight)
{
	var grid = CreateGridLayout();
	var view = CreateTestView(new Size(viewWidth, viewHeight));
	SubstituteChildren(grid, view);
	SetLocation(grid, view);
	grid.Padding.Returns(padding);

	return grid;
}

// Measured size = child size + padding on each edge.
[Theory]
[InlineData(0, 0, 0, 0)]
[InlineData(10, 10, 10, 10)]
[InlineData(10, 0, 10, 0)]
[InlineData(0, 10, 0, 10)]
[InlineData(23, 5, 3, 15)]
public void MeasureAccountsForPadding(double left, double top, double right, double bottom)
{
	var viewWidth = 100d;
	var viewHeight = 100d;
	var padding = new Thickness(left, top, right, bottom);

	var expectedHeight = padding.VerticalThickness + viewHeight;
	var expectedWidth = padding.HorizontalThickness + viewWidth;

	var grid = BuildPaddedGrid(padding, viewWidth, viewHeight);

	var manager = new GridLayoutManager(grid);
	var measuredSize = manager.Measure(double.PositiveInfinity, double.PositiveInfinity);

	Assert.Equal(expectedHeight, measuredSize.Height);
	Assert.Equal(expectedWidth, measuredSize.Width);
}

// The child is offset by the left/top padding when arranged.
[Theory]
[InlineData(0, 0, 0, 0)]
[InlineData(10, 10, 10, 10)]
[InlineData(10, 0, 10, 0)]
[InlineData(0, 10, 0, 10)]
[InlineData(23, 5, 3, 15)]
public void ArrangeAccountsForPadding(double left, double top, double right, double bottom)
{
	var viewWidth = 100d;
	var viewHeight = 100d;
	var padding = new Thickness(left, top, right, bottom);

	var grid = BuildPaddedGrid(padding, viewWidth, viewHeight);

	var manager = new GridLayoutManager(grid);
	var measuredSize = manager.Measure(double.PositiveInfinity, double.PositiveInfinity);
	manager.ArrangeChildren(new Rect(Point.Zero, measuredSize));

	AssertArranged(grid[0], padding.Left, padding.Top, viewWidth, viewHeight);
}

// With infinite constraints, star rows/columns fall back to behaving like auto.
[Category(GridStarSizing)]
[Fact]
public void StarValuesAreAutoWhenConstraintsAreInfinite()
{
	// A one-row, one-column grid
	var grid = CreateGridLayout();

	// A 100x100 IView
	var view = CreateTestView(new Size(100, 100));

	// Set up the grid to have a single child
	SubstituteChildren(grid, view);

	// Set up the row/column values and spans
	SetLocation(grid, view);

	var size = MeasureAndArrangeAuto(grid);

	Assert.Equal(100, size.Width);
	Assert.Equal(100, size.Height);
}

// An explicit Height on the grid wins over the content's measured height.
[Category(GridAbsoluteSizing)]
[Fact]
public void GridMeasureShouldUseExplicitHeight()
{
	var grid = CreateGridLayout();
	var view = CreateTestView(new Size(10, 10));
	SubstituteChildren(grid, view);
	SetLocation(grid, view);

	grid.Height.Returns(50);

	var gridLayoutManager = new GridLayoutManager(grid);
	var measure = gridLayoutManager.Measure(double.PositiveInfinity, double.PositiveInfinity);

	Assert.Equal(50, measure.Height);
}

// An explicit Width on the grid wins over the content's measured width.
[Category(GridAbsoluteSizing)]
[Fact]
public void GridMeasureShouldUseExplicitWidth()
{
	var grid = CreateGridLayout();
	var view = CreateTestView(new Size(10, 10));
	SubstituteChildren(grid, view);
	SetLocation(grid, view);

	grid.Width.Returns(50);

	var gridLayoutManager = new GridLayoutManager(grid);
	var measure = gridLayoutManager.Measure(double.PositiveInfinity, double.PositiveInfinity);

	Assert.Equal(50, measure.Width);
}

// Row/column indices outside the defined grid are clamped to the last valid row/column.
[Theory]
// at 0, 0
[InlineData(1, 1, 0, 0, 0, 0)]
[InlineData(1, 2, 0, 0, 0, 0)]
[InlineData(2, 1, 0, 0, 0, 0)]
[InlineData(2, 2, 0, 0, 0, 0)]
// at 1, 0
[InlineData(1, 1, 1, 0, 0, 0)]
[InlineData(1, 2, 1, 0, 0, 0)]
[InlineData(2, 1, 1, 0, 1, 0)]
[InlineData(2, 2, 1, 0, 1, 0)]
// at 0, 1
[InlineData(1, 1, 0, 1, 0, 0)]
[InlineData(1, 2, 0, 1, 0, 1)]
[InlineData(2, 1, 0, 1, 0, 0)]
[InlineData(2, 2, 0, 1, 0, 1)]
// at 1, 1
[InlineData(1, 1, 1, 1, 0, 0)]
[InlineData(1, 2, 1, 1, 0, 1)]
[InlineData(2, 1, 1, 1, 1, 0)]
[InlineData(2, 2, 1, 1, 1, 1)]
public void ViewOutsideRowsAndColsClampsToGrid(int rows, int cols, int row, int col, int actualRow, int actualCol)
{
	var r = string.Join(",", Enumerable.Repeat("100", rows));
	var c = string.Join(",", Enumerable.Repeat("100", cols));

	var grid = CreateGridLayout(rows: r, columns: c);
	var view0 = CreateTestView(new Size(10, 10));
	SubstituteChildren(grid, view0);
	SetLocation(grid, view0, row, col);

	MeasureAndArrangeFixed(grid, 100 * cols, 100 * rows);

	AssertArranged(view0, 100 * actualCol, 100 * actualRow, 100, 100);
}

// Out-of-range origins and spans are clamped to fit within the defined grid.
[Theory]
// normal
[InlineData(0, 0, 1, 1, 0, 0, 1, 1)]
[InlineData(1, 1, 1, 1, 1, 1, 1, 1)]
[InlineData(1, 1, 2, 1, 1, 1, 2, 1)]
// negative origin
[InlineData(-1, 0, 1, 1, 0, 0, 1, 1)]
[InlineData(0, -1, 1, 1, 0, 0, 1, 1)]
[InlineData(-1, -1, 1, 1, 0, 0, 1, 1)]
// negative span
[InlineData(1, 1, -1, 0, 1, 1, 1, 1)]
[InlineData(1, 1, 0, -1, 1, 1, 1, 1)]
[InlineData(1, 1, -1, -1, 1, 1, 1, 1)]
// positive origin
[InlineData(5, 0, 1, 1, 3, 0, 1, 1)]
[InlineData(0, 5, 1, 1, 0, 3, 1, 1)]
[InlineData(5, 5, 1, 1, 3, 3, 1, 1)]
// positive span
[InlineData(0, 0, 1, 5, 0, 0, 1, 4)]
[InlineData(0, 0, 5, 1, 0, 0, 4, 1)]
[InlineData(0, 0, 5, 5, 0, 0, 4, 4)]
// normal origin + positive span
[InlineData(1, 1, 1, 5, 1, 1, 1, 3)]
[InlineData(1, 1, 5, 1, 1, 1, 3, 1)]
[InlineData(1, 1, 5, 5, 1, 1, 3, 3)]
// positive origin + positive span
[InlineData(5, 5, 1, 5, 3, 3, 1, 1)]
[InlineData(5, 5, 5, 1, 3, 3, 1, 1)]
[InlineData(5, 5, 5, 5, 3, 3, 1, 1)]
public void SpansOutsideRowsAndColsClampsToGrid(int row, int col, int rowSpan, int colSpan, int actualRow, int actualCol, int actualRowSpan, int actualColSpan)
{
	const int GridSize = 4;
	var r = string.Join(",", Enumerable.Repeat("100", GridSize));
	var c = string.Join(",", Enumerable.Repeat("100", GridSize));

	var grid = CreateGridLayout(rows: r, columns: c);
	var view0 = CreateTestView(new Size(10, 10));
	SubstituteChildren(grid, view0);
	SetLocation(grid, view0, row, col, rowSpan, colSpan);

	MeasureAndArrangeFixed(grid, 100 * GridSize, 100 * GridSize);

	AssertArranged(
		view0,
		100 * actualCol,
		100 * actualRow,
		100 * actualColSpan,
		100 * actualRowSpan);
}

// Children are arranged relative to the origin of the bounds passed to ArrangeChildren.
[Fact]
public void ArrangeRespectsBounds()
{
	var grid = CreateGridLayout();
	var view = CreateTestView(new Size(100, 100));
	SubstituteChildren(grid, view);
	SetLocation(grid, view);

	var measure = MeasureAndArrange(grid, double.PositiveInfinity, double.PositiveInfinity, 10, 15);

	var expectedRectangle = new Rect(10, 15, measure.Width, measure.Height);

	view.Received().Arrange(Arg.Is(expectedRectangle));
}

[Category(GridAbsoluteSizing)]
[Theory]
[InlineData(50, 100, 50)]
[InlineData(100, 100, 100)]
[InlineData(100, 50, 50)]
[InlineData(0, 50, 0)]
[InlineData(-1, 50, 50)]
public void MeasureRespectsMaxHeight(double maxHeight, double viewHeight, double expectedHeight)
{
	var grid = CreateGridLayout();
	var view = CreateTestView(new Size(100, viewHeight));
	SubstituteChildren(grid, view);
	SetLocation(grid, view);

	grid.MaximumHeight.Returns(maxHeight);

	var layoutManager = new GridLayoutManager(grid);
	var measure = layoutManager.Measure(double.PositiveInfinity, double.PositiveInfinity);

	Assert.Equal(expectedHeight, measure.Height);
}

[Category(GridAbsoluteSizing)]
[Theory]
[InlineData(50, 100, 50)]
[InlineData(100, 100, 100)]
[InlineData(100, 50, 50)]
[InlineData(0, 50, 0)]
[InlineData(-1, 50, 50)]
public void MeasureRespectsMaxWidth(double maxWidth, double viewWidth, double expectedWidth)
{
	var grid = CreateGridLayout();
	var view = CreateTestView(new Size(viewWidth, 100));
	SubstituteChildren(grid, view);
	SetLocation(grid, view);

	grid.MaximumWidth.Returns(maxWidth);

	var layoutManager = new GridLayoutManager(grid);
	var measure = layoutManager.Measure(double.PositiveInfinity, double.PositiveInfinity);

	Assert.Equal(expectedWidth, measure.Width);
}

[Category(GridAbsoluteSizing)]
[Theory]
[InlineData(50, 10, 50)]
[InlineData(100, 100, 100)]
[InlineData(10, 50, 50)]
[InlineData(-1, 50, 50)]
public void MeasureRespectsMinHeight(double minHeight, double viewHeight, double expectedHeight)
{
	var grid = CreateGridLayout();
	var view = CreateTestView(new Size(100, viewHeight));
	SubstituteChildren(grid, view);
	SetLocation(grid, view);

	grid.MinimumHeight.Returns(minHeight);

	var layoutManager = new GridLayoutManager(grid);
	var measure = layoutManager.Measure(double.PositiveInfinity, double.PositiveInfinity);

	Assert.Equal(expectedHeight, measure.Height);
}

[Theory]
[InlineData("*", "*")]
[InlineData("auto",
"auto")] public void MeasureRespectsLargestChildMinimumSize(string columns, string rows) { var grid = CreateGridLayout(columns: columns, rows: rows); var view0 = CreateTestView(new Size(100, 100)); var view1 = CreateTestView(new Size(200, 200)); SubstituteChildren(grid, view0, view1); SetLocation(grid, view0); SetLocation(grid, view1); var layoutManager = new GridLayoutManager(grid); var measure = layoutManager.Measure(double.PositiveInfinity, double.PositiveInfinity); Assert.Equal(200, measure.Height); Assert.Equal(200, measure.Width); } [Category(GridAbsoluteSizing)] [Theory] [InlineData(50, 10, 50)] [InlineData(100, 100, 100)] [InlineData(10, 50, 50)] [InlineData(-1, 50, 50)] public void MeasureRespectsMinWidth(double minWidth, double viewWidth, double expectedWidth) { var grid = CreateGridLayout(); var view = CreateTestView(new Size(viewWidth, 100)); SubstituteChildren(grid, view); SetLocation(grid, view); grid.MinimumWidth.Returns(minWidth); var layoutManager = new GridLayoutManager(grid); var measure = layoutManager.Measure(double.PositiveInfinity, double.PositiveInfinity); Assert.Equal(expectedWidth, measure.Width); } [Fact] [Category(GridAbsoluteSizing)] public void MaxWidthDominatesWidth() { var grid = CreateGridLayout(); var view = CreateTestView(new Size(100, 100)); SubstituteChildren(grid, view); SetLocation(grid, view); grid.Width.Returns(75); grid.MaximumWidth.Returns(50); var layoutManager = new GridLayoutManager(grid); var measure = layoutManager.Measure(double.PositiveInfinity, double.PositiveInfinity); // The maximum value beats out the explicit value Assert.Equal(50, measure.Width); } [Fact] [Category(GridAbsoluteSizing)] public void MinWidthDominatesMaxWidth() { var grid = CreateGridLayout(); var view = CreateTestView(new Size(100, 100)); SubstituteChildren(grid, view); SetLocation(grid, view); grid.MinimumWidth.Returns(75); grid.MaximumWidth.Returns(50); var layoutManager = new GridLayoutManager(grid); var measure = 
layoutManager.Measure(double.PositiveInfinity, double.PositiveInfinity); // The minimum value should beat out the maximum value Assert.Equal(75, measure.Width); } [Fact] [Category(GridAbsoluteSizing)] public void MaxHeightDominatesHeight() { var grid = CreateGridLayout(); var view = CreateTestView(new Size(100, 100)); SubstituteChildren(grid, view); SetLocation(grid, view); grid.Height.Returns(75); grid.MaximumHeight.Returns(50); var layoutManager = new GridLayoutManager(grid); var measure = layoutManager.Measure(double.PositiveInfinity, double.PositiveInfinity); // The maximum value beats out the explicit value Assert.Equal(50, measure.Height); } [Fact] [Category(GridAbsoluteSizing)] public void MinHeightDominatesMaxHeight() { var grid = CreateGridLayout(); var view = CreateTestView(new Size(100, 100)); SubstituteChildren(grid, view); SetLocation(grid, view); grid.MinimumHeight.Returns(75); grid.MaximumHeight.Returns(50); var layoutManager = new GridLayoutManager(grid); var measure = layoutManager.Measure(double.PositiveInfinity, double.PositiveInfinity); // The minimum value should beat out the maximum value Assert.Equal(75, measure.Height); } [Theory] [InlineData(100, 200, 210, 200)] [InlineData(200, 100, 210, 200)] [InlineData(100, 100, 210, 100)] [InlineData(100, 100, 100, 100)] public void AutoCellsSizeToLargestView(double view0Size, double view1Size, double constraintSize, double expectedSize) { var grid = CreateGridLayout(rows: "Auto", columns: "Auto"); // Simulate views which size to their constraints but max out at a certain size var view0 = CreateTestView(); view0.Measure(Arg.Any<double>(), Arg.Any<double>()).Returns( (args) => new Size((double)args[0] >= view0Size ? view0Size : (double)args[0], (double)args[1] >= view0Size ? view0Size : (double)args[1])); var view1 = CreateTestView(); view1.Measure(Arg.Any<double>(), Arg.Any<double>()).Returns( (args) => new Size((double)args[0] >= view1Size ? view1Size : (double)args[0], (double)args[1] >= view1Size ? 
view1Size : (double)args[1])); SubstituteChildren(grid, view0, view1); // Put both views in row/column 0/0 SetLocation(grid, view0); SetLocation(grid, view1); MeasureAndArrangeFixed(grid, constraintSize, constraintSize); var expectedRectangle = new Rect(0, 0, expectedSize, expectedSize); // We expect the Auto row/col to take on the size of the largest of the two views AssertArranged(view0, expectedRectangle); AssertArranged(view1, expectedRectangle); } [Fact] public void ArrangeAccountsForFill() { var grid = CreateGridLayout(); var view = CreateTestView(new Size(100, 100)); SubstituteChildren(grid, view); SetLocation(grid, view); grid.HorizontalLayoutAlignment.Returns(Primitives.LayoutAlignment.Fill); grid.VerticalLayoutAlignment.Returns(Primitives.LayoutAlignment.Fill); var layoutManager = new GridLayoutManager(grid); _ = layoutManager.Measure(double.PositiveInfinity, double.PositiveInfinity); var arrangedWidth = 1000; var arrangedHeight = 1000; var target = new Rect(Point.Zero, new Size(arrangedWidth, arrangedHeight)); var actual = layoutManager.ArrangeChildren(target); // Since we're arranging in a space larger than needed and the layout is set to Fill in both directions, // we expect the returned actual arrangement size to be as large as the target space Assert.Equal(arrangedWidth, actual.Width); Assert.Equal(arrangedHeight, actual.Height); } [Category(GridStarSizing)] [Fact(DisplayName = "We can specify fractional star sizes for columns")] public void FractionalStarColumns() { var screenWidth = 300; var screenHeight = 600; var viewSize = new Size(50, 50); var grid = CreateGridLayout(rows: "auto", columns: $"*,0.5*,0.5*"); var view0 = CreateTestView(viewSize); var view1 = CreateTestView(viewSize); var view2 = CreateTestView(viewSize); SubstituteChildren(grid, view0, view1, view2); SetLocation(grid, view0); SetLocation(grid, view1, col: 1); SetLocation(grid, view2, col: 2); MeasureAndArrangeFixed(grid, screenWidth, screenHeight); // Row height is auto, so it 
gets the height of the view var expectedHeight = viewSize.Height; // Columns are *,0.5*,0.5*, so the first column should be half the space // and the other two columns should be a quarter of the space var expectedWidthColumn0 = screenWidth / 2; var expectedWidthOthers = screenWidth / 4; // Make sure that the views in the columns are actually getting measured at the column width, // and not just at the width of the whole grid view0.Received().Measure(Arg.Is<double>(expectedWidthColumn0), Arg.Any<double>()); view1.Received().Measure(Arg.Is<double>(expectedWidthOthers), Arg.Any<double>()); view2.Received().Measure(Arg.Is<double>(expectedWidthOthers), Arg.Any<double>()); AssertArranged(view0, 0, 0, expectedWidthColumn0, expectedHeight); AssertArranged(view1, expectedWidthColumn0, 0, expectedWidthOthers, expectedHeight); AssertArranged(view2, expectedWidthColumn0 + expectedWidthOthers, 0, expectedWidthOthers, expectedHeight); } [Category(GridStarSizing)] [Fact(DisplayName = "We can specify fractional star sizes for rows")] public void FractionalStarRows() { var screenWidth = 300; var screenHeight = 600; var viewSize = new Size(50, 50); var grid = CreateGridLayout(rows: "*,0.5*,0.5*", columns: "auto"); var view0 = CreateTestView(viewSize); var view1 = CreateTestView(viewSize); var view2 = CreateTestView(viewSize); SubstituteChildren(grid, view0, view1, view2); SetLocation(grid, view0); SetLocation(grid, view1, row: 1); SetLocation(grid, view2, row: 2); MeasureAndArrangeFixed(grid, screenWidth, screenHeight); // Column width is auto, so it gets the width of the view var expectedWidth = viewSize.Width; // Rows are *,0.5*,0.5*, so row 0 should be half the screen height // And the other rows should be one quarter the screen height var expectedHeightRow0 = screenHeight / 2; var expectedHeightOther = screenHeight / 4; // Make sure that the views in the columns are actually getting measured at the column width, // and not just at the width of the whole grid 
view0.Received().Measure(Arg.Any<double>(), Arg.Is<double>(expectedHeightRow0)); view1.Received().Measure(Arg.Any<double>(), Arg.Is<double>(expectedHeightOther)); view2.Received().Measure(Arg.Any<double>(), Arg.Is<double>(expectedHeightOther)); AssertArranged(view0, 0, 0, expectedWidth, expectedHeightRow0); AssertArranged(view1, 0, expectedHeightRow0, expectedWidth, expectedHeightOther); AssertArranged(view2, 0, expectedHeightRow0 + expectedHeightOther, expectedWidth, expectedHeightOther); } [Category(GridSpacing, GridStarSizing)] [Fact("Star columns don't appropriate column spacing during measurement")] public void StarColumnMeasureDoesNotIncludeSpacing() { var colSpacing = 10; var grid = CreateGridLayout(columns: "100, *, 100", colSpacing: colSpacing); var view0 = CreateTestView(new Size(100, 100)); var view1 = CreateTestView(new Size(100, 100)); SubstituteChildren(grid, view0, view1); SetLocation(grid, view0); SetLocation(grid, view1, col: 2); var manager = new GridLayoutManager(grid); var widthConstraint = 100 + colSpacing + 100 + colSpacing + 100; var measure = manager.Measure(widthConstraint, double.PositiveInfinity); var expectedWidthMeasure = 100 + colSpacing + 0 + colSpacing + 100; Assert.Equal(expectedWidthMeasure, measure.Width); manager.ArrangeChildren(new Rect(0, 0, widthConstraint, measure.Height)); AssertArranged(view0, new Rect(0, 0, 100, 100)); AssertArranged(view1, new Rect(220, 0, 100, 100)); } [Category(GridSpacing, GridStarSizing)] [Fact("Star rows don't appropriate row spacing during measurement")] public void StarRowMeasureDoesNotIncludeSpacing() { var rowSpacing = 10; var grid = CreateGridLayout(rows: "100, *, 100", rowSpacing: rowSpacing); var view0 = CreateTestView(new Size(100, 100)); var view1 = CreateTestView(new Size(100, 100)); SubstituteChildren(grid, view0, view1); SetLocation(grid, view0); SetLocation(grid, view1, row: 2); var manager = new GridLayoutManager(grid); var heightConstraint = 100 + rowSpacing + 100 + rowSpacing + 100; 
	var measure = manager.Measure(double.PositiveInfinity, heightConstraint);
	// The empty star row contributes 0 height; spacing is still counted once per gap
	var expectedHeightMeasure = 100 + rowSpacing + 0 + rowSpacing + 100;
	Assert.Equal(expectedHeightMeasure, measure.Height);

	manager.ArrangeChildren(new Rect(0, 0, measure.Width, heightConstraint));
	AssertArranged(view0, new Rect(0, 0, 100, 100));
	AssertArranged(view1, new Rect(0, 220, 100, 100));
}

// A view spanning an Auto column and a Star column should not force the Auto
// column to take on any of the spanned width.
[Fact]
[Category(GridStarSizing), Category(GridAutoSizing)]
public void AutoStarColumnSpansDoNotAffectAutoColumnSize()
{
	var grid = CreateGridLayout(rows: "Auto, *", columns: "Auto, *");
	var view0 = CreateTestView(new Size(10, 10));
	var view1 = CreateTestView(new Size(100, 100));
	SubstituteChildren(grid, view0, view1);
	SetLocation(grid, view0, col: 1);
	SetLocation(grid, view1, row: 1, colSpan: 2);

	MeasureAndArrangeFixed(grid, 200, 200);

	// We expect that column 0 has no width, so view0 will be arranged at 0,0
	AssertArranged(view0, 0, 0, 200, 10);

	// Since column 0 has no width, we expect view1 to start at 0,10
	AssertArranged(view1, 0, 10, 200, 190);
}

// Children of an absolutely-sized row must be measured at exactly that row height.
[Fact]
[Category(GridAbsoluteSizing)]
public void AbsoluteRowsConstrainMeasureHeight()
{
	var grid = CreateGridLayout(rows: "50");
	var viewSize = new Size(10, 10);
	var view0 = CreateTestView(viewSize);
	SubstituteChildren(grid, view0);

	// Assuming no constraints on space
	MeasureAndArrangeAuto(grid);

	// Verify that the view is getting measured at the appropriate height (50)
	view0.Received().Measure(Arg.Any<double>(), Arg.Is<double>(50));
	// And only at that height
	view0.DidNotReceive().Measure(Arg.Any<double>(), Arg.Is<double>((h) => h != 50));
}

// Row-axis counterpart of AutoStarColumnSpansDoNotAffectAutoColumnSize: a span across
// an Auto row and a Star row must not give the Auto row any height.
[Fact]
[Category(GridStarSizing), Category(GridAutoSizing)]
public void AutoStarRowSpansDoNotAffectAutoRowSize()
{
	var grid = CreateGridLayout(rows: "Auto, *", columns: "Auto, *");
	var view0 = CreateTestView(new Size(10, 10));
	var view1 = CreateTestView(new Size(100, 100));
	SubstituteChildren(grid, view0, view1);
	SetLocation(grid, view0, row: 1);
	SetLocation(grid, view1, col: 1, rowSpan: 2);

	MeasureAndArrangeFixed(grid, 200, 200);

	// We expect that row 0 has no height, so view0 will be arranged at 0,0
	AssertArranged(view0, 0, 0, 10, 200);

	// Since row 0 has no height, we expect view1 to start at 10,0
	AssertArranged(view1, 10, 0, 190, 200);
}

// Children of an absolutely-sized column must be measured at exactly that column width.
[Fact]
[Category(GridAbsoluteSizing)]
public void AbsoluteColumnsConstrainMeasureWidth()
{
	var grid = CreateGridLayout(columns: "50");
	var viewSize = new Size(10, 10);
	var view0 = CreateTestView(viewSize);
	SubstituteChildren(grid, view0);

	// Assuming no constraints on space
	MeasureAndArrangeAuto(grid);

	// Verify that the view is getting measured at the appropriate width (50)
	view0.Received().Measure(Arg.Is<double>(50), Arg.Any<double>());
	// And only at that width
	view0.DidNotReceive().Measure(Arg.Is<double>((h) => h != 50), Arg.Any<double>());
}

[Fact("Children of Auto rows should be measured using an infinite height")]
[Category(GridAutoSizing)]
public void AutoRowsMeasureChildrenAtInfinity()
{
	var grid = CreateGridLayout(rows: "Auto");
	var viewSize = new Size(10, 10);
	var view0 = CreateTestView(viewSize);
	SubstituteChildren(grid, view0);

	MeasureAndArrangeFixed(grid, 500, 500);

	// Verify that the view is getting measured at the appropriate height (infinity)
	view0.Received().Measure(Arg.Is<double>(500), Arg.Is(double.PositiveInfinity));
}

[Fact("Children of Auto columns should be measured using an infinite width")]
[Category(GridAutoSizing)]
public void AutoColumnsMeasureChildrenAtInfinity()
{
	var grid = CreateGridLayout(columns: "Auto");
	var viewSize = new Size(10, 10);
	var view0 = CreateTestView(viewSize);
	SubstituteChildren(grid, view0);

	MeasureAndArrangeFixed(grid, 500, 500);

	// Verify that the view is getting measured at the appropriate width (infinity)
	view0.Received().Measure(Arg.Is(double.PositiveInfinity), Arg.Is<double>(500));
}

[Fact("Star Row Height is correct when the first child is Collapsed")]
[Category(GridStarSizing)]
public void StarRowHeightCorrectWhenFirstChildCollapsed()
{
	var grid =
		CreateGridLayout(rows: "*");
	var view0 = CreateTestView(new Size(20, 20));
	var view1 = CreateTestView(new Size(10, 10));

	// Since this is collapsed, it should not count toward the star row height
	view0.Visibility.Returns(Visibility.Collapsed);

	SubstituteChildren(grid, view0, view1);

	var measuredSize = MeasureAndArrangeAuto(grid);

	// Only the visible 10x10 view should contribute to the star row's height
	Assert.Equal(10, measuredSize.Height);
}

[Fact("Star Column Width is correct when the first child is Collapsed")]
[Category(GridStarSizing)]
public void StarColumnWidthCorrectWhenFirstChildCollapsed()
{
	var grid = CreateGridLayout(columns: "*");
	var view0 = CreateTestView(new Size(20, 20));
	var view1 = CreateTestView(new Size(10, 10));

	// Since this is collapsed, it should not count toward the star column width
	view0.Visibility.Returns(Visibility.Collapsed);

	SubstituteChildren(grid, view0, view1);

	var measuredSize = MeasureAndArrangeAuto(grid);

	// Only the visible 10x10 view should contribute to the star column's width
	Assert.Equal(10, measuredSize.Width);
}

[Fact("ArrangeChildren should arrange within measured size")]
[Category(GridStarSizing)]
public void ArrangeChildrenShouldArrangeWithinMeasuredSize()
{
	var grid = CreateGridLayout(rows: "*");
	grid.Width.Returns(105);
	grid.Height.Returns(120);

	var view0 = CreateTestView(new Size(20, 20));
	SubstituteChildren(grid, view0);

	var measuredSize = MeasureAndArrange(grid, 300, double.PositiveInfinity);

	// we expect that the child will be arranged within measured size
	// TODO this test might be improperly named, and also the measuredSize.Width should probably be 300
	AssertArranged(view0, 0, 0, measuredSize.Width, measuredSize.Height);
}

// Regardless of the grid's horizontal alignment, a (possibly implied) star column
// should measure to the child's width when the child has no explicit width.
[Theory, Category(GridStarSizing)]
[InlineData(LayoutAlignment.Center, true)]
[InlineData(LayoutAlignment.Center, false)]
[InlineData(LayoutAlignment.Start, true)]
[InlineData(LayoutAlignment.Start, false)]
[InlineData(LayoutAlignment.End, true)]
[InlineData(LayoutAlignment.End, false)]
[InlineData(LayoutAlignment.Fill, true)]
[InlineData(LayoutAlignment.Fill, false)]
public void GridMeasuresStarColumnToChildWidth(LayoutAlignment alignment, bool impliedColumn)
{
	// impliedColumn == true exercises the default (no explicit column definitions) case
	string layoutData = impliedColumn ? null : "*";

	var grid = CreateGridLayout(columns: layoutData);
	grid.HorizontalLayoutAlignment.Returns(alignment);
	var view0 = CreateTestView(new Size(20, 20));
	SubstituteChildren(grid, view0);

	var manager = new GridLayoutManager(grid);
	var measuredSize = manager.Measure(100, 100);

	Assert.Equal(20, measuredSize.Width);
}

// A horizontally-filling grid should arrange its star column to the full width
// constraint, whether that constraint is larger or smaller than the child.
[Theory, Category(GridStarSizing)]
[InlineData(true, 100)]
[InlineData(false, 100)]
[InlineData(true, 15)]
[InlineData(false, 15)]
public void FillGridArrangesStarColumnToWidthConstraint(bool implied, double constraint)
{
	string layoutData = implied ? null : "*";
	var grid = CreateGridLayout(columns: layoutData);
	grid.HorizontalLayoutAlignment.Returns(LayoutAlignment.Fill);
	var view0 = CreateTestView(new Size(20, 20));
	SubstituteChildren(grid, view0);

	MeasureAndArrangeFixed(grid, constraint, 100);

	AssertArranged(view0, new Rect(0, 0, constraint, 100));
}

// A non-filling grid should arrange its star column only as wide as the child.
[Theory, Category(GridStarSizing)]
[InlineData(LayoutAlignment.Center, true)]
[InlineData(LayoutAlignment.Center, false)]
[InlineData(LayoutAlignment.Start, true)]
[InlineData(LayoutAlignment.Start, false)]
[InlineData(LayoutAlignment.End, true)]
[InlineData(LayoutAlignment.End, false)]
public void NonFillGridArrangesStarColumnToChildWidth(LayoutAlignment alignment, bool implied)
{
	string layoutData = implied ?
		null : "*";
	var grid = CreateGridLayout(columns: layoutData);
	grid.HorizontalLayoutAlignment.Returns(alignment);
	var view0 = CreateTestView(new Size(20, 20));
	SubstituteChildren(grid, view0);

	var measure = MeasureAndArrangeFixed(grid, 100, 100);

	Assert.Equal(20, measure.Height);
	Assert.Equal(20, measure.Width);

	// The child is arranged at its own width (20), not the 100-unit constraint
	AssertArranged(view0, new Rect(0, 0, 20, 100));
}

// Regardless of the grid's vertical alignment, a (possibly implied) star row
// should measure to the child's height when the child has no explicit height.
[Theory, Category(GridStarSizing)]
[InlineData(LayoutAlignment.Center, true)]
[InlineData(LayoutAlignment.Center, false)]
[InlineData(LayoutAlignment.Start, true)]
[InlineData(LayoutAlignment.Start, false)]
[InlineData(LayoutAlignment.End, true)]
[InlineData(LayoutAlignment.End, false)]
[InlineData(LayoutAlignment.Fill, true)]
[InlineData(LayoutAlignment.Fill, false)]
public void GridMeasuresStarRowToChildHeight(LayoutAlignment alignment, bool implied)
{
	// implied == true exercises the default (no explicit row definitions) case
	string layoutData = implied ? null : "*";

	var grid = CreateGridLayout(rows: layoutData);
	grid.VerticalLayoutAlignment.Returns(alignment);
	var view0 = CreateTestView(new Size(20, 20));
	SubstituteChildren(grid, view0);

	var manager = new GridLayoutManager(grid);
	var measuredSize = manager.Measure(100, 100);

	Assert.Equal(20, measuredSize.Height);
}

// A vertically-filling grid should arrange its star row to the full height
// constraint, whether that constraint is larger or smaller than the child.
[Theory, Category(GridStarSizing)]
[InlineData(true, 100)]
[InlineData(false, 100)]
[InlineData(true, 15)]
[InlineData(false, 15)]
public void FillGridArrangesStarRowToHeightConstraint(bool implied, double constraint)
{
	string layoutData = implied ? null : "*";
	var grid = CreateGridLayout(rows: layoutData);
	grid.VerticalLayoutAlignment.Returns(LayoutAlignment.Fill);
	var view0 = CreateTestView(new Size(20, 20));
	SubstituteChildren(grid, view0);

	MeasureAndArrangeFixed(grid, 100, constraint);

	AssertArranged(view0, new Rect(0, 0, 100, constraint));
}

// A non-filling grid should arrange its star row only as tall as the child.
[Theory, Category(GridStarSizing)]
[InlineData(LayoutAlignment.Center, true)]
[InlineData(LayoutAlignment.Center, false)]
[InlineData(LayoutAlignment.Start, true)]
[InlineData(LayoutAlignment.Start, false)]
[InlineData(LayoutAlignment.End, true)]
[InlineData(LayoutAlignment.End, false)]
public void NonFillGridArrangesStarRowToChildHeight(LayoutAlignment alignment, bool impliedRow)
{
	string layoutData = impliedRow ? null : "*";
	var grid = CreateGridLayout(rows: layoutData);
	grid.VerticalLayoutAlignment.Returns(alignment);
	var view0 = CreateTestView(new Size(20, 20));
	SubstituteChildren(grid, view0);

	var measure = MeasureAndArrangeFixed(grid, 100, 100);

	Assert.Equal(20, measure.Height);
	Assert.Equal(20, measure.Width);

	// The child is arranged at its own height (20), not the 100-unit constraint
	AssertArranged(view0, new Rect(0, 0, 100, 20));
}

// When arranged taller than it measured, a vertically-filling grid's star row
// should expand to occupy the extra height.
[Fact, Category(GridStarSizing)]
public void StarRowsResizeWhenGridExpandsToFill()
{
	var grid = CreateGridLayout(rows: "*");
	grid.VerticalLayoutAlignment.Returns(LayoutAlignment.Fill);
	var view0 = CreateTestView(new Size(20, 20));
	SubstituteChildren(grid, view0);

	var manager = new GridLayoutManager(grid);

	// Measuring at infinite height, we expect the Grid's only row (*) to act like an
	// Auto row and get the height of the view
	var measuredSize = manager.Measure(20, double.PositiveInfinity);
	Assert.Equal(20, measuredSize.Height);

	grid.DesiredSize.Returns(measuredSize);

	// We arrange at a height taller than the Grid's measurement; because the Grid
	// is set to vertically Fill, we expect it to expand to the arranged height
	manager.ArrangeChildren(new Rect(0, 0, 20, 100));

	// And we expect the * row to fill up that new height
	AssertArranged(view0, new Rect(0, 0, 20, 100));
}

[Fact, Category(GridStarSizing)]
// When arranged wider than it measured, a horizontally-filling grid's star column
// should expand to occupy the extra width.
public void StarColumnsResizeWhenGridExpandsToFill()
{
	var grid = CreateGridLayout(columns: "*");
	grid.HorizontalLayoutAlignment.Returns(LayoutAlignment.Fill);
	var view0 = CreateTestView(new Size(20, 20));
	SubstituteChildren(grid, view0);

	var manager = new GridLayoutManager(grid);

	// Measuring at infinite width, we expect the Grid's only column (*) to act like an
	// Auto column and get the width of the view
	var measuredSize = manager.Measure(double.PositiveInfinity, 20);
	Assert.Equal(20, measuredSize.Width);

	grid.DesiredSize.Returns(measuredSize);

	// We arrange at a width wider than the Grid's measurement; because the Grid
	// is set to horizontally Fill, we expect it to expand to the arranged width
	manager.ArrangeChildren(new Rect(0, 0, 100, 20));

	// And we expect the * column to fill up that new width
	AssertArranged(view0, new Rect(0, 0, 100, 20));
}

// A star row inside a grid with an explicit Height should fill that explicit
// height even when measured without constraints.
[Theory, Category(GridStarSizing)]
[InlineData(LayoutAlignment.Center)]
[InlineData(LayoutAlignment.Start)]
[InlineData(LayoutAlignment.End)]
[InlineData(LayoutAlignment.Fill)]
public void StarRowsShouldFitExplicitDimensions(LayoutAlignment verticalAlignment)
{
	var grid = CreateGridLayout(rows: "*");
	grid.VerticalLayoutAlignment.Returns(verticalAlignment);
	grid.Height.Returns(100);
	var view0 = CreateTestView(new Size(20, 20));
	SubstituteChildren(grid, view0);

	var manager = new GridLayoutManager(grid);
	var gridMeasure = manager.Measure(double.PositiveInfinity, double.PositiveInfinity);

	// Because the Grid has an explicit height, we expect the measurement to have that height
	Assert.Equal(100, gridMeasure.Height);

	manager.ArrangeChildren(new Rect(Point.Zero, gridMeasure));

	// Because the child has VerticalAlignment.Fill, we expect it to fill up the 100
	// units in the Star row
	AssertArranged(view0, new Rect(0, 0, 20, 100));
}

// A star column inside a grid with an explicit Width should fill that explicit
// width even when measured without constraints.
[Theory, Category(GridStarSizing)]
[InlineData(LayoutAlignment.Center)]
[InlineData(LayoutAlignment.Start)]
[InlineData(LayoutAlignment.End)]
[InlineData(LayoutAlignment.Fill)]
public void StarColumnsShouldFitExplicitDimensions(LayoutAlignment horizontalAlignment)
{
	var grid = CreateGridLayout(columns: "*");
	grid.HorizontalLayoutAlignment.Returns(horizontalAlignment);
	grid.Width.Returns(100);
	var view0 = CreateTestView(new Size(20, 20));
	SubstituteChildren(grid, view0);

	var manager = new GridLayoutManager(grid);
	var gridMeasure = manager.Measure(double.PositiveInfinity, double.PositiveInfinity);

	// Because the Grid has an explicit width, we expect the measurement to have that width.
	Assert.Equal(100, gridMeasure.Width);

	manager.ArrangeChildren(new Rect(Point.Zero, gridMeasure));

	// Because the child has HorizontalAlignment.Fill, we expect it to fill up the 100
	// units in the Star column
	AssertArranged(view0, new Rect(0, 0, 100, 20));
}

// In a centered (non-filling) grid, multiple star columns collapse to their
// children's widths and the children are packed side-by-side.
[Fact]
[Category(GridStarSizing)]
public void StarColumnsHaveChildWidthsWhenGridCentered()
{
	var grid = CreateGridLayout(columns: "*,*");
	grid.HorizontalLayoutAlignment.Returns(LayoutAlignment.Center);

	var view0 = CreateTestView(new Size(20, 20));
	var view1 = CreateTestView(new Size(10, 10));
	SubstituteChildren(grid, view0, view1);
	SetLocation(grid, view1, col: 1);

	_ = MeasureAndArrange(grid, 200, 200);

	AssertArranged(view0, new Rect(0, 0, 20, 20));
	AssertArranged(view1, new Rect(20, 0, 10, 20));
}

// A child with an explicit Width spanning an absolute column and a star column
// should drive the total measured width to its own width.
[Fact]
[Category(GridStarSizing, GridSpan)]
public void MeasureStarAndExplicitColumnSpan()
{
	var grid = CreateGridLayout(columns: "40,*");
	grid.HorizontalLayoutAlignment.Returns(LayoutAlignment.Center);

	var view0 = CreateTestView(new Size(100, 20));
	view0.Width.Returns(100);
	SubstituteChildren(grid, view0);
	SetLocation(grid, view0, col: 0, colSpan: 2);

	var measure = MeasureAndArrange(grid, 200, 200);

	Assert.Equal(100, measure.Width);
	AssertArranged(view0, new Rect(0, 0, 100, 20));
}

// Same as MeasureStarAndExplicitColumnSpan, but spanning an absolute column and
// an Auto column.
[Fact]
[Category(GridStarSizing, GridSpan)]
public void MeasureAutoAndExplicitColumnSpan()
{
	var grid = CreateGridLayout(columns: "40,auto");
	grid.HorizontalLayoutAlignment.Returns(LayoutAlignment.Center);

	var view0 = CreateTestView(new
		Size(100, 20));
	view0.Width.Returns(100);
	SubstituteChildren(grid, view0);
	SetLocation(grid, view0, col: 0, colSpan: 2);

	var measure = MeasureAndArrange(grid, 200, 200);

	Assert.Equal(100, measure.Width);
	AssertArranged(view0, new Rect(0, 0, 100, 20));
}

// A child with an explicit Height spanning an absolute row and a star row
// should drive the total measured height to its own height.
[Fact]
[Category(GridStarSizing, GridSpan)]
public void MeasureStarAndExplicitRowSpan()
{
	var grid = CreateGridLayout(rows: "40,*");
	grid.VerticalLayoutAlignment.Returns(LayoutAlignment.Center);

	var view0 = CreateTestView(new Size(20, 100));
	view0.Height.Returns(100);
	SubstituteChildren(grid, view0);
	SetLocation(grid, view0, row: 0, rowSpan: 2);

	var measure = MeasureAndArrange(grid, 200, 200);

	Assert.Equal(100, measure.Height);
	AssertArranged(view0, new Rect(0, 0, 20, 100));
}

// A star row measured with infinite available height should pass that
// infinite height constraint through to its children.
[Fact]
[Category(GridStarSizing)]
public void ChildInStarRowWithInfiniteSpaceIsMeasuredWithInfinity()
{
	var grid = CreateGridLayout(rows: "*");
	var view0 = CreateTestView(new Size(20, 100));
	SubstituteChildren(grid, view0);

	_ = MeasureAndArrange(grid, 200, double.PositiveInfinity);

	view0.Received().Measure(Arg.Any<double>(), Arg.Is(double.PositiveInfinity));
}

// A star column measured with infinite available width should pass that
// infinite width constraint through to its children.
[Fact]
[Category(GridStarSizing)]
public void ChildInStarColumnWithInfiniteSpaceIsMeasuredWithInfinity()
{
	var grid = CreateGridLayout(columns: "*");
	var view0 = CreateTestView(new Size(100, 20));
	SubstituteChildren(grid, view0);

	_ = MeasureAndArrange(grid, double.PositiveInfinity, 200);

	view0.Received().Measure(Arg.Is(double.PositiveInfinity), Arg.Any<double>());
}

// A star column must be clamped to the grid's width constraint even when a
// child (600 wide here) wants more space than the 500-unit constraint.
[Fact]
[Category(GridStarSizing)]
public void StarColumnWidthLimitedToGridWidth()
{
	var grid = CreateGridLayout(columns: "*", rows: "Auto, Auto");
	var screenWidth = 500;

	var view0 = CreateTestView(new Size(600, 20));
	var view1 = CreateTestView(new Size(100, 20));

	SetLocation(grid, view0);
	SetLocation(grid, view1, row: 1);
	SubstituteChildren(grid, view0, view1);

	_ = MeasureAndArrange(grid, screenWidth, 200);

	AssertArranged(view1, new Rect(0, 20, 500, 20));
}

// A star row must be clamped to the grid's height constraint even when a
// child (600 tall here) wants more space than the 500-unit constraint.
[Fact]
[Category(GridStarSizing)]
public void StarRowHeightLimitedToGridHeight()
{
	var grid = CreateGridLayout(rows: "*", columns: "Auto, Auto");
	var screenHeight = 500;

	var view0 = CreateTestView(new Size(20, 600));
	var view1 = CreateTestView(new Size(20, 100));

	SetLocation(grid, view0);
	SetLocation(grid, view1, col: 1);
	SubstituteChildren(grid, view0, view1);

	_ = MeasureAndArrange(grid, 200, screenHeight);

	AssertArranged(view1, new Rect(20, 0, 20, 500));
}

// The implied star row/column should expand to the grid's explicit 100x120
// size, so the child's arrange region covers the full grid.
[Fact]
[Category(GridStarSizing)]
public void StarsExpandToFixedSizes()
{
	var grid = CreateGridLayout();
	grid.DesiredSize.Returns(new Size(100, 120));
	grid.Width.Returns(100);
	grid.Height.Returns(120);

	var view0 = CreateTestView(new Size(20, 20));
	view0.Width.Returns(20);
	view0.Height.Returns(20);
	view0.HorizontalLayoutAlignment.Returns(LayoutAlignment.End);
	view0.VerticalLayoutAlignment.Returns(LayoutAlignment.Start);

	SetLocation(grid, view0);
	SubstituteChildren(grid, view0);

	_ = MeasureAndArrange(grid);

	AssertArranged(view0, new Rect(0, 0, 100, 120));
}

// A grid with "Auto, *" columns measured without a height constraint should
// still measure its child with unconstrained height.
[Fact]
public void AutoStarColumnsRespectUnconstrainedHeight()
{
	var grid = CreateGridLayout(columns: "Auto, *");

	var view0 = CreateTestView(new Size(20, 20));
	view0.HorizontalLayoutAlignment.Returns(LayoutAlignment.Start);
	view0.VerticalLayoutAlignment.Returns(LayoutAlignment.Start);

	SubstituteChildren(grid, view0);
	SetLocation(grid, view0, col: 1);

	_ = MeasureAndArrange(grid, widthConstraint: 200, heightConstraint: double.PositiveInfinity);

	// The Grid only has one view; since we're measuring the Grid without height constraints,
	// and the single view does not have an explicit height, then there should have been at least
	// one measurement with an unconstrained height
	view0.Received().Measure(Arg.Any<double>(), double.PositiveInfinity);

	// The Auto column has no Views, so we expect it to have zero width; the single view should
	// be arranged at the top left corner
	AssertArranged(view0, new Rect(0, 0, 20, 20));
}

// A grid with "Auto, *" rows measured without a width constraint should
// still measure its child with unconstrained width.
[Fact]
public void AutoStarRowsRespectUnconstrainedWidth()
{
	var grid =
		CreateGridLayout(rows: "Auto, *");

	var view0 = CreateTestView(new Size(20, 20));
	view0.HorizontalLayoutAlignment.Returns(LayoutAlignment.Start);
	view0.VerticalLayoutAlignment.Returns(LayoutAlignment.Start);

	SubstituteChildren(grid, view0);
	SetLocation(grid, view0, row: 1);

	_ = MeasureAndArrange(grid, widthConstraint: double.PositiveInfinity, heightConstraint: 200);

	// The Grid only has one view; since we're measuring the Grid without width constraints,
	// and the single view does not have an explicit width, then there should have been at least
	// one measurement with an unconstrained width
	view0.Received().Measure(double.PositiveInfinity, Arg.Any<double>());

	// The Auto row has no Views, so we expect it to have zero height; the single view should
	// be arranged at the top left corner
	AssertArranged(view0, new Rect(0, 0, 20, 20));
}

[Fact, Category(GridStarSizing)]
public void UnconstrainedStarRowsRetainTheirHeightsWhenArrangedAtMeasuredSize()
{
	// This test accounts for the situation where a Grid has Rows marked as "*", is measured
	// without height constraint, is vertically set to Fill, and is arranged at the measured height.
	// Basically, a situation where the Grid is inside a vertically-oriented StackLayout,
	// and the concept of vertical "Fill" doesn't mean anything. In that situation, the rows should
	// retain their automatic sizing, rather than being evenly distributed as usual.

	var grid = CreateGridLayout(rows: "*, *, *");
	grid.VerticalLayoutAlignment.Returns(LayoutAlignment.Fill);

	var view0 = CreateTestView(new Size(20, 20));
	var view1 = CreateTestView(new Size(20, 40));
	var view2 = CreateTestView(new Size(20, 60));

	SubstituteChildren(grid, view0, view1, view2);

	SetLocation(grid, view0, row: 0);
	SetLocation(grid, view1, row: 1);
	SetLocation(grid, view2, row: 2);

	// Measure the Grid with no height constraint, then arrange it using the resulting size
	// Unconstrained, we expect the views to total 20 + 40 + 60 = 120 height
	// Since we're arranging it at that same height, there's no reason for it to expand the items
	// so we expect them to be arranged at the same heights
	var measure = MeasureAndArrange(grid, widthConstraint: 200, heightConstraint: double.PositiveInfinity);

	Assert.Equal(120, measure.Height);

	AssertArranged(view0, new Rect(0, 0, 20, 20));
	AssertArranged(view1, new Rect(0, 20, 20, 40));
	AssertArranged(view2, new Rect(0, 60, 20, 60));
}

[Theory, Category(GridStarSizing)]
[InlineData(0.1)]
[InlineData(1)]
[InlineData(10)]
[InlineData(60)]
[InlineData(1000)]
[InlineData(-0.1)]
[InlineData(-10)]
[InlineData(-60)]
public void ViewsInUnconstrainedStarRowsDoNotOverlapWhenArrangeHeightChanges(double heightDelta)
{
	// Basically checking for https://github.com/dotnet/maui/issues/14694 here

	// This test accounts for the situation where a Grid has Rows marked as "*", is measured
	// without height constraint, is vertically set to Fill, and is arranged at a different height
	// than the measured height. This can happen for a couple of reasons - either because of
	// adjustments made on the native side to handle rounding/conversion issues (e.g., Android
	// density conversions), or because of ScrollView's "Fill the viewport" behavior.
	var grid = CreateGridLayout(rows: "*, *, *");
	grid.VerticalLayoutAlignment.Returns(LayoutAlignment.Fill);

	var view0 = CreateTestView(new Size(20, 20));
	var view1 = CreateTestView(new Size(20, 40));
	var view2 = CreateTestView(new Size(20, 60));

	SubstituteChildren(grid, view0, view1, view2);

	SetLocation(grid, view0, row: 0);
	SetLocation(grid, view1, row: 1);
	SetLocation(grid, view2, row: 2);

	// Measure the Grid with no height constraint
	// Unconstrained, we expect the views to total 20 + 40 + 60 = 120 height
	var manager = new GridLayoutManager(grid);
	var measure = manager.Measure(200, double.PositiveInfinity);
	Assert.Equal(120, measure.Height);

	// Now arrange it at a _different_ height
	manager.ArrangeChildren(new Rect(0, 0, measure.Width, measure.Height + heightDelta));

	// Determine the destination Rect values that the manager passed in when calling Arrange() for each view
	var v0ArrangeArgs = view0.ReceivedCalls().Single(c => c.GetMethodInfo().Name == nameof(IView.Arrange)).GetArguments();
	var view0Dest = (Rect)v0ArrangeArgs[0];

	var v1ArrangeArgs = view1.ReceivedCalls().Single(c => c.GetMethodInfo().Name == nameof(IView.Arrange)).GetArguments();
	var view1Dest = (Rect)v1ArrangeArgs[0];

	var v2ArrangeArgs = view2.ReceivedCalls().Single(c => c.GetMethodInfo().Name == nameof(IView.Arrange)).GetArguments();
	var view2Dest = (Rect)v2ArrangeArgs[0];

	// Ensure that the destination rect for each view is large enough
	// for that view (that the grid isn't somehow shrinking their destination area)
	Assert.True(view0Dest.Height >= 20);
	Assert.True(view1Dest.Height >= 40);
	Assert.True(view2Dest.Height >= 60);

	// Ensure that the destination rects for the views don't overlap
	Assert.False(view0Dest.IntersectsWith(view1Dest));
	Assert.False(view1Dest.IntersectsWith(view2Dest));
	Assert.False(view0Dest.IntersectsWith(view2Dest));

	// And ensure that the destination rects are actually tall enough to fill up the arranged height.
	// They might be taller (e.g., if a window is resized to be too small for the content), but they should
	// _not_ be _less_ than the target arrangement height.
	var destinationHeight = view0Dest.Height + view1Dest.Height + view2Dest.Height;
	Assert.True(destinationHeight >= measure.Height + heightDelta);
}

// Column-axis counterpart of ViewsInUnconstrainedStarRowsDoNotOverlapWhenArrangeHeightChanges.
[Theory, Category(GridStarSizing)]
[InlineData(0.1)]
[InlineData(1)]
[InlineData(10)]
[InlineData(60)]
[InlineData(1000)]
[InlineData(-0.1)]
[InlineData(-10)]
[InlineData(-60)]
public void ViewsInUnconstrainedStarColumnsDoNotOverlapWhenArrangeWidthChanges(double widthDelta)
{
	// Basically checking for https://github.com/dotnet/maui/issues/14694 here

	// This test accounts for the situation where a Grid has Columns marked as "*", is measured
	// without width constraint, is horizontally set to Fill, and is arranged at a different width
	// than the measured width. This can happen for a couple of reasons - either because of
	// adjustments made on the native side to handle rounding/conversion issues (e.g., Android
	// density conversions), or because of ScrollView's "Fill the viewport" behavior.
var grid = CreateGridLayout(columns: "*, *, *"); grid.HorizontalLayoutAlignment.Returns(LayoutAlignment.Fill); var view0 = CreateTestView(new Size(20, 20)); var view1 = CreateTestView(new Size(40, 20)); var view2 = CreateTestView(new Size(60, 20)); SubstituteChildren(grid, view0, view1, view2); SetLocation(grid, view0, col: 0); SetLocation(grid, view1, col: 1); SetLocation(grid, view2, col: 2); // Measure the Grid with no width constraint // Unconstrained, we expect the views to total 20 + 40 + 60 = 120 width var manager = new GridLayoutManager(grid); var measure = manager.Measure(double.PositiveInfinity, 200); Assert.Equal(120, measure.Width); // Now arrange it at a _different_ width manager.ArrangeChildren(new Rect(0, 0, measure.Width + widthDelta, measure.Height)); // Determine the destination Rect values that the manager passed in when calling Arrange() for each view var v0ArrangeArgs = view0.ReceivedCalls().Single(c => c.GetMethodInfo().Name == nameof(IView.Arrange)).GetArguments(); var view0Dest = (Rect)v0ArrangeArgs[0]; var v1ArrangeArgs = view1.ReceivedCalls().Single(c => c.GetMethodInfo().Name == nameof(IView.Arrange)).GetArguments(); var view1Dest = (Rect)v1ArrangeArgs[0]; var v2ArrangeArgs = view2.ReceivedCalls().Single(c => c.GetMethodInfo().Name == nameof(IView.Arrange)).GetArguments(); var view2Dest = (Rect)v2ArrangeArgs[0]; // Ensure that the destination rect for each view is large enough // for that view (that the grid isn't somehow shrinking their destination area) Assert.True(view0Dest.Width >= 20); Assert.True(view1Dest.Width >= 40); Assert.True(view2Dest.Width >= 60); // Ensure that the destination rects for the views don't overlap Assert.False(view0Dest.IntersectsWith(view1Dest)); Assert.False(view1Dest.IntersectsWith(view2Dest)); Assert.False(view0Dest.IntersectsWith(view2Dest)); } [Fact, Category(GridStarSizing)] public void UnconstrainedStarColumnsRetainTheirWidthsWhenArrangedAtMeasuredSize() { // This test accounts for the situation where a 
Grid has Columns marked as "*", is measured // without width constraint, is horizontally set to Fill, and is arranged at the measured width. // Basically, a situation where the Grid is inside a horizontally-oriented ScrollView or StackLayout, // and the concept of horizontal "Fill" doesn't mean anything. In that situation, the columns should // retain their automatic sizing, rather than being evenly distributed as usual. var grid = CreateGridLayout(columns: "*, *, *"); grid.HorizontalLayoutAlignment.Returns(LayoutAlignment.Fill); var view0 = CreateTestView(new Size(20, 20)); var view1 = CreateTestView(new Size(40, 20)); var view2 = CreateTestView(new Size(60, 20)); SubstituteChildren(grid, view0, view1, view2); SetLocation(grid, view0, col: 0); SetLocation(grid, view1, col: 1); SetLocation(grid, view2, col: 2); // Measure the Grid with no width constraint, then arrange it using the resulting size // Unconstrained, we expect the views to total 20 + 40 + 60 = 120 width // Since we're arranging it at that same width, there's no reason for it to expand the items // so we expect them to be arranged at the same widths var measure = MeasureAndArrange(grid, widthConstraint: double.PositiveInfinity, heightConstraint: 200); Assert.Equal(120, measure.Width); AssertArranged(view0, new Rect(0, 0, 20, 20)); AssertArranged(view1, new Rect(20, 0, 40, 20)); AssertArranged(view2, new Rect(60, 0, 60, 20)); } // These next two tests validate cases where the Grid structure necessitates multiple // measure passes (because a Star value intersects with multiple Auto values) // and the items being measured may have a different Auto height/width on the second pass. 
[Theory, Category(GridAutoSizing)]
[InlineData(10, 30)] // Replicates https://github.com/dotnet/maui/issues/14296
[InlineData(40, 30)] // Image-like content whose height shrinks as the width constraint tightens
public void AutoRowIsDominatedByTallestView(double unconstrainedHeight, double constrainedHeight)
{
    var layout = CreateGridLayout(rows: "Auto", columns: "Auto, *");

    var fixedSizeView = CreateTestView(new Size(20, 20));

    // This child reports a different height depending on the width constraint it's measured with
    var responsiveView = CreateWidthDominatedView(100, unconstrainedHeight,
        new Tuple<double, double>(200, constrainedHeight));

    SubstituteChildren(layout, fixedSizeView, responsiveView);
    SetLocation(layout, fixedSizeView, row: 0, col: 0);
    SetLocation(layout, responsiveView, row: 0, col: 1);

    var measuredSize = MeasureAndArrange(layout, widthConstraint: 200, heightConstraint: 200);

    // The Auto row must grow to fit the full height responsiveView reports at this width
    Assert.Equal(constrainedHeight, measuredSize.Height);
}

[Theory, Category(GridAutoSizing)]
[InlineData(10, 30)] // Replicates https://github.com/dotnet/maui/issues/14296, but for columns
[InlineData(50, 30)] // Image-like content whose width shrinks as the height constraint tightens
public void AutoColumnIsDominatedByWidestView(double unconstrainedWidth, double constrainedWidth)
{
    var layout = CreateGridLayout(columns: "Auto", rows: "Auto, *");

    var fixedSizeView = CreateTestView(new Size(20, 20));

    // This child reports a different width depending on the height constraint it's measured with
    var responsiveView = CreateHeightDominatedView(unconstrainedWidth, 100,
        new Tuple<double, double>(constrainedWidth, 100));

    SubstituteChildren(layout, fixedSizeView, responsiveView);
    SetLocation(layout, fixedSizeView, row: 0, col: 0);
    SetLocation(layout, responsiveView, row: 1, col: 0);

    var measuredSize = MeasureAndArrange(layout, widthConstraint: 100, heightConstraint: 100);

    // The Auto column must grow to fit the full width responsiveView reports at this height
    Assert.Equal(constrainedWidth, measuredSize.Width);
}
[Theory]
[InlineData(20, 100)]
[InlineData(200, 100)]
public void AutoStarColumnSpanMeasureIsSumOfAutoAndStar(double determinantViewWidth, double widthConstraint)
{
    var grid = CreateGridLayout(columns: "Auto, *", rows: "Auto, Auto");

    var view0 = CreateTestView(new Size(20, 20));
    var view1 = CreateTestView(new Size(determinantViewWidth, 20));

    SubstituteChildren(grid, view0, view1);

    SetLocation(grid, view0, row: 0, col: 0, colSpan: 2);
    SetLocation(grid, view1, row: 1, col: 0, colSpan: 1);

    var measure = MeasureAndArrange(grid, widthConstraint: widthConstraint, heightConstraint: 100);

    // view1 should make column 0 at least `determinantViewWidth` units wide
    // So view0 should be getting measured at _at least_ that value; if the widthConstraint is larger
    // than that, the * should bump the measure value up to match the widthConstraint
    var expectedMeasureWidth = Math.Max(determinantViewWidth, widthConstraint);

    view0.Received().Measure(Arg.Is<double>(expectedMeasureWidth), Arg.Any<double>());
}

[Theory]
[InlineData(20, 100)]
[InlineData(200, 100)]
public void AutoStarRowSpanMeasureIsSumOfAutoAndStar(double determinantViewHeight, double heightConstraint)
{
    var grid = CreateGridLayout(columns: "Auto, Auto", rows: "Auto, *");

    var view0 = CreateTestView(new Size(20, 20));
    var view1 = CreateTestView(new Size(20, determinantViewHeight));

    SubstituteChildren(grid, view0, view1);

    SetLocation(grid, view0, row: 0, col: 0, rowSpan: 2);
    SetLocation(grid, view1, row: 0, col: 1, rowSpan: 1);

    var measure = MeasureAndArrange(grid, widthConstraint: 100, heightConstraint: heightConstraint);

    // view1 should make row 0 at least `determinantViewHeight` units tall
    // So view0 should be getting measured at _at least_ that value; if the heightConstraint is larger
    // than that, the * should bump the measure value up to match the heightConstraint
    var expectedMeasureHeight = Math.Max(determinantViewHeight, heightConstraint);

    view0.Received().Measure(Arg.Any<double>(), Arg.Is<double>(expectedMeasureHeight));
}

[Theory, Category(GridStarSizing)]
[InlineData(0.1)]
[InlineData(1)]
[InlineData(10)]
[InlineData(60)]
[InlineData(1000)]
[InlineData(-0.1)]
[InlineData(-10)]
[InlineData(-60)]
public void MultipleArrangeCallsProduceConsistentResults(double delta)
{
    var grid = CreateGridLayout(rows: "*, *, *", columns: "*, *, *");
    grid.VerticalLayoutAlignment.Returns(LayoutAlignment.Fill);
    grid.HorizontalLayoutAlignment.Returns(LayoutAlignment.Fill);

    var view0 = CreateTestView(new Size(20, 20));
    var view1 = CreateTestView(new Size(40, 20));
    var view2 = CreateTestView(new Size(60, 20));

    SubstituteChildren(grid, view0, view1, view2);

    SetLocation(grid, view0, col: 0, row: 0);
    SetLocation(grid, view1, col: 1, row: 1);
    SetLocation(grid, view2, col: 2, row: 2);

    // Measure the Grid with no constraints, then arrange it using the resulting size
    var manager = new GridLayoutManager(grid);
    var measure = manager.Measure(double.PositiveInfinity, double.PositiveInfinity);

    // Now arrange it at a _different_ size
    manager.ArrangeChildren(new Rect(0, 0, measure.Width + delta, measure.Height + delta));

    // Determine the destination Rect values that the manager passed in when calling Arrange() for each view
    var v0ArrangeArgs1 = view0.ReceivedCalls().Single(c => c.GetMethodInfo().Name == nameof(IView.Arrange)).GetArguments();
    var view0Dest1 = (Rect)v0ArrangeArgs1[0];

    var v1ArrangeArgs1 = view1.ReceivedCalls().Single(c => c.GetMethodInfo().Name == nameof(IView.Arrange)).GetArguments();
    var view1Dest1 = (Rect)v1ArrangeArgs1[0];

    var v2ArrangeArgs1 = view2.ReceivedCalls().Single(c => c.GetMethodInfo().Name == nameof(IView.Arrange)).GetArguments();
    var view2Dest1 = (Rect)v2ArrangeArgs1[0];

    // Reset the substitutes so the second Arrange pass records a fresh set of calls
    view0.ClearReceivedCalls();
    view1.ClearReceivedCalls();
    view2.ClearReceivedCalls();

    // Now arrange it at the same size again
    manager.ArrangeChildren(new Rect(0, 0, measure.Width + delta, measure.Height + delta));

    // Determine the destination Rect values that the manager passed in when calling Arrange() for each view
    var v0ArrangeArgs2 = view0.ReceivedCalls().Single(c => c.GetMethodInfo().Name == nameof(IView.Arrange)).GetArguments();
    var view0Dest2 = (Rect)v0ArrangeArgs2[0];

    var v1ArrangeArgs2 = view1.ReceivedCalls().Single(c => c.GetMethodInfo().Name == nameof(IView.Arrange)).GetArguments();
    var view1Dest2 = (Rect)v1ArrangeArgs2[0];

    var v2ArrangeArgs2 = view2.ReceivedCalls().Single(c => c.GetMethodInfo().Name == nameof(IView.Arrange)).GetArguments();
    var view2Dest2 = (Rect)v2ArrangeArgs2[0];

    // Ensure that Arrange was called with the same destination rect for each view both times
    Assert.Equal(view0Dest1, view0Dest2);
    Assert.Equal(view1Dest1, view1Dest2);
    Assert.Equal(view2Dest1, view2Dest2);
}

/*
 * These next two tests cover the specific situation from https://github.com/dotnet/maui/issues/14818
 * Where the control in the * row/column is one that doesn't play nice with measurement. The control
 * is determined to return a measure size larger than the constraints it's given. We have to ensure
 * that the sizes of the containing * row/column don't expand beyond their natural confines even if
 * the containing control asks for it. (The control itself may still be larger than the row/column
 * size, and when arranged it may exceed the boundaries of the row/column. But the row/column itself
 * must still adhere to the constraints given by the column definitions and grid size constraints.)
 */

[Theory]
[InlineData("100, *, 100")]
[InlineData("Auto, *, Auto")]
[InlineData("Auto, *, 100")]
[InlineData("100, *, Auto")]
public void StarRowsHandleGreedyMeasures(string rowDefinitions)
{
    var grid = CreateGridLayout(rows: rowDefinitions);
    grid.VerticalLayoutAlignment.Returns(LayoutAlignment.Fill);
    grid.HorizontalLayoutAlignment.Returns(LayoutAlignment.Fill);

    // view0 is going to be a view that returns 500 high no matter what
    var view0 = CreateTestView(new Size(100, 500));
    var view1 = CreateTestView(new Size(100, 100));

    SubstituteChildren(grid, view0, view1);

    SetLocation(grid, view0, col: 0, row: 1);
    SetLocation(grid, view1, col: 0, row: 2);

    // Measure the grid with a height constraint
    var manager = new GridLayoutManager(grid);
    var measure = manager.Measure(100, 500);
    manager.ArrangeChildren(new Rect(0, 0, 100, 500));

    // At a height constraint of 500 we expect the star row to be 300 high
    // So view1 should be arranged at a Y value of 100 + 300 = 400
    AssertArranged(view1, new Rect(0, 400, 100, 100));
}

[Theory]
[InlineData("100, *, 100")]
[InlineData("Auto, *, Auto")]
[InlineData("Auto, *, 100")]
[InlineData("100, *, Auto")]
public void StarColumnsHandleGreedyMeasures(string columnDefinitions)
{
    var grid = CreateGridLayout(columns: columnDefinitions);
    grid.VerticalLayoutAlignment.Returns(LayoutAlignment.Fill);
    grid.HorizontalLayoutAlignment.Returns(LayoutAlignment.Fill);

    // view0 is going to be a view that returns 500 wide no matter what
    var view0 = CreateTestView(new Size(500, 100));
    var view1 = CreateTestView(new Size(100, 100));

    SubstituteChildren(grid, view0, view1);

    SetLocation(grid, view0, col: 1, row: 0);
    SetLocation(grid, view1, col: 2, row: 0);

    // Measure the grid with a width constraint
    var manager = new GridLayoutManager(grid);
    var measure = manager.Measure(500, 100);
    manager.ArrangeChildren(new Rect(0, 0, 500, 100));

    // At a width constraint of 500, we expect the star column to be 300 wide
    // So view1 should be arranged at an X value of 100 + 300 = 400
    AssertArranged(view1, new Rect(400, 0, 100, 100));
}

[Theory]
[InlineData("100", 1, 10, 100)]
[InlineData("100, 100", 2, 0, 100 + 0 + 100)]
[InlineData("100, 100", 2, 10, 100 + 10 + 100)]
[InlineData("100, 100, 50", 3, 20, 100 + 20 + 100 + 20 + 50)]
public void SpannedColumnMeasureIncludesSpacing(string columnDefinitions, int columnSpan, double spacing, double expectedWidth)
{
    var grid = CreateGridLayout(columns: columnDefinitions);
    grid.ColumnSpacing.Returns(spacing);

    var view0 = CreateTestView(new Size(20, 20));
    SubstituteChildren(grid, view0);
    SetLocation(grid, view0, row: 0, col: 0, colSpan: columnSpan);

    MeasureAndArrange(grid, double.PositiveInfinity, double.PositiveInfinity);

    // A view spanning N columns should be measured at the sum of those column widths
    // PLUS the (N - 1) gaps of ColumnSpacing between them
    view0.Received().Measure(Arg.Is<double>(expectedWidth), Arg.Any<Double>());
}

[Theory]
[InlineData("100", 1, 10, 100)]
[InlineData("100, 100", 2, 0, 100 + 0 + 100)]
[InlineData("100, 100", 2, 10, 100 + 10 + 100)]
[InlineData("100, 100, 50", 3, 20, 100 + 20 + 100 + 20 + 50)]
public void SpannedRowMeasureIncludesSpacing(string rowDefinitions, int rowSpan, double spacing, double expectedHeight)
{
    var grid = CreateGridLayout(rows: rowDefinitions);
    grid.RowSpacing.Returns(spacing);

    var view0 = CreateTestView(new Size(20, 20));
    SubstituteChildren(grid, view0);
    SetLocation(grid, view0, row: 0, col: 0, rowSpan: rowSpan);

    MeasureAndArrange(grid, double.PositiveInfinity, double.PositiveInfinity);

    // A view spanning N rows should be measured at the sum of those row heights
    // PLUS the (N - 1) gaps of RowSpacing between them
    view0.Received().Measure(Arg.Any<double>(), Arg.Is<Double>(expectedHeight));
}

[Theory, Category(GridStarSizing)]
[InlineData(0, 0)]
[InlineData(16, 0)]
[InlineData(0, 16)]
[InlineData(16, 16)]
[InlineData(-16, 16)]
[InlineData(-16, -16)]
[InlineData(16, -16)]
public void StarColumnsAccountForPadding(double left, double right)
{
    var grid = CreateGridLayout(columns: "*,48", rows: "200");
    grid.Width.Returns(200);
    grid.Padding.Returns(new Thickness(left, 0, right, 0));

    var view0 = CreateTestView(new Size(20, 20));
    SubstituteChildren(grid, view0);
    SetLocation(grid, view0, col: 0, row: 0, colSpan: 2);

    MeasureAndArrange(grid, 900, 900);

    // We expect the left edge of the view to be inset by the left padding,
    // and the width of the view to be the width of the Grid minus all padding
    AssertArranged(view0, new Rect(left, 0, 200 - left - right, 200));
}

[Theory, Category(GridStarSizing)]
[InlineData(0, 0)]
[InlineData(16, 0)]
[InlineData(0, 16)]
[InlineData(16, 16)]
[InlineData(-16, 16)]
[InlineData(-16, -16)]
[InlineData(16, -16)]
public void StarRowsAccountForPadding(double top, double bottom)
{
    var grid = CreateGridLayout(rows: "*,48", columns: "200");
    grid.Height.Returns(200);
    grid.Padding.Returns(new Thickness(0, top, 0, bottom));

    var view0 = CreateTestView(new Size(20, 20));
    SubstituteChildren(grid, view0);
    SetLocation(grid, view0, col: 0, row: 0, rowSpan: 2);

    MeasureAndArrange(grid, 900, 900);

    // We expect the top edge of the view to be inset by the top padding,
    // and the height of the view to be the height of the Grid minus all padding
    AssertArranged(view0, new Rect(0, top, 200, 200 - top - bottom));
}

// Test for https://github.com/dotnet/maui/issues/16815
// Because the padding is added to the grid's min possible size
// and then that size is used to calculate cell sizes, we need to
// remove the padding as the cells are not placed in the padding.
// This means the issue appears when:
// grid width = view widths + (2 * padding)
[Theory, Category(GridStarSizing)]
[InlineData(40, 21, 24)]
[InlineData(39, 20, 23)]
[InlineData(38, 19, 22)]
[InlineData(37, 18, 21)]
public void StarRowsCalculateCorrectlyWhenGridWidthNearsMinWidth(double widthConstraint, double view0ExpectedWidth, double view1ExpectedX)
{
    var heights = 100;
    var paddingL = 3;
    var paddingR = 6;
    var viewWidth = 10;

    var grid = CreateGridLayout(rows: "*", columns: "*, Auto");
    grid.Padding.Returns(new Thickness(paddingL, 0, paddingR, 0));

    var view0 = CreateTestView(new Size(viewWidth, heights));
    var view1 = CreateTestView(new Size(viewWidth, heights));

    SubstituteChildren(grid, view0, view1);

    SetLocation(grid, view0, col: 0);
    SetLocation(grid, view1, col: 1);

    _ = MeasureAndArrangeFixed(grid, widthConstraint: widthConstraint, heightConstraint: heights);

    // The star column should absorb what's left after padding and the Auto column,
    // and neither cell may be pushed into the padding area
    AssertArranged(view0, new Rect(paddingL, 0, view0ExpectedWidth, heights));
    AssertArranged(view1, new Rect(view1ExpectedX, 0, viewWidth, heights));
}

// NOTE(review): the parameter names below (widthConstraint/view0ExpectedWidth/view1ExpectedX) were
// copied from the row-oriented sibling test; here they actually describe the HEIGHT constraint and
// vertical positions — consider renaming for clarity (the InlineData values are the only callers).
[Theory, Category(GridStarSizing)]
[InlineData(40, 21, 24)]
[InlineData(39, 20, 23)]
[InlineData(38, 19, 22)]
[InlineData(37, 18, 21)]
public void StarColsCalculateCorrectlyWhenGridHeightNearsMinHeight(double widthConstraint, double view0ExpectedWidth, double view1ExpectedX)
{
    var widths = 100;
    var paddingT = 3;
    var paddingB = 6;
    var viewHeight = 10;

    var grid = CreateGridLayout(rows: "*, Auto", columns: "*");
    grid.Padding.Returns(new Thickness(0, paddingT, 0, paddingB));

    var view0 = CreateTestView(new Size(widths, viewHeight));
    var view1 = CreateTestView(new Size(widths, viewHeight));

    SubstituteChildren(grid, view0, view1);

    SetLocation(grid, view0, row: 0);
    SetLocation(grid, view1, row: 1);

    _ = MeasureAndArrangeFixed(grid, widthConstraint: widths, heightConstraint: widthConstraint);

    // The star row should absorb what's left after padding and the Auto row,
    // and neither cell may be pushed into the padding area
    AssertArranged(view0, new Rect(0, paddingT, widths, view0ExpectedWidth));
    AssertArranged(view1, new Rect(0, view1ExpectedX, widths, viewHeight));
}

[Fact]
public void StarRowExpansionWorksWithDifferingScalars()
{
    var grid = CreateGridLayout(rows: "*, 4.5*, *, 4.5*");
    grid.VerticalLayoutAlignment.Returns(LayoutAlignment.Fill);

    var view0 = CreateTestView(new Size(100, 20));
    var view1 = CreateTestView(new Size(100, 100));
    var view2 = CreateTestView(new Size(100, 20));
    var view3 = CreateTestView(new Size(100, 100));

    SubstituteChildren(grid, view0, view1, view2, view3);

    SetLocation(grid, view0, row: 0);
    SetLocation(grid, view1, row: 1);
    SetLocation(grid, view2, row: 2);
    SetLocation(grid, view3, row: 3);

    // Measure the Grid with no constraints
    var manager = new GridLayoutManager(grid);
    var measure = manager.Measure(double.PositiveInfinity, double.PositiveInfinity);

    // Our expected height, unconstrained, where all the views get their desired height:
    double expectedHeight = 20 + 100 + 20 + 100;
    Assert.Equal(expectedHeight, measure.Height);

    // Now we'll arrange it at a larger height (as if we were filling up the height of a layout)
    double arrangeHeight = measure.Height + 100;
    manager.ArrangeChildren(new Rect(0, 0, measure.Width, arrangeHeight));

    // Determine the destination Rect values that the manager passed in when calling Arrange() for each view
    var view0Dest = GetArrangedRect(view0);
    var view1Dest = GetArrangedRect(view1);
    var view2Dest = GetArrangedRect(view2);
    var view3Dest = GetArrangedRect(view3);

    // We have four rows: 1*, 4.5*, 1*, 4.5*
    double starCount = 1 + 4.5 + 1 + 4.5;

    // We expect the odd rows to get 1* each
    double expectedOddRowHeight = arrangeHeight / starCount;

    // And the even rows to get 4.5* each
    double expectedEvenRowHeight = expectedOddRowHeight * 4.5;

    // Verify that the views were arranged at those sizes (within tolerance)
    Assert.Equal(expectedOddRowHeight, view0Dest.Height, 1.0);
    Assert.Equal(expectedEvenRowHeight, view1Dest.Height, 1.0);
    Assert.Equal(expectedOddRowHeight, view2Dest.Height, 1.0);
    Assert.Equal(expectedEvenRowHeight, view3Dest.Height, 1.0);
}

[Fact]
public void StarColumnExpansionWorksWithDifferingScalars()
{
    var grid = CreateGridLayout(columns: "*, 4.5*, *, 4.5*");
    // NOTE(review): this sets VerticalLayoutAlignment in a COLUMN-expansion test; the row-oriented
    // sibling above sets the same property. HorizontalLayoutAlignment looks like the intended
    // property here — confirm whether the alignment matters for this assertion before changing.
    grid.VerticalLayoutAlignment.Returns(LayoutAlignment.Fill);

    var view0 = CreateTestView(new Size(20, 100));
    var view1 = CreateTestView(new Size(100, 100));
    var view2 = CreateTestView(new Size(20, 100));
    var view3 = CreateTestView(new Size(100, 100));

    SubstituteChildren(grid, view0, view1, view2, view3);

    SetLocation(grid, view0, col: 0);
    SetLocation(grid, view1, col: 1);
    SetLocation(grid, view2, col: 2);
    SetLocation(grid, view3, col: 3);

    // Measure the Grid with no constraints
    var manager = new GridLayoutManager(grid);
    var measure = manager.Measure(double.PositiveInfinity, double.PositiveInfinity);

    // Our expected width, unconstrained, where all the views get their desired width:
    double expectedWidth = 20 + 100 + 20 + 100;
    Assert.Equal(expectedWidth, measure.Width);

    // Now we'll arrange it at a larger width (as if we were filling up the width of a layout)
    double arrangeWidth = measure.Width + 100;
    manager.ArrangeChildren(new Rect(0, 0, arrangeWidth, measure.Height));

    // Determine the destination Rect values that the manager passed in when calling Arrange() for each view
    var view0Dest = GetArrangedRect(view0);
    var view1Dest = GetArrangedRect(view1);
    var view2Dest = GetArrangedRect(view2);
    var view3Dest = GetArrangedRect(view3);

    // We have four columns: 1*, 4.5*, 1*, 4.5*
    double starCount = 1 + 4.5 + 1 + 4.5;

    // We expect the odd columns to get 1* each
    double expectedOddRowWidth = arrangeWidth / starCount;

    // And the even columns to get 4.5* each
    double expectedEvenRowWidth = expectedOddRowWidth * 4.5;

    // Verify that the views were arranged at those sizes (within tolerance)
    Assert.Equal(expectedOddRowWidth, view0Dest.Width, 1.0);
    Assert.Equal(expectedEvenRowWidth, view1Dest.Width, 1.0);
    Assert.Equal(expectedOddRowWidth, view2Dest.Width, 1.0);
    Assert.Equal(expectedEvenRowWidth, view3Dest.Width, 1.0);
}

// Pulls the destination Rect passed to IView.Arrange() out of the substitute's recorded calls;
// assumes exactly one Arrange call has been recorded (Single throws otherwise).
static Rect GetArrangedRect(IView view)
{
    var args = view.ReceivedCalls().Single(c => c.GetMethodInfo().Name == nameof(IView.Arrange)).GetArguments();
    return (Rect)args[0];
}

// The next two tests look at a corner case where the Grid is measured in one dimension without constraint
// (for instance, inside of a StackLayout); the Star in the unconstrained dimension should be treated
// as an Auto value. The explicit Auto value in the constrained dimension forces us to make a second measure
// pass to resolve all the measurements; we have to ensure that this second measure pass is updating
// the measurement of the unconstrained Star if the Views measured during the second pass are larger
// in that dimension.

[Fact]
public void AutoColumnIntersectionWithUnconstrainedMeasure()
{
    var grid = CreateGridLayout(columns: "*, Auto", rows: "*");

    var view0 = CreateTestView(new Size(20, 40));
    var view1 = CreateTestView(new Size(20, 20));

    SubstituteChildren(grid, view0, view1);

    SetLocation(grid, view0, col: 0);
    SetLocation(grid, view1, col: 1);

    // The infinite height means we treat the * Row as Auto
    _ = MeasureAndArrange(grid, widthConstraint: 200, heightConstraint: double.PositiveInfinity);

    // Ensure that the * Row height was updated to include the taller view
    AssertArranged(view0, new Rect(0, 0, 20, 40));
}

[Fact]
public void AutoRowIntersectionWithUnconstrainedMeasure()
{
    var grid = CreateGridLayout(rows: "*, Auto", columns: "*");

    var view0 = CreateTestView(new Size(40, 20));
    var view1 = CreateTestView(new Size(20, 20));

    SubstituteChildren(grid, view0, view1);

    SetLocation(grid, view0, row: 0);
    SetLocation(grid, view1, row: 1);

    // The infinite width means we treat the * Column as Auto
    _ = MeasureAndArrange(grid, widthConstraint: double.PositiveInfinity, heightConstraint: 200);

    // Ensure that the * Column width was updated to include the wider view
    AssertArranged(view0, new Rect(0, 0, 40, 20));
}

[Theory, Category(GridStarSizing)]
[InlineData(926, 845)]
[InlineData(926, 926)]
[InlineData(926, 1026)]
public void StarsAdjustWhenArrangeAndMeasureHeightDiffer(double heightConstraint, double arrangedHeight)
{
    var grid = CreateGridLayout(rows: "*, *", columns: "*");

    var smallerView = CreateTestView(new Size(20, 20));
    var largerView = CreateTestView(new Size(20, 500));

    SubstituteChildren(grid, largerView, smallerView);

    SetLocation(grid, smallerView, col: 0, row: 0);
    SetLocation(grid, largerView, row: 1, col: 0);

    var gridLayoutManager = new GridLayoutManager(grid);
    double widthConstraint = 400;
    _ = gridLayoutManager.Measure(widthConstraint, heightConstraint);

    // Arranging at a different size than the measurement constraints
    gridLayoutManager.ArrangeChildren(new Rect(0, 0, widthConstraint, arrangedHeight));

    // The two * rows should split the ARRANGED height evenly, not the measured height
    double expectedHeight = arrangedHeight / 2;
    AssertArranged(smallerView, new Rect(0, 0, widthConstraint, expectedHeight));
    AssertArranged(largerView, new Rect(0, expectedHeight, widthConstraint, expectedHeight));
}

[Theory, Category(GridStarSizing)]
[InlineData(926, 845)]
[InlineData(926, 926)]
[InlineData(926, 1026)]
public void StarsAdjustWhenArrangeAndMeasureWidthDiffer(double widthConstraint, double arrangedWidth)
{
    var grid = CreateGridLayout(rows: "*", columns: "*, *");

    var smallerView = CreateTestView(new Size(20, 20));
    var largerView = CreateTestView(new Size(500, 20));

    SubstituteChildren(grid, largerView, smallerView);

    SetLocation(grid, smallerView, col: 0, row: 0);
    SetLocation(grid, largerView, row: 0, col: 1);

    var gridLayoutManager = new GridLayoutManager(grid);
    double heightConstraint = 400;
    _ = gridLayoutManager.Measure(widthConstraint, heightConstraint);

    // Arranging at a different size than the measurement constraints
    gridLayoutManager.ArrangeChildren(new Rect(0, 0, arrangedWidth, heightConstraint));

    // The two * columns should split the ARRANGED width evenly, not the measured width
    double expectedWidth = arrangedWidth / 2;
    AssertArranged(smallerView, new Rect(0, 0, expectedWidth, heightConstraint));
    AssertArranged(largerView, new Rect(expectedWidth, 0, expectedWidth, heightConstraint));
}
}
}
GridLayoutManagerTests
csharp
ChilliCream__graphql-platform
src/HotChocolate/Fusion-vnext/test/Fusion.Tests.Shared/AutomaticMockingTests.cs
{ "start": 18309, "end": 19342 }
interface ____ { id: ID! str: String! } type Object implements Interface { id: ID! str: String! num: Int! } """; const string request = """ query { interfaces { __typename id str ... on Object { num } } } """; // act var result = await ExecuteRequestAgainstSchemaAsync(request, schema); // assert result.MatchInlineSnapshot( """ { "data": { "interfaces": null } } """); } [Fact] public async Task Interface_List_Error() { // arrange const string schema = """ type Query { interfaces: [Interface] @error }
Interface
csharp
AutoFixture__AutoFixture
Src/AutoFixtureUnitTest/Kernel/SeededRequestTest.cs
{ "start": 149, "end": 8728 }
public class ____ { [Fact] public void SeedIsCorrect() { // Arrange var expectedSeed = "Anonymous value"; var sut = new SeededRequest(typeof(string), expectedSeed); // Act var result = sut.Seed; // Assert Assert.Equal(expectedSeed, result); } [Fact] public void CreateWithNullRequestWillThrow() { // Arrange // Act & assert Assert.Throws<ArgumentNullException>(() => new SeededRequest(null, new object())); } [Fact] public void RequestIsCorrect() { // Arrange var expectedRequest = new object(); var sut = new SeededRequest(expectedRequest, "Anonymous value"); // Act var result = sut.Request; // Assert Assert.Equal(expectedRequest, result); } [Fact] public void SutIsEquatable() { // Arrange // Act var sut = new SeededRequest(typeof(decimal), 1); // Assert Assert.IsAssignableFrom<IEquatable<SeededRequest>>(sut); } [Fact] public void SutDoesNotEqualNullObject() { // Arrange var sut = new SeededRequest(typeof(DateTime), new DateTime(103029)); object other = null; // Act var result = sut.Equals(other); // Assert Assert.False(result, "Equals"); } [Fact] public void SutDoesNotEqualNullSut() { // Arrange var sut = new SeededRequest(typeof(TimeSpan), new object()); SeededRequest other = null; // Act var result = sut.Equals(other); // Assert Assert.False(result, "Equals"); } [Fact] public void SutDoesNotEqualAnonymousObject() { // Arrange var sut = new SeededRequest(new object(), "Anonymous value"); object anonymousObject = new ConcreteType(); // Act var result = sut.Equals(anonymousObject); // Assert Assert.False(result, "Equals"); } [Fact] public void SutDoesNotEqualOtherObjectWhenRequestsDiffer() { // Arrange var anonymousValue = 1; var sut = new SeededRequest(new object(), anonymousValue); object other = new SeededRequest(typeof(TimeSpan), anonymousValue); // Act var result = sut.Equals(other); // Assert Assert.False(result, "Equals"); } [Fact] public void SutDoesNotEqualOtherSutWhenRequestsDiffer() { // Arrange var anonymousValue = 1; var sut = new SeededRequest(new 
object(), anonymousValue); var other = new SeededRequest(typeof(TimeSpan), anonymousValue); // Act var result = sut.Equals(other); // Assert Assert.False(result, "Equals"); } [Fact] public void SutDoesNotEqualOtherObjectWhenSeedsDiffer() { // Arrange var anonymousRequest = new object(); var sut = new SeededRequest(anonymousRequest, 98); object other = new SeededRequest(anonymousRequest, "Anonymous value"); // Act var result = sut.Equals(other); // Assert Assert.False(result, "Equals"); } [Fact] public void SutDoesNotEqualOtherSutWhenSeedsDiffer() { // Arrange var anonymousRequest = 1; var sut = new SeededRequest(anonymousRequest, 98); var other = new SeededRequest(anonymousRequest, "Anonymous value"); // Act var result = sut.Equals(other); // Assert Assert.False(result, "Equals"); } [Fact] public void SutDoesNotEqualOtherObjectWhenSutSeedIsNull() { // Arrange var anonymousRequest = string.Empty; var sut = new SeededRequest(anonymousRequest, null); object other = new SeededRequest(anonymousRequest, 2.9f); // Act var result = sut.Equals(other); // Assert Assert.False(result, "Equals"); } [Fact] public void SutDoesNotEqualOtherSutWhenSutSeedIsNull() { // Arrange var anonymousRequest = typeof(float); var sut = new SeededRequest(anonymousRequest, null); var other = new SeededRequest(anonymousRequest, 2.9f); // Act var result = sut.Equals(other); // Assert Assert.False(result, "Equals"); } [Fact] public void SutDoesNotEqualOtherObjectWhenOtherSeedIsNull() { // Arrange var anonymousRequest = typeof(Buffer); var sut = new SeededRequest(anonymousRequest, new ConcreteType()); object other = new SeededRequest(anonymousRequest, null); // Act var result = sut.Equals(other); // Assert Assert.False(result, "Equals"); } [Fact] public void SutDoesNotEqualOtherSutWhenOtherSeedIsNull() { // Arrange var anonymousRequest = typeof(Buffer); var sut = new SeededRequest(anonymousRequest, new ConcreteType()); var other = new SeededRequest(anonymousRequest, null); // Act var result = 
sut.Equals(other); // Assert Assert.False(result, "Equals"); } [Fact] public void SutEqualsOtherObjectWhenRequestAndSeedEquals() { // Arrange var request = typeof(ConcreteType); var seed = new TypeWithConcreteParameterMethod(); var sut = new SeededRequest(request, seed); object other = new SeededRequest(request, seed); // Act var result = sut.Equals(other); // Assert Assert.True(result, "Equals"); } [Fact] public void SutEqualsOtherSutWhenRequestsAndSeedEquals() { // Arrange var request = typeof(ConcreteType); var seed = new TypeWithConcreteParameterMethod(); var sut = new SeededRequest(request, seed); var other = new SeededRequest(request, seed); // Act var result = sut.Equals(other); // Assert Assert.True(result, "Equals"); } [Fact] public void SutEqualsOtherObjectWhenRequestsAreEqualAndSeedsAreNull() { // Arrange var request = typeof(WeakReference); var sut = new SeededRequest(request, null); object other = new SeededRequest(request, null); // Act var result = sut.Equals(other); // Assert Assert.True(result, "Equals"); } [Fact] public void SutEqualsOtherSutWhenRequestsAreEqualAndSeedsAreNull() { // Arrange var request = typeof(WeakReference); var sut = new SeededRequest(request, null); var other = new SeededRequest(request, null); // Act var result = sut.Equals(other); // Assert Assert.True(result, "Equals"); } [Fact] public void GetHashCodeWillReturnCorrectResultWhenSeedIsNull() { // Arrange var request = typeof(Version); var sut = new SeededRequest(request, null); var expectedHashCode = request.GetHashCode(); // Act var result = sut.GetHashCode(); // Assert Assert.Equal(expectedHashCode, result); } [Fact] public void GetHashCodeWillReturnCorrectResult() { // Arrange var request = typeof(ConcreteType); var value = Missing.Value; var sut = new SeededRequest(request, value); var expectedHashCode = request.GetHashCode() ^ value.GetHashCode(); // Act var result = sut.GetHashCode(); // Assert Assert.Equal(expectedHashCode, result); } } }
SeededRequestTest
csharp
AvaloniaUI__Avalonia
src/Android/Avalonia.Android/Platform/Input/AndroidInputMethod.cs
{ "start": 844, "end": 7289 }
internal class ____<TView> : ITextInputMethodImpl, IAndroidInputMethod where TView : View, IInitEditorInfo { private readonly TView _host; private readonly InputMethodManager _imm; private TextInputMethodClient? _client; private AvaloniaInputConnection? _inputConnection; public AndroidInputMethod(TView host) { _host = host; _imm = host.Context?.GetSystemService(Context.InputMethodService).JavaCast<InputMethodManager>() ?? throw new InvalidOperationException("Context.InputMethodService is expected to be not null."); _host.Focusable = true; _host.FocusableInTouchMode = true; } public View View => _host; [MemberNotNullWhen(true, nameof(Client))] [MemberNotNullWhen(true, nameof(_client))] public bool IsActive => Client != null; public TextInputMethodClient? Client => _client; public InputMethodManager IMM => _imm; public void Reset() { } public void SetClient(TextInputMethodClient? client) { if(_client != null) { _client.SurroundingTextChanged -= _client_SurroundingTextChanged; _client.SelectionChanged -= _client_SelectionChanged; _client.InputPaneActivationRequested -= _client_InputPaneActivationRequested; } _client = client; if (IsActive) { _host.RequestFocus(); _imm.RestartInput(View); _imm.ShowSoftInput(_host, ShowFlags.Implicit); _inputConnection?.UpdateState(); _client.SurroundingTextChanged += _client_SurroundingTextChanged; _client.SelectionChanged += _client_SelectionChanged; _client.InputPaneActivationRequested += _client_InputPaneActivationRequested; } else { _imm.RestartInput(View); _inputConnection = null; _imm.HideSoftInputFromWindow(_host.WindowToken, HideSoftInputFlags.ImplicitOnly); } } private void _client_InputPaneActivationRequested(object? sender, EventArgs e) { if(IsActive) { _imm.ShowSoftInput(_host, ShowFlags.Implicit); } } private void _client_SelectionChanged(object? 
sender, EventArgs e) { if (_inputConnection is null || _inputConnection.IsInBatchEdit || _inputConnection.IsInUpdate) return; OnSelectionChanged(); } private void OnSelectionChanged() { if (Client is null || _inputConnection is null || _inputConnection.IsInUpdate) { return; } OnSurroundingTextChanged(); _inputConnection.IsInUpdate = true; var selection = Client.Selection; var composition = _inputConnection.EditBuffer.HasComposition ? _inputConnection.EditBuffer.Composition!.Value : new TextSelection(-1,-1); _imm.UpdateSelection(_host, selection.Start, selection.End, composition.Start, composition.End); _inputConnection.IsInUpdate = false; } private void _client_SurroundingTextChanged(object? sender, EventArgs e) { if (_inputConnection is null || _inputConnection.IsInBatchEdit || _inputConnection.IsInUpdate) return; OnSurroundingTextChanged(); } public void OnBatchEditEnded() { if (_inputConnection is null || _inputConnection.IsInBatchEdit) return; OnSelectionChanged(); } private void OnSurroundingTextChanged() { _inputConnection?.UpdateState(); } public void SetCursorRect(Rect rect) { } public void SetOptions(TextInputOptions options) { _host.InitEditorInfo((topLevel, outAttrs) => { if (_client == null) { return null!; } _inputConnection = new AvaloniaInputConnection(topLevel, this); outAttrs.InputType = options.ContentType switch { TextInputContentType.Email => InputTypes.TextVariationEmailAddress, TextInputContentType.Number => InputTypes.ClassNumber, TextInputContentType.Password => InputTypes.TextVariationPassword, TextInputContentType.Digits => InputTypes.ClassPhone, TextInputContentType.Url => InputTypes.TextVariationUri, _ => InputTypes.ClassText }; if (options.AutoCapitalization) { outAttrs.InitialCapsMode = CapitalizationMode.Sentences; outAttrs.InputType |= InputTypes.TextFlagCapSentences; } if (options.Multiline) outAttrs.InputType |= InputTypes.TextFlagMultiLine; if (outAttrs.InputType is InputTypes.ClassText && options.ShowSuggestions == false) 
outAttrs.InputType |= InputTypes.TextVariationPassword | InputTypes.TextFlagNoSuggestions; outAttrs.ImeOptions = options.ReturnKeyType switch { TextInputReturnKeyType.Return => ImeFlags.NoEnterAction, TextInputReturnKeyType.Go => (ImeFlags)CustomImeFlags.ActionGo, TextInputReturnKeyType.Send => (ImeFlags)CustomImeFlags.ActionSend, TextInputReturnKeyType.Search => (ImeFlags)CustomImeFlags.ActionSearch, TextInputReturnKeyType.Next => (ImeFlags)CustomImeFlags.ActionNext, TextInputReturnKeyType.Previous => (ImeFlags)CustomImeFlags.ActionPrevious, TextInputReturnKeyType.Done => (ImeFlags)CustomImeFlags.ActionDone, _ => options.Multiline ? ImeFlags.NoEnterAction : (ImeFlags)CustomImeFlags.ActionDone }; outAttrs.ImeOptions |= ImeFlags.NoFullscreen | ImeFlags.NoExtractUi; return _inputConnection; }); } } }
AndroidInputMethod
csharp
DuendeSoftware__IdentityServer
identity-server/src/IdentityServer/Services/Default/LogoutNotificationService.cs
{ "start": 462, "end": 4774 }
public class ____ : ILogoutNotificationService { private readonly IClientStore _clientStore; private readonly IIssuerNameService _issuerNameService; private readonly SanitizedLogger<LogoutNotificationService> _sanitizedLogger; /// <summary> /// Ctor. /// </summary> public LogoutNotificationService( IClientStore clientStore, IIssuerNameService issuerNameService, ILogger<LogoutNotificationService> logger) { _clientStore = clientStore; _issuerNameService = issuerNameService; _sanitizedLogger = new SanitizedLogger<LogoutNotificationService>(logger); } /// <inheritdoc/> public async Task<IEnumerable<string>> GetFrontChannelLogoutNotificationsUrlsAsync(LogoutNotificationContext context) { using var activity = Tracing.ServiceActivitySource.StartActivity("LogoutNotificationService.GetFrontChannelLogoutNotificationsUrls"); var frontChannelUrls = new List<string>(); foreach (var clientId in context.ClientIds) { var client = await _clientStore.FindEnabledClientByIdAsync(clientId); if (client != null) { if (client.FrontChannelLogoutUri.IsPresent()) { var url = client.FrontChannelLogoutUri; // add session id if required if (client.ProtocolType == IdentityServerConstants.ProtocolTypes.OpenIdConnect) { if (client.FrontChannelLogoutSessionRequired) { url = url.AddQueryString(OidcConstants.EndSessionRequest.Sid, context.SessionId); url = url.AddQueryString(OidcConstants.EndSessionRequest.Issuer, await _issuerNameService.GetCurrentAsync()); } } else if (client.ProtocolType == IdentityServerConstants.ProtocolTypes.WsFederation) { url = url.AddQueryString(Constants.WsFedSignOut.LogoutUriParameterName, Constants.WsFedSignOut.LogoutUriParameterValue); } frontChannelUrls.Add(url); } } } if (frontChannelUrls.Count > 0) { var msg = frontChannelUrls.Aggregate((x, y) => x + ", " + y); _sanitizedLogger.LogDebug("Client front-channel logout URLs: {0}", msg); } else { _sanitizedLogger.LogDebug("No client front-channel logout URLs"); } return frontChannelUrls; } /// <inheritdoc/> public async 
Task<IEnumerable<BackChannelLogoutRequest>> GetBackChannelLogoutNotificationsAsync(LogoutNotificationContext context) { using var activity = Tracing.ServiceActivitySource.StartActivity("LogoutNotificationService.GetBackChannelLogoutNotifications"); var backChannelLogouts = new List<BackChannelLogoutRequest>(); foreach (var clientId in context.ClientIds) { var client = await _clientStore.FindEnabledClientByIdAsync(clientId); if (client != null) { if (client.BackChannelLogoutUri.IsPresent()) { var back = new BackChannelLogoutRequest { ClientId = clientId, LogoutUri = client.BackChannelLogoutUri, SubjectId = context.SubjectId, SessionId = context.SessionId, SessionIdRequired = client.BackChannelLogoutSessionRequired, Issuer = context.Issuer, LogoutReason = context.LogoutReason, }; backChannelLogouts.Add(back); } } } if (backChannelLogouts.Count > 0) { var msg = backChannelLogouts.Select(x => x.LogoutUri).Aggregate((x, y) => x + ", " + y); _sanitizedLogger.LogDebug("Client back-channel logout URLs: {0}", msg); } else { _sanitizedLogger.LogDebug("No client back-channel logout URLs"); } return backChannelLogouts; } }
LogoutNotificationService
csharp
grpc__grpc-dotnet
src/Grpc.AspNetCore.Server.Reflection/GrpcReflectionServiceExtensions.cs
{ "start": 6172, "end": 6749 }
partial class ____ { [LoggerMessage(Level = LogLevel.Debug, EventId = 1, EventName = "ServiceDescriptorNotResolved", Message = "Could not resolve service descriptor for '{ServiceType}'. The service metadata will not be exposed by the reflection service.")] private static partial void ServiceDescriptorNotResolved(ILogger logger, string serviceType); public static void ServiceDescriptorNotResolved(ILogger logger, Type serviceType) { ServiceDescriptorNotResolved(logger, serviceType.FullName ?? string.Empty); } } }
Log
csharp
smartstore__Smartstore
src/Smartstore.Core/Platform/Search/Facets/Facet.cs
{ "start": 170, "end": 1359 }
public class ____ { public Facet(FacetValue value) : this(value.ToString(), value) { } public Facet(string key, FacetValue value) { Guard.NotEmpty(key, nameof(key)); Guard.NotNull(value, nameof(value)); Key = key; Value = value; Children = new List<Facet>(); IsChoice = true; Published = true; } public string Key { get; private set; } public FacetValue Value { get; private set; } /// <summary> /// Gets or sets whether the facet can be selected /// </summary> public bool IsChoice { get; set; } public bool Published { get; set; } public long HitCount { get; set; } public FacetGroup FacetGroup { get; internal set; } public IList<Facet> Children { get; set; } } }
Facet
csharp
nunit__nunit
src/NUnitFramework/tests/SolutionTests/NuspecDependenciesTests.cs
{ "start": 9935, "end": 14329 }
private sealed class ____ { public static void ComparePackages(Dictionary<string, List<PackageWithVersion>> csprojPackages, Dictionary<string, List<PackageWithVersion>> nuspecPackages) { // Iterate through the frameworks in the csprojPackages dictionary foreach (var csprojFramework in csprojPackages.Keys) { if (csprojFramework == NotSpecified) { TestContext.Out.WriteLine("Checking for packages that should be in all frameworks in the .nuspec file"); // Check if the packages from the csproj are present in all nuspec framework Assert.Multiple(() => { foreach (var framework in nuspecPackages.Keys) { MatchForSingleFramework(framework, csprojPackages[csprojFramework], nuspecPackages[framework]); } }); } else { TestContext.Out.WriteLine($"Checking for packages that should be in corresponding '{csprojFramework}' in the .nuspec file"); // Handle specific framework case var matchingNuspecFramework = nuspecPackages.Keys.FirstOrDefault(nuspecFramework => (csprojFramework == ".NETFramework" && nuspecFramework.StartsWith("net") && int.TryParse(nuspecFramework.Substring(3), out var version) && version >= 462) || (csprojFramework != ".NETFramework" && nuspecFramework == csprojFramework)); // Assert that the matching framework was found Assert.That(matchingNuspecFramework, Is.Not.Null, $"Framework '{csprojFramework}' is in .csproj but not in .nuspec."); // Find packages in csproj that are missing in nuspec MatchForSingleFramework(matchingNuspecFramework, csprojPackages[csprojFramework], nuspecPackages[matchingNuspecFramework]); } } } private static void MatchForSingleFramework(string framework, List<PackageWithVersion> csprojPackages, List<PackageWithVersion> nuspecPackages) { List<string> csProjPackagesForFramework = csprojPackages.Select(x => x.Package).ToList(); List<string> nuspecPackagesForFramework = nuspecPackages.Select(x => x.Package).ToList(); var missingPackages = csProjPackagesForFramework.Except(nuspecPackagesForFramework).ToList(); Assert.Multiple(() => { // Assert 
that there are no missing packages in any nuspec framework Assert.That(missingPackages, Is.Empty, $"Missing packages in framework '{framework}' in .nuspec: {string.Join(", ", missingPackages)}"); foreach (var pair in csprojPackages) { var nuspecVersion = nuspecPackages.First(x => x.Package == pair.Package).Version; Assert.That(nuspecVersion, Is.EqualTo(pair.Version), $"Package {pair.Package} in .csproj should have version '{pair.Version}' in .nuspec"); } }); } public static void CheckNuspecPackages(Dictionary<string, List<PackageWithVersion>> nuspecPackages, Dictionary<string, List<PackageWithVersion>> csprojPackages) { // Extract all packages from csproj var allCsprojPackages = csprojPackages.Values.SelectMany(x => x).Select(x => x.Package).ToList(); // Extract all packages from nuspec var allNuspecPackages = nuspecPackages.Values.SelectMany(x => x).Select(x => x.Package).ToList(); // Find packages in nuspec that are missing in csproj var missingPackages = allNuspecPackages.Except(allCsprojPackages).ToList(); Assert.That(missingPackages, Is.Empty, $"Packages in .nuspec that are not in .csproj and should be deleted from nuspec: {string.Join(", ", missingPackages)}"); } } } }
VerifyDependencies
csharp
jellyfin__jellyfin
MediaBrowser.Controller/LiveTv/ITunerHost.cs
{ "start": 289, "end": 2225 }
public interface ____ { /// <summary> /// Gets the name. /// </summary> /// <value>The name.</value> string Name { get; } /// <summary> /// Gets the type. /// </summary> /// <value>The type.</value> string Type { get; } bool IsSupported { get; } /// <summary> /// Gets the channels. /// </summary> /// <param name="enableCache">Option to enable using cache.</param> /// <param name="cancellationToken">The CancellationToken for this operation.</param> /// <returns>Task&lt;IEnumerable&lt;ChannelInfo&gt;&gt;.</returns> Task<List<ChannelInfo>> GetChannels(bool enableCache, CancellationToken cancellationToken); /// <summary> /// Gets the channel stream. /// </summary> /// <param name="channelId">The channel identifier.</param> /// <param name="streamId">The stream identifier.</param> /// <param name="currentLiveStreams">The current live streams.</param> /// <param name="cancellationToken">The cancellation token to cancel operation.</param> /// <returns>Live stream wrapped in a task.</returns> Task<ILiveStream> GetChannelStream(string channelId, string streamId, IList<ILiveStream> currentLiveStreams, CancellationToken cancellationToken); /// <summary> /// Gets the channel stream media sources. /// </summary> /// <param name="channelId">The channel identifier.</param> /// <param name="cancellationToken">The cancellation token.</param> /// <returns>Task&lt;List&lt;MediaSourceInfo&gt;&gt;.</returns> Task<List<MediaSourceInfo>> GetChannelStreamMediaSources(string channelId, CancellationToken cancellationToken); Task<List<TunerHostInfo>> DiscoverDevices(int discoveryDurationMs, CancellationToken cancellationToken); }
ITunerHost
csharp
Xabaril__AspNetCore.Diagnostics.HealthChecks
src/HealthChecks.EventStore/DependencyInjection/EventStoreHealthCheckBuilderExtensions.cs
{ "start": 239, "end": 4140 }
public static class ____ { private const string NAME = "eventstore"; /// <summary> /// Add a health check for EventStore services. /// </summary> /// <param name="builder">The <see cref="IHealthChecksBuilder"/>.</param> /// <param name="eventStoreConnection">The EventStore connection string to be used.</param> /// <param name="login">The EventStore user login. Optional. If <c>null</c> the healthcheck will be processed without authentication.</param> /// <param name="password">The EventStore user password. Optional. If <c>null</c> the healthcheck will be processed without authentication.</param> /// <param name="name">The health check name. Optional. If <c>null</c> the type name 'eventstore' will be used for the name.</param> /// <param name="failureStatus"> /// The <see cref="HealthStatus"/> that should be reported when the health check fails. Optional. If <c>null</c> then /// the default status of <see cref="HealthStatus.Unhealthy"/> will be reported. /// </param> /// <param name="tags">A list of tags that can be used to filter sets of health checks. Optional.</param> /// <param name="timeout">An optional <see cref="TimeSpan"/> representing the timeout of the check.</param> /// <returns>The specified <paramref name="builder"/>.</returns> public static IHealthChecksBuilder AddEventStore( this IHealthChecksBuilder builder, string eventStoreConnection, string? login = default, string? password = default, string? name = default, HealthStatus? failureStatus = default, IEnumerable<string>? tags = default, TimeSpan? timeout = default) { return builder.Add(new HealthCheckRegistration( name ?? NAME, sp => new EventStoreHealthCheck(eventStoreConnection, login, password), failureStatus, tags, timeout)); } /// <summary> /// Add a health check for EventStore services. 
/// </summary> /// <param name="builder">The <see cref="IHealthChecksBuilder"/>.</param> /// <param name="eventStoreConnectionFactory">A function returning the EventStore connection string to be used.</param> /// <param name="login">The EventStore user login. Optional. If <c>null</c> the healthcheck will be processed without authentication.</param> /// <param name="password">The EventStore user password. Optional. If <c>null</c> the healthcheck will be processed without authentication.</param> /// <param name="name">The health check name. Optional. If <c>null</c> the type name 'eventstore' will be used for the name.</param> /// <param name="failureStatus"> /// The <see cref="HealthStatus"/> that should be reported when the health check fails. Optional. If <c>null</c> then /// the default status of <see cref="HealthStatus.Unhealthy"/> will be reported. /// </param> /// <param name="tags">A list of tags that can be used to filter sets of health checks. Optional.</param> /// <param name="timeout">An optional <see cref="TimeSpan"/> representing the timeout of the check.</param> /// <returns>The specified <paramref name="builder"/>.</returns> public static IHealthChecksBuilder AddEventStore( this IHealthChecksBuilder builder, Func<IServiceProvider, string> eventStoreConnectionFactory, string? login = default, string? password = default, string? name = default, HealthStatus? failureStatus = default, IEnumerable<string>? tags = default, TimeSpan? timeout = default) { return builder.Add(new HealthCheckRegistration( name ?? NAME, sp => new EventStoreHealthCheck(eventStoreConnectionFactory(sp), login, password), failureStatus, tags, timeout)); } }
EventStoreHealthCheckBuilderExtensions
csharp
dotnet__maui
src/Controls/tests/TestCases.HostApp/Issues/Issue1931.cs
{ "start": 237, "end": 335 }
public class ____ : NavigationPage { public Issue1931() : base(new MainPage()) { }
Issue1931
csharp
microsoft__PowerToys
src/modules/peek/Peek.Common/WIC/WICImagingFactory.cs
{ "start": 367, "end": 507 }
public class ____ { } [ComImport] [Guid(IID.IWICImagingFactory)] [CoClass(typeof(WICImagingFactory))]
WICImagingFactory
csharp
ShareX__ShareX
ShareX.UploadersLib/URLShorteners/FirebaseDynamicLinksURLShortener.cs
{ "start": 2600, "end": 2741 }
public class ____ { public string shortLink { get; set; } public string previewLink { get; set; } }
FirebaseResponse
csharp
mongodb__mongo-csharp-driver
src/MongoDB.Driver/Core/ConnectionPools/ExclusiveConnectionPool.Helpers.cs
{ "start": 1106, "end": 3170 }
partial class ____ { // private methods private TimeSpan CalculateRemainingTimeout(TimeSpan timeout, Stopwatch stopwatch) { if (timeout == Timeout.InfiniteTimeSpan) { return Timeout.InfiniteTimeSpan; } var elapsed = stopwatch.Elapsed; var remainingTimeout = timeout - elapsed; if (remainingTimeout < TimeSpan.Zero) { throw CreateTimeoutException(elapsed, $"Timed out waiting for a connection after {elapsed.TotalMilliseconds}ms."); } return remainingTimeout; } private Exception CreateTimeoutException(TimeSpan elapsed, string message) { var checkOutsForCursorCount = _checkOutReasonCounter.GetCheckOutsCount(CheckOutReason.Cursor); var checkOutsForTransactionCount = _checkOutReasonCounter.GetCheckOutsCount(CheckOutReason.Transaction); // only use the expanded message format when connected to a load balancer if (checkOutsForCursorCount != 0 || checkOutsForTransactionCount != 0) { var maxPoolSize = _settings.MaxConnections; var availableConnectionsCount = AvailableCount; var checkOutsCount = maxPoolSize - availableConnectionsCount; var checkOutsForOtherCount = checkOutsCount - checkOutsForCursorCount - checkOutsForTransactionCount; message = $"Timed out after {elapsed.TotalMilliseconds}ms waiting for a connection from the connection pool. " + $"maxPoolSize: {maxPoolSize}, " + $"connections in use by cursors: {checkOutsForCursorCount}, " + $"connections in use by transactions: {checkOutsForTransactionCount}, " + $"connections in use by other operations: {checkOutsForOtherCount}."; } return new TimeoutException(message); } // nested classes
ExclusiveConnectionPool
csharp
dotnet__machinelearning
docs/samples/Microsoft.ML.AutoML.Samples/DataStructures/Movie.cs
{ "start": 290, "end": 487 }
public class ____ { [LoadColumn(0)] public string UserId; [LoadColumn(1)] public string MovieId; [LoadColumn(2)] public float Rating; } }
Movie
csharp
smartstore__Smartstore
src/Smartstore.Core/Platform/Search/Facets/FacetUrlHelperBase.cs
{ "start": 161, "end": 4229 }
partial class ____ : IFacetUrlHelper { protected FacetUrlHelperBase(HttpRequest request) { Guard.NotNull(request); Url = UrlPolicy.CombineSegments(request.PathBase, request.Path); InitialQuery = request.QueryString; } protected string Url { get; init; } protected QueryString InitialQuery { get; init; } public abstract int Order { get; } public abstract string Scope { get; } public virtual string Add(params Facet[] facets) { // Remove page index (i) from query string. var qs = new MutableQueryCollection(InitialQuery) .Remove("i"); foreach (var facet in facets) { var parts = GetQueryParts(facet); foreach (var name in parts.Keys) { qs.Add(name, parts[name], !facet.FacetGroup.IsMultiSelect); } } return Url + qs.ToString(); } public virtual string Remove(params Facet[] facets) { // Remove page index (i) from query string. var qs = new MutableQueryCollection(InitialQuery) .Remove("i"); foreach (var facet in facets) { var parts = GetQueryParts(facet); foreach (var name in parts.Keys) { var qsName = name; if (!qs.Store.ContainsKey(name)) { // Query string does not contain that name. Try the unmapped name. qsName = GetUnmappedQueryName(facet); } string[] currentValues = null; // The query string value is not necessarily equal to the facet value. // We must skip subsequent lines here to not add the removed value again and again. if (facet.FacetGroup.Kind != FacetGroupKind.Price && facet.FacetGroup.Kind != FacetGroupKind.Availability && facet.FacetGroup.Kind != FacetGroupKind.NewArrivals) { if (qs.TryGetValue(qsName, out var rawValue)) { currentValues = rawValue.ToString()? .Split(',', StringSplitOptions.RemoveEmptyEntries) .Select(x => x.Trim()) .ToArray(); } } qs.Remove(qsName); if (currentValues != null) { var newValues = parts.TryGetValue(name, out var removeValue) ? 
currentValues.Where(x => !x.EqualsNoCase(removeValue)) : currentValues; if (newValues.Any()) { newValues.Each(x => qs.Add(name, x, false)); } } } } return Url + qs.ToString(); } public virtual string Toggle(Facet facet) { if (facet.Value.IsSelected) { return Remove(facet); } else { return Add(facet); } } public virtual string GetQueryName(Facet facet) { var parts = GetQueryParts(facet); return parts.Keys?.FirstOrDefault(); } /// <summary> /// Gets the unmapped name of a query part, e.g. "m" if the URL contains the brand\manufacturer name. /// </summary> protected abstract string GetUnmappedQueryName(Facet facet); /// <summary> /// Gets a name-to-value map of all query parts. /// </summary> protected abstract Dictionary<string, string> GetQueryParts(Facet facet); } }
FacetUrlHelperBase
csharp
protobuf-net__protobuf-net
src/protobuf-net.Test/Serializers/Collections.cs
{ "start": 10609, "end": 11581 }
public class ____ : ICollection<bool> { #region nope int ICollection<bool>.Count => throw new NotImplementedException(); bool ICollection<bool>.IsReadOnly => throw new NotImplementedException(); void ICollection<bool>.Add(bool item) => throw new NotImplementedException(); void ICollection<bool>.Clear() => throw new NotImplementedException(); bool ICollection<bool>.Contains(bool item) => throw new NotImplementedException(); void ICollection<bool>.CopyTo(bool[] array, int arrayIndex) => throw new NotImplementedException(); IEnumerator<bool> IEnumerable<bool>.GetEnumerator() => throw new NotImplementedException(); IEnumerator IEnumerable.GetEnumerator() => throw new NotImplementedException(); bool ICollection<bool>.Remove(bool item) => throw new NotImplementedException(); #endregion }
CustomNonGenericCollection
csharp
dotnet__maui
src/Core/tests/DeviceTests/Stubs/ContentViewStub.cs
{ "start": 201, "end": 798 }
public class ____ : StubBase, IContentView { ILayoutManager? _layoutManager; public object? Content { get; set; } public IView? PresentedContent { get; set; } public Thickness Padding { get; set; } ILayoutManager LayoutManager => _layoutManager ??= new LayoutManagerStub(); public Size CrossPlatformMeasure(double widthConstraint, double heightConstraint) { return PresentedContent?.Measure(widthConstraint, heightConstraint) ?? Size.Zero; } public Size CrossPlatformArrange(Rect bounds) { return PresentedContent?.Arrange(bounds) ?? Size.Zero; } } }
ContentViewStub
csharp
GtkSharp__GtkSharp
Source/Samples/Sections/Widgets/EditableCellsSection.cs
{ "start": 1588, "end": 6574 }
private enum ____ { Text, Num }; private ListStore CreateItemsModel() { ListStore model; TreeIter iter; /* create array */ _articles = new List<Item>(); AddItems(); /* create list store */ model = new ListStore(typeof(int), typeof(string), typeof(int), typeof(bool)); /* add items */ for (int i = 0; i < _articles.Count; i++) { iter = model.Append(); model.SetValue(iter, (int)ColumnItem.Number, _articles[i].Number); model.SetValue(iter, (int)ColumnItem.Product, _articles[i].Product); model.SetValue(iter, (int)ColumnItem.Yummy, _articles[i].Yummy); } return model; } private static ListStore CreateNumbersModel() { ListStore model; TreeIter iter; /* create list store */ model = new ListStore(typeof(string), typeof(int)); /* add numbers */ for (int i = 0; i < 10; i++) { iter = model.Append(); model.SetValue(iter, (int)ColumnNumber.Text, i.ToString()); } return model; } private void AddItems() { Item foo = new Item { Number = 3, Product = "bottles of coke", Yummy = 20 }; _articles.Add(foo); foo = new Item { Number = 5, Product = "packages of noodles", Yummy = 50 }; _articles.Add(foo); foo = new Item { Number = 2, Product = "packages of chocolate chip cookies", Yummy = 90 }; _articles.Add(foo); foo = new Item { Number = 1, Product = "can vanilla ice cream", Yummy = 60 }; _articles.Add(foo); foo = new Item { Number = 6, Product = "eggs", Yummy = 10 }; _articles.Add(foo); } private void AddColumns(ITreeModel numbersModel) { /* number column */ CellRendererCombo rendererCombo = new CellRendererCombo { Model = numbersModel, TextColumn = (int)ColumnNumber.Text, HasEntry = false, Editable = true }; rendererCombo.Edited += CellEdited; rendererCombo.EditingStarted += EditingStarted; _cellColumnsRender.Add(rendererCombo, (int)ColumnItem.Number); _treeView.InsertColumn(-1, "Number", rendererCombo, "text", (int)ColumnItem.Number); /* product column */ CellRendererText rendererText = new CellRendererText { Editable = true }; rendererText.Edited += CellEdited; 
_cellColumnsRender.Add(rendererText, (int)ColumnItem.Product); _treeView.InsertColumn(-1, "Product", rendererText, "text", (int)ColumnItem.Product); /* yummy column */ CellRendererProgress rendererProgress = new CellRendererProgress(); _cellColumnsRender.Add(rendererProgress, (int)ColumnItem.Yummy); _treeView.InsertColumn(-1, "Yummy", rendererProgress, "value", (int)ColumnItem.Yummy); } private void AddItem(object sender, EventArgs e) { TreeIter iter; if (_articles == null) { return; } Item foo = new Item { Number = 0, Product = "Description here", Yummy = 50 }; _articles.Add(foo); /* Insert a new row below the current one */ _treeView.GetCursor(out TreePath path, out _); if (path != null) { _ = _itemsModel.GetIter(out TreeIter current, path); iter = _itemsModel.InsertAfter(current); } else { iter = _itemsModel.Insert(-1); } /* Set the data for the new row */ _itemsModel.SetValue(iter, (int)ColumnItem.Number, foo.Number); _itemsModel.SetValue(iter, (int)ColumnItem.Product, foo.Product); _itemsModel.SetValue(iter, (int)ColumnItem.Yummy, foo.Yummy); /* Move focus to the new row */ path = _itemsModel.GetPath(iter); TreeViewColumn column = _treeView.GetColumn(0); _treeView.SetCursor(path, column, false); } private void RemoveItem(object sender, EventArgs e) { TreeSelection selection = _treeView.Selection; if (selection.GetSelected(out TreeIter iter)) { TreePath path = _itemsModel.GetPath(iter); int i = path.Indices[0]; _itemsModel.Remove(ref iter); _articles.RemoveAt(i); } } private void CellEdited(object data, EditedArgs args) { TreePath path = new TreePath(args.Path); int column = _cellColumnsRender[(CellRenderer)data]; _itemsModel.GetIter(out TreeIter iter, path); switch (column) { case (int)ColumnItem.Number: { int i = path.Indices[0]; _articles[i].Number = int.Parse(args.NewText); _itemsModel.SetValue(iter, column, _articles[i].Number); } break; case (int)ColumnItem.Product: { string oldText = (string)_itemsModel.GetValue(iter, column); int i = path.Indices[0]; 
_articles[i].Product = args.NewText; _itemsModel.SetValue(iter, column, _articles[i].Product); } break; } } private void EditingStarted(object o, EditingStartedArgs args) { ((ComboBox)args.Editable).RowSeparatorFunc += SeparatorRow; } private bool SeparatorRow(ITreeModel model, TreeIter iter) { TreePath path = model.GetPath(iter); int idx = path.Indices[0]; return idx == 5; } } }
ColumnNumber
csharp
icsharpcode__ILSpy
ICSharpCode.Decompiler.PowerShell/ErrorIds.cs
{ "start": 115, "end": 269 }
public static class ____ { public static readonly string AssemblyLoadFailed = "1"; public static readonly string DecompilationFailed = "2"; } }
ErrorIds
csharp
smartstore__Smartstore
src/Smartstore.Web/Areas/Admin/Controllers/TopicController.cs
{ "start": 557, "end": 19636 }
public class ____ : AdminController { private readonly SmartDbContext _db; private readonly ILocalizedEntityService _localizedEntityService; private readonly IStoreMappingService _storeMappingService; private readonly IAclService _aclService; private readonly ILinkResolver _linkResolver; private readonly IUrlService _urlService; private readonly IStoreContext _storeContext; public TopicController( SmartDbContext db, ILocalizedEntityService localizedEntityService, IStoreMappingService storeMappingService, IAclService aclService, ILinkResolver linkResolver, IUrlService urlService, IStoreContext storeContext) { _db = db; _localizedEntityService = localizedEntityService; _storeMappingService = storeMappingService; _aclService = aclService; _linkResolver = linkResolver; _urlService = urlService; _storeContext = storeContext; } public IActionResult Index() { return RedirectToAction(nameof(List)); } [Permission(Permissions.Cms.Topic.Read)] public IActionResult List() { var model = new TopicListModel { IsSingleStoreMode = _storeContext.IsSingleStoreMode() }; return View(model); } [HttpPost] [Permission(Permissions.Cms.Topic.Read)] public async Task<IActionResult> TopicList(GridCommand command, TopicListModel model) { var query = _db.Topics .AsNoTracking() .ApplyStoreFilter(model.SearchStoreId); if (model.SystemName.HasValue()) { query = query.Where(x => x.SystemName.Contains(model.SystemName)); } if (model.Title.HasValue()) { query = query.ApplySearchFilter(model.Title, Core.Rules.LogicalRuleOperator.Or, x => x.Title, x => x.ShortTitle); } if (model.RenderAsWidget.HasValue) { query = query.Where(x => x.RenderAsWidget == model.RenderAsWidget.Value); } if (model.WidgetZone.HasValue()) { query = query.Where(x => x.WidgetZone.Contains(model.WidgetZone)); } var topics = await query .OrderBy(x => x.SystemName) .ApplyGridCommand(command) .SelectSummary() .ToPagedList(command) .LoadAsync(); var mapper = MapperFactory.GetMapper<Topic, TopicModel>(); var rows = await topics 
.SelectAwait(async x => { var model = await mapper.MapAsync(x); await PrepareTopicModelAsync(x, model); model.WidgetZoneValue = string.Join(", ", x.GetWidgetZones()); model.CookieType = (int?)x.CookieType; model.Body = string.Empty; // Otherwise maxJsonLength could be exceeded. model.Intro = string.Empty; // Otherwise grind may slow down model.ViewUrl = Url.Action(nameof(Edit), "Topic", new { id = x.Id }); return model; }) .AsyncToList(); var gridModel = new GridModel<TopicModel> { Rows = rows, Total = await topics.GetTotalCountAsync() }; return Json(gridModel); } [HttpPost] [Permission(Permissions.Cms.Topic.Delete)] public async Task<IActionResult> TopicDelete(GridSelection selection) { var success = false; var count = 0; var entities = await _db.Topics.GetManyAsync(selection.GetEntityIds(), true); if (entities.Count > 0) { try { _db.Topics.RemoveRange(entities); count = await _db.SaveChangesAsync(); success = true; } catch (Exception ex) { NotifyError(ex); } } return Json(new { Success = success, Count = count }); } [Permission(Permissions.Cms.Topic.Create)] public IActionResult Create() { var model = new TopicModel { TitleTag = "h1" }; AddLocales(model.Locales); AddCookieTypes(model); return View(model); } [HttpPost, ParameterBasedOnFormName("save-continue", "continueEditing")] [Permission(Permissions.Cms.Topic.Create)] public async Task<IActionResult> Create(TopicModel model, bool continueEditing) { if (ModelState.IsValid) { if (!model.IsPasswordProtected) { model.Password = null; } var topic = await MapperFactory.MapAsync<TopicModel, Topic>(model); if (model.WidgetZone != null) { topic.WidgetZone = string.Join(',', model.WidgetZone); } topic.CookieType = (CookieType?)model.CookieType; _db.Topics.Add(topic); await _db.SaveChangesAsync(); var slugResult = await _urlService.SaveSlugAsync(topic, model.SeName, topic.Title.NullEmpty() ?? 
topic.SystemName, true); model.SeName = slugResult.Slug; await UpdateLocalesAsync(topic, model); await _storeMappingService.ApplyStoreMappingsAsync(topic, model.SelectedStoreIds); await _aclService.ApplyAclMappingsAsync(topic, model.SelectedCustomerRoleIds); AddCookieTypes(model, model.CookieType); await Services.EventPublisher.PublishAsync(new ModelBoundEvent(model, topic, Request.Form)); NotifySuccess(T("Admin.ContentManagement.Topics.Updated")); return continueEditing ? RedirectToAction(nameof(Edit), new { id = topic.Id }) : RedirectToAction(nameof(List)); } // If we got this far something failed. Redisplay form. return View(model); } [Permission(Permissions.Cms.Topic.Read)] public async Task<IActionResult> Edit(int id) { var topic = await _db.Topics.FindByIdAsync(id, false); if (topic == null) { return RedirectToAction(nameof(List)); } var model = await MapperFactory.MapAsync<Topic, TopicModel>(topic); await PrepareTopicModelAsync(topic, model); model.WidgetZone = topic.WidgetZone.SplitSafe(',').ToArray(); model.CookieType = (int?)topic.CookieType; await AddLocalesAsync(model.Locales, async (locale, languageId) => { locale.ShortTitle = topic.GetLocalized(x => x.ShortTitle, languageId, false, false); locale.Title = topic.GetLocalized(x => x.Title, languageId, false, false); locale.Intro = topic.GetLocalized(x => x.Intro, languageId, false, false); locale.Body = topic.GetLocalized(x => x.Body, languageId, false, false); locale.MetaKeywords = topic.GetLocalized(x => x.MetaKeywords, languageId, false, false); locale.MetaDescription = topic.GetLocalized(x => x.MetaDescription, languageId, false, false); locale.MetaTitle = topic.GetLocalized(x => x.MetaTitle, languageId, false, false); locale.SeName = await topic.GetActiveSlugAsync(languageId, false, false); }); // Get menu links. 
IPagedList<MenuEntity> menus = null; var pageIndex = 0; do { menus = await _db.Menus .ApplyStandardFilter(includeHidden: true) .ApplySorting() .ToPagedList(pageIndex++, 500) .LoadAsync(); foreach (var menu in menus) { foreach (var item in menu.Items.Where(x => x.ProviderName != null && x.ProviderName == "entity")) { try { var link = await _linkResolver.ResolveAsync(item.Model); if (link.Expression.Schema == DefaultLinkProvider.SchemaTopic && link.EntityId == topic.Id) { var url = Url.Action("EditItem", "Menu", new { id = item.Id, area = "Admin" }); var label = string.Concat( menu.Title.NullEmpty() ?? menu.SystemName.NullEmpty() ?? StringExtensions.NotAvailable, " » ", item.Title.NullEmpty() ?? link.Label.NullEmpty() ?? StringExtensions.NotAvailable); model.MenuLinks[url] = label; } } catch { ModelState.AddModelError(string.Empty, T("Admin.ContentManagement.Menus.Item.InvalidTargetLink", item.Model, item.Title)); } } } } while (menus.HasNextPage); return View(model); } [HttpPost] [ParameterBasedOnFormName("save-continue", "continueEditing")] [Permission(Permissions.Cms.Topic.Update)] public async Task<IActionResult> Edit(TopicModel model, bool continueEditing) { var topic = await _db.Topics.FindByIdAsync(model.Id); if (topic == null) { return RedirectToAction(nameof(List)); } if (!model.IsPasswordProtected) { model.Password = null; } if (ModelState.IsValid) { await MapperFactory.MapAsync(model, topic); if (model.WidgetZone != null) { topic.WidgetZone = string.Join(',', model.WidgetZone); } topic.CookieType = (CookieType?)model.CookieType; var slugResult = await _urlService.SaveSlugAsync(topic, model.SeName, topic.Title.NullEmpty() ?? 
topic.SystemName, true); model.SeName = slugResult.Slug; await UpdateLocalesAsync(topic, model); await _storeMappingService.ApplyStoreMappingsAsync(topic, model.SelectedStoreIds); await _aclService.ApplyAclMappingsAsync(topic, model.SelectedCustomerRoleIds); await _db.SaveChangesAsync(); AddCookieTypes(model, model.CookieType); await Services.EventPublisher.PublishAsync(new ModelBoundEvent(model, topic, Request.Form)); NotifySuccess(T("Admin.ContentManagement.Topics.Updated")); return continueEditing ? RedirectToAction(nameof(Edit), new { id = topic.Id }) : RedirectToAction(nameof(List)); } // If we got this far something failed. Redisplay form. await PrepareTopicModelAsync(topic, model); return View(model); } [HttpPost] [Permission(Permissions.Cms.Topic.Delete)] public async Task<IActionResult> Delete(int id) { var topic = await _db.Topics.FindByIdAsync(id); if (topic == null) { return NotFound(); } if (topic.IsSystemTopic) { NotifyError(T("Admin.ContentManagement.Topics.CannotBeDeleted")); return RedirectToAction(nameof(Edit), new { id = topic.Id }); } _db.Topics.Remove(topic); await _db.SaveChangesAsync(); NotifySuccess(T("Admin.ContentManagement.Topics.Deleted")); return RedirectToAction(nameof(List)); } /// <summary> /// (AJAX) Gets a list of all available topics. /// </summary> /// <param name="label">Text for optional entry. 
If not null an entry with the specified label text and the Id 0 will be added to the list.</param> /// <param name="selectedIds">Ids of selected entities.</param> /// <param name="includeWidgets">Specifies whether to include topics which are defined to be rendered as Widgets.</param> /// <param name="includeHomePage">Specifies whether to include homepage text.</param> /// <returns>List of all topics as JSON.</returns> public async Task<IActionResult> AllTopics(string label, int selectedId, bool includeWidgets = false, bool includeHomePage = false) { var topics = await _db.Topics .AsNoTracking() .ApplyStandardFilter(true) .Where(x => includeWidgets || !x.RenderAsWidget) .ToListAsync(); var list = topics .Select(x => { var item = new ChoiceListItem { Id = x.Id.ToString(), Text = x.GetLocalized(y => y.Title).Value.NullEmpty() ?? x.SystemName, Selected = x.Id == selectedId }; if (!item.Text.EqualsNoCase(x.SystemName)) { item.Description = x.SystemName; } return item; }) .ToList(); if (label.HasValue()) { list.Insert(0, new ChoiceListItem { Id = "0", Text = label, Selected = false }); } if (includeHomePage) { list.Insert(0, new ChoiceListItem { Id = "-10", Text = T("Admin.ContentManagement.Homepage").Value, Selected = false }); } return new JsonResult(list); } #region Helper functions private async Task PrepareTopicModelAsync(Topic topic, TopicModel model) { model.Url = await GetTopicUrlAsync(topic); model.SelectedStoreIds = await _storeMappingService.GetAuthorizedStoreIdsAsync(topic); model.SelectedCustomerRoleIds = await _aclService.GetAuthorizedCustomerRoleIdsAsync(topic); AddCookieTypes(model, model.CookieType); } private async Task UpdateLocalesAsync(Topic topic, TopicModel model) { foreach (var localized in model.Locales) { await _localizedEntityService.ApplyLocalizedValueAsync(topic, x => x.ShortTitle, localized.ShortTitle, localized.LanguageId); await _localizedEntityService.ApplyLocalizedValueAsync(topic, x => x.Title, localized.Title, localized.LanguageId); 
await _localizedEntityService.ApplyLocalizedValueAsync(topic, x => x.Intro, localized.Intro, localized.LanguageId); await _localizedEntityService.ApplyLocalizedValueAsync(topic, x => x.Body, localized.Body, localized.LanguageId); await _localizedEntityService.ApplyLocalizedValueAsync(topic, x => x.MetaKeywords, localized.MetaKeywords, localized.LanguageId); await _localizedEntityService.ApplyLocalizedValueAsync(topic, x => x.MetaDescription, localized.MetaDescription, localized.LanguageId); await _localizedEntityService.ApplyLocalizedValueAsync(topic, x => x.MetaTitle, localized.MetaTitle, localized.LanguageId); var slugResult = await _urlService.SaveSlugAsync(topic, localized.SeName, localized.Title.NullEmpty() ?? localized.ShortTitle, false, localized.LanguageId); model.SeName = slugResult.Slug; } } private async Task<string> GetTopicUrlAsync(Topic topic) { string url = null; try { if (topic.LimitedToStores) { var storeIds = await _storeMappingService.GetAuthorizedStoreIdsAsync(topic); var currentStoreIsAuthorized = storeIds.Contains(Services.StoreContext.CurrentStore.Id); if (currentStoreIsAuthorized) { var store = Services.StoreContext.GetStoreById(Services.StoreContext.CurrentStore.Id); if (store != null) { url = store.GetAbsoluteUrl(Url.RouteUrl("Topic", new { SeName = await topic.GetActiveSlugAsync() })); } } } if (url.IsEmpty()) { url = Url.RouteUrl("Topic", new { SeName = await topic.GetActiveSlugAsync() }, Request.Scheme); } } catch (Exception ex) { Logger.Error(ex); } return url; } private static void AddCookieTypes(TopicModel model, int? 
selectedType = 0) { model.AvailableCookieTypes.AddRange(new[] { new SelectListItem { Text = "Required", Value = ((int)CookieType.Required).ToString(), Selected = CookieType.Required == (CookieType?)selectedType }, new SelectListItem { Text = "Analytics", Value = ((int)CookieType.Analytics).ToString(), Selected = CookieType.Analytics == (CookieType?)selectedType }, new SelectListItem { Text = "ThirdParty", Value = ((int)CookieType.ThirdParty).ToString(), Selected = CookieType.ThirdParty == (CookieType?)selectedType }, new SelectListItem { Text = "ConsentAdUserData", Value = ((int)CookieType.ConsentAdUserData).ToString(), Selected = CookieType.ConsentAdUserData == (CookieType?)selectedType }, new SelectListItem { Text = "ConsentAdPersonalization", Value = ((int)CookieType.ConsentAdPersonalization).ToString(), Selected = CookieType.ConsentAdPersonalization == (CookieType?)selectedType } }); } #endregion } }
TopicController
csharp
grpc__grpc-dotnet
test/Grpc.Net.Client.Web.Tests/GrpcWebResponseStreamTests.cs
{ "start": 12486, "end": 13774 }
private class ____ : HttpHeaders { } [TestCase("", "")] [TestCase(" ", "")] [TestCase(" a ", "a")] [TestCase(" ", "")] [TestCase("a ", "a")] [TestCase(" a", "a")] [TestCase("a a", "a a")] [TestCase(" a a ", "a a")] public void Trim(string initial, string expected) { var result = GrpcWebResponseStream.Trim(Encoding.UTF8.GetBytes(initial)); var s = Encoding.UTF8.GetString(result.ToArray()); Assert.AreEqual(expected, s); } private static Task<int> ReadAsync(Stream stream, Memory<byte> data, CancellationToken cancellationToken = default) { #if NET462 var success = MemoryMarshal.TryGetArray<byte>(data, out var segment); Debug.Assert(success); return stream.ReadAsync(segment.Array, segment.Offset, segment.Count, cancellationToken); #else return stream.ReadAsync(data, cancellationToken).AsTask(); #endif } private static GrpcWebResponseStream CreateResponseStream(Stream stream, HttpHeaders? trailingHeaders = null, HttpResponseMessage? responseMessage = null) { return new GrpcWebResponseStream(stream, trailingHeaders ?? new TestHttpHeaders(), responseMessage ?? new HttpResponseMessage()); } }
TestHttpHeaders
csharp
nopSolutions__nopCommerce
src/Presentation/Nop.Web/Areas/Admin/Models/Tax/TaxProviderModel.cs
{ "start": 184, "end": 950 }
public partial record ____ : BaseNopModel, IPluginModel { #region Properties [NopResourceDisplayName("Admin.Configuration.Tax.Providers.Fields.FriendlyName")] public string FriendlyName { get; set; } [NopResourceDisplayName("Admin.Configuration.Tax.Providers.Fields.SystemName")] public string SystemName { get; set; } [NopResourceDisplayName("Admin.Configuration.Tax.Providers.Fields.IsPrimaryTaxProvider")] public bool IsPrimaryTaxProvider { get; set; } [NopResourceDisplayName("Admin.Configuration.Tax.Providers.Configure")] public string ConfigurationUrl { get; set; } public string LogoUrl { get; set; } public int DisplayOrder { get; set; } public bool IsActive { get; set; } #endregion }
TaxProviderModel
csharp
ChilliCream__graphql-platform
src/HotChocolate/Core/src/Execution/Processing/OperationCompilerPool.cs
{ "start": 461, "end": 828 }
private sealed class ____ : IPooledObjectPolicy<OperationCompiler> { private readonly InputParser _inputParser; public Policy(InputParser inputParser) { _inputParser = inputParser; } public OperationCompiler Create() => new(_inputParser); public bool Return(OperationCompiler obj) => true; } }
Policy
csharp
bitwarden__server
test/Core.Test/Vault/AutoFixture/CipherFixtures.cs
{ "start": 1443, "end": 1606 }
internal class ____ : BitCustomizeAttribute { public override ICustomization GetCustomization() => new OrganizationCipher(); }
OrganizationCipherCustomizeAttribute
csharp
Cysharp__MemoryPack
src/MemoryPack.Unity/Assets/Tests/PerfTest.cs
{ "start": 125, "end": 5847 }
public class ____ { Person person; Primitives primitives; NestCase nestCase; string personJson; string primitivesJson; string nestCaseJson; byte[] personMempack; byte[] primitivesMempack; byte[] nestCaseMempack; public PerfTest() { person = new Person { Age = 888, Name = "aaaaaaaaa" }; primitives = new Primitives() { Bool = false, Byte = 12, Char = 'z', Double = 1231.214, Float = 314.532f, Int = 9999, Long = 99999999, Short = 12, String = "hogemogehugahuga" }; var inner = new Inner { Int = 9999999, String = "hogemoge", Double = 1321.2 }; nestCase = new NestCase { A = inner, B = inner, C = inner, D = inner, E = inner, F = inner, G = inner, H = inner, I = inner, }; personJson = JsonUtility.ToJson(person); primitivesJson = JsonUtility.ToJson(primitives); nestCaseJson = JsonUtility.ToJson(nestCase); personMempack = MemoryPackSerializer.Serialize(person); primitivesMempack = MemoryPackSerializer.Serialize(primitives); nestCaseMempack = MemoryPackSerializer.Serialize(nestCase); Debug.Log(JsonUtility.FromJson<Person>(personJson).Name == person.Name); Debug.Log(JsonUtility.FromJson<Primitives>(primitivesJson).Short == primitives.Short); Debug.Log(JsonUtility.FromJson<NestCase>(nestCaseJson).E.Double == nestCase.E.Double); Debug.Log(MemoryPackSerializer.Deserialize<Person>(personMempack).Name == person.Name); Debug.Log(MemoryPackSerializer.Deserialize<Primitives>(primitivesMempack).Short == primitives.Short); Debug.Log(MemoryPackSerializer.Deserialize<NestCase>(nestCaseMempack).E.Double == nestCase.E.Double); } [Test, Performance] public void Serialize_Person_JsonUtility() { Measure.Method(() => { JsonUtility.ToJson(person); }) .WarmupCount(10) .IterationsPerMeasurement(10000) .MeasurementCount(10) .Run(); } [Test, Performance] public void Serialize_Primitives_JsonUtility() { Measure.Method(() => { JsonUtility.ToJson(primitives); }) .WarmupCount(10) .IterationsPerMeasurement(10000) .MeasurementCount(10) .Run(); } [Test, Performance] public void 
Serialize_Nestcase_JsonUtility() { Measure.Method(() => { JsonUtility.ToJson(nestCase); }) .WarmupCount(10) .IterationsPerMeasurement(10000) .IterationsPerMeasurement(10000) .MeasurementCount(10) .Run(); } [Test, Performance] public void Serialize_Person_MemoryPack() { Measure.Method(() => { MemoryPackSerializer.Serialize(person); }) .WarmupCount(10) .IterationsPerMeasurement(10000) .MeasurementCount(10) .Run(); } [Test, Performance] public void Serialize_Primitives_MemoryPack() { Measure.Method(() => { MemoryPackSerializer.Serialize(primitives); }) .WarmupCount(10) .IterationsPerMeasurement(10000) .MeasurementCount(10) .Run(); } [Test, Performance] public void Serialize_Nestcase_MemoryPack() { Measure.Method(() => { MemoryPackSerializer.Serialize(nestCase); }) .WarmupCount(10) .IterationsPerMeasurement(10000) .MeasurementCount(10) .Run(); } // [Test, Performance] public void Deserialize_Person_JsonUtility() { Measure.Method(() => { JsonUtility.FromJson<Person>(personJson); }) .WarmupCount(10) .IterationsPerMeasurement(10000) .MeasurementCount(10) .Run(); } [Test, Performance] public void Deserialize_Primitives_JsonUtility() { Measure.Method(() => { JsonUtility.FromJson<Primitives>(primitivesJson); }) .WarmupCount(10) .IterationsPerMeasurement(10000) .MeasurementCount(10) .Run(); } [Test, Performance] public void Deserialize_Nestcase_JsonUtility() { Measure.Method(() => { JsonUtility.FromJson<NestCase>(nestCaseJson); }) .WarmupCount(10) .IterationsPerMeasurement(10000) .IterationsPerMeasurement(10000) .MeasurementCount(10) .Run(); } [Test, Performance] public void Deserialize_Person_MemoryPack() { Measure.Method(() => { MemoryPackSerializer.Deserialize<Person>(personMempack); }) .WarmupCount(10) .IterationsPerMeasurement(10000) .MeasurementCount(10) .Run(); } [Test, Performance] public void Deserialize_Primitives_MemoryPack() { Measure.Method(() => { MemoryPackSerializer.Deserialize<Primitives>(primitivesMempack); }) .WarmupCount(10) 
.IterationsPerMeasurement(10000) .MeasurementCount(10) .Run(); } [Test, Performance] public void Deserialize_Nestcase_MemoryPack() { Measure.Method(() => { MemoryPackSerializer.Deserialize<NestCase>(nestCaseMempack); }) .WarmupCount(10) .IterationsPerMeasurement(10000) .MeasurementCount(10) .Run(); } }
PerfTest
csharp
npgsql__npgsql
src/Npgsql/Internal/ResolverFactories/UnmappedTypeInfoResolverFactory.cs
{ "start": 4113, "end": 6650 }
class ____ : DynamicTypeInfoResolver { protected override DynamicMappingCollection? GetMappings(Type? type, DataTypeName dataTypeName, PgSerializerOptions options) { var matchedType = type; if ((type is not null && type != typeof(object) && !IsTypeOrNullableOfType(type, static type => type.IsConstructedGenericType && type.GetGenericTypeDefinition() == typeof(NpgsqlRange<>), out matchedType)) || options.DatabaseInfo.GetPostgresType(dataTypeName) is not PostgresRangeType rangeType) return null; // Input matchedType here as we don't want an NpgsqlRange over Nullable<T> (it has its own nullability tracking, for better or worse) var subInfo = options.GetTypeInfoInternal( matchedType is null ? null : matchedType == typeof(object) ? matchedType : matchedType.GetGenericArguments()[0], options.ToCanonicalTypeId(rangeType.Subtype.GetRepresentationalType())); // We have no generic RangeConverterResolver so we would not know how to compose a range mapping for such infos. // See https://github.com/npgsql/npgsql/issues/5268 if (subInfo is not { IsResolverInfo: false }) return null; subInfo = subInfo.ToNonBoxing(); var converterType = typeof(NpgsqlRange<>).MakeGenericType(subInfo.Type); return CreateCollection().AddMapping(matchedType ?? converterType, dataTypeName, (options, mapping, _) => new PgTypeInfo( options, (PgConverter)Activator.CreateInstance(typeof(RangeConverter<>).MakeGenericType(subInfo.Type), subInfo.GetResolution().Converter)!, new DataTypeName(mapping.DataTypeName), unboxedType: matchedType is not null && matchedType != converterType ? 
converterType : null ) { PreferredFormat = subInfo.PreferredFormat, SupportsWriting = subInfo.SupportsWriting }, mapping => mapping with { MatchRequirement = MatchRequirement.DataTypeName }); } } [RequiresUnreferencedCode("The use of unmapped enums, ranges or multiranges requires reflection usage which is incompatible with trimming.")] [RequiresDynamicCode("The use of unmapped enums, ranges or multiranges requires dynamic code usage which is incompatible with NativeAOT.")]
RangeResolver
csharp
NLog__NLog
src/NLog/Internal/SetupExtensionsBuilder.cs
{ "start": 1697, "end": 1949 }
internal sealed class ____ : ISetupExtensionsBuilder { internal SetupExtensionsBuilder(LogFactory logFactory) { LogFactory = logFactory; } public LogFactory LogFactory { get; } } }
SetupExtensionsBuilder
csharp
ShareX__ShareX
ShareX.ImageEffectsLib/WatermarkConfig.cs
{ "start": 1080, "end": 1844 }
public class ____ { public WatermarkType Type = WatermarkType.Text; public ContentAlignment Placement = ContentAlignment.BottomRight; public int Offset = 5; public DrawText Text = new DrawText { DrawTextShadow = false }; public DrawImage Image = new DrawImage(); public Bitmap Apply(Bitmap bmp) { Text.Placement = Image.Placement = Placement; Text.Offset = Image.Offset = new Point(Offset, Offset); switch (Type) { default: case WatermarkType.Text: return Text.Apply(bmp); case WatermarkType.Image: return Image.Apply(bmp); } } } }
WatermarkConfig
csharp
EventStore__EventStore
src/KurrentDB.Testing/Disposables.cs
{ "start": 212, "end": 761 }
public sealed class ____ : IAsyncDisposable { readonly IList<object> _disposables = []; public T RegisterAsync<T>(T item) where T : IAsyncDisposable { _disposables.Add(item); return item; } public T Register<T>(T item) where T : IDisposable { _disposables.Add(item); return item; } public async ValueTask DisposeAsync() { foreach (var disposable in _disposables.Reverse()) { if (disposable is IAsyncDisposable x) { await x.DisposeAsync(); } else if (disposable is IDisposable y) { y.Dispose(); } } } }
Disposables
csharp
MassTransit__MassTransit
tests/MassTransit.RabbitMqTransport.Tests/PublishTimeout_Specs.cs
{ "start": 335, "end": 1714 }
public class ____ : AsyncTestFixture { [Test] public async Task Should_fault_with_operation_cancelled_on_publish() { var busControl = Bus.Factory.CreateUsingRabbitMq(x => { BusTestFixture.ConfigureBusDiagnostics(x); x.Host("unknown_host"); x.AutoStart = true; }); using var startTimeout = new CancellationTokenSource(TimeSpan.FromSeconds(20)); Task<BusHandle> startTask = busControl.StartAsync(startTimeout.Token).OrCanceled(TestCancellationToken); var publishTimer = Stopwatch.StartNew(); Assert.ThrowsAsync<OperationCanceledException>(async () => { using var publishTimeout = new CancellationTokenSource(TimeSpan.FromSeconds(10)); await busControl.Publish(new PingMessage(), publishTimeout.Token); }); publishTimer.Stop(); var publishElapsed = publishTimer.Elapsed; Assert.That(publishElapsed, Is.LessThan(TimeSpan.FromSeconds(19))); Assert.ThrowsAsync<RabbitMqConnectionException>(async () => { await startTask; }); } public PublishTimeout_Specs() : base(new InMemoryTestHarness()) { } } }
PublishTimeout_Specs
csharp
dotnetcore__Util
src/Util.Templates.Handlebars/HandlebarsDotNet/IHandlebarsTemplateEngine.cs
{ "start": 93, "end": 419 }
public interface ____ : ITemplateEngine { /// <summary> /// 设置文本编码器 /// </summary> /// <param name="encoder">文本编码器</param> IHandlebarsTemplateEngine Encoder( ITextEncoder encoder ); /// <summary> /// 设置 Html 编码器 /// </summary> IHandlebarsTemplateEngine HtmlEncoder(); }
IHandlebarsTemplateEngine
csharp
unoplatform__uno
src/Uno.UI/UI/Xaml/Controls/ContentPresenter/INativeElementHostingExtension.cs
{ "start": 100, "end": 591 }
internal interface ____ { #if UNO_SUPPORTS_NATIVEHOST bool IsNativeElement(object content); void AttachNativeElement(object content); void DetachNativeElement(object content); void ArrangeNativeElement(object content, Rect arrangeRect); Size MeasureNativeElement(object content, Size childMeasuredSize, Size availableSize); object CreateSampleComponent(string text); void ChangeNativeElementOpacity(object content, double opacity); #endif } }
INativeElementHostingExtension
csharp
dotnet__orleans
test/Grains/TestGrains/SlowConsumingGrains/SlowConsumingGrain.cs
{ "start": 275, "end": 2040 }
public class ____ : Grain, ISlowConsumingGrain { private readonly ILogger logger; public SlowObserver<int> ConsumerObserver { get; private set; } public StreamSubscriptionHandle<int> ConsumerHandle { get; set; } public SlowConsumingGrain(ILoggerFactory loggerFactory) { this.logger = loggerFactory.CreateLogger($"{this.GetType().Name}-{this.IdentityString}"); } public override Task OnActivateAsync(CancellationToken cancellationToken) { logger.LogInformation("OnActivateAsync"); ConsumerHandle = null; return Task.CompletedTask; } public Task<int> GetNumberConsumed() { return Task.FromResult(this.ConsumerObserver.NumConsumed); } public async Task BecomeConsumer(Guid streamId, string streamNamespace, string providerToUse) { logger.LogInformation("BecomeConsumer"); ConsumerObserver = new SlowObserver<int>(this, logger); IStreamProvider streamProvider = this.GetStreamProvider(providerToUse); var consumer = streamProvider.GetStream<int>(streamNamespace, streamId); ConsumerHandle = await consumer.SubscribeAsync(ConsumerObserver); } public async Task StopConsuming() { logger.LogInformation("StopConsuming"); if (ConsumerHandle != null) { await ConsumerHandle.UnsubscribeAsync(); ConsumerHandle = null; } } } /// <summary> /// SlowObserver keep rewind to the first item it received, to mimic slow consuming behavior /// </summary> /// <typeparam name="T"></typeparam>
SlowConsumingGrain
csharp
FoundatioFx__Foundatio
src/Foundatio/DeepCloner/Helpers/DeepClonerCache.cs
{ "start": 100, "end": 3104 }
internal static class ____ { private static readonly ConcurrentDictionary<Type, object> _typeCache = new(); private static readonly ConcurrentDictionary<Type, object> _typeCacheDeepTo = new(); private static readonly ConcurrentDictionary<Type, object> _typeCacheShallowTo = new(); private static readonly ConcurrentDictionary<Type, object> _structAsObjectCache = new(); private static readonly ConcurrentDictionary<Tuple<Type, Type>, object> _typeConvertCache = new(); public static object GetOrAddClass<T>(Type type, Func<Type, T> adder) { // return _typeCache.GetOrAdd(type, x => adder(x)); // this implementation is slightly faster than getoradd object value; if (_typeCache.TryGetValue(type, out value)) return value; // will lock by type object to ensure only one type generator is generated simultaneously lock (type) { value = _typeCache.GetOrAdd(type, t => adder(t)); } return value; } public static object GetOrAddDeepClassTo<T>(Type type, Func<Type, T> adder) { object value; if (_typeCacheDeepTo.TryGetValue(type, out value)) return value; // will lock by type object to ensure only one type generator is generated simultaneously lock (type) { value = _typeCacheDeepTo.GetOrAdd(type, t => adder(t)); } return value; } public static object GetOrAddShallowClassTo<T>(Type type, Func<Type, T> adder) { object value; if (_typeCacheShallowTo.TryGetValue(type, out value)) return value; // will lock by type object to ensure only one type generator is generated simultaneously lock (type) { value = _typeCacheShallowTo.GetOrAdd(type, t => adder(t)); } return value; } public static object GetOrAddStructAsObject<T>(Type type, Func<Type, T> adder) { // return _typeCache.GetOrAdd(type, x => adder(x)); // this implementation is slightly faster than getoradd object value; if (_structAsObjectCache.TryGetValue(type, out value)) return value; // will lock by type object to ensure only one type generator is generated simultaneously lock (type) { value = _structAsObjectCache.GetOrAdd(type, t => 
adder(t)); } return value; } public static T GetOrAddConvertor<T>(Type from, Type to, Func<Type, Type, T> adder) { return (T)_typeConvertCache.GetOrAdd(new Tuple<Type, Type>(from, to), (tuple) => adder(tuple.Item1, tuple.Item2)); } /// <summary> /// This method can be used when we switch between safe / unsafe variants (for testing) /// </summary> public static void ClearCache() { _typeCache.Clear(); _typeCacheDeepTo.Clear(); _typeCacheShallowTo.Clear(); _structAsObjectCache.Clear(); _typeConvertCache.Clear(); } }
DeepClonerCache
csharp
files-community__Files
src/Files.App.Controls/Storage/Data/ThicknessCheck.cs
{ "start": 227, "end": 610 }
public enum ____ { /// <summary> /// The ThicknessCheck for when the Value Thickness is thickest. /// </summary> Value, /// <summary> /// The ThicknessCheck for when the Track Thickness is thickest. /// </summary> Track, /// <summary> /// The ThicknessCheck for when the both Value and Track /// Thickness is equal. /// </summary> Equal, } }
ThicknessCheck
csharp
dotnet__maui
src/Controls/src/Build.Tasks/CompiledConverters/CornerRadiusTypeConverter.cs
{ "start": 244, "end": 1818 }
class ____ : ICompiledTypeConverter { public IEnumerable<Instruction> ConvertFromString(string value, ILContext context, BaseNode node) { var module = context.Body.Method.Module; if (!string.IsNullOrEmpty(value)) { double l, tl, tr, bl, br; var cornerradius = value.Split(','); switch (cornerradius.Length) { case 1: if (double.TryParse(cornerradius[0], NumberStyles.Number, CultureInfo.InvariantCulture, out l)) return GenerateIL(context, module, l); break; case 4: if (double.TryParse(cornerradius[0], NumberStyles.Number, CultureInfo.InvariantCulture, out tl) && double.TryParse(cornerradius[1], NumberStyles.Number, CultureInfo.InvariantCulture, out tr) && double.TryParse(cornerradius[2], NumberStyles.Number, CultureInfo.InvariantCulture, out bl) && double.TryParse(cornerradius[3], NumberStyles.Number, CultureInfo.InvariantCulture, out br)) return GenerateIL(context, module, tl, tr, bl, br); break; } } throw new BuildException(BuildExceptionCode.Conversion, node, null, value, typeof(CornerRadius)); } IEnumerable<Instruction> GenerateIL(ILContext context, ModuleDefinition module, params double[] args) { foreach (var d in args) yield return Instruction.Create(OpCodes.Ldc_R8, d); yield return Instruction.Create(OpCodes.Newobj, module.ImportCtorReference(context.Cache, ("Microsoft.Maui", "Microsoft.Maui", "CornerRadius"), parameterTypes: args.Select(a => ("mscorlib", "System", "Double")).ToArray())); } } }
CornerRadiusTypeConverter
csharp
dotnet__orleans
src/Orleans.Serialization/Codecs/IntegerCodec.cs
{ "start": 392, "end": 2562 }
public sealed class ____ : IFieldCodec<bool> { void IFieldCodec<bool>.WriteField<TBufferWriter>(ref Writer<TBufferWriter> writer, uint fieldIdDelta, Type expectedType, bool value) { ReferenceCodec.MarkValueField(writer.Session); writer.WriteFieldHeader(fieldIdDelta, expectedType, typeof(bool), WireType.VarInt); writer.WriteByte(value ? (byte)3 : (byte)1); // writer.WriteVarUInt32(value ? 1U : 0U); } /// <summary> /// Writes a field without type info (expected type is statically known). /// </summary> /// <typeparam name="TBufferWriter">The buffer writer type.</typeparam> /// <param name="writer">The writer.</param> /// <param name="fieldIdDelta">The field identifier delta.</param> /// <param name="value">The value.</param> [MethodImpl(MethodImplOptions.AggressiveInlining)] public static void WriteField<TBufferWriter>(ref Writer<TBufferWriter> writer, uint fieldIdDelta, bool value) where TBufferWriter : IBufferWriter<byte> { ReferenceCodec.MarkValueField(writer.Session); writer.WriteFieldHeaderExpected(fieldIdDelta, WireType.VarInt); writer.WriteByte(value ? (byte)3 : (byte)1); // writer.WriteVarUInt32(value ? 1U : 0U); } /// <inheritdoc/> bool IFieldCodec<bool>.ReadValue<TInput>(ref Reader<TInput> reader, Field field) => ReadValue(ref reader, field); /// <summary> /// Reads a value. /// </summary> /// <typeparam name="TInput">The reader input type.</typeparam> /// <param name="reader">The reader.</param> /// <param name="field">The field.</param> /// <returns>The value.</returns> [MethodImpl(MethodImplOptions.AggressiveInlining)] public static bool ReadValue<TInput>(ref Reader<TInput> reader, Field field) { ReferenceCodec.MarkValueField(reader.Session); return reader.ReadUInt8(field.WireType) != 0; } } /// <summary> /// Serializer for <see cref="char"/>. /// </summary> [RegisterSerializer]
BoolCodec
csharp
ChilliCream__graphql-platform
src/HotChocolate/Core/src/Execution/RequestExecutorManager.cs
{ "start": 19170, "end": 20235 }
private sealed class ____( IRequestExecutor executor, ServiceProvider services, IExecutionDiagnosticEvents diagnosticEvents, RequestExecutorSetup setup, TypeModuleChangeMonitor typeModuleChangeMonitor, TimeSpan evictionTimeout) : IAsyncDisposable { private bool _disposed; public IRequestExecutor Executor { get; } = executor; public ServiceProvider Services { get; } = services; public IExecutionDiagnosticEvents DiagnosticEvents { get; } = diagnosticEvents; public RequestExecutorSetup Setup { get; } = setup; public TypeModuleChangeMonitor TypeModuleChangeMonitor { get; } = typeModuleChangeMonitor; public TimeSpan EvictionTimeout { get; } = evictionTimeout; public async ValueTask DisposeAsync() { if (!_disposed) { await Services.DisposeAsync(); TypeModuleChangeMonitor.Dispose(); _disposed = true; } } }
RegisteredExecutor
csharp
MassTransit__MassTransit
src/MassTransit.Abstractions/Middleware/Middleware/DelegateFilter.cs
{ "start": 123, "end": 746 }
public class ____<TContext> : IFilter<TContext> where TContext : class, PipeContext { readonly Action<TContext> _callback; public DelegateFilter(Action<TContext> callback) { _callback = callback; } void IProbeSite.Probe(ProbeContext context) { context.CreateFilterScope("delegate"); } [DebuggerNonUserCode] [DebuggerStepThrough] public Task Send(TContext context, IPipe<TContext> next) { _callback(context); return next.Send(context); } } }
DelegateFilter
csharp
FastEndpoints__FastEndpoints
Src/Library/Endpoint/Auxiliary/Group.cs
{ "start": 3219, "end": 3527 }
interface ____ { void InitGroup(EndpointDefinition def); } /// <summary> /// generic attribute for designating a group that an endpoint belongs. only effective when attribute based endpoint configuration is being used. /// </summary> /// <typeparam name="TEndpointGroup">the type of the group
IGroupAttribute
csharp
ServiceStack__ServiceStack
ServiceStack/src/ServiceStack.Client/AuthDtos.cs
{ "start": 6403, "end": 6643 }
public class ____ : IPost, IReturn<CancelRequestResponse>, IMeta { [DataMember(Order = 1)] public string Tag { get; set; } [DataMember(Order = 2)] public Dictionary<string, string> Meta { get; set; } } [DataContract]
CancelRequest
csharp
dotnet__efcore
test/EFCore.Specification.Tests/Query/AdHocNavigationsQueryTestBase.cs
{ "start": 24502, "end": 26103 }
public class ____ : PrimarySchool; } #endregion #region 12456 [ConditionalFact] public virtual async Task Let_multiple_references_with_reference_to_outer() { var contextFactory = await InitializeAsync<Context12456>(); using (var context = contextFactory.CreateContext()) { var users = (from a in context.Activities let cs = context.CompetitionSeasons.First(s => s.StartDate <= a.DateTime && a.DateTime < s.EndDate) select new { cs.Id, Points = a.ActivityType.Points.Where(p => p.CompetitionSeason == cs) }).ToList(); } using (var context = contextFactory.CreateContext()) { var users = context.Activities .Select(a => new { Activity = a, CompetitionSeason = context.CompetitionSeasons .First(s => s.StartDate <= a.DateTime && a.DateTime < s.EndDate) }) .Select(a => new { a.Activity, CompetitionSeasonId = a.CompetitionSeason.Id, Points = a.Activity.Points ?? a.Activity.ActivityType.Points .Where(p => p.CompetitionSeason == a.CompetitionSeason) .Select(p => p.Points).SingleOrDefault() }).ToList(); } } // Protected so that it can be used by inheriting tests, and so that things like unused setters are not removed.
ElementarySchool
csharp
icsharpcode__ILSpy
ICSharpCode.ILSpyX/FileLoaders/LoadResult.cs
{ "start": 1258, "end": 1503 }
public sealed class ____ { public MetadataFile? MetadataFile { get; init; } public Exception? FileLoadException { get; init; } public LoadedPackage? Package { get; init; } public bool IsSuccess => FileLoadException == null; }
LoadResult
csharp
dotnet__reactive
Rx.NET/Source/tests/Tests.System.Reactive/Tests/Linq/ObservableRemotingTest.cs
{ "start": 1569, "end": 6038 }
private class ____ : ILease { public TimeSpan CurrentLeaseTime { get { throw new NotImplementedException(); } } public LeaseState CurrentState { get { throw new NotImplementedException(); } } public TimeSpan InitialLeaseTime { get { throw new NotImplementedException(); } set { throw new NotImplementedException(); } } public void Register(ISponsor obj) { throw new NotImplementedException(); } public void Register(ISponsor obj, TimeSpan renewalTime) { throw new NotImplementedException(); } public TimeSpan Renew(TimeSpan renewalTime) { throw new NotImplementedException(); } public TimeSpan RenewOnCallTime { get { throw new NotImplementedException(); } set { throw new NotImplementedException(); } } public TimeSpan SponsorshipTimeout { get { throw new NotImplementedException(); } set { throw new NotImplementedException(); } } public void Unregister(ISponsor obj) { throw new NotImplementedException(); } } [TestMethod] public void Remotable_Empty() { var evt = new ManualResetEvent(false); var e = GetRemoteObservable(t => t.Empty()); using (e.Subscribe(_ => { Assert.True(false); }, _ => { Assert.True(false); }, () => { evt.Set(); })) { evt.WaitOne(); } } [TestMethod] public void Remotable_Return() { var evt = new ManualResetEvent(false); var next = false; var e = GetRemoteObservable(t => t.Return(42)); using (e.Subscribe(value => { next = true; Assert.Equal(42, value); }, _ => { Assert.True(false); }, () => { evt.Set(); })) { evt.WaitOne(); Assert.True(next); } } [TestMethod] public void Remotable_Return_LongLease() { var evt = new ManualResetEvent(false); var next = false; var e = GetRemoteObservable(t => t.ReturnLongLease(42)); using (e.Subscribe(value => { next = true; Assert.Equal(42, value); }, _ => { Assert.True(false); }, () => { evt.Set(); })) { evt.WaitOne(); Assert.True(next); } } [TestMethod] public void Remotable_Throw() { var ex = new InvalidOperationException("Oops!"); var evt = new ManualResetEvent(false); var error = false; var e = GetRemoteObservable(t => 
t.Throw(ex)); using (e.Subscribe(value => { Assert.True(false); }, err => { error = true; Assert.True(err is InvalidOperationException && err.Message == ex.Message); evt.Set(); }, () => { Assert.True(false); })) { evt.WaitOne(); Assert.True(error); } } [TestMethod] public void Remotable_Disposal() { var test = GetRemoteTestObject(); test.Disposal().Subscribe().Dispose(); Assert.True(test.Disposed); } private IObservable<int> GetRemoteObservable(Func<RemotingTest, IObservable<int>> f) { var test = GetRemoteTestObject(); return f(test); } private RemotingTest GetRemoteTestObject() { var ads = new AppDomainSetup { ApplicationBase = AppDomain.CurrentDomain.BaseDirectory }; var ad = AppDomain.CreateDomain("test", null, ads); var test = (RemotingTest)ad.CreateInstanceAndUnwrap(Assembly.GetExecutingAssembly().FullName, "ReactiveTests.Tests.RemotingTest"); return test; } }
MyLease
csharp
dotnet__BenchmarkDotNet
tests/BenchmarkDotNet.IntegrationTests/ParamSourceTests.cs
{ "start": 6797, "end": 7021 }
public class ____ { public int Data { get; } public TargetType(int data) => Data = data; public override string ToString() => "target " + Data.ToString(); }
TargetType
csharp
dotnet__reactive
Rx.NET/Source/src/System.Reactive/ObserverBase.cs
{ "start": 298, "end": 415 }
class ____ implementations of the <see cref="IObserver{T}"/> interface. /// </summary> /// <remarks>This base
for
csharp
PrismLibrary__Prism
e2e/Maui/PrismMauiDemo/Views/MainPage.xaml.cs
{ "start": 32, "end": 143 }
public partial class ____ : FlyoutPage { public MainPage() { InitializeComponent(); } }
MainPage
csharp
unoplatform__uno
src/Uno.UWP/Generated/3.0.0.0/Windows.UI.Input.Inking/InkDrawingAttributes.cs
{ "start": 298, "end": 11036 }
public partial class ____ { #if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__ [global::Uno.NotImplemented("__ANDROID__", "__IOS__", "__TVOS__", "IS_UNIT_TESTS", "__WASM__", "__SKIA__", "__NETSTD_REFERENCE__")] public global::Windows.Foundation.Size Size { get { throw new global::System.NotImplementedException("The member Size InkDrawingAttributes.Size is not implemented. For more information, visit https://aka.platform.uno/notimplemented#m=Size%20InkDrawingAttributes.Size"); } set { global::Windows.Foundation.Metadata.ApiInformation.TryRaiseNotImplemented("Windows.UI.Input.Inking.InkDrawingAttributes", "Size InkDrawingAttributes.Size"); } } #endif #if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__ [global::Uno.NotImplemented("__ANDROID__", "__IOS__", "__TVOS__", "IS_UNIT_TESTS", "__WASM__", "__SKIA__", "__NETSTD_REFERENCE__")] public global::Windows.UI.Input.Inking.PenTipShape PenTip { get { throw new global::System.NotImplementedException("The member PenTipShape InkDrawingAttributes.PenTip is not implemented. For more information, visit https://aka.platform.uno/notimplemented#m=PenTipShape%20InkDrawingAttributes.PenTip"); } set { global::Windows.Foundation.Metadata.ApiInformation.TryRaiseNotImplemented("Windows.UI.Input.Inking.InkDrawingAttributes", "PenTipShape InkDrawingAttributes.PenTip"); } } #endif #if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__ [global::Uno.NotImplemented("__ANDROID__", "__IOS__", "__TVOS__", "IS_UNIT_TESTS", "__WASM__", "__SKIA__", "__NETSTD_REFERENCE__")] public bool IgnorePressure { get { throw new global::System.NotImplementedException("The member bool InkDrawingAttributes.IgnorePressure is not implemented. 
For more information, visit https://aka.platform.uno/notimplemented#m=bool%20InkDrawingAttributes.IgnorePressure"); } set { global::Windows.Foundation.Metadata.ApiInformation.TryRaiseNotImplemented("Windows.UI.Input.Inking.InkDrawingAttributes", "bool InkDrawingAttributes.IgnorePressure"); } } #endif #if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__ [global::Uno.NotImplemented("__ANDROID__", "__IOS__", "__TVOS__", "IS_UNIT_TESTS", "__WASM__", "__SKIA__", "__NETSTD_REFERENCE__")] public bool FitToCurve { get { throw new global::System.NotImplementedException("The member bool InkDrawingAttributes.FitToCurve is not implemented. For more information, visit https://aka.platform.uno/notimplemented#m=bool%20InkDrawingAttributes.FitToCurve"); } set { global::Windows.Foundation.Metadata.ApiInformation.TryRaiseNotImplemented("Windows.UI.Input.Inking.InkDrawingAttributes", "bool InkDrawingAttributes.FitToCurve"); } } #endif #if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__ [global::Uno.NotImplemented("__ANDROID__", "__IOS__", "__TVOS__", "IS_UNIT_TESTS", "__WASM__", "__SKIA__", "__NETSTD_REFERENCE__")] public global::Windows.UI.Color Color { get { throw new global::System.NotImplementedException("The member Color InkDrawingAttributes.Color is not implemented. 
For more information, visit https://aka.platform.uno/notimplemented#m=Color%20InkDrawingAttributes.Color"); } set { global::Windows.Foundation.Metadata.ApiInformation.TryRaiseNotImplemented("Windows.UI.Input.Inking.InkDrawingAttributes", "Color InkDrawingAttributes.Color"); } } #endif #if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__ [global::Uno.NotImplemented("__ANDROID__", "__IOS__", "__TVOS__", "IS_UNIT_TESTS", "__WASM__", "__SKIA__", "__NETSTD_REFERENCE__")] public global::System.Numerics.Matrix3x2 PenTipTransform { get { throw new global::System.NotImplementedException("The member Matrix3x2 InkDrawingAttributes.PenTipTransform is not implemented. For more information, visit https://aka.platform.uno/notimplemented#m=Matrix3x2%20InkDrawingAttributes.PenTipTransform"); } set { global::Windows.Foundation.Metadata.ApiInformation.TryRaiseNotImplemented("Windows.UI.Input.Inking.InkDrawingAttributes", "Matrix3x2 InkDrawingAttributes.PenTipTransform"); } } #endif #if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__ [global::Uno.NotImplemented("__ANDROID__", "__IOS__", "__TVOS__", "IS_UNIT_TESTS", "__WASM__", "__SKIA__", "__NETSTD_REFERENCE__")] public bool DrawAsHighlighter { get { throw new global::System.NotImplementedException("The member bool InkDrawingAttributes.DrawAsHighlighter is not implemented. 
For more information, visit https://aka.platform.uno/notimplemented#m=bool%20InkDrawingAttributes.DrawAsHighlighter"); } set { global::Windows.Foundation.Metadata.ApiInformation.TryRaiseNotImplemented("Windows.UI.Input.Inking.InkDrawingAttributes", "bool InkDrawingAttributes.DrawAsHighlighter"); } } #endif #if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__ [global::Uno.NotImplemented("__ANDROID__", "__IOS__", "__TVOS__", "IS_UNIT_TESTS", "__WASM__", "__SKIA__", "__NETSTD_REFERENCE__")] public global::Windows.UI.Input.Inking.InkDrawingAttributesKind Kind { get { throw new global::System.NotImplementedException("The member InkDrawingAttributesKind InkDrawingAttributes.Kind is not implemented. For more information, visit https://aka.platform.uno/notimplemented#m=InkDrawingAttributesKind%20InkDrawingAttributes.Kind"); } } #endif #if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__ [global::Uno.NotImplemented("__ANDROID__", "__IOS__", "__TVOS__", "IS_UNIT_TESTS", "__WASM__", "__SKIA__", "__NETSTD_REFERENCE__")] public global::Windows.UI.Input.Inking.InkDrawingAttributesPencilProperties PencilProperties { get { throw new global::System.NotImplementedException("The member InkDrawingAttributesPencilProperties InkDrawingAttributes.PencilProperties is not implemented. For more information, visit https://aka.platform.uno/notimplemented#m=InkDrawingAttributesPencilProperties%20InkDrawingAttributes.PencilProperties"); } } #endif #if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__ [global::Uno.NotImplemented("__ANDROID__", "__IOS__", "__TVOS__", "IS_UNIT_TESTS", "__WASM__", "__SKIA__", "__NETSTD_REFERENCE__")] public bool IgnoreTilt { get { throw new global::System.NotImplementedException("The member bool InkDrawingAttributes.IgnoreTilt is not implemented. 
For more information, visit https://aka.platform.uno/notimplemented#m=bool%20InkDrawingAttributes.IgnoreTilt"); } set { global::Windows.Foundation.Metadata.ApiInformation.TryRaiseNotImplemented("Windows.UI.Input.Inking.InkDrawingAttributes", "bool InkDrawingAttributes.IgnoreTilt"); } } #endif #if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__ [global::Uno.NotImplemented("__ANDROID__", "__IOS__", "__TVOS__", "IS_UNIT_TESTS", "__WASM__", "__SKIA__", "__NETSTD_REFERENCE__")] public global::Windows.UI.Input.Inking.InkModelerAttributes ModelerAttributes { get { throw new global::System.NotImplementedException("The member InkModelerAttributes InkDrawingAttributes.ModelerAttributes is not implemented. For more information, visit https://aka.platform.uno/notimplemented#m=InkModelerAttributes%20InkDrawingAttributes.ModelerAttributes"); } } #endif #if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__ [global::Uno.NotImplemented("__ANDROID__", "__IOS__", "__TVOS__", "IS_UNIT_TESTS", "__WASM__", "__SKIA__", "__NETSTD_REFERENCE__")] public InkDrawingAttributes() { global::Windows.Foundation.Metadata.ApiInformation.TryRaiseNotImplemented("Windows.UI.Input.Inking.InkDrawingAttributes", "InkDrawingAttributes.InkDrawingAttributes()"); } #endif // Forced skipping of method Windows.UI.Input.Inking.InkDrawingAttributes.InkDrawingAttributes() // Forced skipping of method Windows.UI.Input.Inking.InkDrawingAttributes.Color.get // Forced skipping of method Windows.UI.Input.Inking.InkDrawingAttributes.Color.set // Forced skipping of method Windows.UI.Input.Inking.InkDrawingAttributes.PenTip.get // Forced skipping of method Windows.UI.Input.Inking.InkDrawingAttributes.PenTip.set // Forced skipping of method Windows.UI.Input.Inking.InkDrawingAttributes.Size.get // Forced skipping of method Windows.UI.Input.Inking.InkDrawingAttributes.Size.set // Forced skipping of method 
Windows.UI.Input.Inking.InkDrawingAttributes.IgnorePressure.get // Forced skipping of method Windows.UI.Input.Inking.InkDrawingAttributes.IgnorePressure.set // Forced skipping of method Windows.UI.Input.Inking.InkDrawingAttributes.FitToCurve.get // Forced skipping of method Windows.UI.Input.Inking.InkDrawingAttributes.FitToCurve.set // Forced skipping of method Windows.UI.Input.Inking.InkDrawingAttributes.PenTipTransform.get // Forced skipping of method Windows.UI.Input.Inking.InkDrawingAttributes.PenTipTransform.set // Forced skipping of method Windows.UI.Input.Inking.InkDrawingAttributes.DrawAsHighlighter.get // Forced skipping of method Windows.UI.Input.Inking.InkDrawingAttributes.DrawAsHighlighter.set // Forced skipping of method Windows.UI.Input.Inking.InkDrawingAttributes.Kind.get // Forced skipping of method Windows.UI.Input.Inking.InkDrawingAttributes.PencilProperties.get // Forced skipping of method Windows.UI.Input.Inking.InkDrawingAttributes.IgnoreTilt.get // Forced skipping of method Windows.UI.Input.Inking.InkDrawingAttributes.IgnoreTilt.set // Forced skipping of method Windows.UI.Input.Inking.InkDrawingAttributes.ModelerAttributes.get #if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__ [global::Uno.NotImplemented("__ANDROID__", "__IOS__", "__TVOS__", "IS_UNIT_TESTS", "__WASM__", "__SKIA__", "__NETSTD_REFERENCE__")] public static global::Windows.UI.Input.Inking.InkDrawingAttributes CreateForPencil() { throw new global::System.NotImplementedException("The member InkDrawingAttributes InkDrawingAttributes.CreateForPencil() is not implemented. For more information, visit https://aka.platform.uno/notimplemented#m=InkDrawingAttributes%20InkDrawingAttributes.CreateForPencil%28%29"); } #endif } }
InkDrawingAttributes
csharp
graphql-dotnet__graphql-dotnet
src/GraphQL/Validation/Errors/UniqueArgumentNamesError.cs
{ "start": 130, "end": 700 }
public class ____ : ValidationError { internal const string NUMBER = "5.4.2"; /// <summary> /// Initializes a new instance with the specified properties. /// </summary> public UniqueArgumentNamesError(ValidationContext context, GraphQLArgument node, GraphQLArgument otherNode) : base(context.Document.Source, NUMBER, DuplicateArgMessage(node.Name.StringValue), node, otherNode) { } internal static string DuplicateArgMessage(string argName) => $"There can be only one argument named '{argName}'."; }
UniqueArgumentNamesError
csharp
getsentry__sentry-dotnet
test/Sentry.Serilog.Tests/SentrySinkTests.cs
{ "start": 3790, "end": 12206 }
internal class ____ : IEnumerable<object[]> { public IEnumerator<object[]> GetEnumerator() { yield return new object[] { LogEventLevel.Debug, SentryLevel.Debug }; yield return new object[] { LogEventLevel.Verbose, SentryLevel.Debug }; yield return new object[] { LogEventLevel.Information, SentryLevel.Info }; yield return new object[] { LogEventLevel.Warning, SentryLevel.Warning }; yield return new object[] { LogEventLevel.Error, SentryLevel.Error }; yield return new object[] { LogEventLevel.Fatal, SentryLevel.Fatal }; } IEnumerator IEnumerable.GetEnumerator() => GetEnumerator(); } [Theory] [ClassData(typeof(EventLogLevelsData))] public void Emit_LoggerLevel_Set(LogEventLevel serilogLevel, SentryLevel? sentryLevel) { // Make sure test cases are not filtered out by the default min levels: _fixture.Options.MinimumEventLevel = LogEventLevel.Verbose; _fixture.Options.MinimumBreadcrumbLevel = LogEventLevel.Verbose; var sut = _fixture.GetSut(); var evt = new LogEvent(DateTimeOffset.UtcNow, serilogLevel, null, MessageTemplate.Empty, Enumerable.Empty<LogEventProperty>()); sut.Emit(evt); _fixture.Hub.Received(1) .CaptureEvent(Arg.Is<SentryEvent>(e => e.Level == sentryLevel)); } [Fact] public void Emit_RenderedMessage_Set() { const string expected = "message"; var sut = _fixture.GetSut(); var evt = new LogEvent(DateTimeOffset.UtcNow, LogEventLevel.Error, null, new MessageTemplateParser().Parse(expected), Enumerable.Empty<LogEventProperty>()); sut.Emit(evt); _fixture.Hub.Received(1) .CaptureEvent(Arg.Is<SentryEvent>(e => e.Message.Formatted == expected)); } [Fact] public void Emit_HubAccessorReturnsNull_DoesNotThrow() { _fixture.HubAccessor = () => null; var sut = _fixture.GetSut(); var evt = new LogEvent(DateTimeOffset.UtcNow, LogEventLevel.Error, null, MessageTemplate.Empty, Enumerable.Empty<LogEventProperty>()); sut.Emit(evt); } [Fact] public void Emit_DisabledHub_CaptureNotCalled() { _fixture.Hub.IsEnabled.Returns(false); var sut = _fixture.GetSut(); var evt = new 
LogEvent(DateTimeOffset.UtcNow, LogEventLevel.Error, null, MessageTemplate.Empty, Enumerable.Empty<LogEventProperty>()); sut.Emit(evt); _fixture.Hub.DidNotReceive().CaptureEvent(Arg.Any<SentryEvent>()); } [Fact] public void Emit_EnabledHub_CaptureCalled() { _fixture.Hub.IsEnabled.Returns(true); var sut = _fixture.GetSut(); var evt = new LogEvent(DateTimeOffset.UtcNow, LogEventLevel.Error, null, MessageTemplate.Empty, Enumerable.Empty<LogEventProperty>()); sut.Emit(evt); _fixture.Hub.Received(1).CaptureEvent(Arg.Any<SentryEvent>()); } [Fact] public void Emit_Properties_AsExtra() { const string expectedIp = "127.0.0.1"; var sut = _fixture.GetSut(); var evt = new LogEvent(DateTimeOffset.UtcNow, LogEventLevel.Error, null, MessageTemplate.Empty, new[] { new LogEventProperty("IPAddress", new ScalarValue(expectedIp)) }); sut.Emit(evt); _fixture.Hub.Received(1) .CaptureEvent(Arg.Is<SentryEvent>(e => e.Extra["IPAddress"].ToString() == expectedIp)); } [Fact] public void Close_DisposesSdk() { var sut = _fixture.GetSut(); var evt = new LogEvent(DateTimeOffset.UtcNow, LogEventLevel.Error, null, MessageTemplate.Empty, Enumerable.Empty<LogEventProperty>()); sut.Emit(evt); _fixture.SdkDisposeHandle.DidNotReceive().Dispose(); sut.Dispose(); _fixture.SdkDisposeHandle.Received(1).Dispose(); } [Fact] public void Close_NoDisposeHandleProvided_DoesNotThrow() { _fixture.SdkDisposeHandle = null; var sut = _fixture.GetSut(); sut.Dispose(); } [Fact] public void Emit_WithFormat_EventCaptured() { const string expectedMessage = "Test {structured} log"; const int param = 10; var sut = _fixture.GetSut(); var evt = new LogEvent(DateTimeOffset.UtcNow, LogEventLevel.Error, null, new MessageTemplateParser().Parse(expectedMessage), new[] { new LogEventProperty("structured", new ScalarValue(param)) }); sut.Emit(evt); _fixture.Hub.Received(1).CaptureEvent(Arg.Is<SentryEvent>(p => p.Message.Formatted == $"Test {param} log" && p.Message.Message == expectedMessage)); } [Fact] public void 
Emit_WithTextFormatter_EventCaptured() { const string expectedMessage = "Test log with formatter"; const int param = 10; // Use custom TextFormatter _fixture.Options.TextFormatter = new MessageTemplateTextFormatter("[{structured}] {Message}"); var sut = _fixture.GetSut(); var evt = new LogEvent(DateTimeOffset.UtcNow, LogEventLevel.Error, null, new MessageTemplateParser().Parse(expectedMessage), new[] { new LogEventProperty("structured", new ScalarValue(param)) }); sut.Emit(evt); _fixture.Hub.Received(1).CaptureEvent(Arg.Is<SentryEvent>(p => p.Message.Formatted == $"[{param}] Test log with formatter" && p.Message.Message == expectedMessage)); } [Fact] public void Emit_SourceContextMatchesSentry_NoEventSent() { var sut = _fixture.GetSut(); var evt = new LogEvent(DateTimeOffset.UtcNow, LogEventLevel.Error, null, new MessageTemplateParser().Parse("message"), new[] { new LogEventProperty("SourceContext", new ScalarValue("Sentry.Serilog")) }); sut.Emit(evt); _fixture.Hub.DidNotReceive().CaptureEvent(Arg.Any<SentryEvent>()); } [Fact] public void Emit_SourceContextContainsSentry_NoEventSent() { var sut = _fixture.GetSut(); var evt = new LogEvent(DateTimeOffset.UtcNow, LogEventLevel.Error, null, new MessageTemplateParser().Parse("message"), new[] { new LogEventProperty("SourceContext", new ScalarValue("Sentry")) }); sut.Emit(evt); _fixture.Hub.DidNotReceive().CaptureEvent(Arg.Any<SentryEvent>()); } [Fact] public void Emit_SourceContextMatchesSentry_NoScopeConfigured() { var sut = _fixture.GetSut(); var evt = new LogEvent(DateTimeOffset.UtcNow, LogEventLevel.Error, null, new MessageTemplateParser().Parse("message"), new[] { new LogEventProperty("SourceContext", new ScalarValue("Sentry.Serilog")) }); sut.Emit(evt); _fixture.Hub.DidNotReceive().ConfigureScope(Arg.Any<Action<Scope>>()); } [Fact] public void Emit_SourceContextContainsSentry_NoScopeConfigured() { var sut = _fixture.GetSut(); var evt = new LogEvent(DateTimeOffset.UtcNow, LogEventLevel.Error, null, new 
MessageTemplateParser().Parse("message"), new[] { new LogEventProperty("SourceContext", new ScalarValue("Sentry")) }); sut.Emit(evt); _fixture.Hub.DidNotReceive().ConfigureScope(Arg.Any<Action<Scope>>()); } [Fact] public void Emit_WithSourceContext_LoggerNameEquals() { var sut = _fixture.GetSut(); const string expectedLogger = "LoggerName"; var evt = new LogEvent(DateTimeOffset.UtcNow, LogEventLevel.Error, null, new MessageTemplateParser().Parse("message"), new[] { new LogEventProperty("SourceContext", new ScalarValue(expectedLogger)) }); sut.Emit(evt); _fixture.Hub.Received(1).CaptureEvent(Arg.Is<SentryEvent>(p => p.Logger == expectedLogger)); } [Fact] public void Emit_NoSourceContext_LoggerNameNull() { var sut = _fixture.GetSut(); var evt = new LogEvent(DateTimeOffset.UtcNow, LogEventLevel.Error, null, new MessageTemplateParser().Parse("message"), new LogEventProperty[0]); sut.Emit(evt); _fixture.Hub.Received(1).CaptureEvent(Arg.Is<SentryEvent>(p => p.Logger == null)); } }
EventLogLevelsData
csharp
abpframework__abp
framework/src/Volo.Abp.Auditing/Volo/Abp/Auditing/IAuditingManager.cs
{ "start": 31, "end": 145 }
public interface ____ { IAuditLogScope? Current { get; } IAuditLogSaveHandle BeginScope(); }
IAuditingManager
csharp
graphql-dotnet__graphql-dotnet
src/GraphQL.Analyzers.Tests/FieldBuilderAnalyzerTests.cs
{ "start": 10463, "end": 11399 }
public class ____ : InputObjectGraphType { public MyGraphType() { {|#0:Field<StringGraphType>( "name" /* a */, // b // c arguments: new QueryArguments( new QueryArgument<StringGraphType> { Name = "argName1" }, new QueryArgument<StringGraphType> { Name = "argName2" } ), // d description: "desc" )|}; // e } } """; // NOTE: line break before 'Description' shouldn't appear, // but I have no idea where it comes from... // At least comments are preserved const string fix = """ using GraphQL.Types; namespace Sample.Server;
MyGraphType
csharp
nunit__nunit
src/NUnitFramework/framework/Attributes/RetryAttribute.cs
{ "start": 1757, "end": 5595 }
public class ____ : DelegatingTestCommand { private readonly int _tryCount; private readonly Type[] _retryExceptions; /// <summary> /// Initializes a new instance of the <see cref="RetryCommand"/> class. /// </summary> /// <param name="innerCommand">The inner command.</param> /// <param name="tryCount">The maximum number of repetitions.</param> public RetryCommand(TestCommand innerCommand, int tryCount) : this(innerCommand, tryCount, Array.Empty<Type>()) { } /// <summary> /// Initializes a new instance of the <see cref="RetryCommand"/> class. /// </summary> /// <param name="innerCommand">The inner command.</param> /// <param name="tryCount">The maximum number of repetitions.</param> /// <param name="retryExceptions">A list of exception types, that trigger a retry when thrown.</param> public RetryCommand(TestCommand innerCommand, int tryCount, Type[] retryExceptions) : base(innerCommand) { _tryCount = tryCount; _retryExceptions = retryExceptions; } /// <summary> /// Runs the test, saving a TestResult in the supplied TestExecutionContext. /// </summary> /// <param name="context">The context in which the test should run.</param> /// <returns>A TestResult</returns> public override TestResult Execute(TestExecutionContext context) { int count = _tryCount; while (count-- > 0) { Exception? 
caughtException = null; try { context.CurrentResult = innerCommand.Execute(context); } catch (Exception ex) { caughtException = ex.Unwrap(); if (context.CurrentResult is null) context.CurrentResult = context.CurrentTest.MakeTestResult(); } if (caughtException is not null and not ResultStateException) { if (count == 0 || IsRetryException(caughtException) is false) { context.CurrentResult.RecordException(caughtException); break; } } if (context.CurrentResult.ResultState != ResultState.Failure && IsRetryException(context.CurrentResult.RecordedException) is false) { break; } // Clear result for retry if (count > 0) { context.CurrentResult = context.CurrentTest.MakeTestResult(); context.CurrentRepeatCount++; // increment Retry count for next iteration. will only happen if we are guaranteed another iteration } } return context.CurrentResult; } private bool IsRetryException(Exception? ex) { if (ex is null) return false; Type exceptionType = ex.GetType(); foreach (var retryException in _retryExceptions) { if (retryException.IsAssignableFrom(exceptionType)) { return true; } } return false; } } #endregion } }
RetryCommand
csharp
RicoSuter__NJsonSchema
src/NJsonSchema.CodeGeneration.TypeScript/DataConversionGenerator.cs
{ "start": 1142, "end": 12011 }
class ____.</summary> /// <returns>The generated code.</returns> public static string RenderConvertToClassCode(DataConversionParameters parameters) { var model = CreateModel(parameters); var template = parameters.Settings.TemplateFactory.CreateTemplate("TypeScript", "ConvertToClass", model); return template.Render(); } private static object CreateModel(DataConversionParameters parameters) { var type = parameters.Resolver.Resolve(parameters.Schema, parameters.IsPropertyNullable, parameters.TypeNameHint); var valueGenerator = parameters.Settings.ValueGenerator; JsonSchema typeSchema = parameters.Schema.ActualTypeSchema; var dictionaryValueType = typeSchema.AdditionalPropertiesSchema != null ? parameters.Resolver.TryResolve(typeSchema.AdditionalPropertiesSchema, parameters.TypeNameHint) ?? "any" : null; var dictionaryValueDefaultValue = typeSchema.AdditionalPropertiesSchema != null && dictionaryValueType != null ? valueGenerator.GetDefaultValue(typeSchema.AdditionalPropertiesSchema, typeSchema.AdditionalPropertiesSchema.IsNullable(parameters.Settings.SchemaType), dictionaryValueType, parameters.TypeNameHint, parameters.Settings.GenerateDefaultValues, parameters.Resolver) : null; return new { NullValue = parameters.NullValue.ToString().ToLowerInvariant(), Variable = parameters.Variable, Value = parameters.Value, HasDefaultValue = valueGenerator.GetDefaultValue(parameters.Schema, parameters.IsPropertyNullable, type, parameters.TypeNameHint, parameters.Settings.GenerateDefaultValues, parameters.Resolver) != null, DefaultValue = valueGenerator.GetDefaultValue(parameters.Schema, parameters.IsPropertyNullable, type, parameters.TypeNameHint, parameters.Settings.GenerateDefaultValues, parameters.Resolver), Type = type, CheckNewableObject = parameters.CheckNewableObject, IsNewableObject = IsNewableObject(parameters.Schema, parameters), IsDate = IsDate(typeSchema.Format, parameters.Settings.DateTimeType), IsDateTime = IsDateTime(typeSchema.Format, 
parameters.Settings.DateTimeType), // Dictionary IsDictionary = typeSchema.IsDictionary, DictionaryValueType = dictionaryValueType, DictionaryValueDefaultValue = dictionaryValueDefaultValue, HasDictionaryValueDefaultValue = dictionaryValueDefaultValue != null, IsDictionaryValueNewableObject = typeSchema.AdditionalPropertiesSchema != null && IsNewableObject(typeSchema.AdditionalPropertiesSchema, parameters), IsDictionaryValueDate = IsDate(typeSchema.AdditionalPropertiesSchema?.ActualSchema?.Format, parameters.Settings.DateTimeType), IsDictionaryValueDateTime = IsDateTime(typeSchema.AdditionalPropertiesSchema?.ActualSchema?.Format, parameters.Settings.DateTimeType), IsDictionaryValueNewableArray = typeSchema.AdditionalPropertiesSchema?.ActualSchema?.IsArray == true && typeSchema.AdditionalPropertiesSchema.Item != null && IsNewableObject(typeSchema.AdditionalPropertiesSchema.Item, parameters), DictionaryValueArrayItemType = typeSchema.AdditionalPropertiesSchema?.ActualSchema?.IsArray == true ? parameters.Resolver.TryResolve(typeSchema.AdditionalPropertiesSchema.Item, "Anonymous") ?? "any" : "any", // Array IsArray = typeSchema.IsArray, ArrayItemType = parameters.Resolver.TryResolve(typeSchema.Item, parameters.TypeNameHint) ?? "any", IsArrayItemNewableObject = typeSchema.Item != null && IsNewableObject(typeSchema.Item, parameters), IsArrayItemDate = IsDate(typeSchema.Item?.Format, parameters.Settings.DateTimeType), IsArrayItemDateTime = IsDateTime(typeSchema.Item?.Format, parameters.Settings.DateTimeType), // Dates //StringToDateCode is used for date and date-time formats UseJsDate = parameters.Settings.DateTimeType == TypeScriptDateTimeType.Date, StringToDateCode = GetStringToDateTime(parameters, typeSchema), StringToDateOnlyCode = parameters.Settings.DateTimeType == TypeScriptDateTimeType.Date && parameters.Settings.ConvertDateToLocalTimezone ? 
"parseDateOnly" : GetStringToDateTime(parameters, typeSchema), DateToStringCode = GetDateToString(parameters, typeSchema), DateTimeToStringCode = GetDateTimeToString(parameters, typeSchema), HandleReferences = parameters.Settings.HandleReferences }; } private static string GetStringToDateTime(DataConversionParameters parameters, JsonSchema typeSchema) { switch (parameters.Settings.DateTimeType) { case TypeScriptDateTimeType.Date: return "new Date"; case TypeScriptDateTimeType.MomentJS: case TypeScriptDateTimeType.OffsetMomentJS: if (typeSchema.Format is JsonFormatStrings.Duration or JsonFormatStrings.TimeSpan) { return "moment.duration"; } if (parameters.Settings.DateTimeType == TypeScriptDateTimeType.OffsetMomentJS) { return "moment.parseZone"; } return "moment"; case TypeScriptDateTimeType.String: return ""; case TypeScriptDateTimeType.Luxon: if (typeSchema.Format is JsonFormatStrings.Duration or JsonFormatStrings.TimeSpan) { return "Duration.fromISO"; } return "DateTime.fromISO"; case TypeScriptDateTimeType.DayJS: return "dayjs"; default: throw new ArgumentOutOfRangeException(nameof(parameters)); } } private static string GetDateToString(DataConversionParameters parameters, JsonSchema typeSchema) { switch (parameters.Settings.DateTimeType) { case TypeScriptDateTimeType.Date: case TypeScriptDateTimeType.String: return ""; case TypeScriptDateTimeType.MomentJS: case TypeScriptDateTimeType.OffsetMomentJS: case TypeScriptDateTimeType.DayJS: return "format('YYYY-MM-DD')"; case TypeScriptDateTimeType.Luxon: return "toFormat('yyyy-MM-dd')"; default: throw new ArgumentOutOfRangeException(nameof(parameters)); } } private static string GetDateTimeToString(DataConversionParameters parameters, JsonSchema typeSchema) { switch (parameters.Settings.DateTimeType) { case TypeScriptDateTimeType.Date: return "toISOString()"; case TypeScriptDateTimeType.MomentJS: case TypeScriptDateTimeType.OffsetMomentJS: if (typeSchema.Format is JsonFormatStrings.Duration or 
JsonFormatStrings.TimeSpan) { return "format('d.hh:mm:ss.SS', { trim: false })"; } if (parameters.Settings.DateTimeType == TypeScriptDateTimeType.OffsetMomentJS) { return "toISOString(true)"; } return "toISOString()"; case TypeScriptDateTimeType.String: return ""; case TypeScriptDateTimeType.Luxon: return "toString()"; case TypeScriptDateTimeType.DayJS: if (typeSchema.Format is JsonFormatStrings.Duration or JsonFormatStrings.TimeSpan) { return "format('d.hh:mm:ss.SSS')"; } return "toISOString()"; default: throw new ArgumentOutOfRangeException(nameof(parameters)); } } private static bool IsDateTime(string? format, TypeScriptDateTimeType type) { // TODO: Make this more generic (see TypeScriptTypeResolver.ResolveString) if (type == TypeScriptDateTimeType.Date) { return format == JsonFormatStrings.DateTime; } if (type is TypeScriptDateTimeType.DayJS or TypeScriptDateTimeType.MomentJS or TypeScriptDateTimeType.OffsetMomentJS) { return format is JsonFormatStrings.DateTime or JsonFormatStrings.Time or JsonFormatStrings.Duration or JsonFormatStrings.TimeSpan; } if (type == TypeScriptDateTimeType.Luxon) { return format is JsonFormatStrings.DateTime or JsonFormatStrings.Time or JsonFormatStrings.Duration or JsonFormatStrings.TimeSpan; } return false; } private static bool IsDate(string? format, TypeScriptDateTimeType type) { // TODO: Make this more generic (see TypeScriptTypeResolver.ResolveString) if (type == TypeScriptDateTimeType.Date) { return format == JsonFormatStrings.Date; } if (type is TypeScriptDateTimeType.DayJS or TypeScriptDateTimeType.MomentJS or TypeScriptDateTimeType.OffsetMomentJS) { return format == JsonFormatStrings.Date; } if (type == TypeScriptDateTimeType.Luxon) { return format == JsonFormatStrings.Date; } return false; } private static bool IsNewableObject(JsonSchema? 
schema, DataConversionParameters parameters) { if (schema != null) { if (schema.ActualTypeSchema.IsEnumeration) { return false; } return parameters.Resolver.GeneratesType(schema); } return false; } } }
instances
csharp
EventStore__EventStore
src/KurrentDB.Transport.Http/EntityManagement/HttpEntityManagerExtensions.cs
{ "start": 315, "end": 2096 }
public static class ____ { public static void ReplyStatus(this HttpEntityManager self, int code, string description, Action<Exception> onError, IEnumerable<KeyValuePair<string, string>> headers = null) { self.Reply(null, code, description, null, null, null, onError); } public static void ReplyContent(this HttpEntityManager self, byte[] response, int code, string description, string type, IEnumerable<KeyValuePair<string, string>> headers, Action<Exception> onError) { self.Reply(response, code, description, type, null, headers, onError); } public static void ReplyTextContent(this HttpEntityManager self, string response, int code, string description, string type, IEnumerable<KeyValuePair<string, string>> headers, Action<Exception> onError) { //TODO: add encoding header??? self.Reply(Helper.UTF8NoBom.GetBytes(response ?? string.Empty), code, description, type, Helper.UTF8NoBom, headers, onError); } public static void ContinueReplyTextContent( this HttpEntityManager self, string response, Action<Exception> onError, Action completed) { //TODO: add encoding header??? var bytes = Helper.UTF8NoBom.GetBytes(response ?? string.Empty); self.ContinueReply(bytes, onError, completed); } public static void ReadTextRequestAsync( this HttpEntityManager self, Action<HttpEntityManager, string> onSuccess, Action<Exception> onError) { self.ReadRequestAsync( (manager, bytes) => { int offset = 0; // check for UTF-8 BOM (0xEF, 0xBB, 0xBF) and skip it safely, if any if (bytes.Length >= 3 && bytes[0] == 0xEF && bytes[1] == 0xBB && bytes[2] == 0xBF) offset = 3; onSuccess(manager, Helper.UTF8NoBom.GetString(bytes, offset, bytes.Length - offset)); }, onError); } }
HttpEntityManagerExtensions
csharp
mongodb__mongo-csharp-driver
src/MongoDB.Driver/Core/Servers/DefaultServer.cs
{ "start": 1041, "end": 13197 }
internal sealed class ____ : Server { #region static // static fields private static readonly List<Type> __invalidatingExceptions = new List<Type> { typeof(MongoConnectionException), typeof(SocketException), typeof(EndOfStreamException), typeof(IOException), }; #endregion private readonly ServerDescription _baseDescription; private volatile ServerDescription _currentDescription; private readonly IServerMonitor _monitor; public DefaultServer( ClusterId clusterId, IClusterClock clusterClock, bool directConnection, ServerSettings settings, EndPoint endPoint, IConnectionPoolFactory connectionPoolFactory, IServerMonitorFactory monitorFactory, ServerApi serverApi, EventLogger<LogCategories.SDAM> eventLogger) : base( clusterId, clusterClock, directConnection, settings, endPoint, connectionPoolFactory, serverApi, eventLogger) { _monitor = Ensure.IsNotNull(monitorFactory, nameof(monitorFactory)).Create(ServerId, endPoint); _baseDescription = _currentDescription = new ServerDescription(ServerId, endPoint, reasonChanged: "ServerInitialDescription", heartbeatInterval: settings.HeartbeatInterval); } // properties public override ServerDescription Description => _currentDescription; // public methods public override void RequestHeartbeat() { _monitor.RequestHeartbeat(); } // protected methods protected override void Dispose(bool disposing) { _monitor.Dispose(); _monitor.DescriptionChanged -= OnMonitorDescriptionChanged; } protected override void HandleBeforeHandshakeCompletesException(Exception ex) { if (ex is not MongoConnectionException connectionException) { // non connection exception return; } var (invalidateAndClear, cancelCheck) = ex switch { MongoAuthenticationException => (invalidateAndClear: true, cancelCheck: false), _ => (invalidateAndClear: connectionException.IsNetworkException || connectionException.ContainsTimeoutException, cancelCheck: connectionException.IsNetworkException && !connectionException.ContainsTimeoutException) }; if (invalidateAndClear) { lock 
(_monitor.Lock) { if (connectionException.Generation != null && connectionException.Generation != ConnectionPool.Generation) { // stale generation number return; } if (cancelCheck) { _monitor.CancelCurrentCheck(); } Invalidate($"ChannelException during handshake: {ex}.", clearConnectionPool: true, topologyVersion: null); } } } protected override void HandleAfterHandshakeCompletesException(IConnection connection, Exception ex) { lock (_monitor.Lock) { if (ex is MongoConnectionException mongoConnectionException) { if (mongoConnectionException.Generation != null && mongoConnectionException.Generation != ConnectionPool.Generation) { return; // stale generation number } if (mongoConnectionException.IsNetworkException && !mongoConnectionException.ContainsTimeoutException) { _monitor.CancelCurrentCheck(); } } var description = _currentDescription; if (ShouldInvalidateServer(connection, ex, description, out TopologyVersion responseTopologyVersion)) { var shouldClearConnectionPool = ShouldClearConnectionPoolForChannelException(ex, connection.Description.MaxWireVersion); Invalidate($"ChannelException:{ex}", shouldClearConnectionPool, responseTopologyVersion); } else { RequestHeartbeat(); } } } protected override void InitializeSubClass() { _monitor.DescriptionChanged += OnMonitorDescriptionChanged; _monitor.Initialize(); } protected override void Invalidate(string reasonInvalidated, bool clearConnectionPool, TopologyVersion topologyVersion) { var newDescription = _baseDescription.With( $"InvalidatedBecause:{reasonInvalidated}", lastUpdateTimestamp: DateTime.UtcNow, topologyVersion: topologyVersion); var (host, port) = ServerId.EndPoint.GetHostAndPort(); EventLogger.Logger?.LogDebug( StructuredLogTemplateProviders.ServerId_Message_Description, ServerId.ClusterId.Value, host, port, newDescription, "Invalidating description"); SetDescription(newDescription, clearConnectionPool); // TODO: make the heartbeat request conditional so we adhere to this part of the spec // > Network 
error when reading or writing: ... Clients MUST NOT request an immediate check of the server; // > since application sockets are used frequently, a network error likely means the server has just become // > unavailable, so an immediate refresh is likely to get a network error, too. RequestHeartbeat(); } // private methods private void OnMonitorDescriptionChanged(object sender, ServerDescriptionChangedEventArgs e) { var currentDescription = _currentDescription; var heartbeatException = e.NewServerDescription.HeartbeatException; // The heartbeat command is hello (or legacy hello). This command will throw a MongoCommandException on // {ok: 0}, but a reply (with a potential topologyVersion) will still have been received. // Not receiving a reply to the heartbeat commands implies a network error or a "HeartbeatFailed" type // exception (i.e. ServerDescription.WithHeartbeatException was called), in which case we should immediately // set the description to "Unknown"// (which is what e.NewServerDescription will be in such a case) var heartbeatReplyNotReceived = heartbeatException != null && !(heartbeatException is MongoCommandException); // We cannot use FresherThan(e.NewServerDescription.TopologyVersion, currentDescription.TopologyVersion) // because due to how TopologyVersions comparisons are defined, IsStalerThanOrEqualTo(x, y) does not imply // FresherThan(y, x) if (heartbeatReplyNotReceived || TopologyVersion.IsStalerThanOrEqualTo(currentDescription.TopologyVersion, e.NewServerDescription.TopologyVersion)) { SetDescription(e.NewServerDescription, forceClearConnectionPool: false); } } private void SetDescription(ServerDescription newDescription, bool forceClearConnectionPool) { // Current assumption is SetDescription is always synchronized under _monitor.Lock. // This synchronization technically can be violated by calling server.Invalidate not under _monitor.Lock. // Therefore _currentDescription and ConnectionPool state can get out of sync. 
var serverDescriptionChangedEvent = new ServerDescriptionChangedEventArgs(_currentDescription, newDescription); _currentDescription = newDescription; if (newDescription.HeartbeatException != null || forceClearConnectionPool) { // propagate event to upper levels TriggerServerDescriptionChanged(this, serverDescriptionChangedEvent); // pool must be cleared on after cluster update var closeInUseConnections = newDescription.HeartbeatException is MongoConnectionException mongoConnectionException && mongoConnectionException.ContainsTimeoutException; ConnectionPool.Clear(closeInUseConnections); } else { if (newDescription.IsDataBearing || (newDescription.Type != ServerType.Unknown && DirectConnection)) { // The spec requires to check (server.type != Unknown and newTopologyDescription.type == Single) // in C# driver servers in single topology will be only selectable if direct connection was requested // therefore it is sufficient to check whether the connection mode is directConnection. ConnectionPool.SetReady(); } // propagate event to upper levels TriggerServerDescriptionChanged(this, serverDescriptionChangedEvent); } } private bool ShouldInvalidateServer( IConnection connection, Exception exception, ServerDescription description, out TopologyVersion invalidatingResponseTopologyVersion) { if (exception is MongoConnectionException mongoConnectionException && mongoConnectionException.ContainsTimeoutException) { invalidatingResponseTopologyVersion = null; return false; } if (__invalidatingExceptions.Contains(exception.GetType())) { invalidatingResponseTopologyVersion = null; return true; } var exceptionsToCheck = new[] { exception as MongoCommandException, (exception as MongoWriteConcernException)?.MappedWriteConcernResultException } .OfType<MongoCommandException>(); foreach (MongoCommandException commandException in exceptionsToCheck) { if (IsStateChangeException(commandException)) { return !IsStaleStateChangeError(commandException.Result, out 
invalidatingResponseTopologyVersion); } } invalidatingResponseTopologyVersion = null; return false; bool IsStaleStateChangeError(BsonDocument response, out TopologyVersion nonStaleResponseTopologyVersion) { if (ConnectionPool.Generation > connection.Generation) { // stale generation number nonStaleResponseTopologyVersion = null; return true; } var responseTopologyVersion = TopologyVersion.FromMongoCommandResponse(response); // We use FresherThanOrEqualTo instead of FresherThan because a state change should come with a new // topology version. // We cannot use StalerThan(responseTopologyVersion, description.TopologyVersion) because due to how // TopologyVersions comparisons are defined, FresherThanOrEqualTo(x, y) does not imply StalerThan(y, x) bool isStale = TopologyVersion.IsFresherThanOrEqualTo(description.TopologyVersion, responseTopologyVersion); nonStaleResponseTopologyVersion = isStale ? null : responseTopologyVersion; return isStale; } } } }
DefaultServer
csharp
MassTransit__MassTransit
src/MassTransit/RetryPolicies/NoRetryContext.cs
{ "start": 91, "end": 631 }
public class ____<TContext> : BaseRetryContext<TContext>, RetryContext<TContext> where TContext : class, PipeContext { public NoRetryContext(TContext context, Exception exception, CancellationToken cancellationToken) : base(context, exception, 0, cancellationToken) { } bool RetryContext<TContext>.CanRetry(Exception exception, out RetryContext<TContext> retryContext) { retryContext = this; return false; } } }
NoRetryContext
csharp
CommunityToolkit__Maui
samples/CommunityToolkit.Maui.Sample/ViewModels/Behaviors/CharactersValidationBehaviorViewModel.cs
{ "start": 103, "end": 282 }
public partial class ____ : BaseViewModel { public IReadOnlyList<CharacterType> CharacterTypes { get; } = [.. Enum.GetValues<CharacterType>()]; }
CharactersValidationBehaviorViewModel
csharp
unoplatform__uno
src/Uno.UWP/Devices/Enumeration/DeviceInformation.static.cs
{ "start": 205, "end": 776 }
public partial class ____ { #if !__ANDROID__ && !__WASM__ && !__IOS__ private static readonly Dictionary<Guid, Func<IDeviceClassProvider>> _deviceClassProviders = new Dictionary<Guid, Func<IDeviceClassProvider>>(); #endif /// <summary> /// Creates a DeviceWatcher for devices matching /// the specified Advanced Query Syntax (AQS) string. /// </summary> /// <param name="aqsFilter">An AQS string that filters /// the DeviceInformation objects to enumerate. Typically /// this string is retrieved from the GetDeviceSelector /// method of a
DeviceInformation
csharp
abpframework__abp
framework/src/Volo.Abp.Cli.Core/Volo/Abp/Cli/ProjectBuilding/Templates/MicroserviceServiceStringEncryptionStep.cs
{ "start": 148, "end": 1735 }
public class ____ : RandomizeStringEncryptionStep { protected override string GetRandomPassPhrase(ProjectBuildContext context) { return FindDefaultPassPhrase(context) ?? base.GetRandomPassPhrase(context); } protected virtual string FindDefaultPassPhrase(ProjectBuildContext context) { var directoryInfo = new DirectoryInfo(context.BuildArgs.OutputFolder); do { var msSolution = Directory.GetFiles(directoryInfo.FullName, "*.sln", SearchOption.TopDirectoryOnly) .Concat(Directory.GetFiles(directoryInfo.FullName, "*.slnx", SearchOption.TopDirectoryOnly)).FirstOrDefault(); if (msSolution != null) { var appSettings = Directory.GetFiles(Path.Combine(directoryInfo.FullName, "apps", "auth-server"), "appsettings.json", SearchOption.AllDirectories).FirstOrDefault(); if (appSettings != null) { var file = File.ReadAllText(appSettings); const string searchText = "DefaultPassPhrase\": \""; var s = file.IndexOf(searchText, StringComparison.Ordinal) + searchText.Length; var e = file.IndexOf("\"", s, StringComparison.Ordinal); var defaultPassPhrase = file.Substring(s, e - s); return defaultPassPhrase; } } directoryInfo = directoryInfo.Parent; } while (directoryInfo?.Parent != null); return null; } }
MicroserviceServiceStringEncryptionStep
csharp
mongodb__mongo-csharp-driver
tests/MongoDB.Bson.Tests/Serialization/Serializers/NetPrimitiveSerializerTests.cs
{ "start": 14240, "end": 18943 }
public class ____ { [BsonRepresentation(BsonType.Binary)] public byte B; [BsonRepresentation(BsonType.Int32)] public byte I; [BsonRepresentation(BsonType.Int64)] public byte L; [BsonRepresentation(BsonType.String)] public byte S; } [Fact] public void TestMin() { var obj = new TestClass { B = byte.MinValue, I = byte.MinValue, L = byte.MinValue, S = byte.MinValue }; var json = obj.ToJson(writerSettings: new JsonWriterSettings { OutputMode = JsonOutputMode.Shell }); var expected = "{ 'B' : new BinData(0, 'AA=='), 'I' : 0, 'L' : NumberLong(0), 'S' : '00' }".Replace("'", "\""); Assert.Equal(expected, json); var bson = obj.ToBson(); var rehydrated = BsonSerializer.Deserialize<TestClass>(bson); Assert.True(bson.SequenceEqual(rehydrated.ToBson())); } [Fact] public void TestZero() { var obj = new TestClass { B = 0, I = 0, L = 0, S = 0 }; var json = obj.ToJson(writerSettings: new JsonWriterSettings { OutputMode = JsonOutputMode.Shell }); var expected = "{ 'B' : new BinData(0, 'AA=='), 'I' : 0, 'L' : NumberLong(0), 'S' : '00' }".Replace("'", "\""); Assert.Equal(expected, json); var bson = obj.ToBson(); var rehydrated = BsonSerializer.Deserialize<TestClass>(bson); Assert.True(bson.SequenceEqual(rehydrated.ToBson())); } [Fact] public void TestOne() { var obj = new TestClass { B = 1, I = 1, L = 1, S = 1 }; var json = obj.ToJson(writerSettings: new JsonWriterSettings { OutputMode = JsonOutputMode.Shell }); var expected = "{ 'B' : new BinData(0, 'AQ=='), 'I' : 1, 'L' : NumberLong(1), 'S' : '01' }".Replace("'", "\""); Assert.Equal(expected, json); var bson = obj.ToBson(); var rehydrated = BsonSerializer.Deserialize<TestClass>(bson); Assert.True(bson.SequenceEqual(rehydrated.ToBson())); } [Fact] public void TestMax() { var obj = new TestClass { B = byte.MaxValue, I = byte.MaxValue, L = byte.MaxValue, S = byte.MaxValue }; var json = obj.ToJson(writerSettings: new JsonWriterSettings { OutputMode = JsonOutputMode.Shell }); var expected = "{ 'B' : new BinData(0, '/w=='), 'I' : 255, 'L' 
: NumberLong(255), 'S' : 'ff' }".Replace("'", "\""); Assert.Equal(expected, json); var bson = obj.ToBson(); var rehydrated = BsonSerializer.Deserialize<TestClass>(bson); Assert.True(bson.SequenceEqual(rehydrated.ToBson())); } [Fact] public void Equals_null_should_return_false() { var x = new ByteSerializer(BsonType.Int32); var result = x.Equals(null); result.Should().Be(false); } [Fact] public void Equals_object_should_return_false() { var x = new ByteSerializer(BsonType.Int32); var y = new object(); var result = x.Equals(y); result.Should().Be(false); } [Fact] public void Equals_self_should_return_true() { var x = new ByteSerializer(BsonType.Int32); var result = x.Equals(x); result.Should().Be(true); } [Fact] public void Equals_with_equal_fields_should_return_true() { var x = new ByteSerializer(BsonType.Int32); var y = new ByteSerializer(BsonType.Int32); var result = x.Equals(y); result.Should().Be(true); } [Fact] public void Equals_with_not_equal_field_should_return_false() { var x = new ByteSerializer(BsonType.Int32); var y = new ByteSerializer(BsonType.String); var result = x.Equals(y); result.Should().Be(false); } [Fact] public void GetHashCode_should_return_zero() { var x = new ByteSerializer(BsonType.Int32); var result = x.GetHashCode(); result.Should().Be(0); } }
TestClass
csharp
ChilliCream__graphql-platform
src/HotChocolate/Primitives/src/Primitives/Types/IWrapperType.cs
{ "start": 149, "end": 304 }
public interface ____ : IOutputType, IInputType { /// <summary> /// Gets the inner type. /// </summary> IType InnerType { get; } }
IWrapperType
csharp
open-telemetry__opentelemetry-dotnet
src/Shared/Shims/NullableAttributes.cs
{ "start": 2421, "end": 2929 }
internal sealed class ____ : Attribute { public MemberNotNullWhenAttribute(bool returnValue, string member) { ReturnValue = returnValue; Members = [member]; } public MemberNotNullWhenAttribute(bool returnValue, params string[] members) { ReturnValue = returnValue; Members = members; } public bool ReturnValue { get; } public string[] Members { get; } } } #endif
MemberNotNullWhenAttribute
csharp
ServiceStack__ServiceStack
ServiceStack/src/ServiceStack.Razor/Html/TemplateProvider.cs
{ "start": 247, "end": 4384 }
public class ____ { public int? CompileInParallelWithNoOfThreads { get; set; } private static readonly ILog Log = LogManager.GetLogger(typeof(TemplateProvider)); AutoResetEvent waiter = new AutoResetEvent(false); readonly string defaultTemplateName; public TemplateProvider(string defaultTemplateName) { this.defaultTemplateName = defaultTemplateName; } readonly Dictionary<string, IVirtualFile> templatePathsFound = new Dictionary<string, IVirtualFile>(StringComparer.OrdinalIgnoreCase); readonly HashSet<string> templatePathsNotFound = new HashSet<string>(StringComparer.OrdinalIgnoreCase); public string GetTemplatePath(IVirtualDirectory fileDir) { try { if (templatePathsNotFound.Contains(fileDir.VirtualPath)) return null; var templateDir = fileDir; IVirtualFile templateFile; while (templateDir != null && templateDir.GetFile(defaultTemplateName) == null) { var tmplPath = templateDir.VirtualPath; if (tmplPath != null && templatePathsFound.TryGetValue(tmplPath, out templateFile)) return templateFile.RealPath; templateDir = templateDir.ParentDirectory; } if (templateDir != null) { templateFile = templateDir.GetFile(defaultTemplateName); templatePathsFound[templateDir.VirtualPath] = templateFile; return templateFile.VirtualPath; } templatePathsNotFound.Add(fileDir.VirtualPath); return null; } catch (Exception ex) { ex.Message.Print(); throw; } } private readonly ConcurrentQueue<IViewPage> compilePages = new ConcurrentQueue<IViewPage>(); private readonly ConcurrentQueue<IViewPage> priorityCompilePages = new ConcurrentQueue<IViewPage>(); public void QueuePageToCompile(IViewPage pageToCompile) { waiter.Reset(); compilePages.Enqueue(pageToCompile); } private int runningThreads; public void CompileQueuedPages() { var compileInParallel = CompileInParallelWithNoOfThreads > 0; Log.Info($"Starting to compile {compilePages.Count}/{priorityCompilePages.Count} pages, " + $"{(compileInParallel ? 
"In Parallel" : "Sequentially")}"); #if !NETCORE if (compileInParallel) { var threadsToRun = Math.Min(CompileInParallelWithNoOfThreads.GetValueOrDefault(), compilePages.Count); if (threadsToRun <= runningThreads) return; Log.Info($"Starting {threadsToRun} threads.."); threadsToRun.Times(x => { ThreadPool.QueueUserWorkItem(waitHandle => { try { CompileAllPages(); } catch { } }); }); } else { CompileAllPages(); } #else CompileAllPages(); #endif } private void CompileAllPages() { try { Interlocked.Increment(ref runningThreads); while (!compilePages.IsEmpty || !priorityCompilePages.IsEmpty) { IViewPage viewPage; if (compilePages.TryDequeue(out viewPage)) { viewPage.Compile(); } } } finally { Interlocked.Decrement(ref runningThreads); Log.Info($"Compilation threads remaining {runningThreads}..."); waiter.Set(); } } public void EnsureAllCompiled() { if (compilePages.IsEmpty && priorityCompilePages.IsEmpty) return; waiter.WaitOne(60 * 1000); } } }
TemplateProvider
csharp
dotnet__efcore
test/EFCore.Specification.Tests/ModelBuilding/GiantModel.cs
{ "start": 307137, "end": 307357 }
public class ____ { public int Id { get; set; } public RelatedEntity1410 ParentEntity { get; set; } public IEnumerable<RelatedEntity1412> ChildEntities { get; set; } }
RelatedEntity1411
csharp
duplicati__duplicati
Tools/RemoteSynchronization/Program.cs
{ "start": 1777, "end": 40070 }
private sealed record ____ ( // Arguments string Src, string Dst, // Options bool AutoCreateFolders, int BackendRetries, int BackendRetryDelay, bool BackendRetryWithExponentialBackoff, bool Confirm, bool DryRun, List<string> DstOptions, bool Force, List<string> GlobalOptions, string LogFile, string LogLevel, bool ParseArgumentsOnly, bool Progress, bool Retention, int Retry, List<string> SrcOptions, bool VerifyContents, bool VerifyGetAfterPut ); /// <summary> /// The log tag for this tool. /// </summary> private static readonly string LOGTAG = Duplicati.Library.Logging.Log.LogTagFromType<Program>(); /// <summary> /// Main entry point for the tool. /// </summary> /// <param name="args">The commandline arguments</param> /// <returns>0 on success, -1 on abort, and the number of errors encountered otherwise.</returns> public static async Task<int> Main(string[] args) { var arg_src = new Argument<string>(name: "backend_src", description: "The source backend string"); var arg_dst = new Argument<string>(name: "backend_dst", description: "The destination backend string"); var root_cmd = new RootCommand(@"Remote Synchronization Tool This tool synchronizes two remote backends. The tool assumes that the intent is to have the destination match the source. If the destination has files that are not in the source, they will be deleted (or renamed if the retention option is set). If the destination has files that are also present in the source, but the files differ in size, or if the source files have a newer (more recent) timestamp, the destination files will be overwritten by the source files. Given that some backends do not allow for metadata or timestamp modification, and that the tool is run after backup, the destination files should always have a timestamp that is newer (or the same if run promptly) compared to the source files. If the force option is set, the destination will be overwritten by the source, regardless of the state of the files. 
It will also skip the initial comparison, and delete (or rename) all files in the destination. If the verify option is set, the files will be downloaded and compared after uploading to ensure that the files are correct. Files that already exist in the destination will be verified before being overwritten (if they seemingly match). ") { arg_src, arg_dst, new Option<bool>(aliases: ["--auto-create-folders"], description: "Automatically create folders in the destination backend if they do not exist", getDefaultValue: () => true), new Option<int>(aliases: ["--backend-retries"], description: "Number of times to recreate a backend on backend errors", getDefaultValue: () => 3) { Arity = ArgumentArity.ExactlyOne }, new Option<int>(aliases: ["--backend-retry-delay"], description: "Delay in milliseconds between backend retries", getDefaultValue: () => 1000) { Arity = ArgumentArity.ExactlyOne }, new Option<bool>(aliases: ["--backend-retry-with-exponential-backoff"], description: "Use exponential backoff for backend retries, multiplying the delay by two for each failure.", getDefaultValue: () => true), new Option<bool>(aliases: ["--confirm", "--yes", "-y"], description: "Automatically confirm the operation", getDefaultValue: () => false), new Option<bool>(aliases: ["--dry-run", "-d"], description: "Do not actually write or delete files. If not set here, the global options will be checked", getDefaultValue: () => false), OptionWithMultipleTokens(aliases: ["--dst-options"], description: "Options for the destination backend. Each option is a key-value pair separated by an equals sign, e.g. --dst-options key1=value1 key2=value2 [default: empty]", getDefaultValue: () => []), new Option<bool>(aliases: ["--force", "-f"], description: "Force the synchronization", getDefaultValue: () => false), OptionWithMultipleTokens(aliases: ["--global-options"], description: "Global options all backends. May be overridden by backend specific options (src-options, dst-options). 
Each option is a key-value pair separated by an equals sign, e.g. --global-options key1=value1 key2=value2 [default: empty]", getDefaultValue: () => []), new Option<string>(aliases: ["--log-file"], description: "The log file to write to. If not set here, global options will be checked [default: \"\"]", getDefaultValue: () => "") { Arity = ArgumentArity.ExactlyOne }, new Option<string>(aliases: ["--log-level"], description: "The log level to use. If not set here, global options will be checked", getDefaultValue: () => "Information") { Arity = ArgumentArity.ExactlyOne }, new Option<bool>(aliases: ["--parse-arguments-only"], description: "Only parse the arguments and then exit", getDefaultValue: () => false), new Option<bool>(aliases: ["--progress"], description: "Print progress to STDOUT", getDefaultValue: () => false), new Option<bool>(aliases: ["--retention"], description: "Toggles whether to keep old files. Any deletes will be renames instead", getDefaultValue: () => false), new Option<int>(aliases: ["--retry"], description: "Number of times to retry on errors", getDefaultValue: () => 3) { Arity = ArgumentArity.ExactlyOne }, OptionWithMultipleTokens(aliases: ["--src-options"], description: "Options for the source backend. Each option is a key-value pair separated by an equals sign, e.g. 
--src-options key1=value1 key2=value2 [default: empty]", getDefaultValue: () => []), new Option<bool>(aliases: ["--verify-contents"], description: "Verify the contents of the files to decide whether the pre-existing destination files should be overwritten", getDefaultValue: () => false), new Option<bool>(aliases: ["--verify-get-after-put"], description: "Verify the files after uploading them to ensure that they were uploaded correctly", getDefaultValue: () => false), }; root_cmd.Handler = CommandHandler.Create((string backend_src, string backend_dst, Config config, CancellationToken token) => { var config_with_args = config with { Dst = backend_dst, Src = backend_src }; return Run(config_with_args, token); }); return await root_cmd.InvokeAsync(args).ConfigureAwait(false); } /// <summary> /// The main logic of the tool. /// </summary> /// <param name="config">The parsed configuration for the tool.</param> /// <param name="token">The cancellation token to use for the asynchronous operations.</param> /// <returns>The return code for the main entry; 0 on success.</returns> private static async Task<int> Run(Config config, CancellationToken token) { // Unpack and parse the multi token options var global_options = ParseOptions(config.GlobalOptions); // Parse the log level var log_level_parsed = Enum.TryParse<Duplicati.Library.Logging.LogMessageType>(config.LogLevel, true, out var log_level_enum); log_level_enum = log_level_parsed ? log_level_enum : Duplicati.Library.Logging.LogMessageType.Information; using var console_sink = new Duplicati.CommandLine.ConsoleOutput(Console.Out, global_options); using var multi_sink = new Duplicati.Library.Main.ControllerMultiLogTarget(console_sink, log_level_enum, null, null); // Parse the log file // The log file sink doesn't have to be disposed, as the multi_sink will take care of it Duplicati.Library.Logging.StreamLogDestination? 
log_file_sink = null; if (!string.IsNullOrEmpty(config.LogFile)) { string log_file_dir = SystemIO.IO_OS.PathGetDirectoryName(config.LogFile); if (!string.IsNullOrEmpty(log_file_dir) && !SystemIO.IO_OS.DirectoryExists(log_file_dir)) SystemIO.IO_OS.DirectoryCreate(log_file_dir); log_file_sink = new Duplicati.Library.Logging.StreamLogDestination(config.LogFile); } multi_sink.AddTarget(log_file_sink, log_level_enum, null); // Start the logging scope using var _ = Duplicati.Library.Logging.Log.StartScope(multi_sink, log_level_enum); var src_opts = ParseOptions(config.SrcOptions); var dst_opts = ParseOptions(config.DstOptions); // Merge the global options into the source and destination options. The global options will be overridden by the source and destination options. foreach (var x in global_options) { if (!src_opts.ContainsKey(x.Key)) src_opts[x.Key] = x.Value; if (!dst_opts.ContainsKey(x.Key)) dst_opts[x.Key] = x.Value; } // Check if we only had to parse the arguments if (config.ParseArgumentsOnly) { Duplicati.Library.Logging.Log.WriteInformationMessage(LOGTAG, "rsync", "Arguments parsed successfully; {0}; exiting.", config); return 0; } using var b1m = new LightWeightBackendManager(config.Src, src_opts, config.BackendRetries, config.BackendRetryDelay, config.BackendRetryWithExponentialBackoff); using var b2m = new LightWeightBackendManager(config.Dst, dst_opts, config.BackendRetries, config.BackendRetryDelay, config.BackendRetryWithExponentialBackoff); // Prepare the operations var (to_copy, to_delete, to_verify) = await PrepareFileLists(b1m, b2m, config, token).ConfigureAwait(false); // Verify the files if requested. If the files are not verified, they will be deleted and copied again. long verified = 0, failed_verify = 0; if (config.VerifyContents) { // As this is a potentially slow operation, ask for confirmation of the verification) if (!config.Confirm) { Console.WriteLine($"This will verify {to_verify.Count()} files before copying them. 
Do you want to continue? [y/N]"); var response = Console.ReadLine(); if (!response?.Equals("y", StringComparison.CurrentCultureIgnoreCase) ?? true) { Duplicati.Library.Logging.Log.WriteInformationMessage(LOGTAG, "rsync", "Aborted"); return -1; } } var not_verified = await VerifyAsync(b1m, b2m, to_verify, config, token).ConfigureAwait(false); failed_verify = not_verified.Count(); verified = to_verify.Count() - failed_verify; if (not_verified.Any()) { Duplicati.Library.Logging.Log.WriteWarningMessage(LOGTAG, "rsync", null, "{0} files failed verification. They will be deleted and copied again.", not_verified.Count()); to_delete = to_delete.Concat(not_verified); to_copy = to_copy.Concat(not_verified); } } Duplicati.Library.Logging.Log.WriteInformationMessage(LOGTAG, "rsync", "The remote synchronization plan is to {0} {1} files from {2}, then copy {3} files from {4} to {2}.", config.Retention ? "rename" : "delete", to_delete.Count(), b2m.DisplayName, to_copy.Count(), b1m.DisplayName); // As this is a potentially destructive operation, ask for confirmation if (!config.Confirm) { var delete_rename = config.Retention ? "Rename" : "Delete"; Console.WriteLine($"This will perform the following actions (in order):"); Console.WriteLine($" - {delete_rename} {to_delete.Count()} files from {config.Dst}"); Console.WriteLine($" - Copy {to_copy.Count()} files from {config.Src} to {config.Dst}"); Console.WriteLine(); Console.WriteLine("Do you want to continue? [y/N]"); var response = Console.ReadLine(); if (!response?.Equals("y", StringComparison.CurrentCultureIgnoreCase) ?? 
true) { Duplicati.Library.Logging.Log.WriteInformationMessage(LOGTAG, "rsync", "Aborted"); return -1; } } // Delete or rename the files that are not needed long renamed = 0, deleted = 0; if (config.Retention) { renamed = await RenameAsync(b2m, to_delete, config, token).ConfigureAwait(false); Duplicati.Library.Logging.Log.WriteInformationMessage(LOGTAG, "rsync", "Renamed {0} files in {1}", renamed, b2m.DisplayName); } else { deleted = await DeleteAsync(b2m, to_delete, config, token).ConfigureAwait(false); Duplicati.Library.Logging.Log.WriteInformationMessage(LOGTAG, "rsync", "Deleted {0} files from {1}", deleted, b2m.DisplayName); } // Copy the files var (copied, copy_errors) = await CopyAsync(b1m, b2m, to_copy, config, token).ConfigureAwait(false); Duplicati.Library.Logging.Log.WriteInformationMessage(LOGTAG, "rsync", "Copied {0} files from {1} to {2}", copied, b1m.DisplayName, b2m.DisplayName); // If there are still errors, retry a few times if (copy_errors.Any()) { Duplicati.Library.Logging.Log.WriteWarningMessage(LOGTAG, "rsync", null, "Could not copy {0} files.", copy_errors.Count()); if (config.Retry > 0) { Duplicati.Library.Logging.Log.WriteInformationMessage(LOGTAG, "rsync", "Retrying {0} more times to copy the {1} files that failed", config.Retry, copy_errors.Count()); for (int i = 0; i < config.Retry; i++) { await Task.Delay(5000).ConfigureAwait(false); // Wait 5 seconds before retrying (copied, copy_errors) = await CopyAsync(b1m, b2m, copy_errors, config, token).ConfigureAwait(false); Duplicati.Library.Logging.Log.WriteInformationMessage(LOGTAG, "rsync", "Copied {0} files from {1} to {2}", copied, b1m.DisplayName, b2m.DisplayName); if (!copy_errors.Any()) break; } } if (copy_errors.Any()) { Duplicati.Library.Logging.Log.WriteErrorMessage(LOGTAG, "rsync", null, "Could not copy {0} files. 
Not retrying any more.", copy_errors.Count()); return copy_errors.Count(); } } // Results reporting if (verified > 0) Duplicati.Library.Logging.Log.WriteInformationMessage(LOGTAG, "rsync", "Verified {0} files in {1} that didn't need to be copied", verified, b2m.DisplayName); if (failed_verify > 0) Duplicati.Library.Logging.Log.WriteInformationMessage(LOGTAG, "rsync", "Failed to verify {0} files in {1}, which were then attempted to be copied", failed_verify, b2m.DisplayName); if (copied > 0) Duplicati.Library.Logging.Log.WriteInformationMessage(LOGTAG, "rsync", "Copied {0} files from {1} to {2}", copied, b1m.DisplayName, b2m.DisplayName); if (deleted > 0) Duplicati.Library.Logging.Log.WriteInformationMessage(LOGTAG, "rsync", "Deleted {0} files from {1}", deleted, b2m.DisplayName); if (renamed > 0) Duplicati.Library.Logging.Log.WriteInformationMessage(LOGTAG, "rsync", "Renamed {0} files in {1}", renamed, b2m.DisplayName); Duplicati.Library.Logging.Log.WriteInformationMessage(LOGTAG, "rsync", "Remote synchronization completed successfully"); return 0; } // TODO have concurrency parameters: uploaders, downloaders // TODO low memory mode, where things aren't kept in memory. Maybe utilize SQLite? // TODO For convenience, have the option to launch a "duplicati test" on the destination backend after the synchronization // TODO Save hash to minimize redownload // TODO Duplicati Results /// <summary> /// Copies the files from one backend to another. /// The files are copied one by one, and each file is verified after uploading if the verify flag is set. 
/// </summary> /// <param name="b_src">The source backend.</param> /// <param name="b_dst">The destination backend.</param> /// <param name="files">The files that will be copied.</param> /// <param name="config">The parsed configuration for the tool.</param> /// <param name="token">The cancellation token to use for the asynchronous operations.</param> /// <returns>A tuple holding the number of succesful copies and a List of the files that failed.</returns> private static async Task<(long, IEnumerable<IFileEntry>)> CopyAsync(LightWeightBackendManager b_src, LightWeightBackendManager b_dst, IEnumerable<IFileEntry> files, Config config, CancellationToken token) { long successful_copies = 0; List<IFileEntry> errors = []; long i = 0, n = files.Count(); var sw_get_src = new System.Diagnostics.Stopwatch(); var sw_put_dst = new System.Diagnostics.Stopwatch(); var sw_get_dst = new System.Diagnostics.Stopwatch(); var sw_get_cmp = new System.Diagnostics.Stopwatch(); foreach (var f in files) { if (config.Progress) Console.Write($"\rCopying: {i}/{n}"); Duplicati.Library.Logging.Log.WriteVerboseMessage(LOGTAG, "rsync", "Copying {0} from {1} to {2}", f.Name, b_src.DisplayName, b_dst.DisplayName); using var s_src = Duplicati.Library.Utility.TempFileStream.Create(); try { sw_get_src.Start(); await b_src.GetAsync(f.Name, s_src, token).ConfigureAwait(false); s_src.Position = 0; sw_get_src.Stop(); if (config.DryRun) { Duplicati.Library.Logging.Log.WriteDryrunMessage(LOGTAG, "rsync", "Would write {0} bytes of {1} to {2}", Duplicati.Library.Utility.Utility.FormatSizeString(s_src.Length), f.Name, b_dst.DisplayName); } else { sw_put_dst.Start(); await b_dst.PutAsync(f.Name, s_src, token).ConfigureAwait(false); s_src.Position = 0; sw_put_dst.Stop(); if (config.VerifyGetAfterPut) { // Start calculating the hash of the source file while we are downloading var srchashtask = Task.Run(() => { using var hasher = HashFactory.CreateHasher("SHA256"); return 
Convert.ToBase64String(hasher.ComputeHash(s_src)); }); using var s_dst = Duplicati.Library.Utility.TempFileStream.Create(); sw_get_dst.Start(); await b_dst.GetAsync(f.Name, s_dst, token).ConfigureAwait(false); s_dst.Position = 0; sw_get_dst.Stop(); sw_get_cmp.Start(); string? err_string = null; if (s_src.Length != s_dst.Length) { err_string = $"The sizes of the files do not match: {s_src.Length} != {s_dst.Length}."; } using var hasher = HashFactory.CreateHasher("SHA256"); var dsthash = Convert.ToBase64String(hasher.ComputeHash(s_dst)); if (await srchashtask.ConfigureAwait(false) != dsthash) { err_string = (err_string is null ? "" : err_string + " ") + "The contents of the files do not match."; } sw_get_cmp.Stop(); if (err_string is not null) { throw new Exception(err_string); } } } successful_copies++; } catch (Exception e) { Duplicati.Library.Logging.Log.WriteErrorMessage(LOGTAG, "rsync", e, "Error copying {0}: {1}", f.Name, e.Message); errors.Add(f); } finally { i++; // Stop any running timers sw_get_src.Stop(); sw_put_dst.Stop(); sw_get_dst.Stop(); sw_get_cmp.Stop(); } } if (config.Progress) Console.WriteLine($"\rCopying: {n}/{n}"); Duplicati.Library.Logging.Log.WriteProfilingMessage(LOGTAG, "rsync", "Copy | Get source: {0} ms, Put destination: {1} ms, Get destination: {2} ms, Get compare: {3} ms", TimeSpan.FromMilliseconds(sw_get_src.ElapsedMilliseconds), TimeSpan.FromMilliseconds(sw_put_dst.ElapsedMilliseconds), TimeSpan.FromMilliseconds(sw_get_dst.ElapsedMilliseconds), TimeSpan.FromMilliseconds(sw_get_cmp.ElapsedMilliseconds)); return (successful_copies, errors); } /// <summary> /// Deletes the files from a backend. 
/// </summary> /// <param name="b">The backend to delete the files from.</param> /// <param name="files">The files to delete.</param> /// <param name="config">The parsed configuration for the tool.</param> /// <param name="token">The cancellation token to use for the asynchronous operations.</param> /// <returns>The number of successful deletions.</returns> private static async Task<long> DeleteAsync(LightWeightBackendManager b, IEnumerable<IFileEntry> files, Config config, CancellationToken token) { long successful_deletes = 0; long i = 0, n = files.Count(); using var timer = new Duplicati.Library.Logging.Timer(LOGTAG, "rsync", "Delete operation"); foreach (var f in files) { if (n > 1 && config.Progress) { Console.Write($"\rDeleting: {i}/{n}"); } Duplicati.Library.Logging.Log.WriteVerboseMessage(LOGTAG, "rsync", "Deleting {0} from {1}", f.Name, b.DisplayName); try { if (config.DryRun) { Duplicati.Library.Logging.Log.WriteDryrunMessage(LOGTAG, "rsync", "Would delete {0} from {1}", f.Name, b.DisplayName); } else { await b.DeleteAsync(f.Name, token).ConfigureAwait(false); } successful_deletes++; } catch (Exception e) { Duplicati.Library.Logging.Log.WriteErrorMessage(LOGTAG, "rsync", e, "Error deleting {0}: {1}", f.Name, e.Message); } i++; } if (config.Progress) Console.WriteLine($"\rDeleting: {n}/{n}"); return successful_deletes; } /// <summary> /// Creates an option that allows multiple tokens and multiple arguments per token. 
/// </summary> /// <param name="aliases">The aliases for the option.</param> /// <param name="description">The description for the option.</param> /// <returns>The created option.</returns> private static Option<List<string>> OptionWithMultipleTokens(string[] aliases, string description, Func<List<string>> getDefaultValue) { return new Option<List<string>>(aliases: aliases, description: description, getDefaultValue: getDefaultValue) { Arity = ArgumentArity.OneOrMore, AllowMultipleArgumentsPerToken = true }; } /// <summary> /// Parses the options from a list of strings. /// Each option should be in the format "key=value". If the value contains spaces, /// it should be enclosed in quotes, e.g. "key=\"value with spaces\"". /// </summary> /// <param name="options">The list of string options to parse</param> /// <returns>A dictionary with the parsed options, where the key is the option name and the value is the option value.</returns> /// <exception cref="ArgumentException">If an option was not parsed correctly.</exception> private static Dictionary<string, string> ParseOptions(IEnumerable<string> options) { var result = options .Select(x => x.Split('=')) .ToDictionary(x => x[0], x => string.Join("=", x.Skip(1))); // Double check that the options are valid by reconstructing them from the dictionary foreach (var opt in result.Select(x => $"{x.Key}={x.Value}")) { if (!options.Contains(opt)) { Duplicati.Library.Logging.Log.WriteErrorMessage(LOGTAG, "rsync", null, "The source option '{0}' is not valid. Please check the syntax.", opt); throw new ArgumentException($"The source option '{opt}' has not been parsed correctly."); } } return result; } /// <summary> /// Prepares the lists of files to copy, delete and verify. /// The files to copy are the files that are not in the destination, have a different size or have a more recent modification date. /// The files to delete are the files that are found in the destination but not found in the source. 
/// The files to verify are the files that are found in both the source and the destination, and that have the same size and modification date. /// </summary> /// <param name="b_src">The source lightweight backend manager.</param> /// <param name="b_dst">The destination lightweight backend manager.</param> /// <param name="config">The parsed configuration for the tool.</param> /// <param name="token">The cancellation token to use for the asynchronous operations.</param> /// <returns>A tuple of Lists each holding the files to copy, delete and verify.</returns> private static async Task<(IEnumerable<IFileEntry>, IEnumerable<IFileEntry>, IEnumerable<IFileEntry>)> PrepareFileLists(LightWeightBackendManager b_src, LightWeightBackendManager b_dst, Config config, CancellationToken token) { IEnumerable<IFileEntry> files_src, files_dst; using (new Duplicati.Library.Logging.Timer(LOGTAG, "rsync", "Prepare | List source")) files_src = await b_src.ListAsync(token).ConfigureAwait(false); using (new Duplicati.Library.Logging.Timer(LOGTAG, "rsync", "Prepare | List destination")) files_dst = await b_dst.ListAsync(token).ConfigureAwait(false); // Shortcut for force if (config.Force) { return (files_src, files_dst, []); } // Shortcut for empty destination if (!files_dst.Any()) { return (files_src, [], []); } Dictionary<string, IFileEntry> lookup_src, lookup_dst; using (new Duplicati.Library.Logging.Timer(LOGTAG, "rsync", "Prepare | Build lookup for source and destination")) { lookup_src = files_src.ToDictionary(x => x.Name); lookup_dst = files_dst.ToDictionary(x => x.Name); } var to_copy = new List<IFileEntry>(); var to_delete = new HashSet<string>(); var to_verify = new List<IFileEntry>(); // Find all of the files in src that are not in dst, where the dst has a different size than src or src a more recent modification date than dst using (new Duplicati.Library.Logging.Timer(LOGTAG, "rsync", "Prepare | Check the files that are present in source against destination")) foreach (var 
f_src in files_src) { if (lookup_dst.TryGetValue(f_src.Name, out var f_dst)) { if (f_src.Size != f_dst.Size || f_src.LastModification > f_dst.LastModification) { // The file is different, so we need to copy it to_copy.Add(f_src); to_delete.Add(f_dst.Name); } else { // The file seems to be the same, so we need to verify it if the user wants to to_verify.Add(f_src); } } else { // The file is not in the destination, so we need to copy it to_copy.Add(f_src); } } // Find all of the files in dst that are not in src using (new Duplicati.Library.Logging.Timer(LOGTAG, "rsync", "Prepare | Check the files that are present in destination against source")) foreach (var f_dst in files_dst) { if (to_delete.Contains(f_dst.Name)) continue; if (!lookup_src.ContainsKey(f_dst.Name)) { to_delete.Add(f_dst.Name); } } List<IFileEntry> to_delete_lookedup; using (new Duplicati.Library.Logging.Timer(LOGTAG, "rsync", "Prepare | Lookup the files to delete")) to_delete_lookedup = [.. to_delete.Select(x => lookup_dst[x])]; return (to_copy, to_delete_lookedup, to_verify); } /// <summary> /// Renames the files in a backend. /// The renaming is done by deleting the file and re-uploading it with a new name. 
/// </summary> /// <param name="bm">The lightweight backend manager to issue rename operations to.</param> /// <param name="files">The files to rename.</param> /// <param name="config">The parsed configuration for the tool.</param> /// <param name="token">The cancellation token to use for the asynchronous operations.</param> /// <returns>The number of successful renames.</returns> private static async Task<long> RenameAsync(LightWeightBackendManager bm, IEnumerable<IFileEntry> files, Config config, CancellationToken token) { long successful_renames = 0; string prefix = $"{System.DateTime.UtcNow:yyyyMMddHHmmss}.old"; using var downloaded = new MemoryStream(); long i = 0, n = files.Count(); var sw = new System.Diagnostics.Stopwatch(); foreach (var f in files) { if (config.Progress) Console.Write($"\rRenaming: {i}/{n}"); Duplicati.Library.Logging.Log.WriteVerboseMessage(LOGTAG, "rsync", "Renaming {0} to {1}.{0} by calling Rename on {2}", f.Name, prefix, bm.DisplayName); try { if (config.DryRun) { Duplicati.Library.Logging.Log.WriteDryrunMessage(LOGTAG, "rsync", "Would rename {0} to {1}.{0} by calling Rename on {2}", f.Name, prefix, bm.DisplayName); } else { sw.Start(); await bm.RenameAsync(f.Name, $"{prefix}.{f.Name}", token).ConfigureAwait(false); sw.Stop(); } successful_renames++; } catch (Exception e) { Duplicati.Library.Logging.Log.WriteErrorMessage(LOGTAG, "rsync", e, "Error renaming {0}: {1}", f.Name, e.Message); } finally { // Ensure the timer is stopped sw.Stop(); } i++; } Duplicati.Library.Logging.Log.WriteProfilingMessage(LOGTAG, "rsync", "Rename: {0} ms", TimeSpan.FromMilliseconds(sw.ElapsedMilliseconds)); if (config.Progress) Console.WriteLine($"\rRenaming: {n}/{n}"); return successful_renames; } /// <summary> /// Verifies the files in the destination backend. /// The verification is done by downloading the files from the destination backend and comparing them to the source files. 
/// </summary> /// <param name="b_src">The source lightweight backend manager.</param> /// <param name="b_dst">The destination lightweight backend manager.</param> /// <param name="files">The files to verify.</param> /// <param name="config">The parsed configuration for the tool.</param> /// <param name="token">The cancellation token to use for the asynchronous operations.</param> /// <returns>A list of the files that failed verification.</returns> private static async Task<IEnumerable<IFileEntry>> VerifyAsync(LightWeightBackendManager b_src, LightWeightBackendManager b_dst, IEnumerable<IFileEntry> files, Config config, CancellationToken token) { var errors = new List<IFileEntry>(); using var s_src = new MemoryStream(); using var s_dst = new MemoryStream(); long i = 0, n = files.Count(); var sw_get = new System.Diagnostics.Stopwatch(); var sw_cmp = new System.Diagnostics.Stopwatch(); foreach (var f in files) { if (config.Progress) Console.Write($"\rVerifying: {i}/{n}"); Duplicati.Library.Logging.Log.WriteVerboseMessage(LOGTAG, "rsync", "Verifying {0} by downloading and comparing {1} bytes from {2} and {3}", f.Name, Duplicati.Library.Utility.Utility.FormatSizeString(s_src.Length), b_dst.DisplayName, b_src.DisplayName); try { // Get both files sw_get.Start(); var fs = b_src.GetAsync(f.Name, s_src, token); var ds = b_dst.GetAsync(f.Name, s_dst, token); await Task.WhenAll(fs, ds).ConfigureAwait(false); sw_get.Stop(); // Compare the contents sw_cmp.Start(); if (s_src.Length != s_dst.Length || !s_src.ToArray().SequenceEqual(s_dst.ToArray())) { errors.Add(f); } sw_cmp.Stop(); } catch (Exception e) { errors.Add(f); Duplicati.Library.Logging.Log.WriteErrorMessage(LOGTAG, "rsync", e, "Error during verification of {0}: {1}", f.Name, e.Message); } finally { // Reset the streams s_src.SetLength(0); s_dst.SetLength(0); // Stop any running timers sw_get.Stop(); sw_cmp.Stop(); } i++; } Duplicati.Library.Logging.Log.WriteProfilingMessage(LOGTAG, "rsync", "Verify | Get: {0} ms, 
Compare: {1} ms", TimeSpan.FromMilliseconds(sw_get.ElapsedMilliseconds), TimeSpan.FromMilliseconds(sw_cmp.ElapsedMilliseconds)); if (config.Progress) Console.WriteLine($"\rVerifying: {n}/{n}"); return errors; } } }
Config
csharp
getsentry__sentry-dotnet
benchmarks/Sentry.Benchmarks/StackFrameBenchmarks.cs
{ "start": 15622, "end": 16158 }
class ____.Threading.Tasks.Task`1<!0>,!0)", Module ="System.Private.CoreLib.il" }, new SentryStackFrame() { Function ="System.Threading.SemaphoreSlim+<WaitUntilCountOrTimeoutAsync>d__31.MoveNext()", Module ="System.Private.CoreLib.il" }, new SentryStackFrame() { Function ="System.Runtime.CompilerServices.AsyncTaskMethodBuilder`1+AsyncStateMachineBox`1[System.Boolean,System.Threading.SemaphoreSlim+<WaitUntilCountOrTimeoutAsync>d__31].ExecutionContextCallback(
System
csharp
dotnet__maui
src/Controls/src/SourceGen/TypeConverters/ISGTypeConverter.cs
{ "start": 274, "end": 1173 }
interface ____ { /// <summary> /// Converts a string value from XAML to the corresponding C# code generation string. /// </summary> /// <param name="value">The string value from XAML to convert</param> /// <param name="node">The XML node for diagnostic location information</param> /// <param name="toType">The target type symbol for the conversion</param> /// <param name="context">The source generation context</param> /// <param name="parentVar">Optional parent variable context</param> /// <returns>Generated C# code string, or "default" if conversion fails</returns> string Convert(string value, BaseNode node, ITypeSymbol toType, IndentedTextWriter writer, SourceGenContext context, ILocalValue? parentVar = null); /// <summary> /// Gets the type names this converter can handle (for registration/lookup). /// </summary> IEnumerable<string> SupportedTypes { get; } }
ISGTypeConverter
csharp
louthy__language-ext
LanguageExt.Core/Traits/Resolve/EqResolver.cs
{ "start": 114, "end": 2550 }
public static class ____<A> { public static string? ResolutionError; public static Func<A, int> GetHashCodeFunc = null!; public static MethodInfo GetHashCodeMethod = null!; public static nint GetHashCodeMethodPtr; public static Func<A, A, bool> EqualsFunc = null!; public static MethodInfo EqualsMethod = null!; public static nint EqualsMethodPtr; public static int GetHashCode(A value) => GetHashCodeFunc(value); public static bool Equals(A lhs, A rhs) => EqualsFunc(lhs, rhs); public static bool Exists => ResolutionError is null; static EqResolve() { var source = typeof(A); var impl = Resolver.Find(source, "Eq"); if (impl is null) { ResolutionError = $"Trait implementation not found for: {typeof(A).Name}"; MakeDefault(); return; } // Equals var m = Resolver.Method(impl, "Equals", source, source); if (m is null) { ResolutionError = $"`Equals` method not found for: {typeof(A).Name}"; MakeDefault(); return; } EqualsMethod = m; EqualsMethodPtr = m.MethodHandle.GetFunctionPointer(); EqualsFunc = (x, y) => (bool?)EqualsMethod.Invoke(null, [x, y]) ?? throw new InvalidOperationException(); // GetHashCode m = Resolver.Method(impl, "GetHashCode", source); if (m is null) { ResolutionError = $"`GetHashCode` method not found for: {typeof(A).Name}"; MakeDefault(); return; } GetHashCodeMethod = m; GetHashCodeMethodPtr = m.MethodHandle.GetFunctionPointer(); GetHashCodeFunc = x => (int?)GetHashCodeMethod.Invoke(null, [x]) ?? throw new InvalidOperationException(); } static void MakeDefault() { EqualsFunc = EqualityComparer<A>.Default.Equals; EqualsMethod = EqualsFunc.Method; EqualsMethodPtr = EqualsFunc.Method.MethodHandle.GetFunctionPointer(); GetHashCodeFunc = DefaultGetHashCode; GetHashCodeMethod = GetHashCodeFunc.Method; GetHashCodeMethodPtr = GetHashCodeFunc.Method.MethodHandle.GetFunctionPointer(); } static int DefaultGetHashCode(A value) => value is null ? 0 : value.GetHashCode(); }
EqResolve
csharp
cake-build__cake
src/Cake.Common/Security/DirectoryHash.cs
{ "start": 415, "end": 2681 }
public sealed class ____ { private readonly byte[] _hash; /// <summary> /// Initializes a new instance of the <see cref="DirectoryHash"/> class. /// </summary> /// <param name="directoryPath">The directory path.</param> /// <param name="hash">The computed hash.</param> /// <param name="hashAlgorithm">The algorithm used.</param> /// <param name="fileHashList">List of all computed <see cref="FileHash"/>.</param> public DirectoryHash( DirectoryPath directoryPath, byte[] hash, HashAlgorithm hashAlgorithm, IEnumerable<FileHash> fileHashList) { ArgumentNullException.ThrowIfNull(directoryPath); ArgumentNullException.ThrowIfNull(hash); ArgumentNullException.ThrowIfNull(fileHashList); Path = directoryPath; _hash = (byte[])hash.Clone(); Algorithm = hashAlgorithm; FileHashList.AddRange(fileHashList); } /// <summary> /// Gets the algorithm used for the hash computation. /// </summary> public HashAlgorithm Algorithm { get; } /// <summary> /// Gets the <see cref="DirectoryPath"/> for the directory. /// </summary> public DirectoryPath Path { get; } /// <summary> /// Gets the list of <see cref="FileHash"/> for all files of the directory. /// </summary> public List<FileHash> FileHashList { get; } = new List<FileHash>(); /// <summary> /// Gets the raw computed hash. /// </summary> public byte[] ComputedHash => (byte[])_hash.Clone(); /// <summary> /// Convert the directory hash to a hexadecimal string. /// </summary> /// <returns>A hexadecimal string representing the computed hash.</returns> public string ToHex() { // Each byte becomes two characters. Prepare the StringBuilder accordingly. var builder = new StringBuilder(_hash.Length * 2); foreach (var b in _hash) { builder.AppendFormat("{0:x2}", b); } return builder.ToString(); } } }
DirectoryHash