language stringclasses 1 value | repo stringclasses 133 values | path stringlengths 13 229 | class_span dict | source stringlengths 14 2.92M | target stringlengths 1 153 |
|---|---|---|---|---|---|
csharp | dotnet__efcore | test/EFCore.Specification.Tests/ModelBuilding/ModelBuilderTest.TestModel.cs | {
"start": 36534,
"end": 36768
} | protected class ____
{
public int Id { get; set; }
public Dictionary<string, object>? Reference { get; set; }
public List<Dictionary<string, object>>? Collection { get; set; }
}
| NestedOwnerOfSharedType |
csharp | jellyfin__jellyfin | src/Jellyfin.Extensions/Json/Converters/JsonFlagEnumConverterFactory.cs | {
"start": 217,
"end": 724
} | public class ____ : JsonConverterFactory
{
/// <inheritdoc />
public override bool CanConvert(Type typeToConvert)
{
return typeToConvert.IsEnum && typeToConvert.IsDefined(typeof(FlagsAttribute));
}
/// <inheritdoc />
public override JsonConverter? CreateConverter(Type typeToConvert, JsonSerializerOptions options)
{
return (JsonConverter?)Activator.CreateInstance(typeof(JsonFlagEnumConverter<>).MakeGenericType(typeToConvert));
}
}
| JsonFlagEnumConverterFactory |
csharp | dotnet__aspire | src/Aspire.Hosting.Kubernetes/Resources/TcpSocketActionV1.cs | {
"start": 526,
"end": 1054
} | public sealed class ____
{
/// <summary>
/// Specifies the hostname or IP address to be used for the TCP socket action.
/// </summary>
[YamlMember(Alias = "host")]
public string Host { get; set; } = null!;
/// <summary>
/// Gets or sets the port number to access for the TCP socket action.
/// This property specifies the numeric port on which the TCP connection
/// should be established.
/// </summary>
[YamlMember(Alias = "port")]
public int Port { get; set; }
}
| TcpSocketActionV1 |
csharp | dotnet__orleans | src/api/Orleans.Streaming/Orleans.Streaming.cs | {
"start": 21521,
"end": 21986
} | partial class ____
{
public MemoryMessageBody(System.Collections.Generic.IEnumerable<object> events, System.Collections.Generic.Dictionary<string, object> requestContext) { }
[Id(0)]
public System.Collections.Generic.List<object> Events { get { throw null; } }
[Id(1)]
public System.Collections.Generic.Dictionary<string, object> RequestContext { get { throw null; } }
}
[GenerateSerializer]
| MemoryMessageBody |
csharp | nopSolutions__nopCommerce | src/Plugins/Nop.Plugin.Misc.NopMobileApp/NopMobileAppDefaults.cs | {
"start": 103,
"end": 271
} | public class ____
{
/// <summary>
/// Gets a plugin system name
/// </summary>
public static string SystemName => "Misc.NopMobileApp";
} | NopMobileAppDefaults |
csharp | unoplatform__uno | src/Uno.UI.RemoteControl.DevServer.Tests/DevServerTests.cs | {
"start": 114,
"end": 2446
} | public class ____
{
private static ILogger<DevServerTestHelper> _logger = null!;
[ClassInitialize]
public static void ClassInitialize(TestContext context)
{
// Create a logger factory and logger
var loggerFactory = LoggerFactory.Create(builder =>
{
builder.AddConsole();
builder.AddDebug();
});
_logger = loggerFactory.CreateLogger<DevServerTestHelper>();
}
public TestContext? TestContext { get; set; }
private CancellationToken CT => TestContext?.CancellationTokenSource.Token ?? CancellationToken.None;
[TestMethod]
public async Task DevServer_ShouldStart()
{
// Arrange
await using var helper = new DevServerTestHelper(_logger);
try
{
// Act
var started = await helper.StartAsync(CT);
helper.EnsureStarted();
// Assert
started.Should().BeTrue("dev server should start successfully");
helper.AssertRunning();
helper.AssertConsoleOutputContains("Now listening on:");
}
finally
{
// Cleanup
await helper.StopAsync(CT);
}
}
[TestMethod]
public async Task DevServer_ShouldCaptureOutput()
{
// Arrange
await using var helper = new DevServerTestHelper(_logger, environmentVariables: _telemetryOptOutVariables);
try
{
// Act
var started = await helper.StartAsync(CT);
helper.EnsureStarted();
// Assert
started.Should().BeTrue("dev server should start successfully");
helper.ConsoleOutput.Should().NotBeEmpty("dev server should produce console output");
// The following assertions depend on the actual output of the dev server
// and may need to be adjusted based on the actual output
helper.AssertConsoleOutputContains("Now listening on:");
}
finally
{
// Cleanup
await helper.StopAsync(CT);
}
}
private readonly IReadOnlyDictionary<string, string>? _telemetryOptOutVariables =
new Dictionary<string, string>() { { "UNO_PLATFORM_TELEMETRY_OPTOUT ", "true" } };
[TestMethod]
public async Task DevServer_ShouldStopCleanly()
{
// Arrange
await using var helper = new DevServerTestHelper(_logger);
// Act
var started = await helper.StartAsync(CT);
helper.EnsureStarted();
await helper.StopAsync(CT);
// Assert
started.Should().BeTrue("dev server should start successfully");
helper.IsRunning.Should().BeFalse("dev server should not be running after stopping");
}
}
| DevServerTests |
csharp | microsoft__garnet | libs/server/Lua/LuaRunner.Loader.cs | {
"start": 3627,
"end": 10597
} | struct ____ (optional) inclusion into sandbox_env
local struct = {
pack = chain_func(error_wrapper_r1, garnet_struct_pack);
unpack = chain_func(error_wrapper_rvar, garnet_struct_unpack);
size = chain_func(error_wrapper_r1, garnet_struct_size);
}
-- define redis for (optional, but almost always) inclusion into sandbox_env
local garnetCallRef = chain_func(error_wrapper_r1, garnet_call)
local pCallRef = pcall
local redis = {
status_reply = function(text)
return text
end,
error_reply = function(text)
return { err = 'ERR ' .. text }
end,
call = garnetCallRef,
pcall = function(...)
local success, errOrRes = pCallRef(garnetCallRef, ...)
if success then
return errOrRes
end
return { err = errOrRes }
end,
sha1hex = chain_func(error_wrapper_r1, garnet_sha1hex),
LOG_DEBUG = 0,
LOG_VERBOSE = 1,
LOG_NOTICE = 2,
LOG_WARNING = 3,
log = chain_func(error_wrapper_r0, garnet_log),
REPL_ALL = 3,
REPL_AOF = 1,
REPL_REPLICA = 2,
REPL_SLAVE = 2,
REPL_NONE = 0,
set_repl = function(...)
-- this is a giant footgun, straight up not implementing it
error('ERR redis.set_repl is not supported in Garnet', 0)
end,
replicate_commands = function(...)
return true
end,
breakpoint = function(...)
-- this is giant and weird, not implementing
error('ERR redis.breakpoint is not supported in Garnet', 0)
end,
debug = function(...)
-- this is giant and weird, not implementing
error('ERR redis.debug is not supported in Garnet', 0)
end,
acl_check_cmd = chain_func(error_wrapper_r1, garnet_acl_check_cmd),
setresp = chain_func(error_wrapper_r0, garnet_setresp),
REDIS_VERSION = garnet_REDIS_VERSION,
REDIS_VERSION_NUM = garnet_REDIS_VERSION_NUM
}
-- added after Lua 5.1, removing to maintain Redis compat
string.pack = nil
string.unpack = nil
string.packsize = nil
math.maxinteger = nil
math.type = nil
math.mininteger = nil
math.tointeger = nil
math.ult = nil
table.pack = nil
table.unpack = nil
table.move = nil
-- in Lua 5.1 but not 5.4, so implemented on the .NET side
local loadstring = chain_func(error_wrapper_r2, garnet_loadstring)
math.atan2 = chain_func(error_wrapper_r1, garnet_atan2)
math.cosh = chain_func(error_wrapper_r1, garnet_cosh)
math.frexp = chain_func(error_wrapper_r2, garnet_frexp)
math.ldexp = chain_func(error_wrapper_r1, garnet_ldexp)
math.log10 = chain_func(error_wrapper_r1, garnet_log10)
math.pow = chain_func(error_wrapper_r1, garnet_pow)
math.sinh = chain_func(error_wrapper_r1, garnet_sinh)
math.tanh = chain_func(error_wrapper_r1, garnet_tanh)
table.maxn = chain_func(error_wrapper_r1, garnet_maxn)
local collectgarbageRef = collectgarbage
local setMetatableRef = setmetatable
local rawsetRef = rawset
-- prevent modification to metatables for readonly tables
-- Redis accomplishes this by patching Lua, we'd rather ship
-- vanilla Lua and do it in code
local setmetatable = function(table, metatable)
if table and table.__readonly then
error('Attempt to modify a readonly table', 0)
end
return setMetatableRef(table, metatable)
end
-- prevent bypassing metatables to update readonly tables
-- as above, Redis prevents this with a patch to Lua
local rawset = function(table, key, value)
if table and table.__readonly then
error('Attempt to modify a readonly table', 0)
end
return rawsetRef(table, key, value)
end
-- technically deprecated in 5.1, but available in Redis
-- this is only 'sort of' correct as 5.4 doesn't expose the same
-- gc primitives
local gcinfo = function()
return collectgarbageRef('count'), 0
end
-- global object used for the sandbox environment
--
-- replacements are performed before VM initialization
-- to allow configuring available functions
sandbox_env = {
_VERSION = _VERSION;
KEYS = KEYS;
ARGV = ARGV;
!!SANDBOX_ENV REPLACEMENT TARGET!!
}
-- timeout error must be raised on Lua
local debugRef = debug
local force_timeout = function()
error('ERR Lua script exceeded configured timeout', 0)
end
function request_timeout()
debugRef.sethook(force_timeout, '', 1)
end
-- no reference to outermost set of globals (_G) should survive sandboxing
sandbox_env._G = sandbox_env
-- lock down a table, recursively doing the same to all table members
local rawGetRef = rawget
local readonly_metatable = {
__index = function(onTable, key)
return rawGetRef(onTable, key)
end,
__newindex = function(onTable, key, value)
error('Attempt to modify a readonly table', 0)
end
}
function recursively_readonly_table(table)
if table.__readonly then
return table
end
table.__readonly = true
for key, value in pairs(table) do
if type(value) == 'table' and key ~= 'KEYS' and key ~= 'ARGV' then
recursively_readonly_table(value)
end
end
setMetatableRef(table, readonly_metatable)
end
-- do resets in the Lua side to minimize pinvokes
function reset_keys_and_argv(fromKey, fromArgv)
local keyRef = sandbox_env.KEYS
local keyCount = #keyRef
for i = fromKey, keyCount do
table.remove(keyRef)
end
local argvRef = sandbox_env.ARGV
local argvCount = #argvRef
for i = fromArgv, argvCount do
table.remove(argvRef)
end
end
-- force new 'global' environment to be readonly
recursively_readonly_table(sandbox_env)
-- responsible for sandboxing user provided code
function load_sandboxed(source)
local rawFunc, err = load(source, nil, nil, sandbox_env)
return err, rawFunc
end
";
internal static readonly HashSet<string> DefaultAllowedFunctions = [
// Built ins
"assert",
"collectgarbage",
"coroutine",
"error",
"gcinfo",
// Intentionally not supporting getfenv, as it's too weird to backport to Lua 5.4
"getmetatable",
"ipairs",
"load",
"loadstring",
"math",
"next",
"pairs",
"pcall",
"rawequal",
"rawget",
// Note rawset is proxied to implement readonly tables
"rawset",
"select",
// Intentionally not supporting setfenv, as it's too weird to backport to Lua 5.4
// Note setmetatable is proxied to implement readonly tables
"setmetatable",
"string",
"table",
"tonumber",
"tostring",
"type",
// Note unpack is actually table.unpack, and defined in the loader block
"unpack",
"xpcall",
// Runtime libs
"bit",
"cjson",
"cmsgpack",
// Note os only contains clock due to definition in the loader block
"os",
// Note | for |
csharp | graphql-dotnet__graphql-dotnet | src/GraphQL/Conversion/ListConverterFactories/CustomListConverterFactory.cs | {
"start": 1938,
"end": 3317
} | interface ____ cannot be instantiated.");
if (implementationType.IsGenericTypeDefinition && implementationType.GetGenericArguments().Length != 1)
throw new InvalidOperationException($"Type '{implementationType.GetFriendlyName()}' is a generic type definition with more than one generic argument.");
_implementationType = implementationType;
}
/// <summary>
/// Returns a <see cref="CustomListConverterFactory"/> which will work for any
/// compatible list type.
/// </summary>
public static CustomListConverterFactory DefaultInstance { get; } = new();
public IListConverter Create(
[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicConstructors | DynamicallyAccessedMemberTypes.PublicMethods)]
Type listType)
{
var elementType = listType.IsGenericType
? listType.GetGenericArguments()[0]
: typeof(object);
if (_implementationType != null)
{
listType = _implementationType.IsGenericTypeDefinition
? _implementationType.MakeGenericType(elementType)
: _implementationType;
}
if (listType.IsArray || listType.IsInterface || listType.IsGenericTypeDefinition || listType.IsAbstract)
throw new InvalidOperationException($"Type '{listType.GetFriendlyName()}' is an array, | and |
csharp | MassTransit__MassTransit | src/MassTransit/Configuration/BusFactoryExtensions.cs | {
"start": 135,
"end": 1860
} | public static class ____
{
public static IBusControl Build(this IBusFactory factory, IBusConfiguration busConfiguration, IEnumerable<ISpecification> dependencies)
{
return Build(factory, busConfiguration, factory.Validate()
.Concat(dependencies.SelectMany(x => x.Validate())));
}
public static IBusControl Build(this IBusFactory factory, IBusConfiguration busConfiguration)
{
return Build(factory, busConfiguration, factory.Validate());
}
static IBusControl Build(IBusFactory factory, IBusConfiguration busConfiguration, IEnumerable<ValidationResult> validationResult)
{
if (LogContext.Current == null)
LogContext.ConfigureCurrentLogContext();
busConfiguration.HostConfiguration.LogContext = LogContext.Current;
IReadOnlyList<ValidationResult> result = validationResult.ThrowIfContainsFailure("The bus configuration is invalid:");
try
{
var busReceiveEndpointConfiguration = factory.CreateBusEndpointConfiguration(x => x.ConfigureConsumeTopology = false);
var host = busConfiguration.HostConfiguration.Build();
var bus = new MassTransitBus(host, busConfiguration.BusObservers, busReceiveEndpointConfiguration);
busConfiguration.BusObservers.PostCreate(bus);
return bus;
}
catch (Exception ex)
{
busConfiguration.BusObservers.CreateFaulted(ex);
throw new ConfigurationException(result, "An exception occurred during bus creation", ex);
}
}
}
}
| BusFactoryExtensions |
csharp | MonoGame__MonoGame | build/Tasks.cs | {
"start": 240,
"end": 692
} | public sealed class ____ : FrostingTask<BuildContext> { }
[TaskName("Build Frameworks")]
[IsDependentOn(typeof(BuildNativeTask))]
[IsDependentOn(typeof(BuildDesktopVKTask))]
[IsDependentOn(typeof(BuildDesktopGLTask))]
[IsDependentOn(typeof(BuildWindowsDXTask))]
[IsDependentOn(typeof(BuildAndroidTask))]
[IsDependentOn(typeof(BuildiOSTask))]
[IsDependentOn(typeof(BuildContentPipelineTask))]
[IsDependentOn(typeof(BuildConsoleCheckTask))]
| BuildShadersTask |
csharp | AvaloniaUI__Avalonia | tests/Avalonia.Controls.UnitTests/ScrollViewerTests.cs | {
"start": 21700,
"end": 21975
} | private class ____ : Control
{
public Size MeasureSize { get; set; } = new Size(1000, 2000);
protected override Size MeasureOverride(Size availableSize)
{
return MeasureSize;
}
}
}
}
| TestContent |
csharp | dotnetcore__Util | src/Util.Templates.Razor/RazorEngineCore/IRazorEngineCompiledTemplate.cs | {
"start": 78,
"end": 419
} | public interface ____
{
void SaveToStream(Stream stream);
Task SaveToStreamAsync(Stream stream);
void SaveToFile(string fileName);
Task SaveToFileAsync(string fileName);
string Run(object model = null);
Task<string> RunAsync(object model = null);
} | IRazorEngineCompiledTemplate |
csharp | dotnet__aspnetcore | src/Http/Routing/src/EndpointRoutingMiddleware.cs | {
"start": 740,
"end": 13310
} | partial class ____
{
private const string DiagnosticsEndpointMatchedKey = "Microsoft.AspNetCore.Routing.EndpointMatched";
private readonly MatcherFactory _matcherFactory;
private readonly ILogger _logger;
private readonly EndpointDataSource _endpointDataSource;
private readonly DiagnosticListener _diagnosticListener;
private readonly RoutingMetrics _metrics;
private readonly RequestDelegate _next;
private readonly RouteOptions _routeOptions;
private Task<Matcher>? _initializationTask;
public EndpointRoutingMiddleware(
MatcherFactory matcherFactory,
ILogger<EndpointRoutingMiddleware> logger,
IEndpointRouteBuilder endpointRouteBuilder,
EndpointDataSource rootCompositeEndpointDataSource,
DiagnosticListener diagnosticListener,
IOptions<RouteOptions> routeOptions,
RoutingMetrics metrics,
RequestDelegate next)
{
ArgumentNullException.ThrowIfNull(endpointRouteBuilder);
_matcherFactory = matcherFactory ?? throw new ArgumentNullException(nameof(matcherFactory));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_diagnosticListener = diagnosticListener ?? throw new ArgumentNullException(nameof(diagnosticListener));
_metrics = metrics;
_next = next ?? throw new ArgumentNullException(nameof(next));
_routeOptions = routeOptions.Value;
// rootCompositeEndpointDataSource is a constructor parameter only so it always gets disposed by DI. This ensures that any
// disposable EndpointDataSources also get disposed. _endpointDataSource is a component of rootCompositeEndpointDataSource.
_ = rootCompositeEndpointDataSource;
_endpointDataSource = new CompositeEndpointDataSource(endpointRouteBuilder.DataSources);
}
public Task Invoke(HttpContext httpContext)
{
// There's already an endpoint, skip matching completely
var endpoint = httpContext.GetEndpoint();
if (endpoint != null)
{
Log.MatchSkipped(_logger, endpoint);
return _next(httpContext);
}
// There's an inherent race condition between waiting for init and accessing the matcher
// this is OK because once `_matcher` is initialized, it will not be set to null again.
var matcherTask = InitializeAsync();
if (!matcherTask.IsCompletedSuccessfully)
{
return AwaitMatcher(this, httpContext, matcherTask);
}
var matchTask = matcherTask.Result.MatchAsync(httpContext);
if (!matchTask.IsCompletedSuccessfully)
{
return AwaitMatch(this, httpContext, matchTask);
}
return SetRoutingAndContinue(httpContext);
// Awaited fallbacks for when the Tasks do not synchronously complete
static async Task AwaitMatcher(EndpointRoutingMiddleware middleware, HttpContext httpContext, Task<Matcher> matcherTask)
{
var matcher = await matcherTask;
await matcher.MatchAsync(httpContext);
await middleware.SetRoutingAndContinue(httpContext);
}
static async Task AwaitMatch(EndpointRoutingMiddleware middleware, HttpContext httpContext, Task matchTask)
{
await matchTask;
await middleware.SetRoutingAndContinue(httpContext);
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private Task SetRoutingAndContinue(HttpContext httpContext)
{
// If there was no mutation of the endpoint then log failure
var endpoint = httpContext.GetEndpoint();
if (endpoint == null)
{
Log.MatchFailure(_logger);
_metrics.MatchFailure();
}
else
{
// Raise an event if the route matched
if (_diagnosticListener.IsEnabled() && _diagnosticListener.IsEnabled(DiagnosticsEndpointMatchedKey))
{
Write(_diagnosticListener, httpContext);
}
if (_logger.IsEnabled(LogLevel.Debug) || _metrics.MatchSuccessCounterEnabled)
{
var isFallback = endpoint.Metadata.GetMetadata<FallbackMetadata>() is not null;
Log.MatchSuccess(_logger, endpoint);
if (isFallback)
{
Log.FallbackMatch(_logger, endpoint);
}
// It shouldn't be possible for a route to be matched via the route matcher and not have a route.
// Just in case, add a special (missing) value as the route tag to metrics.
var route = endpoint.Metadata.GetMetadata<IRouteDiagnosticsMetadata>()?.Route ?? "(missing)";
_metrics.MatchSuccess(route, isFallback);
}
// Map RequestSizeLimitMetadata to IHttpMaxRequestBodySizeFeature if present on the endpoint.
// We do this during endpoint routing to ensure that successive middlewares in the pipeline
// can access the feature with the correct value.
SetMaxRequestBodySize(httpContext);
var shortCircuitMetadata = endpoint.Metadata.GetMetadata<ShortCircuitMetadata>();
if (shortCircuitMetadata is not null)
{
return ExecuteShortCircuit(shortCircuitMetadata, endpoint, httpContext);
}
}
return _next(httpContext);
[UnconditionalSuppressMessage("ReflectionAnalysis", "IL2026:UnrecognizedReflectionPattern",
Justification = "The values being passed into Write are being consumed by the application already.")]
static void Write(DiagnosticListener diagnosticListener, HttpContext httpContext)
{
// We're just going to send the HttpContext since it has all of the relevant information
diagnosticListener.Write(DiagnosticsEndpointMatchedKey, httpContext);
}
}
private Task ExecuteShortCircuit(ShortCircuitMetadata shortCircuitMetadata, Endpoint endpoint, HttpContext httpContext)
{
// This check should be kept in sync with the one in EndpointMiddleware
if (!_routeOptions.SuppressCheckForUnhandledSecurityMetadata)
{
if (endpoint.Metadata.GetMetadata<IAuthorizeData>() is not null)
{
ThrowCannotShortCircuitAnAuthRouteException(endpoint);
}
if (endpoint.Metadata.GetMetadata<ICorsMetadata>() is not null)
{
ThrowCannotShortCircuitACorsRouteException(endpoint);
}
if (endpoint.Metadata.GetMetadata<IAntiforgeryMetadata>() is { RequiresValidation: true } &&
httpContext.Request.Method is {} method &&
HttpExtensions.IsValidHttpMethodForForm(method))
{
ThrowCannotShortCircuitAnAntiforgeryRouteException(endpoint);
}
}
if (shortCircuitMetadata.StatusCode.HasValue)
{
httpContext.Response.StatusCode = shortCircuitMetadata.StatusCode.Value;
}
if (endpoint.RequestDelegate is not null)
{
if (!_logger.IsEnabled(LogLevel.Information))
{
// Avoid the AwaitRequestTask state machine allocation if logging is disabled.
return endpoint.RequestDelegate(httpContext);
}
Log.ExecutingEndpoint(_logger, endpoint);
try
{
var requestTask = endpoint.RequestDelegate(httpContext);
if (!requestTask.IsCompletedSuccessfully)
{
return AwaitRequestTask(endpoint, requestTask, _logger);
}
}
catch
{
Log.ExecutedEndpoint(_logger, endpoint);
throw;
}
Log.ExecutedEndpoint(_logger, endpoint);
return Task.CompletedTask;
static async Task AwaitRequestTask(Endpoint endpoint, Task requestTask, ILogger logger)
{
try
{
await requestTask;
}
finally
{
Log.ExecutedEndpoint(logger, endpoint);
}
}
}
else
{
Log.ShortCircuitedEndpoint(_logger, endpoint);
}
return Task.CompletedTask;
}
// Initialization is async to avoid blocking threads while reflection and things
// of that nature take place.
//
// We've seen cases where startup is very slow if we allow multiple threads to race
// while initializing the set of endpoints/routes. Doing CPU intensive work is a
// blocking operation if you have a low core count and enough work to do.
private Task<Matcher> InitializeAsync()
{
var initializationTask = _initializationTask;
if (initializationTask != null)
{
return initializationTask;
}
return InitializeCoreAsync();
}
private Task<Matcher> InitializeCoreAsync()
{
var initialization = new TaskCompletionSource<Matcher>(TaskCreationOptions.RunContinuationsAsynchronously);
var initializationTask = Interlocked.CompareExchange(ref _initializationTask, initialization.Task, null);
if (initializationTask != null)
{
// This thread lost the race, join the existing task.
return initializationTask;
}
// This thread won the race, do the initialization.
try
{
var matcher = _matcherFactory.CreateMatcher(_endpointDataSource);
_initializationTask = Task.FromResult(matcher);
// Complete the task, this will unblock any requests that came in while initializing.
initialization.SetResult(matcher);
return initialization.Task;
}
catch (Exception ex)
{
// Allow initialization to occur again. Since DataSources can change, it's possible
// for the developer to correct the data causing the failure.
_initializationTask = null;
// Complete the task, this will throw for any requests that came in while initializing.
initialization.SetException(ex);
return initialization.Task;
}
}
private static void ThrowCannotShortCircuitAnAuthRouteException(Endpoint endpoint)
{
throw new InvalidOperationException($"Endpoint {endpoint.DisplayName} contains authorization metadata, " +
"but this endpoint is marked with short circuit and it will execute on Routing Middleware.");
}
private static void ThrowCannotShortCircuitACorsRouteException(Endpoint endpoint)
{
throw new InvalidOperationException($"Endpoint {endpoint.DisplayName} contains CORS metadata, " +
"but this endpoint is marked with short circuit and it will execute on Routing Middleware.");
}
private static void ThrowCannotShortCircuitAnAntiforgeryRouteException(Endpoint endpoint)
{
throw new InvalidOperationException($"Endpoint {endpoint.DisplayName} contains anti-forgery metadata, " +
"but this endpoint is marked with short circuit and it will execute on Routing Middleware.");
}
private void SetMaxRequestBodySize(HttpContext context)
{
var sizeLimitMetadata = context.GetEndpoint()?.Metadata?.GetMetadata<IRequestSizeLimitMetadata>();
if (sizeLimitMetadata == null)
{
Log.RequestSizeLimitMetadataNotFound(_logger);
return;
}
var maxRequestBodySizeFeature = context.Features.Get<IHttpMaxRequestBodySizeFeature>();
if (maxRequestBodySizeFeature == null)
{
Log.RequestSizeFeatureNotFound(_logger);
}
else if (maxRequestBodySizeFeature.IsReadOnly)
{
Log.RequestSizeFeatureIsReadOnly(_logger);
}
else
{
var maxRequestBodySize = sizeLimitMetadata.MaxRequestBodySize;
maxRequestBodySizeFeature.MaxRequestBodySize = maxRequestBodySize;
if (maxRequestBodySize.HasValue)
{
Log.MaxRequestBodySizeSet(_logger,
maxRequestBodySize.Value.ToString(CultureInfo.InvariantCulture));
}
else
{
Log.MaxRequestBodySizeDisabled(_logger);
}
}
}
private static | EndpointRoutingMiddleware |
csharp | Cysharp__UniTask | src/UniTask/Assets/Plugins/UniTask/Runtime/Linq/GroupBy.cs | {
"start": 191,
"end": 17412
} | partial class ____
{
// Ix-Async returns IGrouping but it is competely waste, use standard IGrouping.
public static IUniTaskAsyncEnumerable<IGrouping<TKey, TSource>> GroupBy<TSource, TKey>(this IUniTaskAsyncEnumerable<TSource> source, Func<TSource, TKey> keySelector)
{
Error.ThrowArgumentNullException(source, nameof(source));
Error.ThrowArgumentNullException(keySelector, nameof(keySelector));
return new GroupBy<TSource, TKey, TSource>(source, keySelector, x => x, EqualityComparer<TKey>.Default);
}
public static IUniTaskAsyncEnumerable<IGrouping<TKey, TSource>> GroupBy<TSource, TKey>(this IUniTaskAsyncEnumerable<TSource> source, Func<TSource, TKey> keySelector, IEqualityComparer<TKey> comparer)
{
Error.ThrowArgumentNullException(source, nameof(source));
Error.ThrowArgumentNullException(keySelector, nameof(keySelector));
Error.ThrowArgumentNullException(comparer, nameof(comparer));
return new GroupBy<TSource, TKey, TSource>(source, keySelector, x => x, comparer);
}
public static IUniTaskAsyncEnumerable<IGrouping<TKey, TElement>> GroupBy<TSource, TKey, TElement>(this IUniTaskAsyncEnumerable<TSource> source, Func<TSource, TKey> keySelector, Func<TSource, TElement> elementSelector)
{
Error.ThrowArgumentNullException(source, nameof(source));
Error.ThrowArgumentNullException(keySelector, nameof(keySelector));
Error.ThrowArgumentNullException(elementSelector, nameof(elementSelector));
return new GroupBy<TSource, TKey, TElement>(source, keySelector, elementSelector, EqualityComparer<TKey>.Default);
}
public static IUniTaskAsyncEnumerable<IGrouping<TKey, TElement>> GroupBy<TSource, TKey, TElement>(this IUniTaskAsyncEnumerable<TSource> source, Func<TSource, TKey> keySelector, Func<TSource, TElement> elementSelector, IEqualityComparer<TKey> comparer)
{
Error.ThrowArgumentNullException(source, nameof(source));
Error.ThrowArgumentNullException(keySelector, nameof(keySelector));
Error.ThrowArgumentNullException(elementSelector, nameof(elementSelector));
Error.ThrowArgumentNullException(comparer, nameof(comparer));
return new GroupBy<TSource, TKey, TElement>(source, keySelector, elementSelector, comparer);
}
public static IUniTaskAsyncEnumerable<TResult> GroupBy<TSource, TKey, TResult>(this IUniTaskAsyncEnumerable<TSource> source, Func<TSource, TKey> keySelector, Func<TKey, IEnumerable<TSource>, TResult> resultSelector)
{
Error.ThrowArgumentNullException(source, nameof(source));
Error.ThrowArgumentNullException(keySelector, nameof(keySelector));
Error.ThrowArgumentNullException(resultSelector, nameof(resultSelector));
return new GroupBy<TSource, TKey, TSource, TResult>(source, keySelector, x => x, resultSelector, EqualityComparer<TKey>.Default);
}
public static IUniTaskAsyncEnumerable<TResult> GroupBy<TSource, TKey, TResult>(this IUniTaskAsyncEnumerable<TSource> source, Func<TSource, TKey> keySelector, Func<TKey, IEnumerable<TSource>, TResult> resultSelector, IEqualityComparer<TKey> comparer)
{
Error.ThrowArgumentNullException(source, nameof(source));
Error.ThrowArgumentNullException(keySelector, nameof(keySelector));
Error.ThrowArgumentNullException(resultSelector, nameof(resultSelector));
Error.ThrowArgumentNullException(comparer, nameof(comparer));
return new GroupBy<TSource, TKey, TSource, TResult>(source, keySelector, x => x, resultSelector, comparer);
}
public static IUniTaskAsyncEnumerable<TResult> GroupBy<TSource, TKey, TElement, TResult>(this IUniTaskAsyncEnumerable<TSource> source, Func<TSource, TKey> keySelector, Func<TSource, TElement> elementSelector, Func<TKey, IEnumerable<TElement>, TResult> resultSelector)
{
Error.ThrowArgumentNullException(source, nameof(source));
Error.ThrowArgumentNullException(keySelector, nameof(keySelector));
Error.ThrowArgumentNullException(elementSelector, nameof(elementSelector));
Error.ThrowArgumentNullException(resultSelector, nameof(resultSelector));
return new GroupBy<TSource, TKey, TElement, TResult>(source, keySelector, elementSelector, resultSelector, EqualityComparer<TKey>.Default);
}
public static IUniTaskAsyncEnumerable<TResult> GroupBy<TSource, TKey, TElement, TResult>(this IUniTaskAsyncEnumerable<TSource> source, Func<TSource, TKey> keySelector, Func<TSource, TElement> elementSelector, Func<TKey, IEnumerable<TElement>, TResult> resultSelector, IEqualityComparer<TKey> comparer)
{
Error.ThrowArgumentNullException(source, nameof(source));
Error.ThrowArgumentNullException(keySelector, nameof(keySelector));
Error.ThrowArgumentNullException(elementSelector, nameof(elementSelector));
Error.ThrowArgumentNullException(resultSelector, nameof(resultSelector));
Error.ThrowArgumentNullException(comparer, nameof(comparer));
return new GroupBy<TSource, TKey, TElement, TResult>(source, keySelector, elementSelector, resultSelector, comparer);
}
// await
public static IUniTaskAsyncEnumerable<IGrouping<TKey, TSource>> GroupByAwait<TSource, TKey>(this IUniTaskAsyncEnumerable<TSource> source, Func<TSource, UniTask<TKey>> keySelector)
{
Error.ThrowArgumentNullException(source, nameof(source));
Error.ThrowArgumentNullException(keySelector, nameof(keySelector));
return new GroupByAwait<TSource, TKey, TSource>(source, keySelector, x => UniTask.FromResult(x), EqualityComparer<TKey>.Default);
}
public static IUniTaskAsyncEnumerable<IGrouping<TKey, TSource>> GroupByAwait<TSource, TKey>(this IUniTaskAsyncEnumerable<TSource> source, Func<TSource, UniTask<TKey>> keySelector, IEqualityComparer<TKey> comparer)
{
Error.ThrowArgumentNullException(source, nameof(source));
Error.ThrowArgumentNullException(keySelector, nameof(keySelector));
Error.ThrowArgumentNullException(comparer, nameof(comparer));
return new GroupByAwait<TSource, TKey, TSource>(source, keySelector, x => UniTask.FromResult(x), comparer);
}
public static IUniTaskAsyncEnumerable<IGrouping<TKey, TElement>> GroupByAwait<TSource, TKey, TElement>(this IUniTaskAsyncEnumerable<TSource> source, Func<TSource, UniTask<TKey>> keySelector, Func<TSource, UniTask<TElement>> elementSelector)
{
Error.ThrowArgumentNullException(source, nameof(source));
Error.ThrowArgumentNullException(keySelector, nameof(keySelector));
Error.ThrowArgumentNullException(elementSelector, nameof(elementSelector));
return new GroupByAwait<TSource, TKey, TElement>(source, keySelector, elementSelector, EqualityComparer<TKey>.Default);
}
public static IUniTaskAsyncEnumerable<IGrouping<TKey, TElement>> GroupByAwait<TSource, TKey, TElement>(this IUniTaskAsyncEnumerable<TSource> source, Func<TSource, UniTask<TKey>> keySelector, Func<TSource, UniTask<TElement>> elementSelector, IEqualityComparer<TKey> comparer)
{
Error.ThrowArgumentNullException(source, nameof(source));
Error.ThrowArgumentNullException(keySelector, nameof(keySelector));
Error.ThrowArgumentNullException(elementSelector, nameof(elementSelector));
Error.ThrowArgumentNullException(comparer, nameof(comparer));
return new GroupByAwait<TSource, TKey, TElement>(source, keySelector, elementSelector, comparer);
}
public static IUniTaskAsyncEnumerable<TResult> GroupByAwait<TSource, TKey, TResult>(this IUniTaskAsyncEnumerable<TSource> source, Func<TSource, UniTask<TKey>> keySelector, Func<TKey, IEnumerable<TSource>, UniTask<TResult>> resultSelector)
{
Error.ThrowArgumentNullException(source, nameof(source));
Error.ThrowArgumentNullException(keySelector, nameof(keySelector));
Error.ThrowArgumentNullException(resultSelector, nameof(resultSelector));
return new GroupByAwait<TSource, TKey, TSource, TResult>(source, keySelector, x => UniTask.FromResult(x), resultSelector, EqualityComparer<TKey>.Default);
}
public static IUniTaskAsyncEnumerable<TResult> GroupByAwait<TSource, TKey, TElement, TResult>(this IUniTaskAsyncEnumerable<TSource> source, Func<TSource, UniTask<TKey>> keySelector, Func<TSource, UniTask<TElement>> elementSelector, Func<TKey, IEnumerable<TElement>, UniTask<TResult>> resultSelector)
{
Error.ThrowArgumentNullException(source, nameof(source));
Error.ThrowArgumentNullException(keySelector, nameof(keySelector));
Error.ThrowArgumentNullException(elementSelector, nameof(elementSelector));
Error.ThrowArgumentNullException(resultSelector, nameof(resultSelector));
return new GroupByAwait<TSource, TKey, TElement, TResult>(source, keySelector, elementSelector, resultSelector, EqualityComparer<TKey>.Default);
}
public static IUniTaskAsyncEnumerable<TResult> GroupByAwait<TSource, TKey, TResult>(this IUniTaskAsyncEnumerable<TSource> source, Func<TSource, UniTask<TKey>> keySelector, Func<TKey, IEnumerable<TSource>, UniTask<TResult>> resultSelector, IEqualityComparer<TKey> comparer)
{
Error.ThrowArgumentNullException(source, nameof(source));
Error.ThrowArgumentNullException(keySelector, nameof(keySelector));
Error.ThrowArgumentNullException(resultSelector, nameof(resultSelector));
Error.ThrowArgumentNullException(comparer, nameof(comparer));
return new GroupByAwait<TSource, TKey, TSource, TResult>(source, keySelector, x => UniTask.FromResult(x), resultSelector, comparer);
}
public static IUniTaskAsyncEnumerable<TResult> GroupByAwait<TSource, TKey, TElement, TResult>(this IUniTaskAsyncEnumerable<TSource> source, Func<TSource, UniTask<TKey>> keySelector, Func<TSource, UniTask<TElement>> elementSelector, Func<TKey, IEnumerable<TElement>, UniTask<TResult>> resultSelector, IEqualityComparer<TKey> comparer)
{
Error.ThrowArgumentNullException(source, nameof(source));
Error.ThrowArgumentNullException(keySelector, nameof(keySelector));
Error.ThrowArgumentNullException(elementSelector, nameof(elementSelector));
Error.ThrowArgumentNullException(resultSelector, nameof(resultSelector));
Error.ThrowArgumentNullException(comparer, nameof(comparer));
return new GroupByAwait<TSource, TKey, TElement, TResult>(source, keySelector, elementSelector, resultSelector, comparer);
}
// with ct
public static IUniTaskAsyncEnumerable<IGrouping<TKey, TSource>> GroupByAwaitWithCancellation<TSource, TKey>(this IUniTaskAsyncEnumerable<TSource> source, Func<TSource, CancellationToken, UniTask<TKey>> keySelector)
{
Error.ThrowArgumentNullException(source, nameof(source));
Error.ThrowArgumentNullException(keySelector, nameof(keySelector));
return new GroupByAwaitWithCancellation<TSource, TKey, TSource>(source, keySelector, (x, _) => UniTask.FromResult(x), EqualityComparer<TKey>.Default);
}
public static IUniTaskAsyncEnumerable<IGrouping<TKey, TSource>> GroupByAwaitWithCancellation<TSource, TKey>(this IUniTaskAsyncEnumerable<TSource> source, Func<TSource, CancellationToken, UniTask<TKey>> keySelector, IEqualityComparer<TKey> comparer)
{
Error.ThrowArgumentNullException(source, nameof(source));
Error.ThrowArgumentNullException(keySelector, nameof(keySelector));
Error.ThrowArgumentNullException(comparer, nameof(comparer));
return new GroupByAwaitWithCancellation<TSource, TKey, TSource>(source, keySelector, (x, _) => UniTask.FromResult(x), comparer);
}
public static IUniTaskAsyncEnumerable<IGrouping<TKey, TElement>> GroupByAwaitWithCancellation<TSource, TKey, TElement>(this IUniTaskAsyncEnumerable<TSource> source, Func<TSource, CancellationToken, UniTask<TKey>> keySelector, Func<TSource, CancellationToken, UniTask<TElement>> elementSelector)
{
Error.ThrowArgumentNullException(source, nameof(source));
Error.ThrowArgumentNullException(keySelector, nameof(keySelector));
Error.ThrowArgumentNullException(elementSelector, nameof(elementSelector));
return new GroupByAwaitWithCancellation<TSource, TKey, TElement>(source, keySelector, elementSelector, EqualityComparer<TKey>.Default);
}
public static IUniTaskAsyncEnumerable<IGrouping<TKey, TElement>> GroupByAwaitWithCancellation<TSource, TKey, TElement>(this IUniTaskAsyncEnumerable<TSource> source, Func<TSource, CancellationToken, UniTask<TKey>> keySelector, Func<TSource, CancellationToken, UniTask<TElement>> elementSelector, IEqualityComparer<TKey> comparer)
{
Error.ThrowArgumentNullException(source, nameof(source));
Error.ThrowArgumentNullException(keySelector, nameof(keySelector));
Error.ThrowArgumentNullException(elementSelector, nameof(elementSelector));
Error.ThrowArgumentNullException(comparer, nameof(comparer));
return new GroupByAwaitWithCancellation<TSource, TKey, TElement>(source, keySelector, elementSelector, comparer);
}
public static IUniTaskAsyncEnumerable<TResult> GroupByAwaitWithCancellation<TSource, TKey, TResult>(this IUniTaskAsyncEnumerable<TSource> source, Func<TSource, CancellationToken, UniTask<TKey>> keySelector, Func<TKey, IEnumerable<TSource>, CancellationToken, UniTask<TResult>> resultSelector)
{
Error.ThrowArgumentNullException(source, nameof(source));
Error.ThrowArgumentNullException(keySelector, nameof(keySelector));
Error.ThrowArgumentNullException(resultSelector, nameof(resultSelector));
return new GroupByAwaitWithCancellation<TSource, TKey, TSource, TResult>(source, keySelector, (x, _) => UniTask.FromResult(x), resultSelector, EqualityComparer<TKey>.Default);
}
public static IUniTaskAsyncEnumerable<TResult> GroupByAwaitWithCancellation<TSource, TKey, TElement, TResult>(this IUniTaskAsyncEnumerable<TSource> source, Func<TSource, CancellationToken, UniTask<TKey>> keySelector, Func<TSource, CancellationToken, UniTask<TElement>> elementSelector, Func<TKey, IEnumerable<TElement>, CancellationToken, UniTask<TResult>> resultSelector)
{
Error.ThrowArgumentNullException(source, nameof(source));
Error.ThrowArgumentNullException(keySelector, nameof(keySelector));
Error.ThrowArgumentNullException(elementSelector, nameof(elementSelector));
Error.ThrowArgumentNullException(resultSelector, nameof(resultSelector));
return new GroupByAwaitWithCancellation<TSource, TKey, TElement, TResult>(source, keySelector, elementSelector, resultSelector, EqualityComparer<TKey>.Default);
}
public static IUniTaskAsyncEnumerable<TResult> GroupByAwaitWithCancellation<TSource, TKey, TResult>(this IUniTaskAsyncEnumerable<TSource> source, Func<TSource, CancellationToken, UniTask<TKey>> keySelector, Func<TKey, IEnumerable<TSource>, CancellationToken, UniTask<TResult>> resultSelector, IEqualityComparer<TKey> comparer)
{
Error.ThrowArgumentNullException(source, nameof(source));
Error.ThrowArgumentNullException(keySelector, nameof(keySelector));
Error.ThrowArgumentNullException(resultSelector, nameof(resultSelector));
Error.ThrowArgumentNullException(comparer, nameof(comparer));
return new GroupByAwaitWithCancellation<TSource, TKey, TSource, TResult>(source, keySelector, (x, _) => UniTask.FromResult(x), resultSelector, comparer);
}
public static IUniTaskAsyncEnumerable<TResult> GroupByAwaitWithCancellation<TSource, TKey, TElement, TResult>(this IUniTaskAsyncEnumerable<TSource> source, Func<TSource, CancellationToken, UniTask<TKey>> keySelector, Func<TSource, CancellationToken, UniTask<TElement>> elementSelector, Func<TKey, IEnumerable<TElement>, CancellationToken, UniTask<TResult>> resultSelector, IEqualityComparer<TKey> comparer)
{
Error.ThrowArgumentNullException(source, nameof(source));
Error.ThrowArgumentNullException(keySelector, nameof(keySelector));
Error.ThrowArgumentNullException(elementSelector, nameof(elementSelector));
Error.ThrowArgumentNullException(resultSelector, nameof(resultSelector));
Error.ThrowArgumentNullException(comparer, nameof(comparer));
return new GroupByAwaitWithCancellation<TSource, TKey, TElement, TResult>(source, keySelector, elementSelector, resultSelector, comparer);
}
}
| UniTaskAsyncEnumerable |
csharp | dotnet__aspnetcore | src/SignalR/server/SignalR/test/Microsoft.AspNetCore.SignalR.Tests/HubConnectionHandlerTestUtils/Hubs.cs | {
"start": 22450,
"end": 23490
} | public class ____<T> : ChannelReader<T>, IAsyncEnumerable<T>
{
private readonly ChannelReader<T> _inner;
public AsyncEnumerableImplChannelThrows(ChannelReader<T> inner)
{
_inner = inner;
}
public override bool TryRead(out T item)
{
// Not implemented to verify this is consumed as an IAsyncEnumerable<T> instead of a ChannelReader<T>.
throw new NotImplementedException();
}
public override ValueTask<bool> WaitToReadAsync(CancellationToken cancellationToken = default)
{
// Not implemented to verify this is consumed as an IAsyncEnumerable<T> instead of a ChannelReader<T>.
throw new NotImplementedException();
}
public IAsyncEnumerator<T> GetAsyncEnumerator(CancellationToken cancellationToken = default)
{
return new ChannelAsyncEnumerator(_inner, cancellationToken);
}
// Copied from AsyncEnumeratorAdapters
| AsyncEnumerableImplChannelThrows |
csharp | dotnet__aspnetcore | src/DataProtection/EntityFrameworkCore/test/DataProtectionKeyContext.cs | {
"start": 250,
"end": 502
} | class ____ : DbContext, IDataProtectionKeyContext
{
public DataProtectionKeyContext(DbContextOptions<DataProtectionKeyContext> options) : base(options) { }
public DbSet<DataProtectionKey> DataProtectionKeys { get; set; }
}
| DataProtectionKeyContext |
csharp | HangfireIO__Hangfire | tests/Hangfire.Core.Tests/Storage/MonitoringTypeFacts.cs | {
"start": 128,
"end": 1360
} | public class ____
{
[Fact]
public void EnqueuedJobDto_Ctor_SetsInEnqueuedState()
{
Assert.True(new EnqueuedJobDto().InEnqueuedState);
}
[Fact]
public void FailedJobDto_Ctor_SetsInFailedState()
{
Assert.True(new FailedJobDto().InFailedState);
}
[Fact]
public void ProcessingJobDto_Ctor_SetsInProcessingState()
{
Assert.True(new ProcessingJobDto().InProcessingState);
}
[Fact]
public void ScheduledJobDto_Ctor_SetsInScheduledState()
{
Assert.True(new ScheduledJobDto().InScheduledState);
}
[Fact]
public void SucceededJobDto_Ctor_SetsInSucceededState()
{
Assert.True(new SucceededJobDto().InSucceededState);
}
[Fact]
public void DeletedJobDto_Ctor_SetsInDeletedState()
{
Assert.True(new DeletedJobDto().InDeletedState);
}
[Fact]
public void JobList_Ctor_ShouldInitializeCollection()
{
var list = new JobList<int>(new Dictionary<string, int> { { "1", 2 } });
Assert.Single(list);
}
}
}
| MonitoringTypeFacts |
csharp | NLog__NLog | src/NLog.Targets.AtomicFile/AtomicFileTarget.cs | {
"start": 2169,
"end": 6696
} | public class ____ : FileTarget
{
/// <summary>
/// Gets or sets a value indicating whether concurrent writes to the log file by multiple processes on the same host.
/// </summary>
public bool ConcurrentWrites { get; set; } = true;
/// <inheritdoc />
protected override Stream CreateFileStream(string filePath, int bufferSize)
{
if (!ConcurrentWrites || !KeepFileOpen || ReplaceFileContentsOnEachWrite)
return base.CreateFileStream(filePath, bufferSize);
const int maxRetryCount = 5;
for (int i = 1; i <= maxRetryCount; ++i)
{
try
{
return CreateAtomicFileStream(filePath);
}
catch (DirectoryNotFoundException)
{
throw;
}
catch (IOException ex)
{
InternalLogger.Debug(ex, "{0}: Failed opening file: {1}", this, filePath);
if (i == maxRetryCount)
throw;
}
catch (Exception ex)
{
InternalLogger.Debug(ex, "{0}: Failed opening file: {1}", this, filePath);
if (i > 1)
throw;
}
}
throw new InvalidOperationException("Should not be reached.");
}
private Stream CreateAtomicFileStream(string filePath)
{
var fileShare = FileShare.ReadWrite;
if (EnableFileDelete)
fileShare |= FileShare.Delete;
#if NETFRAMEWORK
// https://blogs.msdn.microsoft.com/oldnewthing/20151127-00/?p=92211/
// https://msdn.microsoft.com/en-us/library/ff548289.aspx
// If only the FILE_APPEND_DATA and SYNCHRONIZE flags are set, the caller can write only to the end of the file,
// and any offset information about writes to the file is ignored.
// However, the file will automatically be extended as necessary for this type of write operation.
return new FileStream(
filePath,
FileMode.Append,
System.Security.AccessControl.FileSystemRights.AppendData | System.Security.AccessControl.FileSystemRights.Synchronize, // <- Atomic append
fileShare,
bufferSize: 1, // No internal buffer, write directly from user-buffer
FileOptions.None);
#else
#if !WINDOWS
if (!System.Runtime.InteropServices.RuntimeInformation.IsOSPlatform(System.Runtime.InteropServices.OSPlatform.Windows))
{
return CreateUnixStream(filePath);
}
#endif
var systemRights = System.Security.AccessControl.FileSystemRights.AppendData | System.Security.AccessControl.FileSystemRights.Synchronize;
return FileSystemAclExtensions.Create(
new FileInfo(filePath),
FileMode.Append,
systemRights,
fileShare,
bufferSize: 1, // No internal buffer, write directly from user-buffer
FileOptions.None,
fileSecurity: null);
#endif
}
#if !NETFRAMEWORK && !WINDOWS
private Stream CreateUnixStream(string filePath)
{
// Use 0666 (read/write for all)
var permissions = (Mono.Unix.Native.FilePermissions)(6 | (6 << 3) | (6 << 6));
var openFlags = Mono.Unix.Native.OpenFlags.O_CREAT | Mono.Unix.Native.OpenFlags.O_WRONLY | Mono.Unix.Native.OpenFlags.O_APPEND;
int fd = Mono.Unix.Native.Syscall.open(filePath, openFlags, permissions);
if (fd == -1 && Mono.Unix.Native.Stdlib.GetLastError() == Mono.Unix.Native.Errno.ENOENT && CreateDirs)
{
var dirName = Path.GetDirectoryName(filePath);
if (dirName != null && !Directory.Exists(dirName))
Directory.CreateDirectory(dirName);
fd = Mono.Unix.Native.Syscall.open(filePath, openFlags, permissions);
}
if (fd == -1)
Mono.Unix.UnixMarshal.ThrowExceptionForLastError();
try
{
return new Mono.Unix.UnixStream(fd, true);
}
catch
{
Mono.Unix.Native.Syscall.close(fd);
throw;
}
}
#endif
}
}
| AtomicFileTarget |
csharp | grandnode__grandnode2 | src/Tests/Grand.Business.Checkout.Tests/Services/Orders/OrderServiceTests.cs | {
"start": 277,
"end": 2502
} | public class ____
{
private Mock<IMediator> _mediatorMock;
private Mock<IRepository<OrderNote>> _orderNoteRepositoryMock;
private Mock<IRepository<Order>> _orderRepositoryMock;
private OrderService _service;
[TestInitialize]
public void Init()
{
_orderRepositoryMock = new Mock<IRepository<Order>>();
_orderNoteRepositoryMock = new Mock<IRepository<OrderNote>>();
_mediatorMock = new Mock<IMediator>();
_service = new OrderService(_orderRepositoryMock.Object, _orderNoteRepositoryMock.Object, _mediatorMock.Object);
}
[TestMethod]
public async Task UpdateOrder_InvokeExpectedMethods()
{
await _service.UpdateOrder(new Order());
_orderRepositoryMock.Verify(c => c.UpdateAsync(It.IsAny<Order>()), Times.Once);
_mediatorMock.Verify(c => c.Publish(It.IsAny<EntityUpdated<Order>>(), default), Times.Once);
}
[TestMethod]
public void UpdateOrder_NullArguments_ThrowException()
{
Assert.ThrowsExceptionAsync<ArgumentNullException>(async () => await _service.UpdateOrder(null));
}
[TestMethod]
public async Task InsertOrderNote_InvokeExpectedMethods()
{
await _service.InsertOrderNote(new OrderNote());
_orderNoteRepositoryMock.Verify(c => c.InsertAsync(It.IsAny<OrderNote>()), Times.Once);
_mediatorMock.Verify(c => c.Publish(It.IsAny<EntityInserted<OrderNote>>(), default), Times.Once);
}
[TestMethod]
public void InsertOrderNote_NullArguments_ThrowException()
{
Assert.ThrowsExceptionAsync<ArgumentNullException>(async () => await _service.InsertOrderNote(null));
}
[TestMethod]
public async Task DeleteOrderNote_InvokeExpectedMethods()
{
await _service.DeleteOrderNote(new OrderNote());
_orderNoteRepositoryMock.Verify(c => c.DeleteAsync(It.IsAny<OrderNote>()), Times.Once);
_mediatorMock.Verify(c => c.Publish(It.IsAny<EntityDeleted<OrderNote>>(), default), Times.Once);
}
[TestMethod]
public void DeleteOrderNote_NullArguments_ThrowException()
{
Assert.ThrowsExceptionAsync<ArgumentNullException>(async () => await _service.DeleteOrderNote(null));
}
} | OrderServiceTests |
csharp | dotnet__orleans | src/api/Orleans.TestingHost/Orleans.TestingHost.cs | {
"start": 51416,
"end": 52898
} | partial class ____ : global::Orleans.Runtime.TaskRequest
{
public global::Orleans.Runtime.GrainId arg0;
public System.Exception arg1;
public override void Dispose() { }
public override string GetActivityName() { throw null; }
public override object GetArgument(int index) { throw null; }
public override int GetArgumentCount() { throw null; }
public override string GetInterfaceName() { throw null; }
public override System.Type GetInterfaceType() { throw null; }
public override System.Reflection.MethodInfo GetMethod() { throw null; }
public override string GetMethodName() { throw null; }
public override object GetTarget() { throw null; }
protected override System.Threading.Tasks.Task InvokeInner() { throw null; }
public override void SetArgument(int index, object value) { }
public override void SetTarget(global::Orleans.Serialization.Invocation.ITargetHolder holder) { }
}
[System.CodeDom.Compiler.GeneratedCode("OrleansCodeGen", "9.0.0.0")]
[System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
[System.Diagnostics.CodeAnalysis.ExcludeFromCodeCoverage]
[global::Orleans.CompoundTypeAlias(new[] { "inv", typeof(global::Orleans.Runtime.GrainReference), typeof(global::Orleans.TestingHost.IStorageFaultGrain), "5D91E1AF" })]
public sealed | Invokable_IStorageFaultGrain_GrainReference_1A607A31 |
csharp | EventStore__EventStore | src/KurrentDB.Core/DataStructures/ProbabilisticFilter/FileStreamPersistence.cs | {
"start": 422,
"end": 11119
} | class ____ : IPersistenceStrategy {
protected static readonly ILogger Log = Serilog.Log.ForContext<FileStreamPersistence>();
// We synchronize access to _dirtyPageBitmap because
// Flush can be called at the same time as OnPageDirty (we dont pause writes into the
// bloom filter while it is flushing). And they both write to the bitmap (to unset and
// set bits respectively)
// This lock is often obtained while holding the write lock to the filter
// therefore don't make any non-trivial external calls while holding this lock
private readonly object _bitmapLock = new();
private readonly long _logicalFilterSize;
private readonly string _path;
private AlignedMemory _bloomFilterMemory;
private AlignedMemory _dirtyPageBitmap;
private bool _disposed;
public FileStreamPersistence(long size, string path, bool create) {
Ensure.NotNull(path, nameof(path));
_logicalFilterSize = size;
_path = path;
Create = create;
}
// when we want to open an existing filter and we don't mind what size it is
public static FileStreamPersistence FromFile(string path) {
Ensure.NotNull(path, nameof(path));
var header = ReadHeader(path);
return new FileStreamPersistence(
size: header.NumBits / 8,
path: path,
create: false);
}
public BloomFilterAccessor DataAccessor { get; private set; }
public bool Create { get; }
// ms to wait after each batch flush. allows breathing room for other writes to continue.
public int FlushBatchDelay { get; private set; }
// max number of pages to flush to disk in each batch
public long FlushBatchSize { get; private set; }
public void Init() {
DataAccessor = new BloomFilterAccessor(
logicalFilterSize: _logicalFilterSize,
cacheLineSize: BloomFilterIntegrity.CacheLineSize,
hashSize: BloomFilterIntegrity.HashSize,
pageSize: BloomFilterIntegrity.PageSize,
onPageDirty: OnPageDirty,
log: Log);
// instead of flushing all the dirty pages as fast as we can, we flush them in batches
// with a sleep between each flush to allow other writes to proceed.
// we could consider making these configurable but we have set defaults as follows
// based on 8 KiB pages:
// 1. sleep 128ms between batches to gives a good chunk of time for other writes
// 2. calculate how many pages to flush per batch in order to give no more than 60s
// of sleep total. if the whole filter is dirty, total sleep will add up to 60s.
// this will yield bigger batches for bigger filters, but they ought also to be running
// on faster disks.
//
// For default filter this yields a batch size of 96 pages, 0.75 MiB
// For max size filter this yield a batch size of 1120 pages, 8.75 MiB
// For PTables up to 24gb this yields a batch size of 96 pages.
FlushBatchDelay = 128;
FlushBatchSize = DataAccessor.NumPages * FlushBatchDelay / 60_000;
FlushBatchSize = Math.Max(FlushBatchSize, 96);
FlushBatchSize = FlushBatchSize.RoundUpToMultipleOf(32);
// dirtypages: one bit per page, but pad to the nearest cacheline boundary
var numBits = DataAccessor.NumPages;
var numBitsPadded = numBits.RoundUpToMultipleOf(BloomFilterIntegrity.CacheLineSize * 8);
_dirtyPageBitmap = new AlignedMemory(
size: numBitsPadded / 8,
alignTo: BloomFilterIntegrity.CacheLineSize);
_dirtyPageBitmap.AsSpan().Clear(); // alignedmemory isn't initialized otherwise
// main filter:
_bloomFilterMemory = new AlignedMemory(
size: new IntPtr(DataAccessor.FileSize),
alignTo: BloomFilterIntegrity.CacheLineSize);
DataAccessor.Pointer = _bloomFilterMemory.Pointer;
// initialize the aligned memory
if (Create) {
DataAccessor.FillWithZeros();
} else {
// load the whole filter into memory for rapid access
BulkLoadExisting();
}
}
private void BulkLoadExisting() {
Log.Information(
"Reading persisted bloom filter {path} of size {size:N0} bytes into memory...",
_path,
DataAccessor.FileSize);
var sw = Stopwatch.StartNew();
using var bulkFileStream = new FileStream(
_path,
FileMode.Open,
FileAccess.Read,
FileShare.ReadWrite,
bufferSize: 65_536,
// consider if we should do something similar to chunk file reduce file cache pressure?
options: FileOptions.SequentialScan);
if (bulkFileStream.Length != DataAccessor.FileSize)
throw new SizeMismatchException(
$"The expected file size ({DataAccessor.FileSize:N0}) does not match " +
$"the actual file size ({bulkFileStream.Length:N0}) of file {_path}");
// linux only reads 2147479552 at a time (4095 bytes less than intmax)
var blockSize = int.MaxValue / 2;
var bytesToRead = DataAccessor.FileSize;
var bytesRead = 0L;
while (bytesToRead > 0) {
var bytesToReadInBlock = bytesToRead > blockSize
? blockSize
: (int)bytesToRead;
// consider reading in buffer size blocks. or using random access in net6
var read = bulkFileStream.Read(new Span<byte>(DataAccessor.Pointer + bytesRead, bytesToReadInBlock));
if (read != bytesToReadInBlock)
throw new Exception($"Read fewer bytes ({read}) from bloom filter ({_path}) than expected ({bytesToReadInBlock})");
bytesRead += bytesToReadInBlock;
bytesToRead -= bytesToReadInBlock;
}
var elapsed = sw.Elapsed;
var fileSizeMb = DataAccessor.FileSize / 1000 / 1000;
var megaBytesPerSecond = fileSizeMb / elapsed.TotalSeconds;
Log.Information(
"Read persisted bloom filter {path} into memory. Took {elapsed}. {megaBytesPerSecond:N2} MB/s",
_path,
elapsed,
megaBytesPerSecond);
}
private void OnPageDirty(long pageNumber) {
lock (_bitmapLock) {
ThrowIfDisposed();
var byteIndex = (int)(pageNumber / 8);
var bitIndex = pageNumber % 8;
ref var byteValue = ref _dirtyPageBitmap.AsSpan()[byteIndex];
byteValue = byteValue.SetBit(bitIndex);
}
}
public void Flush() {
using var fileStream = new FileStream(
_path,
FileMode.OpenOrCreate,
FileAccess.ReadWrite,
FileShare.ReadWrite,
bufferSize: DataAccessor.PageSize);
fileStream.SetLength(DataAccessor.FileSize);
Span<byte> localCacheLine = stackalloc byte[BloomFilterIntegrity.CacheLineSize];
localCacheLine.Clear();
var pageNumber = 0L;
var flushedPages = 0L;
var pauses = 0;
var activelyFlushing = Stopwatch.StartNew();
// consider Interlocked.Or/And into/outof the dirty page map. could remove need for lock
for (
var remaining = _dirtyPageBitmap.AsSpan();
!remaining.IsEmpty;
remaining = remaining[BloomFilterIntegrity.CacheLineSize..]
) {
lock (_bitmapLock) {
ThrowIfDisposed();
var cacheLine = remaining[..BloomFilterIntegrity.CacheLineSize];
cacheLine.CopyTo(localCacheLine);
cacheLine.Clear();
}
foreach (var @byte in localCacheLine) {
//we could skip based on 0L without checking each bit/byte
for (var bitOffset = 0; bitOffset < 8; bitOffset++) {
if (@byte.IsBitSet(bitOffset)) {
WritePage(pageNumber, fileStream);
flushedPages++;
// could be an unnecessary delay at the end
if (flushedPages % FlushBatchSize == 0) {
fileStream.FlushToDisk();
activelyFlushing.Stop();
pauses++;
Thread.Sleep(FlushBatchDelay);
activelyFlushing.Start();
}
}
pageNumber++;
if (pageNumber == DataAccessor.NumPages)
goto Done;
}
}
}
Done:
fileStream.FlushToDisk();
activelyFlushing.Stop();
var flushedBytes = flushedPages * DataAccessor.PageSize;
var flushedMegaBytes = (float)flushedBytes / 1000 / 1000;
var activeFlushRateMBperS = flushedMegaBytes / activelyFlushing.Elapsed.TotalSeconds;
Log.Verbose(
"Flushed {pages:N0} pages out of {totalPages:N0}. {bytes:N0} bytes. " +
"Delay {delay} ms per batch. Total delay {totalDelay:N0} ms. " +
"Actively flushing: {activeFlushTime} {activeFlushRate:N2} MB/s. ",
flushedPages, DataAccessor.NumPages, flushedBytes,
FlushBatchDelay, FlushBatchDelay * pauses,
activelyFlushing.Elapsed, activeFlushRateMBperS);
}
private void WritePage(long pageNumber, FileStream fileStream) {
var (fileOffset, pageSize) = DataAccessor.GetPagePositionInFile(pageNumber);
fileStream.Seek(offset: fileOffset, SeekOrigin.Begin);
fileStream.Write(DataAccessor.ReadBytes(fileOffset, pageSize));
}
// todo later: maybe could be a common implementation across the strategies that reads
// from the DataAccessor
public Header ReadHeader() => ReadHeader(_path);
private static Header ReadHeader(string path) {
using var fileStream = new FileStream(
path, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
try {
//read the version first
fileStream.Seek(offset: 0, SeekOrigin.Begin);
byte version = (byte)fileStream.ReadByte();
if (version != Header.CurrentVersion) {
throw new CorruptedFileException($"Unsupported version: {version}");
}
//then the full header
var headerBytes = new byte[Header.Size].AsSpan();
fileStream.Seek(offset: 0, SeekOrigin.Begin);
var read = fileStream.Read(headerBytes);
if (read != Header.Size) {
throw new CorruptedFileException(
$"File header size ({read} bytes) does not match expected header size ({Header.Size} bytes)");
}
return MemoryMarshal.AsRef<Header>(headerBytes);
} catch (Exception exc) when (exc is not CorruptedFileException) {
throw new CorruptedFileException("Failed to read the header", exc);
}
}
public void WriteHeader(Header header) {
Log.Information("Writing header and expanding file...");
using var fileStream = new FileStream(
_path, FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.ReadWrite);
var span = MemoryMarshal.CreateReadOnlySpan(ref header, 1);
var headerBytes = MemoryMarshal.Cast<Header, byte>(span);
// this doesn't technically guarantee that the file is zeroed, but we expect file systems
// that could reasonably be running eventstore to do this for us because giving us other
// peoples data would be a security problem. if some filesystem does not to this the filter
// will be recognised as corrupt the next time it is opened.
fileStream.SetLength(DataAccessor.FileSize);
fileStream.Seek(offset: 0, SeekOrigin.Begin);
fileStream.Write(headerBytes);
fileStream.Seek(-1, SeekOrigin.End);
fileStream.WriteByte(0);
fileStream.FlushToDisk();
Log.Information("Wrote header and expanded file");
}
private void ThrowIfDisposed() {
if (_disposed) {
throw new ObjectDisposedException(nameof(FileStreamPersistence));
}
}
public void Dispose() {
lock (_bitmapLock) {
if (_disposed)
return;
_disposed = true;
if (DataAccessor is not null)
DataAccessor.Pointer = default;
_bloomFilterMemory?.Dispose();
_dirtyPageBitmap?.Dispose();
}
}
}
| FileStreamPersistence |
csharp | MaterialDesignInXAML__MaterialDesignInXamlToolkit | tests/MaterialDesignThemes.UITests/Samples/AutoSuggestBoxes/AutoSuggestTextBoxWithCollectionView.xaml.cs | {
"start": 303,
"end": 534
} | public partial class ____
{
public AutoSuggestTextBoxWithCollectionView()
{
DataContext = new AutoSuggestTextBoxWithCollectionViewViewModel();
InitializeComponent();
}
}
| AutoSuggestTextBoxWithCollectionView |
csharp | dotnet__efcore | src/EFCore/Diagnostics/TwoPropertyBaseCollectionsEventData.cs | {
"start": 504,
"end": 1820
} | public class ____ : EventData
{
/// <summary>
/// Constructs the event payload.
/// </summary>
/// <param name="eventDefinition">The event definition.</param>
/// <param name="messageGenerator">A delegate that generates a log message for this event.</param>
/// <param name="firstPropertyCollection">The first property collection.</param>
/// <param name="secondPropertyCollection">The second property collection.</param>
public TwoPropertyBaseCollectionsEventData(
EventDefinitionBase eventDefinition,
Func<EventDefinitionBase, EventData, string> messageGenerator,
IReadOnlyList<IReadOnlyPropertyBase> firstPropertyCollection,
IReadOnlyList<IReadOnlyPropertyBase> secondPropertyCollection)
: base(eventDefinition, messageGenerator)
{
FirstPropertyCollection = firstPropertyCollection;
SecondPropertyCollection = secondPropertyCollection;
}
/// <summary>
/// The first property collection.
/// </summary>
public virtual IReadOnlyList<IReadOnlyPropertyBase> FirstPropertyCollection { get; }
/// <summary>
/// The second property collection.
/// </summary>
public virtual IReadOnlyList<IReadOnlyPropertyBase> SecondPropertyCollection { get; }
}
| TwoPropertyBaseCollectionsEventData |
csharp | OrchardCMS__OrchardCore | src/OrchardCore/OrchardCore.Infrastructure/Scripting/Files/FilesScriptEngine.cs | {
"start": 126,
"end": 2183
} | public class ____ : IScriptingEngine
{
public string Prefix => "file";
public IScriptingScope CreateScope(IEnumerable<GlobalMethod> methods, IServiceProvider serviceProvider, IFileProvider fileProvider, string basePath)
{
return new FilesScriptScope(fileProvider, basePath);
}
public object Evaluate(IScriptingScope scope, string script)
{
ArgumentNullException.ThrowIfNull(scope);
if (scope is not FilesScriptScope fileScope)
{
throw new ArgumentException($"Expected a scope of type {nameof(FilesScriptScope)}", nameof(scope));
}
if (script.StartsWith("text('", StringComparison.Ordinal) && script.EndsWith("')", StringComparison.Ordinal))
{
var filePath = script[6..^2];
var fileInfo = fileScope.FileProvider.GetRelativeFileInfo(fileScope.BasePath, filePath);
if (!fileInfo.Exists)
{
throw new FileNotFoundException(filePath);
}
using var fileStream = fileInfo.CreateReadStream();
using var streamReader = new StreamReader(fileStream);
return streamReader.ReadToEnd();
}
else if (script.StartsWith("base64('", StringComparison.Ordinal) && script.EndsWith("')", StringComparison.Ordinal))
{
var filePath = script[8..^2];
var fileInfo = fileScope.FileProvider.GetRelativeFileInfo(fileScope.BasePath, filePath);
if (!fileInfo.Exists)
{
throw new FileNotFoundException(filePath);
}
using var fileStream = fileInfo.CreateReadStream();
using var memoryStream = MemoryStreamFactory.GetStream();
memoryStream.WriteTo(fileStream);
memoryStream.Seek(0, SeekOrigin.Begin);
return Convert.ToBase64String(memoryStream.GetBuffer(), 0, (int)memoryStream.Length);
}
else
{
throw new ArgumentException($"Unknown command '{script}'");
}
}
}
| FilesScriptEngine |
csharp | unoplatform__uno | src/Uno.UWP/Generated/3.0.0.0/Windows.Devices.PointOfService.Provider/BarcodeScannerStartSoftwareTriggerRequest.cs | {
"start": 314,
"end": 3274
} | public partial class ____
{
#if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__
internal BarcodeScannerStartSoftwareTriggerRequest()
{
}
#endif
#if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__
[global::Uno.NotImplemented("__ANDROID__", "__IOS__", "__TVOS__", "IS_UNIT_TESTS", "__WASM__", "__SKIA__", "__NETSTD_REFERENCE__")]
public global::Windows.Foundation.IAsyncAction ReportCompletedAsync()
{
throw new global::System.NotImplementedException("The member IAsyncAction BarcodeScannerStartSoftwareTriggerRequest.ReportCompletedAsync() is not implemented. For more information, visit https://aka.platform.uno/notimplemented#m=IAsyncAction%20BarcodeScannerStartSoftwareTriggerRequest.ReportCompletedAsync%28%29");
}
#endif
#if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__
[global::Uno.NotImplemented("__ANDROID__", "__IOS__", "__TVOS__", "IS_UNIT_TESTS", "__WASM__", "__SKIA__", "__NETSTD_REFERENCE__")]
public global::Windows.Foundation.IAsyncAction ReportFailedAsync()
{
throw new global::System.NotImplementedException("The member IAsyncAction BarcodeScannerStartSoftwareTriggerRequest.ReportFailedAsync() is not implemented. For more information, visit https://aka.platform.uno/notimplemented#m=IAsyncAction%20BarcodeScannerStartSoftwareTriggerRequest.ReportFailedAsync%28%29");
}
#endif
#if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__
[global::Uno.NotImplemented("__ANDROID__", "__IOS__", "__TVOS__", "IS_UNIT_TESTS", "__WASM__", "__SKIA__", "__NETSTD_REFERENCE__")]
public global::Windows.Foundation.IAsyncAction ReportFailedAsync(int reason)
{
throw new global::System.NotImplementedException("The member IAsyncAction BarcodeScannerStartSoftwareTriggerRequest.ReportFailedAsync(int reason) is not implemented. For more information, visit https://aka.platform.uno/notimplemented#m=IAsyncAction%20BarcodeScannerStartSoftwareTriggerRequest.ReportFailedAsync%28int%20reason%29");
}
#endif
#if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__
[global::Uno.NotImplemented("__ANDROID__", "__IOS__", "__TVOS__", "IS_UNIT_TESTS", "__WASM__", "__SKIA__", "__NETSTD_REFERENCE__")]
public global::Windows.Foundation.IAsyncAction ReportFailedAsync(int reason, string failedReasonDescription)
{
throw new global::System.NotImplementedException("The member IAsyncAction BarcodeScannerStartSoftwareTriggerRequest.ReportFailedAsync(int reason, string failedReasonDescription) is not implemented. For more information, visit https://aka.platform.uno/notimplemented#m=IAsyncAction%20BarcodeScannerStartSoftwareTriggerRequest.ReportFailedAsync%28int%20reason%2C%20string%20failedReasonDescription%29");
}
#endif
}
}
| BarcodeScannerStartSoftwareTriggerRequest |
csharp | AvaloniaUI__Avalonia | samples/ControlCatalog/Pages/CustomDrawingExampleControl.cs | {
"start": 277,
"end": 8922
} | public class ____ : Control
{
private Point _cursorPoint;
public static readonly StyledProperty<double> ScaleProperty = AvaloniaProperty.Register<CustomDrawingExampleControl, double>(nameof(Scale), 1.0d);
public double Scale { get => GetValue(ScaleProperty); set => SetValue(ScaleProperty, value); }
public static readonly StyledProperty<double> RotationProperty = AvaloniaProperty.Register<CustomDrawingExampleControl, double>(nameof(Rotation),
coerce: (_, val) => val % (Math.PI * 2));
/// <summary>
/// Rotation, measured in Radians!
/// </summary>
public double Rotation
{
get => GetValue(RotationProperty);
set => SetValue(RotationProperty, value);
}
public static readonly StyledProperty<double> ViewportCenterYProperty = AvaloniaProperty.Register<CustomDrawingExampleControl, double>(nameof(ViewportCenterY), 0.0d);
public double ViewportCenterY { get => GetValue(ViewportCenterYProperty); set => SetValue(ViewportCenterYProperty, value); }
public static readonly StyledProperty<double> ViewportCenterXProperty = AvaloniaProperty.Register<CustomDrawingExampleControl, double>(nameof(ViewportCenterX), 0.0d);
public double ViewportCenterX { get => GetValue(ViewportCenterXProperty); set => SetValue(ViewportCenterXProperty, value); }
private IPen _pen;
private System.Diagnostics.Stopwatch _timeKeeper = System.Diagnostics.Stopwatch.StartNew();
private bool _isPointerCaptured = false;
public CustomDrawingExampleControl()
{
_pen = new Pen(new SolidColorBrush(Colors.Black), lineCap: PenLineCap.Round);
var _arc = new ArcSegment()
{
IsLargeArc = false,
Point = new Point(0, 0),
RotationAngle = 0,
Size = new Size(25, 25),
SweepDirection = SweepDirection.Clockwise,
};
StreamGeometry sg = new StreamGeometry();
using (var cntx = sg.Open())
{
cntx.BeginFigure(new Point(-25.0d, -10.0d), false);
cntx.ArcTo(new Point(25.0d, -10.0d), new Size(10.0d, 10.0d), 0.0d, false, SweepDirection.Clockwise);
cntx.EndFigure(true);
}
_smileGeometry = sg.Clone();
}
private Geometry _smileGeometry;
protected override void OnPointerMoved(PointerEventArgs e)
{
base.OnPointerMoved(e);
Point previousPoint = _cursorPoint;
_cursorPoint = e.GetPosition(this);
if (_isPointerCaptured)
{
Point oldWorldPoint = UIPointToWorldPoint(previousPoint, ViewportCenterX, ViewportCenterY, Scale, Rotation);
Point newWorldPoint = UIPointToWorldPoint(_cursorPoint, ViewportCenterX, ViewportCenterY, Scale, Rotation);
Vector diff = newWorldPoint - oldWorldPoint;
ViewportCenterX -= diff.X;
ViewportCenterY -= diff.Y;
}
}
protected override void OnPointerPressed(PointerPressedEventArgs e)
{
e.Handled = true;
e.Pointer.Capture(this);
_isPointerCaptured = true;
base.OnPointerPressed(e);
}
protected override void OnPointerWheelChanged(PointerWheelEventArgs e)
{
base.OnPointerWheelChanged(e);
var oldScale = Scale;
Scale *= (1.0d + e.Delta.Y / 12.0d);
Point oldWorldPoint = UIPointToWorldPoint(_cursorPoint, ViewportCenterX, ViewportCenterY, oldScale, Rotation);
Point newWorldPoint = UIPointToWorldPoint(_cursorPoint, ViewportCenterX, ViewportCenterY, Scale, Rotation);
Vector diff = newWorldPoint - oldWorldPoint;
ViewportCenterX -= diff.X;
ViewportCenterY -= diff.Y;
}
protected override void OnPointerReleased(PointerReleasedEventArgs e)
{
e.Pointer.Capture(null);
_isPointerCaptured = false;
base.OnPointerReleased(e);
}
public override void Render(DrawingContext context)
{
var localBounds = new Rect(new Size(this.Bounds.Width, this.Bounds.Height));
var clip = context.PushClip(this.Bounds);
context.DrawRectangle(Brushes.White, _pen, localBounds, 1.0d);
var halfMax = Math.Max(this.Bounds.Width / 2.0d, this.Bounds.Height / 2.0d) * Math.Sqrt(2.0d);
var halfMin = Math.Min(this.Bounds.Width / 2.0d, this.Bounds.Height / 2.0d) / 1.3d;
var halfWidth = this.Bounds.Width / 2.0d;
var halfHeight = this.Bounds.Height / 2.0d;
// 0,0 refers to the top-left of the control now. It is not prime time to draw gui stuff because it'll be under the world
var translateModifier = context.PushTransform(Avalonia.Matrix.CreateTranslation(new Avalonia.Vector(halfWidth, halfHeight)));
// now 0,0 refers to the ViewportCenter(X,Y).
var rotationMatrix = Avalonia.Matrix.CreateRotation(Rotation);
var rotationModifier = context.PushTransform(rotationMatrix);
// everything is rotated but not scaled
var scaleModifier = context.PushTransform(Avalonia.Matrix.CreateScale(Scale, -Scale));
var mapPositionModifier = context.PushTransform(Matrix.CreateTranslation(new Vector(-ViewportCenterX, -ViewportCenterY)));
// now everything is rotated and scaled, and at the right position, now we're drawing strictly in world coordinates
context.DrawEllipse(Brushes.White, _pen, new Point(0.0d, 0.0d), 50.0d, 50.0d);
context.DrawLine(_pen, new Point(-25.0d, -5.0d), new Point(-25.0d, 15.0d));
context.DrawLine(_pen, new Point(25.0d, -5.0d), new Point(25.0d, 15.0d));
context.DrawGeometry(null, _pen, _smileGeometry);
Point cursorInWorldPoint = UIPointToWorldPoint(_cursorPoint, ViewportCenterX, ViewportCenterY, Scale, Rotation);
context.DrawEllipse(Brushes.Gray, _pen, cursorInWorldPoint, 20.0d, 20.0d);
for (int i = 0; i < 10; i++)
{
double orbitRadius = i * 100 + 200;
var orbitInput = ((_timeKeeper.Elapsed.TotalMilliseconds + 987654d) / orbitRadius) / 10.0d;
if (i % 3 == 0)
orbitInput *= -1;
Point orbitPosition = new Point(Math.Sin(orbitInput) * orbitRadius, Math.Cos(orbitInput) * orbitRadius);
context.DrawEllipse(Brushes.Gray, _pen, orbitPosition, 20.0d, 20.0d);
}
// end drawing the world
mapPositionModifier.Dispose();
scaleModifier.Dispose();
rotationModifier.Dispose();
translateModifier.Dispose();
// this is prime time to draw gui stuff
context.DrawLine(_pen, _cursorPoint + new Vector(-20, 0), _cursorPoint + new Vector(20, 0));
context.DrawLine(_pen, _cursorPoint + new Vector(0, -20), _cursorPoint + new Vector(0, 20));
clip.Dispose();
// oh and draw again when you can, no rush, right?
Dispatcher.UIThread.Post(InvalidateVisual, DispatcherPriority.Background);
}
private Point UIPointToWorldPoint(Point inPoint, double viewportCenterX, double viewportCenterY, double scale, double rotation)
{
Point workingPoint = new Point(inPoint.X, -inPoint.Y);
workingPoint += new Vector(-this.Bounds.Width / 2.0d, this.Bounds.Height / 2.0d);
workingPoint /= scale;
workingPoint = Matrix.CreateRotation(rotation).Transform(workingPoint);
workingPoint += new Vector(viewportCenterX, viewportCenterY);
return workingPoint;
}
private Point WorldPointToUIPoint(Point inPoint, double viewportCenterX, double viewportCenterY, double scale, double rotation)
{
Point workingPoint = new Point(inPoint.X, inPoint.Y);
workingPoint -= new Vector(viewportCenterX, viewportCenterY);
// undo rotation
workingPoint = Matrix.CreateRotation(-rotation).Transform(workingPoint);
workingPoint *= scale;
workingPoint -= new Vector(-this.Bounds.Width / 2.0d, this.Bounds.Height / 2.0d);
workingPoint = new Point(workingPoint.X, -workingPoint.Y);
return workingPoint;
}
}
}
| CustomDrawingExampleControl |
csharp | dotnet__extensions | test/Libraries/Microsoft.Extensions.Options.ContextualOptions.Tests/AcceptanceTests.cs | {
"start": 762,
"end": 934
} | internal class ____
{
public string TemperatureScale { get; set; } = "Celsius"; // Celsius or Fahrenheit
public int ForecastDays { get; set; }
}
| WeatherForecastOptions |
csharp | cake-build__cake | src/Cake.Core.Tests/Unit/Diagnostics/Formatting/PropertyTokenTests.cs | {
"start": 182,
"end": 1707
} | public sealed class ____
{
[Fact]
public void Should_Throw_FormatException_When_Index_And_Args_Are_Mismatched()
{
// Given
var token = new PropertyToken(1, null);
// When
var ex = Record.Exception(() => token.Render(new object[] { "test" }));
// Then
Assert.IsType<FormatException>(ex);
Assert.Equal("Index (zero based) must be greater than or equal to zero and less than the size of the argument list.", ex.Message);
}
[Fact]
public void Should_Format_Argument_According_To_Formatting_Rules()
{
// Given
var token = new PropertyToken(0, "B");
// When
var result = token.Render(new object[] { new Guid("d6ed7358ef9645bf9245864025de28fa") });
// Then
Assert.Equal("{d6ed7358-ef96-45bf-9245-864025de28fa}", result);
}
[Fact]
public void Should_Format_Argument_As_String_When_No_Formatting_Rules_Specified()
{
// Given
var token = new PropertyToken(0, null);
// When
var result = token.Render(new object[] { new Guid("{d6ed7358-ef96-45bf-9245-864025de28fa}") });
// Then
Assert.Equal("d6ed7358-ef96-45bf-9245-864025de28fa", result);
}
}
}
}
| TheRenderMethod |
csharp | grandnode__grandnode2 | src/Web/Grand.Web.Common/Extensions/HttpContextExtensions.cs | {
"start": 112,
"end": 314
} | public static class ____
{
public static string GetStoreCookie(this HttpContext httpContext)
{
return httpContext?.Request.Cookies[CommonHelper.StoreCookieName];
}
} | HttpContextExtensions |
csharp | ardalis__Specification | tests/Ardalis.Specification.Tests/SpecificationTests.cs | {
"start": 5819,
"end": 6105
} | private class ____<T>
{
[System.Runtime.CompilerServices.UnsafeAccessor(System.Runtime.CompilerServices.UnsafeAccessorKind.Field, Name = "_items")]
public static extern ref Dictionary<string, object>? ItemsFieldOf(Specification<Customer> @this);
}
#endif
}
| Accessors |
csharp | dotnet__orleans | src/Orleans.Streaming/LoadShedQueueFlowController.cs | {
"start": 296,
"end": 3757
} | public class ____ : IQueueFlowController
{
private readonly LoadSheddingOptions options;
private readonly double loadSheddingLimit;
private readonly IEnvironmentStatisticsProvider environmentStatisticsProvider;
/// <summary>
/// Creates a flow controller triggered when the CPU reaches a percentage of the cluster load shedding limit.
/// This is intended to reduce queue read rate prior to causing the silo to shed load.
/// Note: Triggered only when load shedding is enabled.
/// </summary>
/// <param name="options">The silo statistics options.</param>
/// <param name="percentOfSiloSheddingLimit">Percentage of load shed limit which triggers a reduction of queue read rate.</param>
/// <param name="environmentStatisticsProvider">The silo environment statistics.</param>
/// <returns>The flow controller.</returns>
public static IQueueFlowController CreateAsPercentOfLoadSheddingLimit(LoadSheddingOptions options, IEnvironmentStatisticsProvider environmentStatisticsProvider, int percentOfSiloSheddingLimit = LoadSheddingOptions.DefaultCpuThreshold)
{
if (percentOfSiloSheddingLimit < 0.0 || percentOfSiloSheddingLimit > 100.0) throw new ArgumentOutOfRangeException(nameof(percentOfSiloSheddingLimit), "Percent value must be between 0-100");
// Start shedding before silo reaches shedding limit.
return new LoadShedQueueFlowController((int)(options.CpuThreshold * (percentOfSiloSheddingLimit / 100.0)), options, environmentStatisticsProvider);
}
/// <summary>
/// Creates a flow controller triggered when the CPU reaches the specified limit.
/// Note: Triggered only when load shedding is enabled.
/// </summary>
/// <param name="loadSheddingLimit">Percentage of CPU which triggers queue read rate reduction</param>
/// <param name="options">The silo statistics options.</param>
/// <param name="environmentStatisticsProvider">The silo environment statistics.</param>
/// <returns>The flow controller.</returns>
public static IQueueFlowController CreateAsPercentageOfCPU(int loadSheddingLimit, LoadSheddingOptions options, IEnvironmentStatisticsProvider environmentStatisticsProvider)
{
if (loadSheddingLimit < 0 || loadSheddingLimit > 100) throw new ArgumentOutOfRangeException(nameof(loadSheddingLimit), "Value must be between 0-100");
return new LoadShedQueueFlowController(loadSheddingLimit, options, environmentStatisticsProvider);
}
private LoadShedQueueFlowController(int loadSheddingLimit, LoadSheddingOptions options, IEnvironmentStatisticsProvider environmentStatisticsProvider)
{
this.options = options;
if (loadSheddingLimit < 0 || loadSheddingLimit > 100) throw new ArgumentOutOfRangeException(nameof(loadSheddingLimit), "Value must be between 0-100");
this.loadSheddingLimit = loadSheddingLimit != 0 ? loadSheddingLimit : int.MaxValue;
this.environmentStatisticsProvider = environmentStatisticsProvider;
}
/// <inheritdoc/>
public int GetMaxAddCount()
{
return options.LoadSheddingEnabled && environmentStatisticsProvider.GetEnvironmentStatistics().FilteredCpuUsagePercentage > loadSheddingLimit ? 0 : int.MaxValue;
}
}
}
| LoadShedQueueFlowController |
csharp | dotnet__maui | src/Essentials/src/Permissions/Permissions.ios.cs | {
"start": 2156,
"end": 3828
} | public partial class ____ : BasePlatformPermission
{
/// <inheritdoc/>
protected override Func<IEnumerable<string>> RequiredInfoPlistKeys =>
() => new string[] { "NSContactsUsageDescription" };
/// <inheritdoc/>
public override Task<PermissionStatus> CheckStatusAsync()
{
EnsureDeclared();
return Task.FromResult(GetAddressBookPermissionStatus());
}
/// <inheritdoc/>
public override Task<PermissionStatus> RequestAsync()
{
EnsureDeclared();
var status = GetAddressBookPermissionStatus();
if (status == PermissionStatus.Granted)
return Task.FromResult(status);
EnsureMainThread();
return RequestAddressBookPermission();
}
internal static PermissionStatus GetAddressBookPermissionStatus()
{
var status = global::Contacts.CNContactStore.GetAuthorizationStatus(global::Contacts.CNEntityType.Contacts);
return status switch
{
global::Contacts.CNAuthorizationStatus.Limited => PermissionStatus.Limited,
global::Contacts.CNAuthorizationStatus.Authorized => PermissionStatus.Granted,
global::Contacts.CNAuthorizationStatus.Denied => PermissionStatus.Denied,
global::Contacts.CNAuthorizationStatus.Restricted => PermissionStatus.Restricted,
_ => PermissionStatus.Unknown,
};
}
internal static async Task<PermissionStatus> RequestAddressBookPermission()
{
var contactStore = new global::Contacts.CNContactStore();
var result = await contactStore.RequestAccessAsync(global::Contacts.CNEntityType.Contacts);
if (result.Item2 != null)
return PermissionStatus.Denied;
return GetAddressBookPermissionStatus();
}
}
| ContactsRead |
csharp | SixLabors__ImageSharp | src/ImageSharp/Processing/Processors/Quantization/OctreeQuantizer{TPixel}.cs | {
"start": 5058,
"end": 5818
} | struct ____ : IQuantizingPixelRowDelegate<Rgba32>
{
private readonly Octree octree;
public PixelRowDelegate(Octree octree) => this.octree = octree;
public void Invoke(ReadOnlySpan<Rgba32> row, int rowIndex) => this.octree.AddColors(row);
}
/// <summary>
/// A hexadecatree-based color quantization structure used for fast color distance lookups and palette generation.
/// This tree maintains a fixed pool of nodes (capacity 4096) where each node can have up to 16 children, stores
/// color accumulation data, and supports dynamic node allocation and reduction. It offers near-constant-time insertions
/// and lookups while consuming roughly 240 KB for the node pool.
/// </summary>
| PixelRowDelegate |
csharp | OrchardCMS__OrchardCore | src/OrchardCore.Modules/OrchardCore.DataLocalization/Startup.cs | {
"start": 472,
"end": 1323
} | public class ____ : StartupBase
{
public override int ConfigureOrder => -100;
/// <inheritdocs />
public override void ConfigureServices(IServiceCollection services)
{
services.AddScoped<TranslationsManager>();
services.AddRecipeExecutionStep<TranslationsStep>();
services.AddTransient<IDeploymentSource, AllDataTranslationsDeploymentSource>();
services.AddSingleton<IDeploymentStepFactory>(new DeploymentStepFactory<AllDataTranslationsDeploymentStep>());
services.AddScoped<IDisplayDriver<DeploymentStep>, AllDataTranslationsDeploymentStepDriver>();
services.AddScoped<ILocalizationDataProvider, ContentTypeDataLocalizationProvider>();
services.AddScoped<ILocalizationDataProvider, ContentFieldDataLocalizationProvider>();
services.AddDataLocalization();
}
}
| Startup |
csharp | npgsql__npgsql | test/Npgsql.Tests/Support/PgPostmasterMock.cs | {
"start": 257,
"end": 9817
} | class ____ : IAsyncDisposable
{
const int ReadBufferSize = 8192;
const int WriteBufferSize = 8192;
const int CancelRequestCode = 1234 << 16 | 5678;
const int SslRequest = 80877103;
const int GssRequest = 80877104;
static readonly Encoding Encoding = NpgsqlWriteBuffer.UTF8Encoding;
static readonly Encoding RelaxedEncoding = NpgsqlWriteBuffer.RelaxedUTF8Encoding;
readonly Socket _socket;
readonly List<PgServerMock> _allServers = [];
bool _acceptingClients;
Task? _acceptClientsTask;
int _processIdCounter;
readonly bool _completeCancellationImmediately;
readonly string? _startupErrorCode;
ChannelWriter<Task<ServerOrCancellationRequest>> _pendingRequestsWriter { get; }
ChannelReader<Task<ServerOrCancellationRequest>> _pendingRequestsReader { get; }
internal string ConnectionString { get; }
internal string Host { get; }
internal int Port { get; }
volatile MockState _state;
internal MockState State
{
get => _state;
set => _state = value;
}
internal static PgPostmasterMock Start(
string? connectionString = null,
bool completeCancellationImmediately = true,
MockState state = MockState.MultipleHostsDisabled,
string? startupErrorCode = null)
{
var mock = new PgPostmasterMock(connectionString, completeCancellationImmediately, state, startupErrorCode);
mock.AcceptClients();
return mock;
}
internal PgPostmasterMock(
string? connectionString = null,
bool completeCancellationImmediately = true,
MockState state = MockState.MultipleHostsDisabled,
string? startupErrorCode = null)
{
var pendingRequestsChannel = Channel.CreateUnbounded<Task<ServerOrCancellationRequest>>();
_pendingRequestsReader = pendingRequestsChannel.Reader;
_pendingRequestsWriter = pendingRequestsChannel.Writer;
var connectionStringBuilder = new NpgsqlConnectionStringBuilder(connectionString);
_completeCancellationImmediately = completeCancellationImmediately;
State = state;
_startupErrorCode = startupErrorCode;
_socket = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp);
var endpoint = new IPEndPoint(IPAddress.Loopback, 0);
_socket.Bind(endpoint);
var localEndPoint = (IPEndPoint)_socket.LocalEndPoint!;
Host = localEndPoint.Address.ToString();
Port = localEndPoint.Port;
connectionStringBuilder.Host = Host;
connectionStringBuilder.Port = Port;
#pragma warning disable CS0618 // Type or member is obsolete
connectionStringBuilder.ServerCompatibilityMode = ServerCompatibilityMode.NoTypeLoading;
#pragma warning restore CS0618 // Type or member is obsolete
ConnectionString = connectionStringBuilder.ConnectionString;
_socket.Listen(5);
}
public NpgsqlDataSource CreateDataSource(Action<NpgsqlDataSourceBuilder>? configure = null)
{
var builder = new NpgsqlDataSourceBuilder(ConnectionString);
configure?.Invoke(builder);
return builder.Build();
}
void AcceptClients()
{
_acceptingClients = true;
_acceptClientsTask = DoAcceptClients();
async Task DoAcceptClients()
{
while (true)
{
var serverOrCancellationRequest = await Accept(_completeCancellationImmediately);
if (serverOrCancellationRequest.Server is { } server)
{
// Hand off the new server to the client test only once startup is complete, to avoid reading/writing in parallel
// during startup. Don't wait for all this to complete - continue to accept other connections in case that's needed.
if (string.IsNullOrEmpty(_startupErrorCode))
{
// We may be accepting (and starting up) multiple connections in parallel, but some tests assume we return
// server connections in FIFO. As a result, we enqueue immediately into the _pendingRequestsWriter channel,
// but we enqueue a Task which represents the Startup completing.
await _pendingRequestsWriter.WriteAsync(Task.Run(async () =>
{
await server.Startup(State);
return serverOrCancellationRequest;
}));
}
else
_ = server.FailedStartup(_startupErrorCode);
}
else
{
await _pendingRequestsWriter.WriteAsync(Task.FromResult(serverOrCancellationRequest));
}
}
// ReSharper disable once FunctionNeverReturns
}
}
async Task<ServerOrCancellationRequest> Accept(bool completeCancellationImmediately)
{
var clientSocket = await _socket.AcceptAsync();
var stream = new NetworkStream(clientSocket, true);
var readBuffer = new NpgsqlReadBuffer(null!, stream, clientSocket, ReadBufferSize, Encoding,
RelaxedEncoding);
var writeBuffer = new NpgsqlWriteBuffer(null!, stream, clientSocket, WriteBufferSize, Encoding);
writeBuffer.MessageLengthValidation = false;
await readBuffer.EnsureAsync(4);
var len = readBuffer.ReadInt32();
await readBuffer.EnsureAsync(len - 4);
var request = readBuffer.ReadInt32();
if (request == GssRequest)
{
writeBuffer.WriteByte((byte)'N');
await writeBuffer.Flush(async: true);
await readBuffer.EnsureAsync(4);
len = readBuffer.ReadInt32();
await readBuffer.EnsureAsync(len - 4);
request = readBuffer.ReadInt32();
}
if (request == SslRequest)
{
writeBuffer.WriteByte((byte)'N');
await writeBuffer.Flush(async: true);
await readBuffer.EnsureAsync(4);
len = readBuffer.ReadInt32();
await readBuffer.EnsureAsync(len - 4);
request = readBuffer.ReadInt32();
}
if (request == CancelRequestCode)
{
var cancellationRequest = new PgCancellationRequest(readBuffer, writeBuffer, stream, readBuffer.ReadInt32(), readBuffer.ReadInt32());
if (completeCancellationImmediately)
{
cancellationRequest.Complete();
}
return new ServerOrCancellationRequest(cancellationRequest);
}
// This is not a cancellation, "spawn" a new server
readBuffer.ReadPosition -= 8;
var server = new PgServerMock(stream, readBuffer, writeBuffer, ++_processIdCounter);
_allServers.Add(server);
return new ServerOrCancellationRequest(server);
}
internal async Task<PgServerMock> AcceptServer(bool completeCancellationImmediately = true)
{
if (_acceptingClients)
throw new InvalidOperationException($"Already accepting clients via {nameof(AcceptClients)}");
var serverOrCancellationRequest = await Accept(completeCancellationImmediately);
if (serverOrCancellationRequest.Server is null)
throw new InvalidOperationException("Expected a server connection but got a cancellation request instead");
return serverOrCancellationRequest.Server;
}
internal async Task<PgCancellationRequest> AcceptCancellationRequest()
{
if (_acceptingClients)
throw new InvalidOperationException($"Already accepting clients via {nameof(AcceptClients)}");
var serverOrCancellationRequest = await Accept(completeCancellationImmediately: true);
if (serverOrCancellationRequest.CancellationRequest is null)
throw new InvalidOperationException("Expected a cancellation request but got a server connection instead");
return serverOrCancellationRequest.CancellationRequest;
}
internal async ValueTask<PgServerMock> WaitForServerConnection()
{
var serverOrCancellationRequest = await await _pendingRequestsReader.ReadAsync();
if (serverOrCancellationRequest.Server is null)
throw new InvalidOperationException("Expected a server connection but got a cancellation request instead");
return serverOrCancellationRequest.Server;
}
internal async ValueTask<PgCancellationRequest> WaitForCancellationRequest()
{
var serverOrCancellationRequest = await await _pendingRequestsReader.ReadAsync();
if (serverOrCancellationRequest.CancellationRequest is null)
throw new InvalidOperationException("Expected cancellation request but got a server connection instead");
return serverOrCancellationRequest.CancellationRequest;
}
public async ValueTask DisposeAsync()
{
var endpoint = _socket.LocalEndPoint as IPEndPoint;
Debug.Assert(endpoint is not null);
// Stop accepting new connections
_socket.Dispose();
try
{
var acceptTask = _acceptClientsTask;
if (acceptTask != null)
await acceptTask;
}
catch
{
// Swallow all exceptions
}
// Destroy all servers created by this postmaster
foreach (var server in _allServers)
server.Dispose();
}
internal readonly | PgPostmasterMock |
csharp | Cysharp__MemoryPack | sandbox/SandboxConsoleApp/Program.cs | {
"start": 10409,
"end": 10481
} | public partial record ____ FooStruct(int x, int y);
[MemoryPackable]
| struct |
csharp | cake-build__cake | src/Cake.Common.Tests/Unit/Build/Jenkins/Data/JenkinsChangeInfoTests.cs | {
"start": 2059,
"end": 2507
} | public sealed class ____
{
[Fact]
public void Should_Return_Correct_Value()
{
// Given
var info = new JenkinsInfoFixture().CreateEnvironmentInfo();
// When
var result = info.Change.AuthorDisplayName;
// Then
Assert.Equal("Cake User", result);
}
}
| TheChangeAuthorDisplayNameProperty |
csharp | unoplatform__uno | src/SamplesApp/UITests.Shared/Windows_UI_Xaml_Controls/ImageTests/ImageWithLateSourceUniformToFill.xaml.cs | {
"start": 849,
"end": 1091
} | partial class ____ : UserControl
{
public ImageWithLateSourceUniformToFill()
{
this.InitializeComponent();
DataContext = new ImageWithLateSourceViewModel(UnitTestDispatcherCompat.From(this));
}
}
}
| ImageWithLateSourceUniformToFill |
csharp | bchavez__Bogus | Source/Bogus.Tests/GitHubIssues/Issue255.cs | {
"start": 202,
"end": 283
} | private interface ____ : IParent
{
string City { get; set; }
}
| IChild |
csharp | protobuf-net__protobuf-net | src/Examples/Issues/SO7654066.cs | {
"start": 180,
"end": 268
} | public class ____
{
[ProtoContract(IgnoreListHandling = true)]
| SO7654066 |
csharp | mongodb__mongo-csharp-driver | src/MongoDB.Driver/Search/OperatorSearchDefinitions.cs | {
"start": 12560,
"end": 13560
} | internal sealed class ____<TDocument> : OperatorSearchDefinition<TDocument>
{
private readonly SearchQueryDefinition _query;
private readonly int? _slop;
private readonly string _synonyms;
public PhraseSearchDefinition(
SearchPathDefinition<TDocument> path,
SearchQueryDefinition query,
SearchPhraseOptions<TDocument> options)
: base(OperatorType.Phrase, path, options?.Score)
{
_query = Ensure.IsNotNull(query, nameof(query));
_slop = options?.Slop;
_synonyms = options?.Synonyms;
}
private protected override BsonDocument RenderArguments(
RenderArgs<TDocument> args,
IBsonSerializer fieldSerializer) => new()
{
{ "query", _query.Render() },
{ "slop", _slop, _slop != null },
{ "synonyms", _synonyms, _synonyms != null }
};
}
| PhraseSearchDefinition |
csharp | nopSolutions__nopCommerce | src/Libraries/Nop.Services/ExportImport/ExportProductAttribute.cs | {
"start": 39,
"end": 4289
} | public partial class ____
{
/// <summary>
/// Gets or sets the attribute identifier
/// </summary>
public int AttributeId { get; set; }
/// <summary>
/// Gets or sets the attribute name
/// </summary>
public string AttributeName { get; set; }
/// <summary>
/// Gets or sets a value a text prompt
/// </summary>
public string AttributeTextPrompt { get; set; }
/// <summary>
/// Gets or sets a value indicating whether the entity is required
/// </summary>
public bool AttributeIsRequired { get; set; }
/// <summary>
/// Gets or sets the display order
/// </summary>
public int AttributeDisplayOrder { get; set; }
/// <summary>
/// Gets or sets the comma separated picture identifiers
/// </summary>
public string PictureIds { get; set; }
/// <summary>
/// Gets or sets the attribute control type identifier
/// </summary>
public int AttributeControlTypeId { get; set; }
/// <summary>
/// Gets or sets the product attribute mapping identifier
/// </summary>
public int AttributeMappingId { get; set; }
/// <summary>
/// Gets or sets the attribute value type identifier
/// </summary>
public int AttributeValueTypeId { get; set; }
/// <summary>
/// Gets or sets the associated product identifier (used only with AttributeValueType.AssociatedToProduct)
/// </summary>
public int AssociatedProductId { get; set; }
/// <summary>
/// Gets or sets the identifier
/// </summary>
public int Id { get; set; }
/// <summary>
/// Gets or sets the picture ID for image square (used with "Image squares" attribute type)
/// </summary>
public int ImageSquaresPictureId { get; set; }
/// <summary>
/// Gets or sets the name
/// </summary>
public string Name { get; set; }
/// <summary>
/// Gets or sets the weight adjustment
/// </summary>
public decimal WeightAdjustment { get; set; }
/// <summary>
/// Gets or sets a value indicating whether the customer can enter the quantity of associated product (used only with AttributeValueType.AssociatedToProduct)
/// </summary>
public bool CustomerEntersQty { get; set; }
/// <summary>
/// Gets or sets the quantity of associated product (used only with AttributeValueType.AssociatedToProduct)
/// </summary>
public int Quantity { get; set; }
/// <summary>
/// Gets or sets a value indicating whether the value is pre-selected
/// </summary>
public bool IsPreSelected { get; set; }
/// <summary>
/// Gets or sets the color RGB value (used with "Color squares" attribute type)
/// </summary>
public string ColorSquaresRgb { get; set; }
/// <summary>
/// Gets or sets the price adjustment
/// </summary>
public decimal PriceAdjustment { get; set; }
/// <summary>
/// Gets or sets the attribute value cost
/// </summary>
public decimal Cost { get; set; }
/// <summary>
/// Gets or sets the display order
/// </summary>
public int DisplayOrder { get; set; }
/// <summary>
/// Gets or sets a value indicating whether "price adjustment" is specified as percentage
/// </summary>
public bool PriceAdjustmentUsePercentage { get; set; }
/// <summary>
/// Gets or sets the default value (for textbox and multiline textbox)
/// </summary>
public string DefaultValue { get; set; }
/// <summary>
/// Gets or sets the validation rule for minimum length (for textbox and multiline textbox)
/// </summary>
public int? ValidationMinLength { get; set; }
/// <summary>
/// Gets or sets the validation rule for maximum length (for textbox and multiline textbox)
/// </summary>
public int? ValidationMaxLength { get; set; }
/// <summary>
/// Gets or sets the validation rule for file allowed extensions (for file upload)
/// </summary>
public string ValidationFileAllowedExtensions { get; set; }
/// <summary>
/// Gets or sets the validation rule for file maximum size in kilobytes (for file upload)
/// </summary>
public int? ValidationFileMaximumSize { get; set; }
} | ExportProductAttribute |
csharp | aspnetboilerplate__aspnetboilerplate | src/Abp/Runtime/System/IOSPlatformProvider.cs | {
"start": 75,
"end": 168
} | public interface ____
{
OSPlatform GetCurrentOSPlatform();
}
} | IOSPlatformProvider |
csharp | App-vNext__Polly | src/Polly.Core/Simmy/Fault/FaultGenerator.cs | {
"start": 410,
"end": 520
} | class ____ be assigned to the <see cref="ChaosFaultStrategyOptions.FaultGenerator"/> property.
/// </remarks>
| can |
csharp | spectreconsole__spectre.console | src/Spectre.Console.Tests/Unit/Widgets/GridTests.cs | {
"start": 109,
"end": 645
} | public sealed class ____
{
[Fact]
public void Should_Throw_If_Rows_Are_Not_Empty()
{
// Given
var grid = new Grid();
grid.AddColumn();
grid.AddRow("Hello World!");
// When
var result = Record.Exception(() => grid.AddColumn());
// Then
result.ShouldBeOfType<InvalidOperationException>()
.Message.ShouldBe("Cannot add new columns to grid with existing rows.");
}
}
| TheAddColumnMethod |
csharp | dotnet__efcore | src/EFCore/Metadata/Builders/ComplexTypePropertyBuilder.cs | {
"start": 839,
"end": 35013
} | public class ____ : IInfrastructure<IConventionPropertyBuilder>
{
/// <summary>
/// This is an internal API that supports the Entity Framework Core infrastructure and not subject to
/// the same compatibility standards as public APIs. It may be changed or removed without notice in
/// any release. You should only use it directly in your code with extreme caution and knowing that
/// doing so can result in application failures when updating to a new Entity Framework Core release.
/// </summary>
[EntityFrameworkInternal]
public ComplexTypePropertyBuilder(IMutableProperty property)
{
Check.NotNull(property);
Builder = ((Property)property).Builder;
}
/// <summary>
/// The internal builder being used to configure the property.
/// </summary>
IConventionPropertyBuilder IInfrastructure<IConventionPropertyBuilder>.Instance
=> Builder;
private InternalPropertyBuilder Builder { get; }
/// <summary>
/// The property being configured.
/// </summary>
public virtual IMutableProperty Metadata
=> Builder.Metadata;
/// <summary>
/// Adds or updates an annotation on the property. If an annotation with the key specified in
/// <paramref name="annotation" /> already exists its value will be updated.
/// </summary>
/// <param name="annotation">The key of the annotation to be added or updated.</param>
/// <param name="value">The value to be stored in the annotation.</param>
/// <returns>The same builder instance so that multiple configuration calls can be chained.</returns>
public virtual ComplexTypePropertyBuilder HasAnnotation(string annotation, object? value)
{
Check.NotEmpty(annotation);
Builder.HasAnnotation(annotation, value, ConfigurationSource.Explicit);
return this;
}
/// <summary>
/// Configures whether this property must have a value assigned or <see langword="null" /> is a valid value.
/// A property can only be configured as non-required if it is based on a CLR type that can be
/// assigned <see langword="null" />.
/// </summary>
/// <param name="required">A value indicating whether the property is required.</param>
/// <returns>The same builder instance so that multiple configuration calls can be chained.</returns>
public virtual ComplexTypePropertyBuilder IsRequired(bool required = true)
{
Builder.IsRequired(required, ConfigurationSource.Explicit);
return this;
}
/// <summary>
/// Configures the maximum length of data that can be stored in this property.
/// Maximum length can only be set on array properties (including <see cref="string" /> properties).
/// </summary>
/// <param name="maxLength">
/// The maximum length of data allowed in the property. A value of <c>-1</c> indicates that the property has no maximum length.
/// </param>
/// <returns>The same builder instance so that multiple configuration calls can be chained.</returns>
public virtual ComplexTypePropertyBuilder HasMaxLength(int maxLength)
{
Builder.HasMaxLength(maxLength, ConfigurationSource.Explicit);
return this;
}
/// <summary>
/// Configures the value that will be used to determine if the property has been set or not. If the property is set to the
/// sentinel value, then it is considered not set. By default, the sentinel value is the CLR default value for the type of
/// the property.
/// </summary>
/// <param name="sentinel">The sentinel value.</param>
/// <returns>The same builder instance if the configuration was applied, <see langword="null" /> otherwise.</returns>
public virtual ComplexTypePropertyBuilder HasSentinel(object? sentinel)
{
Builder.HasSentinel(sentinel, ConfigurationSource.Explicit);
return this;
}
/// <summary>
/// Configures the precision and scale of the property.
/// </summary>
/// <param name="precision">The precision of the property.</param>
/// <param name="scale">The scale of the property.</param>
/// <returns>The same builder instance so that multiple configuration calls can be chained.</returns>
public virtual ComplexTypePropertyBuilder HasPrecision(int precision, int scale)
{
Builder.HasPrecision(precision, ConfigurationSource.Explicit);
Builder.HasScale(scale, ConfigurationSource.Explicit);
return this;
}
/// <summary>
/// Configures the precision of the property.
/// </summary>
/// <param name="precision">The precision of the property.</param>
/// <returns>The same builder instance so that multiple configuration calls can be chained.</returns>
public virtual ComplexTypePropertyBuilder HasPrecision(int precision)
{
Builder.HasPrecision(precision, ConfigurationSource.Explicit);
return this;
}
/// <summary>
/// Configures whether the property as capable of persisting unicode characters.
/// Can only be set on <see cref="string" /> properties.
/// </summary>
/// <param name="unicode">A value indicating whether the property can contain unicode characters.</param>
/// <returns>The same builder instance so that multiple configuration calls can be chained.</returns>
public virtual ComplexTypePropertyBuilder IsUnicode(bool unicode = true)
{
Builder.IsUnicode(unicode, ConfigurationSource.Explicit);
return this;
}
/// <summary>
/// Configures the property as <see cref="ValueGeneratedOnAddOrUpdate" /> and
/// <see cref="IsConcurrencyToken" />.
/// </summary>
/// <remarks>
/// Database providers can choose to interpret this in different way, but it is commonly used
/// to indicate some form of automatic row-versioning as used for optimistic concurrency detection.
/// </remarks>
/// <returns>The same builder instance so that multiple configuration calls can be chained.</returns>
public virtual ComplexTypePropertyBuilder IsRowVersion()
{
Builder.ValueGenerated(ValueGenerated.OnAddOrUpdate, ConfigurationSource.Explicit);
Builder.IsConcurrencyToken(true, ConfigurationSource.Explicit);
return this;
}
/// <summary>
/// Configures the <see cref="ValueGenerator" /> that will generate values for this property.
/// </summary>
/// <remarks>
/// <para>
/// Values are generated when the entity is added to the context using, for example,
/// <see cref="DbContext.Add{TEntity}" />. Values are generated only when the property is assigned
/// the CLR default value (<see langword="null" /> for <c>string</c>, <c>0</c> for <c>int</c>,
/// <c>Guid.Empty</c> for <c>Guid</c>, etc.).
/// </para>
/// <para>
/// A single instance of this type will be created and used to generate values for this property in all
/// instances of the complex type. The type must be instantiable and have a parameterless constructor.
/// </para>
/// <para>
/// This method is intended for use with custom value generation. Value generation for common cases is
/// usually handled automatically by the database provider.
/// </para>
/// </remarks>
/// <typeparam name="TGenerator">A type that inherits from <see cref="ValueGenerator" />.</typeparam>
/// <returns>The same builder instance so that multiple configuration calls can be chained.</returns>
public virtual ComplexTypePropertyBuilder HasValueGenerator
<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicParameterlessConstructor)] TGenerator>()
where TGenerator : ValueGenerator
{
Builder.HasValueGenerator(typeof(TGenerator), ConfigurationSource.Explicit);
return this;
}
/// <summary>
/// Configures the <see cref="ValueGenerator" /> that will generate values for this property.
/// </summary>
/// <remarks>
/// <para>
/// Values are generated when the entity is added to the context using, for example,
/// <see cref="DbContext.Add{TEntity}" />. Values are generated only when the property is assigned
/// the CLR default value (<see langword="null" /> for <c>string</c>, <c>0</c> for <c>int</c>,
/// <c>Guid.Empty</c> for <c>Guid</c>, etc.).
/// </para>
/// <para>
/// A single instance of this type will be created and used to generate values for this property in all
/// instances of the complex type. The type must be instantiable and have a parameterless constructor.
/// </para>
/// <para>
/// This method is intended for use with custom value generation. Value generation for common cases is
/// usually handled automatically by the database provider.
/// </para>
/// <para>
/// Setting <see langword="null" /> does not disable value generation for this property, it just clears any generator explicitly
/// configured for this property. The database provider may still have a value generator for the property type.
/// </para>
/// </remarks>
/// <param name="valueGeneratorType">A type that inherits from <see cref="ValueGenerator" />.</param>
/// <returns>The same builder instance so that multiple configuration calls can be chained.</returns>
public virtual ComplexTypePropertyBuilder HasValueGenerator(
[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicParameterlessConstructor)]
Type? valueGeneratorType)
{
Builder.HasValueGenerator(valueGeneratorType, ConfigurationSource.Explicit);
return this;
}
/// <summary>
/// Configures the <see cref="ValueGeneratorFactory" /> for creating a <see cref="ValueGenerator" />
/// to use to generate values for this property.
/// </summary>
/// <remarks>
/// <para>
/// Values are generated when the entity is added to the context using, for example,
/// <see cref="DbContext.Add{TEntity}" />. Values are generated only when the property is assigned
/// the CLR default value (<see langword="null" /> for <c>string</c>, <c>0</c> for <c>int</c>,
/// <c>Guid.Empty</c> for <c>Guid</c>, etc.).
/// </para>
/// <para>
/// A single instance of this type will be created and used to generate values for this property in all
/// instances of the complex type. The type must be instantiable and have a parameterless constructor.
/// </para>
/// <para>
/// This method is intended for use with custom value generation. Value generation for common cases is
/// usually handled automatically by the database provider.
/// </para>
/// <para>
/// Setting <see langword="null" /> does not disable value generation for this property, it just clears any generator explicitly
/// configured for this property. The database provider may still have a value generator for the property type.
/// </para>
/// </remarks>
/// <typeparam name="TFactory">A type that inherits from <see cref="ValueGeneratorFactory" />.</typeparam>
/// <returns>The same builder instance so that multiple configuration calls can be chained.</returns>
public virtual ComplexTypePropertyBuilder HasValueGeneratorFactory
<[DynamicallyAccessedMembers(ValueGeneratorFactory.DynamicallyAccessedMemberTypes)] TFactory>()
where TFactory : ValueGeneratorFactory
=> HasValueGeneratorFactory(typeof(TFactory));
/// <summary>
/// Configures the <see cref="ValueGeneratorFactory" /> for creating a <see cref="ValueGenerator" />
/// to use to generate values for this property.
/// </summary>
/// <remarks>
/// <para>
/// Values are generated when the entity is added to the context using, for example,
/// <see cref="DbContext.Add{TEntity}" />. Values are generated only when the property is assigned
/// the CLR default value (<see langword="null" /> for <c>string</c>, <c>0</c> for <c>int</c>,
/// <c>Guid.Empty</c> for <c>Guid</c>, etc.).
/// </para>
/// <para>
/// A single instance of this type will be created and used to generate values for this property in all
/// instances of the complex type. The type must be instantiable and have a parameterless constructor.
/// </para>
/// <para>
/// This method is intended for use with custom value generation. Value generation for common cases is
/// usually handled automatically by the database provider.
/// </para>
/// <para>
/// Setting <see langword="null" /> does not disable value generation for this property, it just clears any generator explicitly
/// configured for this property. The database provider may still have a value generator for the property type.
/// </para>
/// </remarks>
/// <param name="valueGeneratorFactoryType">A type that inherits from <see cref="ValueGeneratorFactory" />.</param>
/// <returns>The same builder instance so that multiple configuration calls can be chained.</returns>
public virtual ComplexTypePropertyBuilder HasValueGeneratorFactory(
[DynamicallyAccessedMembers(ValueGeneratorFactory.DynamicallyAccessedMemberTypes)]
Type? valueGeneratorFactoryType)
{
Builder.HasValueGeneratorFactory(valueGeneratorFactoryType, ConfigurationSource.Explicit);
return this;
}
/// <summary>
/// Configures whether this property should be used as a concurrency token. When a property is configured
/// as a concurrency token the value in the database will be checked when an instance of this complex type
/// is updated or deleted during <see cref="DbContext.SaveChanges()" /> to ensure it has not changed since
/// the instance was retrieved from the database. If it has changed, an exception will be thrown and the
/// changes will not be applied to the database.
/// </summary>
/// <param name="concurrencyToken">A value indicating whether this property is a concurrency token.</param>
/// <returns>The same builder instance so that multiple configuration calls can be chained.</returns>
public virtual ComplexTypePropertyBuilder IsConcurrencyToken(bool concurrencyToken = true)
{
Builder.IsConcurrencyToken(concurrencyToken, ConfigurationSource.Explicit);
return this;
}
/// <summary>
/// Configures a property to never have a value generated by the database when an instance of this
/// complex type is saved.
/// </summary>
/// <returns>The same builder instance so that multiple configuration calls can be chained.</returns>
/// <remarks>
/// Note that values may still be generated by a client-side value generator, if one is set explicitly or by a convention.
/// </remarks>
public virtual ComplexTypePropertyBuilder ValueGeneratedNever()
{
Builder.ValueGenerated(ValueGenerated.Never, ConfigurationSource.Explicit);
return this;
}
/// <summary>
/// Configures a property to have a value generated only when saving a new entity, unless a non-null,
/// non-temporary value has been set, in which case the set value will be saved instead. The value
/// may be generated by a client-side value generator or may be generated by the database as part
/// of saving the entity.
/// </summary>
/// <returns>The same builder instance so that multiple configuration calls can be chained.</returns>
public virtual ComplexTypePropertyBuilder ValueGeneratedOnAdd()
{
Builder.ValueGenerated(ValueGenerated.OnAdd, ConfigurationSource.Explicit);
return this;
}
/// <summary>
/// Configures a property to have a value generated when saving a new or existing entity.
/// </summary>
/// <returns>The same builder instance so that multiple configuration calls can be chained.</returns>
public virtual ComplexTypePropertyBuilder ValueGeneratedOnAddOrUpdate()
{
Builder.ValueGenerated(ValueGenerated.OnAddOrUpdate, ConfigurationSource.Explicit);
return this;
}
/// <summary>
/// Configures a property to have a value generated when saving an existing entity.
/// </summary>
/// <returns>The same builder instance so that multiple configuration calls can be chained.</returns>
public virtual ComplexTypePropertyBuilder ValueGeneratedOnUpdate()
{
Builder.ValueGenerated(ValueGenerated.OnUpdate, ConfigurationSource.Explicit);
return this;
}
/// <summary>
/// Configures a property to have a value generated under certain conditions when saving an existing entity.
/// </summary>
/// <returns>The same builder instance so that multiple configuration calls can be chained.</returns>
public virtual ComplexTypePropertyBuilder ValueGeneratedOnUpdateSometimes()
{
Builder.ValueGenerated(ValueGenerated.OnUpdateSometimes, ConfigurationSource.Explicit);
return this;
}
/// <summary>
/// Sets the backing field to use for this property.
/// </summary>
/// <remarks>
/// <para>
/// Backing fields are normally found by convention.
/// This method is useful for setting backing fields explicitly in cases where the
/// correct field is not found by convention.
/// </para>
/// <para>
/// By default, the backing field, if one is found or has been specified, is used when
/// new objects are constructed, typically when entities are queried from the database.
/// Properties are used for all other accesses. This can be changed by calling
/// <see cref="UsePropertyAccessMode" />.
/// </para>
/// <para>
/// See <see href="https://aka.ms/efcore-docs-backing-fields">Backing fields</see> for more information and examples.
/// </para>
/// </remarks>
/// <param name="fieldName">The field name.</param>
/// <returns>The same builder instance so that multiple configuration calls can be chained.</returns>
public virtual ComplexTypePropertyBuilder HasField(string fieldName)
{
Check.NotEmpty(fieldName);
Builder.HasField(fieldName, ConfigurationSource.Explicit);
return this;
}
/// <summary>
/// Sets the <see cref="PropertyAccessMode" /> to use for this property.
/// </summary>
/// <remarks>
/// <para>
/// By default, the backing field, if one is found by convention or has been specified, is used when
/// new objects are constructed, typically when entities are queried from the database.
/// Properties are used for all other accesses. Calling this method will change that behavior
/// for this property as described in the <see cref="PropertyAccessMode" /> enum.
/// </para>
/// <para>
/// Calling this method overrides for this property any access mode that was set on the
/// complex type or model.
/// </para>
/// </remarks>
/// <param name="propertyAccessMode">The <see cref="PropertyAccessMode" /> to use for this property.</param>
/// <returns>The same builder instance so that multiple configuration calls can be chained.</returns>
public virtual ComplexTypePropertyBuilder UsePropertyAccessMode(PropertyAccessMode propertyAccessMode)
{
Builder.UsePropertyAccessMode(propertyAccessMode, ConfigurationSource.Explicit);
return this;
}
/// <summary>
/// Configures the property so that the property value is converted before
/// writing to the database and converted back when reading from the database.
/// </summary>
/// <typeparam name="TConversion">The type to convert to and from or a type that inherits from <see cref="ValueConverter" />.</typeparam>
/// <returns>The same builder instance so that multiple configuration calls can be chained.</returns>
public virtual ComplexTypePropertyBuilder HasConversion<
[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicParameterlessConstructor)]
TConversion>()
=> HasConversion(typeof(TConversion));
/// <summary>
/// Configures the property so that the property value is converted before
/// writing to the database and converted back when reading from the database.
/// </summary>
/// <param name="conversionType">The type to convert to and from or a type that inherits from <see cref="ValueConverter" />.</param>
/// <returns>The same builder instance so that multiple configuration calls can be chained.</returns>
public virtual ComplexTypePropertyBuilder HasConversion(
[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicParameterlessConstructor)]
Type? conversionType)
{
if (typeof(ValueConverter).IsAssignableFrom(conversionType))
{
Builder.HasConverter(conversionType, ConfigurationSource.Explicit);
}
else
{
Builder.HasConversion(conversionType, ConfigurationSource.Explicit);
}
return this;
}
/// <summary>
/// Configures the property so that the property value is converted to and from the database
/// using the given <see cref="ValueConverter" />.
/// </summary>
/// <param name="converter">The converter to use.</param>
/// <returns>The same builder instance so that multiple configuration calls can be chained.</returns>
public virtual ComplexTypePropertyBuilder HasConversion(ValueConverter? converter)
=> HasConversion(converter, null, null);
/// <summary>
/// Configures the property so that the property value is converted before
/// writing to the database and converted back when reading from the database.
/// </summary>
/// <param name="valueComparer">The comparer to use for values before conversion.</param>
/// <typeparam name="TConversion">The type to convert to and from or a type that inherits from <see cref="ValueConverter" />.</typeparam>
/// <returns>The same builder instance so that multiple configuration calls can be chained.</returns>
public virtual ComplexTypePropertyBuilder HasConversion<
[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicParameterlessConstructor)]
TConversion>(
ValueComparer? valueComparer)
=> HasConversion(typeof(TConversion), valueComparer);
/// <summary>
/// Configures the property so that the property value is converted before
/// writing to the database and converted back when reading from the database.
/// </summary>
/// <param name="valueComparer">The comparer to use for values before conversion.</param>
/// <param name="providerComparer">The comparer to use for the provider values.</param>
/// <typeparam name="TConversion">The type to convert to and from or a type that inherits from <see cref="ValueConverter" />.</typeparam>
/// <returns>The same builder instance so that multiple configuration calls can be chained.</returns>
public virtual ComplexTypePropertyBuilder HasConversion
<[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicParameterlessConstructor)] TConversion>(
ValueComparer? valueComparer,
ValueComparer? providerComparer)
=> HasConversion(typeof(TConversion), valueComparer, providerComparer);
/// <summary>
/// Configures the property so that the property value is converted before
/// writing to the database and converted back when reading from the database.
/// </summary>
/// <param name="conversionType">The type to convert to and from or a type that inherits from <see cref="ValueConverter" />.</param>
/// <param name="valueComparer">The comparer to use for values before conversion.</param>
/// <returns>The same builder instance so that multiple configuration calls can be chained.</returns>
public virtual ComplexTypePropertyBuilder HasConversion(
[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicParameterlessConstructor)]
Type conversionType,
ValueComparer? valueComparer)
=> HasConversion(conversionType, valueComparer, null);
// DynamicallyAccessedMemberTypes.PublicParameterlessConstructor
/// <summary>
/// Configures the property so that the property value is converted before
/// writing to the database and converted back when reading from the database.
/// </summary>
/// <param name="conversionType">The type to convert to and from or a type that inherits from <see cref="ValueConverter" />.</param>
/// <param name="valueComparer">The comparer to use for values before conversion.</param>
/// <param name="providerComparer">The comparer to use for the provider values.</param>
/// <returns>The same builder instance so that multiple configuration calls can be chained.</returns>
public virtual ComplexTypePropertyBuilder HasConversion(
[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicParameterlessConstructor)]
Type conversionType,
ValueComparer? valueComparer,
ValueComparer? providerComparer)
{
Check.NotNull(conversionType);
if (typeof(ValueConverter).IsAssignableFrom(conversionType))
{
Builder.HasConverter(conversionType, ConfigurationSource.Explicit);
}
else
{
Builder.HasConversion(conversionType, ConfigurationSource.Explicit);
}
Builder.HasValueComparer(valueComparer, ConfigurationSource.Explicit);
Builder.HasProviderValueComparer(providerComparer, ConfigurationSource.Explicit);
return this;
}
/// <summary>
/// Configures the property so that the property value is converted to and from the database
/// using the given <see cref="ValueConverter" />.
/// </summary>
/// <param name="converter">The converter to use.</param>
/// <param name="valueComparer">The comparer to use for values before conversion.</param>
/// <returns>The same builder instance so that multiple configuration calls can be chained.</returns>
public virtual ComplexTypePropertyBuilder HasConversion(ValueConverter? converter, ValueComparer? valueComparer)
=> HasConversion(converter, valueComparer, null);
/// <summary>
/// Configures the property so that the property value is converted to and from the database
/// using the given <see cref="ValueConverter" />.
/// </summary>
/// <param name="converter">The converter to use.</param>
/// <param name="valueComparer">The comparer to use for values before conversion.</param>
/// <param name="providerComparer">The comparer to use for the provider values.</param>
/// <returns>The same builder instance so that multiple configuration calls can be chained.</returns>
public virtual ComplexTypePropertyBuilder HasConversion(
ValueConverter? converter,
ValueComparer? valueComparer,
ValueComparer? providerComparer)
{
Builder.HasConversion(converter, ConfigurationSource.Explicit);
Builder.HasValueComparer(valueComparer, ConfigurationSource.Explicit);
Builder.HasProviderValueComparer(providerComparer, ConfigurationSource.Explicit);
return this;
}
/// <summary>
/// Configures the property so that the property value is converted before
/// writing to the database and converted back when reading from the database.
/// </summary>
/// <typeparam name="TConversion">The type to convert to and from or a type that inherits from <see cref="ValueConverter" />.</typeparam>
/// <typeparam name="TComparer">A type that inherits from <see cref="ValueComparer" />.</typeparam>
/// <returns>The same builder instance so that multiple configuration calls can be chained.</returns>
public virtual ComplexTypePropertyBuilder HasConversion<
[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicParameterlessConstructor)]
TConversion,
[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicParameterlessConstructor)]
TComparer>()
where TComparer : ValueComparer
=> HasConversion(typeof(TConversion), typeof(TComparer));
/// <summary>
/// Configures the property so that the property value is converted before
/// writing to the database and converted back when reading from the database.
/// </summary>
/// <typeparam name="TConversion">The type to convert to and from or a type that inherits from <see cref="ValueConverter" />.</typeparam>
/// <typeparam name="TComparer">A type that inherits from <see cref="ValueComparer" />.</typeparam>
/// <typeparam name="TProviderComparer">A type that inherits from <see cref="ValueComparer" /> to use for the provider values.</typeparam>
/// <returns>The same builder instance so that multiple configuration calls can be chained.</returns>
public virtual ComplexTypePropertyBuilder HasConversion<
[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicParameterlessConstructor)]
TConversion,
[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicParameterlessConstructor)]
TComparer,
[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicParameterlessConstructor)]
TProviderComparer>()
where TComparer : ValueComparer
where TProviderComparer : ValueComparer
=> HasConversion(typeof(TConversion), typeof(TComparer), typeof(TProviderComparer));
/// <summary>
/// Configures the property so that the property value is converted before
/// writing to the database and converted back when reading from the database.
/// </summary>
/// <param name="conversionType">The type to convert to and from or a type that inherits from <see cref="ValueConverter" />.</param>
/// <param name="comparerType">A type that inherits from <see cref="ValueComparer" />.</param>
/// <returns>The same builder instance so that multiple configuration calls can be chained.</returns>
public virtual ComplexTypePropertyBuilder HasConversion(
[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicParameterlessConstructor)]
Type conversionType,
[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicParameterlessConstructor)]
Type? comparerType)
=> HasConversion(conversionType, comparerType, null);
/// <summary>
/// Configures the property so that the property value is converted before
/// writing to the database and converted back when reading from the database.
/// </summary>
/// <param name="conversionType">The type to convert to and from or a type that inherits from <see cref="ValueConverter" />.</param>
/// <param name="comparerType">A type that inherits from <see cref="ValueComparer" />.</param>
/// <param name="providerComparerType">A type that inherits from <see cref="ValueComparer" /> to use for the provider values.</param>
/// <returns>The same builder instance so that multiple configuration calls can be chained.</returns>
public virtual ComplexTypePropertyBuilder HasConversion(
[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicParameterlessConstructor)]
Type conversionType,
[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicParameterlessConstructor)]
Type? comparerType,
[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicParameterlessConstructor)]
Type? providerComparerType)
{
Check.NotNull(conversionType);
if (typeof(ValueConverter).IsAssignableFrom(conversionType))
{
Builder.HasConverter(conversionType, ConfigurationSource.Explicit);
}
else
{
Builder.HasConversion(conversionType, ConfigurationSource.Explicit);
}
Builder.HasValueComparer(comparerType, ConfigurationSource.Explicit);
Builder.HasProviderValueComparer(providerComparerType, ConfigurationSource.Explicit);
return this;
}
#region Hidden System.Object members
/// <summary>
/// Returns a string that represents the current object.
/// </summary>
/// <returns>A string that represents the current object.</returns>
[EditorBrowsable(EditorBrowsableState.Never)]
public override string? ToString()
=> base.ToString();
/// <summary>
/// Determines whether the specified object is equal to the current object.
/// </summary>
/// <param name="obj">The object to compare with the current object.</param>
/// <returns><see langword="true" /> if the specified object is equal to the current object; otherwise, <see langword="false" />.</returns>
[EditorBrowsable(EditorBrowsableState.Never)]
// ReSharper disable once BaseObjectEqualsIsObjectEquals
public override bool Equals(object? obj)
=> base.Equals(obj);
/// <summary>
/// Serves as the default hash function.
/// </summary>
/// <returns>A hash code for the current object.</returns>
[EditorBrowsable(EditorBrowsableState.Never)]
// ReSharper disable once BaseObjectGetHashCodeCallInGetHashCode
public override int GetHashCode()
=> base.GetHashCode();
#endregion
}
| ComplexTypePropertyBuilder |
csharp | NSubstitute__NSubstitute | tests/NSubstitute.Acceptance.Specs/Infrastructure/FluentSomething.cs | {
"start": 57,
"end": 479
} | public class ____ : IFluentSomething
{
public IFluentSomething Chain()
{
return this;
}
public IFluentSomething Me()
{
return this;
}
public IFluentSomething Together()
{
return this;
}
public ISomething SorryNoChainingHere()
{
return null;
}
public ISomething SorryNoChainingHereEither()
{
return null;
}
} | FluentSomething |
csharp | graphql-dotnet__graphql-dotnet | src/GraphQL.Analyzers/Helpers/ExpressionExtensions.cs | {
"start": 11322,
"end": 13294
} | interface ____ INotifyCompletion.OnCompleted or ICriticalNotifyCompletion.UnsafeOnCompleted is invoked
// NOTE: (rather than any OnCompleted method conforming to a certain pattern).
// NOTE: Should this code be updated to match the spec?
// void OnCompleted(Action)
// Actions are delegates, so we'll just check for delegates.
if (!methods.Any(x => x.Name == WellKnownMemberNames.OnCompleted &&
x is { ReturnsVoid: true, Parameters: [{ Type.TypeKind: TypeKind.Delegate }] }))
{
return false;
}
// void GetResult() || T GetResult()
return methods.Any(m => m.Name == WellKnownMemberNames.GetResult && !m.Parameters.Any());
}
private static ArgumentSyntax? GetArgument(
string argumentName,
Dictionary<string, ArgumentSyntax> namedArguments,
InvocationExpressionSyntax invocation,
IMethodSymbol methodSymbol)
{
if (namedArguments.TryGetValue(argumentName, out var arg))
return arg;
int paramIndex = GetParamIndex(argumentName, methodSymbol);
var argument = paramIndex != -1 && invocation.ArgumentList.Arguments.Count > paramIndex
? invocation.ArgumentList.Arguments[paramIndex]
: null;
// if requested argument is a named argument we should find it in 'namedArguments' dict
// if we got here and found named argument - it's another argument placed at the requested
// argument index, and requested argument has a default value (optional)
return argument is { NameColon: null }
? argument
: null;
}
private static int GetParamIndex(string argumentName, IMethodSymbol methodSymbol)
{
var param = methodSymbol.Parameters.SingleOrDefault(p => p.Name == argumentName);
return param != null
? methodSymbol.Parameters.IndexOf(param)
: -1;
}
}
| method |
csharp | EventStore__EventStore | src/KurrentDB.Core.Tests/Services/Storage/ReadIndexTestScenario.cs | {
"start": 978,
"end": 20082
} | public abstract class ____<TLogFormat, TStreamId> : SpecificationWithDirectoryPerTestFixture {
protected readonly int MaxEntriesInMemTable;
protected readonly int StreamInfoCacheCapacity;
protected readonly long MetastreamMaxCount;
protected readonly bool PerformAdditionalCommitChecks;
protected readonly byte IndexBitnessVersion;
protected LogFormatAbstractor<TStreamId> _logFormat;
protected IRecordFactory<TStreamId> _recordFactory;
protected INameIndex<TStreamId> _streamNameIndex;
protected INameIndex<TStreamId> _eventTypeIndex;
protected ITableIndex<TStreamId> TableIndex;
protected IReadIndex<TStreamId> ReadIndex;
protected IHasher<TStreamId> LowHasher { get; private set; }
protected IHasher<TStreamId> HighHasher { get; private set; }
protected ILongHasher<TStreamId> Hasher { get; private set; }
protected TFChunkDb Db;
protected TFChunkWriter Writer;
protected ICheckpoint WriterCheckpoint;
protected ICheckpoint ChaserCheckpoint;
private readonly int _chunkSize;
private TFChunkScavenger<TStreamId> _scavenger;
private bool _scavenge;
private bool _completeLastChunkOnScavenge;
private bool _mergeChunks;
private bool _scavengeIndex;
protected ReadIndexTestScenario(
int maxEntriesInMemTable = 20,
long metastreamMaxCount = 1,
int streamInfoCacheCapacity = 0,
byte indexBitnessVersion = Opts.IndexBitnessVersionDefault,
bool performAdditionalChecks = true,
int chunkSize = 10_000,
IHasher<TStreamId> lowHasher = null,
IHasher<TStreamId> highHasher = null) {
Ensure.Positive(maxEntriesInMemTable, "maxEntriesInMemTable");
MaxEntriesInMemTable = maxEntriesInMemTable;
StreamInfoCacheCapacity = streamInfoCacheCapacity;
MetastreamMaxCount = metastreamMaxCount;
IndexBitnessVersion = indexBitnessVersion;
PerformAdditionalCommitChecks = performAdditionalChecks;
LowHasher = lowHasher;
HighHasher = highHasher;
_chunkSize = chunkSize;
}
public override async Task TestFixtureSetUp() {
await base.TestFixtureSetUp();
var indexDirectory = GetFilePathFor("index");
_logFormat = LogFormatHelper<TLogFormat, TStreamId>.LogFormatFactory.Create(new() {
IndexDirectory = indexDirectory,
});
_recordFactory = _logFormat.RecordFactory;
_streamNameIndex = _logFormat.StreamNameIndex;
_eventTypeIndex = _logFormat.EventTypeIndex;
LowHasher ??= _logFormat.LowHasher;
HighHasher ??= _logFormat.HighHasher;
Hasher = new CompositeHasher<TStreamId>(LowHasher, HighHasher);
WriterCheckpoint = new InMemoryCheckpoint(0);
ChaserCheckpoint = new InMemoryCheckpoint(0);
Db = new TFChunkDb(TFChunkHelper.CreateDbConfig(PathName, WriterCheckpoint, ChaserCheckpoint,
replicationCheckpoint: new InMemoryCheckpoint(-1), chunkSize: _chunkSize));
await Db.Open();
// create db
Writer = new TFChunkWriter(Db);
await Writer.Open(CancellationToken.None);
await WriteTestScenario(CancellationToken.None);
await Writer.DisposeAsync();
Writer = null;
WriterCheckpoint.Flush();
ChaserCheckpoint.Write(WriterCheckpoint.Read());
ChaserCheckpoint.Flush();
var reader = new TFChunkReader(Db, Db.Config.WriterCheckpoint);
var emptyStreamId = _logFormat.EmptyStreamId;
TableIndex = TransformTableIndex(new TableIndex<TStreamId>(indexDirectory, LowHasher, HighHasher, emptyStreamId,
() => new HashListMemTable(IndexBitnessVersion, MaxEntriesInMemTable * 2),
reader,
IndexBitnessVersion,
int.MaxValue,
MaxEntriesInMemTable));
_logFormat.StreamNamesProvider.SetTableIndex(TableIndex);
var readIndex = new ReadIndex<TStreamId>(new NoopPublisher(),
reader,
TableIndex,
_logFormat.StreamNameIndexConfirmer,
_logFormat.StreamIds,
_logFormat.StreamNamesProvider,
_logFormat.EmptyStreamId,
_logFormat.StreamIdValidator,
_logFormat.StreamIdSizer,
_logFormat.StreamExistenceFilter,
_logFormat.StreamExistenceFilterReader,
_logFormat.EventTypeIndexConfirmer,
new LRUCache<TStreamId, IndexBackend<TStreamId>.EventNumberCached>("LastEventNumber", StreamInfoCacheCapacity),
new LRUCache<TStreamId, IndexBackend<TStreamId>.MetadataCached>("StreamMetadata", StreamInfoCacheCapacity),
additionalCommitChecks: PerformAdditionalCommitChecks,
metastreamMaxCount: MetastreamMaxCount,
hashCollisionReadLimit: Opts.HashCollisionReadLimitDefault,
skipIndexScanOnReads: Opts.SkipIndexScanOnReadsDefault,
replicationCheckpoint: Db.Config.ReplicationCheckpoint,
indexCheckpoint: Db.Config.IndexCheckpoint,
indexStatusTracker: new IndexStatusTracker.NoOp(),
indexTracker: new IndexTracker.NoOp(),
cacheTracker: new CacheHitsMissesTracker.NoOp());
await readIndex.IndexCommitter.Init(ChaserCheckpoint.Read(), CancellationToken.None);
ReadIndex = readIndex;
// wait for tables to be merged
TableIndex.WaitForBackgroundTasks(16_000);
// scavenge must run after readIndex is built
if (_scavenge) {
if (_completeLastChunkOnScavenge)
await (await Db.Manager.GetInitializedChunk(Db.Manager.ChunksCount - 1, CancellationToken.None))
.Complete(CancellationToken.None);
_scavenger = new TFChunkScavenger<TStreamId>(Log.Logger, Db, new FakeTFScavengerLog(), TableIndex,
ReadIndex, _logFormat.Metastreams);
await _scavenger.Scavenge(alwaysKeepScavenged: true, mergeChunks: _mergeChunks,
scavengeIndex: _scavengeIndex);
}
}
public override async Task TestFixtureTearDown() {
_logFormat?.Dispose();
ReadIndex?.Close();
ReadIndex?.Dispose();
TableIndex?.Close();
await (Db?.DisposeAsync() ?? ValueTask.CompletedTask);
await base.TestFixtureTearDown();
}
protected virtual ITableIndex<TStreamId> TransformTableIndex(ITableIndex<TStreamId> tableIndex) {
return tableIndex;
}
protected virtual ValueTask WriteTestScenario(CancellationToken token)
=> token.IsCancellationRequested ? ValueTask.FromCanceled(token) : ValueTask.CompletedTask;
protected async ValueTask<(TStreamId, long)> GetOrReserve(string eventStreamName, CancellationToken token) {
var newPos = Writer.Position;
_streamNameIndex.GetOrReserve(_logFormat.RecordFactory, eventStreamName, newPos, out var eventStreamId, out var streamRecord);
if (streamRecord is not null) {
(_, newPos) = await Writer.Write(streamRecord, token);
}
return (eventStreamId, newPos);
}
protected async ValueTask<(TStreamId, long)> GetOrReserveEventType(string eventType, CancellationToken token) {
var newPos = Writer.Position;
_eventTypeIndex.GetOrReserveEventType(_logFormat.RecordFactory, eventType, newPos, out var eventTypeId, out var eventTypeRecord);
if (eventTypeRecord is not null) {
(_, newPos) = await Writer.Write(eventTypeRecord, token);
}
return (eventTypeId, newPos);
}
protected async ValueTask<EventRecord> WriteSingleEvent(string eventStreamName,
long eventNumber,
string data,
DateTime? timestamp = null,
Guid eventId = default,
bool retryOnFail = false,
string eventType = "some-type",
CancellationToken token = default) {
var (eventStreamId, _) = await GetOrReserve(eventStreamName, token);
var (eventTypeId, pos) = await GetOrReserveEventType(eventType, token);
var prepare = LogRecord.SingleWrite(_recordFactory, pos,
eventId == default(Guid) ? Guid.NewGuid() : eventId,
Guid.NewGuid(),
eventStreamId,
eventNumber - 1,
eventTypeId,
Helper.UTF8NoBom.GetBytes(data),
null,
timestamp);
if (!retryOnFail) {
Assert.IsTrue(await Writer.Write(prepare, token) is (true, _));
} else {
long firstPos = prepare.LogPosition;
(var success, pos) = await Writer.Write(prepare, token);
if (!success) {
prepare = LogRecord.SingleWrite(_recordFactory, pos,
prepare.CorrelationId,
prepare.EventId,
prepare.EventStreamId,
prepare.ExpectedVersion,
prepare.EventType,
prepare.Data,
prepare.Metadata,
prepare.TimeStamp);
if (await Writer.Write(prepare, token) is (false, _))
Assert.Fail("Second write try failed when first writing prepare at {0}, then at {1}.", firstPos,
prepare.LogPosition);
}
}
var commit = LogRecord.Commit(Writer.Position, prepare.CorrelationId, prepare.LogPosition,
eventNumber);
if (!retryOnFail) {
Assert.IsTrue(await Writer.Write(commit, token) is (true, _));
} else {
var firstPos = commit.LogPosition;
(var success, pos) = await Writer.Write(commit, token);
if (!success) {
commit = LogRecord.Commit(pos, prepare.CorrelationId, prepare.LogPosition,
eventNumber);
if (await Writer.Write(commit, token) is (false, _))
Assert.Fail("Second write try failed when first writing prepare at {0}, then at {1}.", firstPos,
prepare.LogPosition);
}
}
Assert.AreEqual(eventStreamId, prepare.EventStreamId);
var eventRecord = new EventRecord(eventNumber, prepare, eventStreamName, eventType);
return eventRecord;
}
protected async ValueTask<EventRecord> WriteStreamMetadata(string eventStreamName, long eventNumber, string metadata,
DateTime? timestamp = null,
CancellationToken token = default) {
var (eventStreamId, _) = await GetOrReserve(SystemStreams.MetastreamOf(eventStreamName), token);
var (eventTypeId, pos) = await GetOrReserveEventType(SystemEventTypes.StreamMetadata, token);
var prepare = LogRecord.SingleWrite(_recordFactory, pos,
Guid.NewGuid(),
Guid.NewGuid(),
eventStreamId,
eventNumber - 1,
eventTypeId,
Helper.UTF8NoBom.GetBytes(metadata),
null,
timestamp ?? DateTime.UtcNow,
PrepareFlags.IsJson);
Assert.IsTrue(await Writer.Write(prepare, token) is (true, _));
var commit = LogRecord.Commit(Writer.Position, prepare.CorrelationId, prepare.LogPosition,
eventNumber);
Assert.IsTrue(await Writer.Write(commit, token) is (true, _));
Assert.AreEqual(eventStreamId, prepare.EventStreamId);
var eventRecord = new EventRecord(eventNumber, prepare, SystemStreams.MetastreamOf(eventStreamName), SystemEventTypes.StreamMetadata);
return eventRecord;
}
protected async ValueTask<EventRecord> WriteTransactionBegin(string eventStreamName, long expectedVersion, long eventNumber,
string eventData, CancellationToken token) {
LogFormatHelper<TLogFormat, TStreamId>.CheckIfExplicitTransactionsSupported();
var (eventStreamId, _) = await GetOrReserve(eventStreamName, token);
var (eventTypeId, pos) = await GetOrReserveEventType("some-type", token);
var prepare = LogRecord.Prepare(_recordFactory, pos,
Guid.NewGuid(),
Guid.NewGuid(),
Writer.Position,
0,
eventStreamId,
expectedVersion,
PrepareFlags.Data | PrepareFlags.TransactionBegin,
eventTypeId,
Helper.UTF8NoBom.GetBytes(eventData),
null);
Assert.IsTrue(await Writer.Write(prepare, token) is (true, _));
Assert.AreEqual(eventStreamId, prepare.EventStreamId);
return new EventRecord(eventNumber, prepare, eventStreamName, "some-type");
}
protected async ValueTask<IPrepareLogRecord<TStreamId>> WriteTransactionBegin(string eventStreamName, long expectedVersion, CancellationToken token) {
LogFormatHelper<TLogFormat, TStreamId>.CheckIfExplicitTransactionsSupported();
var (eventStreamId, pos) = await GetOrReserve(eventStreamName, token);
var prepare = LogRecord.TransactionBegin(_recordFactory, pos, Guid.NewGuid(), eventStreamId,
expectedVersion);
Assert.IsTrue(await Writer.Write(prepare, token) is (true, _));
return prepare;
}
protected async ValueTask<EventRecord> WriteTransactionEvent(Guid correlationId,
long transactionPos,
int transactionOffset,
string eventStreamName,
long eventNumber,
string eventData,
PrepareFlags flags,
bool retryOnFail = false,
string eventType = "some-type",
CancellationToken token = default) {
LogFormatHelper<TLogFormat, TStreamId>.CheckIfExplicitTransactionsSupported();
var (eventStreamId, _) = await GetOrReserve(eventStreamName, token);
var (eventTypeId, pos) = await GetOrReserveEventType(eventType, token);
var prepare = LogRecord.Prepare(_recordFactory, pos,
correlationId,
Guid.NewGuid(),
transactionPos,
transactionOffset,
eventStreamId,
ExpectedVersion.Any,
flags,
eventTypeId,
Helper.UTF8NoBom.GetBytes(eventData),
null);
if (retryOnFail) {
long firstPos = prepare.LogPosition;
var (success, newPos) = await Writer.Write(prepare, token);
if (!success) {
var tPos = prepare.TransactionPosition == prepare.LogPosition
? newPos
: prepare.TransactionPosition;
prepare = prepare.CopyForRetry(
logPosition: newPos,
transactionPosition: tPos);
if (await Writer.Write(prepare, token) is (false, _))
Assert.Fail("Second write try failed when first writing prepare at {0}, then at {1}.", firstPos,
prepare.LogPosition);
}
Assert.AreEqual(eventStreamId, prepare.EventStreamId);
return new EventRecord(eventNumber, prepare, eventStreamName, eventType);
}
Assert.IsTrue(await Writer.Write(prepare, token) is (true, _));
Assert.AreEqual(eventStreamId, prepare.EventStreamId);
return new EventRecord(eventNumber, prepare, eventStreamName, eventType);
}
protected async ValueTask<IPrepareLogRecord<TStreamId>> WriteTransactionEnd(Guid correlationId, long transactionId, string eventStreamName, CancellationToken token) {
LogFormatHelper<TLogFormat, TStreamId>.CheckIfExplicitTransactionsSupported();
var (eventStreamId, _) = await GetOrReserve(eventStreamName, token);
return await WriteTransactionEnd(correlationId, transactionId, eventStreamId, token);
}
protected async ValueTask<IPrepareLogRecord<TStreamId>> WriteTransactionEnd(Guid correlationId, long transactionId, TStreamId eventStreamId, CancellationToken token) {
LogFormatHelper<TLogFormat, TStreamId>.CheckIfExplicitTransactionsSupported();
var prepare = LogRecord.TransactionEnd(_recordFactory, Writer.Position,
correlationId,
Guid.NewGuid(),
transactionId,
eventStreamId);
Assert.IsTrue(await Writer.Write(prepare, token) is (true, _));
return prepare;
}
protected async ValueTask<IPrepareLogRecord<TStreamId>> WritePrepare(string eventStreamName,
long expectedVersion,
Guid eventId = default,
string eventType = null,
string data = null,
PrepareFlags additionalFlags = PrepareFlags.None,
CancellationToken token = default) {
var (eventStreamId, _) = await GetOrReserve(eventStreamName, token);
var (eventTypeId, pos) = await GetOrReserveEventType(eventType.IsEmptyString() ? "some-type" : eventType, token);
var prepare = LogRecord.SingleWrite(_recordFactory, pos,
Guid.NewGuid(),
eventId == default ? Guid.NewGuid() : eventId,
eventStreamId,
expectedVersion,
eventTypeId,
data.IsEmptyString() ? LogRecord.NoData : Helper.UTF8NoBom.GetBytes(data),
LogRecord.NoData,
DateTime.UtcNow,
additionalFlags);
Assert.IsTrue(await Writer.Write(prepare, token) is (true, _));
return prepare;
}
protected async ValueTask<CommitLogRecord> WriteCommit(long preparePos, string eventStreamName, long eventNumber, CancellationToken token) {
LogFormatHelper<TLogFormat, TStreamId>.CheckIfExplicitTransactionsSupported();
var commit = LogRecord.Commit(Writer.Position, Guid.NewGuid(), preparePos, eventNumber);
Assert.IsTrue(await Writer.Write(commit, token) is (true, _));
return commit;
}
protected async ValueTask<long> WriteCommit(Guid correlationId, long transactionId, string eventStreamName,
long eventNumber, CancellationToken token) {
LogFormatHelper<TLogFormat, TStreamId>.CheckIfExplicitTransactionsSupported();
var (eventStreamId, _) = await GetOrReserve(eventStreamName, token);
return await WriteCommit(correlationId, transactionId, eventStreamId, eventNumber, token);
}
protected async ValueTask<long> WriteCommit(Guid correlationId, long transactionId, TStreamId eventStreamId, long eventNumber, CancellationToken token) {
LogFormatHelper<TLogFormat, TStreamId>.CheckIfExplicitTransactionsSupported();
var commit = LogRecord.Commit(Writer.Position, correlationId, transactionId, eventNumber);
Assert.IsTrue(await Writer.Write(commit, token) is (true, _));
return commit.LogPosition;
}
protected async ValueTask<EventRecord> WriteDelete(string eventStreamName, CancellationToken token) {
var (eventStreamId, _) = await GetOrReserve(eventStreamName, token);
var (streamDeletedEventTypeId, pos) = await GetOrReserveEventType(SystemEventTypes.StreamDeleted, token);
var prepare = LogRecord.DeleteTombstone(_recordFactory, pos,
Guid.NewGuid(), Guid.NewGuid(), eventStreamId, streamDeletedEventTypeId, EventNumber.DeletedStream - 1);
Assert.IsTrue(await Writer.Write(prepare, token) is (true, _));
var commit = LogRecord.Commit(Writer.Position,
prepare.CorrelationId,
prepare.LogPosition,
EventNumber.DeletedStream);
Assert.IsTrue(await Writer.Write(commit, token) is (true, _));
Assert.AreEqual(eventStreamId, prepare.EventStreamId);
return new EventRecord(EventNumber.DeletedStream, prepare, eventStreamName, SystemEventTypes.StreamDeleted);
}
protected async ValueTask<IPrepareLogRecord<TStreamId>> WriteDeletePrepare(string eventStreamName, CancellationToken token) {
var (eventStreamId, _) = await GetOrReserve(eventStreamName, token);
var (streamDeletedEventTypeId, pos) = await GetOrReserveEventType(SystemEventTypes.StreamDeleted, token);
var prepare = LogRecord.DeleteTombstone(_recordFactory, pos,
Guid.NewGuid(), Guid.NewGuid(), eventStreamId, streamDeletedEventTypeId, ExpectedVersion.Any);
Assert.IsTrue(await Writer.Write(prepare, token) is (true, _));
return prepare;
}
protected async ValueTask<CommitLogRecord> WriteDeleteCommit(IPrepareLogRecord prepare, CancellationToken token) {
LogFormatHelper<TLogFormat, TStreamId>.CheckIfExplicitTransactionsSupported();
var commit = LogRecord.Commit(Writer.Position,
prepare.CorrelationId,
prepare.LogPosition,
EventNumber.DeletedStream);
Assert.IsTrue(await Writer.Write(commit, token) is (true, _));
return commit;
}
// This is LogV2 specific
protected async ValueTask<PrepareLogRecord> WriteSingleEventWithLogVersion0(Guid id, string streamId,
long position,
long expectedVersion, PrepareFlags? flags = null,
CancellationToken token = default) {
if (!flags.HasValue) {
flags = PrepareFlags.SingleWrite;
}
var record = new PrepareLogRecord(position, id, id, position, 0, streamId, null, expectedVersion,
DateTime.UtcNow,
flags.Value, "type", null, new byte[10], new byte[0], LogRecordVersion.LogRecordV0);
var (_, pos) = await Writer.Write(record, token);
await Writer.Write(
new CommitLogRecord(pos, id, position, DateTime.UtcNow, expectedVersion, LogRecordVersion.LogRecordV0),
token);
return record;
}
protected TFPos GetBackwardReadPos() {
var pos = new TFPos(WriterCheckpoint.ReadNonFlushed(), WriterCheckpoint.ReadNonFlushed());
return pos;
}
protected void Scavenge(bool completeLast, bool mergeChunks, bool scavengeIndex = true) {
if (_scavenge)
throw new InvalidOperationException("Scavenge can be executed only once in ReadIndexTestScenario");
_scavenge = true;
_completeLastChunkOnScavenge = completeLast;
_mergeChunks = mergeChunks;
_scavengeIndex = scavengeIndex;
}
}
| ReadIndexTestScenario |
csharp | dotnet__aspnetcore | src/Components/Components/test/CascadingParameterTest.cs | {
"start": 42045,
"end": 42405
} | class ____ : AutoRenderComponent
{
private readonly RenderFragment _renderFragment;
public TestComponent(RenderFragment renderFragment)
{
_renderFragment = renderFragment;
}
protected override void BuildRenderTree(RenderTreeBuilder builder)
=> _renderFragment(builder);
}
| TestComponent |
csharp | CommunityToolkit__WindowsCommunityToolkit | UnitTests/UnitTests.UWP/UI/Controls/Test_ConstrainedBox.AspectRatio.cs | {
"start": 560,
"end": 7921
} | public partial class ____ : VisualUITestBase
{
[TestCategory("ConstrainedBox")]
[TestMethod]
public async Task Test_ConstrainedBox_Normal_AspectHorizontal()
{
await App.DispatcherQueue.EnqueueAsync(async () =>
{
var treeRoot = XamlReader.Load(@"<Page
xmlns=""http://schemas.microsoft.com/winfx/2006/xaml/presentation""
xmlns:x=""http://schemas.microsoft.com/winfx/2006/xaml""
xmlns:controls=""using:Microsoft.Toolkit.Uwp.UI.Controls"">
<controls:ConstrainedBox x:Name=""ConstrainedBox"" AspectRatio=""2:1"" Width=""200"">
<Border HorizontalAlignment=""Stretch"" VerticalAlignment=""Stretch"" Background=""Red""/>
</controls:ConstrainedBox>
</Page>") as FrameworkElement;
Assert.IsNotNull(treeRoot, "Could not load XAML tree.");
// Initialize Visual Tree
await SetTestContentAsync(treeRoot);
var panel = treeRoot.FindChild("ConstrainedBox") as ConstrainedBox;
Assert.IsNotNull(panel, "Could not find ConstrainedBox in tree.");
// Force Layout calculations
panel.UpdateLayout();
var child = panel.Content as Border;
Assert.IsNotNull(child, "Could not find inner Border");
// Check Size
Assert.AreEqual(200, child.ActualWidth, 0.01, "Actual width does not meet expected value of 200");
Assert.AreEqual(100, child.ActualHeight, 0.01, "Actual height does not meet expected value of 100");
});
}
[TestCategory("ConstrainedBox")]
[TestMethod]
public async Task Test_ConstrainedBox_Normal_AspectVertical()
{
await App.DispatcherQueue.EnqueueAsync(async () =>
{
var treeRoot = XamlReader.Load(@"<Page
xmlns=""http://schemas.microsoft.com/winfx/2006/xaml/presentation""
xmlns:x=""http://schemas.microsoft.com/winfx/2006/xaml""
xmlns:controls=""using:Microsoft.Toolkit.Uwp.UI.Controls"">
<controls:ConstrainedBox x:Name=""ConstrainedBox"" AspectRatio=""1:2"" Height=""200"">
<Border HorizontalAlignment=""Stretch"" VerticalAlignment=""Stretch"" Background=""Red""/>
</controls:ConstrainedBox>
</Page>") as FrameworkElement;
Assert.IsNotNull(treeRoot, "Could not load XAML tree.");
// Initialize Visual Tree
await SetTestContentAsync(treeRoot);
var panel = treeRoot.FindChild("ConstrainedBox") as ConstrainedBox;
Assert.IsNotNull(panel, "Could not find ConstrainedBox in tree.");
// Force Layout calculations
panel.UpdateLayout();
var child = panel.Content as Border;
Assert.IsNotNull(child, "Could not find inner Border");
// Check Size
Assert.AreEqual(100, child.ActualWidth, 0.01, "Actual width does not meet expected value of 100");
Assert.AreEqual(200, child.ActualHeight, 0.01, "Actual height does not meet expected value of 200");
});
}
[TestCategory("ConstrainedBox")]
[TestMethod]
public async Task Test_ConstrainedBox_Normal_IntegerWidth()
{
await App.DispatcherQueue.EnqueueAsync(async () =>
{
var treeRoot = XamlReader.Load(@"<Page
xmlns=""http://schemas.microsoft.com/winfx/2006/xaml/presentation""
xmlns:x=""http://schemas.microsoft.com/winfx/2006/xaml""
xmlns:controls=""using:Microsoft.Toolkit.Uwp.UI.Controls"">
<controls:ConstrainedBox x:Name=""ConstrainedBox"" AspectRatio=""2"" Height=""100"">
<Border HorizontalAlignment=""Stretch"" VerticalAlignment=""Stretch"" Background=""Red""/>
</controls:ConstrainedBox>
</Page>") as FrameworkElement;
Assert.IsNotNull(treeRoot, "Could not load XAML tree.");
// Initialize Visual Tree
await SetTestContentAsync(treeRoot);
var panel = treeRoot.FindChild("ConstrainedBox") as ConstrainedBox;
Assert.IsNotNull(panel, "Could not find ConstrainedBox in tree.");
// Check Size
Assert.AreEqual(2.0, panel.AspectRatio, 0.01, "ApectRatio does not meet expected value of 2.0");
// Force Layout calculations
panel.UpdateLayout();
var child = panel.Content as Border;
Assert.IsNotNull(child, "Could not find inner Border");
// Check Size
Assert.AreEqual(200, child.ActualWidth, 0.01, "Actual width does not meet expected value of 200");
Assert.AreEqual(100, child.ActualHeight, 0.01, "Actual height does not meet expected value of 100");
});
}
[TestCategory("ConstrainedBox")]
[TestMethod]
public void Test_ConstrainedBox_AspectRatioParsing_WidthAndHeight()
{
CultureInfo currentCulture = CultureInfo.CurrentCulture;
try
{
CultureInfo.CurrentCulture = CultureInfo.InvariantCulture;
AspectRatio ratio = AspectRatio.ConvertToAspectRatio("1.666:1.2");
Assert.AreEqual(ratio.Width, 1.666);
Assert.AreEqual(ratio.Height, 1.2);
// Explicit tests for other culture infos, see https://github.com/CommunityToolkit/WindowsCommunityToolkit/issues/4252
CultureInfo.CurrentCulture = CultureInfo.GetCultureInfo("it-IT");
ratio = AspectRatio.ConvertToAspectRatio("1.666:1.2");
Assert.AreEqual(ratio.Width, 1.666);
Assert.AreEqual(ratio.Height, 1.2);
CultureInfo.CurrentCulture = CultureInfo.GetCultureInfo("fr-FR");
ratio = AspectRatio.ConvertToAspectRatio("1.666:1.2");
Assert.AreEqual(ratio.Width, 1.666);
Assert.AreEqual(ratio.Height, 1.2);
}
finally
{
CultureInfo.CurrentCulture = currentCulture;
}
}
[TestCategory("ConstrainedBox")]
[TestMethod]
public void Test_ConstrainedBox_AspectRatioParsing_Ratio()
{
CultureInfo currentCulture = CultureInfo.CurrentCulture;
try
{
CultureInfo.CurrentCulture = CultureInfo.InvariantCulture;
AspectRatio ratio = AspectRatio.ConvertToAspectRatio("1.666");
Assert.AreEqual(ratio.Width, 1.666);
Assert.AreEqual(ratio.Height, 1);
CultureInfo.CurrentCulture = CultureInfo.GetCultureInfo("it-IT");
ratio = AspectRatio.ConvertToAspectRatio("1.666");
Assert.AreEqual(ratio.Width, 1.666);
Assert.AreEqual(ratio.Height, 1);
CultureInfo.CurrentCulture = CultureInfo.GetCultureInfo("fr-FR");
ratio = AspectRatio.ConvertToAspectRatio("1.666");
Assert.AreEqual(ratio.Width, 1.666);
Assert.AreEqual(ratio.Height, 1);
}
finally
{
CultureInfo.CurrentCulture = currentCulture;
}
}
}
}
| Test_ConstrainedBox |
csharp | bitwarden__server | src/Core/Vault/Models/Data/CipherCardData.cs | {
"start": 133,
"end": 453
} | public class ____ : CipherData
{
public CipherCardData() { }
public string CardholderName { get; set; }
public string Brand { get; set; }
public string Number { get; set; }
public string ExpMonth { get; set; }
public string ExpYear { get; set; }
public string Code { get; set; }
}
| CipherCardData |
csharp | dotnet__aspnetcore | src/Hosting/Hosting/src/Internal/StartupLoader.cs | {
"start": 2265,
"end": 4385
} | class ____ returns an <see cref="IServiceProvider"/> and there is at least an <see cref="IStartupConfigureServicesFilter"/> registered we
// throw as the filters can't be applied.
public static StartupMethods LoadMethods(IServiceProvider hostingServiceProvider, [DynamicallyAccessedMembers(StartupLinkerOptions.Accessibility)] Type startupType, string environmentName, object? instance = null)
{
var configureMethod = FindConfigureDelegate(startupType, environmentName);
var servicesMethod = FindConfigureServicesDelegate(startupType, environmentName);
var configureContainerMethod = FindConfigureContainerDelegate(startupType, environmentName);
if (instance == null && (!configureMethod.MethodInfo.IsStatic || (servicesMethod?.MethodInfo != null && !servicesMethod.MethodInfo.IsStatic)))
{
instance = ActivatorUtilities.GetServiceOrCreateInstance(hostingServiceProvider, startupType);
}
// The type of the TContainerBuilder. If there is no ConfigureContainer method we can just use object as it's not
// going to be used for anything.
var type = configureContainerMethod.MethodInfo != null ? configureContainerMethod.GetContainerType() : typeof(object);
var builder = (ConfigureServicesDelegateBuilder)Activator.CreateInstance(
CreateConfigureServicesDelegateBuilder(type),
hostingServiceProvider,
servicesMethod,
configureContainerMethod,
instance)!;
return new StartupMethods(instance, configureMethod.Build(instance), builder.Build());
[return: DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicConstructors)]
[UnconditionalSuppressMessage("AOT", "IL3050:RequiresDynamicCode",
Justification = "There is a runtime check for ValueType startup container. It's unlikely anyone will use a ValueType here.")]
static Type CreateConfigureServicesDelegateBuilder(Type type)
{
// Configure container uses MakeGenericType with the container type. MakeGenericType + | ConfigureServices |
csharp | mongodb__mongo-csharp-driver | tests/MongoDB.Bson.Tests/Serialization/Serializers/ExtraElementsWithPartiallyImmutableClassTests.cs | {
"start": 4013,
"end": 12260
} | private class ____
{
private readonly int _id;
private readonly int _a;
private int _b;
private IDictionary<string, object> _x;
[BsonConstructor]
public C(int id, int a)
{
_id = id;
_a = a;
}
[BsonId(Order = 1)]
public int Id => _id;
[BsonElement(Order = 2)]
public int A => _a;
[BsonElement(Order = 3)]
public int B
{
get { return _b; }
set { _b = value; }
}
[BsonElement(Order = 4)]
[BsonExtraElements]
public IDictionary<string, object> X
{
get { return _x; }
set { _x = value; }
}
}
[Fact]
public void TestNoExtraElements()
{
var json = "{ '_id' : 1, 'A' : 2, 'B' : 3 }".Replace("'", "\"");
var c = BsonSerializer.Deserialize<C>(json);
Assert.Equal(json, c.ToJson(writerSettings: new JsonWriterSettings { OutputMode = JsonOutputMode.Shell }));
}
[Fact]
public void TestOneExtraInt32Element()
{
var json = "{ '_id' : 1, 'A' : 2, 'B' : 3, 'C' : 4 }".Replace("'", "\"");
var c = BsonSerializer.Deserialize<C>(json);
Assert.Equal(json, c.ToJson(writerSettings: new JsonWriterSettings { OutputMode = JsonOutputMode.Shell }));
}
[Fact]
public void TestOneExtraInt32ElementNamedX()
{
var json = "{ '_id' : 1, 'A' : 2, 'B' : 3, 'X' : 4 }".Replace("'", "\"");
var c = BsonSerializer.Deserialize<C>(json);
Assert.Equal(json, c.ToJson(writerSettings: new JsonWriterSettings { OutputMode = JsonOutputMode.Shell }));
}
[Fact]
public void TestOneExtraStringElement()
{
var json = "{ '_id' : 1, 'A' : 2, 'B' : 3, 'C' : 'xyz' }".Replace("'", "\"");
var c = BsonSerializer.Deserialize<C>(json);
Assert.Equal(json, c.ToJson(writerSettings: new JsonWriterSettings { OutputMode = JsonOutputMode.Shell }));
}
[Fact]
public void TestOneExtraDocumentElement()
{
var json = "{ '_id' : 1, 'A' : 2, 'B' : 3, 'C' : { 'D' : 4, 'E' : 'xyz' } }".Replace("'", "\"");
var c = BsonSerializer.Deserialize<C>(json);
Assert.Equal(json, c.ToJson(writerSettings: new JsonWriterSettings { OutputMode = JsonOutputMode.Shell }));
}
[Fact]
public void TestTwoExtraElements()
{
var json = "{ '_id' : 1, 'A' : 2, 'B' : 3, 'C' : 4, 'D' : 'xyz' }".Replace("'", "\"");
var c = BsonSerializer.Deserialize<C>(json);
Assert.Equal(json, c.ToJson(writerSettings: new JsonWriterSettings { OutputMode = JsonOutputMode.Shell }));
}
[Fact]
public void TestExtraElementsOfAllTypes()
{
var json = "{ '_id' : 1, 'A' : 2, 'B' : 3, #X }";
var extraElements = new List<string[]>
{
new string[] { "XArray", "[1, 2.0]" },
new string[] { "XBinary", "HexData(2, '1234')" },
new string[] { "XBoolean", "true" },
new string[] { "XByteArray", "HexData(0, '1234')" },
new string[] { "XDateTime", "ISODate('2012-03-16T11:19:00Z')" },
new string[] { "XDocument", "{ 'a' : 1 }" },
new string[] { "XDouble", "1.0" },
new string[] { "XInt32", "1" },
new string[] { "XInt64", "NumberLong(1)" },
new string[] { "XJavaScript", "{ '$code' : 'abc' }" },
new string[] { "XJavaScriptWithScope", "{ '$code' : 'abc', '$scope' : { 'x' : 1 } }" },
new string[] { "XMaxKey", "MaxKey" },
new string[] { "XMinKey", "MinKey" },
new string[] { "XNull", "null" },
new string[] { "XObjectId", "ObjectId('00112233445566778899aabb')" },
new string[] { "XRegularExpression", "/abc/" },
new string[] { "XString", "'abc'" },
new string[] { "XSymbol", "{ '$symbol' : 'abc' }" },
new string[] { "XTimestamp", "{ '$timestamp' : NumberLong(1234) }" },
new string[] { "XUndefined", "undefined" },
};
var extraElementsRepresentation = string.Join(", ", extraElements.Select(e => string.Format("'{0}' : {1}", e[0], e[1])).ToArray());
json = json.Replace("#X", extraElementsRepresentation).Replace("'", "\"");
var c = BsonSerializer.Deserialize<C>(new JsonReader(json, new JsonReaderSettings()));
// round trip it both ways before checking individual values
json = c.ToJson(writerSettings: new JsonWriterSettings { OutputMode = JsonOutputMode.Shell });
c = BsonSerializer.Deserialize<C>(new JsonReader(json, new JsonReaderSettings()));
Assert.IsType<List<object>>(c.X["XArray"]);
Assert.IsType<BsonBinaryData>(c.X["XBinary"]);
Assert.IsType<bool>(c.X["XBoolean"]);
Assert.IsType<byte[]>(c.X["XByteArray"]);
Assert.IsType<DateTime>(c.X["XDateTime"]);
Assert.IsType<Dictionary<string, object>>(c.X["XDocument"]);
Assert.IsType<double>(c.X["XDouble"]);
Assert.IsType<int>(c.X["XInt32"]);
Assert.IsType<long>(c.X["XInt64"]);
Assert.IsType<BsonJavaScript>(c.X["XJavaScript"]);
Assert.IsType<BsonJavaScriptWithScope>(c.X["XJavaScriptWithScope"]);
Assert.IsType<BsonMaxKey>(c.X["XMaxKey"]);
Assert.IsType<BsonMinKey>(c.X["XMinKey"]);
Assert.Null(c.X["XNull"]);
Assert.IsType<ObjectId>(c.X["XObjectId"]);
Assert.IsType<BsonRegularExpression>(c.X["XRegularExpression"]);
Assert.IsType<string>(c.X["XString"]);
Assert.IsType<BsonSymbol>(c.X["XSymbol"]);
Assert.IsType<BsonTimestamp>(c.X["XTimestamp"]);
Assert.IsType<BsonUndefined>(c.X["XUndefined"]);
Assert.Equal(extraElements.Count, c.X.Count);
Assert.True(new object[] { 1, 2.0 }.SequenceEqual((List<object>)c.X["XArray"]));
#pragma warning disable CS0618 // Type or member is obsolete
Assert.Equal(BsonBinarySubType.OldBinary, ((BsonBinaryData)c.X["XBinary"]).SubType);
#pragma warning restore CS0618 // Type or member is obsolete
Assert.True(new byte[] { 0x12, 0x34 }.SequenceEqual(((BsonBinaryData)c.X["XBinary"]).Bytes));
Assert.Equal(true, c.X["XBoolean"]);
Assert.True(new byte[] { 0x12, 0x34 }.SequenceEqual((byte[])c.X["XByteArray"]));
Assert.Equal(new DateTime(2012, 3, 16, 11, 19, 0, DateTimeKind.Utc), c.X["XDateTime"]);
Assert.Equal(1, ((IDictionary<string, object>)c.X["XDocument"]).Count);
Assert.Equal(1, ((IDictionary<string, object>)c.X["XDocument"])["a"]);
Assert.Equal(1.0, c.X["XDouble"]);
Assert.Equal(1, c.X["XInt32"]);
Assert.Equal(1L, c.X["XInt64"]);
Assert.Equal("abc", ((BsonJavaScript)c.X["XJavaScript"]).Code);
Assert.Equal("abc", ((BsonJavaScriptWithScope)c.X["XJavaScriptWithScope"]).Code);
Assert.Equal(1, ((BsonJavaScriptWithScope)c.X["XJavaScriptWithScope"]).Scope.ElementCount);
Assert.Equal(new BsonInt32(1), ((BsonJavaScriptWithScope)c.X["XJavaScriptWithScope"]).Scope["x"]);
Assert.Same(BsonMaxKey.Value, c.X["XMaxKey"]);
Assert.Same(BsonMinKey.Value, c.X["XMinKey"]);
Assert.Equal(null, c.X["XNull"]);
Assert.Equal(ObjectId.Parse("00112233445566778899aabb"), c.X["XObjectId"]);
Assert.Equal(new BsonRegularExpression("abc"), c.X["XRegularExpression"]);
Assert.Equal("abc", c.X["XString"]);
Assert.Same(BsonSymbolTable.Lookup("abc"), c.X["XSymbol"]);
Assert.Equal(new BsonTimestamp(1234), c.X["XTimestamp"]);
Assert.Same(BsonUndefined.Value, c.X["XUndefined"]);
}
}
}
| C |
csharp | dotnet__reactive | Rx.NET/Source/src/System.Reactive/Notification.cs | {
"start": 8025,
"end": 13596
} | internal sealed class ____ : Notification<T>
{
/// <summary>
/// Constructs a notification of an exception.
/// </summary>
public OnErrorNotification(Exception exception)
{
Exception = exception;
}
/// <summary>
/// Throws the exception.
/// </summary>
public override T Value { get { Exception.Throw(); return default!; } }
/// <summary>
/// Returns the exception.
/// </summary>
public override Exception Exception { get; }
/// <summary>
/// Returns <c>false</c>.
/// </summary>
public override bool HasValue => false;
/// <summary>
/// Returns <see cref="NotificationKind.OnError"/>.
/// </summary>
public override NotificationKind Kind => NotificationKind.OnError;
/// <summary>
/// Returns the hash code for this instance.
/// </summary>
public override int GetHashCode() => Exception.GetHashCode();
/// <summary>
/// Indicates whether this instance and other are equal.
/// </summary>
public override bool Equals(Notification<T>? other)
{
if (ReferenceEquals(this, other))
{
return true;
}
if (other is null)
{
return false;
}
if (other.Kind != NotificationKind.OnError)
{
return false;
}
return Equals(Exception, other.Exception);
}
/// <summary>
/// Returns a string representation of this instance.
/// </summary>
public override string ToString() => string.Format(CultureInfo.CurrentCulture, "OnError({0})", Exception.GetType().FullName);
/// <summary>
/// Invokes the observer's method corresponding to the notification.
/// </summary>
/// <param name="observer">Observer to invoke the notification on.</param>
public override void Accept(IObserver<T> observer)
{
if (observer == null)
{
throw new ArgumentNullException(nameof(observer));
}
observer.OnError(Exception);
}
/// <summary>
/// Invokes the observer's method corresponding to the notification and returns the produced result.
/// </summary>
/// <param name="observer">Observer to invoke the notification on.</param>
/// <returns>Result produced by the observation.</returns>
public override TResult Accept<TResult>(IObserver<T, TResult> observer)
{
if (observer == null)
{
throw new ArgumentNullException(nameof(observer));
}
return observer.OnError(Exception);
}
/// <summary>
/// Invokes the delegate corresponding to the notification.
/// </summary>
/// <param name="onNext">Delegate to invoke for an OnNext notification.</param>
/// <param name="onError">Delegate to invoke for an OnError notification.</param>
/// <param name="onCompleted">Delegate to invoke for an OnCompleted notification.</param>
public override void Accept(Action<T> onNext, Action<Exception> onError, Action onCompleted)
{
if (onNext == null)
{
throw new ArgumentNullException(nameof(onNext));
}
if (onError == null)
{
throw new ArgumentNullException(nameof(onError));
}
if (onCompleted == null)
{
throw new ArgumentNullException(nameof(onCompleted));
}
onError(Exception);
}
/// <summary>
/// Invokes the delegate corresponding to the notification and returns the produced result.
/// </summary>
/// <param name="onNext">Delegate to invoke for an OnNext notification.</param>
/// <param name="onError">Delegate to invoke for an OnError notification.</param>
/// <param name="onCompleted">Delegate to invoke for an OnCompleted notification.</param>
/// <returns>Result produced by the observation.</returns>
public override TResult Accept<TResult>(Func<T, TResult> onNext, Func<Exception, TResult> onError, Func<TResult> onCompleted)
{
if (onNext == null)
{
throw new ArgumentNullException(nameof(onNext));
}
if (onError == null)
{
throw new ArgumentNullException(nameof(onError));
}
if (onCompleted == null)
{
throw new ArgumentNullException(nameof(onCompleted));
}
return onError(Exception);
}
}
/// <summary>
/// Represents an OnCompleted notification to an observer.
/// </summary>
[DebuggerDisplay("OnCompleted()")]
[Serializable]
| OnErrorNotification |
csharp | bitwarden__server | src/Core/Auth/Models/Business/Tokenables/DuoUserStateTokenable.cs | {
"start": 126,
"end": 938
} | public class ____ : Tokenable
{
public const string ClearTextPrefix = "BwDuoUserId";
public const string DataProtectorPurpose = "DuoUserIdTokenDataProtector";
public const string TokenIdentifier = "DuoUserIdToken";
public string Identifier { get; set; } = TokenIdentifier;
public Guid UserId { get; set; }
public override bool Valid => Identifier == TokenIdentifier &&
UserId != default;
[JsonConstructor]
public DuoUserStateTokenable()
{
}
public DuoUserStateTokenable(User user)
{
UserId = user?.Id ?? default;
}
public bool TokenIsValid(User user)
{
if (UserId == default || user == null)
{
return false;
}
return UserId == user.Id;
}
}
| DuoUserStateTokenable |
csharp | dotnet__maui | src/Controls/tests/Xaml.UnitTests/Issues/Maui25141.xaml.cs | {
"start": 499,
"end": 956
} | class ____
{
[SetUp]
public void Setup()
{
Application.SetCurrentApplication(new MockApplication());
DispatcherProvider.SetCurrent(new DispatcherProviderStub());
}
[TearDown]
public void TearDown()
{
AppInfo.SetCurrent(null);
DeviceInfo.SetCurrent(null);
}
[Test]
public void BindingsInDataTriggerAndMultiBindingAreCompiledCorrectly()
{
MockCompiler.Compile(typeof(Maui25141), treatWarningsAsErrors: true);
}
}
}
| Test |
csharp | bitwarden__server | src/Core/NotificationCenter/Repositories/INotificationRepository.cs | {
"start": 291,
"end": 1849
} | public interface ____ : IRepository<Notification, Guid>
{
/// <summary>
/// Get notifications for a user with the given filters.
/// Includes global notifications.
/// </summary>
/// <param name="userId">User Id</param>
/// <param name="clientType">
/// Filter for notifications by client type. Always includes notifications with <see cref="ClientType.All"/>.
/// </param>
/// <param name="statusFilter">
/// Filters notifications by status.
/// If both <see cref="NotificationStatusFilter.Read"/> and <see cref="NotificationStatusFilter.Deleted"/>
/// are not set, includes notifications without a status.
/// </param>
/// <param name="pageOptions">
/// Pagination options.
/// </param>
/// <returns>
/// Paged results ordered by priority (descending, highest to lowest) and creation date (descending).
/// Includes all fields from <see cref="Notification"/> and <see cref="NotificationStatus"/>
/// </returns>
Task<PagedResult<NotificationStatusDetails>> GetByUserIdAndStatusAsync(Guid userId, ClientType clientType,
NotificationStatusFilter? statusFilter, PageOptions pageOptions);
/// <summary>
/// Marks notifications as deleted by a taskId.
/// </summary>
/// <param name="taskId">The unique identifier of the task.</param>
/// <returns>
/// A collection of UserIds for the notifications that are now marked as deleted.
/// </returns>
Task<IEnumerable<Guid>> MarkNotificationsAsDeletedByTask(Guid taskId);
}
| INotificationRepository |
csharp | OrchardCMS__OrchardCore | src/OrchardCore/OrchardCore.ContentTypes.Abstractions/Events/IContentDefinitionHandler.cs | {
"start": 334,
"end": 1839
} | public interface ____
{
/// <summary>
/// Invoked during the building of a content type.
/// Allows modifications or custom logic to be applied to the content type being created.
/// </summary>
/// <param name="context">The context for the content type being built.</param>
void ContentTypeBuilding(ContentTypeBuildingContext context);
/// <summary>
/// Invoked during the building of a content part definition.
/// Allows modification or customization of content parts before they are finalized in the content definition.
/// </summary>
/// <param name="context">The context for the content part definition being built.</param>
void ContentPartBuilding(ContentPartBuildingContext context);
/// <summary>
/// Invoked during the building of a part on a content type.
/// Enables modification or customization of the content part as it is attached to a content type.
/// </summary>
/// <param name="context">The context for the content type part being built.</param>
void ContentTypePartBuilding(ContentTypePartBuildingContext context);
/// <summary>
/// Invoked during the building of a field on a content part.
/// Allows customization of fields added to content parts before the final definition is created.
/// </summary>
/// <param name="context">The context for the content part field being built.</param>
void ContentPartFieldBuilding(ContentPartFieldBuildingContext context);
}
| IContentDefinitionHandler |
csharp | CommunityToolkit__WindowsCommunityToolkit | Microsoft.Toolkit.Uwp.UI/Extensions/ListViewBase/ItemContainerStretchDirection.cs | {
"start": 308,
"end": 655
} | public enum ____
{
/// <summary>
/// Horizontal stretch
/// </summary>
Horizontal,
/// <summary>
/// Vertical stretch
/// </summary>
Vertical,
/// <summary>
/// Horizontal and Vertical stretch
/// </summary>
Both
}
} | ItemContainerStretchDirection |
csharp | ChilliCream__graphql-platform | src/HotChocolate/Fusion-vnext/test/Fusion.Composition.Tests/SourceSchemaMerger.TagDirective.Tests.cs | {
"start": 1489,
"end": 1748
} | interface ____ @tag(name: "a") {
field(arg: Int @tag(name: "a")): Int @tag(name: "a")
}
union FooUnion @tag(name: "a") = FooObject
scalar FooScalar @tag(name: "a")
| FooInterface |
csharp | getsentry__sentry-dotnet | test/Sentry.Tests/HubTests.cs | {
"start": 11027,
"end": 58805
} | internal class ____
{
// This property will throw an exception during serialization.
// ReSharper disable once UnusedMember.Local
public string Thrower => throw new InvalidDataException();
}
#if !__MOBILE__
[Theory]
[InlineData(true)]
[InlineData(false)]
public async Task CaptureEvent_NonSerializableContextAndOfflineCaching_CapturesEventWithContextKey(bool offlineCaching)
{
// This test has proven to be flaky, so we'll skip it on mobile targets.
// We'll also retry it a few times when we run it for non-mobile targets.
// As long as it doesn't consistently fail, that should be good enough.
// TODO: The retry and/or #if can be removed if we can confidently figure out the source of the flakiness.
await TestHelpers.RetryTestAsync(
maxAttempts: 3,
_output,
() => CapturesEventWithContextKey_Implementation(offlineCaching));
}
private async Task CapturesEventWithContextKey_Implementation(bool offlineCaching)
{
#if NET6_0_OR_GREATER
JsonExtensions.AddJsonSerializerContext(o => new HubTestsJsonContext(o));
#endif
var tcs = new TaskCompletionSource<bool>();
var expectedMessage = Guid.NewGuid().ToString();
var requests = new List<string>();
async Task Verify(HttpRequestMessage message)
{
var payload = await message.Content!.ReadAsStringAsync();
requests.Add(payload);
if (payload.Contains(expectedMessage))
{
tcs.TrySetResult(true);
}
}
var cts = new CancellationTokenSource();
cts.Token.Register(() => tcs.TrySetCanceled());
using var tempDirectory = offlineCaching ? new TempDirectory() : null;
var logger = Substitute.ForPartsOf<TestOutputDiagnosticLogger>(_output);
var options = new SentryOptions
{
Dsn = ValidDsn,
// To go through a round trip serialization of cached envelope
CacheDirectoryPath = tempDirectory?.Path,
// So we don't need to deal with gzip payloads
RequestBodyCompressionLevel = CompressionLevel.NoCompression,
CreateHttpMessageHandler = () => new CallbackHttpClientHandler(Verify),
// Not to send some session envelope
AutoSessionTracking = false,
Debug = true,
DiagnosticLogger = logger,
// This keeps all writing-to-file operations in memory instead of actually writing to disk
FileSystem = new FakeFileSystem()
};
// Disable process exit flush to resolve "There is no currently active test." errors.
options.DisableAppDomainProcessExitFlush();
try
{
using var hub = new Hub(options);
var expectedContextKey = Guid.NewGuid().ToString();
var evt = new SentryEvent
{
Contexts = { [expectedContextKey] = new EvilContext() },
Message = new()
{
Formatted = expectedMessage
}
};
hub.CaptureEvent(evt);
await hub.FlushAsync();
// Synchronizing in the tests to go through the caching and http transports
cts.CancelAfter(TimeSpan.FromSeconds(3));
var ex = await Record.ExceptionAsync(() => tcs.Task);
Assert.False(ex is OperationCanceledException || !tcs.Task.Result, "Event not captured");
Assert.Null(ex);
Assert.True(requests.All(p => p.Contains(expectedContextKey)),
"Un-serializable context key should exist");
logger.Received().Log(SentryLevel.Error,
"Failed to serialize object for property '{0}'. Original depth: {1}, current depth: {2}",
Arg.Any<InvalidDataException>(),
Arg.Any<object[]>());
}
finally
{
// ensure the task is complete before leaving the test
tcs.TrySetResult(false);
await tcs.Task;
if (options.Transport is CachingTransport cachingTransport)
{
// Disposing the caching transport will ensure its worker
// is shut down before we try to dispose and delete the temp folder
await cachingTransport.DisposeAsync();
}
}
}
#endif
[Fact]
public void CaptureEvent_ActiveSession_UnhandledExceptionSessionEndedAsCrashed()
{
// Arrange
var worker = Substitute.For<IBackgroundWorker>();
var options = new SentryOptions
{
Dsn = ValidDsn,
Release = "release"
};
var sessionManager = new GlobalSessionManager(options);
var client = new SentryClient(options, worker, sessionManager: sessionManager);
var hub = new Hub(options, client, sessionManager);
hub.StartSession();
// Act
hub.CaptureEvent(new()
{
SentryExceptions = new[]
{
new SentryException
{
Mechanism = new()
{
Handled = false
}
}
}
});
// Assert
worker.Received().EnqueueEnvelope(
Arg.Is<Envelope>(e =>
e.Items
.Select(i => i.Payload)
.OfType<JsonSerializable>()
.Select(i => i.Source)
.OfType<SessionUpdate>()
.Single()
.EndStatus == SessionEndStatus.Crashed
));
}
[Fact]
public void CaptureEvent_Client_GetsHint()
{
// Arrange
var @event = new SentryEvent();
var hint = new SentryHint();
var hub = _fixture.GetSut();
// Act
hub.CaptureEvent(@event, hint: hint);
// Assert
_fixture.Client.Received(1).CaptureEvent(
Arg.Any<SentryEvent>(),
Arg.Any<Scope>(), Arg.Is<SentryHint>(h => h == hint));
}
[Fact]
public void CaptureEvent_TerminalUnhandledException_AbortsActiveTransaction()
{
// Arrange
_fixture.Options.TracesSampleRate = 1.0;
var hub = _fixture.GetSut();
var transaction = hub.StartTransaction("test", "operation");
hub.ConfigureScope(scope => scope.Transaction = transaction);
var exception = new Exception("test");
exception.SetSentryMechanism("test", handled: false, terminal: true);
// Act
hub.CaptureEvent(new SentryEvent(exception));
// Assert
transaction.Status.Should().Be(SpanStatus.Aborted);
transaction.IsFinished.Should().BeTrue();
}
[Fact]
public void CaptureEvent_NonTerminalUnhandledException_DoesNotAbortActiveTransaction()
{
// Arrange
_fixture.Options.TracesSampleRate = 1.0;
var hub = _fixture.GetSut();
var transaction = hub.StartTransaction("test", "operation");
hub.ConfigureScope(scope => scope.Transaction = transaction);
var exception = new Exception("test");
exception.SetSentryMechanism("TestException", handled: false, terminal: false);
// Act
hub.CaptureEvent(new SentryEvent(exception));
// Assert
transaction.IsFinished.Should().BeFalse();
}
[Fact]
public void CaptureEvent_HandledException_DoesNotAbortActiveTransaction()
{
// Arrange
_fixture.Options.TracesSampleRate = 1.0;
var hub = _fixture.GetSut();
var transaction = hub.StartTransaction("test", "operation");
hub.ConfigureScope(scope => scope.Transaction = transaction);
var exception = new Exception("test");
exception.SetSentryMechanism("test", handled: true);
// Act
hub.CaptureEvent(new SentryEvent(exception));
// Assert
transaction.IsFinished.Should().BeFalse();
}
[Fact]
public void CaptureEvent_EventWithoutException_DoesNotAbortActiveTransaction()
{
// Arrange
_fixture.Options.TracesSampleRate = 1.0;
var hub = _fixture.GetSut();
var transaction = hub.StartTransaction("test", "operation");
hub.ConfigureScope(scope => scope.Transaction = transaction);
// Act
hub.CaptureEvent(new SentryEvent { Message = "test message" });
// Assert
transaction.IsFinished.Should().BeFalse();
}
[Fact]
public void AppDomainUnhandledExceptionIntegration_ActiveSession_UnhandledExceptionSessionEndedAsCrashed()
{
// Arrange
var worker = Substitute.For<IBackgroundWorker>();
var options = new SentryOptions
{
Dsn = ValidDsn,
Release = "release"
};
var sessionManager = new GlobalSessionManager(options);
var client = new SentryClient(options, worker, sessionManager: sessionManager);
var hub = new Hub(options, client, sessionManager);
var integration = new AppDomainUnhandledExceptionIntegration(Substitute.For<IAppDomain>());
integration.Register(hub, options);
hub.StartSession();
// Act
// Simulate a terminating exception
integration.Handle(this, new UnhandledExceptionEventArgs(new Exception("test"), true));
// Assert
worker.Received().EnqueueEnvelope(
Arg.Is<Envelope>(e =>
e.Items
.Select(i => i.Payload)
.OfType<JsonSerializable>()
.Select(i => i.Source)
.OfType<SessionUpdate>()
.Single()
.EndStatus == SessionEndStatus.Crashed
));
}
[Fact]
public void StartTransaction_NameOpDescription_Works()
{
// Arrange
var hub = _fixture.GetSut();
// Act
var transaction = hub.StartTransaction("name", "operation", "description");
// Assert
transaction.Name.Should().Be("name");
transaction.Operation.Should().Be("operation");
transaction.Description.Should().Be("description");
}
[Fact]
public void StartTransaction_FromTraceHeader_CopiesContext()
{
// Arrange
_fixture.Options.TracesSampleRate = 1.0;
var hub = _fixture.GetSut();
var traceHeader = new SentryTraceHeader(
SentryId.Parse("75302ac48a024bde9a3b3734a82e36c8"),
SpanId.Parse("2000000000000000"),
true);
// Act
var transaction = hub.StartTransaction("name", "operation", traceHeader);
// Assert
transaction.TraceId.Should().Be(SentryId.Parse("75302ac48a024bde9a3b3734a82e36c8"));
transaction.ParentSpanId.Should().Be(SpanId.Parse("2000000000000000"));
transaction.IsSampled.Should().BeTrue();
}
[Fact]
public void StartTransaction_FromTraceHeader_SampledInheritedFromParentRegardlessOfSampleRate()
{
// Arrange
_fixture.Options.TracesSampleRate = 0.0;
var hub = _fixture.GetSut();
var traceHeader = new SentryTraceHeader(
SentryId.Parse("75302ac48a024bde9a3b3734a82e36c8"),
SpanId.Parse("2000000000000000"),
true);
// Act
var transaction = hub.StartTransaction("name", "operation", traceHeader);
// Assert
transaction.IsSampled.Should().BeTrue();
}
[Fact]
public void StartTransaction_FromTraceHeader_CustomSamplerCanSampleOutTransaction()
{
// Arrange
_fixture.Options.TracesSampleRate = 1.0;
_fixture.Options.TracesSampler = _ => 0.0;
var hub = _fixture.GetSut();
var traceHeader = new SentryTraceHeader(
SentryId.Parse("75302ac48a024bde9a3b3734a82e36c8"),
SpanId.Parse("2000000000000000"),
true);
// Act
var transaction = hub.StartTransaction("foo", "bar", traceHeader);
// Assert
transaction.IsSampled.Should().BeFalse();
}
[Fact]
public void StartTransaction_StaticSampling_SampledIn()
{
// Arrange
_fixture.Options.TracesSampleRate = 1.0;
var hub = _fixture.GetSut();
// Act
var transaction = hub.StartTransaction("name", "operation");
// Assert
transaction.IsSampled.Should().BeTrue();
}
[Fact]
public void StartTransaction_StaticSampling_SampledOut()
{
// Arrange
_fixture.Options.TracesSampleRate = 0.0;
var hub = _fixture.GetSut();
// Act
var transaction = hub.StartTransaction("name", "operation");
// Assert
transaction.IsSampled.Should().BeFalse();
}
[Fact]
public void StartTransaction_EnableTracing_SampledIn()
{
// Arrange
_fixture.Options.TracesSampleRate = 1.0;
var hub = _fixture.GetSut();
// Act
var transaction = hub.StartTransaction("name", "operation");
// Assert
transaction.IsSampled.Should().BeTrue();
}
[Fact]
public void StartTransaction_SameInstrumenter_SampledIn()
{
// Arrange
_fixture.Options.TracesSampleRate = 1.0;
_fixture.Options.Instrumenter = Instrumenter.Sentry; // The default... making it explicit for this test though
var hub = _fixture.GetSut();
var transactionContext = new TransactionContext("name", "operation")
{
Instrumenter = _fixture.Options.Instrumenter
};
// Act
var transaction = hub.StartTransaction(transactionContext);
// Assert
transaction.IsSampled.Should().BeTrue();
}
[Fact]
public void StartTransaction_DynamicSamplingContextWithSampleRate_UsesSampleRate()
{
// Arrange
var transactionContext = new TransactionContext("name", "operation");
var dsc = BaggageHeader.Create(new List<KeyValuePair<string, string>>
{
{"sentry-trace_id", "43365712692146d08ee11a729dfbcaca"},
{"sentry-public_key", "d4d82fc1c2c4032a83f3a29aa3a3aff"},
{"sentry-sample_rate", "0.5"},
{"sentry-sample_rand", "0.1234"},
}).CreateDynamicSamplingContext();
_fixture.Options.TracesSampler = _ => 0.5;
_fixture.Options.TracesSampleRate = 0.5;
var hub = _fixture.GetSut();
// Act
var transaction = hub.StartTransaction(transactionContext, new Dictionary<string, object>(), dsc);
// Assert
var transactionTracer = transaction.Should().BeOfType<TransactionTracer>().Subject;
transactionTracer.SampleRate.Should().Be(0.5);
transactionTracer.DynamicSamplingContext.Should().BeSameAs(dsc);
}
[Theory]
[InlineData(true)]
[InlineData(false)]
public void StartTransaction_Backpressure_Downsamples(bool usesTracesSampler)
{
// Arrange
var transactionContext = new TransactionContext("name", "operation");
var clock = new MockClock(DateTimeOffset.UtcNow);
_fixture.Options.EnableBackpressureHandling = true;
_fixture.BackpressureMonitor = new BackpressureMonitor(null, clock, enablePeriodicHealthCheck: false);
_fixture.BackpressureMonitor.SetDownsampleLevel(1);
var sampleRate = 0.5f;
var expectedDownsampledRate = sampleRate * _fixture.BackpressureMonitor.DownsampleFactor;
if (usesTracesSampler)
{
_fixture.Options.TracesSampler = _ => sampleRate;
}
else
{
_fixture.Options.TracesSampleRate = sampleRate;
}
var hub = _fixture.GetSut();
// Act
var transaction = hub.StartTransaction(transactionContext, new Dictionary<string, object>());
switch (transaction)
{
// Assert
case TransactionTracer tracer:
tracer.SampleRate.Should().Be(expectedDownsampledRate);
break;
case UnsampledTransaction unsampledTransaction:
unsampledTransaction.SampleRate.Should().Be(expectedDownsampledRate);
break;
default:
throw new Exception("Unexpected transaction type.");
}
}
[Theory]
[InlineData(true, 0.4f, "backpressure")]
[InlineData(true, 0.6f, "sample_rate")]
[InlineData(false, 0.4f, "backpressure")]
[InlineData(false, 0.6f, "sample_rate")]
public void StartTransaction_Backpressure_SetsDiscardReason(bool usesTracesSampler, double sampleRand, string discardReason)
{
// Arrange
var transactionContext = new TransactionContext("name", "operation");
var clock = new MockClock(DateTimeOffset.UtcNow);
_fixture.SampleRandHelper = Substitute.For<ISampleRandHelper>();
_fixture.SampleRandHelper.GenerateSampleRand(Arg.Any<string>()).Returns(sampleRand);
_fixture.Options.EnableBackpressureHandling = true;
_fixture.BackpressureMonitor = new BackpressureMonitor(null, clock, enablePeriodicHealthCheck: false);
_fixture.BackpressureMonitor.SetDownsampleLevel(1);
var sampleRate = 0.5f;
if (usesTracesSampler)
{
_fixture.Options.TracesSampler = _ => sampleRate;
}
else
{
_fixture.Options.TracesSampleRate = sampleRate;
}
var hub = _fixture.GetSut();
// Act
var transaction = hub.StartTransaction(transactionContext, new Dictionary<string, object>());
transaction.Should().BeOfType<UnsampledTransaction>();
var unsampledTransaction = (UnsampledTransaction)transaction;
var expectedReason = new DiscardReason(discardReason);
unsampledTransaction.DiscardReason.Should().Be(expectedReason);
}
// overwrite the 'sample_rate' of the Dynamic Sampling Context (DSC) when a sampling decisions is made in the downstream SDK
// 1. overwrite when 'TracesSampler' reaches a sampling decision
// 2. keep when a sampling decision has been made upstream (via 'TransactionContext.IsSampled')
// 3. overwrite when 'TracesSampleRate' reaches a sampling decision
// 4. keep otherwise
[SkippableTheory]
[InlineData(null, 0.3, 0.4, true, 0.3, true)]
[InlineData(null, 0.3, null, true, 0.3, true)]
[InlineData(null, null, 0.4, true, 0.4, true)]
[InlineData(null, null, null, false, 0.0, false)]
[InlineData(true, 0.3, 0.4, true, 0.3, true)]
[InlineData(true, 0.3, null, true, 0.3, true)]
[InlineData(true, null, 0.4, true, 0.4, false)]
[InlineData(true, null, null, true, 0.0, false)]
[InlineData(false, 0.3, 0.4, true, 0.3, true)]
[InlineData(false, 0.3, null, true, 0.3, true)]
[InlineData(false, null, 0.4, false, 0.4, false)]
[InlineData(false, null, null, false, 0.0, false)]
public void StartTransaction_DynamicSamplingContextWithSampleRate_OverwritesSampleRate(bool? isSampled, double? tracesSampler, double? tracesSampleRate, bool expectedIsSampled, double expectedSampleRate, bool expectedDscOverwritten)
{
// Arrange
var transactionContext = new TransactionContext("name", "operation", isSampled: isSampled);
var dsc = BaggageHeader.Create(new List<KeyValuePair<string, string>>
{
{"sentry-trace_id", "43365712692146d08ee11a729dfbcaca"},
{"sentry-public_key", "d4d82fc1c2c4032a83f3a29aa3a3aff"},
{"sentry-sample_rate", "0.5"},
{"sentry-sample_rand", "0.1234"},
}).CreateDynamicSamplingContext();
var originalDsc = dsc?.Clone();
_fixture.Options.TracesSampler = _ => tracesSampler;
_fixture.Options.TracesSampleRate = tracesSampleRate;
var hub = _fixture.GetSut();
// Act
var transaction = hub.StartTransaction(transactionContext, new Dictionary<string, object>(), dsc);
// Assert
if (expectedIsSampled)
{
var transactionTracer = transaction.Should().BeOfType<TransactionTracer>().Subject;
transactionTracer.SampleRate.Should().Be(expectedSampleRate);
if (expectedDscOverwritten)
{
transactionTracer.DynamicSamplingContext.Should().BeEquivalentTo(originalDsc.CloneWithSampleRate(expectedSampleRate));
}
else
{
transactionTracer.DynamicSamplingContext.Should().BeEquivalentTo(originalDsc);
}
}
else
{
var unsampledTransaction = transaction.Should().BeOfType<UnsampledTransaction>().Subject;
unsampledTransaction.SampleRate.Should().Be(expectedSampleRate);
if (expectedDscOverwritten)
{
unsampledTransaction.DynamicSamplingContext.Should().BeEquivalentTo(originalDsc.CloneWithSampleRate(expectedSampleRate));
}
else
{
unsampledTransaction.DynamicSamplingContext.Should().BeEquivalentTo(originalDsc);
}
}
}
[Theory]
[InlineData(true)]
[InlineData(false)]
public void StartTransaction_DynamicSamplingContextWithReplayId_UsesActiveReplaySessionId(bool replaySessionIsActive)
{
// Arrange
var transactionContext = new TransactionContext("name", "operation");
var dummyReplaySession = Substitute.For<IReplaySession>();
dummyReplaySession.ActiveReplayId.Returns((SentryId?)null); // So the replay id in the baggage header is used
var dsc = BaggageHeader.Create(new List<KeyValuePair<string, string>>
{
{"sentry-trace_id", "43365712692146d08ee11a729dfbcaca"},
{"sentry-public_key", "d4d82fc1c2c4032a83f3a29aa3a3aff"},
{"sentry-sampled", "true"},
{"sentry-sample_rate", "0.5"}, // Required in the baggage header, but ignored by sampling logic
{"sentry-replay_id", "bfd31b89a59d41c99d96dc2baf840ecd"}
}).CreateDynamicSamplingContext(dummyReplaySession);
_fixture.Options.TracesSampleRate = 1.0;
_fixture.ReplaySession.ActiveReplayId.Returns(replaySessionIsActive ? SentryId.Create() : null); // This one gets used by the SUT
var hub = _fixture.GetSut();
// Act
var transaction = hub.StartTransaction(transactionContext, new Dictionary<string, object>(), dsc);
// Assert
var transactionTracer = transaction.Should().BeOfType<TransactionTracer>().Subject;
transactionTracer.IsSampled.Should().BeTrue();
transactionTracer.DynamicSamplingContext.Should().NotBeNull();
var expectedDsc = dsc.CloneWithSampleRate(_fixture.Options.TracesSampleRate.Value);
if (replaySessionIsActive)
{
// We overwrite the replay_id when we have an active replay session
// Otherwise we propagate whatever was in the baggage header
expectedDsc = expectedDsc.CloneWithReplayId(_fixture.ReplaySession);
}
transactionTracer.DynamicSamplingContext.Should().BeEquivalentTo(expectedDsc);
}
[Theory]
[InlineData(true)]
[InlineData(false)]
public void StartTransaction_NoDynamicSamplingContext_UsesActiveReplaySessionId(bool replaySessionIsActive)
{
// Arrange
var transactionContext = new TransactionContext("name", "operation");
_fixture.ReplaySession.ActiveReplayId.Returns(replaySessionIsActive ? SentryId.Create() : null);
var hub = _fixture.GetSut();
// Act
var transaction = hub.StartTransaction(transactionContext, new Dictionary<string, object>());
// Assert
var transactionTracer = transaction.Should().BeOfType<TransactionTracer>().Subject;
transactionTracer.SampleRand.Should().NotBeNull();
transactionTracer.DynamicSamplingContext.Should().NotBeNull();
if (replaySessionIsActive)
{
// We add the replay_id when we have an active replay session
transactionTracer.DynamicSamplingContext!.Items["replay_id"].Should().Be(_fixture.ReplaySession.ActiveReplayId.ToString());
}
else
{
transactionTracer.DynamicSamplingContext!.Items.Should().NotContainKey("replay_id");
}
}
[Fact]
public void StartTransaction_NoDynamicSamplingContext_GeneratesSampleRand()
{
// Arrange
var transactionContext = new TransactionContext("name", "operation");
var customContext = new Dictionary<string, object>();
var hub = _fixture.GetSut();
// Act
var transaction = hub.StartTransaction(transactionContext, customContext);
// Assert
var transactionTracer = transaction.Should().BeOfType<TransactionTracer>().Subject;
transactionTracer.SampleRand.Should().NotBeNull();
transactionTracer.DynamicSamplingContext.Should().NotBeNull();
transactionTracer.DynamicSamplingContext!.Items.Should().ContainKey("sample_rand");
transactionTracer.DynamicSamplingContext.Items["sample_rand"].Should().Be(transactionTracer.SampleRand!.Value.ToString("N4", CultureInfo.InvariantCulture));
}
[Fact]
public void StartTransaction_DynamicSamplingContextWithoutSampleRand_SampleRandNotPropagated()
{
// Arrange
var transactionContext = new TransactionContext("name", "operation");
var hub = _fixture.GetSut();
// Act
var transaction = hub.StartTransaction(transactionContext, new Dictionary<string, object>(), DynamicSamplingContext.Empty());
// Assert
var transactionTracer = transaction.Should().BeOfType<TransactionTracer>().Subject;
transactionTracer.SampleRand.Should().NotBeNull();
transactionTracer.DynamicSamplingContext.Should().NotBeNull();
// See https://develop.sentry.dev/sdk/telemetry/traces/dynamic-sampling-context/#freezing-dynamic-sampling-context
transactionTracer.DynamicSamplingContext!.Items.Should().NotContainKey("sample_rand");
}
[Fact]
public void StartTransaction_DynamicSamplingContextWithSampleRand_InheritsSampleRand()
{
// Arrange
var transactionContext = new TransactionContext("name", "operation");
var dummyReplaySession = Substitute.For<IReplaySession>();
var dsc = BaggageHeader.Create(new List<KeyValuePair<string, string>>
{
{"sentry-trace_id", "43365712692146d08ee11a729dfbcaca"},
{"sentry-public_key", "d4d82fc1c2c4032a83f3a29aa3a3aff"},
{"sentry-sampled", "true"},
{"sentry-sample_rate", "0.5"}, // Required in the baggage header, but ignored by sampling logic
{"sentry-sample_rand", "0.1234"}
}).CreateDynamicSamplingContext(dummyReplaySession);
var originalDsc = dsc.Clone();
_fixture.Options.TracesSampleRate = 0.4;
var hub = _fixture.GetSut();
// Act
var transaction = hub.StartTransaction(transactionContext, new Dictionary<string, object>(), dsc);
// Assert
var transactionTracer = transaction.Should().BeOfType<TransactionTracer>().Subject;
transactionTracer.IsSampled.Should().BeTrue();
transactionTracer.SampleRate.Should().Be(0.4);
transactionTracer.SampleRand.Should().Be(0.1234);
transactionTracer.DynamicSamplingContext.Should().BeEquivalentTo(originalDsc.CloneWithSampleRate(0.4));
}
[Theory]
[InlineData(0.1, false)]
[InlineData(0.2, true)]
public void StartTransaction_TraceSampler_UsesSampleRand(double sampleRate, bool expectedIsSampled)
{
// Arrange
var transactionContext = new TransactionContext("name", "operation");
var customContext = new Dictionary<string, object>();
var dsc = BaggageHeader.Create(new List<KeyValuePair<string, string>>
{
{"sentry-trace_id", "43365712692146d08ee11a729dfbcaca"},
{"sentry-public_key", "d4d82fc1c2c4032a83f3a29aa3a3aff"},
{"sentry-sampled", "true"},
{"sentry-sample_rate", "0.5"},
{"sentry-sample_rand", "0.1234"}
}).CreateDynamicSamplingContext(_fixture.ReplaySession);
var originalDsc = dsc.Clone();
_fixture.Options.TracesSampler = _ => sampleRate;
var hub = _fixture.GetSut();
// Act
var transaction = hub.StartTransaction(transactionContext, customContext, dsc);
// Assert
if (expectedIsSampled)
{
var transactionTracer = transaction.Should().BeOfType<TransactionTracer>().Subject;
transactionTracer.IsSampled.Should().BeTrue();
transactionTracer.SampleRate.Should().Be(sampleRate);
transactionTracer.SampleRand.Should().Be(0.1234);
transactionTracer.DynamicSamplingContext.Should().BeEquivalentTo(originalDsc.CloneWithSampleRate(sampleRate));
}
else
{
var unsampledTransaction = transaction.Should().BeOfType<UnsampledTransaction>().Subject;
unsampledTransaction.IsSampled.Should().BeFalse();
unsampledTransaction.SampleRate.Should().Be(sampleRate);
unsampledTransaction.SampleRand.Should().Be(0.1234);
unsampledTransaction.DynamicSamplingContext.Should().BeEquivalentTo(originalDsc.CloneWithSampleRate(sampleRate));
}
}
[Theory]
[InlineData(0.1, false)]
[InlineData(0.2, true)]
public void StartTransaction_StaticSampler_UsesSampleRand(double sampleRate, bool expectedIsSampled)
{
// Arrange
var transactionContext = new TransactionContext("name", "operation");
var customContext = new Dictionary<string, object>();
var dummyReplaySession = Substitute.For<IReplaySession>();
var dsc = BaggageHeader.Create(new List<KeyValuePair<string, string>>
{
{"sentry-trace_id", "43365712692146d08ee11a729dfbcaca"},
{"sentry-public_key", "d4d82fc1c2c4032a83f3a29aa3a3aff"},
{"sentry-sample_rate", "0.5"}, // Static sampling ignores this and uses options.TracesSampleRate instead
{"sentry-sample_rand", "0.1234"}
}).CreateDynamicSamplingContext(dummyReplaySession);
var originalDsc = dsc.Clone();
_fixture.Options.TracesSampleRate = sampleRate;
var hub = _fixture.GetSut();
// Act
var transaction = hub.StartTransaction(transactionContext, customContext, dsc);
// Assert
if (expectedIsSampled)
{
var transactionTracer = transaction.Should().BeOfType<TransactionTracer>().Subject;
transactionTracer.IsSampled.Should().BeTrue();
transactionTracer.SampleRate.Should().Be(sampleRate);
transactionTracer.SampleRand.Should().Be(0.1234);
transactionTracer.DynamicSamplingContext.Should().BeEquivalentTo(originalDsc.CloneWithSampleRate(sampleRate));
}
else
{
var unsampledTransaction = transaction.Should().BeOfType<UnsampledTransaction>().Subject;
unsampledTransaction.IsSampled.Should().BeFalse();
unsampledTransaction.SampleRate.Should().Be(sampleRate);
unsampledTransaction.SampleRand.Should().Be(0.1234);
unsampledTransaction.DynamicSamplingContext.Should().BeEquivalentTo(originalDsc.CloneWithSampleRate(sampleRate));
}
}
[Fact]
public void StartTransaction_DifferentInstrumenter_SampledIn()
{
// Arrange
_fixture.Options.TracesSampleRate = 1.0;
_fixture.Options.Instrumenter = Instrumenter.OpenTelemetry;
var hub = _fixture.GetSut();
var transactionContext = new TransactionContext("name", "operation")
{
Instrumenter = Instrumenter.Sentry // The default... making it explicit for this test though
};
// Act
var transaction = hub.StartTransaction(transactionContext);
// Assert
transaction.IsSampled.Should().BeTrue();
}
[Fact]
public void StartTransaction_EnableTracing_Sampler_SampledIn()
{
// Arrange
_fixture.Options.TracesSampler = _ => 1.0;
_fixture.Options.TracesSampleRate = 1.0;
var hub = _fixture.GetSut();
// Act
var transaction = hub.StartTransaction("name", "operation");
// Assert
transaction.IsSampled.Should().BeTrue();
}
[Theory]
[InlineData(0.25f)]
[InlineData(0.50f)]
[InlineData(0.75f)]
public void StartTransaction_StaticSampling_AppropriateDistribution(float sampleRate)
{
// Arrange
const int numEvents = 1000;
const double allowedRelativeDeviation = 0.15;
const uint allowedDeviation = (uint)(allowedRelativeDeviation * numEvents);
var expectedSampled = (int)(sampleRate * numEvents);
var worker = Substitute.For<IBackgroundWorker>();
worker.EnqueueEnvelope(Arg.Any<Envelope>()).Returns(true);
var options = new SentryOptions
{
Dsn = ValidDsn,
TracesSampleRate = sampleRate,
AttachStacktrace = false,
AutoSessionTracking = false,
BackgroundWorker = worker
};
// This test expects an approximate uniform distribution of random numbers, so we'll retry a few times.
TestHelpers.RetryTest(maxAttempts: 3, _output, () =>
{
var randomValuesFactory = new IsolatedRandomValuesFactory();
var hub = new Hub(options, randomValuesFactory: randomValuesFactory);
// Act
var countSampled = 0;
for (var i = 0; i < numEvents; i++)
{
var transaction = hub.StartTransaction($"name[{i}]", $"operation[{i}]");
if (transaction.IsSampled == true)
{
countSampled++;
}
}
// Assert
countSampled.Should().BeCloseTo(expectedSampled, allowedDeviation);
});
}
[Fact]
public void StartTransaction_TracesSampler_SampledIn()
{
// Arrange
_fixture.Options.TracesSampler = ctx => ctx.TransactionContext.Name == "foo" ? 1 : 0;
var hub = _fixture.GetSut();
// Act
var transaction = hub.StartTransaction("foo", "op");
// Assert
transaction.IsSampled.Should().BeTrue();
}
[Fact]
public void StartTransaction_TracesSampler_SampledOut()
{
// Arrange
_fixture.Options.TracesSampler = ctx => ctx.TransactionContext.Name == "foo" ? 1 : 0;
var hub = _fixture.GetSut();
// Act
var transaction = hub.StartTransaction("bar", "op");
// Assert
transaction.IsSampled.Should().BeFalse();
}
[Fact]
public void StartTransaction_TracesSampler_WithCustomContext_SampledIn()
{
// Arrange
_fixture.Options.TracesSampler = ctx =>
ctx.CustomSamplingContext.GetValueOrDefault("xxx") as string == "zzz" ? 1 : 0;
var hub = _fixture.GetSut();
// Act
var transaction = hub.StartTransaction(
new TransactionContext("foo", "op"),
new Dictionary<string, object> { ["xxx"] = "zzz" });
// Assert
transaction.IsSampled.Should().BeTrue();
}
[Fact]
public void StartTransaction_TracesSampler_WithCustomContext_SampledOut()
{
// Arrange
_fixture.Options.TracesSampler = ctx =>
ctx.CustomSamplingContext.GetValueOrDefault("xxx") as string == "zzz" ? 1 : 0;
var hub = _fixture.GetSut();
// Act
var transaction = hub.StartTransaction(
new TransactionContext("foo", "op"),
new Dictionary<string, object> { ["xxx"] = "yyy" });
// Assert
transaction.IsSampled.Should().BeFalse();
}
[Fact]
public void StartTransaction_TracesSampler_FallbackToStatic_SampledIn()
{
// Arrange
_fixture.Options.TracesSampler = _ => null;
_fixture.Options.TracesSampleRate = 1.0;
var hub = _fixture.GetSut();
// Act
var transaction = hub.StartTransaction("foo", "bar");
// Assert
transaction.IsSampled.Should().BeTrue();
}
[Fact]
public void StartTransaction_TracesSampler_FallbackToStatic_SampledOut()
{
// Arrange
_fixture.Options.TracesSampler = _ => null;
_fixture.Options.TracesSampleRate = 0.0;
var hub = _fixture.GetSut();
// Act
var transaction = hub.StartTransaction("foo", "bar");
// Assert
transaction.IsSampled.Should().BeFalse();
}
[Theory]
[InlineData(true)]
[InlineData(false)]
public void GetTraceHeader_ReturnsHeaderForActiveSpan(bool isSampled)
{
// Arrange
_fixture.Options.TracesSampleRate = isSampled ? 1 : 0;
var hub = _fixture.GetSut();
var transaction = hub.StartTransaction("foo", "bar");
hub.ConfigureScope(scope => scope.Transaction = transaction);
// Act
var header = hub.GetTraceHeader();
// Assert
header.Should().NotBeNull();
header.SpanId.Should().Be(transaction.SpanId);
header.TraceId.Should().Be(transaction.TraceId);
header.IsSampled.Should().Be(transaction.IsSampled);
}
[Fact]
public void GetTraceHeader_NoSpanActive_ReturnsHeaderFromPropagationContext()
{
// Arrange
var hub = _fixture.GetSut();
var propagationContext = new SentryPropagationContext(
SentryId.Parse("75302ac48a024bde9a3b3734a82e36c8"),
SpanId.Parse("2000000000000000"));
hub.ConfigureScope(scope => scope.SetPropagationContext(propagationContext));
// Act
var header = hub.GetTraceHeader();
// Assert
header.Should().NotBeNull();
header.SpanId.Should().Be(propagationContext.SpanId);
header.TraceId.Should().Be(propagationContext.TraceId);
header.IsSampled.Should().BeNull();
}
[Theory]
[InlineData(true)]
[InlineData(false)]
public void GetBaggage_SpanActive_ReturnsBaggageFromSpan(bool isSampled)
{
// Arrange
_fixture.Options.TracesSampleRate = isSampled ? 1 : 0;
var hub = _fixture.GetSut();
var expectedBaggage = BaggageHeader.Create(new List<KeyValuePair<string, string>>
{
{"sentry-trace_id", "43365712692146d08ee11a729dfbcaca"},
{"sentry-public_key", "d4d82fc1c2c4032a83f3a29aa3a3aff"},
{"sentry-sample_rate", "0.0"}
});
var replaySession = Substitute.For<IReplaySession>();
replaySession.ActiveReplayId.Returns((SentryId?)null);
var dsc = expectedBaggage.CreateDynamicSamplingContext(replaySession);
var transaction = hub.StartTransaction(new TransactionContext("name", "op"),
new Dictionary<string, object>(), dsc);
hub.ConfigureScope(scope => scope.Transaction = transaction);
// Act
var baggage = hub.GetBaggage();
// Assert
baggage.Should().NotBeNull();
var sampleRand = isSampled ? ((TransactionTracer)transaction).SampleRand : ((UnsampledTransaction)transaction).SampleRand;
baggage.Members.Should().Equal([
new KeyValuePair<string, string>("sentry-trace_id", "43365712692146d08ee11a729dfbcaca"),
new KeyValuePair<string, string>("sentry-public_key", "d4d82fc1c2c4032a83f3a29aa3a3aff"),
new KeyValuePair<string, string>("sentry-sample_rate", isSampled ? "1" : "0"),
new KeyValuePair<string, string>("sentry-sample_rand", sampleRand!.Value.ToString(CultureInfo.InvariantCulture)),
]);
}
[Fact]
public void GetBaggage_NoSpanActive_ReturnsBaggageFromPropagationContext()
{
// Arrange
var hub = _fixture.GetSut();
var propagationContext = new SentryPropagationContext(
SentryId.Parse("43365712692146d08ee11a729dfbcaca"), SpanId.Parse("1000000000000000"));
hub.ConfigureScope(scope => scope.SetPropagationContext(propagationContext));
// Act
var baggage = hub.GetBaggage();
// Assert
baggage.Should().NotBeNull();
Assert.Contains("sentry-trace_id=43365712692146d08ee11a729dfbcaca", baggage!.ToString());
}
[Theory]
[InlineData(true)]
[InlineData(false)]
public void GetTraceparentHeader_ReturnsHeaderForActiveSpan(bool isSampled)
{
// Arrange
_fixture.Options.TracesSampleRate = isSampled ? 1 : 0;
var hub = _fixture.GetSut();
var transaction = hub.StartTransaction("foo", "bar");
hub.ConfigureScope(scope => scope.Transaction = transaction);
// Act
var header = hub.GetTraceparentHeader();
// Assert
header.Should().NotBeNull();
header.SpanId.Should().Be(transaction.SpanId);
header.TraceId.Should().Be(transaction.TraceId);
header.IsSampled.Should().Be(transaction.IsSampled);
}
[Fact]
public void GetTraceparentHeader_NoSpanActive_ReturnsHeaderFromPropagationContext()
{
// Arrange
var hub = _fixture.GetSut();
var propagationContext = new SentryPropagationContext(
SentryId.Parse("75302ac48a024bde9a3b3734a82e36c8"),
SpanId.Parse("2000000000000000"));
hub.ConfigureScope(scope => scope.SetPropagationContext(propagationContext));
// Act
var header = hub.GetTraceparentHeader();
// Assert
header.Should().NotBeNull();
header.SpanId.Should().Be(propagationContext.SpanId);
header.TraceId.Should().Be(propagationContext.TraceId);
header.IsSampled.Should().BeNull();
}
[Fact]
public void ContinueTrace_ReceivesHeaders_SetsPropagationContextAndReturnsTransactionContext()
{
// Arrange
var hub = _fixture.GetSut();
var propagationContext = new SentryPropagationContext(
SentryId.Parse("43365712692146d08ee11a729dfbcaca"), SpanId.Parse("1000000000000000"));
hub.ConfigureScope(scope => scope.SetPropagationContext(propagationContext));
var traceHeader = new SentryTraceHeader(SentryId.Parse("5bd5f6d346b442dd9177dce9302fd737"),
SpanId.Parse("2000000000000000"), null);
var baggageHeader = BaggageHeader.Create(new List<KeyValuePair<string, string>>
{
{"sentry-trace_id", "5bd5f6d346b442dd9177dce9302fd737"},
{"sentry-public_key", "49d0f7386ad645858ae85020e393bef3"},
{"sentry-sample_rate", "1.0"}
});
hub.ScopeManager.ConfigureScope(scope => scope.PropagationContext.TraceId.Should().Be(SentryId.Parse("43365712692146d08ee11a729dfbcaca"))); // Sanity check
// Act
var transactionContext = hub.ContinueTrace(traceHeader, baggageHeader, "test-name");
// Assert
hub.ScopeManager.ConfigureScope(scope =>
{
scope.PropagationContext.TraceId.Should().Be(SentryId.Parse("5bd5f6d346b442dd9177dce9302fd737"));
scope.PropagationContext.ParentSpanId.Should().Be(SpanId.Parse("2000000000000000"));
Assert.NotNull(scope.PropagationContext._dynamicSamplingContext);
scope.PropagationContext._dynamicSamplingContext.Items.Should().Contain(baggageHeader.GetSentryMembers());
});
transactionContext.TraceId.Should().Be(SentryId.Parse("5bd5f6d346b442dd9177dce9302fd737"));
transactionContext.ParentSpanId.Should().Be(SpanId.Parse("2000000000000000"));
}
[Fact]
public void ContinueTrace_DoesNotReceiveHeaders_CreatesRootTrace()
{
// Arrange
var hub = _fixture.GetSut();
// Act
var transactionContext = hub.ContinueTrace((SentryTraceHeader)null, (BaggageHeader)null, "test-name", "test-operation");
// Assert
hub.ScopeManager.ConfigureScope(scope =>
{
Assert.Null(scope.PropagationContext.ParentSpanId);
Assert.Null(scope.PropagationContext._dynamicSamplingContext);
});
transactionContext.Name.Should().Be("test-name");
transactionContext.Operation.Should().Be("test-operation");
transactionContext.SpanId.Should().NotBeNull();
transactionContext.ParentSpanId.Should().BeNull();
transactionContext.TraceId.Should().NotBeNull();
transactionContext.IsSampled.Should().BeNull();
transactionContext.IsParentSampled.Should().BeNull();
}
[Fact]
public void ContinueTrace_ReceivesHeadersAsStrings_SetsPropagationContextAndReturnsTransactionContext()
{
// Arrange
var hub = _fixture.GetSut();
var propagationContext = new SentryPropagationContext(
SentryId.Parse("43365712692146d08ee11a729dfbcaca"), SpanId.Parse("1000000000000000"));
hub.ConfigureScope(scope => scope.SetPropagationContext(propagationContext));
var traceHeader = "5bd5f6d346b442dd9177dce9302fd737-2000000000000000";
var baggageHeader = "sentry-trace_id=5bd5f6d346b442dd9177dce9302fd737, sentry-public_key=49d0f7386ad645858ae85020e393bef3, sentry-sample_rate=1.0";
hub.ScopeManager.ConfigureScope(scope => scope.PropagationContext.TraceId.Should().Be(SentryId.Parse("43365712692146d08ee11a729dfbcaca"))); // Sanity check
// Act
var transactionContext = hub.ContinueTrace(traceHeader, baggageHeader, "test-name");
// Assert
hub.ScopeManager.ConfigureScope(scope =>
{
scope.PropagationContext.TraceId.Should().Be(SentryId.Parse("5bd5f6d346b442dd9177dce9302fd737"));
scope.PropagationContext.ParentSpanId.Should().Be(SpanId.Parse("2000000000000000"));
Assert.NotNull(scope.PropagationContext._dynamicSamplingContext);
scope.PropagationContext._dynamicSamplingContext.ToBaggageHeader().Members.Should().Contain(BaggageHeader.TryParse(baggageHeader)!.Members);
});
transactionContext.TraceId.Should().Be(SentryId.Parse("5bd5f6d346b442dd9177dce9302fd737"));
transactionContext.ParentSpanId.Should().Be(SpanId.Parse("2000000000000000"));
}
[Fact]
public void ContinueTrace_DoesNotReceiveHeadersAsStrings_CreatesRootTrace()
{
// Arrange
var hub = _fixture.GetSut();
// Act
var transactionContext = hub.ContinueTrace((string)null, (string)null, "test-name");
// Assert
hub.ScopeManager.ConfigureScope(scope =>
{
Assert.Null(scope.PropagationContext.ParentSpanId);
Assert.Null(scope.PropagationContext._dynamicSamplingContext);
});
transactionContext.Name.Should().Be("test-name");
transactionContext.Operation.Should().BeEmpty();
transactionContext.SpanId.Should().NotBeNull();
transactionContext.ParentSpanId.Should().BeNull();
transactionContext.TraceId.Should().NotBeNull();
transactionContext.IsSampled.Should().BeNull();
transactionContext.IsParentSampled.Should().BeNull();
}
[Fact]
public void CaptureTransaction_AfterTransactionFinishes_ResetsTransactionOnScope()
{
// Arrange
var hub = _fixture.GetSut();
var transaction = hub.StartTransaction("foo", "bar");
hub.ConfigureScope(scope => scope.Transaction = transaction);
// Act
transaction.Finish();
// Assert
hub.ScopeManager.ConfigureScope(scope => scope.Transaction.Should().BeNull());
}
#nullable enable
| EvilContext |
csharp | unoplatform__uno | src/SamplesApp/UITests.Shared/Windows_UI_Xaml_Controls/GridTestsControl/Quadrant_uneven_split.xaml.cs | {
"start": 195,
"end": 326
} | partial class ____ : UserControl
{
public Quadrant_uneven_split()
{
this.InitializeComponent();
}
}
}
| Quadrant_uneven_split |
csharp | getsentry__sentry-dotnet | src/Sentry/Extensibility/DisabledHub.cs | {
"start": 168,
"end": 6325
} | public class ____ : IHub, IDisposable
{
/// <summary>
/// The singleton instance.
/// </summary>
public static readonly DisabledHub Instance = new();
/// <summary>
/// Always disabled.
/// </summary>
public bool IsEnabled => false;
/// <summary>
/// Always returns false.
/// </summary>
public bool IsSessionActive => false;
private DisabledHub()
{
}
/// <summary>
/// No-Op.
/// </summary>
public void ConfigureScope(Action<Scope> configureScope)
{
}
/// <summary>
/// No-Op.
/// </summary>
public void ConfigureScope<TArg>(Action<Scope, TArg> configureScope, TArg arg)
{
}
/// <summary>
/// No-Op.
/// </summary>
public Task ConfigureScopeAsync(Func<Scope, Task> configureScope) => Task.CompletedTask;
/// <summary>
/// No-Op.
/// </summary>
public Task ConfigureScopeAsync<TArg>(Func<Scope, TArg, Task> configureScope, TArg arg) => Task.CompletedTask;
/// <summary>
/// No-Op.
/// </summary>
public void SetTag(string key, string value)
{
}
/// <summary>
/// No-Op.
/// </summary>
public void UnsetTag(string key)
{
}
/// <summary>
/// No-Op.
/// </summary>
public IDisposable PushScope() => this;
/// <summary>
/// No-Op.
/// </summary>
public IDisposable PushScope<TState>(TState state) => this;
/// <summary>
/// Returns a dummy transaction.
/// </summary>
public ITransactionTracer StartTransaction(ITransactionContext context,
IReadOnlyDictionary<string, object?> customSamplingContext) => NoOpTransaction.Instance;
/// <summary>
/// No-Op.
/// </summary>
public void BindException(Exception exception, ISpan span)
{
}
/// <summary>
/// Returns null.
/// </summary>
public ISpan? GetSpan() => null;
/// <summary>
/// Returns null.
/// </summary>
public SentryTraceHeader? GetTraceHeader() => null;
/// <summary>
/// Returns null.
/// </summary>
public BaggageHeader? GetBaggage() => null;
/// <summary>
/// Returns null.
/// </summary>
public W3CTraceparentHeader? GetTraceparentHeader() => null;
/// <summary>
/// Returns sampled out transaction context.
/// </summary>
public TransactionContext ContinueTrace(
string? traceHeader,
string? baggageHeader,
string? name = null,
string? operation = null)
{
// Transactions from DisabledHub are always sampled out
return new TransactionContext(name ?? string.Empty, operation ?? string.Empty, isSampled: false);
}
/// <summary>
/// Returns sampled out transaction context.
/// </summary>
public TransactionContext ContinueTrace(
SentryTraceHeader? traceHeader,
BaggageHeader? baggageHeader,
string? name = null,
string? operation = null)
{
// Transactions from DisabledHub are always sampled out
return new TransactionContext(name ?? string.Empty, operation ?? string.Empty, isSampled: false);
}
/// <summary>
/// No-Op.
/// </summary>
public void StartSession()
{
}
/// <summary>
/// No-Op.
/// </summary>
public void PauseSession()
{
}
/// <summary>
/// No-Op.
/// </summary>
public void ResumeSession()
{
}
/// <summary>
/// No-Op.
/// </summary>
public void EndSession(SessionEndStatus status = SessionEndStatus.Exited)
{
}
/// <summary>
/// No-Op.
/// </summary>
public void BindClient(ISentryClient client)
{
}
/// <summary>
/// No-Op.
/// </summary>
public bool CaptureEnvelope(Envelope envelope)
{
return false;
}
/// <summary>
/// No-Op.
/// </summary>
public SentryId CaptureEvent(SentryEvent evt, Scope? scope = null, SentryHint? hint = null) => SentryId.Empty;
/// <summary>
/// No-Op.
/// </summary>
public SentryId CaptureFeedback(SentryFeedback feedback, out CaptureFeedbackResult result,
Action<Scope> configureScope, SentryHint? hint = null)
{
result = CaptureFeedbackResult.DisabledHub;
return SentryId.Empty;
}
/// <summary>
/// No-Op.
/// </summary>
public SentryId CaptureFeedback(SentryFeedback feedback, out CaptureFeedbackResult result,
Scope? scope = null, SentryHint? hint = null)
{
result = CaptureFeedbackResult.DisabledHub;
return SentryId.Empty;
}
/// <summary>
/// No-Op.
/// </summary>
public SentryId CaptureEvent(SentryEvent evt, Action<Scope> configureScope) => SentryId.Empty;
/// <summary>
/// No-Op.
/// </summary>
public SentryId CaptureEvent(SentryEvent evt, SentryHint? hint, Action<Scope> configureScope) => SentryId.Empty;
/// <summary>
/// No-Op.
/// </summary>
public void CaptureTransaction(SentryTransaction transaction)
{
}
/// <summary>
/// No-Op.
/// </summary>
public void CaptureTransaction(SentryTransaction transaction, Scope? scope, SentryHint? hint)
{
}
/// <summary>
/// No-Op.
/// </summary>
public void CaptureSession(SessionUpdate sessionUpdate)
{
}
/// <summary>
/// No-Op
/// </summary>
public SentryId CaptureCheckIn(
string monitorSlug,
CheckInStatus status,
SentryId? sentryId = null,
TimeSpan? duration = null,
Scope? scope = null,
Action<SentryMonitorOptions>? configureMonitorOptions = null)
=> SentryId.Empty;
/// <summary>
/// No-Op.
/// </summary>
public Task FlushAsync(TimeSpan timeout) => Task.CompletedTask;
/// <summary>
/// No-Op.
/// </summary>
public void Dispose()
{
}
/// <summary>
/// No-Op.
/// </summary>
public SentryId LastEventId => SentryId.Empty;
/// <summary>
/// Disabled Logger.
/// </summary>
public SentryStructuredLogger Logger => DisabledSentryStructuredLogger.Instance;
}
| DisabledHub |
csharp | unoplatform__uno | src/Uno.UWP/Generated/3.0.0.0/Windows.Devices.WiFiDirect.Services/WiFiDirectServiceProvisioningInfo.cs | {
"start": 310,
"end": 2137
} | public partial class ____
{
#if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__
internal WiFiDirectServiceProvisioningInfo()
{
}
#endif
#if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__
[global::Uno.NotImplemented("__ANDROID__", "__IOS__", "__TVOS__", "IS_UNIT_TESTS", "__WASM__", "__SKIA__", "__NETSTD_REFERENCE__")]
public bool IsGroupFormationNeeded
{
get
{
throw new global::System.NotImplementedException("The member bool WiFiDirectServiceProvisioningInfo.IsGroupFormationNeeded is not implemented. For more information, visit https://aka.platform.uno/notimplemented#m=bool%20WiFiDirectServiceProvisioningInfo.IsGroupFormationNeeded");
}
}
#endif
#if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__
[global::Uno.NotImplemented("__ANDROID__", "__IOS__", "__TVOS__", "IS_UNIT_TESTS", "__WASM__", "__SKIA__", "__NETSTD_REFERENCE__")]
public global::Windows.Devices.WiFiDirect.Services.WiFiDirectServiceConfigurationMethod SelectedConfigurationMethod
{
get
{
throw new global::System.NotImplementedException("The member WiFiDirectServiceConfigurationMethod WiFiDirectServiceProvisioningInfo.SelectedConfigurationMethod is not implemented. For more information, visit https://aka.platform.uno/notimplemented#m=WiFiDirectServiceConfigurationMethod%20WiFiDirectServiceProvisioningInfo.SelectedConfigurationMethod");
}
}
#endif
// Forced skipping of method Windows.Devices.WiFiDirect.Services.WiFiDirectServiceProvisioningInfo.SelectedConfigurationMethod.get
// Forced skipping of method Windows.Devices.WiFiDirect.Services.WiFiDirectServiceProvisioningInfo.IsGroupFormationNeeded.get
}
}
| WiFiDirectServiceProvisioningInfo |
csharp | unoplatform__uno | src/Uno.UI/UI/Xaml/Markup/Reader/XamlConstants.cs | {
"start": 126,
"end": 927
} | internal static class ____
{
public const string XamlXmlNamespace = "http://schemas.microsoft.com/winfx/2006/xaml";
public const string PresentationXamlXmlNamespace = "http://schemas.microsoft.com/winfx/2006/xaml/presentation";
public const string XmlXmlNamespace = "http://www.w3.org/XML/1998/namespace";
public const string BundleResourcePrefix = "ms-appx:///";
public const string RootFoundationNamespace = "Windows.Foundation";
public const string RootWUINamespace = "Windows" + ".UI"; // Keep split for the WinUI conversion tool
public const string RootMUINamespace = "Microsoft.UI";
public const string BaseXamlNamespace = "Microsoft.UI.Xaml";
public const string UnoXamlNamespace = "Microsoft.UI.Xaml";
public const string UnknownContent = "_UnknownContent";
| XamlConstants |
csharp | SixLabors__ImageSharp | tests/ImageSharp.Tests/Processing/Filters/OpacityTest.cs | {
"start": 268,
"end": 828
} | public class ____ : BaseImageOperationsExtensionTest
{
[Fact]
public void Alpha_amount_AlphaProcessorDefaultsSet()
{
this.operations.Opacity(0.2f);
OpacityProcessor processor = this.Verify<OpacityProcessor>();
Assert.Equal(.2f, processor.Amount);
}
[Fact]
public void Alpha_amount_rect_AlphaProcessorDefaultsSet()
{
this.operations.Opacity(0.6f, this.rect);
OpacityProcessor processor = this.Verify<OpacityProcessor>(this.rect);
Assert.Equal(.6f, processor.Amount);
}
}
| OpacityTest |
csharp | dotnet__maui | src/Controls/src/Core/Platform/Android/TabbedPageManager.cs | {
"start": 25340,
"end": 28110
} | class ____ : ViewPager2.OnPageChangeCallback,
#pragma warning disable CS0618 // Type or member is obsolete
TabLayout.IOnTabSelectedListener,
#pragma warning restore CS0618 // Type or member is obsolete
NavigationBarView.IOnItemSelectedListener,
TabLayoutMediator.ITabConfigurationStrategy
{
readonly TabbedPageManager _tabbedPageManager;
public Listeners(TabbedPageManager tabbedPageManager)
{
_tabbedPageManager = tabbedPageManager;
}
public override void OnPageSelected(int position)
{
base.OnPageSelected(position);
var Element = _tabbedPageManager.Element;
if (Element == null)
return;
var _previousPage = _tabbedPageManager.previousPage;
var IsBottomTabPlacement = _tabbedPageManager.IsBottomTabPlacement;
var _bottomNavigationView = _tabbedPageManager._bottomNavigationView;
if (_previousPage != Element.CurrentPage)
{
_previousPage?.SendDisappearing();
_previousPage = Element.CurrentPage;
_tabbedPageManager.previousPage = Element.CurrentPage;
}
// This only happens if all the pages have been removed
if (Element.Children.Count > 0)
{
Element.CurrentPage = Element.Children[position];
Element.CurrentPage.SendAppearing();
}
if (IsBottomTabPlacement)
_bottomNavigationView.SelectedItemId = position;
}
void TabLayoutMediator.ITabConfigurationStrategy.OnConfigureTab(TabLayout.Tab p0, int p1)
{
p0.SetText(_tabbedPageManager.Element.Children[p1].Title);
}
bool NavigationBarView.IOnItemSelectedListener.OnNavigationItemSelected(IMenuItem item)
{
if (_tabbedPageManager.Element == null)
return false;
var id = item.ItemId;
if (id == BottomNavigationViewUtils.MoreTabId)
{
var items = _tabbedPageManager.CreateTabList();
var bottomSheetDialog = BottomNavigationViewUtils.CreateMoreBottomSheet(_tabbedPageManager.OnMoreItemSelected, _tabbedPageManager.Element.FindMauiContext(), items, _tabbedPageManager._bottomNavigationView.MaxItemCount);
bottomSheetDialog.DismissEvent += _tabbedPageManager.OnMoreSheetDismissed;
bottomSheetDialog.Show();
}
else
{
if (_tabbedPageManager._bottomNavigationView.SelectedItemId != item.ItemId && _tabbedPageManager.Element.Children.Count > item.ItemId)
_tabbedPageManager.Element.CurrentPage = _tabbedPageManager.Element.Children[item.ItemId];
}
return true;
}
void TabLayout.IOnTabSelectedListener.OnTabReselected(TabLayout.Tab tab)
{
}
void TabLayout.IOnTabSelectedListener.OnTabSelected(TabLayout.Tab tab)
{
_tabbedPageManager.TabSelected(tab);
}
void TabLayout.IOnTabSelectedListener.OnTabUnselected(TabLayout.Tab tab)
{
_tabbedPageManager.SetIconColorFilter(_tabbedPageManager.Element.CurrentPage, tab, false);
}
}
} | Listeners |
csharp | CommunityToolkit__WindowsCommunityToolkit | Microsoft.Toolkit.Uwp.UI.Controls.Primitives/AdaptiveGridView/AdaptiveGridView.Properties.cs | {
"start": 1030,
"end": 7262
} | public partial class ____
{
/// <summary>
/// Identifies the <see cref="ItemClickCommand"/> dependency property.
/// </summary>
public static readonly DependencyProperty ItemClickCommandProperty =
DependencyProperty.Register(nameof(ItemClickCommand), typeof(ICommand), typeof(AdaptiveGridView), new PropertyMetadata(null));
/// <summary>
/// Identifies the <see cref="ItemHeight"/> dependency property.
/// </summary>
public static readonly DependencyProperty ItemHeightProperty =
DependencyProperty.Register(nameof(ItemHeight), typeof(double), typeof(AdaptiveGridView), new PropertyMetadata(double.NaN));
/// <summary>
/// Identifies the <see cref="OneRowModeEnabled"/> dependency property.
/// </summary>
public static readonly DependencyProperty OneRowModeEnabledProperty =
DependencyProperty.Register(nameof(OneRowModeEnabled), typeof(bool), typeof(AdaptiveGridView), new PropertyMetadata(false, (o, e) => { OnOneRowModeEnabledChanged(o, e.NewValue); }));
/// <summary>
/// Identifies the <see cref="ItemWidth"/> dependency property.
/// </summary>
private static readonly DependencyProperty ItemWidthProperty =
DependencyProperty.Register(nameof(ItemWidth), typeof(double), typeof(AdaptiveGridView), new PropertyMetadata(double.NaN));
/// <summary>
/// Identifies the <see cref="DesiredWidth"/> dependency property.
/// </summary>
public static readonly DependencyProperty DesiredWidthProperty =
DependencyProperty.Register(nameof(DesiredWidth), typeof(double), typeof(AdaptiveGridView), new PropertyMetadata(double.NaN, DesiredWidthChanged));
/// <summary>
/// Identifies the <see cref="StretchContentForSingleRow"/> dependency property.
/// </summary>
public static readonly DependencyProperty StretchContentForSingleRowProperty =
DependencyProperty.Register(nameof(StretchContentForSingleRow), typeof(bool), typeof(AdaptiveGridView), new PropertyMetadata(true, OnStretchContentForSingleRowPropertyChanged));
private static void OnOneRowModeEnabledChanged(DependencyObject d, object newValue)
{
var self = d as AdaptiveGridView;
self.DetermineOneRowMode();
}
private static void DesiredWidthChanged(DependencyObject d, DependencyPropertyChangedEventArgs e)
{
var self = d as AdaptiveGridView;
self.RecalculateLayout(self.ActualWidth);
}
private static void OnStretchContentForSingleRowPropertyChanged(DependencyObject d, DependencyPropertyChangedEventArgs e)
{
var self = d as AdaptiveGridView;
self.RecalculateLayout(self.ActualWidth);
}
/// <summary>
/// Gets or sets the desired width of each item
/// </summary>
/// <value>The width of the desired.</value>
public double DesiredWidth
{
get { return (double)GetValue(DesiredWidthProperty); }
set { SetValue(DesiredWidthProperty, value); }
}
/// <summary>
/// Gets or sets a value indicating whether the control should stretch the content to fill at least one row.
/// </summary>
/// <remarks>
/// If set to <c>true</c> (default) and there is only one row of items, the items will be stretched to fill the complete row.
/// If set to <c>false</c>, items will have their normal size, which means a gap can exist at the end of the row.
/// </remarks>
/// <value>A value indicating whether the control should stretch the content to fill at least one row.</value>
public bool StretchContentForSingleRow
{
get { return (bool)GetValue(StretchContentForSingleRowProperty); }
set { SetValue(StretchContentForSingleRowProperty, value); }
}
/// <summary>
/// Gets or sets the command to execute when an item is clicked and the IsItemClickEnabled property is true.
/// </summary>
/// <value>The item click command.</value>
public ICommand ItemClickCommand
{
get { return (ICommand)GetValue(ItemClickCommandProperty); }
set { SetValue(ItemClickCommandProperty, value); }
}
/// <summary>
/// Gets or sets the height of each item in the grid.
/// </summary>
/// <value>The height of the item.</value>
public double ItemHeight
{
get { return (double)GetValue(ItemHeightProperty); }
set { SetValue(ItemHeightProperty, value); }
}
/// <summary>
/// Gets or sets a value indicating whether only one row should be displayed.
/// </summary>
/// <value><c>true</c> if only one row is displayed; otherwise, <c>false</c>.</value>
public bool OneRowModeEnabled
{
get { return (bool)GetValue(OneRowModeEnabledProperty); }
set { SetValue(OneRowModeEnabledProperty, value); }
}
/// <summary>
/// Gets the template that defines the panel that controls the layout of items.
/// </summary>
/// <remarks>
/// This property overrides the base ItemsPanel to prevent changing it.
/// </remarks>
/// <returns>
/// An ItemsPanelTemplate that defines the panel to use for the layout of the items.
/// The default value for the ItemsControl is an ItemsPanelTemplate that specifies
/// a StackPanel.
/// </returns>
public new ItemsPanelTemplate ItemsPanel => base.ItemsPanel;
private double ItemWidth
{
get { return (double)GetValue(ItemWidthProperty); }
set { SetValue(ItemWidthProperty, value); }
}
private static int CalculateColumns(double containerWidth, double itemWidth)
{
var columns = (int)Math.Round(containerWidth / itemWidth);
if (columns == 0)
{
columns = 1;
}
return columns;
}
}
} | AdaptiveGridView |
csharp | JamesNK__Newtonsoft.Json | Src/Newtonsoft.Json.Tests/JsonTextReaderTests/ReadAsyncTests.cs | {
"start": 65837,
"end": 69592
} | private class ____ : JsonReader
{
public override bool Read() => true;
}
[Test]
public void AsyncMethodsAlreadyCancelledOnTextReaderSubclass()
{
CancellationTokenSource source = new CancellationTokenSource();
CancellationToken token = source.Token;
source.Cancel();
JsonTextReader reader = new NoOverridesDerivedJsonTextAsync();
Assert.IsTrue(reader.ReadAsync(token).IsCanceled);
Assert.IsTrue(reader.ReadAsBooleanAsync(token).IsCanceled);
Assert.IsTrue(reader.ReadAsBytesAsync(token).IsCanceled);
Assert.IsTrue(reader.ReadAsDateTimeAsync(token).IsCanceled);
Assert.IsTrue(reader.ReadAsDateTimeOffsetAsync(token).IsCanceled);
Assert.IsTrue(reader.ReadAsDecimalAsync(token).IsCanceled);
Assert.IsTrue(reader.ReadAsInt32Async(token).IsCanceled);
Assert.IsTrue(reader.ReadAsStringAsync(token).IsCanceled);
}
[Test]
public void AsyncMethodsAlreadyCancelledOnReaderSubclass()
{
CancellationTokenSource source = new CancellationTokenSource();
CancellationToken token = source.Token;
source.Cancel();
JsonReader reader = new MinimalOverridesDerivedJsonReader();
Assert.IsTrue(reader.ReadAsync(token).IsCanceled);
Assert.IsTrue(reader.ReadAsBooleanAsync(token).IsCanceled);
Assert.IsTrue(reader.ReadAsBytesAsync(token).IsCanceled);
Assert.IsTrue(reader.ReadAsDateTimeAsync(token).IsCanceled);
Assert.IsTrue(reader.ReadAsDateTimeOffsetAsync(token).IsCanceled);
Assert.IsTrue(reader.ReadAsDecimalAsync(token).IsCanceled);
Assert.IsTrue(reader.ReadAsInt32Async(token).IsCanceled);
Assert.IsTrue(reader.ReadAsStringAsync(token).IsCanceled);
}
[Test]
public async Task ThrowOnDuplicateKeysDeserializingAsync()
{
string json = @"
{
""a"": 1,
""b"": [
{
""c"": {
""d"": 1,
""d"": ""2""
}
}
]
}
";
JsonLoadSettings settings = new JsonLoadSettings { DuplicatePropertyNameHandling = DuplicatePropertyNameHandling.Error };
JsonTextReader reader = new JsonTextReader(new StringReader(json));
await ExceptionAssert.ThrowsAsync<JsonReaderException>(async () => await JToken.ReadFromAsync(reader, settings));
}
[Test]
public async Task MaxDepth_GreaterThanDefaultAsync()
{
string json = GetNestedJson(150);
JsonTextReader reader = new JsonTextReader(new StringReader(json));
reader.MaxDepth = 150;
while (await reader.ReadAsync())
{
}
}
[Test]
public async Task MaxDepth_NullAsync()
{
string json = GetNestedJson(150);
JsonTextReader reader = new JsonTextReader(new StringReader(json));
reader.MaxDepth = null;
while (await reader.ReadAsync())
{
}
}
[Test]
public async Task MaxDepth_MaxValueAsync()
{
string json = GetNestedJson(150);
JsonTextReader reader = new JsonTextReader(new StringReader(json));
reader.MaxDepth = int.MaxValue;
while (await reader.ReadAsync())
{
}
}
}
}
#endif | MinimalOverridesDerivedJsonReader |
csharp | ChilliCream__graphql-platform | src/HotChocolate/Core/src/Types/Types/FieldCollection.cs | {
"start": 8699,
"end": 9609
} | private sealed class ____(InterfaceFieldCollection fields)
: IReadOnlyFieldDefinitionCollection<IOutputFieldDefinition>
{
public IOutputFieldDefinition this[string name] => fields[name];
public IOutputFieldDefinition this[int index] => fields[index];
public int Count => fields.Count;
public bool ContainsName(string name) => fields.ContainsField(name);
public bool TryGetField(string name, [NotNullWhen(true)] out IOutputFieldDefinition? field)
{
if (fields.TryGetField(name, out var fld))
{
field = fld;
return true;
}
field = null;
return false;
}
public IEnumerator<IOutputFieldDefinition> GetEnumerator() => fields.GetEnumerator();
IEnumerator IEnumerable.GetEnumerator() => GetEnumerator();
}
}
| FieldDefinitionCollection |
csharp | microsoft__semantic-kernel | dotnet/src/VectorData/Weaviate/ModelV2/WeaviateCollectionSchemaVectorIndexConfig.cs | {
"start": 147,
"end": 292
} | internal sealed class ____
{
[JsonPropertyName("distance")]
public string? Distance { get; set; }
}
| WeaviateCollectionSchemaVectorIndexConfig |
csharp | ivanpaulovich__clean-architecture-manga | accounts-api/src/Domain/ValueObjects/CreditId.cs | {
"start": 466,
"end": 1068
} | struct ____ : IEquatable<CreditId>
{
public Guid Id { get; }
public CreditId(Guid id) =>
this.Id = id;
public override bool Equals(object? obj) =>
obj is CreditId o && this.Equals(o);
public bool Equals(CreditId other) => this.Id == other.Id;
public override int GetHashCode() =>
HashCode.Combine(this.Id);
public static bool operator ==(CreditId left, CreditId right) => left.Equals(right);
public static bool operator !=(CreditId left, CreditId right) => !(left == right);
public override string ToString() => this.Id.ToString();
}
| CreditId |
csharp | dotnet__maui | src/Compatibility/Core/src/WPF/Renderers/NavigationPageRenderer.cs | {
"start": 249,
"end": 707
} | public class ____ : FormsNavigationPage
{
NavigationPage NavigationPage;
public FormsLightNavigationPage(NavigationPage navigationPage)
{
ContentLoader = new FormsContentLoader();
NavigationPage = navigationPage;
}
public override void OnBackButtonPressed()
{
if (NavigationPage?.CurrentPage == null)
return;
if (!NavigationPage.CurrentPage.SendBackButtonPressed())
NavigationPage.PopAsync();
}
}
| FormsLightNavigationPage |
csharp | unoplatform__uno | src/Uno.UWP/Generated/3.0.0.0/Windows.Data.Xml.Dom/IXmlNodeSelector.cs | {
"start": 255,
"end": 908
} | public partial interface ____
{
// Skipping already declared method Windows.Data.Xml.Dom.IXmlNodeSelector.SelectSingleNode(string)
// Skipping already declared method Windows.Data.Xml.Dom.IXmlNodeSelector.SelectNodes(string)
#if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__
global::Windows.Data.Xml.Dom.IXmlNode SelectSingleNodeNS(string xpath, object namespaces);
#endif
#if __ANDROID__ || __IOS__ || __TVOS__ || IS_UNIT_TESTS || __WASM__ || __SKIA__ || __NETSTD_REFERENCE__
global::Windows.Data.Xml.Dom.XmlNodeList SelectNodesNS(string xpath, object namespaces);
#endif
}
}
| IXmlNodeSelector |
csharp | ChilliCream__graphql-platform | src/HotChocolate/ApolloFederation/test/ApolloFederation.Tests/Directives/RequiresDirectiveTests.cs | {
"start": 3032,
"end": 3199
} | public class ____
{
[Key]
public int Id { get; set; }
[Requires("id")]
public Product Product { get; set; } = null!;
}
| Review |
csharp | AutoFixture__AutoFixture | Src/AutoFixtureUnitTest/Kernel/MissingParametersSupplyingMethodTests.cs | {
"start": 5608,
"end": 6957
} | public static class ____
{
public static string[] MethodWithOptionalArgumentString(string argument, string optional = "100")
{
return new[] { argument, optional };
}
public static int[] MethodWithOptionalArgument(int argument, int optional = 100)
{
return new[] { argument, optional };
}
public static string[] MethodWithParamsArgumentString(string argument, params string[] arguments)
{
return new[] { argument }.Concat(arguments).ToArray();
}
public static int[] MethodWithParamsArgument(int argument, params int[] arguments)
{
return new[] { argument }.Concat(arguments).ToArray();
}
public static int[] MethodWithOptionalAndParamsArguments(int argument, int optional = 200, params int[] arguments)
{
return new[] { argument, optional }.Concat(arguments).ToArray();
}
public static string[] MethodWithOptionalAndParamsArgumentsString(string argument, string optional = "200", params string[] arguments)
{
return new[] { argument, optional }.Concat(arguments).ToArray();
}
}
}
} | TypeWithMethodsWithOptionalArguments |
csharp | ChilliCream__graphql-platform | src/HotChocolate/Core/test/Execution.Tests/Processing/OperationCompilerTests.cs | {
"start": 32329,
"end": 35004
} | interface ____ {
id: ID!
name: String!
someType: SomeType!
children: [OrganizationUnit!]!
}
type SomeType {
id: ID!
name: String!
}
type OrganizationUnit1 implements OrganizationUnit {
id: ID!
name: String!
someType: SomeType!
children: [OrganizationUnit!]!
}
type OrganizationUnit2 implements OrganizationUnit {
id: ID!
name: String!
someType: SomeType!
children: [OrganizationUnit!]!
}
""")
.UseField(next => next)
.BuildSchemaAsync();
var document = Utf8GraphQLParser.Parse(
"""
{
organizationUnits {
id
name
someType {
id
name
}
children {
id
name
someType {
id
name
}
children {
id
name
someType {
id
name
}
}
}
}
}
""");
var operationDefinition = document.Definitions.OfType<OperationDefinitionNode>().Single();
// act
var compiler = new OperationCompiler(new InputParser());
var operation = compiler.Compile(
new OperationCompilerRequest(
"opid",
document,
operationDefinition,
schema.QueryType,
schema));
// assert
MatchSnapshot(document, operation);
}
[Fact]
public async Task Ensure_Selection_Backlog_Does_Not_Exponentially_Grow()
{
// arrange
var schema =
await new ServiceCollection()
.AddGraphQLServer()
.AddDocumentFromString(
"""
type Query {
organizationUnits: [OrganizationUnit!]!
}
| OrganizationUnit |
csharp | JoshClose__CsvHelper | docs-src/CsvHelper.DocsGenerator/Generators/TypeGenerator.cs | {
"start": 106,
"end": 3425
} | public class ____ : DocumentGenerator
{
public TypeGenerator(TypeInfo typeInfo) : base(typeInfo) { }
protected override void GenerateContent()
{
// Title
content.AppendLine($"# {typeInfo.Type.GetHtmlName()} {typeInfo.Type.GetTypeName()}");
// Namespace
content.AppendLine();
content.AppendLine($"Namespace: [{typeInfo.Type.Namespace}](/api/{typeInfo.Type.Namespace})");
// Summary
content.AppendLine();
content.AppendLine(typeInfo.Type.GetSummary());
// Definition
content.AppendLine();
content.AppendLine("```cs");
foreach (var attribute in typeInfo.Attributes)
{
content.AppendLine($"[{attribute.GetFullCodeName()}]");
}
var inheritanceText = string.Empty;
if (!typeInfo.Type.IsEnum && typeInfo.Implementers.Count > 0)
{
inheritanceText = $": {string.Join(", ", typeInfo.Implementers.Select(i => i.GetCodeName()))}";
}
var typeModifier = string.Empty;
if (typeInfo.Type.IsAbstract && typeInfo.Type.IsSealed && !typeInfo.Type.IsInterface)
{
typeModifier = "static ";
}
else if (typeInfo.Type.IsAbstract && !typeInfo.Type.IsSealed && !typeInfo.Type.IsInterface)
{
typeModifier = "abstract ";
}
content.AppendLine($"public {typeModifier}{typeInfo.Type.GetTypeName().ToLower()} {typeInfo.Type.GetCodeName()} {inheritanceText}");
content.AppendLine("```");
// Inheritance
if (typeInfo.Inheritance.Count > 0)
{
content.AppendLine();
content.AppendLine($"Inheritance {string.Join(" -> ", typeInfo.Inheritance.Select(t => t.GetHtmlName()))}");
}
// Constructors
if (typeInfo.Constructors.Count > 0)
{
content.AppendLine("");
content.AppendLine("## Constructors");
content.AppendLine(" | ");
content.AppendLine("- | -");
foreach (var constructorInfo in typeInfo.Constructors)
{
content.AppendLine($"{constructorInfo.Constructor.GetHtmlName()} | {constructorInfo.Constructor.GetSummary()}");
}
}
// Fields
if (typeInfo.Fields.Count > 0)
{
content.AppendLine();
content.AppendLine("## Fields");
content.AppendLine(" | ");
content.AppendLine("- | -");
foreach (var field in typeInfo.Fields)
{
content.AppendLine($"{field.GetHtmlName()} | {field.GetSummary()}");
}
}
// Properties
if (typeInfo.Properties.Count > 0)
{
content.AppendLine();
content.AppendLine("## Properties");
content.AppendLine(" | ");
content.AppendLine("- | -");
foreach (var property in typeInfo.Properties)
{
if (property.IndexParameters.Count > 0)
{
var parameters = string.Join(", ", property.IndexParameters.Select(ip => ip.ParameterType.GetHtmlName()));
content.AppendLine($"this[{parameters}] | {property.Property.GetSummary()}");
}
else
{
content.AppendLine($"{property.Property.GetHtmlName()} | {property.Property.GetSummary()}");
}
}
}
// Methods
if (typeInfo.Methods.Count > 0)
{
content.AppendLine();
content.AppendLine("## Methods");
content.AppendLine(" | ");
content.AppendLine("- | -");
foreach (var method in typeInfo.Methods)
{
content.AppendLine($"{method.Method.GetHtmlName()} | {method.Method.GetSummary()}");
}
}
}
}
}
| TypeGenerator |
csharp | ServiceStack__ServiceStack | ServiceStack/src/ServiceStack/Host/Handlers/RedirectHttpHandler.cs | {
"start": 126,
"end": 1974
} | public class ____ : HttpAsyncTaskHandler
{
public RedirectHttpHandler()
{
this.RequestName = nameof(RedirectHttpHandler);
this.StatusCode = HttpStatusCode.Redirect;
}
public string RelativeUrl { get; set; }
public string AbsoluteUrl { get; set; }
public HttpStatusCode StatusCode { get; set; }
public static string MakeRelative(string relativeUrl)
{
if (string.IsNullOrEmpty(relativeUrl))
return null;
if (relativeUrl.StartsWith("~/"))
return relativeUrl;
return relativeUrl.StartsWith("/")
? "~" + relativeUrl
: "~/" + relativeUrl;
}
public override Task ProcessRequestAsync(IRequest request, IResponse response, string operationName)
{
if (string.IsNullOrEmpty(RelativeUrl) && string.IsNullOrEmpty(AbsoluteUrl))
throw new ArgumentException("RelativeUrl and AbsoluteUrl is Required");
if (!string.IsNullOrEmpty(AbsoluteUrl))
{
response.StatusCode = (int)StatusCode;
response.AddHeader(HttpHeaders.Location, this.AbsoluteUrl);
}
else
{
if (RelativeUrl.StartsWith("http://") || RelativeUrl.StartsWith("https://"))
throw new ArgumentException($"'{RelativeUrl}' is not a RelativeUrl, use AbsoluteUrl instead");
var absoluteUrl = this.RelativeUrl.StartsWith("/")
? request.GetApplicationUrl().CombineWith(this.RelativeUrl) //preserve compat
: request.ResolveAbsoluteUrl(MakeRelative(this.RelativeUrl));
response.StatusCode = (int)StatusCode;
response.AddHeader(HttpHeaders.Location, absoluteUrl);
}
response.EndHttpHandlerRequest(skipClose: true);
return TypeConstants.EmptyTask;
}
}
| RedirectHttpHandler |
csharp | EventStore__EventStore | src/KurrentDB.Projections.Core/Services/IProjectionStateHandler.cs | {
"start": 2610,
"end": 4085
} | public static class ____ {
public static bool ProcessEvent(
this IProjectionStateHandler self, string partition, CheckpointTag eventPosition, string streamId,
string eventType, string category, Guid eventId, long eventSequenceNumber, string metadata, string data,
out string state, out EmittedEventEnvelope[] emittedEvents, bool isJson = true) {
string ignoredSharedState;
return self.ProcessEvent(
partition, eventPosition, category,
new ResolvedEvent(
streamId, eventSequenceNumber, streamId, eventSequenceNumber, false, new TFPos(0, -1), eventId,
eventType, isJson, data, metadata), out state, out ignoredSharedState, out emittedEvents);
}
public static bool ProcessEvent(
this IProjectionStateHandler self, string partition, CheckpointTag eventPosition, string streamId,
string eventType, string category, Guid eventId, long eventSequenceNumber, string metadata, string data,
out string state, out string sharedState, out EmittedEventEnvelope[] emittedEvents, bool isJson = true) {
return self.ProcessEvent(
partition, eventPosition, category,
new ResolvedEvent(
streamId, eventSequenceNumber, streamId, eventSequenceNumber, false, new TFPos(0, -1), eventId,
eventType, isJson, data, metadata), out state, out sharedState, out emittedEvents);
}
public static string GetNativeHandlerName(this Type handlerType) {
return "native:" + handlerType.Namespace + "." + handlerType.Name;
}
}
| ProjectionStateHandlerTestExtensions |
csharp | grandnode__grandnode2 | src/Business/Grand.Business.Catalog/Services/Collections/ProductCollectionService.cs | {
"start": 463,
"end": 6429
} | public class ____ : IProductCollectionService
{
#region Ctor
public ProductCollectionService(ICacheBase cacheBase,
IRepository<Product> productRepository,
IContextAccessor contextAccessor,
IMediator mediator, AccessControlConfig accessControlConfig)
{
_cacheBase = cacheBase;
_productRepository = productRepository;
_contextAccessor = contextAccessor;
_mediator = mediator;
_accessControlConfig = accessControlConfig;
}
#endregion
/// <summary>
/// Gets product collection by collection id
/// </summary>
/// <param name="collectionId">Collection id</param>
/// <param name="storeId">Store ident</param>
/// <param name="pageIndex">Page index</param>
/// <param name="pageSize">Page size</param>
/// <param name="showHidden">A value that indicates if it should shows hidden records</param>
/// <returns>Product collection collection</returns>
public virtual async Task<IPagedList<ProductsCollection>> GetProductCollectionsByCollectionId(string collectionId,
string storeId,
int pageIndex = 0, int pageSize = int.MaxValue, bool showHidden = false)
{
var key = string.Format(CacheKey.PRODUCTCOLLECTIONS_ALLBYCOLLECTIONID_KEY, showHidden, collectionId, pageIndex,
pageSize, _contextAccessor.WorkContext.CurrentCustomer.Id, storeId);
return await _cacheBase.GetAsync(key, () =>
{
var query = _productRepository.Table.Where(x =>
x.ProductCollections.Any(y => y.CollectionId == collectionId));
if (!showHidden && (!_accessControlConfig.IgnoreAcl || !_accessControlConfig.IgnoreStoreLimitations))
{
if (!_accessControlConfig.IgnoreAcl)
{
//ACL (access control list)
var allowedCustomerGroupsIds = _contextAccessor.WorkContext.CurrentCustomer.GetCustomerGroupIds();
query = from p in query
where !p.LimitedToGroups || allowedCustomerGroupsIds.Any(x => p.CustomerGroups.Contains(x))
select p;
}
if (!_accessControlConfig.IgnoreStoreLimitations && !string.IsNullOrEmpty(storeId))
//Store acl
query = from p in query
where !p.LimitedToStores || p.Stores.Contains(storeId)
select p;
}
var queryProductCollection = from prod in query
from pm in prod.ProductCollections
select new ProductsCollection {
Id = pm.Id,
ProductId = prod.Id,
DisplayOrder = pm.DisplayOrder,
IsFeaturedProduct = pm.IsFeaturedProduct,
CollectionId = pm.CollectionId
};
queryProductCollection = from pm in queryProductCollection
where pm.CollectionId == collectionId
orderby pm.DisplayOrder
select pm;
return Task.FromResult(new PagedList<ProductsCollection>(queryProductCollection, pageIndex, pageSize));
});
}
/// <summary>
/// Inserts a product collection mapping
/// </summary>
/// <param name="productCollection">Product collection mapping</param>
/// <param name="productId">Product ident</param>
public virtual async Task InsertProductCollection(ProductCollection productCollection, string productId)
{
ArgumentNullException.ThrowIfNull(productCollection);
await _productRepository.AddToSet(productId, x => x.ProductCollections, productCollection);
//cache
await _cacheBase.RemoveByPrefix(CacheKey.PRODUCTCOLLECTIONS_PATTERN_KEY);
await _cacheBase.RemoveByPrefix(string.Format(CacheKey.PRODUCTS_BY_ID_KEY, productId));
//event notification
await _mediator.EntityInserted(productCollection);
}
/// <summary>
/// Updates the product collection mapping
/// </summary>
/// <param name="productCollection">Product collection mapping</param>
/// <param name="productId">Product id</param>
public virtual async Task UpdateProductCollection(ProductCollection productCollection, string productId)
{
ArgumentNullException.ThrowIfNull(productCollection);
await _productRepository.UpdateToSet(productId, x => x.ProductCollections, z => z.Id, productCollection.Id,
productCollection);
//cache
await _cacheBase.RemoveByPrefix(CacheKey.PRODUCTCOLLECTIONS_PATTERN_KEY);
await _cacheBase.RemoveByPrefix(string.Format(CacheKey.PRODUCTS_BY_ID_KEY, productId));
//event notification
await _mediator.EntityUpdated(productCollection);
}
/// <summary>
/// Deletes a product collection mapping
/// </summary>
/// <param name="productCollection">Product collection mapping</param>
/// <param name="productId">Product id</param>
public virtual async Task DeleteProductCollection(ProductCollection productCollection, string productId)
{
ArgumentNullException.ThrowIfNull(productCollection);
await _productRepository.PullFilter(productId, x => x.ProductCollections, z => z.Id, productCollection.Id);
//cache
await _cacheBase.RemoveByPrefix(CacheKey.PRODUCTCOLLECTIONS_PATTERN_KEY);
await _cacheBase.RemoveByPrefix(string.Format(CacheKey.PRODUCTS_BY_ID_KEY, productId));
//event notification
await _mediator.EntityDeleted(productCollection);
}
#region Fields
private readonly IRepository<Product> _productRepository;
private readonly IContextAccessor _contextAccessor;
private readonly IMediator _mediator;
private readonly ICacheBase _cacheBase;
private readonly AccessControlConfig _accessControlConfig;
#endregion
} | ProductCollectionService |
csharp | Antaris__RazorEngine | src/source/RazorEngine.Core/Compilation/DynamicObject/Impromtu/Curry.cs | {
"start": 5123,
"end": 6912
} | class ____ override this method to specify dynamic behavior for operations such as invoking an object or a delegate.
/// </summary>
/// <param name="binder">Provides information about the invoke operation.</param>
/// <param name="args">The arguments that are passed to the object during the invoke operation. For example, for the sampleObject(100) operation, where sampleObject is derived from the <see cref="T:System.Dynamic.DynamicObject"/> class, <paramref name="args"/>[0] is equal to 100.</param>
/// <param name="result">The result of the object invocation.</param>
/// <returns>
/// true if the operation is successful; otherwise, false. If this method returns false, the run-time binder of the language determines the behavior. (In most cases, a language-specific run-time exception is thrown.
/// </returns>
public override bool TryInvoke(InvokeBinder binder, object[] args, out object result)
{
var tCurrying = _target as PartialApplyInvocation;
var curryResult = tCurrying != null
//If already currying append
? new PartialApplyInvocation(tCurrying.Target,
tCurrying.Args.Concat(Util.NameArgsIfNecessary(binder.CallInfo, args)).
ToArray(), tCurrying.MemberName, tCurrying.TotalArgCount, tCurrying.InvocationKind)
: new PartialApplyInvocation(_target, Util.NameArgsIfNecessary(binder.CallInfo, args), String.Empty, _totalArgCount);
result = curryResult;
if (args.Length == curryResult.TotalArgCount)
result = ((dynamic)curryResult)();
return true;
}
}
#endif
}
| can |
csharp | ServiceStack__ServiceStack | ServiceStack/tests/CheckWeb/Test.dtos.cs | {
"start": 37963,
"end": 38363
} | public partial class ____
{
public HelloInnerTypesResponse()
{
InnerList = new List<TypesGroup.InnerTypeItem>{};
}
public virtual TypesGroup.InnerType InnerType { get; set; }
public virtual TypesGroup.InnerEnum InnerEnum { get; set; }
public virtual List<TypesGroup.InnerTypeItem> InnerList { get; set; }
}
| HelloInnerTypesResponse |
csharp | EventStore__EventStore | src/KurrentDB.Auth.LegacyAuthorizationWithStreamAuthorizationDisabled/ClaimMatchAssertion.cs | {
"start": 376,
"end": 1630
} | public class ____ : IComparable<ClaimMatchAssertion>, IAssertion {
private readonly Claim _claim;
public ClaimMatchAssertion(Grant grant, Claim claim) {
_claim = claim;
Grant = grant;
Information = new AssertionInformation("equal", _claim.ToString(), grant);
}
public AssertionInformation Information { get; }
public Grant Grant { get; }
public ValueTask<bool> Evaluate(ClaimsPrincipal cp, Operation operation, PolicyInformation policy,
EvaluationContext context) {
// ReSharper disable once PatternAlwaysOfType
if (cp.FindFirst(x =>
string.Equals(x.Type, _claim.Type, StringComparison.Ordinal) &&
string.Equals(x.Value, _claim.Value, StringComparison.Ordinal)) is Claim matched) {
context.Add(new AssertionMatch(policy, Information, matched));
return new ValueTask<bool>(true);
}
return new ValueTask<bool>(false);
}
public int CompareTo(ClaimMatchAssertion other) {
if (other == null)
throw new ArgumentNullException(nameof(other));
var grant = Grant.CompareTo(other.Grant);
if (grant != 0)
return grant * -1;
var type = string.CompareOrdinal(_claim.Type, other._claim.Type);
if (type != 0)
return type;
return string.CompareOrdinal(_claim.Value, other._claim.Value);
}
}
| ClaimMatchAssertion |
csharp | ServiceStack__ServiceStack | ServiceStack.Stripe/src/ServiceStack.Stripe/StripeGateway.cs | {
"start": 13918,
"end": 14407
} | public class ____ : IPost, IReturn<StripePlan>
{
[IgnoreDataMember]
public string Id { get; set; }
public Dictionary<string, string> Metadata { get; set; }
public string Nickname { get; set; }
public string Product { get; set; }
/// <summary>
/// Still supported but not specified in arguments in latest API Version: https://stripe.com/docs/api#create_plan
/// </summary>
public int? TrialPeriodDays { get; set; }
}
[Route("/plans/{Id}")]
| UpdateStripePlan |
csharp | abpframework__abp | modules/audit-logging/src/Volo.Abp.AuditLogging.Domain/Volo/Abp/AuditLogging/IAuditLogInfoToAuditLogConverter.cs | {
"start": 91,
"end": 205
} | public interface ____
{
Task<AuditLog> ConvertAsync(AuditLogInfo auditLogInfo);
}
| IAuditLogInfoToAuditLogConverter |
csharp | open-telemetry__opentelemetry-dotnet | src/OpenTelemetry.Exporter.OpenTelemetryProtocol/Implementation/ExportClient/IExportClient.cs | {
"start": 212,
"end": 1292
} | internal interface ____
{
/// <summary>
/// Method for sending export request to the server.
/// </summary>
/// <param name="buffer">The request body to send to the server.</param>
/// <param name="contentLength">length of the content.</param>
/// <param name="deadlineUtc">The deadline time in utc for export request to finish.</param>
/// <param name="cancellationToken">An optional token for canceling the call.</param>
/// <returns><see cref="ExportClientResponse"/>.</returns>
ExportClientResponse SendExportRequest(byte[] buffer, int contentLength, DateTime deadlineUtc, CancellationToken cancellationToken = default);
/// <summary>
/// Method for shutting down the export client.
/// </summary>
/// <param name="timeoutMilliseconds">
/// The number of milliseconds to wait, or <c>Timeout.Infinite</c> to
/// wait indefinitely.
/// </param>
/// <returns>
/// Returns <c>true</c> if shutdown succeeded; otherwise, <c>false</c>.
/// </returns>
bool Shutdown(int timeoutMilliseconds);
}
| IExportClient |
csharp | jellyfin__jellyfin | MediaBrowser.Providers/MediaInfo/VideoImageProvider.cs | {
"start": 770,
"end": 5272
} | public class ____ : IDynamicImageProvider, IHasOrder
{
private readonly IMediaSourceManager _mediaSourceManager;
private readonly IMediaEncoder _mediaEncoder;
private readonly ILogger<VideoImageProvider> _logger;
/// <summary>
/// Initializes a new instance of the <see cref="VideoImageProvider"/> class.
/// </summary>
/// <param name="mediaSourceManager">The media source manager for fetching item streams.</param>
/// <param name="mediaEncoder">The media encoder for capturing images.</param>
/// <param name="logger">The logger.</param>
public VideoImageProvider(IMediaSourceManager mediaSourceManager, IMediaEncoder mediaEncoder, ILogger<VideoImageProvider> logger)
{
_mediaSourceManager = mediaSourceManager;
_mediaEncoder = mediaEncoder;
_logger = logger;
}
/// <inheritdoc />
public string Name => "Screen Grabber";
/// <inheritdoc />
// Make sure this comes after internet image providers
public int Order => 100;
/// <inheritdoc />
public IEnumerable<ImageType> GetSupportedImages(BaseItem item)
{
return new[] { ImageType.Primary };
}
/// <inheritdoc />
public Task<DynamicImageResponse> GetImage(BaseItem item, ImageType type, CancellationToken cancellationToken)
{
var video = (Video)item;
// No support for these
if (video.IsPlaceHolder || video.VideoType == VideoType.Dvd)
{
return Task.FromResult(new DynamicImageResponse { HasImage = false });
}
// Can't extract if we didn't find a video stream in the file
if (!video.DefaultVideoStreamIndex.HasValue)
{
_logger.LogInformation("Skipping image extraction due to missing DefaultVideoStreamIndex for {Path}.", video.Path ?? string.Empty);
return Task.FromResult(new DynamicImageResponse { HasImage = false });
}
return GetVideoImage(video, cancellationToken);
}
private async Task<DynamicImageResponse> GetVideoImage(Video item, CancellationToken cancellationToken)
{
MediaSourceInfo mediaSource = new MediaSourceInfo
{
VideoType = item.VideoType,
IsoType = item.IsoType,
Protocol = item.PathProtocol ?? MediaProtocol.File,
};
// If we know the duration, grab it from 10% into the video. Otherwise just 10 seconds in.
// Always use 10 seconds for dvd because our duration could be out of whack
var imageOffset = item.VideoType != VideoType.Dvd && item.RunTimeTicks > 0
? TimeSpan.FromTicks(item.RunTimeTicks.Value / 10)
: TimeSpan.FromSeconds(10);
var query = new MediaStreamQuery { ItemId = item.Id, Index = item.DefaultVideoStreamIndex };
var videoStream = _mediaSourceManager.GetMediaStreams(query).FirstOrDefault();
if (videoStream is null)
{
query.Type = MediaStreamType.Video;
query.Index = null;
videoStream = _mediaSourceManager.GetMediaStreams(query).FirstOrDefault();
}
if (videoStream is null)
{
_logger.LogInformation("Skipping image extraction: no video stream found for {Path}.", item.Path ?? string.Empty);
return new DynamicImageResponse { HasImage = false };
}
string extractedImagePath = await _mediaEncoder.ExtractVideoImage(item.Path, item.Container, mediaSource, videoStream, item.Video3DFormat, imageOffset, cancellationToken).ConfigureAwait(false);
return new DynamicImageResponse
{
Format = ImageFormat.Jpg,
HasImage = true,
Path = extractedImagePath,
Protocol = MediaProtocol.File
};
}
/// <inheritdoc />
public bool Supports(BaseItem item)
{
if (item.IsShortcut)
{
return false;
}
if (!item.IsFileProtocol)
{
return false;
}
return item is Video video && !video.IsPlaceHolder && video.IsCompleteMedia;
}
}
}
| VideoImageProvider |
csharp | ChilliCream__graphql-platform | src/StrawberryShake/CodeGeneration/test/CodeGeneration.CSharp.Tests/Integration/AnyScalarDefaultSerializationTest.Client.cs | {
"start": 17279,
"end": 18192
} | private sealed class ____ : System.IServiceProvider, System.IDisposable
{
private readonly System.IServiceProvider _provider;
public ClientServiceProvider(System.IServiceProvider provider)
{
_provider = provider;
}
public object? GetService(System.Type serviceType)
{
return _provider.GetService(serviceType);
}
public void Dispose()
{
if (_provider is System.IDisposable d)
{
d.Dispose();
}
}
}
}
}
namespace StrawberryShake.CodeGeneration.CSharp.Integration.AnyScalarDefaultSerialization
{
// StrawberryShake.CodeGeneration.CSharp.Generators.ResultTypeGenerator
[global::System.CodeDom.Compiler.GeneratedCode("StrawberryShake", "11.0.0")]
| ClientServiceProvider |
csharp | ChilliCream__graphql-platform | src/Nitro/CommandLine/src/CommandLine.Cloud/Generated/ApiClient.Client.cs | {
"start": 3313612,
"end": 3313892
} | public partial interface ____ : ISelectMockCommand_MockEdge
{
}
/// <summary>
/// An edge in a connection.
/// </summary>
[global::System.CodeDom.Compiler.GeneratedCode("StrawberryShake", "15.1.8.0")]
| ISelectMockSchemaPromptQuery_ApiById_MockSchemas_Edges |
csharp | dotnet__orleans | test/Extensions/TesterAzureUtils/Persistence/PersistenceGrainTests_AzureTableGrainStorage.cs | {
"start": 3292,
"end": 4784
} | private class ____ : ISiloConfigurator
{
public void Configure(ISiloBuilder hostBuilder)
{
hostBuilder
.AddAzureTableGrainStorage("GrainStorageForTest", builder => builder.Configure<IOptions<ClusterOptions>>((options, silo) =>
{
options.ConfigureTestDefaults();
options.DeleteStateOnClear = true;
}))
.AddAzureTableGrainStorage("AzureStore1", builder => builder.Configure<IOptions<ClusterOptions>>((options, silo) =>
{
options.ConfigureTestDefaults();
}))
.AddAzureTableGrainStorage("AzureStore2", builder => builder.Configure<IOptions<ClusterOptions>>((options, silo) =>
{
options.ConfigureTestDefaults();
}))
.AddAzureTableGrainStorage("AzureStore3", builder => builder.Configure<IOptions<ClusterOptions>>((options, silo) =>
{
options.ConfigureTestDefaults();
}))
.AddMemoryGrainStorage("MemoryStore");
}
}
}
public PersistenceGrainTests_AzureTableGrainStorage_DeleteStateOnClear(ITestOutputHelper output, Fixture fixture) :
base(output, fixture)
{
fixture.EnsurePreconditionsMet();
}
}
| MySiloBuilderConfigurator |
csharp | EventStore__EventStore | src/KurrentDB.Core.Tests/Services/Storage/RepeatableDbTestScenario.cs | {
"start": 747,
"end": 4056
} | public abstract class ____<TLogFormat, TStreamId> : SpecificationWithDirectoryPerTestFixture {
protected readonly int MaxEntriesInMemTable;
protected TableIndex<TStreamId> TableIndex;
protected IReadIndex<TStreamId> ReadIndex;
protected LogFormatAbstractor<TStreamId> _logFormat;
protected DbResult DbRes;
protected TFChunkDbCreationHelper<TLogFormat, TStreamId> DbCreationHelper;
private readonly int _metastreamMaxCount;
protected RepeatableDbTestScenario(int maxEntriesInMemTable = 20, int metastreamMaxCount = 1) {
Ensure.Positive(maxEntriesInMemTable, "maxEntriesInMemTable");
MaxEntriesInMemTable = maxEntriesInMemTable;
_metastreamMaxCount = metastreamMaxCount;
}
public async ValueTask CreateDb(Rec[] records, CancellationToken token = default) {
if (DbRes is not null) {
await DbRes.Db.DisposeAsync();
}
var indexDirectory = GetFilePathFor("index");
_logFormat = LogFormatHelper<TLogFormat, TStreamId>.LogFormatFactory.Create(new() {
IndexDirectory = indexDirectory,
});
var dbConfig = TFChunkHelper.CreateSizedDbConfig(PathName, 0, chunkSize: 1024 * 1024);
var dbHelper = await TFChunkDbCreationHelper<TLogFormat, TStreamId>.CreateAsync(dbConfig, _logFormat, token: token);
DbRes = await dbHelper.Chunk(records).CreateDb(token: token);
DbRes.Db.Config.WriterCheckpoint.Flush();
DbRes.Db.Config.ChaserCheckpoint.Write(DbRes.Db.Config.WriterCheckpoint.Read());
DbRes.Db.Config.ChaserCheckpoint.Flush();
var reader = new TFChunkReader(DbRes.Db, DbRes.Db.Config.WriterCheckpoint);
var lowHasher = _logFormat.LowHasher;
var highHasher = _logFormat.HighHasher;
var emptyStreamId = _logFormat.EmptyStreamId;
TableIndex = new TableIndex<TStreamId>(indexDirectory, lowHasher, highHasher, emptyStreamId,
() => new HashListMemTable(PTableVersions.IndexV3, MaxEntriesInMemTable * 2),
reader,
PTableVersions.IndexV3,
int.MaxValue,
MaxEntriesInMemTable);
_logFormat.StreamNamesProvider.SetTableIndex(TableIndex);
var readIndex = new ReadIndex<TStreamId>(new NoopPublisher(),
reader,
TableIndex,
_logFormat.StreamNameIndexConfirmer,
_logFormat.StreamIds,
_logFormat.StreamNamesProvider,
_logFormat.EmptyStreamId,
_logFormat.StreamIdValidator,
_logFormat.StreamIdSizer,
_logFormat.StreamExistenceFilter,
_logFormat.StreamExistenceFilterReader,
_logFormat.EventTypeIndexConfirmer,
new NoLRUCache<TStreamId, IndexBackend<TStreamId>.EventNumberCached>(),
new NoLRUCache<TStreamId, IndexBackend<TStreamId>.MetadataCached>(),
additionalCommitChecks: true,
metastreamMaxCount: _metastreamMaxCount,
hashCollisionReadLimit: Opts.HashCollisionReadLimitDefault,
skipIndexScanOnReads: Opts.SkipIndexScanOnReadsDefault,
replicationCheckpoint: DbRes.Db.Config.ReplicationCheckpoint,
indexCheckpoint: DbRes.Db.Config.IndexCheckpoint,
indexStatusTracker: new IndexStatusTracker.NoOp(),
indexTracker: new IndexTracker.NoOp(),
cacheTracker: new CacheHitsMissesTracker.NoOp());
await readIndex.IndexCommitter.Init(DbRes.Db.Config.ChaserCheckpoint.Read(), CancellationToken.None);
ReadIndex = readIndex;
}
public override async Task TestFixtureTearDown() {
_logFormat?.Dispose();
await DbRes.Db.DisposeAsync();
await base.TestFixtureTearDown();
}
}
| RepeatableDbTestScenario |
csharp | dotnet__efcore | src/EFCore/ValueGeneration/HiLoValueGeneratorState.cs | {
"start": 5519,
"end": 5914
} | private sealed class ____(long low, long high)
{
public long Low { get; } = low;
public long High { get; } = high;
public HiLoValue NextValue()
=> new(Low + 1, High);
}
/// <summary>
/// Releases the allocated resources for this instance.
/// </summary>
public virtual void Dispose()
=> _semaphoreSlim.Dispose();
}
| HiLoValue |
csharp | dotnet__aspnetcore | src/FileProviders/Embedded/src/Manifest/ManifestParser.cs | {
"start": 411,
"end": 5308
} | internal static class ____
{
private const string DefaultManifestName = "Microsoft.Extensions.FileProviders.Embedded.Manifest.xml";
public static EmbeddedFilesManifest Parse(Assembly assembly)
{
return Parse(assembly, DefaultManifestName);
}
public static EmbeddedFilesManifest Parse(Assembly assembly, string name)
{
ArgumentNullThrowHelper.ThrowIfNull(assembly);
ArgumentNullThrowHelper.ThrowIfNull(name);
var stream = assembly.GetManifestResourceStream(name);
if (stream == null)
{
throw new InvalidOperationException($"Could not load the embedded file manifest " +
$"'{name}' for assembly '{assembly.GetName().Name}'.");
}
var document = XDocument.Load(stream);
var manifest = EnsureElement(document, "Manifest");
var manifestVersion = EnsureElement(manifest, "ManifestVersion");
var version = EnsureText(manifestVersion);
if (!string.Equals("1.0", version, StringComparison.Ordinal))
{
throw new InvalidOperationException($"The embedded file manifest '{name}' for " +
$"assembly '{assembly.GetName().Name}' specifies an unsupported file format" +
$" version: '{version}'.");
}
var fileSystem = EnsureElement(manifest, "FileSystem");
var entries = fileSystem.Elements();
var entriesList = new List<ManifestEntry>();
foreach (var element in entries)
{
var entry = BuildEntry(element);
entriesList.Add(entry);
}
ValidateEntries(entriesList);
var rootDirectory = ManifestDirectory.CreateRootDirectory(entriesList.ToArray());
return new EmbeddedFilesManifest(rootDirectory);
}
private static void ValidateEntries(List<ManifestEntry> entriesList)
{
for (int i = 0; i < entriesList.Count - 1; i++)
{
for (int j = i + 1; j < entriesList.Count; j++)
{
if (string.Equals(entriesList[i].Name, entriesList[j].Name, StringComparison.OrdinalIgnoreCase))
{
throw new InvalidOperationException(
"Found two entries with the same name but different casing:" +
$" '{entriesList[i].Name}' and '{entriesList[j]}'");
}
}
}
}
private static ManifestEntry BuildEntry(XElement element)
{
RuntimeHelpers.EnsureSufficientExecutionStack();
if (element.NodeType != XmlNodeType.Element)
{
throw new InvalidOperationException($"Invalid manifest format. Expected a 'File' or a 'Directory' node:" +
$" '{element}'");
}
if (string.Equals(element.Name.LocalName, "File", StringComparison.Ordinal))
{
var entryName = EnsureName(element);
var path = EnsureElement(element, "ResourcePath");
var pathValue = EnsureText(path);
return new ManifestFile(entryName, pathValue);
}
if (string.Equals(element.Name.LocalName, "Directory", StringComparison.Ordinal))
{
var directoryName = EnsureName(element);
var children = new List<ManifestEntry>();
foreach (var child in element.Elements())
{
children.Add(BuildEntry(child));
}
ValidateEntries(children);
return ManifestDirectory.CreateDirectory(directoryName, children.ToArray());
}
throw new InvalidOperationException($"Invalid manifest format.Expected a 'File' or a 'Directory' node. " +
$"Got '{element.Name.LocalName}' instead.");
}
private static XElement EnsureElement(XContainer container, string elementName)
{
var element = container.Element(elementName);
if (element == null)
{
throw new InvalidOperationException($"Invalid manifest format. Missing '{elementName}' element name");
}
return element;
}
private static string EnsureName(XElement element)
{
var value = element.Attribute("Name")?.Value;
if (value == null)
{
throw new InvalidOperationException($"Invalid manifest format. '{element.Name}' must contain a 'Name' attribute.");
}
return value;
}
private static string EnsureText(XElement element)
{
if (!element.Elements().Any() &&
!element.IsEmpty &&
element.Nodes().Count() == 1 &&
element.FirstNode?.NodeType == XmlNodeType.Text)
{
return element.Value;
}
throw new InvalidOperationException(
$"Invalid manifest format. '{element.Name.LocalName}' must contain " +
$"a text value. '{element.Value}'");
}
}
| ManifestParser |
csharp | MahApps__MahApps.Metro | src/MahApps.Metro/Controls/Helper/MultiSelectorHelper.cs | {
"start": 682,
"end": 3860
} | public static class ____
{
public static readonly DependencyProperty SelectedItemsProperty
= DependencyProperty.RegisterAttached(
"SelectedItems",
typeof(IList),
typeof(MultiSelectorHelper),
new FrameworkPropertyMetadata(null, OnSelectedItemsChanged));
/// <summary>
/// Handles disposal and creation of old and new bindings
/// </summary>
private static void OnSelectedItemsChanged(DependencyObject d, DependencyPropertyChangedEventArgs e)
{
if (d is not (ListBox or MultiSelector or MultiSelectionComboBox))
{
throw new ArgumentException("The property 'SelectedItems' may only be set on ListBox, MultiSelector or MultiSelectionComboBox elements.");
}
if (e.OldValue != e.NewValue)
{
GetSelectedItemBinding(d)?.UnBind();
if (e.NewValue is IList newList)
{
var multiSelectorBinding = new MultiSelectorBinding((Selector)d, newList);
SetSelectedItemBinding(d, multiSelectorBinding);
multiSelectorBinding.Bind();
}
else
{
SetSelectedItemBinding(d, null);
}
}
}
/// <summary>
/// Gets the selected items property binding
/// </summary>
[Category(AppName.MahApps)]
[AttachedPropertyBrowsableForType(typeof(ListBox))]
[AttachedPropertyBrowsableForType(typeof(MultiSelector))]
[AttachedPropertyBrowsableForType(typeof(MultiSelectionComboBox))]
public static IList? GetSelectedItems(DependencyObject element)
{
return (IList?)element.GetValue(SelectedItemsProperty);
}
/// <summary>
/// Sets the selected items property binding
/// </summary>
[Category(AppName.MahApps)]
[AttachedPropertyBrowsableForType(typeof(ListBox))]
[AttachedPropertyBrowsableForType(typeof(MultiSelector))]
[AttachedPropertyBrowsableForType(typeof(MultiSelectionComboBox))]
public static void SetSelectedItems(DependencyObject element, IList? value)
{
element.SetValue(SelectedItemsProperty, value);
}
private static readonly DependencyProperty SelectedItemBindingProperty
= DependencyProperty.RegisterAttached(
"SelectedItemBinding",
typeof(MultiSelectorBinding),
typeof(MultiSelectorHelper));
private static MultiSelectorBinding? GetSelectedItemBinding(DependencyObject element)
{
return (MultiSelectorBinding?)element.GetValue(SelectedItemBindingProperty);
}
private static void SetSelectedItemBinding(DependencyObject element, MultiSelectorBinding? value)
{
element.SetValue(SelectedItemBindingProperty, value);
}
/// <summary>
/// Defines a binding between <see cref="Selector"/> and collection.
/// </summary>
| MultiSelectorHelper |
csharp | Xabaril__AspNetCore.Diagnostics.HealthChecks | src/HealthChecks.Uris/DependencyInjection/UrisHealthCheckBuilderExtensions.cs | {
"start": 226,
"end": 14977
} | public static class ____
{
private const string NAME = "uri-group";
/// <summary>
/// Add a health check for single uri.
/// </summary>
/// <param name="builder">The <see cref="IHealthChecksBuilder"/>.</param>
/// <param name="uri">The uri to check.</param>
/// <param name="name">The health check name. Optional. If <c>null</c> the type name 'uri-group' will be used for the name.</param>
/// <param name="failureStatus">
/// The <see cref="HealthStatus"/> that should be reported when the health check fails. Optional. If <c>null</c> then
/// the default status of <see cref="HealthStatus.Unhealthy"/> will be reported.
/// </param>
/// <param name="tags">A list of tags that can be used to filter sets of health checks. Optional.</param>
/// <param name="timeout">An optional <see cref="TimeSpan"/> representing the timeout of the check.</param>
/// <param name="configureClient">An optional setup action to configure the Uris HealthCheck http client.</param>
/// <param name="configurePrimaryHttpMessageHandler">An optional setup action to configure the Uris HealthCheck http client message handler.</param>
/// <returns>The specified <paramref name="builder"/>.</returns>
public static IHealthChecksBuilder AddUrlGroup(
this IHealthChecksBuilder builder,
Uri uri,
string? name = default,
HealthStatus? failureStatus = default,
IEnumerable<string>? tags = default,
TimeSpan? timeout = default,
Action<IServiceProvider, HttpClient>? configureClient = null,
Func<IServiceProvider, HttpMessageHandler>? configurePrimaryHttpMessageHandler = null)
{
var registrationName = name ?? NAME;
ConfigureUrisClient(builder, configureClient, configurePrimaryHttpMessageHandler, registrationName);
return builder.Add(new HealthCheckRegistration(
registrationName,
sp =>
{
var options = new UriHealthCheckOptions()
.AddUri(uri);
return CreateHealthCheck(sp, registrationName, options);
},
failureStatus,
tags,
timeout));
}
/// <summary>
/// Add a health check for single uri.
/// </summary>
/// <param name="builder">The <see cref="IHealthChecksBuilder"/>.</param>
/// <param name="uri">The uri to check.</param>
/// <param name="httpMethod">The http method to use on check.</param>
/// <param name="name">The health check name. Optional. If <c>null</c> the type name 'uri-group' will be used for the name.</param>
/// <param name="failureStatus">
/// The <see cref="HealthStatus"/> that should be reported when the health check fails. Optional. If <c>null</c> then
/// the default status of <see cref="HealthStatus.Unhealthy"/> will be reported.
/// </param>
/// <param name="tags">A list of tags that can be used to filter sets of health checks. Optional.</param>
/// <param name="timeout">An optional <see cref="TimeSpan"/> representing the timeout of the check.</param>
/// <param name="configureClient">An optional setup action to configure the Uris HealthCheck http client.</param>
/// <param name="configurePrimaryHttpMessageHandler">An optional setup action to configure the Uris HealthCheck http client message handler.</param>
/// <returns>The specified <paramref name="builder"/>.</returns>
public static IHealthChecksBuilder AddUrlGroup(
this IHealthChecksBuilder builder,
Uri uri,
HttpMethod httpMethod,
string? name = default,
HealthStatus? failureStatus = default,
IEnumerable<string>? tags = default,
TimeSpan? timeout = default,
Action<IServiceProvider, HttpClient>? configureClient = null,
Func<IServiceProvider, HttpMessageHandler>? configurePrimaryHttpMessageHandler = null)
{
var registrationName = name ?? NAME;
ConfigureUrisClient(builder, configureClient, configurePrimaryHttpMessageHandler, registrationName);
return builder.Add(new HealthCheckRegistration(
registrationName,
sp =>
{
var options = new UriHealthCheckOptions()
.AddUri(uri)
.UseHttpMethod(httpMethod);
return CreateHealthCheck(sp, registrationName, options);
},
failureStatus,
tags,
timeout));
}
/// <summary>
/// Add a health check for multiple uri's.
/// </summary>
/// <param name="builder">The <see cref="IHealthChecksBuilder"/>.</param>
/// <param name="uris">The collection of uri's to be checked.</param>
/// <param name="name">The health check name. Optional. If <c>null</c> the type name 'uri-group' will be used for the name.</param>
/// <param name="failureStatus">
/// The <see cref="HealthStatus"/> that should be reported when the health check fails. Optional. If <c>null</c> then
/// the default status of <see cref="HealthStatus.Unhealthy"/> will be reported.
/// </param>
/// <param name="tags">A list of tags that can be used to filter sets of health checks. Optional.</param>
/// <param name="timeout">An optional <see cref="TimeSpan"/> representing the timeout of the check.</param>
/// <param name="configureClient">An optional setup action to configure the Uris HealthCheck http client.</param>
/// <param name="configurePrimaryHttpMessageHandler">An optional setup action to configure the Uris HealthCheck http client message handler.</param>
/// <returns>The specified <paramref name="builder"/>.</returns>
public static IHealthChecksBuilder AddUrlGroup(
this IHealthChecksBuilder builder, IEnumerable<Uri> uris,
string? name = default,
HealthStatus? failureStatus = default,
IEnumerable<string>? tags = default,
TimeSpan? timeout = default,
Action<IServiceProvider, HttpClient>? configureClient = null,
Func<IServiceProvider, HttpMessageHandler>? configurePrimaryHttpMessageHandler = null)
{
var registrationName = name ?? NAME;
ConfigureUrisClient(builder, configureClient, configurePrimaryHttpMessageHandler, registrationName);
return builder.Add(new HealthCheckRegistration(
registrationName,
sp => CreateHealthCheck(sp, registrationName, UriHealthCheckOptions.CreateFromUris(uris)),
failureStatus,
tags,
timeout));
}
/// <summary>
/// Add a health check for multiple uri's.
/// </summary>
/// <param name="builder">The <see cref="IHealthChecksBuilder"/>.</param>
/// <param name="uris">The collection of uri's to be checked.</param>
/// <param name="httpMethod">The http method to be used.</param>
/// <param name="name">The health check name. Optional. If <c>null</c> the type name 'uri-group' will be used for the name.</param>
/// <param name="failureStatus">
/// The <see cref="HealthStatus"/> that should be reported when the health check fails. Optional. If <c>null</c> then
/// the default status of <see cref="HealthStatus.Unhealthy"/> will be reported.
/// </param>
/// <param name="tags">A list of tags that can be used to filter sets of health checks. Optional.</param>
/// <param name="timeout">An optional <see cref="TimeSpan"/> representing the timeout of the check.</param>
/// <param name="configureClient">An optional setup action to configure the Uris HealthCheck http client.</param>
/// <param name="configurePrimaryHttpMessageHandler">An optional setup action to configure the Uris HealthCheck http client message handler.</param>
/// <returns>The specified <paramref name="builder"/>.</returns>
public static IHealthChecksBuilder AddUrlGroup(
this IHealthChecksBuilder builder,
IEnumerable<Uri> uris,
HttpMethod httpMethod,
string? name = default,
HealthStatus? failureStatus = default,
IEnumerable<string>? tags = default,
TimeSpan? timeout = default,
Action<IServiceProvider, HttpClient>? configureClient = null,
Func<IServiceProvider, HttpMessageHandler>? configurePrimaryHttpMessageHandler = null)
{
var registrationName = name ?? NAME;
ConfigureUrisClient(builder, configureClient, configurePrimaryHttpMessageHandler, registrationName);
return builder.Add(new HealthCheckRegistration(
registrationName,
sp =>
{
var options = UriHealthCheckOptions
.CreateFromUris(uris)
.UseHttpMethod(httpMethod);
return CreateHealthCheck(sp, registrationName, options);
},
failureStatus,
tags,
timeout));
}
/// <summary>
/// Add a health check for multiple uri's.
/// </summary>
/// <param name="builder">The <see cref="IHealthChecksBuilder"/>.</param>
/// <param name="uriOptions">The action used to configured uri values and specified http methods to be checked.</param>
/// <param name="name">The health check name. Optional. If <c>null</c> the type name 'uri-group' will be used for the name.</param>
/// <param name="failureStatus">
/// The <see cref="HealthStatus"/> that should be reported when the health check fails. Optional. If <c>null</c> then
/// the default status of <see cref="HealthStatus.Unhealthy"/> will be reported.
/// </param>
/// <param name="tags">A list of tags that can be used to filter sets of health checks. Optional.</param>
/// <param name="timeout">An optional <see cref="TimeSpan"/> representing the timeout of the check.</param>
/// <param name="configureClient">An optional setup action to configure the Uris HealthCheck http client.</param>
/// <param name="configurePrimaryHttpMessageHandler">An optional setup action to configure the Uris HealthCheck http client message handler.</param>
/// <returns>The specified <paramref name="builder"/>.</returns>
public static IHealthChecksBuilder AddUrlGroup(
this IHealthChecksBuilder builder,
Action<UriHealthCheckOptions>? uriOptions,
string? name = default,
HealthStatus? failureStatus = default,
IEnumerable<string>? tags = default,
TimeSpan? timeout = default,
Action<IServiceProvider, HttpClient>? configureClient = null,
Func<IServiceProvider, HttpMessageHandler>? configurePrimaryHttpMessageHandler = null)
{
var registrationName = name ?? NAME;
ConfigureUrisClient(builder, configureClient, configurePrimaryHttpMessageHandler, registrationName);
return builder.Add(new HealthCheckRegistration(
registrationName,
sp =>
{
var options = new UriHealthCheckOptions();
uriOptions?.Invoke(options);
return CreateHealthCheck(sp, registrationName, options);
},
failureStatus,
tags,
timeout));
}
/// <summary>
/// Add a health check for single uri.
/// </summary>
/// <param name="builder">The <see cref="IHealthChecksBuilder"/>.</param>
/// <param name="uriProvider">Factory for providing the uri to check.</param>
/// <param name="name">The health check name. Optional. If <c>null</c> the type name 'uri-group' will be used for the name.</param>
/// <param name="failureStatus"></param>
/// The <see cref="HealthStatus"/> that should be reported when the health check fails. Optional. If <c>null</c> then
/// the default status of <see cref="HealthStatus.Unhealthy"/> will be reported.
/// <param name="tags">A list of tags that can be used to filter sets of health checks. Optional.</param>
/// <param name="timeout">An optional <see cref="TimeSpan"/> representing the timeout of the check.</param>
/// <param name="configureClient">An optional setup action to configure the Uris HealthCheck http client.</param>
/// <param name="configurePrimaryHttpMessageHandler">An optional setup action to configure the Uris HealthCheck http client message handler.</param>
/// <returns>The specified <paramref name="builder"/>.</returns>
public static IHealthChecksBuilder AddUrlGroup(
this IHealthChecksBuilder builder,
Func<IServiceProvider, Uri> uriProvider,
string? name = null,
HealthStatus? failureStatus = null,
IEnumerable<string>? tags = null,
TimeSpan? timeout = null,
Action<IServiceProvider, HttpClient>? configureClient = null,
Func<IServiceProvider, HttpMessageHandler>? configurePrimaryHttpMessageHandler = null)
{
var registrationName = name ?? NAME;
ConfigureUrisClient(builder, configureClient, configurePrimaryHttpMessageHandler, registrationName);
return builder.Add(
new HealthCheckRegistration(
registrationName,
sp =>
{
var uri = uriProvider(sp);
var uriHealthCheckOptions = new UriHealthCheckOptions().AddUri(uri, null);
var httpClientFactory = sp.GetRequiredService<IHttpClientFactory>();
return new UriHealthCheck(
uriHealthCheckOptions,
() => httpClientFactory.CreateClient(registrationName));
},
failureStatus,
tags,
timeout));
}
private static UriHealthCheck CreateHealthCheck(IServiceProvider sp, string name, UriHealthCheckOptions options)
{
var httpClientFactory = sp.GetRequiredService<IHttpClientFactory>();
return new UriHealthCheck(options, () => httpClientFactory.CreateClient(name));
}
private static readonly Action<IServiceProvider, HttpClient> _emptyHttpClientCallback = (_, _) => { };
private static readonly Func<IServiceProvider, HttpMessageHandler> _defaultHttpMessageHandlerCallback = _ => new HttpClientHandler();
private static void ConfigureUrisClient(
IHealthChecksBuilder builder,
Action<IServiceProvider, HttpClient>? configureHttpclient,
Func<IServiceProvider, HttpMessageHandler>? configurePrimaryHttpMessageHandler,
string registrationName)
{
builder.Services.AddHttpClient(registrationName)
.ConfigureHttpClient(configureHttpclient ?? _emptyHttpClientCallback)
.ConfigurePrimaryHttpMessageHandler(configurePrimaryHttpMessageHandler ?? _defaultHttpMessageHandlerCallback);
}
}
| UrisHealthCheckBuilderExtensions |
csharp | OrchardCMS__OrchardCore | src/OrchardCore/OrchardCore.XmlRpc.Abstractions/Services/IXmlRpcWriter.cs | {
"start": 724,
"end": 1119
} | struct ____ XML.
/// </summary>
/// <param name="rpcStruct">The rpc struct.</param>
/// <returns>The XML element.</returns>
XElement MapStruct(XRpcStruct rpcStruct);
/// <summary>
/// Maps a rpc array to XML.
/// </summary>
/// <param name="rpcArray">The rpc array.</param>
/// <returns>The XML element.</returns>
XElement MapArray(XRpcArray rpcArray);
}
| to |
csharp | dotnet__aspnetcore | src/Http/Routing/test/UnitTests/DefaultInlineConstraintResolverTest.cs | {
"start": 362,
"end": 11821
} | public class ____
{
private readonly IInlineConstraintResolver _constraintResolver;
public DefaultInlineConstraintResolverTest()
{
var routeOptions = new RouteOptions();
routeOptions.SetParameterPolicy<RegexInlineRouteConstraint>("regex");
_constraintResolver = GetInlineConstraintResolver(routeOptions);
}
[Fact]
public void ResolveConstraint_RequiredConstraint_ResolvesCorrectly()
{
// Arrange & Act
var constraint = _constraintResolver.ResolveConstraint("required");
// Assert
Assert.IsType<RequiredRouteConstraint>(constraint);
}
[Fact]
public void ResolveConstraint_IntConstraint_ResolvesCorrectly()
{
// Arrange & Act
var constraint = _constraintResolver.ResolveConstraint("int");
// Assert
Assert.IsType<IntRouteConstraint>(constraint);
}
[Fact]
public void ResolveConstraint_IntConstraintWithArgument_Throws()
{
// Arrange, Act & Assert
var ex = Assert.Throws<RouteCreationException>(
() => _constraintResolver.ResolveConstraint("int(5)"));
Assert.Equal("Could not find a constructor for constraint type 'IntRouteConstraint'" +
" with the following number of parameters: 1.",
ex.Message);
}
[Fact]
public void ResolveConstraint_AlphaConstraint()
{
// Arrange & Act
var constraint = _constraintResolver.ResolveConstraint("alpha");
// Assert
Assert.IsType<AlphaRouteConstraint>(constraint);
}
[Fact]
public void ResolveConstraint_RegexInlineConstraint_WithAComma_PassesAsASingleArgument()
{
// Arrange & Act
var constraint = _constraintResolver.ResolveConstraint("regex(ab,1)");
// Assert
Assert.IsType<RegexInlineRouteConstraint>(constraint);
}
[Fact]
public void ResolveConstraint_RegexInlineConstraint_WithCurlyBraces_Balanced()
{
// Arrange & Act
var constraint = _constraintResolver.ResolveConstraint(
@"regex(\\b(?<month>\\d{1,2})/(?<day>\\d{1,2})/(?<year>\\d{2,4})\\b)");
// Assert
Assert.IsType<RegexInlineRouteConstraint>(constraint);
}
[Fact]
public void ResolveConstraint_BoolConstraint()
{
// Arrange & Act
var constraint = _constraintResolver.ResolveConstraint("bool");
// Assert
Assert.IsType<BoolRouteConstraint>(constraint);
}
[Fact]
public void ResolveConstraint_CompositeConstraintIsNotRegistered()
{
// Arrange, Act & Assert
Assert.Null(_constraintResolver.ResolveConstraint("composite"));
}
[Fact]
public void ResolveConstraint_DateTimeConstraint()
{
// Arrange & Act
var constraint = _constraintResolver.ResolveConstraint("datetime");
// Assert
Assert.IsType<DateTimeRouteConstraint>(constraint);
}
[Fact]
public void ResolveConstraint_DecimalConstraint()
{
// Arrange & Act
var constraint = _constraintResolver.ResolveConstraint("decimal");
// Assert
Assert.IsType<DecimalRouteConstraint>(constraint);
}
[Fact]
public void ResolveConstraint_DoubleConstraint()
{
// Arrange & Act
var constraint = _constraintResolver.ResolveConstraint("double");
// Assert
Assert.IsType<DoubleRouteConstraint>(constraint);
}
[Fact]
public void ResolveConstraint_FloatConstraint()
{
// Arrange & Act
var constraint = _constraintResolver.ResolveConstraint("float");
// Assert
Assert.IsType<FloatRouteConstraint>(constraint);
}
[Fact]
public void ResolveConstraint_GuidConstraint()
{
// Arrange & Act
var constraint = _constraintResolver.ResolveConstraint("guid");
// Assert
Assert.IsType<GuidRouteConstraint>(constraint);
}
[Fact]
public void ResolveConstraint_IntConstraint()
{
// Arrange & Act
var constraint = _constraintResolver.ResolveConstraint("int");
// Assert
Assert.IsType<IntRouteConstraint>(constraint);
}
[Fact]
public void ResolveConstraint_LengthConstraint()
{
// Arrange & Act
var constraint = _constraintResolver.ResolveConstraint("length(5)");
// Assert
Assert.IsType<LengthRouteConstraint>(constraint);
Assert.Equal(5, ((LengthRouteConstraint)constraint).MinLength);
Assert.Equal(5, ((LengthRouteConstraint)constraint).MaxLength);
}
[Fact]
public void ResolveConstraint_LengthRangeConstraint()
{
// Arrange & Act
var constraint = _constraintResolver.ResolveConstraint("length(5, 10)");
// Assert
var lengthConstraint = Assert.IsType<LengthRouteConstraint>(constraint);
Assert.Equal(5, lengthConstraint.MinLength);
Assert.Equal(10, lengthConstraint.MaxLength);
}
[Fact]
public void ResolveConstraint_LongRangeConstraint()
{
// Arrange & Act
var constraint = _constraintResolver.ResolveConstraint("long");
// Assert
Assert.IsType<LongRouteConstraint>(constraint);
}
[Fact]
public void ResolveConstraint_MaxConstraint()
{
    // max(10) must resolve to MaxRouteConstraint with the parsed bound.
    var resolved = _constraintResolver.ResolveConstraint("max(10)");

    var maxConstraint = Assert.IsType<MaxRouteConstraint>(resolved);
    Assert.Equal(10, maxConstraint.Max);
}
[Fact]
public void ResolveConstraint_MaxLengthConstraint()
{
    // maxlength(10) must resolve to MaxLengthRouteConstraint with the parsed bound.
    var resolved = _constraintResolver.ResolveConstraint("maxlength(10)");

    var maxLengthConstraint = Assert.IsType<MaxLengthRouteConstraint>(resolved);
    Assert.Equal(10, maxLengthConstraint.MaxLength);
}
[Fact]
public void ResolveConstraint_MinConstraint()
{
    // min(3) must resolve to MinRouteConstraint with the parsed bound.
    var resolved = _constraintResolver.ResolveConstraint("min(3)");

    var minConstraint = Assert.IsType<MinRouteConstraint>(resolved);
    Assert.Equal(3, minConstraint.Min);
}
[Fact]
public void ResolveConstraint_MinLengthConstraint()
{
    // minlength(3) must resolve to MinLengthRouteConstraint with the parsed bound.
    var resolved = _constraintResolver.ResolveConstraint("minlength(3)");

    var minLengthConstraint = Assert.IsType<MinLengthRouteConstraint>(resolved);
    Assert.Equal(3, minLengthConstraint.MinLength);
}
[Fact]
public void ResolveConstraint_RangeConstraint()
{
    // range(5, 10) must resolve to RangeRouteConstraint carrying both bounds.
    var resolved = _constraintResolver.ResolveConstraint("range(5, 10)");

    var rangeConstraint = Assert.IsType<RangeRouteConstraint>(resolved);
    Assert.Equal(5, rangeConstraint.Min);
    Assert.Equal(10, rangeConstraint.Max);
}
[Fact]
public void ResolveConstraint_SupportsCustomConstraints()
{
    // A type registered directly in the ConstraintMap is resolvable by its key.
    var options = new RouteOptions();
    options.ConstraintMap.Add("custom", typeof(CustomRouteConstraint));
    var resolver = GetInlineConstraintResolver(options);

    var resolved = resolver.ResolveConstraint("custom(argument)");

    Assert.IsType<CustomRouteConstraint>(resolved);
}
[Fact]
public void ResolveConstraint_SupportsCustomConstraintsUsingNonGenericOverload()
{
    // SetParameterPolicy(string, Type) registers a custom constraint under a key.
    var options = new RouteOptions();
    options.SetParameterPolicy("custom", typeof(CustomRouteConstraint));
    var resolver = GetInlineConstraintResolver(options);

    var resolved = resolver.ResolveConstraint("custom(argument)");

    Assert.IsType<CustomRouteConstraint>(resolved);
}
[Fact]
public void SetParameterPolicyThrowsIfTypeIsNotIParameterPolicy()
{
    // Registering a type that does not implement IParameterPolicy must be rejected.
    var options = new RouteOptions();

    var ex = Assert.Throws<InvalidOperationException>(
        () => options.SetParameterPolicy("custom", typeof(string)));
    Assert.Equal("System.String must implement Microsoft.AspNetCore.Routing.IParameterPolicy", ex.Message);
}
[Fact]
public void ResolveConstraint_SupportsCustomConstraintsUsingGenericOverloads()
{
    // SetParameterPolicy<T>(string) registers a custom constraint under a key.
    var options = new RouteOptions();
    options.SetParameterPolicy<CustomRouteConstraint>("custom");
    var resolver = GetInlineConstraintResolver(options);

    var resolved = resolver.ResolveConstraint("custom(argument)");

    Assert.IsType<CustomRouteConstraint>(resolved);
}
[Fact]
public void ResolveConstraint_CustomConstraintThatDoesNotImplementIRouteConstraint_Throws()
{
    // Mapping a key to a type that is not an IRouteConstraint fails at resolution time.
    var options = new RouteOptions();
    options.ConstraintMap.Add("custom", typeof(string));
    var resolver = GetInlineConstraintResolver(options);

    var ex = Assert.Throws<RouteCreationException>(() => resolver.ResolveConstraint("custom"));
    Assert.Equal("The constraint type 'System.String' which is mapped to constraint key 'custom'" +
        " must implement the 'IRouteConstraint' interface.",
        ex.Message);
}
[Fact]
public void ResolveConstraint_AmbiguousConstructors_Throws()
{
    // Two constructors with the same arity make activation ambiguous, which must fail.
    var options = new RouteOptions();
    options.ConstraintMap.Add("custom", typeof(MultiConstructorRouteConstraint));
    var resolver = GetInlineConstraintResolver(options);

    var ex = Assert.Throws<RouteCreationException>(() => resolver.ResolveConstraint("custom(5,6)"));
    Assert.Equal("The constructor to use for activating the constraint type 'MultiConstructorRouteConstraint' is ambiguous." +
        " Multiple constructors were found with the following number of parameters: 2.",
        ex.Message);
}
// These are cases which parsing does not catch and we'll end up here
// NOTE(review): despite the "_Throws" suffix, this test asserts that resolution
// returns null for malformed constraint text rather than throwing — the name
// appears stale relative to the current behavior; confirm before renaming.
[Theory]
[InlineData("regex(abc")]
[InlineData("int/")]
[InlineData("in{t")]
public void ResolveConstraint_Invalid_Throws(string constraint)
{
// Arrange
var routeOptions = new RouteOptions();
var resolver = GetInlineConstraintResolver(routeOptions);
// Act & Assert
Assert.Null(resolver.ResolveConstraint(constraint));
}
[Fact]
public void ResolveConstraint_NoMatchingConstructor_Throws()
{
    // IntRouteConstraint has no two-argument constructor, so int(5,6) must fail.
    var ex = Assert.Throws<RouteCreationException>(
        () => _constraintResolver.ResolveConstraint("int(5,6)"));
    Assert.Equal("Could not find a constructor for constraint type 'IntRouteConstraint'" +
        " with the following number of parameters: 2.",
        ex.Message);
}
// Builds a DefaultInlineConstraintResolver over the given options via a mocked IOptions wrapper.
private IInlineConstraintResolver GetInlineConstraintResolver(RouteOptions routeOptions)
{
    var mockOptions = new Mock<IOptions<RouteOptions>>();
    mockOptions.SetupGet(o => o.Value).Returns(routeOptions);
    return new DefaultInlineConstraintResolver(mockOptions.Object, new TestServiceProvider());
}
| DefaultInlineConstraintResolverTest |
csharp | EventStore__EventStore | src/KurrentDB.Core.Tests/ClientAPI/create_persistent_subscription.cs | {
"start": 8298,
"end": 8962
} | // Verifies that creating a persistent subscription on the $all stream succeeds.
// NOTE(review): the class name was a placeholder ("____"); restored from the
// adjacent metadata — confirm against the original file.
public class create_persistent_subscription_on_all : SpecificationWithMiniNode
{
// Result of the create call, captured in When() for the assertion below.
private PersistentSubscriptionCreateResult _result;
// Default-style settings: no link resolution, start from the current position.
private readonly PersistentSubscriptionSettings _settings = PersistentSubscriptionSettingsBuilder.Create()
.DoNotResolveLinkTos()
.StartFromCurrent();
protected override void When()
{
_result = _conn.CreatePersistentSubscriptionForAllAsync("group", _settings, DefaultData.AdminCredentials).Result;
}
[Test]
public void the_completion_succeeds()
{
Assert.AreEqual(PersistentSubscriptionCreateStatus.Success, _result.Status);
}
}
[TestFixture, Category("LongRunning")]
| create_persistent_subscription_on_all |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.