language stringclasses 1 value | repo stringclasses 133 values | path stringlengths 13 229 | class_span dict | source stringlengths 14 2.92M | target stringlengths 1 153 |
|---|---|---|---|---|---|
csharp | ChilliCream__graphql-platform | src/Nitro/CommandLine/src/CommandLine.Cloud/Generated/ApiClient.Client.cs | {
"start": 3419804,
"end": 3422981
} | public partial class ____ : global::StrawberryShake.Serialization.IInputObjectFormatter
{
private global::StrawberryShake.Serialization.IInputValueFormatter _iDFormatter = default !;
private global::StrawberryShake.Serialization.IInputValueFormatter _stageUpdateInputFormatter = default !;
public global::System.String TypeName => "UpdateStagesInput";
public void Initialize(global::StrawberryShake.Serialization.ISerializerResolver serializerResolver)
{
_iDFormatter = serializerResolver.GetInputValueFormatter("ID");
_stageUpdateInputFormatter = serializerResolver.GetInputValueFormatter("StageUpdateInput");
}
public global::System.Object? Format(global::System.Object? runtimeValue)
{
if (runtimeValue is null)
{
return null;
}
var input = runtimeValue as global::ChilliCream.Nitro.CommandLine.Cloud.Client.UpdateStagesInput;
var inputInfo = runtimeValue as global::ChilliCream.Nitro.CommandLine.Cloud.Client.State.IUpdateStagesInputInfo;
if (input is null || inputInfo is null)
{
throw new global::System.ArgumentException(nameof(runtimeValue));
}
var fields = new global::System.Collections.Generic.List<global::System.Collections.Generic.KeyValuePair<global::System.String, global::System.Object?>>();
if (inputInfo.IsApiIdSet)
{
fields.Add(new global::System.Collections.Generic.KeyValuePair<global::System.String, global::System.Object?>("apiId", FormatApiId(input.ApiId)));
}
if (inputInfo.IsUpdatedStagesSet)
{
fields.Add(new global::System.Collections.Generic.KeyValuePair<global::System.String, global::System.Object?>("updatedStages", FormatUpdatedStages(input.UpdatedStages)));
}
return fields;
}
private global::System.Object? FormatApiId(global::System.String input)
{
if (input is null)
{
throw new global::System.ArgumentNullException(nameof(input));
}
return _iDFormatter.Format(input);
}
private global::System.Object? FormatUpdatedStages(global::System.Collections.Generic.IReadOnlyList<global::ChilliCream.Nitro.CommandLine.Cloud.Client.StageUpdateInput> input)
{
if (input is null)
{
throw new global::System.ArgumentNullException(nameof(input));
}
var input_list = new global::System.Collections.Generic.List<global::System.Object?>();
foreach (var input_elm in input)
{
if (input_elm is null)
{
throw new global::System.ArgumentNullException(nameof(input_elm));
}
input_list.Add(_stageUpdateInputFormatter.Format(input_elm));
}
return input_list;
}
}
[global::System.CodeDom.Compiler.GeneratedCode("StrawberryShake", "15.1.8.0")]
| UpdateStagesInputInputValueFormatter |
csharp | cake-build__cake | src/Cake.Common/Tools/NUnit/NUnitSettings.cs | {
"start": 394,
"end": 6426
} | public sealed class ____ : ToolSettings
{
/// <summary>
/// Initializes a new instance of the <see cref="NUnitSettings" /> class.
/// </summary>
public NUnitSettings()
{
ShadowCopy = true;
}
/// <summary>
/// Gets or sets the name of the XML result file.
/// </summary>
/// <value>
/// The name of the XML result file. Defaults to <c>TestResult.xml</c>.
/// </value>
public FilePath ResultsFile { get; set; }
/// <summary>
/// Gets or sets a value indicating whether to generate the XML result file.
/// </summary>
/// <value>
/// <c>true</c> if the XML result file should be generated; otherwise, <c>false</c>.
/// </value>
public bool NoResults { get; set; }
/// <summary>
/// Gets or sets the version of the runtime to be used when executing tests.
/// </summary>
/// <value>
/// The version of the runtime to be used when executing tests.
/// </value>
public string Framework { get; set; }
/// <summary>
/// Gets or sets the categories to include in a run.
/// </summary>
/// <value>The categories to include in a run.</value>
public string Include { get; set; }
/// <summary>
/// Gets or sets the categories to exclude from a run.
/// </summary>
/// <value>
/// The categories to exclude from a run.
/// </value>
public string Exclude { get; set; }
/// <summary>
/// Gets or sets the default timeout to be used for test cases in this run.
/// If any test exceeds the timeout value, it is cancelled and reported as an error.
/// </summary>
/// <value>The default timeout to be used for test cases in this run.</value>
public int? Timeout { get; set; }
/// <summary>
/// Gets or sets a value indicating whether tests should be run as a shadow copy.
/// Default value is <c>true</c>.
/// </summary>
/// <value>
/// <c>true</c> if tests should be run as a shadow copy; otherwise, <c>false</c>.
/// </value>
public bool ShadowCopy { get; set; }
/// <summary>
/// Gets or sets a value indicating whether the main thread should be used for running tests.
/// </summary>
/// <value>
/// <c>true</c> if the main thread should be used for running tests; otherwise, <c>false</c>.
/// </value>
public bool NoThread { get; set; }
/// <summary>
/// Gets or sets a value indicating whether to show copyright information at the start of the program.
/// </summary>
/// <value>
/// <c>true</c> if to show copyright information at the start of the program; otherwise, <c>false</c>.
/// </value>
public bool NoLogo { get; set; }
/// <summary>
/// Gets or sets a value indicating whether execution of the test run should terminate
/// immediately on the first test failure or error.
/// </summary>
/// <value>
/// <c>true</c> if execution of the test run should terminate immediately on the first test failure or error;
/// otherwise, <c>false</c>.
/// </value>
public bool StopOnError { get; set; }
/// <summary>
/// Gets or sets the amount of information that NUnit should write to its internal trace log.
/// </summary>
/// <value>The amount of information that NUnit should write to its internal trace log.</value>
public string Trace { get; set; }
/// <summary>
/// Gets or sets the location that NUnit should write test output.
/// </summary>
/// <value>The location that NUnit should write test output.</value>
public FilePath OutputFile { get; set; }
/// <summary>
/// Gets or sets the location that NUnit should write test error output.
/// </summary>
/// <value>The location that NUnit should write test error output.</value>
public FilePath ErrorOutputFile { get; set; }
/// <summary>
/// Gets or sets a value indicating how NUnit should load tests in processes.
/// The Default value is <see cref="NUnitProcessOption.Single"/>.
/// </summary>
public NUnitProcessOption Process { get; set; }
/// <summary>
/// Gets or sets a value indicating whether Single Threaded Apartment state (STA) will be used.
/// Corresponds to the /apartment command line option.
/// </summary>
public bool UseSingleThreadedApartment { get; set; }
/// <summary>
/// Gets or sets a value to control creation of AppDomains for running tests.
/// Corresponds to the /domain command line switch.
/// The default is to use multiple domains if multiple assemblies are listed on the command line.
/// Otherwise a single domain is used.
/// </summary>
public NUnitAppDomainUsage AppDomainUsage { get; set; }
/// <summary>
/// Gets or sets a value indicating whether to run tests in an x86 process on 64 bit systems.
/// </summary>
/// <value>
/// <c>true</c> to run tests in an x86 process on 64 bit systems; otherwise, <c>false</c>.
/// </value>
public bool X86 { get; set; }
/// <summary>
/// Gets or sets a value indicating whether to cause an identifying label to be displayed at the start of each test case.
/// </summary>
/// <value>
/// <c>true</c> to cause an identifying label to be displayed at the start of each test case; otherwise, <c>false</c>.
/// </value>
public bool Labels { get; set; }
}
} | NUnitSettings |
csharp | dotnet__orleans | src/Orleans.Journaling/StateMachineManager.cs | {
"start": 22436,
"end": 23315
} | private sealed class ____(
StateMachineManager manager,
IFieldCodec<string> keyCodec,
IFieldCodec<ulong> valueCodec,
SerializerSessionPool serializerSessionPool) : DurableDictionary<string, ulong>(keyCodec, valueCodec, serializerSessionPool)
{
public const int Id = 0;
private readonly StateMachineManager _manager = manager;
public void ResetVolatileState() => ((IDurableStateMachine)this).Reset(new StateMachineLogWriter(_manager, new(Id)));
protected override void OnSet(string key, ulong value) => _manager.OnSetStateMachineId(key, value);
}
/// <summary>
/// Used to track state machines that are not registered via user-code anymore, until time-based purging has elapsed.
/// </summary>
/// <remarks>Resurrecting of retired machines is supported.</remarks>
| StateMachineManagerState |
csharp | icsharpcode__ILSpy | ICSharpCode.Decompiler/TypeSystem/IType.cs | {
"start": 6660,
"end": 7087
} | class ____ have the value copied from the outer type
/// if it is a parameterized type. Otherwise, those existing type parameters will be self-parameterized,
/// and thus 'leaked' to the caller in the same way the GetMembers() method does not specialize members
/// from an <see cref="ITypeDefinition"/> and 'leaks' type parameters in member signatures.
/// </para>
/// </remarks>
/// <example>
/// <code>
/// | will |
csharp | ChilliCream__graphql-platform | .build/Build.GitHub.cs | {
"start": 34,
"end": 1104
} | partial class ____
{
[Parameter] readonly string GitHubToken;
[Parameter] readonly string CodeCovToken;
/// <summary>
/// ChilliCream/hotchocolate
/// </summary>
[Parameter] readonly string GitHubRepository = Environment.GetEnvironmentVariable("GITHUB_REPOSITORY");
/// <summary>
/// Unique identifier of your PR. Must correspond to the key of the PR in GitHub.
/// E.G.: sonar.pullrequest.key=5
/// </summary>
[Parameter] readonly string GitHubPRNumber = Environment.GetEnvironmentVariable("HC_GITHUB_PR_NR");
/// <summary>
/// The name of your PR
/// Ex: sonar.pullrequest.branch=feature/my-new-feature
/// </summary>
[Parameter] readonly string GitHubHeadRef = Environment.GetEnvironmentVariable("HC_GITHUB_HEAD_REF");
/// <summary>
/// The long-lived branch into which the PR will be merged.
/// Default: master
/// E.G.: sonar.pullrequest.base=master
/// </summary>
[Parameter] readonly string GitHubBaseRef = Environment.GetEnvironmentVariable("HC_GITHUB_BASE_REF");
}
| Build |
csharp | getsentry__sentry-dotnet | benchmarks/Sentry.Benchmarks/Extensions.Logging/SentryStructuredLoggerBenchmarks.cs | {
"start": 2057,
"end": 2607
} | private sealed class ____
{
public LogRecord(LogLevel logLevel, EventId eventId, Exception? exception, string? message, params object?[] args)
{
LogLevel = logLevel;
EventId = eventId;
Exception = exception;
Message = message;
Args = args;
}
public LogLevel LogLevel { get; }
public EventId EventId { get; }
public Exception? Exception { get; }
public string? Message { get; }
public object?[] Args { get; }
}
}
| LogRecord |
csharp | AvaloniaUI__Avalonia | src/Avalonia.Base/Rendering/Composition/Server/ServerCompositorAnimations.cs | {
"start": 85,
"end": 1217
} | internal class ____
{
private readonly HashSet<IServerClockItem> _clockItems = new();
private readonly List<IServerClockItem> _clockItemsToUpdate = new();
private readonly HashSet<ServerObjectAnimations> _dirtyAnimatedObjects = new();
private readonly Queue<ServerObjectAnimations> _dirtyAnimatedObjectQueue = new();
public void AddToClock(IServerClockItem item) =>
_clockItems.Add(item);
public void RemoveFromClock(IServerClockItem item) =>
_clockItems.Remove(item);
public void Process()
{
foreach (var animation in _clockItems)
_clockItemsToUpdate.Add(animation);
foreach (var animation in _clockItemsToUpdate)
animation.OnTick();
_clockItemsToUpdate.Clear();
while (_dirtyAnimatedObjectQueue.Count > 0)
_dirtyAnimatedObjectQueue.Dequeue().EvaluateAnimations();
_dirtyAnimatedObjects.Clear();
}
public void AddDirtyAnimatedObject(ServerObjectAnimations obj)
{
if (_dirtyAnimatedObjects.Add(obj))
_dirtyAnimatedObjectQueue.Enqueue(obj);
}
} | ServerCompositorAnimations |
csharp | protobuf-net__protobuf-net | src/protobuf-net/Meta/MetaType.cs | {
"start": 82134,
"end": 96817
} | private enum ____ : ushort
{
None = 0,
Pending = 1,
// EnumPassThru = 2,
Frozen = 4,
PrivateOnApi = 8,
SkipConstructor = 16,
#if FEAT_DYNAMIC_REF
AsReferenceDefault = 32,
#endif
AutoTuple = 64,
IgnoreListHandling = 128,
IsGroup = 256,
IgnoreUnknownSubTypes = 512,
}
private volatile TypeOptions flags;
private bool HasFlag(TypeOptions flag) { return (flags & flag) == flag; }
private void SetFlag(TypeOptions flag, bool value, bool throwIfFrozen)
{
if (throwIfFrozen && HasFlag(flag) != value)
{
ThrowIfFrozen();
}
if (value)
flags |= flag;
else
flags &= ~flag;
}
private Type _serializerType;
/// <summary>
/// Specify a custom serializer for this type
/// </summary>
public Type SerializerType
{
get => _serializerType;
set
{
if (value != _serializerType)
{
if (!value.IsClass)
ThrowHelper.ThrowArgumentException("Custom serializer providers must be classes", nameof(SerializerType));
ThrowIfFrozen();
_serializerType = value;
}
}
}
internal static MetaType GetRootType(MetaType source)
{
while (source._serializer is object)
{
MetaType tmp = source.baseType;
if (tmp is null) return source;
source = tmp; // else loop until we reach something that isn't generated, or is the root
}
// now we get into uncertain territory
RuntimeTypeModel model = source.model;
int opaqueToken = 0;
try
{
model.TakeLock(ref opaqueToken);
MetaType tmp;
while ((tmp = source.baseType) is not null) source = tmp;
return source;
}
finally
{
model.ReleaseLock(opaqueToken);
}
}
internal bool IsPrepared()
{
return _serializer is CompiledSerializer;
}
internal static StringBuilder NewLine(StringBuilder builder, int indent)
{
return builder.AppendLine().Append(' ', indent * 3);
}
internal bool IsAutoTuple => HasFlag(TypeOptions.AutoTuple);
/// <summary>
/// Indicates whether this type should always be treated as a "group" (rather than a string-prefixed sub-message)
/// </summary>
public bool IsGroup
{
get { return HasFlag(TypeOptions.IsGroup); }
set { SetFlag(TypeOptions.IsGroup, value, true); }
}
internal void WriteSchema(HashSet<Type> callstack, StringBuilder builder, int indent, HashSet<string> imports, ProtoSyntax syntax,
string package, SchemaGenerationFlags flags)
{
if (surrogateType is not null) return; // nothing to write
bool multipleNamespaceSupport = (flags & SchemaGenerationFlags.MultipleNamespaceSupport) != 0;
bool isEnumNamePrefixSupported = (flags & SchemaGenerationFlags.IncludeEnumNamePrefix) != 0;
var repeated = model.TryGetRepeatedProvider(Type);
if (repeated is not null)
{
NewLine(builder, indent).Append("message ").Append(GetSchemaTypeName(callstack)).Append(" {");
if (repeated.IsValidProtobufMap(model, CompatibilityLevel, DataFormat.Default))
{
repeated.ResolveMapTypes(out var key, out var value);
NewLine(builder, indent + 1).Append("map<")
.Append(model.GetSchemaTypeName(callstack, key, DataFormat.Default, CompatibilityLevel, false, false, imports))
.Append(", ")
.Append(model.GetSchemaTypeName(callstack, value, DataFormat.Default, CompatibilityLevel, false, false, imports))
.Append("> items = 1;");
}
else
{
NewLine(builder, indent + 1).Append("repeated ")
.Append(model.GetSchemaTypeName(callstack, repeated.ItemType, DataFormat.Default, CompatibilityLevel, false, false, imports))
.Append(" items = 1;");
}
NewLine(builder, indent).Append('}');
}
else if (IsAutoTuple)
{ // key-value-pair etc
if (ResolveTupleConstructor(Type, out MemberInfo[] mapping) is not null)
{
NewLine(builder, indent).Append("message ").Append(GetSchemaTypeName(callstack)).Append(" {");
AddNamespace(imports);
for (int i = 0; i < mapping.Length; i++)
{
Type effectiveType;
if (mapping[i] is PropertyInfo property)
{
effectiveType = property.PropertyType;
}
else if (mapping[i] is FieldInfo field)
{
effectiveType = field.FieldType;
}
else
{
throw new NotSupportedException("Unknown member type: " + mapping[i].GetType().Name);
}
NewLine(builder, indent + 1).Append(syntax == ProtoSyntax.Proto2 ? "optional " : "")
.Append(model.GetSchemaTypeName(callstack, effectiveType, DataFormat.Default, CompatibilityLevel, false, false, imports))
.Append(' ').Append(mapping[i].Name).Append(" = ").Append(i + 1).Append(';');
}
NewLine(builder, indent).Append('}');
}
}
else if (Type.IsEnum)
{
var enums = GetEnumValues();
string enumNamePrefix = isEnumNamePrefixSupported ? $"{GetSchemaTypeName(callstack)}_" : "";
bool allValid = IsValidEnum(enums);
if (!allValid) NewLine(builder, indent).Append("/* for context only");
NewLine(builder, indent).Append("enum ").Append(GetSchemaTypeName(callstack)).Append(" {");
AddNamespace(imports);
if (Type.IsDefined(typeof(FlagsAttribute), true))
{
NewLine(builder, indent + 1).Append("// this is a composite/flags enumeration");
}
bool needsAlias = false; // check whether we need to allow duplicate names
var uniqueFields = new HashSet<int>();
foreach (var field in enums)
{
var parsed = field.TryGetInt32();
if (parsed.HasValue && !uniqueFields.Add(parsed.Value))
{
needsAlias = true;
break;
}
}
if (needsAlias)
{ // duplicated value requires allow_alias
NewLine(builder, indent + 1).Append("option allow_alias = true;");
}
bool haveWrittenZero = false;
// write zero values **first**
foreach (var member in enums)
{
var parsed = member.TryGetInt32();
if (parsed.HasValue && parsed.Value == 0)
{
NewLine(builder, indent + 1).Append(enumNamePrefix).Append(member.Name).Append(" = 0;");
haveWrittenZero = true;
}
}
if (syntax == ProtoSyntax.Proto3 && !haveWrittenZero)
{
NewLine(builder, indent + 1).Append(enumNamePrefix).Append("ZERO").Append(" = 0;")
.Append(" // proto3 requires a zero value as the first item (it can be named anything)");
}
// note array is already sorted, so zero would already be first
foreach (var member in enums)
{
var parsed = member.TryGetInt32();
if (parsed.HasValue)
{
if (parsed.Value == 0) continue;
NewLine(builder, indent + 1).Append(enumNamePrefix).Append(member.Name).Append(" = ").Append(parsed.Value).Append(';');
}
else
{
NewLine(builder, indent + 1).Append("// ").Append(enumNamePrefix).Append(member.Name)
.Append(" = ").Append(member.Value).Append(';').Append(" // note: enums should be valid 32-bit integers");
}
}
if (HasReservations) AppendReservations();
NewLine(builder, indent).Append('}');
if (!allValid) NewLine(builder, indent).Append("*/");
}
else
{
var extraLayeredMembers = new ExtraLayerValueMembers();
ValueMember[] fieldsArr = GetFields();
int beforeMessagePosition = builder.Length;
NewLine(builder, indent).Append("message ").Append(GetSchemaTypeName(callstack)).Append(" {");
AddNamespace(imports);
foreach (ValueMember member in fieldsArr)
{
string schemaTypeName;
string altName;
bool hasOption = false;
if (member.IsMap)
{
if (member.NullWrappedCollection || member.NullWrappedValue)
{
throw new NotSupportedException("Schema generation for null-wrapped maps and maps with null-wrapped values is not currently implemented; poke @mgravell with a big stick if you need this!");
}
repeated = model.TryGetRepeatedProvider(member.MemberType);
repeated.ResolveMapTypes(out var keyType, out var valueType);
var keyTypeName = model.GetSchemaTypeName(callstack, keyType, member.MapKeyFormat, CompatibilityLevel, false, false, imports);
schemaTypeName = model.GetSchemaTypeName(callstack, valueType, member.MapValueFormat, CompatibilityLevel, member.AsReference, member.DynamicType, imports);
NewLine(builder, indent + 1).Append("map<").Append(keyTypeName).Append(',').Append(schemaTypeName).Append("> ")
.Append(member.Name).Append(" = ").Append(member.FieldNumber).Append(';');
}
else
{
if (member.RequiresExtraLayerInSchema())
{
schemaTypeName = member.GetSchemaTypeName(callstack, true, imports, out altName);
var nullWrappedValueMemberData = extraLayeredMembers.Add(schemaTypeName, member);
WriteValueMember(
schemaModelTypeName: nullWrappedValueMemberData.WrappedSchemaTypeName,
hasGroupModifier: nullWrappedValueMemberData.HasGroupModifier);
}
else
{
var considerWrappersProtoTypes = member.HasExtendedNullSupport();
schemaTypeName = member.GetSchemaTypeName(callstack, true, imports, out altName, considerWrappersProtoTypes);
WriteValueMember(schemaTypeName, hasGroupModifier: member.RequiresGroupModifier);
}
void WriteValueMember(string schemaModelTypeName, bool hasGroupModifier = false)
{
if (member.NullWrappedCollection)
{
throw new NotSupportedException("Schema generation for null-wrapped collections is not currently implemented; poke @mgravell with a big stick if you need this!");
}
string ordinality = member.ItemType is not null ? "repeated " : (syntax == ProtoSyntax.Proto2 ? (member.IsRequired ? "required " : "optional ") : "");
NewLine(builder, indent + 1).Append(ordinality);
if (hasGroupModifier) builder.Append("group ");
else if (member.DataFormat == DataFormat.Group) builder.Append("group ");
builder.Append(schemaModelTypeName).Append(' ')
.Append(member.Name).Append(" = ").Append(member.FieldNumber);
if (syntax == ProtoSyntax.Proto2 && member.DefaultValue is not null && !member.IsRequired)
{
if (member.DefaultValue is string)
{
AddOption(builder, ref hasOption).Append("default = \"").Append(member.DefaultValue).Append('\"');
}
else if (member.DefaultValue is TimeSpan)
{
// ignore
}
else if (member.DefaultValue is bool boolValue)
{ // need to be lower case (issue 304)
AddOption(builder, ref hasOption).Append(boolValue ? "default = true" : "default = false");
}
else
{
object effectiveValue = member.DefaultValue;
if (effectiveValue is Enum && effectiveValue.GetType() == member.MemberType
&& model.IsDefined(member.MemberType, member.CompatibilityLevel))
{
// lookup the | TypeOptions |
csharp | AvaloniaUI__Avalonia | src/Browser/Avalonia.Browser/BrowserSystemNavigationManager.cs | {
"start": 100,
"end": 444
} | internal class ____ : ISystemNavigationManagerImpl
{
public event EventHandler<RoutedEventArgs>? BackRequested;
public bool OnBackRequested()
{
var routedEventArgs = new RoutedEventArgs();
BackRequested?.Invoke(this, routedEventArgs);
return routedEventArgs.Handled;
}
}
| BrowserSystemNavigationManagerImpl |
csharp | dotnet__efcore | src/EFCore.Relational/Diagnostics/CommandCorrelatedEventData.cs | {
"start": 500,
"end": 3356
} | public class ____ : DbContextEventData
{
/// <summary>
/// Constructs the event payload.
/// </summary>
/// <param name="eventDefinition">The event definition.</param>
/// <param name="messageGenerator">A delegate that generates a log message for this event.</param>
/// <param name="connection">The <see cref="DbConnection" /> being used.</param>
/// <param name="context">The <see cref="DbContext" /> currently being used, to null if not known.</param>
/// <param name="executeMethod">The <see cref="DbCommand" /> method.</param>
/// <param name="commandId">A correlation ID that identifies the <see cref="DbCommand" /> instance being used.</param>
/// <param name="connectionId">A correlation ID that identifies the <see cref="DbConnection" /> instance being used.</param>
/// <param name="async">Indicates whether or not the command was executed asynchronously.</param>
/// <param name="startTime">The start time of this event.</param>
/// <param name="commandSource">Source of the command.</param>
public CommandCorrelatedEventData(
EventDefinitionBase eventDefinition,
Func<EventDefinitionBase, EventData, string> messageGenerator,
DbConnection connection,
DbContext? context,
DbCommandMethod executeMethod,
Guid commandId,
Guid connectionId,
bool async,
DateTimeOffset startTime,
CommandSource commandSource)
: base(eventDefinition, messageGenerator, context)
{
Connection = connection;
CommandId = commandId;
ConnectionId = connectionId;
ExecuteMethod = executeMethod;
IsAsync = async;
StartTime = startTime;
CommandSource = commandSource;
}
/// <summary>
/// The <see cref="DbConnection" />.
/// </summary>
public virtual DbConnection Connection { get; }
/// <summary>
/// A correlation ID that identifies the <see cref="DbCommand" /> instance being used.
/// </summary>
public virtual Guid CommandId { get; }
/// <summary>
/// A correlation ID that identifies the <see cref="DbConnection" /> instance being used.
/// </summary>
public virtual Guid ConnectionId { get; }
/// <summary>
/// The <see cref="DbCommandMethod" /> method.
/// </summary>
public virtual DbCommandMethod ExecuteMethod { get; }
/// <summary>
/// Indicates whether or not the operation is being executed asynchronously.
/// </summary>
public virtual bool IsAsync { get; }
/// <summary>
/// The start time of this event.
/// </summary>
public virtual DateTimeOffset StartTime { get; }
/// <summary>
/// Source of the command.
/// </summary>
public virtual CommandSource CommandSource { get; }
}
| CommandCorrelatedEventData |
csharp | ChilliCream__graphql-platform | src/HotChocolate/Caching/src/Caching/Types/CacheControlScopeType.cs | {
"start": 744,
"end": 859
} | public static class ____
{
public const string CacheControlScope = nameof(CacheControlScope);
}
}
| Names |
csharp | duplicati__duplicati | Duplicati/Library/Main/Database/LocalRepairDatabase.cs | {
"start": 19040,
"end": 94789
} | private class ____ : IMissingBlockList
{
/// <summary>
/// The connection to the database
/// </summary>
private readonly SqliteConnection m_connection;
/// <summary>
/// The transaction to use
/// </summary>
private readonly ReusableTransaction m_rtr;
/// <summary>
/// Updates the "Restored" status of a block in the temporary missing blocks table for a given hash and size.
/// </summary>
private SqliteCommand m_insertCommand = null!;
/// <summary>
/// Inserts or ignores a block and its volume assignment into the "DuplicateBlock" table for a given hash and size.
/// </summary>
private SqliteCommand m_copyIntoDuplicatedBlocks = null!;
/// <summary>
/// Updates the "VolumeID" of a block in the "Block" table to assign it to a new volume for a given hash and size.
/// </summary>
private SqliteCommand m_assignBlocksToNewVolume = null!;
/// <summary>
/// Selects the hash and size of all missing blocks (where "Restored" is 0) from the temporary missing blocks table.
/// </summary>
private SqliteCommand m_missingBlocksCommand = null!;
/// <summary>
/// Counts the number of missing blocks (where "Restored" is 0) in the temporary missing blocks table.
/// </summary>
private SqliteCommand m_missingBlocksCountCommand = null!;
/// <summary>
/// The name of the temporary table.
/// </summary>
private readonly string m_tablename;
/// <summary>
/// The name of the volume where blocks are missing.
/// </summary>
private readonly string m_volumename;
/// <summary>
/// Whether the object has been disposed.
/// </summary>
private bool m_isDisposed = false;
/// <summary>
/// Creates a new missing block list.
/// </summary>
/// <param name="volumename">The name of the volume with missing blocks.</param>
/// <param name="connection">The connection to the database.</param>
/// <param name="rtr">The transaction to use.</param>
private MissingBlockList(string volumename, SqliteConnection connection, ReusableTransaction rtr)
{
m_connection = connection;
m_rtr = rtr;
m_volumename = volumename;
var tablename = $"MissingBlocks-{Library.Utility.Utility.GetHexGuid()}";
m_tablename = tablename;
}
/// <summary>
/// Creates a new instance of the missing block list.
/// </summary>
/// <param name="volumename">The name of the volume with missing blocks.</param>
/// <param name="connection">The connection to the database.</param>
/// <param name="transaction">The transaction to use.</param>
/// <param name="token">A cancellation token to cancel the operation.</param>
/// <returns>A task that when awaited returns a new instance of <see cref="IMissingBlockList"/>.</returns>
public static async Task<IMissingBlockList> CreateMissingBlockList(string volumename, SqliteConnection connection, ReusableTransaction transaction, CancellationToken token)
{
var blocklist = new MissingBlockList(volumename, connection, transaction);
await using (var cmd = connection.CreateCommand(transaction.Transaction))
{
await cmd.ExecuteNonQueryAsync($@"
CREATE TEMPORARY TABLE ""{blocklist.m_tablename}"" (
""Hash"" TEXT NOT NULL,
""Size"" INTEGER NOT NULL,
""Restored"" INTEGER NOT NULL
)
", token)
.ConfigureAwait(false);
cmd.SetCommandAndParameters($@"
INSERT INTO ""{blocklist.m_tablename}"" (
""Hash"",
""Size"",
""Restored""
)
SELECT DISTINCT
""Block"".""Hash"",
""Block"".""Size"",
0 AS ""Restored"" FROM ""Block"",
""Remotevolume""
WHERE
""Block"".""VolumeID"" = ""Remotevolume"".""ID""
AND ""Remotevolume"".""Name"" = @Name
")
.SetParameterValue("@Name", volumename);
var blockCount = await cmd.ExecuteNonQueryAsync(token)
.ConfigureAwait(false);
if (blockCount == 0)
throw new Exception($"Unexpected empty block volume: {0}");
await cmd.ExecuteNonQueryAsync($@"
CREATE UNIQUE INDEX ""{blocklist.m_tablename}-Ix""
ON ""{blocklist.m_tablename}"" (
""Hash"",
""Size"",
""Restored""
)
", token)
.ConfigureAwait(false);
}
blocklist.m_insertCommand = await connection.CreateCommandAsync($@"
UPDATE ""{blocklist.m_tablename}""
SET ""Restored"" = @NewRestoredValue
WHERE
""Hash"" = @Hash
AND ""Size"" = @Size
AND ""Restored"" = @PreviousRestoredValue
", token)
.ConfigureAwait(false);
blocklist.m_copyIntoDuplicatedBlocks = await connection.CreateCommandAsync(@"
INSERT OR IGNORE INTO ""DuplicateBlock"" (
""BlockID"",
""VolumeID""
)
SELECT
""Block"".""ID"",
""Block"".""VolumeID""
FROM ""Block""
WHERE
""Block"".""Hash"" = @Hash
AND ""Block"".""Size"" = @Size
", token)
.ConfigureAwait(false);
blocklist.m_assignBlocksToNewVolume = await connection.CreateCommandAsync(@"
UPDATE ""Block""
SET ""VolumeID"" = @TargetVolumeId
WHERE
""Hash"" = @Hash
AND ""Size"" = @Size
", token)
.ConfigureAwait(false);
var m_missingBlocksQuery = $@"
SELECT
""{blocklist.m_tablename}"".""Hash"",
""{blocklist.m_tablename}"".""Size""
FROM ""{blocklist.m_tablename}""
WHERE ""{blocklist.m_tablename}"".""Restored"" = @Restored ";
blocklist.m_missingBlocksCommand =
await connection.CreateCommandAsync(m_missingBlocksQuery, token)
.ConfigureAwait(false);
blocklist.m_missingBlocksCountCommand =
await connection.CreateCommandAsync($@"
SELECT COUNT(*)
FROM ({m_missingBlocksQuery})
", token)
.ConfigureAwait(false);
return blocklist;
}
/// <inheritdoc/>
public async Task<bool> SetBlockRestored(string hash, long size, long targetVolumeId, CancellationToken token)
{
var restored = await m_insertCommand
.SetTransaction(m_rtr)
.SetParameterValue("@NewRestoredValue", 1)
.SetParameterValue("@Hash", hash)
.SetParameterValue("@Size", size)
.SetParameterValue("@PreviousRestoredValue", 0)
.ExecuteNonQueryAsync(token)
.ConfigureAwait(false) == 1;
if (restored)
{
await m_copyIntoDuplicatedBlocks.SetTransaction(m_rtr)
.SetParameterValue("@Hash", hash)
.SetParameterValue("@Size", size)
.ExecuteNonQueryAsync(token)
.ConfigureAwait(false);
var c = await m_assignBlocksToNewVolume.SetTransaction(m_rtr)
.SetParameterValue("@TargetVolumeId", targetVolumeId)
.SetParameterValue("@Hash", hash)
.SetParameterValue("@Size", size)
.ExecuteNonQueryAsync(token)
.ConfigureAwait(false);
if (c != 1)
throw new Exception($"Unexpected number of updated blocks: {c} != 1");
}
return restored;
}
/// <inheritdoc/>
public async IAsyncEnumerable<BlockWithSourceData> GetSourceFilesWithBlocks(long blocksize, [EnumeratorCancellation] CancellationToken token)
{
    // Blocksize is baked into the SQL text (invariant-formatted) so the
    // offset expression "Index" * blocksize is computed by SQLite.
    var blocksizeText = Library.Utility.Utility.FormatInvariantValue(blocksize);

    // For every block still missing (Restored = 0), find each source file
    // that contains it and the byte offset of the block inside that file.
    await using var cmd = m_connection.CreateCommand($@"
        SELECT DISTINCT
            ""{m_tablename}"".""Hash"",
            ""{m_tablename}"".""Size"",
            ""File"".""Path"",
            ""BlocksetEntry"".""Index"" * {blocksizeText}
        FROM
            ""{m_tablename}"",
            ""Block"",
            ""BlocksetEntry"",
            ""File""
        WHERE
            ""File"".""BlocksetID"" = ""BlocksetEntry"".""BlocksetID""
            AND ""Block"".""ID"" = ""BlocksetEntry"".""BlockID""
            AND ""{m_tablename}"".""Hash"" = ""Block"".""Hash""
            AND ""{m_tablename}"".""Size"" = ""Block"".""Size""
            AND ""{m_tablename}"".""Restored"" = @Restored
        ORDER BY
            ""{m_tablename}"".""Hash"",
            ""{m_tablename}"".""Size"",
            ""File"".""Path"",
            ""BlocksetEntry"".""Index"" * {blocksizeText}
    ")
        .SetTransaction(m_rtr)
        .SetParameterValue("@Restored", 0);

    await foreach (var row in cmd.ExecuteReaderEnumerableAsync(token).ConfigureAwait(false))
    {
        yield return new BlockWithSourceData(
            row.ConvertValueToString(0) ?? throw new Exception("Hash value was null"),
            row.ConvertValueToInt64(1),
            row.ConvertValueToString(2) ?? throw new Exception("File value was null"),
            row.ConvertValueToInt64(3));
    }
}
/// <inheritdoc/>
public async IAsyncEnumerable<BlockWithMetadataSourceData> GetSourceItemsWithMetadataBlocks([EnumeratorCancellation] CancellationToken token)
{
    // For every block still missing (Restored = 0), find each filesystem
    // entry whose *metadata* blockset contains that block.
    await using var cmd = m_connection.CreateCommand($@"
        SELECT DISTINCT
            ""{m_tablename}"".""Hash"",
            ""{m_tablename}"".""Size"",
            ""File"".""Path""
        FROM
            ""{m_tablename}"",
            ""Block"",
            ""BlocksetEntry"",
            ""Metadataset"",
            ""File""
        WHERE
            ""File"".""MetadataID"" == ""Metadataset"".""ID""
            AND ""Metadataset"".""BlocksetID"" = ""BlocksetEntry"".""BlocksetID""
            AND ""Block"".""ID"" = ""BlocksetEntry"".""BlockID""
            AND ""{m_tablename}"".""Hash"" = ""Block"".""Hash""
            AND ""{m_tablename}"".""Size"" = ""Block"".""Size""
            AND ""{m_tablename}"".""Restored"" = @Restored
        ORDER BY
            ""{m_tablename}"".""Hash"",
            ""{m_tablename}"".""Size"",
            ""File"".""Path""
    ")
        .SetTransaction(m_rtr)
        .SetParameterValue("@Restored", 0);

    await foreach (var row in cmd.ExecuteReaderEnumerableAsync(token).ConfigureAwait(false))
    {
        yield return new BlockWithMetadataSourceData(
            row.ConvertValueToString(0) ?? throw new Exception("Hash value was null"),
            row.ConvertValueToInt64(1),
            row.ConvertValueToString(2) ?? throw new Exception("File value was null"));
    }
}
/// <inheritdoc/>
public async IAsyncEnumerable<BlocklistHashesEntry> GetBlocklistHashes(long hashesPerBlock, [EnumeratorCancellation] CancellationToken token)
{
    await using var cmd = m_connection.CreateCommand(m_rtr);
    // Per-invocation unique temp-table name so concurrent calls cannot collide.
    var blocklistTableName = $"BlocklistHashList-{Library.Utility.Utility.GetHexGuid()}";
    try
    {
        // We need to create a snapshot as we will be updating the m_tablename table during enumeration
        await cmd.SetCommandAndParameters($@"
            CREATE TEMPORARY TABLE ""{blocklistTableName}"" AS
            SELECT
                ""b"".""Hash"" AS ""BlockHash"",
                ""bs"".""Id"" AS ""BlocksetId"",
                ""bs"".""Length"" AS ""BlocksetLength"",
                ""bse"".""Index"" / @HashesPerBlock AS ""BlocklistHashIndex"",
                (
                    SELECT ""blh"".""Hash""
                    FROM ""BlocklistHash"" ""blh""
                    WHERE
                        ""blh"".""BlocksetID"" = ""bs"".""ID""
                        AND ""blh"".""Index"" == ""bse"".""Index"" / @HashesPerBlock
                    LIMIT 1
                ) AS ""BlocklistHashHash"",
                ""bse"".""Index"" AS ""BlocksetEntryIndex""
            FROM ""BlocksetEntry"" ""bse""
            JOIN ""Block"" ""b""
                ON ""b"".""ID"" = ""bse"".""BlockID""
            JOIN ""Blockset"" ""bs""
                ON ""bs"".""ID"" = ""bse"".""BlocksetID""
            WHERE EXISTS (
                SELECT 1
                FROM ""BlocklistHash"" ""blh""
                JOIN ""{m_tablename}"" ""mt""
                    ON ""mt"".""Hash"" = ""blh"".""Hash""
                WHERE
                    ""blh"".""BlocksetID"" = ""bs"".""ID""
                    AND ""mt"".""Restored"" = @Restored
            )
        ")
            .SetParameterValue("@HashesPerBlock", hashesPerBlock)
            .SetParameterValue("@Restored", 0)
            .ExecuteNonQueryAsync(token)
            .ConfigureAwait(false);

        // Enumerate the snapshot ordered so that entries belonging to the same
        // blockset / blocklist-hash come out contiguously, in index order.
        cmd.SetCommandAndParameters($@"
            SELECT
                ""BlockHash"",
                ""BlocksetId"",
                ""BlocksetLength"",
                ""BlocklistHashIndex"",
                ""BlocklistHashHash"",
                ""BlocksetEntryIndex""
            FROM ""{blocklistTableName}""
            ORDER BY
                ""BlocksetId"",
                ""BlocklistHashIndex"",
                ""BlocksetEntryIndex""
        ");

        await foreach (var rd in cmd.ExecuteReaderEnumerableAsync(token).ConfigureAwait(false))
        {
            var hash = rd.ConvertValueToString(0) ?? throw new Exception("Block.Hash is null");
            var blocksetId = rd.ConvertValueToInt64(1);
            var length = rd.ConvertValueToInt64(2);
            var blocklistHashIndex = rd.ConvertValueToInt64(3);
            var blocklistHash = rd.ConvertValueToString(4) ?? throw new Exception("BlocklistHash is null");
            var index = rd.ConvertValueToInt64(5);
            yield return new BlocklistHashesEntry(
                blocksetId,
                blocklistHash,
                length,
                blocklistHashIndex,
                (int)index,
                hash
            );
        }
    }
    finally
    {
        // Best-effort cleanup of the snapshot; ignore failures (e.g. the
        // connection may already be shutting down).
        try
        {
            await cmd.ExecuteNonQueryAsync($@"
                DROP TABLE IF EXISTS ""{blocklistTableName}""
            ", token)
                .ConfigureAwait(false);
        }
        catch { }
    }
}
/// <inheritdoc/>
public async Task<long> GetMissingBlockCount(CancellationToken token)
{
    // Count rows in the missing-block table that are not yet restored.
    var count = await m_missingBlocksCountCommand
        .SetTransaction(m_rtr)
        .SetParameterValue("@Restored", 0)
        .ExecuteScalarInt64Async(0, token)
        .ConfigureAwait(false);
    return count;
}
/// <summary>
/// Gets the list of missing blocks.
/// </summary>
/// <param name="token">A cancellation token to cancel the operation.</param>
/// <returns>An asynchronous enumerable of tuples containing the block hash and size.</returns>
public async IAsyncEnumerable<(string Hash, long Size)> GetMissingBlocks([EnumeratorCancellation] CancellationToken token)
{
    // Reuse the prepared statement; only the Restored filter is parameterized.
    var query = m_missingBlocksCommand
        .SetTransaction(m_rtr)
        .SetParameterValue("@Restored", 0);

    await foreach (var row in query.ExecuteReaderEnumerableAsync(token).ConfigureAwait(false))
    {
        var hash = row.ConvertValueToString(0) ?? "";
        var size = row.ConvertValueToInt64(1);
        yield return (hash, size);
    }
}
/// <inheritdoc/>
public async Task<long> MoveBlocksToNewVolume(long targetVolumeId, long sourceVolumeId, CancellationToken token)
{
    if (targetVolumeId <= 0)
        throw new ArgumentOutOfRangeException(nameof(targetVolumeId), "Target volume ID must be greater than 0");

    // Step 1: record the current placement of each restored block from the
    // source volume in the DuplicateBlock table (ignoring already-known
    // duplicates), so the old location is not forgotten.
    await using var command = m_connection.CreateCommand($@"
        INSERT OR IGNORE INTO ""DuplicateBlock"" (
            ""BlockID"",
            ""VolumeID""
        )
        SELECT
            ""b"".""ID"",
            ""b"".""VolumeID""
        FROM ""Block"" ""b""
        WHERE
            ""b"".""VolumeID"" = @SourceVolumeId
            AND (""b"".""Hash"", ""b"".""Size"") IN (
                SELECT
                    ""Hash"",
                    ""Size""
                FROM ""{m_tablename}""
                WHERE ""Restored"" = @Restored
            )
    ")
        .SetParameterValue("@SourceVolumeId", sourceVolumeId)
        .SetParameterValue("@Restored", 1);

    var duplicatedCount = await command.ExecuteNonQueryAsync(token)
        .ConfigureAwait(false);

    // Step 2: re-point those same blocks at the target volume.
    var retargetedCount = await command.SetCommandAndParameters($@"
        UPDATE ""Block""
        SET ""VolumeID"" = @TargetVolumeId
        WHERE
            ""VolumeID"" = @SourceVolumeId
            AND (""Hash"", ""Size"") IN (
                SELECT
                    ""Hash"",
                    ""Size""
                FROM ""{m_tablename}""
                WHERE ""Restored"" = @Restored
            )
    ")
        .SetParameterValue("@TargetVolumeId", targetVolumeId)
        .SetParameterValue("@SourceVolumeId", sourceVolumeId)
        .SetParameterValue("@Restored", 1)
        .ExecuteNonQueryAsync(token)
        .ConfigureAwait(false);

    // Both statements target the same row set, so the counts must agree.
    if (retargetedCount != duplicatedCount)
        throw new Exception($"Unexpected number of updated blocks: {retargetedCount} != {duplicatedCount}");

    return retargetedCount;
}
/// <inheritdoc/>
public async IAsyncEnumerable<IRemoteVolume> GetFilesetsUsingMissingBlocks([EnumeratorCancellation] CancellationToken token)
{
    // File IDs whose data blocksets reference a missing block.
    var fileIdsFromBlocks = $@"
        SELECT DISTINCT ""FileLookup"".""ID"" AS ID
        FROM
            ""{m_tablename}"",
            ""Block"",
            ""Blockset"",
            ""BlocksetEntry"",
            ""FileLookup""
        WHERE
            ""Block"".""Hash"" = ""{m_tablename}"".""Hash""
            AND ""Block"".""Size"" = ""{m_tablename}"".""Size""
            AND ""BlocksetEntry"".""BlockID"" = ""Block"".""ID""
            AND ""BlocksetEntry"".""BlocksetID"" = ""Blockset"".""ID""
            AND ""FileLookup"".""BlocksetID"" = ""Blockset"".""ID""
    ";

    // File IDs whose blocklist hashes reference a missing block.
    var fileIdsFromBlocklists = $@"
        SELECT DISTINCT ""FileLookup"".""ID"" AS ID
        FROM
            ""{m_tablename}"",
            ""Block"",
            ""Blockset"",
            ""BlocklistHash"",
            ""FileLookup""
        WHERE
            ""Block"".""Hash"" = ""{m_tablename}"".""Hash""
            AND ""Block"".""Size"" = ""{m_tablename}"".""Size""
            AND ""BlocklistHash"".""Hash"" = ""Block"".""Hash""
            AND ""BlocklistHash"".""BlocksetID"" = ""Blockset"".""ID""
            AND ""FileLookup"".""BlocksetID"" = ""Blockset"".""ID""
    ";

    // Resolve those file IDs to the fileset (dlist) volumes that contain them.
    var query = $@"
        SELECT DISTINCT
            ""RemoteVolume"".""Name"",
            ""RemoteVolume"".""Hash"",
            ""RemoteVolume"".""Size""
        FROM
            ""RemoteVolume"",
            ""FilesetEntry"",
            ""Fileset""
        WHERE
            ""RemoteVolume"".""ID"" = ""Fileset"".""VolumeID""
            AND ""Fileset"".""ID"" = ""FilesetEntry"".""FilesetID""
            AND ""RemoteVolume"".""Type"" = @Type
            AND ""FilesetEntry"".""FileID"" IN (
                SELECT DISTINCT ""ID""
                FROM (
                    {fileIdsFromBlocks} UNION {fileIdsFromBlocklists}
                )
            )
    ";

    await using var cmd = m_connection.CreateCommand(query)
        .SetTransaction(m_rtr)
        .SetParameterValue("@Type", RemoteVolumeType.Files.ToString());

    await foreach (var row in cmd.ExecuteReaderEnumerableAsync(token).ConfigureAwait(false))
    {
        var name = row.ConvertValueToString(0) ?? "";
        var hash = row.ConvertValueToString(1) ?? "";
        var size = row.ConvertValueToInt64(2);
        yield return new RemoteVolume(name, hash, size);
    }
}
/// <inheritdoc/>
public async IAsyncEnumerable<IRemoteVolume> GetMissingBlockSources([EnumeratorCancellation] CancellationToken token)
{
    // Find remote volumes (other than the one being restored) that hold a
    // copy of any block still missing (Restored = 0).
    await using var cmd = m_connection.CreateCommand($@"
        SELECT DISTINCT
            ""RemoteVolume"".""Name"",
            ""RemoteVolume"".""Hash"",
            ""RemoteVolume"".""Size""
        FROM
            ""RemoteVolume"",
            ""Block"",
            ""{m_tablename}""
        WHERE
            ""{m_tablename}"".""Restored"" = @Restored
            AND ""Block"".""Hash"" = ""{m_tablename}"".""Hash""
            AND ""Block"".""Size"" = ""{m_tablename}"".""Size""
            AND ""Block"".""VolumeID"" = ""RemoteVolume"".""ID""
            AND ""Remotevolume"".""Name"" != @Name
    ")
        .SetTransaction(m_rtr)
        .SetParameterValue("@Restored", 0)
        .SetParameterValue("@Name", m_volumename);

    await foreach (var row in cmd.ExecuteReaderEnumerableAsync(token).ConfigureAwait(false))
    {
        var name = row.ConvertValueToString(0) ?? "";
        var hash = row.ConvertValueToString(1) ?? "";
        var size = row.ConvertValueToInt64(2);
        yield return new RemoteVolume(name, hash, size);
    }
}
public void Dispose()
{
    // Synchronous bridge to the async disposal path; no-op when already disposed.
    if (!m_isDisposed)
        DisposeAsync().AsTask().Await();
}
public async ValueTask DisposeAsync()
{
    // Idempotent: the flag is set before cleanup so a re-entrant call
    // during disposal returns immediately.
    if (m_isDisposed)
        return;
    m_isDisposed = true;

    // Best-effort removal of the temporary table; failures are swallowed
    // because the connection may already be closed during shutdown.
    try
    {
        if (m_tablename != null)
        {
            await using var dropCommand = await m_connection.CreateCommandAsync($@"DROP TABLE IF EXISTS ""{m_tablename}""", default)
                .ConfigureAwait(false);
            await dropCommand.SetTransaction(m_rtr)
                .ExecuteNonQueryAsync()
                .ConfigureAwait(false);
        }
    }
    catch { }

    // Dispose the prepared statement, again best-effort.
    try { m_insertCommand?.Dispose(); }
    catch { }
}
}
/// <summary>
/// Creates a new missing block list for the specified volume.
/// </summary>
/// <param name="volumename">The name of the volume with missing blocks.</param>
/// <param name="token">A cancellation token to cancel the operation.</param>
/// <returns>A task that when awaited returns an instance of <see cref="IMissingBlockList"/>.</returns>
public async Task<IMissingBlockList> CreateBlockList(string volumename, CancellationToken token)
{
    // Delegate to the factory, which sets up the backing temporary table
    // before handing back the list instance.
    return await MissingBlockList
        .CreateMissingBlockList(volumename, m_connection, m_rtr, token)
        .ConfigureAwait(false);
}
/// <summary>
/// Fixes duplicate metadata hashes in the database.
/// </summary>
/// <param name="token">A cancellation token to cancel the operation.</param>
/// <returns>A task that when completed indicates that the repair has been attempted.</returns>
public async Task FixDuplicateMetahash(CancellationToken token)
{
    await using var cmd = m_connection.CreateCommand(m_rtr.Transaction);

    // Compares "number of distinct BlocksetIDs in Metadataset" with "total
    // rows in Metadataset" via a UNION of the two counts: if they are equal
    // the UNION collapses to a single value (result 1); a result above 1
    // means some BlocksetID appears in more than one Metadataset row.
    var sql_count = @"
        SELECT COUNT(*)
        FROM (
            SELECT DISTINCT ""C1""
            FROM (
                SELECT COUNT(*) AS ""C1""
                FROM (
                    SELECT DISTINCT ""BlocksetID""
                    FROM ""Metadataset""
                )
                UNION SELECT COUNT(*) AS ""C1""
                FROM ""Metadataset""
            )
        )
    ";

    var x = await cmd.ExecuteScalarInt64Async(sql_count, 0, token)
        .ConfigureAwait(false);

    if (x > 1)
    {
        Logging.Log.WriteInformationMessage(LOGTAG, "DuplicateMetadataHashes", "Found duplicate metadatahashes, repairing");

        // Work on a temporary copy of the File view, since FilesetEntry /
        // FileLookup will be rewritten from it below.
        var tablename = $"TmpFile-{Library.Utility.Utility.GetHexGuid()}";

        await cmd.ExecuteNonQueryAsync($@"
            CREATE TEMPORARY TABLE ""{tablename}""
            AS SELECT *
            FROM ""File""
        ", token)
            .ConfigureAwait(false);

        // For each BlocksetID with multiple Metadataset rows, pick the row
        // with the smallest ID as the canonical one.
        var sql = @"
            SELECT
                ""A"".""ID"",
                ""B"".""BlocksetID""
            FROM (
                SELECT
                    MIN(""ID"") AS ""ID"",
                    COUNT(""ID"") AS ""Duplicates""
                FROM ""Metadataset""
                GROUP BY ""BlocksetID""
            ) ""A"",
            ""Metadataset"" ""B""
            WHERE
                ""A"".""Duplicates"" > 1
                AND ""A"".""ID"" = ""B"".""ID""
        ";

        await using (var c2 = m_connection.CreateCommand(m_rtr.Transaction))
        {
            // Re-point file rows at the canonical Metadataset row, then
            // delete the redundant Metadataset rows.
            c2.SetCommandAndParameters($@"
                UPDATE ""{tablename}""
                SET ""MetadataID"" = @MetadataId
                WHERE ""MetadataID"" IN (
                    SELECT ""ID""
                    FROM ""Metadataset""
                    WHERE ""BlocksetID"" = @BlocksetId
                );
                DELETE FROM ""Metadataset""
                WHERE
                    ""BlocksetID"" = @BlocksetId
                    AND ""ID"" != @MetadataId
            ");

            await using var rd = await cmd.ExecuteReaderAsync(sql, token)
                .ConfigureAwait(false);
            while (await rd.ReadAsync(token).ConfigureAwait(false))
            {
                await c2
                    .SetParameterValue("@MetadataId", rd.GetValue(0))
                    .SetParameterValue("@BlocksetId", rd.GetValue(1))
                    .ExecuteNonQueryAsync(token)
                    .ConfigureAwait(false);
            }
        }

        // The metadata merge may have produced file rows that are now
        // identical (same Path/BlocksetID/MetadataID); find those groups.
        sql = $@"
            SELECT
                ""ID"",
                ""Path"",
                ""BlocksetID"",
                ""MetadataID"",
                ""Entries""
            FROM (
                SELECT
                    MIN(""ID"") AS ""ID"",
                    ""Path"",
                    ""BlocksetID"",
                    ""MetadataID"",
                    COUNT(*) as ""Entries""
                FROM ""{tablename}""
                GROUP BY
                    ""Path"",
                    ""BlocksetID"",
                    ""MetadataID""
            )
            WHERE ""Entries"" > 1
            ORDER BY ""ID""
        ";

        await using (var c2 = m_connection.CreateCommand(m_rtr.Transaction))
        {
            // Collapse duplicate file rows onto the one with the smallest ID,
            // updating FilesetEntry references accordingly.
            c2.SetCommandAndParameters($@"
                UPDATE ""FilesetEntry""
                SET ""FileID"" = @FileId
                WHERE ""FileID"" IN (
                    SELECT ""ID""
                    FROM ""{tablename}""
                    WHERE
                        ""Path"" = @Path
                        AND ""BlocksetID"" = @BlocksetId
                        AND ""MetadataID"" = @MetadataId
                );
                DELETE FROM ""{tablename}""
                WHERE
                    ""Path"" = @Path
                    AND ""BlocksetID"" = @BlocksetId
                    AND ""MetadataID"" = @MetadataId
                    AND ""ID"" != @FileId
            ");

            await foreach (var rd in cmd.ExecuteReaderEnumerableAsync(sql, token).ConfigureAwait(false))
            {
                await c2
                    .SetParameterValue("@FileId", rd.GetValue(0))
                    .SetParameterValue("@Path", rd.GetValue(1))
                    .SetParameterValue("@BlocksetId", rd.GetValue(2))
                    .SetParameterValue("@MetadataId", rd.GetValue(3))
                    .ExecuteNonQueryAsync(token)
                    .ConfigureAwait(false);
            }
        }

        // Propagate the cleaned-up temp table back into FileLookup:
        // drop rows that no longer exist, then sync MetadataID values.
        await cmd.ExecuteNonQueryAsync($@"
            DELETE FROM ""FileLookup""
            WHERE ""ID"" NOT IN (
                SELECT ""ID""
                FROM ""{tablename}""
            )
        ", token)
            .ConfigureAwait(false);

        // Index speeds up the correlated subquery in the UPDATE below.
        await cmd.ExecuteNonQueryAsync($@"
            CREATE INDEX ""{tablename}-Ix""
            ON ""{tablename}"" (
                ""ID"",
                ""MetadataID""
            )
        ", token)
            .ConfigureAwait(false);

        await cmd.ExecuteNonQueryAsync($@"
            UPDATE ""FileLookup""
            SET ""MetadataID"" = (
                SELECT ""MetadataID""
                FROM ""{tablename}"" ""A""
                WHERE ""A"".""ID"" = ""FileLookup"".""ID""
            )
        ", token)
            .ConfigureAwait(false);

        await cmd.ExecuteNonQueryAsync($@"DROP TABLE ""{tablename}"" ", token)
            .ConfigureAwait(false);

        // Re-run the consistency probe; anything above 1 means the repair
        // did not converge.
        x = await cmd.ExecuteScalarInt64Async(sql_count, 0, token)
            .ConfigureAwait(false);
        if (x > 1)
            throw new Interface.UserInformationException("Repair failed, there are still duplicate metadatahashes!", "DuplicateHashesRepairFailed");

        Logging.Log.WriteInformationMessage(LOGTAG, "DuplicateMetadataHashesFixed", "Duplicate metadatahashes repaired successfully");
        await m_rtr.CommitAsync(token: token).ConfigureAwait(false);
    }
}
/// <summary>
/// Fixes duplicate file entries in the database.
/// </summary>
/// <param name="token">A cancellation token to cancel the operation.</param>
/// <returns>A task that when completed indicates that the repair has been attempted.</returns>
public async Task FixDuplicateFileentries(CancellationToken token)
{
    await using var cmd = m_connection.CreateCommand(m_rtr.Transaction);

    // Number of (PrefixID, Path, BlocksetID, MetadataID) groups that occur
    // more than once in FileLookup; zero means no duplicates.
    var sql_count = @"
        SELECT COUNT(*)
        FROM (
            SELECT
                ""PrefixID"",
                ""Path"",
                ""BlocksetID"",
                ""MetadataID"",
                COUNT(*) as ""Duplicates""
            FROM ""FileLookup""
            GROUP BY
                ""PrefixID"",
                ""Path"",
                ""BlocksetID"",
                ""MetadataID""
        )
        WHERE ""Duplicates"" > 1
    ";

    var x = await cmd.ExecuteScalarInt64Async(sql_count, 0, token)
        .ConfigureAwait(false);

    if (x > 0)
    {
        Logging.Log.WriteInformationMessage(LOGTAG, "DuplicateFileEntries", "Found duplicate file entries, repairing");

        // For each duplicated group, select the row with the smallest ID as
        // the canonical entry.
        var sql = @"
            SELECT
                ""ID"",
                ""PrefixID"",
                ""Path"",
                ""BlocksetID"",
                ""MetadataID"",
                ""Entries""
            FROM (
                SELECT
                    MIN(""ID"") AS ""ID"",
                    ""PrefixID"",
                    ""Path"",
                    ""BlocksetID"",
                    ""MetadataID"",
                    COUNT(*) as ""Entries""
                FROM ""FileLookup""
                GROUP BY
                    ""PrefixID"",
                    ""Path"",
                    ""BlocksetID"",
                    ""MetadataID""
            )
            WHERE ""Entries"" > 1
            ORDER BY ""ID""
        ";

        await using (var c2 = m_connection.CreateCommand(m_rtr.Transaction))
        {
            // Re-point FilesetEntry references at the canonical row, then
            // delete the redundant FileLookup rows.
            // BUGFIX: the UPDATE subquery previously used the misspelled
            // parameter @MetatadataId, which is never bound below and would
            // fail (or match nothing) at runtime.
            c2.SetCommandAndParameters(@"
                UPDATE ""FilesetEntry""
                SET ""FileID"" = @FileId
                WHERE ""FileID"" IN (
                    SELECT ""ID""
                    FROM ""FileLookup""
                    WHERE
                        ""PrefixID"" = @PrefixId
                        AND ""Path"" = @Path
                        AND ""BlocksetID"" = @BlocksetId
                        AND ""MetadataID"" = @MetadataId
                );
                DELETE FROM ""FileLookup""
                WHERE
                    ""PrefixID"" = @PrefixId
                    AND ""Path"" = @Path
                    AND ""BlocksetID"" = @BlocksetId
                    AND ""MetadataID"" = @MetadataId
                    AND ""ID"" != @FileId
            ");

            cmd.SetCommandAndParameters(sql);
            await foreach (var rd in cmd.ExecuteReaderEnumerableAsync(token).ConfigureAwait(false))
            {
                await c2
                    .SetParameterValue("@FileId", rd.GetValue(0))
                    .SetParameterValue("@PrefixId", rd.GetValue(1))
                    .SetParameterValue("@Path", rd.GetValue(2))
                    .SetParameterValue("@BlocksetId", rd.GetValue(3))
                    .SetParameterValue("@MetadataId", rd.GetValue(4))
                    .ExecuteNonQueryAsync(token)
                    .ConfigureAwait(false);
            }
        }

        // Re-check: any remaining duplicated group means the repair failed.
        // (Previously compared against > 1, which would silently accept a
        // single remaining duplicate group; the pre-check uses > 0.)
        x = await cmd.ExecuteScalarInt64Async(sql_count, 0, token)
            .ConfigureAwait(false);
        if (x > 0)
            throw new Interface.UserInformationException("Repair failed, there are still duplicate file entries!", "DuplicateFilesRepairFailed");

        Logging.Log.WriteInformationMessage(LOGTAG, "DuplicateFileEntriesFixed", "Duplicate file entries repaired successfully");
        await m_rtr.CommitAsync(token: token).ConfigureAwait(false);
    }
}
/// <summary>
/// Fixes missing blocklist hashes in the database.
/// </summary>
/// <param name="blockhashalgorithm">The hash algorithm used for the blocklist hashes.</param>
/// <param name="blocksize">The size of each block in bytes.</param>
/// <param name="token">A cancellation token to cancel the operation.</param>
/// <returns>A task that when completed indicates that the repair has been attempted.</returns>
public async Task FixMissingBlocklistHashes(string blockhashalgorithm, long blocksize, CancellationToken token)
{
    // Scratch buffer into which block hashes are concatenated; one full
    // buffer corresponds to one blocklist block.
    var blocklistbuffer = new byte[blocksize];
    await using var cmd = m_connection.CreateCommand(m_rtr.Transaction);
    using var blockhasher = HashFactory.CreateHasher(blockhashalgorithm);
    var hashsize = blockhasher.HashSize / 8;
    // Number of block hashes that fit in one blocklist block.
    var blocksize_per_hashsize = Library.Utility.Utility.FormatInvariantValue(blocksize / hashsize);

    // Finds blocksets (with more than one block) where the number of
    // BlocklistHash rows does not match the expected ceil(blocks/hashes-per-block).
    var sql = $@"
        SELECT *
        FROM (
            SELECT
                ""N"".""BlocksetID"",
                ((""N"".""BlockCount"" + {blocksize_per_hashsize} - 1) / {blocksize_per_hashsize}) AS ""BlocklistHashCountExpected"",
                CASE
                    WHEN ""G"".""BlocklistHashCount"" IS NULL
                    THEN 0
                    ELSE ""G"".""BlocklistHashCount""
                END AS ""BlocklistHashCountActual""
            FROM (
                SELECT
                    ""BlocksetID"",
                    COUNT(*) AS ""BlockCount""
                FROM ""BlocksetEntry""
                GROUP BY ""BlocksetID""
            ) ""N""
            LEFT OUTER JOIN (
                SELECT
                    ""BlocksetID"",
                    COUNT(*) AS ""BlocklistHashCount""
                FROM ""BlocklistHash""
                GROUP BY ""BlocksetID""
            ) ""G""
            ON ""N"".""BlocksetID"" = ""G"".""BlocksetID""
            WHERE ""N"".""BlockCount"" > 1
        )
        WHERE ""BlocklistHashCountExpected"" != ""BlocklistHashCountActual""
    ";

    var countsql = @$"
        SELECT COUNT(*)
        FROM ({sql})
    ";

    var itemswithnoblocklisthash = await cmd
        .ExecuteScalarInt64Async(countsql, 0, token)
        .ConfigureAwait(false);

    if (itemswithnoblocklisthash != 0)
    {
        Logging.Log.WriteInformationMessage(LOGTAG, "MissingBlocklistHashes", "Found {0} missing blocklisthash entries, repairing", itemswithnoblocklisthash);

        await using (var c2 = m_connection.CreateCommand(m_rtr.Transaction))
        await using (var c3 = m_connection.CreateCommand(m_rtr.Transaction))
        await using (var c4 = m_connection.CreateCommand(m_rtr.Transaction))
        await using (var c5 = m_connection.CreateCommand(m_rtr.Transaction))
        await using (var c6 = m_connection.CreateCommand(m_rtr.Transaction))
        {
            // c3: inserts a recomputed blocklist-hash entry.
            c3.SetCommandAndParameters(@"
                INSERT INTO ""BlocklistHash"" (
                    ""BlocksetID"",
                    ""Index"",
                    ""Hash""
                )
                VALUES (
                    @BlocksetId,
                    @Index,
                    @Hash
                )
            ");

            // c4: looks up an existing block by hash+size.
            c4.SetCommandAndParameters(@"
                SELECT ""ID""
                FROM ""Block""
                WHERE
                    ""Hash"" = @Hash
                    AND ""Size"" = @Size
            ");

            // c5: looks up a deleted block (in a still-present volume) by hash+size.
            c5.SetCommandAndParameters(@"
                SELECT ""ID""
                FROM ""DeletedBlock""
                WHERE
                    ""Hash"" = @Hash
                    AND ""Size"" = @Size
                    AND ""VolumeID"" IN (
                        SELECT ""ID""
                        FROM ""RemoteVolume""
                        WHERE
                            ""Type"" = @Type
                            AND (
                                ""State"" IN (
                                    @State1,
                                    @State2
                                )
                            )
                    )
            ");

            // c6: resurrects a deleted block into the Block table (2 statements,
            // hence the expected change count of 2 below).
            c6.SetCommandAndParameters(@"
                INSERT INTO ""Block"" (
                    ""Hash"",
                    ""Size"",
                    ""VolumeID""
                )
                SELECT
                    ""Hash"",
                    ""Size"",
                    ""VolumeID""
                FROM ""DeletedBlock""
                WHERE ""ID"" = @DeletedBlockId
                LIMIT 1;
                DELETE FROM ""DeletedBlock""
                WHERE ""ID"" = @DeletedBlockId;
            ");

            await foreach (var e in cmd.ExecuteReaderEnumerableAsync(sql, token).ConfigureAwait(false))
            {
                var blocksetid = e.ConvertValueToInt64(0);
                var ix = 0L;
                int blocklistoffset = 0;

                // Remove the stale entries; they will be recomputed below.
                await c2.SetCommandAndParameters(@"
                    DELETE FROM ""BlocklistHash""
                    WHERE ""BlocksetID"" = @BlocksetId
                ")
                    .SetParameterValue("@BlocksetId", blocksetid)
                    .ExecuteNonQueryAsync(token)
                    .ConfigureAwait(false);

                // Stream the blockset's block hashes in order and re-pack them
                // into blocklist blocks.
                c2.SetCommandAndParameters(@"
                    SELECT ""A"".""Hash""
                    FROM
                        ""Block"" ""A"",
                        ""BlocksetEntry"" ""B""
                    WHERE
                        ""A"".""ID"" = ""B"".""BlockID""
                        AND ""B"".""BlocksetID"" = @BlocksetId
                    ORDER BY ""B"".""Index""
                ")
                    .SetParameterValue("@BlocksetId", blocksetid);

                await foreach (var h in c2.ExecuteReaderEnumerableAsync(token).ConfigureAwait(false))
                {
                    var tmp = Convert.FromBase64String(h.ConvertValueToString(0) ?? throw new Exception("Hash value was null"));
                    if (blocklistbuffer.Length - blocklistoffset < tmp.Length)
                    {
                        // Buffer full: emit the completed blocklist block.
                        var blkey = Convert.ToBase64String(blockhasher.ComputeHash(blocklistbuffer, 0, blocklistoffset));

                        // Check if the block exists in "blocks"
                        var existingBlockId = await c4
                            .SetParameterValue("@Hash", blkey)
                            .SetParameterValue("@Size", blocklistoffset)
                            .ExecuteScalarInt64Async(-1, token)
                            .ConfigureAwait(false);

                        if (existingBlockId <= 0)
                        {
                            var deletedBlockId = await c5
                                .SetParameterValue("@Hash", blkey)
                                .SetParameterValue("@Size", blocklistoffset)
                                .SetParameterValue("@Type", RemoteVolumeType.Blocks.ToString())
                                .SetParameterValue("@State1", RemoteVolumeState.Uploaded.ToString())
                                .SetParameterValue("@State2", RemoteVolumeState.Verified.ToString())
                                .ExecuteScalarInt64Async(-1, token)
                                .ConfigureAwait(false);

                            if (deletedBlockId <= 0)
                                throw new Exception($"Missing block for blocklisthash: {blkey}");
                            else
                            {
                                var rc = await c6
                                    .SetParameterValue("@DeletedBlockId", deletedBlockId)
                                    .ExecuteNonQueryAsync(token)
                                    .ConfigureAwait(false);
                                if (rc != 2)
                                    throw new Exception($"Unexpected update count: {rc}");
                            }
                        }

                        // Add to table
                        await c3.SetParameterValue("@BlocksetId", blocksetid)
                            .SetParameterValue("@Index", ix)
                            .SetParameterValue("@Hash", blkey)
                            .ExecuteNonQueryAsync(token)
                            .ConfigureAwait(false);

                        ix++;
                        blocklistoffset = 0;
                    }

                    Array.Copy(tmp, 0, blocklistbuffer, blocklistoffset, tmp.Length);
                    blocklistoffset += tmp.Length;
                }

                // Flush the final, partially-filled blocklist block.
                if (blocklistoffset != 0)
                {
                    var blkeyfinal = Convert.ToBase64String(blockhasher.ComputeHash(blocklistbuffer, 0, blocklistoffset));

                    // Ensure that the block exists in "blocks"
                    var existingBlockId = await c4
                        .SetParameterValue("@Hash", blkeyfinal)
                        .SetParameterValue("@Size", blocklistoffset)
                        .ExecuteScalarInt64Async(-1, token)
                        .ConfigureAwait(false);

                    if (existingBlockId <= 0)
                    {
                        var deletedBlockId = await c5
                            .SetParameterValue("@Hash", blkeyfinal)
                            .SetParameterValue("@Size", blocklistoffset)
                            .SetParameterValue("@Type", RemoteVolumeType.Blocks.ToString())
                            .SetParameterValue("@State1", RemoteVolumeState.Uploaded.ToString())
                            .SetParameterValue("@State2", RemoteVolumeState.Verified.ToString())
                            .ExecuteScalarInt64Async(-1, token)
                            .ConfigureAwait(false);

                        // BUGFIX: was "== 0", which can never be true because the
                        // lookup defaults to -1 when no row is found; use "<= 0"
                        // like the identical check in the loop above.
                        if (deletedBlockId <= 0)
                            throw new Exception($"Missing block for blocklisthash: {blkeyfinal}");
                        else
                        {
                            var rc = await c6
                                .SetParameterValue("@DeletedBlockId", deletedBlockId)
                                .ExecuteNonQueryAsync(token)
                                .ConfigureAwait(false);
                            if (rc != 2)
                                throw new Exception($"Unexpected update count: {rc}");
                        }
                    }

                    // Add to table
                    await c3
                        .SetParameterValue("@BlocksetId", blocksetid)
                        .SetParameterValue("@Index", ix)
                        .SetParameterValue("@Hash", blkeyfinal)
                        .ExecuteNonQueryAsync(token)
                        .ConfigureAwait(false);
                }
            }
        }

        // Re-run the probe; any remaining mismatch means the repair failed.
        itemswithnoblocklisthash = await cmd
            .ExecuteScalarInt64Async(countsql, 0, token)
            .ConfigureAwait(false);
        if (itemswithnoblocklisthash != 0)
            throw new Interface.UserInformationException($"Failed to repair, after repair {itemswithnoblocklisthash} blocklisthashes were missing", "MissingBlocklistHashesRepairFailed");

        Logging.Log.WriteInformationMessage(LOGTAG, "MissingBlocklisthashesRepaired", "Missing blocklisthashes repaired successfully");
        await m_rtr.CommitAsync(token: token).ConfigureAwait(false);
    }
}
/// <summary>
/// Fixes duplicate blocklist hashes in the database.
/// </summary>
/// <param name="blocksize">The size of each block in bytes.</param>
/// <param name="hashsize">The size of each hash in bytes.</param>
/// <param name="token">A cancellation token to cancel the operation.</param>
/// <returns>A task that when completed indicates that the repair has been attempted.</returns>
public async Task FixDuplicateBlocklistHashes(long blocksize, long hashsize, CancellationToken token)
{
    await using var cmd = m_connection.CreateCommand(m_rtr.Transaction);

    // Groups of (BlocksetID, Index) with more than one BlocklistHash row.
    var dup_sql = @"
        SELECT *
        FROM (
            SELECT
                ""BlocksetID"",
                ""Index"",
                COUNT(*) AS ""EC""
            FROM ""BlocklistHash""
            GROUP BY
                ""BlocksetID"",
                ""Index""
        )
        WHERE ""EC"" > 1
    ";

    var sql_count = @$"
        SELECT COUNT(*)
        FROM ({dup_sql})
    ";

    var x = await cmd.ExecuteScalarInt64Async(sql_count, 0, token)
        .ConfigureAwait(false);

    if (x > 0)
    {
        Logging.Log.WriteInformationMessage(LOGTAG, "DuplicateBlocklistHashes", "Found duplicate blocklisthash entries, repairing");

        // Expected row count after de-duplication, used as a sanity check below.
        var unique_count = await cmd.ExecuteScalarInt64Async(@"
            SELECT COUNT(*)
            FROM (
                SELECT DISTINCT
                    ""BlocksetID"",
                    ""Index""
                FROM ""BlocklistHash""
            )
        ", 0, token)
            .ConfigureAwait(false);

        await using (var c2 = m_connection.CreateCommand(m_rtr.Transaction))
        {
            // Delete all but one row per duplicated group, addressing the
            // surplus rows by rowid since the table has no unique key.
            c2.SetCommandAndParameters(@"
                DELETE FROM ""BlocklistHash""
                WHERE rowid IN (
                    SELECT rowid
                    FROM ""BlocklistHash""
                    WHERE
                        ""BlocksetID"" = @BlocksetId
                        AND ""Index"" = @Index
                    LIMIT @Limit
                )
            ");

            await foreach (var rd in cmd.ExecuteReaderEnumerableAsync(dup_sql, token).ConfigureAwait(false))
            {
                var expected = rd.GetInt32(2) - 1;
                var actual = await c2
                    .SetParameterValue("@BlocksetId", rd.GetValue(0))
                    .SetParameterValue("@Index", rd.GetValue(1))
                    .SetParameterValue("@Limit", expected)
                    .ExecuteNonQueryAsync(token)
                    .ConfigureAwait(false);
                if (actual != expected)
                    throw new Exception($"Unexpected number of results after fix, got: {actual}, expected: {expected}");
            }
        }

        // Re-check: any remaining duplicated group means the repair failed.
        // (Previously compared against > 1, which would silently accept a
        // single remaining duplicate group; the pre-check uses > 0. Also
        // passes the explicit default value like every other call, and the
        // message no longer talks about "file entries".)
        x = await cmd.ExecuteScalarInt64Async(sql_count, 0, token)
            .ConfigureAwait(false);
        if (x > 0)
            throw new Exception("Repair failed, there are still duplicate blocklisthash entries!");

        var real_count = await cmd.ExecuteScalarInt64Async(@"
            SELECT Count(*)
            FROM ""BlocklistHash""
        ", 0, token)
            .ConfigureAwait(false);

        if (real_count != unique_count)
            throw new Interface.UserInformationException($"Failed to repair, result should have been {unique_count} blocklist hashes, but result was {real_count} blocklist hashes", "DuplicateBlocklistHashesRepairFailed");

        // Full verification guards against the de-duplication having removed
        // a row that something else depended on.
        try
        {
            await VerifyConsistency(blocksize, hashsize, true, token)
                .ConfigureAwait(false);
        }
        catch (Exception ex)
        {
            throw new Interface.UserInformationException("Repaired blocklisthashes, but the database was broken afterwards, rolled back changes", "DuplicateBlocklistHashesRepairFailed", ex);
        }

        Logging.Log.WriteInformationMessage(LOGTAG, "DuplicateBlocklistHashesRepaired", "Duplicate blocklisthashes repaired successfully");
        await m_rtr.CommitAsync(token: token).ConfigureAwait(false);
    }
}
/// <summary>
/// Checks if all blocks in the specified volume are present in the database.
/// </summary>
/// <param name="filename">The name of the volume to check.</param>
/// <param name="blocks">A collection of blocks to check, represented as key-value pairs where the key is the block hash and the value is the block size.</param>
/// <param name="token">A cancellation token to cancel the operation.</param>
/// <exception cref="Exception">Thrown if not all blocks are found in the specified volume.</exception>
/// <returns>A task that when awaited indicates the completion of the check.</returns>
public async Task CheckAllBlocksAreInVolume(string filename, IEnumerable<KeyValuePair<string, long>> blocks, CancellationToken token)
{
    await using var cmd = m_connection.CreateCommand(m_rtr.Transaction);
    var tablename = $"ProbeBlocks-{Library.Utility.Utility.GetHexGuid()}";
    try
    {
        // Stage the expected (hash, size) pairs in a temporary table so the
        // membership check is a single join instead of one query per block.
        await cmd.ExecuteNonQueryAsync($@"
            CREATE TEMPORARY TABLE ""{tablename}"" (
                ""Hash"" TEXT NOT NULL,
                ""Size"" INTEGER NOT NULL
            )
        ", token)
            .ConfigureAwait(false);

        cmd.SetCommandAndParameters($@"
            INSERT INTO ""{tablename}"" (
                ""Hash"",
                ""Size""
            )
            VALUES (
                @Hash,
                @Size
            )
        ");

        foreach (var kp in blocks)
        {
            await cmd
                .SetParameterValue("@Hash", kp.Key)
                .SetParameterValue("@Size", kp.Value)
                .ExecuteNonQueryAsync(token)
                .ConfigureAwait(false);
        }

        var id = await cmd.SetCommandAndParameters(@"
            SELECT ""ID""
            FROM ""RemoteVolume""
            WHERE ""Name"" = @Name
        ")
            .SetParameterValue("@Name", filename)
            .ExecuteScalarInt64Async(-1, token)
            .ConfigureAwait(false);

        // Count probe blocks whose Block row is missing (VolumeID is NULL
        // from the outer join) or points at a different volume.
        var aliens = await cmd.SetCommandAndParameters($@"
            SELECT COUNT(*)
            FROM (
                SELECT ""A"".""VolumeID""
                FROM ""{tablename}"" ""B""
                LEFT OUTER JOIN ""Block"" ""A""
                    ON ""A"".""Hash"" = ""B"".""Hash""
                    AND ""A"".""Size"" = ""B"".""Size""
            )
            WHERE ""VolumeID"" != @VolumeId
        ")
            .SetParameterValue("@VolumeId", id)
            .ExecuteScalarInt64Async(0, token)
            .ConfigureAwait(false);

        // BUGFIX: the message previously contained no interpolation hole,
        // so the volume name was never reported.
        if (aliens != 0)
            throw new Exception($"Not all blocks were found in {filename}");
    }
    finally
    {
        await cmd.ExecuteNonQueryAsync($@"DROP TABLE IF EXISTS ""{tablename}"" ", token)
            .ConfigureAwait(false);
    }
}
/// <summary>
/// Checks if the provided blocklist matches the expected entries in the database for a given block hash and size.
/// </summary>
/// <param name="hash">The hash of the blocklist to check.</param>
/// <param name="length">The size of the blocklist in bytes.</param>
/// <param name="blocklist">The expected blocklist entries to compare against.</param>
/// <param name="blocksize">The size of each block in bytes.</param>
/// <param name="blockhashlength">The length of each block hash in bytes.</param>
/// <param name="token">A cancellation token to cancel the operation.</param>
/// <returns>A task that when awaited indicates the completion of the check.</returns>
/// <exception cref="Exception">Thrown if the blocklist does not match the expected entries.</exception>
public async Task CheckBlocklistCorrect(string hash, long length, IEnumerable<string> blocklist, long blocksize, long blockhashlength, CancellationToken token)
{
    await using var cmd = m_connection.CreateCommand(m_rtr.Transaction);
    // Constants are baked into the SQL text (invariant-formatted) so SQLite
    // computes the per-blocklist index window.
    var str_blocksize = Library.Utility.Utility.FormatInvariantValue(blocksize);
    var str_blockhashlength = Library.Utility.Utility.FormatInvariantValue(blockhashlength);

    // Subquery "B" locates the blockset and position covered by this
    // blocklist hash; the outer query then selects that blockset's block
    // hashes restricted to the index window covered by the blocklist,
    // ordered by index so they can be compared one-by-one below.
    var query = $@"
        SELECT
            ""C"".""Hash"",
            ""C"".""Size""
        FROM
            ""BlocksetEntry"" ""A"",
            (
                SELECT
                    ""Y"".""BlocksetID"",
                    ""Y"".""Hash"" AS ""BlocklistHash"",
                    ""Y"".""Index"" AS ""BlocklistHashIndex"",
                    ""Z"".""Size"" AS ""BlocklistSize"",
                    ""Z"".""ID"" AS ""BlocklistHashBlockID""
                FROM
                    ""BlocklistHash"" ""Y"",
                    ""Block"" ""Z""
                WHERE
                    ""Y"".""Hash"" = ""Z"".""Hash""
                    AND ""Y"".""Hash"" = @Hash
                    AND ""Z"".""Size"" = @Size
                LIMIT 1
            ) ""B"",
            ""Block"" ""C""
        WHERE
            ""A"".""BlocksetID"" = ""B"".""BlocksetID""
            AND ""A"".""BlockID"" = ""C"".""ID""
            AND ""A"".""Index"" >= ""B"".""BlocklistHashIndex"" * ({str_blocksize} / {str_blockhashlength})
            AND ""A"".""Index"" < (""B"".""BlocklistHashIndex"" + 1) * ({str_blocksize} / {str_blockhashlength})
        ORDER BY ""A"".""Index""
    ";

    // Walk the supplied blocklist in lock-step with the database rows;
    // both sequences must have the same length and identical hashes.
    using var en = blocklist.GetEnumerator();
    cmd.SetCommandAndParameters(query)
        .SetParameterValue("@Hash", hash)
        .SetParameterValue("@Size", length);

    await foreach (var r in cmd.ExecuteReaderEnumerableAsync(token).ConfigureAwait(false))
    {
        if (!en.MoveNext())
            throw new Exception($"Too few entries in source blocklist with hash {hash}");
        if (en.Current != r.ConvertValueToString(0))
            throw new Exception($"Mismatch in blocklist with hash {hash}");
    }

    // Leftover source entries mean the database window was shorter than
    // the provided blocklist.
    if (en.MoveNext())
        throw new Exception($"Too many source blocklist entries in {hash}");
}
/// <summary>
/// Checks if there are any missing local filesets in the database.
/// </summary>
/// <param name="token">A cancellation token to cancel the operation.</param>
/// <returns>An asynchronous enumerable of missing local fileset names.</returns>
public async IAsyncEnumerable<string> MissingLocalFilesets([EnumeratorCancellation] CancellationToken token)
{
    // Remote dlist volumes that are neither deleted nor being deleted, yet
    // have no corresponding Fileset row in the local database.
    await using var command = m_connection.CreateCommand(@"
        SELECT ""Name""
        FROM ""RemoteVolume""
        WHERE
            ""Type"" = @Type
            AND ""State"" NOT IN (@States)
            AND ""ID"" NOT IN (
                SELECT ""VolumeID""
                FROM ""Fileset""
            )
    ")
        .SetTransaction(m_rtr)
        .SetParameterValue("@Type", RemoteVolumeType.Files.ToString())
        .ExpandInClauseParameterMssqlite("@States", [
            RemoteVolumeState.Deleting.ToString(),
            RemoteVolumeState.Deleted.ToString()
        ]);

    await foreach (var row in command.ExecuteReaderEnumerableAsync(token).ConfigureAwait(false))
    {
        var name = row.ConvertValueToString(0) ?? "";
        yield return name;
    }
}
/// <summary>
/// Checks if there are any missing remote filesets in the database.
/// </summary>
/// <param name="token">A cancellation token to cancel the operation.</param>
/// <returns>An asynchronous enumerable of tuples containing the fileset ID, timestamp, and whether it is a full backup.</returns>
public async IAsyncEnumerable<(long FilesetID, DateTime Timestamp, bool IsFull)> MissingRemoteFilesets([EnumeratorCancellation] CancellationToken token)
{
await using var cmd = m_connection.CreateCommand(@"
SELECT
""ID"",
""Timestamp"",
""IsFullBackup""
FROM ""Fileset""
WHERE ""VolumeID"" NOT IN (
SELECT ""ID""
FROM ""RemoteVolume""
WHERE
""Type"" = @Type
AND ""State"" NOT IN (@States)
)
")
.SetTransaction(m_rtr)
.SetParameterValue("@Type", RemoteVolumeType.Files.ToString())
.ExpandInClauseParameterMssqlite("@States", [
RemoteVolumeState.Deleting.ToString(),
RemoteVolumeState.Deleted.ToString()
]);
await foreach (var rd in cmd.ExecuteReaderEnumerableAsync(token).ConfigureAwait(false))
yield return (
rd.ConvertValueToInt64(0),
ParseFromEpochSeconds(rd.ConvertValueToInt64(1)),
rd.ConvertValueToInt64(2) == BackupType.FULL_BACKUP
);
}
/// <summary>
/// Checks if there are any empty index files in the database.
/// </summary>
/// <param name="token">A cancellation token to cancel the operation.</param>
/// <returns>An asynchronous enumerable of remote volumes that are empty index files.</returns>
public async IAsyncEnumerable<IRemoteVolume> EmptyIndexFiles([EnumeratorCancellation] CancellationToken token)
{
await using var cmd = m_connection.CreateCommand(@"
SELECT
""Name"",
""Hash"",
""Size""
FROM ""RemoteVolume""
WHERE
""Type"" = @Type
AND ""State"" IN (@States)
AND ""ID"" NOT IN (
SELECT ""IndexVolumeId""
FROM ""IndexBlockLink""
)
")
.SetTransaction(m_rtr)
.SetParameterValue("@Type", RemoteVolumeType.Index.ToString())
.ExpandInClauseParameterMssqlite("@States", [
RemoteVolumeState.Uploaded.ToString(),
RemoteVolumeState.Verified.ToString()
]);
await foreach (var rd in cmd.ExecuteReaderEnumerableAsync(token).ConfigureAwait(false))
yield return new RemoteVolume(
rd.ConvertValueToString(0) ?? "",
rd.ConvertValueToString(1) ?? "",
rd.ConvertValueToInt64(2)
);
}
public async Task FixEmptyMetadatasets(Options options, CancellationToken token)
{
using var cmd = m_connection.CreateCommand(@"
SELECT COUNT(*)
FROM ""Metadataset""
JOIN ""Blockset""
ON ""Metadataset"".""BlocksetID"" = ""Blockset"".""ID""
WHERE ""Blockset"".""Length"" = 0
")
.SetTransaction(m_rtr);
var emptyMetaCount = await cmd.ExecuteScalarInt64Async(0, token)
.ConfigureAwait(false);
if (emptyMetaCount <= 0)
return;
Logging.Log.WriteInformationMessage(LOGTAG, "ZeroLengthMetadata", "Found {0} zero-length metadata entries", emptyMetaCount);
// Create replacement metadata
var emptyMeta = Utility.WrapMetadata(new Dictionary<string, string>(), options);
var emptyBlocksetId = await GetEmptyMetadataBlocksetId(Array.Empty<long>(), emptyMeta.FileHash, emptyMeta.Blob.Length, token)
.ConfigureAwait(false);
if (emptyBlocksetId < 0)
throw new Interface.UserInformationException(
"Failed to locate an empty metadata blockset to replace missing metadata. Set the option --disable-replace-missing-metadata=true to ignore this and drop files with missing metadata.",
"FailedToLocateEmptyMetadataBlockset");
// Step 1: Create temp table with Metadataset IDs referencing empty blocksets (excluding the one to keep)
var tablename = $"FixMetadatasets-{Library.Utility.Utility.GetHexGuid()}";
try
{
await cmd.SetCommandAndParameters(@$"
CREATE TEMP TABLE ""{tablename}"" AS
SELECT
""m"".""ID"" AS ""MetadataID"",
""m"".""BlocksetID""
FROM ""Metadataset"" ""m""
JOIN ""Blockset"" ""b""
ON ""m"".""BlocksetID"" = ""b"".""ID""
WHERE
""b"".""Length"" = 0
AND ""m"".""BlocksetID"" != @KeepBlockset
")
.SetParameterValue("@KeepBlockset", emptyBlocksetId)
.ExecuteNonQueryAsync(token)
.ConfigureAwait(false);
// Step 2: Update FileLookup to use a valid metadata ID
await cmd.SetCommandAndParameters(@$"
UPDATE ""FileLookup""
SET ""MetadataID"" = (
SELECT ""ID""
FROM ""Metadataset""
WHERE ""BlocksetID"" = @KeepBlockset
LIMIT 1
)
WHERE ""MetadataID"" IN (
SELECT ""MetadataID""
FROM ""{tablename}""
)
")
.SetParameterValue("@KeepBlockset", emptyBlocksetId)
.ExecuteNonQueryAsync(token)
.ConfigureAwait(false);
// Step 3: Delete obsolete Metadataset entries
await cmd.SetCommandAndParameters(@$"
DELETE FROM ""Metadataset""
WHERE ""ID"" IN (
SELECT ""MetadataID""
FROM ""{tablename}""
)
")
.ExecuteNonQueryAsync(token)
.ConfigureAwait(false);
// Step 4: Delete orphaned blocksets (affected only)
await cmd.SetCommandAndParameters(@$"
DELETE FROM ""Blockset""
WHERE
""ID"" IN (
SELECT ""BlocksetID""
FROM ""{tablename}""
)
AND NOT EXISTS (
SELECT 1
FROM ""Metadataset""
WHERE ""BlocksetID"" = ""Blockset"".""ID""
)
AND NOT EXISTS (
SELECT 1
FROM ""File""
WHERE ""BlocksetID"" = ""Blockset"".""ID""
)
")
.ExecuteNonQueryAsync(token)
.ConfigureAwait(false);
// Step 5: Confirm all broken metadata entries are resolved
cmd.SetCommandAndParameters(@"
SELECT COUNT(*)
FROM ""Metadataset""
JOIN ""Blockset""
ON ""Metadataset"".""BlocksetID"" = ""Blockset"".""ID""
WHERE ""Blockset"".""Length"" = 0
");
var remaining = await cmd.ExecuteScalarInt64Async(0, token);
if (remaining > 0)
throw new Interface.UserInformationException(
"Some zero-length metadata entries could not be repaired.",
"MetadataRepairFailed");
Logging.Log.WriteInformationMessage(LOGTAG, "ZeroLengthMetadataRepaired", "Zero length metadata entries repaired successfully");
await m_rtr.CommitAsync(token: token)
.ConfigureAwait(false);
}
finally
{
try
{
await cmd.SetCommandAndParameters($@"DROP TABLE IF EXISTS ""{tablename}"" ")
.ExecuteNonQueryAsync(token)
.ConfigureAwait(false);
}
catch (Exception ex)
{
Logging.Log.WriteVerboseMessage(LOGTAG, "ErrorDroppingTempTable", ex, "Failed to drop temporary table {0}: {1}", tablename, ex.Message);
}
}
}
}
}
| MissingBlockList |
csharp | dotnet__aspnetcore | src/Validation/gen/Parsers/ValidationsGenerator.EndpointsParser.cs | {
"start": 596,
"end": 2117
} | partial class ____ : IIncrementalGenerator
{
internal bool FindEndpoints(SyntaxNode syntaxNode, CancellationToken cancellationToken)
{
if (syntaxNode is InvocationExpressionSyntax
&& syntaxNode.TryGetMapMethodName(out var method))
{
return method == "MapMethods" || InvocationOperationExtensions.KnownMethods.Contains(method);
}
return false;
}
internal IInvocationOperation? TransformEndpoints(GeneratorSyntaxContext context, CancellationToken cancellationToken)
{
if (context.Node is not InvocationExpressionSyntax node)
{
return null;
}
var operation = context.SemanticModel.GetOperation(node, cancellationToken);
AnalyzerDebug.Assert(operation != null, "Operation should not be null.");
return operation is IInvocationOperation invocationOperation
? invocationOperation
: null;
}
internal ImmutableArray<ValidatableType> ExtractValidatableEndpoint(IInvocationOperation? operation, CancellationToken cancellationToken)
{
AnalyzerDebug.Assert(operation != null, "Operation should not be null.");
AnalyzerDebug.Assert(operation.SemanticModel != null, "Operation should have a semantic model.");
var wellKnownTypes = WellKnownTypes.GetOrCreate(operation.SemanticModel.Compilation);
var validatableTypes = ExtractValidatableTypes(operation, wellKnownTypes);
return validatableTypes;
}
}
| ValidationsGenerator |
csharp | microsoft__semantic-kernel | dotnet/samples/Demos/ProcessFrameworkWithSignalR/src/ProcessFramework.Aspire.SignalR.ProcessOrchestrator/Steps/ProofreadDocumentationStep.cs | {
"start": 453,
"end": 634
} | public class ____ : KernelProcessStep
{
/// <summary>
/// SK Process Events emitted by <see cref="ProofReadDocumentationStep"/>
/// </summary>
| ProofReadDocumentationStep |
csharp | dotnet__aspire | src/Aspire.Dashboard/Components/Dialogs/TextVisualizerDialog.razor.cs | {
"start": 4360,
"end": 4707
} | internal sealed record ____(bool SecretsWarningAcknowledged);
private async Task UnmaskContentAsync()
{
await LocalStorage.SetUnprotectedAsync(BrowserStorageKeys.TextVisualizerDialogSettings, new TextVisualizerDialogSettings(SecretsWarningAcknowledged: true));
ShowSecretsWarning = false;
}
}
| TextVisualizerDialogSettings |
csharp | nopSolutions__nopCommerce | src/Presentation/Nop.Web.Framework/Mvc/Filters/PublishModelEventsAttribute.cs | {
"start": 3609,
"end": 6033
} | interface ____ handle this event
await _eventPublisher.ModelReceivedAsync(model, context.ModelState);
}
}
/// <summary>
/// Called asynchronously before the action, after model binding is complete.
/// </summary>
/// <param name="context">A context for action filters</param>
/// <returns>A task that represents the asynchronous operation</returns>
private async Task PublishModelPreparedEventAsync(ActionExecutingContext context)
{
ArgumentNullException.ThrowIfNull(context);
if (IgnoreFilter(context))
return;
//model prepared event
if (context.Controller is Controller controller)
await _eventPublisher.ModelPreparedAsync(controller.ViewData.Model);
}
#endregion
#region Methods
/// <summary>
/// Called asynchronously before the action, after model binding is complete.
/// </summary>
/// <param name="context">A context for action filters</param>
/// <param name="next">A delegate invoked to execute the next action filter or the action itself</param>
/// <returns>A task that represents the asynchronous operation</returns>
public async Task OnActionExecutionAsync(ActionExecutingContext context, ActionExecutionDelegate next)
{
await PublishModelReceivedEventAsync(context);
if (context.Result == null)
await next();
await PublishModelPreparedEventAsync(context);
}
/// <summary>Called asynchronously before the action result.</summary>
/// <param name="context">A context for action filters</param>
/// <param name="next">A delegate invoked to execute the next action filter or the action itself</param>
/// <returns>A task that represents the asynchronous operation</returns>
public async Task OnResultExecutionAsync(ResultExecutingContext context, ResultExecutionDelegate next)
{
ArgumentNullException.ThrowIfNull(context);
if (IgnoreFilter(context))
return;
//model prepared event
if (context.Result is JsonResult result)
await _eventPublisher.ModelPreparedAsync(result.Value);
await next();
}
#endregion
}
#endregion
} | to |
csharp | dotnet__efcore | test/EFCore.Design.Tests/Design/OperationExecutorTest.cs | {
"start": 392,
"end": 7093
} | public class ____(ITestOutputHelper testOutputHelper)
{
private static readonly char S = Path.DirectorySeparatorChar;
[ConditionalFact]
public void Ctor_validates_arguments()
{
var ex = Assert.Throws<ArgumentNullException>(() => new OperationExecutor(null!, null!));
Assert.Equal("reportHandler", ex.ParamName);
ex = Assert.Throws<ArgumentNullException>(() => new OperationExecutor(new OperationReportHandler(), null!));
Assert.Equal("args", ex.ParamName);
}
[ConditionalTheory, PlatformSkipCondition(
TestUtilities.Xunit.TestPlatform.Linux | TestUtilities.Xunit.TestPlatform.Mac,
SkipReason = "Tested negative cases and baselines are Windows-specific"), InlineData("MgOne", "MgOne"),
InlineData("Name with Spaces", "NamewithSpaces"), InlineData(" Space Space ", "SpaceSpace")]
public void AddMigration_can_scaffold_for_different_names(string migrationName, string processedMigrationName)
=> TestAddMigrationPositive(
migrationName, processedMigrationName,
"output", "output",
ProductInfo.GetVersion());
[ConditionalTheory, PlatformSkipCondition(
TestUtilities.Xunit.TestPlatform.Linux | TestUtilities.Xunit.TestPlatform.Mac,
SkipReason = "Tested negative cases and baselines are Windows-specific"), InlineData("to fix error: add column is_deleted"),
InlineData(@"A\B\C")] // Issue #24024
public void AddMigration_errors_for_bad_names(string migrationName)
=> TestAddMigrationNegative(
migrationName,
"output",
ProductInfo.GetVersion(),
typeof(OperationException),
DesignStrings.BadMigrationName(migrationName, string.Join("','", Path.GetInvalidFileNameChars())));
[ConditionalTheory, PlatformSkipCondition(
TestUtilities.Xunit.TestPlatform.Linux | TestUtilities.Xunit.TestPlatform.Mac,
SkipReason = "Tested negative cases and baselines are Windows-specific"), InlineData("output", "output"),
InlineData("Name with Spaces", "Name with Spaces"), InlineData(" Space Space", " Space Space")]
public void AddMigration_can_scaffold_for_different_output_dirs(string outputDir, string processedOutputDir)
=> TestAddMigrationPositive(
"MgTwo", "MgTwo",
outputDir, processedOutputDir,
ProductInfo.GetVersion());
[ConditionalTheory, PlatformSkipCondition(
TestUtilities.Xunit.TestPlatform.Linux | TestUtilities.Xunit.TestPlatform.Mac,
SkipReason = "Tested negative cases and baselines are Windows-specific"), InlineData("Something:Else")]
public void AddMigration_errors_for_bad_output_dirs(string outputDir)
=> TestAddMigrationNegative("MgTwo", outputDir, ProductInfo.GetVersion(), typeof(IOException), null);
[ConditionalFact]
public void AddMigration_errors_if_migration_name_is_same_as_context_name()
=> TestAddMigrationNegative(
"GnomeContext", "output", ProductInfo.GetVersion(), typeof(OperationException),
DesignStrings.ConflictingContextAndMigrationName("GnomeContext"));
private void TestAddMigrationPositive(
string migrationName,
string processedMigrationName,
string outputDir,
string processedOutputDir,
string productVersion)
{
using var tempPath = new TempDirectory();
var resultHandler = ExecuteAddMigration(tempPath, migrationName, Path.Combine(tempPath, outputDir), productVersion);
Assert.True(resultHandler.HasResult);
var files = (Hashtable)resultHandler.Result!;
Assert.Equal(3, files.Count);
var metadataFilePath = (string)files["MetadataFile"]!;
var migrationFilePath = (string)files["MigrationFile"]!;
var snapshotFilePath = (string)files["SnapshotFile"]!;
Assert.StartsWith(tempPath, metadataFilePath);
Assert.StartsWith(tempPath, migrationFilePath);
Assert.StartsWith(tempPath, snapshotFilePath);
var metadataFileName = metadataFilePath.Substring(tempPath.Path.Length + 1);
var migrationFileName = migrationFilePath.Substring(tempPath.Path.Length + 1);
var snapshotFileName = snapshotFilePath.Substring(tempPath.Path.Length + 1);
Assert.Equal(Path.Combine(processedOutputDir, $"11112233445566_{migrationName}.Designer.cs"), metadataFileName);
Assert.Equal(Path.Combine(processedOutputDir, $"11112233445566_{migrationName}.cs"), migrationFileName);
var snapshotDir = "";
foreach (var part in "My.Gnomespace.Data".Split('.'))
{
snapshotDir = Path.Combine(snapshotDir, part);
}
Assert.Equal(Path.Combine(snapshotDir, "GnomeContextModelSnapshot.cs"), snapshotFileName);
var metadataFile = File.ReadAllText(metadataFilePath);
var migrationFile = File.ReadAllText(migrationFilePath);
var snapshotFile = File.ReadAllText(snapshotFilePath);
Assert.Equal(
$$"""
// <auto-generated />
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Design;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
#nullable disable
namespace My.Gnomespace.Data
{
[DbContext(typeof(OperationExecutorTest.GnomeContext))]
[Migration("11112233445566_{{migrationName}}")]
partial class {{processedMigrationName}}
{
/// <inheritdoc />
protected override void BuildTargetModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder.HasAnnotation("ProductVersion", "{{productVersion}}");
#pragma warning restore 612, 618
}
}
}
""", metadataFile);
Assert.Equal(
$$"""
using Microsoft.EntityFrameworkCore.Migrations;
#nullable disable
namespace My.Gnomespace.Data
{
/// <inheritdoc />
public partial class {{processedMigrationName}} : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
}
}
}
""", migrationFile);
Assert.Equal(
$$"""
// <auto-generated />
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Design;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
#nullable disable
namespace My.Gnomespace.Data
{
[DbContext(typeof(OperationExecutorTest.GnomeContext))]
| OperationExecutorTest |
csharp | grpc__grpc-dotnet | test/Grpc.AspNetCore.Server.Tests/TestObjects/TestGrpcServiceActivator.cs | {
"start": 698,
"end": 1199
} | internal class ____<TGrpcService> : IGrpcServiceActivator<TGrpcService> where TGrpcService : class, new()
{
public bool Released { get; private set; }
public GrpcActivatorHandle<TGrpcService> Create(IServiceProvider serviceProvider)
{
return new GrpcActivatorHandle<TGrpcService>(new TGrpcService(), false, null);
}
public ValueTask ReleaseAsync(GrpcActivatorHandle<TGrpcService> service)
{
Released = true;
return default;
}
} | TestGrpcServiceActivator |
csharp | MassTransit__MassTransit | src/MassTransit/JobService/JobService/Messages/StartJobCommand.cs | {
"start": 139,
"end": 637
} | public class ____ :
StartJob
{
public Guid JobId { get; set; }
public Guid AttemptId { get; set; }
public int RetryAttempt { get; set; }
public Dictionary<string, object> Job { get; set; } = null!;
public Guid JobTypeId { get; set; }
public long? LastProgressValue { get; set; }
public long? LastProgressLimit { get; set; }
public Dictionary<string, object>? JobState { get; set; }
public Dictionary<string, object>? JobProperties { get; set; }
}
| StartJobCommand |
csharp | unoplatform__uno | src/Uno.UI.RemoteControl.VS/Helpers/ILogger.cs | {
"start": 162,
"end": 341
} | internal interface ____
{
void Info(string message);
void Debug(string message);
void Warn(string message);
void Error(string message);
void Verbose(string message);
}
| ILogger |
csharp | unoplatform__uno | src/SourceGenerators/System.Xaml/System.Xaml/IRootObjectProvider.cs | {
"start": 1209,
"end": 1285
} | internal interface ____
{
object RootObject { get; }
}
}
| IRootObjectProvider |
csharp | dotnet__efcore | src/EFCore.Proxies/Proxies/Internal/ProxiesOptionsExtension.cs | {
"start": 10225,
"end": 12463
} | private sealed class ____(IDbContextOptionsExtension extension) : DbContextOptionsExtensionInfo(extension)
{
private string? _logFragment;
private new ProxiesOptionsExtension Extension
=> (ProxiesOptionsExtension)base.Extension;
public override bool IsDatabaseProvider
=> false;
public override string LogFragment
=> _logFragment ??= Extension is { UseLazyLoadingProxies: true, UseChangeTrackingProxies: true }
? "using lazy loading and change tracking proxies "
: Extension.UseLazyLoadingProxies
? "using lazy loading proxies "
: Extension.UseChangeTrackingProxies
? "using change tracking proxies "
: "";
public override int GetServiceProviderHashCode()
{
var hashCode = new HashCode();
hashCode.Add(Extension.UseLazyLoadingProxies);
hashCode.Add(Extension.IgnoreNonVirtualNavigations);
hashCode.Add(Extension.UseChangeTrackingProxies);
hashCode.Add(Extension.CheckEquality);
return hashCode.ToHashCode();
}
public override bool ShouldUseSameServiceProvider(DbContextOptionsExtensionInfo other)
=> other is ExtensionInfo otherInfo
&& Extension.UseLazyLoadingProxies == otherInfo.Extension.UseLazyLoadingProxies
&& Extension.IgnoreNonVirtualNavigations == otherInfo.Extension.IgnoreNonVirtualNavigations
&& Extension.UseChangeTrackingProxies == otherInfo.Extension.UseChangeTrackingProxies
&& Extension.CheckEquality == otherInfo.Extension.CheckEquality;
public override void PopulateDebugInfo(IDictionary<string, string> debugInfo)
{
debugInfo["Proxies:" + nameof(ProxiesExtensions.UseLazyLoadingProxies)]
= (Extension._useLazyLoadingProxies ? 541 : 0).ToString(CultureInfo.InvariantCulture);
debugInfo["Proxies:" + nameof(ProxiesExtensions.UseChangeTrackingProxies)]
= (Extension._useChangeTrackingProxies ? 541 : 0).ToString(CultureInfo.InvariantCulture);
}
}
}
| ExtensionInfo |
csharp | ServiceStack__ServiceStack | ServiceStack/src/ServiceStack.Interfaces/IService.cs | {
"start": 3391,
"end": 3459
} | public interface ____<T>
{
Task AnyAsync(T request);
}
| IAnyVoidAsync |
csharp | nopSolutions__nopCommerce | src/Plugins/Nop.Plugin.Misc.WebApi.Frontend/Controllers/WebApiFrontendController.cs | {
"start": 290,
"end": 866
} | public class ____ : BasePluginController
{
#region Fields
protected readonly IPermissionService _permissionService;
#endregion
#region Ctor
public WebApiFrontendController(IPermissionService permissionService)
{
_permissionService = permissionService;
}
#endregion
#region Methods
[CheckPermission(StandardPermission.Configuration.MANAGE_PLUGINS)]
public virtual IActionResult Configure()
{
return View("~/Plugins/Misc.WebApi.Frontend/Views/Configure.cshtml");
}
#endregion
} | WebApiFrontendController |
csharp | dotnet__aspire | src/Aspire.Hosting/Backchannel/BackchannelDataTypes.cs | {
"start": 7021,
"end": 7416
} | internal class ____
{
public required string Text { get; init; }
public bool IsErrorMessage { get; init; }
public int? LineNumber { get; init; }
/// <summary>
/// Additional info about type of the message.
/// Should be used for controlling the display style.
/// </summary>
public string? Type { get; init; }
public int? ExitCode { get; init; }
}
| CommandOutput |
csharp | unoplatform__uno | src/Uno.UI/DirectUI/JoltCollections.h.cs | {
"start": 8648,
"end": 9368
} | class ____<DependencyObject>:
// public ViewBase<DependencyObject>
// {
// public:
// IFACEMETHOD(IndexOf)(
// DependencyObject value,
// out uint index,
// out bool found) override;
// };
// IUntypedVector: public DependencyObject
// {
// IFACEMETHOD(UntypedAppend)( DependencyObject pItem);
// IFACEMETHOD(UntypedGetSize)(out uint * pSize);
// IFACEMETHOD(UntypedGetAt)( uint index, out DependencyObject ppItem);
// IFACEMETHOD(UntypedInsertAt)( uint index, DependencyObject pItem);
// IFACEMETHOD(UntypedRemoveAt)( uint index);
// IFACEMETHOD(UntypedClear)();
// };
// bool UntypedTryGetIndexOf( IUntypedVector vector, DependencyObject item, out uint * index);
// template < | View |
csharp | nopSolutions__nopCommerce | src/Presentation/Nop.Web/Areas/Admin/Models/Orders/CheckoutAttributeValueModel.cs | {
"start": 1697,
"end": 1956
} | public partial record ____ : ILocalizedLocaleModel
{
public int LanguageId { get; set; }
[NopResourceDisplayName("Admin.Catalog.Attributes.CheckoutAttributes.Values.Fields.Name")]
public string Name { get; set; }
} | CheckoutAttributeValueLocalizedModel |
csharp | simplcommerce__SimplCommerce | src/Modules/SimplCommerce.Module.Inventory/Areas/Inventory/ViewModels/StockVm.cs | {
"start": 75,
"end": 247
} | public class ____
{
public long ProductId { get; set; }
public int AdjustedQuantity { get; set; }
public string Note { get; set; }
}
}
| StockVm |
csharp | MaterialDesignInXAML__MaterialDesignInXamlToolkit | src/MaterialDesignThemes.Wpf/HintProxyFabric.TextBox.cs | {
"start": 88,
"end": 1959
} | private sealed class ____ : IHintProxy
{
private readonly TextBox _textBox;
public bool IsLoaded => _textBox.IsLoaded;
public bool IsVisible => _textBox.IsVisible;
public bool IsEmpty() => string.IsNullOrEmpty(_textBox.Text);
public bool IsFocused() => _textBox.IsKeyboardFocusWithin;
public event EventHandler? ContentChanged;
public event EventHandler? IsVisibleChanged;
public event EventHandler? Loaded;
public event EventHandler? FocusedChanged;
public TextBoxHintProxy(TextBox textBox)
{
_textBox = textBox ?? throw new ArgumentNullException(nameof(textBox));
_textBox.TextChanged += TextBoxTextChanged;
_textBox.Loaded += TextBoxLoaded;
_textBox.IsVisibleChanged += TextBoxIsVisibleChanged;
_textBox.IsKeyboardFocusWithinChanged += TextBoxIsKeyboardFocusedChanged;
}
private void TextBoxIsKeyboardFocusedChanged(object sender, DependencyPropertyChangedEventArgs e)
=> FocusedChanged?.Invoke(sender, EventArgs.Empty);
private void TextBoxIsVisibleChanged(object sender, DependencyPropertyChangedEventArgs e)
=> IsVisibleChanged?.Invoke(sender, EventArgs.Empty);
private void TextBoxLoaded(object sender, RoutedEventArgs e)
=> Loaded?.Invoke(sender, EventArgs.Empty);
private void TextBoxTextChanged(object sender, TextChangedEventArgs e)
=> ContentChanged?.Invoke(sender, EventArgs.Empty);
public void Dispose()
{
_textBox.TextChanged -= TextBoxTextChanged;
_textBox.Loaded -= TextBoxLoaded;
_textBox.IsVisibleChanged -= TextBoxIsVisibleChanged;
_textBox.IsKeyboardFocusWithinChanged -= TextBoxIsKeyboardFocusedChanged;
}
}
}
| TextBoxHintProxy |
csharp | ServiceStack__ServiceStack | ServiceStack.Blazor/tests/ServiceStack.Blazor.Server.Tests/ServiceModel/Bookings.cs | {
"start": 1994,
"end": 2888
} | public class ____ : ICreateDb<Booking>, IReturn<IdResponse>
{
[Description("Name this Booking is for"), ValidateNotEmpty]
public string Name { get; set; }
public RoomType RoomType { get; set; }
[ValidateGreaterThan(0)]
public int RoomNumber { get; set; }
[ValidateGreaterThan(0)]
public decimal Cost { get; set; }
[Required]
public DateTime BookingStartDate { get; set; }
public DateTime? BookingEndDate { get; set; }
[Input(Type = "textarea")]
public string? Notes { get; set; }
public string? CouponId { get; set; }
}
[Tag("bookings"), Description("Update an existing Booking")]
[Notes("Find out how to quickly create a <a class='svg-external' target='_blank' href='https://youtu.be/rSFiikDjGos'>C# Bookings App from Scratch</a>")]
[Route("/booking/{Id}", "PATCH")]
[ValidateHasRole("Employee")]
[AutoApply(Behavior.AuditModify)]
| CreateBooking |
csharp | dotnet__maui | src/Controls/src/Core/Page/NavigatedFromEventArgs.cs | {
"start": 71,
"end": 391
} | public sealed class ____ : EventArgs
{
public NavigationType NavigationType { get; }
public Page DestinationPage { get; }
internal NavigatedFromEventArgs(Page destinationPage, NavigationType navigationType)
{
DestinationPage = destinationPage;
NavigationType = navigationType;
}
}
} | NavigatedFromEventArgs |
csharp | HangfireIO__Hangfire | src/Hangfire.Core/ITimeZoneResolver.cs | {
"start": 779,
"end": 1011
} | public sealed class ____ : ITimeZoneResolver
{
public TimeZoneInfo GetTimeZoneById(string timeZoneId)
{
return TimeZoneInfo.FindSystemTimeZoneById(timeZoneId);
}
}
| DefaultTimeZoneResolver |
csharp | moq__moq4 | src/Moq.Tests/Regressions/IssueReportsFixture.cs | {
"start": 161225,
"end": 161386
} | public interface ____
{
void Do<T1, T2>() where T2 : T1;
}
}
#endregion
#region #251
| ITest |
csharp | graphql-dotnet__graphql-dotnet | src/GraphQL.Analyzers.Tests/AwaitableResolverAnalyzerTests.cs | {
"start": 13334,
"end": 14948
} | public class ____ : ObjectGraphType
{
public MyGraphType()
{
Field<StringGraphType>("Test").ResolveAsync(async ctx => await {{resolver}});
}
private string Resolve(IResolveFieldContext<object> ctx) => throw new NotImplementedException();
private Task<string> ResolveAsync(IResolveFieldContext<object> ctx) => throw new NotImplementedException();
private ValueTask<string> ResolveValueAsync(IResolveFieldContext<object> ctx) => throw new NotImplementedException();
}
""";
var expected = report
? new[] { VerifyCS.Diagnostic().WithLocation(0).WithArguments(Constants.MethodNames.ResolveAsync) }
: DiagnosticResult.EmptyDiagnosticResults;
string expectedFix = report ? fix : source;
await VerifyCS.VerifyCodeFixAsync(source, expected, expectedFix);
}
[Theory]
[InlineData("\"text\"", false)]
[InlineData("Resolve(ctx)", false)]
[InlineData("Task.FromResult(\"text\")", true)]
[InlineData("ValueTask.FromResult(\"text\")", true)]
[InlineData("ResolveAsync(ctx)", true)]
[InlineData("ResolveValueAsync(ctx)", true)]
public async Task SyncResolve_AwaitableBlockResolver_GQL009_Fixed(string resolver, bool report)
{
string source =
$$"""
using System;
using System.Threading.Tasks;
using GraphQL;
using GraphQL.Types;
namespace Sample.Server;
| MyGraphType |
csharp | CommunityToolkit__dotnet | src/CommunityToolkit.Common/Helpers/ObjectStorage/ISettingsStorageHelper.cs | {
"start": 266,
"end": 419
} | interface ____ to store data using key value pairs.
/// </summary>
/// <typeparam name="TKey">The type of keys to use for accessing values.</typeparam>
| used |
csharp | DapperLib__Dapper | tests/Dapper.Tests/TypeHandlerTests.cs | {
"start": 24365,
"end": 24543
} | private class ____
{
public int X { get; set; }
public int Y { get; set; }
public int Z { get; set; }
}
| ResultsChangeType |
csharp | dotnetcore__FreeSql | FreeSql/Internal/Model/DbToCs.cs | {
"start": 108,
"end": 1035
} | public class ____
{
public string csConvert { get; }
public string csParse { get; }
public string csStringify { get; }
public string csType { get; }
public Type csTypeInfo { get; }
public Type csNullableTypeInfo { get; }
public string csTypeValue { get; }
public string dataReaderMethod { get; }
public DbToCs(string csConvert, string csParse, string csStringify, string csType, Type csTypeInfo, Type csNullableTypeInfo, string csTypeValue, string dataReaderMethod)
{
this.csConvert = csConvert;
this.csParse = csParse;
this.csStringify = csStringify;
this.csType = csType;
this.csTypeInfo = csTypeInfo;
this.csNullableTypeInfo = csNullableTypeInfo;
this.csTypeValue = csTypeValue;
this.dataReaderMethod = dataReaderMethod;
}
}
| DbToCs |
csharp | dotnet__efcore | test/EFCore.SqlServer.FunctionalTests/MonsterFixupChangedChangingSqlServerTest.cs | {
"start": 480,
"end": 2126
} | public class ____ : MonsterFixupChangedChangingFixtureBase
{
protected override ITestStoreFactory TestStoreFactory
=> SqlServerTestStoreFactory.Instance;
protected override void OnModelCreating<TMessage, TProduct, TProductPhoto, TProductReview, TComputerDetail, TDimensions>(
ModelBuilder builder)
{
base.OnModelCreating<TMessage, TProduct, TProductPhoto, TProductReview, TComputerDetail, TDimensions>(builder);
builder.Entity<TMessage>().Property(e => e.MessageId).UseIdentityColumn();
builder.Entity<TProduct>()
.OwnsOne(
c => (TDimensions)c.Dimensions, db =>
{
db.Property(d => d.Depth).HasColumnType("decimal(18,2)");
db.Property(d => d.Width).HasColumnType("decimal(18,2)");
db.Property(d => d.Height).HasColumnType("decimal(18,2)");
});
builder.Entity<TProductPhoto>().Property(e => e.PhotoId).UseIdentityColumn();
builder.Entity<TProductReview>().Property(e => e.ReviewId).UseIdentityColumn();
builder.Entity<TComputerDetail>()
.OwnsOne(
c => (TDimensions)c.Dimensions, db =>
{
db.Property(d => d.Depth).HasColumnType("decimal(18,2)");
db.Property(d => d.Width).HasColumnType("decimal(18,2)");
db.Property(d => d.Height).HasColumnType("decimal(18,2)");
});
}
}
}
| MonsterFixupChangedChangingSqlServerFixture |
csharp | bitwarden__server | test/Core.Test/AdminConsole/OrganizationFeatures/OrganizationUsers/DeleteClaimedAccountvNext/DeleteClaimedOrganizationUserAccountValidatorTests.cs | {
"start": 531,
"end": 17591
} | public class ____
{
[Theory]
[BitAutoData]
public async Task ValidateAsync_WithValidSingleRequest_ReturnsValidResult(
SutProvider<DeleteClaimedOrganizationUserAccountValidator> sutProvider,
User user,
Guid organizationId,
Guid deletingUserId,
[OrganizationUser] OrganizationUser organizationUser)
{
organizationUser.UserId = user.Id;
organizationUser.OrganizationId = organizationId;
var request = new DeleteUserValidationRequest
{
OrganizationId = organizationId,
OrganizationUserId = organizationUser.Id,
OrganizationUser = organizationUser,
User = user,
DeletingUserId = deletingUserId,
IsClaimed = true
};
SetupMocks(sutProvider, organizationId, user.Id);
var results = await sutProvider.Sut.ValidateAsync([request]);
var resultsList = results.ToList();
Assert.Single(resultsList);
Assert.True(resultsList[0].IsValid);
Assert.Equal(request, resultsList[0].Request);
}
[Theory]
[BitAutoData]
public async Task ValidateAsync_WithMultipleValidRequests_ReturnsAllValidResults(
SutProvider<DeleteClaimedOrganizationUserAccountValidator> sutProvider,
User user1,
User user2,
Guid organizationId,
Guid deletingUserId,
[OrganizationUser] OrganizationUser orgUser1,
[OrganizationUser(OrganizationUserStatusType.Accepted)] OrganizationUser orgUser2)
{
orgUser1.UserId = user1.Id;
orgUser1.OrganizationId = organizationId;
orgUser2.UserId = user2.Id;
orgUser2.OrganizationId = organizationId;
var request1 = new DeleteUserValidationRequest
{
OrganizationId = organizationId,
OrganizationUserId = orgUser1.Id,
OrganizationUser = orgUser1,
User = user1,
DeletingUserId = deletingUserId,
IsClaimed = true
};
var request2 = new DeleteUserValidationRequest
{
OrganizationId = organizationId,
OrganizationUserId = orgUser2.Id,
OrganizationUser = orgUser2,
User = user2,
DeletingUserId = deletingUserId,
IsClaimed = true
};
SetupMocks(sutProvider, organizationId, user1.Id);
SetupMocks(sutProvider, organizationId, user2.Id);
var results = await sutProvider.Sut.ValidateAsync([request1, request2]);
var resultsList = results.ToList();
Assert.Equal(2, resultsList.Count);
Assert.All(resultsList, result => Assert.True(result.IsValid));
}
[Theory]
[BitAutoData]
public async Task ValidateAsync_WithNullUser_ReturnsUserNotFoundError(
SutProvider<DeleteClaimedOrganizationUserAccountValidator> sutProvider,
Guid organizationId,
Guid deletingUserId,
[OrganizationUser] OrganizationUser organizationUser)
{
var request = new DeleteUserValidationRequest
{
OrganizationId = organizationId,
OrganizationUserId = organizationUser.Id,
OrganizationUser = organizationUser,
User = null,
DeletingUserId = deletingUserId,
IsClaimed = true
};
var results = await sutProvider.Sut.ValidateAsync([request]);
var resultsList = results.ToList();
Assert.Single(resultsList);
Assert.True(resultsList[0].IsError);
Assert.IsType<UserNotFoundError>(resultsList[0].AsError);
}
[Theory]
[BitAutoData]
public async Task ValidateAsync_WithNullOrganizationUser_ReturnsUserNotFoundError(
SutProvider<DeleteClaimedOrganizationUserAccountValidator> sutProvider,
User user,
Guid organizationId,
Guid deletingUserId)
{
var request = new DeleteUserValidationRequest
{
OrganizationId = organizationId,
OrganizationUserId = Guid.NewGuid(),
OrganizationUser = null,
User = user,
DeletingUserId = deletingUserId,
IsClaimed = true
};
var results = await sutProvider.Sut.ValidateAsync([request]);
var resultsList = results.ToList();
Assert.Single(resultsList);
Assert.True(resultsList[0].IsError);
Assert.IsType<UserNotFoundError>(resultsList[0].AsError);
}
[Theory]
[BitAutoData]
public async Task ValidateAsync_WithInvitedUser_ReturnsInvalidUserStatusError(
SutProvider<DeleteClaimedOrganizationUserAccountValidator> sutProvider,
User user,
Guid organizationId,
Guid deletingUserId,
[OrganizationUser(OrganizationUserStatusType.Invited)] OrganizationUser organizationUser)
{
organizationUser.UserId = user.Id;
var request = new DeleteUserValidationRequest
{
OrganizationId = organizationId,
OrganizationUserId = organizationUser.Id,
OrganizationUser = organizationUser,
User = user,
DeletingUserId = deletingUserId,
IsClaimed = true
};
var results = await sutProvider.Sut.ValidateAsync([request]);
var resultsList = results.ToList();
Assert.Single(resultsList);
Assert.True(resultsList[0].IsError);
Assert.IsType<InvalidUserStatusError>(resultsList[0].AsError);
}
[Theory]
[BitAutoData]
public async Task ValidateAsync_WhenDeletingYourself_ReturnsCannotDeleteYourselfError(
SutProvider<DeleteClaimedOrganizationUserAccountValidator> sutProvider,
User user,
Guid organizationId,
[OrganizationUser] OrganizationUser organizationUser)
{
organizationUser.UserId = user.Id;
var request = new DeleteUserValidationRequest
{
OrganizationId = organizationId,
OrganizationUserId = organizationUser.Id,
OrganizationUser = organizationUser,
User = user,
DeletingUserId = user.Id,
IsClaimed = true
};
var results = await sutProvider.Sut.ValidateAsync([request]);
var resultsList = results.ToList();
Assert.Single(resultsList);
Assert.True(resultsList[0].IsError);
Assert.IsType<CannotDeleteYourselfError>(resultsList[0].AsError);
}
[Theory]
[BitAutoData]
public async Task ValidateAsync_WithUnclaimedUser_ReturnsUserNotClaimedError(
SutProvider<DeleteClaimedOrganizationUserAccountValidator> sutProvider,
User user,
Guid organizationId,
Guid deletingUserId,
[OrganizationUser] OrganizationUser organizationUser)
{
organizationUser.UserId = user.Id;
var request = new DeleteUserValidationRequest
{
OrganizationId = organizationId,
OrganizationUserId = organizationUser.Id,
OrganizationUser = organizationUser,
User = user,
DeletingUserId = deletingUserId,
IsClaimed = false
};
var results = await sutProvider.Sut.ValidateAsync([request]);
var resultsList = results.ToList();
Assert.Single(resultsList);
Assert.True(resultsList[0].IsError);
Assert.IsType<UserNotClaimedError>(resultsList[0].AsError);
}
[Theory]
[BitAutoData]
public async Task ValidateAsync_DeletingOwnerWhenCurrentUserIsNotOwner_ReturnsCannotDeleteOwnersError(
SutProvider<DeleteClaimedOrganizationUserAccountValidator> sutProvider,
User user,
Guid organizationId,
Guid deletingUserId,
[OrganizationUser(OrganizationUserStatusType.Confirmed, OrganizationUserType.Owner)] OrganizationUser organizationUser)
{
organizationUser.UserId = user.Id;
var request = new DeleteUserValidationRequest
{
OrganizationId = organizationId,
OrganizationUserId = organizationUser.Id,
OrganizationUser = organizationUser,
User = user,
DeletingUserId = deletingUserId,
IsClaimed = true
};
SetupMocks(sutProvider, organizationId, user.Id, OrganizationUserType.Admin);
var results = await sutProvider.Sut.ValidateAsync([request]);
var resultsList = results.ToList();
Assert.Single(resultsList);
Assert.True(resultsList[0].IsError);
Assert.IsType<CannotDeleteOwnersError>(resultsList[0].AsError);
}
[Theory]
[BitAutoData]
public async Task ValidateAsync_DeletingOwnerWhenCurrentUserIsOwner_ReturnsValidResult(
SutProvider<DeleteClaimedOrganizationUserAccountValidator> sutProvider,
User user,
Guid organizationId,
Guid deletingUserId,
[OrganizationUser(OrganizationUserStatusType.Confirmed, OrganizationUserType.Owner)] OrganizationUser organizationUser)
{
organizationUser.UserId = user.Id;
var request = new DeleteUserValidationRequest
{
OrganizationId = organizationId,
OrganizationUserId = organizationUser.Id,
OrganizationUser = organizationUser,
User = user,
DeletingUserId = deletingUserId,
IsClaimed = true
};
SetupMocks(sutProvider, organizationId, user.Id);
var results = await sutProvider.Sut.ValidateAsync([request]);
var resultsList = results.ToList();
Assert.Single(resultsList);
Assert.True(resultsList[0].IsValid);
}
[Theory]
[BitAutoData]
public async Task ValidateAsync_WithSoleOwnerOfOrganization_ReturnsSoleOwnerError(
SutProvider<DeleteClaimedOrganizationUserAccountValidator> sutProvider,
User user,
Guid organizationId,
Guid deletingUserId,
[OrganizationUser] OrganizationUser organizationUser)
{
organizationUser.UserId = user.Id;
var request = new DeleteUserValidationRequest
{
OrganizationId = organizationId,
OrganizationUserId = organizationUser.Id,
OrganizationUser = organizationUser,
User = user,
DeletingUserId = deletingUserId,
IsClaimed = true
};
SetupMocks(sutProvider, organizationId, user.Id);
sutProvider.GetDependency<IOrganizationUserRepository>()
.GetCountByOnlyOwnerAsync(user.Id)
.Returns(1);
var results = await sutProvider.Sut.ValidateAsync([request]);
var resultsList = results.ToList();
Assert.Single(resultsList);
Assert.True(resultsList[0].IsError);
Assert.IsType<SoleOwnerError>(resultsList[0].AsError);
}
[Theory]
[BitAutoData]
public async Task ValidateAsync_WithSoleProviderOwner_ReturnsSoleProviderError(
SutProvider<DeleteClaimedOrganizationUserAccountValidator> sutProvider,
User user,
Guid organizationId,
Guid deletingUserId,
[OrganizationUser] OrganizationUser organizationUser)
{
organizationUser.UserId = user.Id;
var request = new DeleteUserValidationRequest
{
OrganizationId = organizationId,
OrganizationUserId = organizationUser.Id,
OrganizationUser = organizationUser,
User = user,
DeletingUserId = deletingUserId,
IsClaimed = true
};
SetupMocks(sutProvider, organizationId, user.Id);
sutProvider.GetDependency<IProviderUserRepository>()
.GetCountByOnlyOwnerAsync(user.Id)
.Returns(1);
var results = await sutProvider.Sut.ValidateAsync([request]);
var resultsList = results.ToList();
Assert.Single(resultsList);
Assert.True(resultsList[0].IsError);
Assert.IsType<SoleProviderError>(resultsList[0].AsError);
}
[Theory]
[BitAutoData]
public async Task ValidateAsync_CustomUserDeletingAdmin_ReturnsCannotDeleteAdminsError(
SutProvider<DeleteClaimedOrganizationUserAccountValidator> sutProvider,
User user,
Guid organizationId,
Guid deletingUserId,
[OrganizationUser(OrganizationUserStatusType.Confirmed, OrganizationUserType.Admin)] OrganizationUser organizationUser)
{
organizationUser.UserId = user.Id;
var request = new DeleteUserValidationRequest
{
OrganizationId = organizationId,
OrganizationUserId = organizationUser.Id,
OrganizationUser = organizationUser,
User = user,
DeletingUserId = deletingUserId,
IsClaimed = true
};
SetupMocks(sutProvider, organizationId, user.Id, OrganizationUserType.Custom);
var results = await sutProvider.Sut.ValidateAsync([request]);
var resultsList = results.ToList();
Assert.Single(resultsList);
Assert.True(resultsList[0].IsError);
Assert.IsType<CannotDeleteAdminsError>(resultsList[0].AsError);
}
[Theory]
[BitAutoData]
public async Task ValidateAsync_AdminDeletingAdmin_ReturnsValidResult(
SutProvider<DeleteClaimedOrganizationUserAccountValidator> sutProvider,
User user,
Guid organizationId,
Guid deletingUserId,
[OrganizationUser(OrganizationUserStatusType.Confirmed, OrganizationUserType.Admin)] OrganizationUser organizationUser)
{
organizationUser.UserId = user.Id;
var request = new DeleteUserValidationRequest
{
OrganizationId = organizationId,
OrganizationUserId = organizationUser.Id,
OrganizationUser = organizationUser,
User = user,
DeletingUserId = deletingUserId,
IsClaimed = true
};
SetupMocks(sutProvider, organizationId, user.Id, OrganizationUserType.Admin);
var results = await sutProvider.Sut.ValidateAsync([request]);
var resultsList = results.ToList();
Assert.Single(resultsList);
Assert.True(resultsList[0].IsValid);
}
[Theory]
[BitAutoData]
public async Task ValidateAsync_WithMixedValidAndInvalidRequests_ReturnsCorrespondingResults(
SutProvider<DeleteClaimedOrganizationUserAccountValidator> sutProvider,
User validUser,
User invalidUser,
Guid organizationId,
Guid deletingUserId,
[OrganizationUser] OrganizationUser validOrgUser,
[OrganizationUser(OrganizationUserStatusType.Invited)] OrganizationUser invalidOrgUser)
{
validOrgUser.UserId = validUser.Id;
invalidOrgUser.UserId = invalidUser.Id;
var validRequest = new DeleteUserValidationRequest
{
OrganizationId = organizationId,
OrganizationUserId = validOrgUser.Id,
OrganizationUser = validOrgUser,
User = validUser,
DeletingUserId = deletingUserId,
IsClaimed = true
};
var invalidRequest = new DeleteUserValidationRequest
{
OrganizationId = organizationId,
OrganizationUserId = invalidOrgUser.Id,
OrganizationUser = invalidOrgUser,
User = invalidUser,
DeletingUserId = deletingUserId,
IsClaimed = true
};
SetupMocks(sutProvider, organizationId, validUser.Id);
var results = await sutProvider.Sut.ValidateAsync([validRequest, invalidRequest]);
var resultsList = results.ToList();
Assert.Equal(2, resultsList.Count);
var validResult = resultsList.First(r => r.Request == validRequest);
var invalidResult = resultsList.First(r => r.Request == invalidRequest);
Assert.True(validResult.IsValid);
Assert.True(invalidResult.IsError);
Assert.IsType<InvalidUserStatusError>(invalidResult.AsError);
}
private static void SetupMocks(
SutProvider<DeleteClaimedOrganizationUserAccountValidator> sutProvider,
Guid organizationId,
Guid userId,
OrganizationUserType currentUserType = OrganizationUserType.Owner)
{
sutProvider.GetDependency<ICurrentContext>()
.OrganizationOwner(organizationId)
.Returns(currentUserType == OrganizationUserType.Owner);
sutProvider.GetDependency<ICurrentContext>()
.OrganizationAdmin(organizationId)
.Returns(currentUserType is OrganizationUserType.Owner or OrganizationUserType.Admin);
sutProvider.GetDependency<ICurrentContext>()
.OrganizationCustom(organizationId)
.Returns(currentUserType is OrganizationUserType.Custom);
sutProvider.GetDependency<IOrganizationUserRepository>()
.GetCountByOnlyOwnerAsync(userId)
.Returns(0);
sutProvider.GetDependency<IProviderUserRepository>()
.GetCountByOnlyOwnerAsync(userId)
.Returns(0);
}
}
| DeleteClaimedOrganizationUserAccountValidatorTests |
csharp | EduardoPires__EquinoxProject | src/Equinox.Infra.Data/Context/EquinoxContext.cs | {
"start": 330,
"end": 2398
} | public sealed class ____ : DbContext, IUnitOfWork
{
private readonly IMediatorHandler _mediatorHandler;
public EquinoxContext(DbContextOptions<EquinoxContext> options, IMediatorHandler mediatorHandler) : base(options)
{
_mediatorHandler = mediatorHandler;
ChangeTracker.QueryTrackingBehavior = QueryTrackingBehavior.NoTracking;
ChangeTracker.AutoDetectChangesEnabled = false;
}
public DbSet<Customer> Customers { get; set; }
protected override void OnModelCreating(ModelBuilder modelBuilder)
{
modelBuilder.Ignore<ValidationResult>();
modelBuilder.Ignore<Event>();
foreach (var property in modelBuilder.Model.GetEntityTypes().SelectMany(
e => e.GetProperties().Where(p => p.ClrType == typeof(string))))
property.SetColumnType("varchar(100)");
modelBuilder.ApplyConfiguration(new CustomerMap());
base.OnModelCreating(modelBuilder);
}
public async Task<bool> Commit()
{
// Dispatch Domain Events collection.
// Choices:
// A) Right BEFORE committing data (EF SaveChanges) into the DB will make a single transaction including
// side effects from the domain event handlers which are using the same DbContext with "InstancePerLifetimeScope" or "scoped" lifetime
// B) Right AFTER committing data (EF SaveChanges) into the DB will make multiple transactions.
// You will need to handle eventual consistency and compensatory actions in case of failures in any of the Handlers.
await _mediatorHandler.PublishDomainEvents(this).ConfigureAwait(false);
// After executing this line all the changes (from the Command Handler and Domain Event Handlers)
// performed through the DbContext will be committed
var success = await SaveChangesAsync() > 0;
return success;
}
}
| EquinoxContext |
csharp | ardalis__Result | src/Ardalis.Result/ValidationSeverity.cs | {
"start": 32,
"end": 122
} | public enum ____
{
Error,
Warning,
Info
}
}
| ValidationSeverity |
csharp | bitwarden__server | test/Core.Test/Services/DeviceServiceTests.cs | {
"start": 537,
"end": 13766
} | public class ____
{
[Theory]
[BitAutoData]
public async Task SaveAsync_IdProvided_UpdatedRevisionDateAndPushRegistration(Guid id, Guid userId,
Guid organizationId1, Guid organizationId2, Guid installationId,
OrganizationUserOrganizationDetails organizationUserOrganizationDetails1,
OrganizationUserOrganizationDetails organizationUserOrganizationDetails2)
{
organizationUserOrganizationDetails1.OrganizationId = organizationId1;
organizationUserOrganizationDetails2.OrganizationId = organizationId2;
var deviceRepo = Substitute.For<IDeviceRepository>();
var pushRepo = Substitute.For<IPushRegistrationService>();
var organizationUserRepository = Substitute.For<IOrganizationUserRepository>();
organizationUserRepository.GetManyDetailsByUserAsync(Arg.Any<Guid>(), Arg.Any<OrganizationUserStatusType?>())
.Returns([organizationUserOrganizationDetails1, organizationUserOrganizationDetails2]);
var globalSettings = Substitute.For<IGlobalSettings>();
globalSettings.Installation.Id.Returns(installationId);
var deviceService = new DeviceService(deviceRepo, pushRepo, organizationUserRepository, globalSettings);
var device = new Device
{
Id = id,
Name = "test device",
Type = DeviceType.Android,
UserId = userId,
PushToken = "testToken",
Identifier = "testid"
};
await deviceService.SaveAsync(device);
Assert.True(device.RevisionDate - DateTime.UtcNow < TimeSpan.FromSeconds(1));
await pushRepo.Received(1).CreateOrUpdateRegistrationAsync(Arg.Is<PushRegistrationData>(v => v.Token == "testToken"), id.ToString(),
userId.ToString(), "testid", DeviceType.Android,
Arg.Do<IEnumerable<string>>(organizationIds =>
{
var organizationIdsList = organizationIds.ToList();
Assert.Equal(2, organizationIdsList.Count);
Assert.Contains(organizationId1.ToString(), organizationIdsList);
Assert.Contains(organizationId2.ToString(), organizationIdsList);
}), installationId);
}
[Theory]
[BitAutoData]
public async Task SaveAsync_IdNotProvided_CreatedAndPushRegistration(Guid userId, Guid organizationId1,
Guid organizationId2, Guid installationId,
OrganizationUserOrganizationDetails organizationUserOrganizationDetails1,
OrganizationUserOrganizationDetails organizationUserOrganizationDetails2)
{
organizationUserOrganizationDetails1.OrganizationId = organizationId1;
organizationUserOrganizationDetails2.OrganizationId = organizationId2;
var deviceRepo = Substitute.For<IDeviceRepository>();
var pushRepo = Substitute.For<IPushRegistrationService>();
var organizationUserRepository = Substitute.For<IOrganizationUserRepository>();
organizationUserRepository.GetManyDetailsByUserAsync(Arg.Any<Guid>(), Arg.Any<OrganizationUserStatusType?>())
.Returns([organizationUserOrganizationDetails1, organizationUserOrganizationDetails2]);
var globalSettings = Substitute.For<IGlobalSettings>();
globalSettings.Installation.Id.Returns(installationId);
var deviceService = new DeviceService(deviceRepo, pushRepo, organizationUserRepository, globalSettings);
var device = new Device
{
Name = "test device",
Type = DeviceType.Android,
UserId = userId,
PushToken = "testToken",
Identifier = "testid"
};
await deviceService.SaveAsync(device);
await pushRepo.Received(1).CreateOrUpdateRegistrationAsync(Arg.Is<PushRegistrationData>(v => v.Token == "testToken"),
Arg.Do<string>(id => Guid.TryParse(id, out var _)), userId.ToString(), "testid", DeviceType.Android,
Arg.Do<IEnumerable<string>>(organizationIds =>
{
var organizationIdsList = organizationIds.ToList();
Assert.Equal(2, organizationIdsList.Count);
Assert.Contains(organizationId1.ToString(), organizationIdsList);
Assert.Contains(organizationId2.ToString(), organizationIdsList);
}), installationId);
}
/// <summary>
/// Story: A user chose to keep trust in one of their current trusted devices, but not in another one of their
/// devices. We will rotate the trust of the currently signed in device as well as the device they chose but will
/// remove the trust of the device they didn't give new keys for.
/// </summary>
[Theory, BitAutoData]
public async Task UpdateDevicesTrustAsync_Works(
SutProvider<DeviceService> sutProvider,
Guid currentUserId,
Device deviceOne,
Device deviceTwo,
Device deviceThree)
{
SetupOldTrust(deviceOne);
SetupOldTrust(deviceTwo);
SetupOldTrust(deviceThree);
deviceOne.Identifier = "current_device";
sutProvider.GetDependency<IDeviceRepository>()
.GetManyByUserIdAsync(currentUserId)
.Returns(new List<Device> { deviceOne, deviceTwo, deviceThree, });
var currentDeviceModel = new DeviceKeysUpdateRequestModel
{
EncryptedPublicKey = "current_encrypted_public_key",
EncryptedUserKey = "current_encrypted_user_key",
};
var alteredDeviceModels = new List<OtherDeviceKeysUpdateRequestModel>
{
new OtherDeviceKeysUpdateRequestModel
{
DeviceId = deviceTwo.Id,
EncryptedPublicKey = "encrypted_public_key_two",
EncryptedUserKey = "encrypted_user_key_two",
},
};
await sutProvider.Sut.UpdateDevicesTrustAsync("current_device", currentUserId, currentDeviceModel,
alteredDeviceModels);
// Updating trust, "current" or "other" only needs to change the EncryptedPublicKey & EncryptedUserKey
await sutProvider.GetDependency<IDeviceRepository>()
.Received(1)
.UpsertAsync(Arg.Is<Device>(d =>
d.Id == deviceOne.Id &&
d.EncryptedPublicKey == "current_encrypted_public_key" &&
d.EncryptedUserKey == "current_encrypted_user_key" &&
d.EncryptedPrivateKey == "old_private_deviceOne"));
await sutProvider.GetDependency<IDeviceRepository>()
.Received(1)
.UpsertAsync(Arg.Is<Device>(d =>
d.Id == deviceTwo.Id &&
d.EncryptedPublicKey == "encrypted_public_key_two" &&
d.EncryptedUserKey == "encrypted_user_key_two" &&
d.EncryptedPrivateKey == "old_private_deviceTwo"));
// Clearing trust should remove all key values
await sutProvider.GetDependency<IDeviceRepository>()
.Received(1)
.UpsertAsync(Arg.Is<Device>(d =>
d.Id == deviceThree.Id &&
d.EncryptedPublicKey == null &&
d.EncryptedUserKey == null &&
d.EncryptedPrivateKey == null));
// Should have recieved a total of 3 calls, the ones asserted above
await sutProvider.GetDependency<IDeviceRepository>()
.Received(3)
.UpsertAsync(Arg.Any<Device>());
static void SetupOldTrust(Device device, [CallerArgumentExpression(nameof(device))] string expression = null)
{
device.EncryptedPublicKey = $"old_public_{expression}";
device.EncryptedPrivateKey = $"old_private_{expression}";
device.EncryptedUserKey = $"old_user_{expression}";
}
}
/// <summary>
/// Story: This could result from a poor implementation of this method, if they attempt add trust to a device
/// that doesn't already have trust. They would have to create brand new values and for that values to be accurate
/// they would technically have all the values needed to trust a device, that is why we don't consider this bad
/// enough to throw but do skip it because we'd rather keep number of ways for trust to be added to the endpoint we
/// already have.
/// </summary>
[Theory, BitAutoData]
public async Task UpdateDevicesTrustAsync_DoesNotUpdateUntrustedDevices(
SutProvider<DeviceService> sutProvider,
Guid currentUserId,
Device deviceOne,
Device deviceTwo)
{
deviceOne.Identifier = "current_device";
// Make deviceTwo untrusted
deviceTwo.EncryptedUserKey = string.Empty;
deviceTwo.EncryptedPublicKey = string.Empty;
deviceTwo.EncryptedPrivateKey = string.Empty;
sutProvider.GetDependency<IDeviceRepository>()
.GetManyByUserIdAsync(currentUserId)
.Returns(new List<Device> { deviceOne, deviceTwo, });
var currentDeviceModel = new DeviceKeysUpdateRequestModel
{
EncryptedPublicKey = "current_encrypted_public_key",
EncryptedUserKey = "current_encrypted_user_key",
};
var alteredDeviceModels = new List<OtherDeviceKeysUpdateRequestModel>
{
new OtherDeviceKeysUpdateRequestModel
{
DeviceId = deviceTwo.Id,
EncryptedPublicKey = "encrypted_public_key_two",
EncryptedUserKey = "encrypted_user_key_two",
},
};
await sutProvider.Sut.UpdateDevicesTrustAsync("current_device", currentUserId, currentDeviceModel,
alteredDeviceModels);
// Check that UpsertAsync was called for the trusted device
await sutProvider.GetDependency<IDeviceRepository>()
.Received(1)
.UpsertAsync(Arg.Is<Device>(d =>
d.Id == deviceOne.Id &&
d.EncryptedPublicKey == "current_encrypted_public_key" &&
d.EncryptedUserKey == "current_encrypted_user_key"));
// Check that UpsertAsync was not called for the untrusted device
await sutProvider.GetDependency<IDeviceRepository>()
.DidNotReceive()
.UpsertAsync(Arg.Is<Device>(d => d.Id == deviceTwo.Id));
}
/// <summary>
/// Story: This should only happen if someone were to take the access token from a different device and try to rotate
/// a device that they don't actually have.
/// </summary>
[Theory, BitAutoData]
public async Task UpdateDevicesTrustAsync_ThrowsNotFoundException_WhenCurrentDeviceIdentifierDoesNotExist(
SutProvider<DeviceService> sutProvider,
Guid currentUserId,
Device deviceOne,
Device deviceTwo)
{
deviceOne.Identifier = "some_other_device";
deviceTwo.Identifier = "another_device";
sutProvider.GetDependency<IDeviceRepository>()
.GetManyByUserIdAsync(currentUserId)
.Returns(new List<Device> { deviceOne, deviceTwo, });
var currentDeviceModel = new DeviceKeysUpdateRequestModel
{
EncryptedPublicKey = "current_encrypted_public_key",
EncryptedUserKey = "current_encrypted_user_key",
};
await Assert.ThrowsAsync<NotFoundException>(() =>
sutProvider.Sut.UpdateDevicesTrustAsync("current_device", currentUserId, currentDeviceModel,
Enumerable.Empty<OtherDeviceKeysUpdateRequestModel>()));
}
/// <summary>
/// Story: This should only happen from a poorly implemented user of this method but important to enforce someone
/// using the method correctly, a device should only be rotated intentionally and including it as both the current
/// device and one of the users other device would mean they could rotate it twice and we aren't sure
/// which one they would want to win out.
/// </summary>
[Theory, BitAutoData]
public async Task UpdateDevicesTrustAsync_ThrowsBadRequestException_WhenCurrentDeviceIsIncludedInAlteredDevices(
SutProvider<DeviceService> sutProvider,
Guid currentUserId,
Device deviceOne,
Device deviceTwo)
{
deviceOne.Identifier = "current_device";
sutProvider.GetDependency<IDeviceRepository>()
.GetManyByUserIdAsync(currentUserId)
.Returns(new List<Device> { deviceOne, deviceTwo, });
var currentDeviceModel = new DeviceKeysUpdateRequestModel
{
EncryptedPublicKey = "current_encrypted_public_key",
EncryptedUserKey = "current_encrypted_user_key",
};
var alteredDeviceModels = new List<OtherDeviceKeysUpdateRequestModel>
{
new OtherDeviceKeysUpdateRequestModel
{
DeviceId = deviceOne.Id, // current device is included in alteredDevices
EncryptedPublicKey = "encrypted_public_key_one",
EncryptedUserKey = "encrypted_user_key_one",
},
};
await Assert.ThrowsAsync<BadRequestException>(() =>
sutProvider.Sut.UpdateDevicesTrustAsync("current_device", currentUserId, currentDeviceModel,
alteredDeviceModels));
}
}
| DeviceServiceTests |
csharp | dotnet__efcore | test/EFCore.Specification.Tests/JsonTypesTestBase.cs | {
"start": 139875,
"end": 140740
} | protected class ____
{
public IList<EnumU64?> EnumU64Converted { get; set; } = null!;
}
[ConditionalFact]
public virtual Task Can_read_write_collection_of_int_with_converter_JSON_values()
=> Can_read_and_write_JSON_collection_value<DddIdCollectionType, List<DddId>>(
b => b.ElementType(b =>
{
b.HasConversion<DddIdConverter>();
b.IsRequired();
}),
nameof(DddIdCollectionType.DddId),
[
new DddId { Id = int.MinValue },
new DddId { Id = 0 },
new DddId { Id = int.MaxValue }
],
"""{"Prop":[-2147483648,0,2147483647]}""",
facets: new Dictionary<string, object?> { { CoreAnnotationNames.ValueConverter, typeof(DddIdConverter) } });
| NullableEnumU64ConvertedType |
csharp | ChilliCream__graphql-platform | src/StrawberryShake/Tooling/src/dotnet-graphql/InitCommandArguments.cs | {
"start": 114,
"end": 922
} | public class ____
{
public InitCommandArguments(
CommandArgument uri,
CommandOption path,
CommandOption name,
AuthArguments authArguments,
CommandOption customHeaders,
CommandOption fromFile,
CommandOption typeDepth)
{
Uri = uri;
Path = path;
Name = name;
AuthArguments = authArguments;
CustomHeaders = customHeaders;
FromFile = fromFile;
TypeDepth = typeDepth;
}
public CommandArgument Uri { get; }
public CommandOption Path { get; }
public CommandOption Name { get; }
public AuthArguments AuthArguments { get; }
public CommandOption CustomHeaders { get; }
public CommandOption FromFile { get; }
public CommandOption TypeDepth { get; }
}
| InitCommandArguments |
csharp | VerifyTests__Verify | src/Verify/Combinations/CombinationRunner.cs | {
"start": 1,
"end": 4190
} | partial class ____
{
Type[] keyTypes;
int[] indices;
object?[][] lists;
bool captureExceptions;
string[]? columns;
public CombinationRunner(bool? captureExceptions, bool? header, List<IEnumerable<object?>> lists, Type[] keyTypes, ReadOnlySpan<string> columns)
{
this.keyTypes = keyTypes;
if(header ?? CombinationSettings.IncludeHeadersEnabled)
{
this.columns = columns.ToArray();
}
else
{
this.columns = null;
}
this.captureExceptions = captureExceptions ?? CombinationSettings.CaptureExceptionsEnabled;
this.lists = lists.Select(_ => _.ToArray()).ToArray();
indices = new int[lists.Count];
}
Task<CombinationResults> RunWithReturn<TReturn>(Func<object?[], Task<TReturn>> method) =>
InnerRun(async keys =>
{
object? value = await method(keys);
var paused = Recording.IsPaused();
if (Recording.IsRecording() || paused)
{
var appends = Recording.Values().ToList();
value = new InfoBuilder(false, value, appends);
Recording.Clear();
if (paused)
{
Recording.Resume();
}
}
return (CombinationResult.ForValue(keys, value), value);
});
Task<CombinationResults> RunWithVoid(Func<object?[], Task> method) =>
InnerRun(async keys =>
{
await method(keys);
var paused = Recording.IsPaused();
if (Recording.IsRecording() || paused)
{
var appends = Recording.Values().ToList();
var value = new InfoBuilder(false, "void", appends);
Recording.Clear();
if (paused)
{
Recording.Resume();
}
return (CombinationResult.ForValue(keys, value), null);
}
return (CombinationResult.ForVoid(keys), null);
});
async Task<CombinationResults> InnerRun(Func<object?[], Task<(CombinationResult result, object? value)>> method)
{
var items = new List<CombinationResult>();
while (true)
{
var keys = BuildParameters();
try
{
await CombinationSettings.RunBeforeCallbacks(keys);
var (result, value) = await method(keys);
await CombinationSettings.RunAfterCallbacks(keys, value);
items.Add(result);
}
catch (Exception exception)
when (captureExceptions)
{
await CombinationSettings.RunExceptionCallbacks(keys, exception);
var paused = Recording.IsPaused();
if (Recording.IsRecording() || paused)
{
var appends = Recording.Values().ToList();
var value = new InfoBuilder(false, exception, appends);
items.Add(CombinationResult.ForValue(keys, value));
Recording.Clear();
if (paused)
{
Recording.Resume();
}
}
else
{
items.Add(CombinationResult.ForException(keys, exception));
}
}
if (Increment())
{
break;
}
}
return new(items, keyTypes, columns);
}
object?[] BuildParameters()
{
var parameters = new object?[lists.Length];
for (var i = 0; i < lists.Length; i++)
{
var list = lists[i];
parameters[i] = list[indices[i]];
}
return parameters;
}
bool Increment()
{
var incrementIndex = lists.Length - 1;
while (incrementIndex >= 0 &&
++indices[incrementIndex] >= lists[incrementIndex].Length)
{
indices[incrementIndex] = 0;
incrementIndex--;
}
return incrementIndex < 0;
}
} | CombinationRunner |
csharp | dotnet__machinelearning | src/Microsoft.ML.Data/Transforms/NormalizeColumn.cs | {
"start": 11344,
"end": 19647
} | public sealed class ____ : AffineArgumentsBase
{
[Argument(ArgumentType.AtMostOnce, HelpText = "Should the data be centered around 0", Name = "CenterData", ShortName = "center", SortOrder = 1)]
public bool CenterData = Defaults.CenterData;
[Argument(ArgumentType.AtMostOnce, HelpText = "Minimum quantile value. Defaults to 25", Name = "QuantileMin", ShortName = "qmin", SortOrder = 2)]
public uint QuantileMin = Defaults.QuantileMin;
[Argument(ArgumentType.AtMostOnce, HelpText = "Maximum quantile value. Defaults to 75", Name = "QuantileMax", ShortName = "qmax", SortOrder = 3)]
public uint QuantileMax = Defaults.QuantileMax;
}
internal const string MinMaxNormalizerSummary = "Normalizes the data based on the observed minimum and maximum values of the data.";
internal const string MeanVarNormalizerSummary = "Normalizes the data based on the computed mean and variance of the data.";
internal const string LogMeanVarNormalizerSummary = "Normalizes the data based on the computed mean and variance of the logarithm of the data.";
internal const string BinNormalizerSummary = "The values are assigned into equidensity bins and a value is mapped to its bin_number/number_of_bins.";
internal const string SupervisedBinNormalizerSummary = "Similar to BinNormalizer, but calculates bins based on correlation with the label column, not equi-density. "
+ "The new value is bin_number / number_of_bins.";
internal const string RobustScalingNormalizerSummary = "Optionally centers the data and scales based on the range of data and the quantile min and max values provided. "
+ "This method is more robust to outliers.";
internal const string MinMaxNormalizerUserName = "Min-Max Normalizer";
internal const string MeanVarNormalizerUserName = "MeanVar Normalizer";
internal const string LogMeanVarNormalizerUserName = "LogMeanVar Normalizer";
internal const string BinNormalizerUserName = "Binning Normalizer";
internal const string SupervisedBinNormalizerUserName = "Supervised Binning Normalizer";
internal const string RobustScalingNormalizerUserName = "Robust Scaling Normalizer";
internal const string MinMaxNormalizerShortName = "MinMax";
internal const string MeanVarNormalizerShortName = "MeanVar";
internal const string LogMeanVarNormalizerShortName = "LogMeanVar";
internal const string BinNormalizerShortName = "Bin";
internal const string SupervisedBinNormalizerShortName = "SupBin";
internal const string RobustScalingNormalizerShortName = "RobScal";
/// <summary>
/// A helper method to create a MinMax normalizer.
/// </summary>
/// <param name="env">Host Environment.</param>
/// <param name="input">Input <see cref="IDataView"/>. This is the output from previous transform or loader.</param>
/// <param name="outputColumnName">Name of the output column.</param>
/// <param name="inputColumnName">Name of the column to be transformed. If this is null '<paramref name="outputColumnName"/>' will be used.</param>
public static IDataView CreateMinMaxNormalizer(IHostEnvironment env, IDataView input, string outputColumnName, string inputColumnName = null)
{
Contracts.CheckValue(env, nameof(env));
var normalizer = new NormalizingEstimator(env, new NormalizingEstimator.MinMaxColumnOptions(outputColumnName, inputColumnName ?? outputColumnName));
return normalizer.Fit(input).MakeDataTransform(input);
}
/// <summary>
/// Factory method corresponding to SignatureDataTransform.
/// </summary>
internal static IDataTransform Create(IHostEnvironment env, MinMaxArguments args, IDataView input)
{
Contracts.CheckValue(env, nameof(env));
env.CheckValue(args, nameof(args));
env.CheckValue(args.Columns, nameof(args.Columns));
var columns = args.Columns
.Select(col => new NormalizingEstimator.MinMaxColumnOptions(
col.Name,
col.Source ?? col.Name,
col.MaximumExampleCount ?? args.MaximumExampleCount,
col.EnsureZeroUntouched ?? args.EnsureZeroUntouched))
.ToArray();
var normalizer = new NormalizingEstimator(env, columns);
return normalizer.Fit(input).MakeDataTransform(input);
}
// Factory method corresponding to SignatureDataTransform.
internal static IDataTransform Create(IHostEnvironment env, MeanVarArguments args, IDataView input)
{
Contracts.CheckValue(env, nameof(env));
env.CheckValue(args, nameof(args));
env.CheckValue(args.Columns, nameof(args.Columns));
var columns = args.Columns
.Select(col => new NormalizingEstimator.MeanVarianceColumnOptions(
col.Name,
col.Source ?? col.Name,
col.MaximumExampleCount ?? args.MaximumExampleCount,
col.EnsureZeroUntouched ?? args.EnsureZeroUntouched))
.ToArray();
var normalizer = new NormalizingEstimator(env, columns);
return normalizer.Fit(input).MakeDataTransform(input);
}
/// <summary>
/// Factory method corresponding to SignatureDataTransform.
/// </summary>
internal static IDataTransform Create(IHostEnvironment env, LogMeanVarArguments args, IDataView input)
{
Contracts.CheckValue(env, nameof(env));
env.CheckValue(args, nameof(args));
env.CheckValue(args.Columns, nameof(args.Columns));
var columns = args.Columns
.Select(col => new NormalizingEstimator.LogMeanVarianceColumnOptions(
col.Name,
col.Source ?? col.Name,
col.MaximumExampleCount ?? args.MaximumExampleCount,
args.UseCdf))
.ToArray();
var normalizer = new NormalizingEstimator(env, columns);
return normalizer.Fit(input).MakeDataTransform(input);
}
/// <summary>
/// Factory method corresponding to SignatureDataTransform.
/// </summary>
internal static IDataTransform Create(IHostEnvironment env, BinArguments args, IDataView input)
{
Contracts.CheckValue(env, nameof(env));
env.CheckValue(args, nameof(args));
env.CheckValue(args.Columns, nameof(args.Columns));
var columns = args.Columns
.Select(col => new NormalizingEstimator.BinningColumnOptions(
col.Name,
col.Source ?? col.Name,
col.MaximumExampleCount ?? args.MaximumExampleCount,
col.EnsureZeroUntouched ?? args.EnsureZeroUntouched,
col.NumBins ?? args.NumBins))
.ToArray();
var normalizer = new NormalizingEstimator(env, columns);
return normalizer.Fit(input).MakeDataTransform(input);
}
/// <summary>
/// Factory method corresponding to SignatureDataTransform.
/// </summary>
internal static IDataTransform Create(IHostEnvironment env, RobustScalingArguments args, IDataView input)
{
Contracts.CheckValue(env, nameof(env));
env.CheckValue(args, nameof(args));
env.CheckValue(args.Columns, nameof(args.Columns));
var columns = args.Columns
.Select(col => new NormalizingEstimator.RobustScalingColumnOptions(
col.Name,
col.Source ?? col.Name,
col.MaximumExampleCount ?? args.MaximumExampleCount,
args.CenterData,
args.QuantileMin,
args.QuantileMax))
.ToArray();
var normalizer = new NormalizingEstimator(env, columns);
return normalizer.Fit(input).MakeDataTransform(input);
}
internal abstract | RobustScalingArguments |
csharp | OrchardCMS__OrchardCore | src/OrchardCore/OrchardCore.Users.Core/Services/UserDisplayNameShapeTableProvider.cs | {
"start": 178,
"end": 1167
} | public sealed class ____ : ShapeTableProvider
{
public override ValueTask DiscoverAsync(ShapeTableBuilder builder)
{
builder.Describe("UserDisplayName")
.OnDisplaying(context =>
{
var shape = context.Shape;
var displayType = shape.Metadata.DisplayType?.EncodeAlternateElement() ?? "Detail";
if (shape.TryGetProperty<string>("UserName", out var username))
{
// UserDisplayName_[DisplayType]__[UserName] e.g. UserDisplayName-johndoe.SummaryAdmin.cshtml
shape.Metadata.Alternates.Add("UserDisplayName_" + displayType + "__" + username.EncodeAlternateElement());
}
// UserDisplayName_[DisplayType] e.g. UserDisplayName.SummaryAdmin.cshtml
shape.Metadata.Alternates.Add("UserDisplayName_" + displayType);
});
return ValueTask.CompletedTask;
}
}
| UserDisplayNameShapeTableProvider |
csharp | dotnet__efcore | test/EFCore.Cosmos.FunctionalTests/Query/AdHocMiscellaneousQueryCosmosTest.cs | {
"start": 13939,
"end": 14394
} | public enum ____
{
Dog,
Cat
}
#endregion 36329
protected override string StoreName
=> "AdHocMiscellaneousQueryTests";
protected override DbContextOptionsBuilder AddOptions(DbContextOptionsBuilder builder)
=> builder.ConfigureWarnings(b => b.Ignore(CosmosEventId.NoPartitionKeyDefined));
protected override ITestStoreFactory TestStoreFactory
=> CosmosTestStoreFactory.Instance;
}
| PetType36329 |
csharp | dotnet__maui | src/ProfiledAot/src/maui/AppFlyoutPage.xaml.cs | {
"start": 17,
"end": 123
} | public partial class ____ : FlyoutPage
{
public AppFlyoutPage()
{
InitializeComponent();
}
} | AppFlyoutPage |
csharp | grandnode__grandnode2 | src/Web/Grand.Web.Admin/Models/Settings/ContentSettingsModel.cs | {
"start": 122,
"end": 432
} | public class ____ : BaseModel
{
public string ActiveStore { get; set; }
public BlogSettingsModel BlogSettings { get; set; } = new();
public NewsSettingsModel NewsSettings { get; set; } = new();
public KnowledgebaseSettingsModel KnowledgebaseSettings { get; set; } = new();
| ContentSettingsModel |
csharp | dotnet__efcore | test/EFCore.Design.Tests/Extensions/MethodCallCodeFragmentExtensionsTest.cs | {
"start": 2358,
"end": 2395
} | internal class ____;
}
}
| TestArgument |
csharp | FluentValidation__FluentValidation | src/FluentValidation/TestHelper/TestValidationResult.cs | {
"start": 912,
"end": 4670
} | public class ____<T> : ValidationResult {
public TestValidationResult(ValidationResult validationResult) : base(validationResult.Errors){
RuleSetsExecuted = validationResult.RuleSetsExecuted;
}
public ITestValidationWith ShouldHaveValidationErrorFor<TProperty>(Expression<Func<T, TProperty>> memberAccessor) {
string propertyName = ValidatorOptions.Global.PropertyNameResolver(typeof(T), memberAccessor.GetMember(), memberAccessor);
return ShouldHaveValidationError(propertyName, true);
}
public void ShouldNotHaveValidationErrorFor<TProperty>(Expression<Func<T, TProperty>> memberAccessor) {
string propertyName = ValidatorOptions.Global.PropertyNameResolver(typeof(T), memberAccessor.GetMember(), memberAccessor);
ShouldNotHaveValidationError(propertyName, true);
}
public void ShouldNotHaveAnyValidationErrors() {
ShouldNotHaveValidationError(ValidationTestExtension.MatchAnyFailure, true);
}
public ITestValidationContinuation ShouldHaveValidationErrors() {
if (!Errors.Any())
throw new ValidationTestException($"Expected at least one validation error, but none were found.");
return new TestValidationContinuation(Errors);
}
public ITestValidationWith ShouldHaveValidationErrorFor(string propertyName) {
return ShouldHaveValidationError(propertyName, false);
}
public void ShouldNotHaveValidationErrorFor(string propertyName) {
ShouldNotHaveValidationError(propertyName, false);
}
private ITestValidationWith ShouldHaveValidationError(string propertyName, bool shouldNormalizePropertyName) {
var result = new TestValidationContinuation(Errors);
result.ApplyPredicate(x => (shouldNormalizePropertyName ? NormalizePropertyName(x.PropertyName) == propertyName : x.PropertyName == propertyName)
|| (string.IsNullOrEmpty(x.PropertyName) && string.IsNullOrEmpty(propertyName))
|| propertyName == ValidationTestExtension.MatchAnyFailure);
if (result.Any()) {
return result;
}
// We expected an error but failed to match it.
var errorMessageBanner = $"Expected a validation error for property {propertyName}";
string errorMessage = "";
if (Errors?.Any() == true) {
string errorMessageDetails = "";
for (int i = 0; i < Errors.Count; i++) {
errorMessageDetails += $"[{i}]: {Errors[i].PropertyName}\n";
}
errorMessage = $"{errorMessageBanner}\n----\nProperties with Validation Errors:\n{errorMessageDetails}";
}
else {
errorMessage = $"{errorMessageBanner}";
}
throw new ValidationTestException(errorMessage);
}
private void ShouldNotHaveValidationError(string propertyName, bool shouldNormalizePropertyName) {
var failures = Errors.Where(x => (shouldNormalizePropertyName ? NormalizePropertyName(x.PropertyName) == propertyName : x.PropertyName == propertyName)
|| (string.IsNullOrEmpty(x.PropertyName) && string.IsNullOrEmpty(propertyName))
|| propertyName == ValidationTestExtension.MatchAnyFailure
).ToList();
if (failures.Any()) {
var errorMessageBanner = $"Expected no validation errors for property {propertyName}";
if (propertyName == ValidationTestExtension.MatchAnyFailure) {
errorMessageBanner = "Expected no validation errors";
}
string errorMessageDetails = "";
for (int i = 0; i < failures.Count; i++) {
errorMessageDetails += $"[{i}]: {failures[i].ErrorMessage}\n";
}
var errorMessage = $"{errorMessageBanner}\n----\nValidation Errors:\n{errorMessageDetails}";
throw new ValidationTestException(errorMessage, failures);
}
}
private static string NormalizePropertyName(string propertyName) {
return Regex.Replace(propertyName, @"\[.*\]", string.Empty);
}
}
| TestValidationResult |
csharp | dotnet__aspnetcore | src/Components/WebView/WebView/test/Infrastructure/TestWebViewServiceCollectionExtensions.cs | {
"start": 239,
"end": 468
} | public static class ____
{
public static IServiceCollection AddTestBlazorWebView(this IServiceCollection services)
{
services.AddBlazorWebView();
return services;
}
}
| TestWebViewServiceCollectionExtensions |
csharp | bitwarden__server | test/Core.Test/AdminConsole/OrganizationFeatures/Organizations/OrganizationSignUp/ResellerClientOrganizationSignUpCommandTests.cs | {
"start": 601,
"end": 7727
} | public class ____
{
[Theory]
[BitAutoData]
public async Task SignUpResellerClientAsync_WithValidParameters_CreatesOrganizationSuccessfully(
Organization organization,
string ownerEmail,
SutProvider<ResellerClientOrganizationSignUpCommand> sutProvider)
{
var result = await sutProvider.Sut.SignUpResellerClientAsync(organization, ownerEmail);
Assert.NotNull(result.Organization);
Assert.False(result.Organization.Enabled);
Assert.Equal(OrganizationStatusType.Pending, result.Organization.Status);
Assert.NotNull(result.OwnerOrganizationUser);
Assert.Equal(ownerEmail, result.OwnerOrganizationUser.Email);
Assert.Equal(OrganizationUserType.Owner, result.OwnerOrganizationUser.Type);
Assert.Equal(OrganizationUserStatusType.Invited, result.OwnerOrganizationUser.Status);
await sutProvider.GetDependency<IOrganizationRepository>()
.Received(1)
.CreateAsync(
Arg.Is<Organization>(o =>
o.Id != default &&
o.Name == organization.Name &&
o.Enabled == false &&
o.Status == OrganizationStatusType.Pending
)
);
await sutProvider.GetDependency<IOrganizationApiKeyRepository>()
.Received(1)
.CreateAsync(
Arg.Is<OrganizationApiKey>(k =>
k.OrganizationId == result.Organization.Id &&
k.Type == OrganizationApiKeyType.Default &&
!string.IsNullOrEmpty(k.ApiKey)
)
);
await sutProvider.GetDependency<IApplicationCacheService>()
.Received(1)
.UpsertOrganizationAbilityAsync(Arg.Is<Organization>(o => o.Id == result.Organization.Id));
await sutProvider.GetDependency<IOrganizationUserRepository>()
.Received(1)
.CreateAsync(
Arg.Is<OrganizationUser>(u =>
u.OrganizationId == result.Organization.Id &&
u.Email == ownerEmail &&
u.Type == OrganizationUserType.Owner &&
u.Status == OrganizationUserStatusType.Invited &&
u.UserId == null
)
);
await sutProvider.GetDependency<ISendOrganizationInvitesCommand>()
.Received(1)
.SendInvitesAsync(
Arg.Is<SendInvitesRequest>(r =>
r.Users.Count() == 1 &&
r.Users.First().Email == ownerEmail &&
r.Organization.Id == result.Organization.Id &&
r.InitOrganization == true
)
);
await sutProvider.GetDependency<IEventService>()
.Received(1)
.LogOrganizationUserEventAsync(
Arg.Is<OrganizationUser>(u => u.Email == ownerEmail),
EventType.OrganizationUser_Invited
);
}
[Theory]
[BitAutoData]
public async Task SignUpResellerClientAsync_WhenOrganizationRepositoryThrows_PerformsCleanup(
Organization organization,
string ownerEmail,
SutProvider<ResellerClientOrganizationSignUpCommand> sutProvider)
{
sutProvider.GetDependency<IOrganizationRepository>()
.When(x => x.CreateAsync(Arg.Any<Organization>()))
.Do(_ => throw new Exception());
await Assert.ThrowsAsync<Exception>(
() => sutProvider.Sut.SignUpResellerClientAsync(organization, ownerEmail));
await AssertCleanupIsPerformed(sutProvider);
}
[Theory]
[BitAutoData]
public async Task SignUpResellerClientAsync_WhenOrganizationUserCreationFails_PerformsCleanup(
Organization organization,
string ownerEmail,
SutProvider<ResellerClientOrganizationSignUpCommand> sutProvider)
{
sutProvider.GetDependency<IOrganizationUserRepository>()
.When(x => x.CreateAsync(Arg.Any<OrganizationUser>()))
.Do(_ => throw new Exception());
await Assert.ThrowsAsync<Exception>(
() => sutProvider.Sut.SignUpResellerClientAsync(organization, ownerEmail));
await sutProvider.GetDependency<IOrganizationRepository>()
.Received(1)
.CreateAsync(Arg.Any<Organization>());
await AssertCleanupIsPerformed(sutProvider);
}
[Theory]
[BitAutoData]
public async Task SignUpResellerClientAsync_WhenInvitationSendingFails_PerformsCleanup(
Organization organization,
string ownerEmail,
SutProvider<ResellerClientOrganizationSignUpCommand> sutProvider)
{
sutProvider.GetDependency<ISendOrganizationInvitesCommand>()
.When(x => x.SendInvitesAsync(Arg.Any<SendInvitesRequest>()))
.Do(_ => throw new Exception());
await Assert.ThrowsAsync<Exception>(
() => sutProvider.Sut.SignUpResellerClientAsync(organization, ownerEmail));
await sutProvider.GetDependency<IOrganizationRepository>()
.Received(1)
.CreateAsync(Arg.Any<Organization>());
await sutProvider.GetDependency<IOrganizationUserRepository>()
.Received(1)
.CreateAsync(Arg.Any<OrganizationUser>());
await AssertCleanupIsPerformed(sutProvider);
}
[Theory]
[BitAutoData]
public async Task SignUpResellerClientAsync_WhenEventLoggingFails_PerformsCleanup(
Organization organization,
string ownerEmail,
SutProvider<ResellerClientOrganizationSignUpCommand> sutProvider)
{
sutProvider.GetDependency<IEventService>()
.When(x => x.LogOrganizationUserEventAsync(Arg.Any<OrganizationUser>(), Arg.Any<EventType>()))
.Do(_ => throw new Exception());
await Assert.ThrowsAsync<Exception>(
() => sutProvider.Sut.SignUpResellerClientAsync(organization, ownerEmail));
await sutProvider.GetDependency<IOrganizationRepository>()
.Received(1)
.CreateAsync(Arg.Any<Organization>());
await sutProvider.GetDependency<IOrganizationUserRepository>()
.Received(1)
.CreateAsync(Arg.Any<OrganizationUser>());
await sutProvider.GetDependency<ISendOrganizationInvitesCommand>()
.Received(1)
.SendInvitesAsync(Arg.Any<SendInvitesRequest>());
await AssertCleanupIsPerformed(sutProvider);
}
private static async Task AssertCleanupIsPerformed(SutProvider<ResellerClientOrganizationSignUpCommand> sutProvider)
{
await sutProvider.GetDependency<IPaymentService>()
.Received(1)
.CancelAndRecoverChargesAsync(Arg.Any<Organization>());
await sutProvider.GetDependency<IOrganizationRepository>()
.Received(1)
.DeleteAsync(Arg.Any<Organization>());
await sutProvider.GetDependency<IApplicationCacheService>()
.Received(1)
.DeleteOrganizationAbilityAsync(Arg.Any<Guid>());
}
}
| ResellerClientOrganizationSignUpCommandTests |
csharp | ServiceStack__ServiceStack | ServiceStack.Text/src/ServiceStack.Text/HttpUtils.WebRequest.cs | {
"start": 51188,
"end": 51450
} | public interface ____ : IDisposable
{
string GetString(HttpWebRequest webReq, string reqBody);
byte[] GetBytes(HttpWebRequest webReq, byte[] reqBody);
void UploadStream(HttpWebRequest webRequest, Stream fileStream, string fileName);
}
| IHttpResultsFilter |
csharp | abpframework__abp | framework/src/Volo.Abp.AspNetCore.Components.Web.Theming/Toolbars/ToolbarConfigurationContext.cs | {
"start": 306,
"end": 1598
} | public class ____ : IToolbarConfigurationContext
{
public IServiceProvider ServiceProvider { get; }
private readonly IAbpLazyServiceProvider _lazyServiceProvider;
public IAuthorizationService AuthorizationService => _lazyServiceProvider.LazyGetRequiredService<IAuthorizationService>();
public IStringLocalizerFactory StringLocalizerFactory => _lazyServiceProvider.LazyGetRequiredService<IStringLocalizerFactory>();
public Toolbar Toolbar { get; }
public ToolbarConfigurationContext(Toolbar toolbar, IServiceProvider serviceProvider)
{
Toolbar = toolbar;
ServiceProvider = serviceProvider;
_lazyServiceProvider = ServiceProvider.GetRequiredService<IAbpLazyServiceProvider>();
}
public Task<bool> IsGrantedAsync(string policyName)
{
return AuthorizationService.IsGrantedAsync(policyName);
}
public IStringLocalizer? GetDefaultLocalizer()
{
return StringLocalizerFactory.CreateDefaultOrNull();
}
[NotNull]
public IStringLocalizer GetLocalizer<T>()
{
return StringLocalizerFactory.Create<T>();
}
[NotNull]
public IStringLocalizer GetLocalizer(Type resourceType)
{
return StringLocalizerFactory.Create(resourceType);
}
}
| ToolbarConfigurationContext |
csharp | dotnet__efcore | test/EFCore.Specification.Tests/ModelBuilding/ModelBuilderTest.OwnedTypes.cs | {
"start": 78528,
"end": 98555
} | protected class ____
{
public int Id { get; protected set; }
public List<DepartmentId> DepartmentIds { get; set; }
}
[ConditionalFact]
public virtual void Can_configure_property_and_owned_entity_of_same_type()
{
var modelBuilder = CreateModelBuilder();
modelBuilder.Entity<Department>(b =>
{
b.Property(d => d.Id)
.HasConversion(
id => id.Value,
value => new DepartmentId(value));
b.OwnsMany(d => d.DepartmentIds);
});
modelBuilder.Entity<Office>()
.OwnsMany(o => o.DepartmentIds);
var model = modelBuilder.FinalizeModel();
var departmentType = model.FindEntityType(typeof(Department))!;
var departmentNestedType = model.FindEntityType(typeof(DepartmentId), nameof(Department.DepartmentIds), departmentType)!;
var officeType = model.FindEntityType(typeof(Office))!;
var officeNestedType = model.FindEntityType(typeof(DepartmentId), nameof(Office.DepartmentIds), officeType)!;
var departmentIdProperty = departmentType.FindProperty(nameof(Department.Id));
Assert.NotNull(departmentIdProperty);
Assert.NotNull(departmentNestedType);
Assert.NotNull(officeNestedType);
var departmentIdFkProperty = departmentNestedType.GetForeignKeys().Single().Properties[0];
Assert.Same(departmentIdProperty.GetValueConverter(), departmentIdFkProperty.GetValueConverter());
}
[ConditionalFact]
public virtual void Can_configure_owned_entity_and_property_of_same_type()
{
var modelBuilder = CreateModelBuilder();
modelBuilder.Entity<Office>()
.OwnsMany(o => o.DepartmentIds);
modelBuilder.Entity<Department>(b =>
{
b.Property(d => d.Id)
.HasConversion(
id => id.Value,
value => new DepartmentId(value));
b.OwnsMany(d => d.DepartmentIds);
});
var model = modelBuilder.FinalizeModel();
var departmentType = model.FindEntityType(typeof(Department))!;
var departmentNestedType = model.FindEntityType(typeof(DepartmentId), nameof(Department.DepartmentIds), departmentType)!;
var officeType = model.FindEntityType(typeof(Office))!;
var officeNestedType = model.FindEntityType(typeof(DepartmentId), nameof(Office.DepartmentIds), officeType)!;
var departmentIdProperty = departmentType.FindProperty(nameof(Department.Id));
Assert.NotNull(departmentIdProperty);
Assert.NotNull(departmentIdProperty.GetValueConverter());
Assert.NotNull(departmentNestedType);
Assert.NotNull(officeNestedType);
var departmentIdFkProperty = departmentNestedType.GetForeignKeys().Single().Properties[0];
Assert.Same(departmentIdProperty.GetValueConverter(), departmentIdFkProperty.GetValueConverter());
}
[ConditionalFact]
public virtual void Reconfiguring_entity_type_as_owned_throws()
{
var modelBuilder = CreateModelBuilder();
modelBuilder.Ignore<Customer>();
modelBuilder.Entity<CustomerDetails>();
Assert.Equal(
CoreStrings.ClashingNonOwnedEntityType(nameof(CustomerDetails)),
Assert.Throws<InvalidOperationException>(() => modelBuilder.Entity<SpecialCustomer>().OwnsOne(c => c.Details)).Message);
}
[ConditionalFact]
public virtual void Reconfiguring_owned_type_as_non_owned_throws()
{
var modelBuilder = CreateModelBuilder();
modelBuilder.Ignore<Customer>();
modelBuilder.Entity<SpecialCustomer>().OwnsOne(c => c.Details);
Assert.Equal(
CoreStrings.ClashingOwnedEntityType(nameof(CustomerDetails)),
Assert.Throws<InvalidOperationException>(() => modelBuilder.Entity<SpecialCustomer>().HasOne(c => c.Details)).Message);
}
[ConditionalFact]
public virtual void Deriving_from_owned_type_throws()
{
var modelBuilder = CreateModelBuilder();
modelBuilder.Entity<Book>()
.Ignore(b => b.AlternateLabel)
.Ignore(b => b.Details)
.OwnsOne(
b => b.Label, lb =>
{
lb.Ignore(l => l.AnotherBookLabel);
lb.Ignore(l => l.SpecialBookLabel);
});
Assert.Equal(
modelBuilder.Model.IsShared(typeof(BookLabel))
? CoreStrings.ClashingSharedType(nameof(BookLabel))
: CoreStrings.ClashingOwnedEntityType(nameof(BookLabel)),
Assert.Throws<InvalidOperationException>(() => modelBuilder.Entity<AnotherBookLabel>().HasBaseType<BookLabel>()).Message);
}
[ConditionalFact]
public virtual void Configuring_base_type_as_owned_throws()
{
var modelBuilder = CreateModelBuilder();
modelBuilder.Entity<AnotherBookLabel>();
modelBuilder.Entity<Book>()
.Ignore(b => b.AlternateLabel)
.Ignore(b => b.Details);
Assert.Equal(
CoreStrings.ClashingNonOwnedDerivedEntityType(nameof(BookLabel), nameof(AnotherBookLabel)),
Assert.Throws<InvalidOperationException>(() => modelBuilder.Entity<Book>().OwnsOne(c => c.Label)).Message);
}
[ConditionalFact]
public virtual void CLR_base_type_can_be_owned_when_not_in_hierarchy()
{
var modelBuilder = CreateModelBuilder();
modelBuilder.Entity<AnotherBookLabel>()
.HasBaseType(null)
.Ignore(l => l.Book)
.Ignore(l => l.SpecialBookLabel)
.Ignore(l => l.AnotherBookLabel);
modelBuilder.Entity<Book>()
.Ignore(b => b.AlternateLabel)
.Ignore(b => b.Details)
.OwnsOne(
c => c.Label, lb =>
{
lb.Ignore(l => l.AnotherBookLabel);
lb.Ignore(l => l.SpecialBookLabel);
});
var model = modelBuilder.FinalizeModel();
var bookLabelOwnership = model.FindEntityType(typeof(Book)).FindNavigation(nameof(Book.Label))
.ForeignKey;
Assert.True(bookLabelOwnership.IsOwnership);
Assert.Equal(nameof(BookLabel.Book), bookLabelOwnership.DependentToPrincipal.Name);
Assert.Null(model.FindEntityType(typeof(AnotherBookLabel)).BaseType);
}
[ConditionalFact]
public virtual void OwnedType_can_derive_from_Collection()
{
var modelBuilder = CreateModelBuilder();
modelBuilder.Entity<PrincipalEntity>().OwnsOne(o => o.InverseNav);
var model = modelBuilder.FinalizeModel();
Assert.Single(model.FindEntityTypes(typeof(MyList<DependentEntity>)));
}
[ConditionalFact]
public virtual void Shared_type_entity_types_with_FK_to_another_entity_works()
{
var modelBuilder = CreateModelBuilder();
modelBuilder.Entity<Country>();
var ownerEntityTypeBuilder = modelBuilder.Entity<BillingOwner>();
ownerEntityTypeBuilder.OwnsOne(
e => e.Bill1,
o =>
{
o.HasOne<Country>().WithMany().HasPrincipalKey(c => c.Name).HasForeignKey(d => d.Country);
o.HasIndex(c => c.Country);
});
ownerEntityTypeBuilder.OwnsOne(
e => e.Bill2,
o => o.HasOne<Country>().WithMany().HasPrincipalKey(c => c.Name).HasForeignKey(d => d.Country));
var model = modelBuilder.FinalizeModel();
Assert.Equal(4, model.GetEntityTypes().Count());
var owner = model.FindEntityType(typeof(BillingOwner));
var bill1 = owner.FindNavigation(nameof(BillingOwner.Bill1)).TargetEntityType;
Assert.Equal(2, bill1.GetForeignKeys().Count());
Assert.Single(bill1.GetIndexes());
var bill2 = owner.FindNavigation(nameof(BillingOwner.Bill2)).TargetEntityType;
Assert.Equal(2, bill2.GetForeignKeys().Count());
Assert.Equal(Fixture.ForeignKeysHaveIndexes ? 1 : 0, bill2.GetIndexes().Count());
}
[ConditionalFact]
public virtual void Can_have_multiple_owned_types_on_base()
{
var modelBuilder = CreateModelBuilder();
modelBuilder.Entity<BaseOwner>().OwnsOne(o => o.OwnedWithRef1);
modelBuilder.Entity<DerivedOwner>();
var model = modelBuilder.FinalizeModel();
Assert.Equal(6, model.GetEntityTypes().Count());
var owner = model.FindEntityType(typeof(BaseOwner));
var ownership1 = owner.FindNavigation(nameof(BaseOwner.Owned1)).ForeignKey;
var owned1 = ownership1.DeclaringEntityType;
Assert.True(ownership1.IsOwnership);
Assert.Same(owner, ownership1.PrincipalEntityType);
var ownership3 = owner.FindNavigation(nameof(BaseOwner.OwnedWithRef1)).ForeignKey;
var owned3 = ownership3.DeclaringEntityType;
Assert.True(ownership3.IsOwnership);
Assert.Same(owner, ownership3.DependentToPrincipal.TargetEntityType);
}
[ConditionalFact]
public virtual void Navigations_on_owned_type_can_set_access_mode_using_expressions()
{
var modelBuilder = CreateModelBuilder();
var model = modelBuilder.Model;
modelBuilder.Entity<OneToOneNavPrincipalOwner>()
.OwnsOne(
e => e.OwnedDependent,
a =>
{
a.WithOwner(owned => owned.OneToOneOwner);
a.Navigation(owned => owned.OneToOneOwner)
.UsePropertyAccessMode(PropertyAccessMode.Property);
});
modelBuilder.Entity<OneToOneNavPrincipalOwner>()
.Navigation(e => e.OwnedDependent)
.UsePropertyAccessMode(PropertyAccessMode.Field);
var principal = (IReadOnlyEntityType)model.FindEntityType(typeof(OneToOneNavPrincipalOwner));
var dependent = (IReadOnlyEntityType)model.FindEntityType(typeof(OwnedNavDependent));
Assert.Equal(PropertyAccessMode.Field, principal.FindNavigation("OwnedDependent").GetPropertyAccessMode());
Assert.Equal(PropertyAccessMode.Property, dependent.FindNavigation("OneToOneOwner").GetPropertyAccessMode());
}
[ConditionalFact]
public virtual void Navigations_on_owned_type_collection_can_set_access_mode()
{
var modelBuilder = CreateModelBuilder();
var model = modelBuilder.Model;
modelBuilder.Entity<OneToManyNavPrincipalOwner>()
.OwnsMany(
e => e.OwnedDependents,
a =>
{
a.WithOwner(owned => owned.OneToManyOwner);
a.Navigation(owned => owned.OneToManyOwner)
.UsePropertyAccessMode(PropertyAccessMode.Property);
});
modelBuilder.Entity<OneToManyNavPrincipalOwner>()
.Navigation(e => e.OwnedDependents)
.UsePropertyAccessMode(PropertyAccessMode.Field);
var principal = (IReadOnlyEntityType)model.FindEntityType(typeof(OneToManyNavPrincipalOwner));
var dependent = (IReadOnlyEntityType)model.FindEntityType(typeof(OwnedOneToManyNavDependent));
Assert.Equal(PropertyAccessMode.Field, principal.FindNavigation("OwnedDependents").GetPropertyAccessMode());
Assert.Equal(PropertyAccessMode.Property, dependent.FindNavigation("OneToManyOwner").GetPropertyAccessMode());
}
[ConditionalFact]
public virtual void Attempt_to_create_OwnsMany_on_a_reference_throws()
{
var modelBuilder = CreateModelBuilder();
Assert.Equal(
CoreStrings.UnableToSetIsUnique(
false,
"OwnedDependent",
nameof(OneToOneNavPrincipalOwner)),
Assert.Throws<InvalidOperationException>(() => modelBuilder
.Entity<OneToOneNavPrincipalOwner>()
.OwnsMany<OwnedNavDependent>("OwnedDependent")).Message
);
}
[ConditionalFact]
public virtual void Attempt_to_create_OwnsOne_on_a_collection_throws()
{
var modelBuilder = CreateModelBuilder();
Assert.Equal(
CoreStrings.UnableToSetIsUnique(
true,
"OwnedDependents",
nameof(OneToManyNavPrincipalOwner)),
Assert.Throws<InvalidOperationException>(() => modelBuilder
.Entity<OneToManyNavPrincipalOwner>()
.OwnsOne<OwnedOneToManyNavDependent>("OwnedDependents")).Message);
}
[ConditionalFact]
public virtual void Shared_type_can_be_used_as_owned_type()
{
var modelBuilder = CreateModelBuilder();
modelBuilder.Entity<OwnerOfSharedType>(b =>
{
b.OwnsOne(
"Shared1", e => e.Reference, sb =>
{
sb.IndexerProperty<int>("Value");
});
b.OwnsMany("Shared2", e => e.Collection).IndexerProperty<bool>("IsDeleted");
b.OwnsOne(
e => e.OwnedNavigation,
o =>
{
o.OwnsOne(
"Shared3", e => e.Reference, sb =>
{
sb.IndexerProperty<int>("NestedValue");
});
o.OwnsMany("Shared4", e => e.Collection).IndexerProperty<long>("NestedLong");
});
});
var model = modelBuilder.FinalizeModel();
Assert.Collection(
model.GetEntityTypes().OrderBy(e => e.Name),
t => { Assert.Equal(typeof(NestedOwnerOfSharedType), t.ClrType); },
t => { Assert.Equal(typeof(OwnerOfSharedType), t.ClrType); },
t =>
{
Assert.Equal("Shared1", t.Name);
Assert.NotNull(t.FindProperty("Value"));
},
t =>
{
Assert.Equal("Shared2", t.Name);
Assert.NotNull(t.FindProperty("IsDeleted"));
},
t =>
{
Assert.Equal("Shared3", t.Name);
Assert.NotNull(t.FindProperty("NestedValue"));
},
t =>
{
Assert.Equal("Shared4", t.Name);
Assert.NotNull(t.FindProperty("NestedLong"));
});
}
[ConditionalFact]
public virtual void Shared_type_used_as_owned_type_throws_for_same_name()
{
var modelBuilder = CreateModelBuilder();
modelBuilder.Entity<OwnerOfSharedType>(b =>
{
b.OwnsOne("Shared1", e => e.Reference);
b.OwnsOne("Shared1", e => e.Reference);
Assert.Equal(
CoreStrings.ClashingNamedOwnedType(
"Shared1", nameof(OwnerOfSharedType), nameof(OwnerOfSharedType.Collection)),
Assert.Throws<InvalidOperationException>(() => b.OwnsMany("Shared1", e => e.Collection)).Message);
});
}
[ConditionalFact]
public virtual void PrimitiveCollectionBuilder_methods_can_be_chained()
=> CreateModelBuilder()
.Entity<ComplexProperties>()
.OwnsOne(e => e.CollectionQuarks)
.PrimitiveCollection(e => e.Up)
.ElementType(t => t
.HasAnnotation("B", "C")
.HasConversion(typeof(long))
.HasConversion(new CastingConverter<int, long>())
.HasConversion(typeof(long), typeof(CustomValueComparer<int>))
.HasConversion(typeof(long), new CustomValueComparer<int>())
.HasConversion(new CastingConverter<int, long>())
.HasConversion(new CastingConverter<int, long>(), new CustomValueComparer<int>())
.HasConversion<long>()
.HasConversion<long>(new CustomValueComparer<int>())
.HasConversion<long, CustomValueComparer<int>>()
.HasMaxLength(2)
.HasPrecision(1)
.HasPrecision(1, 2)
.IsRequired()
.IsUnicode())
.IsRequired()
.HasAnnotation("A", "V")
.IsConcurrencyToken()
.ValueGeneratedNever()
.ValueGeneratedOnAdd()
.ValueGeneratedOnAddOrUpdate()
.ValueGeneratedOnUpdate()
.IsUnicode()
.HasMaxLength(100)
.HasSentinel(null)
.HasValueGenerator<CustomValueGenerator>()
.HasValueGenerator(typeof(CustomValueGenerator))
.HasValueGeneratorFactory<CustomValueGeneratorFactory>()
.HasValueGeneratorFactory(typeof(CustomValueGeneratorFactory))
.IsRequired();
[ConditionalFact]
public virtual void PrimitiveCollectionBuilder_methods_can_be_chained_on_collection()
=> CreateModelBuilder()
.Entity<Customer>()
.OwnsMany(e => e.Orders)
.PrimitiveCollection<List<int>>("List")
.ElementType(t => t
.HasAnnotation("B", "C")
.HasConversion(typeof(long))
.HasConversion(new CastingConverter<int, long>())
.HasConversion(typeof(long), typeof(CustomValueComparer<int>))
.HasConversion(typeof(long), new CustomValueComparer<int>())
.HasConversion(new CastingConverter<int, long>())
.HasConversion(new CastingConverter<int, long>(), new CustomValueComparer<int>())
.HasConversion<long>()
.HasConversion<long>(new CustomValueComparer<int>())
.HasConversion<long, CustomValueComparer<int>>()
.HasMaxLength(2)
.HasPrecision(1)
.HasPrecision(1, 2)
.IsRequired()
.IsUnicode())
.IsRequired()
.HasAnnotation("A", "V")
.IsConcurrencyToken()
.ValueGeneratedNever()
.ValueGeneratedOnAdd()
.ValueGeneratedOnAddOrUpdate()
.ValueGeneratedOnUpdate()
.IsUnicode()
.HasMaxLength(100)
.HasSentinel(null)
.HasValueGenerator<CustomValueGenerator>()
.HasValueGenerator(typeof(CustomValueGenerator))
.HasValueGeneratorFactory<CustomValueGeneratorFactory>()
.HasValueGeneratorFactory(typeof(CustomValueGeneratorFactory))
.IsRequired();
| Office |
csharp | dotnet__maui | src/Controls/src/SourceGen/IMethodSymbolExtensions.cs | {
"start": 1473,
"end": 1693
} | interface ____ first (interfaces don't use inheritance)
if (paramType.IsInterface())
{
if (!argType.Implements(paramType))
{
parameters = null;
return false;
}
}
// Then check | implementation |
csharp | AvaloniaUI__Avalonia | samples/Previewer/Program.cs | {
"start": 44,
"end": 321
} | class ____
{
public static AppBuilder BuildAvaloniaApp()
=> AppBuilder.Configure<App>()
.UsePlatformDetect();
public static int Main(string[] args)
=> BuildAvaloniaApp().StartWithClassicDesktopLifetime(args);
}
}
| Program |
csharp | dotnetcore__WTM | src/WalkingTec.Mvvm.Core/Models/BasePoco.cs | {
"start": 442,
"end": 1288
} | public abstract class ____ : TopBasePoco, IBasePoco
{
/// <summary>
/// CreateTime
/// </summary>
[Display(Name = "_Admin.CreateTime")]
public DateTime? CreateTime { get; set; }
/// <summary>
/// CreateBy
/// </summary>
[Display(Name = "_Admin.CreateBy")]
[StringLength(50,ErrorMessage = "Validate.{0}stringmax{1}")]
public string CreateBy { get; set; }
/// <summary>
/// UpdateTime
/// </summary>
[Display(Name = "_Admin.UpdateTime")]
public DateTime? UpdateTime { get; set; }
/// <summary>
/// UpdateBy
/// </summary>
[Display(Name = "_Admin.UpdateBy")]
[StringLength(50,ErrorMessage = "Validate.{0}stringmax{1}")]
public string UpdateBy { get; set; }
}
}
| BasePoco |
csharp | MassTransit__MassTransit | src/Transports/MassTransit.RabbitMqTransport/RabbitMqTransport/Topology/IExchangeTypeSelector.cs | {
"start": 276,
"end": 833
} | public interface ____
{
/// <summary>
/// The default exchange type
/// </summary>
string DefaultExchangeType { get; }
/// <summary>
/// Returns the exchange type for the send context
/// </summary>
/// <typeparam name="T">The message type</typeparam>
/// <param name="exchangeName">The exchange name</param>
/// <returns>The exchange type for the send</returns>
string GetExchangeType<T>(string exchangeName)
where T : class;
}
}
| IExchangeTypeSelector |
csharp | nopSolutions__nopCommerce | src/Libraries/Nop.Data/Extensions/AsyncIEnumerableExtensions.cs | {
"start": 25,
"end": 16221
} | public static class ____
{
/// <summary>
/// Projects each element of an async-enumerable sequence into a new form by applying
/// an asynchronous selector function to each member of the source sequence and awaiting
/// the result.
/// </summary>
/// <typeparam name="TSource"> The type of the elements in the source sequence</typeparam>
/// <typeparam name="TResult">
/// The type of the elements in the result sequence, obtained by running the selector
/// function for each element in the source sequence and awaiting the result.
/// </typeparam>
/// <param name="source">A sequence of elements to invoke a transform function on</param>
/// <param name="predicate">An asynchronous transform function to apply to each source element</param>
/// <returns>
/// An async-enumerable sequence whose elements are the result of invoking the transform
/// function on each element of the source sequence and awaiting the result
/// </returns>
public static IAsyncEnumerable<TResult> SelectAwait<TSource, TResult>(this IEnumerable<TSource> source,
Func<TSource, ValueTask<TResult>> predicate)
{
return source.ToAsyncEnumerable().SelectAwait(predicate);
}
/// <summary>
/// Returns the first element of an async-enumerable sequence that satisfies the
/// condition in the predicate, or a default value if no element satisfies the condition
/// in the predicate
/// </summary>
/// <typeparam name="TSource">The type of element in the sequence</typeparam>
/// <param name="source">Source sequence</param>
/// <param name="predicate">An asynchronous predicate to invoke and await on each element of the sequence</param>
/// <returns>
/// A Task containing the first element in the sequence that satisfies the predicate,
/// or a default value if no element satisfies the predicate
/// </returns>
/// <returns>A task that represents the asynchronous operation</returns>
public static Task<TSource> FirstOrDefaultAwaitAsync<TSource>(this IEnumerable<TSource> source,
Func<TSource, ValueTask<bool>> predicate)
{
return source.ToAsyncEnumerable().FirstOrDefaultAwaitAsync(predicate).AsTask();
}
/// <summary>
/// Determines whether all elements in an async-enumerable sequence satisfy a condition
/// </summary>
/// <typeparam name="TSource">The type of element in the sequence</typeparam>
/// <param name="source">An sequence whose elements to apply the predicate to</param>
/// <param name="predicate">An asynchronous predicate to apply to each element of the source sequence</param>
/// <returns>
/// A Task containing a value indicating whether all elements in the sequence
/// pass the test in the specified predicate
/// </returns>
/// <returns>A task that represents the asynchronous operation</returns>
public static Task<bool> AllAwaitAsync<TSource>(this IEnumerable<TSource> source,
Func<TSource, ValueTask<bool>> predicate)
{
return source.ToAsyncEnumerable().AllAwaitAsync(predicate).AsTask();
}
/// <summary>
/// Projects each element of an async-enumerable sequence into an async-enumerable
/// sequence and merges the resulting async-enumerable sequences into one async-enumerable
/// sequence
/// </summary>
/// <typeparam name="TSource">The type of elements in the source sequence</typeparam>
/// <typeparam name="TResult">The type of elements in the projected inner sequences and the merged result sequence</typeparam>
/// <param name="source">An async-enumerable sequence of elements to project</param>
/// <param name="predicate">An asynchronous selector function to apply to each element of the source sequence</param>
/// <returns>
/// An async-enumerable sequence whose elements are the result of invoking the one-to-many
/// transform function on each element of the source sequence and awaiting the result
/// </returns>
public static IAsyncEnumerable<TResult> SelectManyAwait<TSource, TResult>(this IEnumerable<TSource> source,
Func<TSource, Task<IList<TResult>>> predicate)
{
async ValueTask<IAsyncEnumerable<TResult>> getAsyncEnumerable(TSource items)
{
var rez = await predicate(items);
return rez.ToAsyncEnumerable();
}
return source.ToAsyncEnumerable().SelectManyAwait(getAsyncEnumerable);
}
/// <summary>
/// Projects each element of an async-enumerable sequence into an async-enumerable
/// sequence and merges the resulting async-enumerable sequences into one async-enumerable
/// sequence
/// </summary>
/// <typeparam name="TSource">The type of elements in the source sequence</typeparam>
/// <typeparam name="TResult">The type of elements in the projected inner sequences and the merged result sequence</typeparam>
/// <param name="source">An async-enumerable sequence of elements to project</param>
/// <param name="predicate">An asynchronous selector function to apply to each element of the source sequence</param>
/// <returns>
/// An async-enumerable sequence whose elements are the result of invoking the one-to-many
/// transform function on each element of the source sequence and awaiting the result
/// </returns>
public static IAsyncEnumerable<TResult> SelectManyAwait<TSource, TResult>(this IEnumerable<TSource> source,
Func<TSource, Task<IEnumerable<TResult>>> predicate)
{
async ValueTask<IAsyncEnumerable<TResult>> getAsyncEnumerable(TSource items)
{
var rez = await predicate(items);
return rez.ToAsyncEnumerable();
}
return source.ToAsyncEnumerable().SelectManyAwait(getAsyncEnumerable);
}
/// <summary>
/// Filters the elements of an async-enumerable sequence based on an asynchronous
/// predicate
/// </summary>
/// <typeparam name="TSource"></typeparam>
/// <param name="source">An async-enumerable sequence whose elements to filter</param>
/// <param name="predicate">An asynchronous predicate to test each source element for a condition</param>
/// <returns>
/// An async-enumerable sequence that contains elements from the input sequence that
/// satisfy the condition
/// </returns>
public static IAsyncEnumerable<TSource> WhereAwait<TSource>(this IEnumerable<TSource> source,
Func<TSource, ValueTask<bool>> predicate)
{
return source.ToAsyncEnumerable().WhereAwait(predicate);
}
/// <summary>
/// Determines whether any element in an async-enumerable sequence satisfies a condition
/// </summary>
/// <typeparam name="TSource">The type of element in the sequence</typeparam>
/// <param name="source">An async-enumerable sequence whose elements to apply the predicate to</param>
/// <param name="predicate">An asynchronous predicate to apply to each element of the source sequence</param>
/// <returns>
/// A Task containing a value indicating whether any elements in the source
/// sequence pass the test in the specified predicate
/// </returns>
/// <returns>A task that represents the asynchronous operation</returns>
public static Task<bool> AnyAwaitAsync<TSource>(this IEnumerable<TSource> source,
Func<TSource, ValueTask<bool>> predicate)
{
return source.ToAsyncEnumerable().AnyAwaitAsync(predicate).AsTask();
}
/// <summary>
/// Returns the only element of an async-enumerable sequence that satisfies the condition
/// in the asynchronous predicate, or a default value if no such element exists,
/// and reports an exception if there is more than one element in the async-enumerable
/// sequence that matches the predicate
/// </summary>
/// <typeparam name="TSource">The type of elements in the source sequence</typeparam>
/// <param name="source">Source async-enumerable sequence</param>
/// <param name="predicate">An asynchronous predicate that will be applied to each element of the source sequence</param>
/// <returns>
/// Task containing the only element in the async-enumerable sequence that satisfies
/// the condition in the asynchronous predicate, or a default value if no such element
/// exists
/// </returns>
/// <returns>A task that represents the asynchronous operation</returns>
public static Task<TSource> SingleOrDefaultAwaitAsync<TSource>(this IEnumerable<TSource> source,
Func<TSource, ValueTask<bool>> predicate)
{
return source.ToAsyncEnumerable().SingleOrDefaultAwaitAsync(predicate).AsTask();
}
/// <summary>
/// Creates a list from an async-enumerable sequence
/// </summary>
/// <typeparam name="TSource">The type of the elements in the source sequence</typeparam>
/// <param name="source">The source async-enumerable sequence to get a list of elements for</param>
/// <returns>
/// An async-enumerable sequence containing a single element with a list containing
/// all the elements of the source sequence
/// </returns>
/// <returns>A task that represents the asynchronous operation</returns>
public static Task<List<TSource>> ToListAsync<TSource>(this IEnumerable<TSource> source)
{
return source.ToAsyncEnumerable().ToListAsync().AsTask();
}
/// <summary>
/// Sorts the elements of a sequence in descending order according to a key obtained
/// by invoking a transform function on each element and awaiting the result
/// </summary>
/// <typeparam name="TSource">The type of the elements of source</typeparam>
/// <typeparam name="TKey">The type of the key returned by keySelector</typeparam>
/// <param name="source">An async-enumerable sequence of values to order</param>
/// <param name="keySelector">An asynchronous function to extract a key from an element</param>
/// <returns>
/// An ordered async-enumerable sequence whose elements are sorted in descending
/// order according to a key
/// </returns>
public static IOrderedAsyncEnumerable<TSource> OrderByDescendingAwait<TSource, TKey>(
this IEnumerable<TSource> source, Func<TSource, ValueTask<TKey>> keySelector)
{
return source.ToAsyncEnumerable().OrderByDescendingAwait(keySelector);
}
/// <summary>
/// Groups the elements of an async-enumerable sequence and selects the resulting
/// elements by using a specified function
/// </summary>
/// <typeparam name="TSource">The type of the elements in the source sequence</typeparam>
/// <typeparam name="TKey">The type of the grouping key computed for each element in the source sequence</typeparam>
/// <typeparam name="TElement">The type of the elements within the groups computed for each element in the source sequence</typeparam>
/// <param name="source">An async-enumerable sequence whose elements to group</param>
/// <param name="keySelector">An asynchronous function to extract the key for each element</param>
/// <param name="elementSelector">An asynchronous function to map each source element to an element in an async-enumerable group</param>
/// <returns>
/// A sequence of async-enumerable groups, each of which corresponds to a unique
/// key value, containing all elements that share that same key value
/// </returns>
public static IAsyncEnumerable<IAsyncGrouping<TKey, TElement>> GroupByAwait<TSource, TKey, TElement>(
this IEnumerable<TSource> source, Func<TSource, ValueTask<TKey>> keySelector,
Func<TSource, ValueTask<TElement>> elementSelector)
{
return source.ToAsyncEnumerable().GroupByAwait(keySelector, elementSelector);
}
/// <summary>
/// Applies an accumulator function over an async-enumerable sequence, returning
/// the result of the aggregation as a single element in the result sequence. The
/// specified seed value is used as the initial accumulator value
/// </summary>
/// <typeparam name="TSource">specified seed value is used as the initial accumulator value</typeparam>
/// <typeparam name="TAccumulate">The type of the result of aggregation</typeparam>
/// <param name="source">An async-enumerable sequence to aggregate over</param>
/// <param name="seed">The initial accumulator value</param>
/// <param name="accumulator">An asynchronous accumulator function to be invoked and awaited on each element</param>
/// <returns>A Task containing the final accumulator value</returns>
public static ValueTask<TAccumulate> AggregateAwaitAsync<TSource, TAccumulate>(
this IEnumerable<TSource> source, TAccumulate seed,
Func<TAccumulate, TSource, ValueTask<TAccumulate>> accumulator)
{
return source.ToAsyncEnumerable().AggregateAwaitAsync(seed, accumulator);
}
/// <summary>
/// Creates a dictionary from an async-enumerable sequence using the specified asynchronous
/// key and element selector functions
/// </summary>
/// <typeparam name="TSource">The type of the elements in the source sequence</typeparam>
/// <typeparam name="TKey">The type of the dictionary key computed for each element in the source sequence</typeparam>
/// <typeparam name="TElement">The type of the dictionary value computed for each element in the source sequence</typeparam>
/// <param name="source">An async-enumerable sequence to create a dictionary for</param>
/// <param name="keySelector">An asynchronous function to extract a key from each element</param>
/// <param name="elementSelector">An asynchronous transform function to produce a result element value from each element</param>
/// <returns>
/// A Task containing a dictionary mapping unique key values onto the corresponding
/// source sequence's element
/// </returns>
public static ValueTask<Dictionary<TKey, TElement>> ToDictionaryAwaitAsync<TSource, TKey, TElement>(
this IEnumerable<TSource> source, Func<TSource, ValueTask<TKey>> keySelector,
Func<TSource, ValueTask<TElement>> elementSelector) where TKey : notnull
{
return source.ToAsyncEnumerable().ToDictionaryAwaitAsync(keySelector, elementSelector);
}
/// <summary>
/// Groups the elements of an async-enumerable sequence according to a specified
/// key selector function
/// </summary>
/// <typeparam name="TSource">The type of the elements in the source sequence</typeparam>
/// <typeparam name="TKey">The type of the grouping key computed for each element in the source sequence</typeparam>
/// <param name="source">An async-enumerable sequence whose elements to group</param>
/// <param name="keySelector">An asynchronous function to extract the key for each element</param>
/// <returns>
/// A sequence of async-enumerable groups, each of which corresponds to a unique
/// key value, containing all elements that share that same key value
/// </returns>
public static IAsyncEnumerable<IAsyncGrouping<TKey, TSource>> GroupByAwait<TSource, TKey>(this IEnumerable<TSource> source, Func<TSource, ValueTask<TKey>> keySelector)
{
return source.ToAsyncEnumerable().GroupByAwait(keySelector);
}
/// <summary>
/// Computes the sum of a sequence of System.Decimal values that are obtained by
/// invoking a transform function on each element of the source sequence and awaiting
/// the result
/// </summary>
/// <typeparam name="TSource">The type of elements in the source sequence</typeparam>
/// <param name="source">A sequence of values that are used to calculate a sum</param>
/// <param name="selector">An asynchronous transform function to apply to each element</param>
/// <returns>A Task containing the sum of the values in the source sequence</returns>
public static ValueTask<decimal> SumAwaitAsync<TSource>(this IEnumerable<TSource> source,
Func<TSource, ValueTask<decimal>> selector)
{
return source.ToAsyncEnumerable().SumAwaitAsync(selector);
}
} | AsyncIEnumerableExtensions |
csharp | MassTransit__MassTransit | tests/MassTransit.Tests/Serialization/Redelivery_Specs.cs | {
"start": 590,
"end": 3280
} | public class ____
{
[Test]
public async Task Should_include_the_required_headers()
{
await using var provider = CreateServiceProvider();
var harness = await provider.StartTestHarness();
await harness.Bus.Publish(new FaultyMessage());
Assert.That(await harness.Published.Any<FinalMessage>());
IList<IReceivedMessage<FaultyMessage>> messages = await harness.Consumed.SelectAsync<FaultyMessage>().Take(2).ToListAsync();
IReceivedMessage<FaultyMessage> faulted = messages.First();
Assert.That(faulted, Is.Not.Null);
Assert.That(faulted.Context.SupportedMessageTypes, Does.Contain(MessageUrn.ForTypeString<FaultyMessage>()));
IReceivedMessage<FaultyMessage> consumed = messages.Last();
Assert.That(consumed, Is.Not.Null);
Assert.That(consumed.Context.SupportedMessageTypes, Does.Contain(MessageUrn.ForTypeString<FaultyMessage>()));
await harness.Stop();
}
ServiceProvider CreateServiceProvider()
{
return new ServiceCollection()
.AddMassTransitTestHarness(x =>
{
x.AddConsumer<FaultyConsumer>();
x.AddConfigureEndpointsCallback((provider, name, cfg) =>
{
cfg.UseDelayedRedelivery(r =>
{
r.Intervals(5, 10);
r.ReplaceMessageId = true;
});
});
x.UsingInMemory((context, cfg) =>
{
if (_serializerType == typeof(SystemTextJsonMessageSerializer))
{
}
else if (_serializerType == typeof(SystemTextJsonRawMessageSerializer))
{
cfg.ClearSerialization();
cfg.UseRawJsonSerializer();
}
else if (_serializerType == typeof(NewtonsoftJsonMessageSerializer))
{
cfg.ClearSerialization();
cfg.UseNewtonsoftJsonSerializer();
}
else if (_serializerType == typeof(NewtonsoftRawJsonMessageSerializer))
{
cfg.ClearSerialization();
cfg.UseNewtonsoftRawJsonSerializer();
}
cfg.ConfigureEndpoints(context);
});
})
.BuildServiceProvider(true);
}
readonly Type _serializerType;
public Redelivery_Specs(Type serializerType)
{
_serializerType = serializerType;
}
| Redelivery_Specs |
csharp | rabbitmq__rabbitmq-dotnet-client | projects/RabbitMQ.Client/Impl/ConnectionStartDetails.cs | {
"start": 1456,
"end": 2110
} | internal sealed class ____
{
public byte[] m_locales;
public byte[] m_mechanisms;
public IDictionary<string, object?>? m_serverProperties;
public byte m_versionMajor;
public byte m_versionMinor;
public ConnectionStartDetails(byte[] locales, byte[] mechanisms, IDictionary<string, object?>? serverProperties, byte versionMajor, byte versionMinor)
{
m_locales = locales;
m_mechanisms = mechanisms;
m_serverProperties = serverProperties;
m_versionMajor = versionMajor;
m_versionMinor = versionMinor;
}
}
}
| ConnectionStartDetails |
csharp | icsharpcode__ILSpy | ICSharpCode.Decompiler/TypeSystem/ITypeDefinition.cs | {
"start": 1413,
"end": 3450
} | public interface ____ : ITypeDefinitionOrUnknown, IType, IEntity
{
ExtensionInfo? ExtensionInfo { get; }
IReadOnlyList<ITypeDefinition> NestedTypes { get; }
IReadOnlyList<IMember> Members { get; }
IEnumerable<IField> Fields { get; }
IEnumerable<IMethod> Methods { get; }
IEnumerable<IProperty> Properties { get; }
IEnumerable<IEvent> Events { get; }
/// <summary>
/// Gets the known type code for this type definition.
/// </summary>
KnownTypeCode KnownTypeCode { get; }
/// <summary>
/// For enums: returns the underlying primitive type.
/// For all other types: returns <see langword="null"/>.
/// </summary>
IType? EnumUnderlyingType { get; }
/// <summary>
/// For structs: returns whether this is a readonly struct.
/// For all other types: returns false.
/// </summary>
bool IsReadOnly { get; }
/// <summary>
/// Gets the short type name as stored in metadata.
/// That is, the short type name including the generic arity (`N) appended.
/// </summary>
/// <remarks>
/// "Int32" for int
/// "List`1" for List<T>
/// "List`1" for List<string>
/// </remarks>
string MetadataName { get; }
/// <summary>
/// Gets/Sets the declaring type (incl. type arguments, if any).
/// This property will return null for top-level types.
/// </summary>
new IType? DeclaringType { get; } // solves ambiguity between IType.DeclaringType and IEntity.DeclaringType
/// <summary>
/// Gets whether this type contains extension methods or C# 14 extensions.
/// </summary>
/// <remarks>This property is used to speed up the search for extension members.</remarks>
bool HasExtensions { get; }
/// <summary>
/// The nullability specified in the [NullableContext] attribute on the type.
/// This serves as default nullability for members of the type that do not have a [Nullable] attribute.
/// </summary>
Nullability NullableContext { get; }
/// <summary>
/// Gets whether the type has the necessary members to be considered a C# 9 | ITypeDefinition |
csharp | ChilliCream__graphql-platform | src/HotChocolate/Language/src/Language.Utf8/TokenHelper.cs | {
"start": 112,
"end": 1539
} | internal static class ____
{
private static readonly bool[] s_isString = new bool[22];
private static readonly bool[] s_isScalar = new bool[22];
static TokenHelper()
{
s_isString[(int)TokenKind.BlockString] = true;
s_isString[(int)TokenKind.String] = true;
s_isScalar[(int)TokenKind.BlockString] = true;
s_isScalar[(int)TokenKind.String] = true;
s_isScalar[(int)TokenKind.Integer] = true;
s_isScalar[(int)TokenKind.Float] = true;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static bool IsDescription(ref Utf8GraphQLReader reader)
{
ref var searchSpace = ref MemoryMarshal.GetReference(s_isString.AsSpan());
var index = (int)reader.Kind;
return Unsafe.Add(ref searchSpace, index);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static bool IsString(ref Utf8GraphQLReader reader)
{
ref var searchSpace = ref MemoryMarshal.GetReference(s_isString.AsSpan());
var index = (int)reader.Kind;
return Unsafe.Add(ref searchSpace, index);
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static bool IsScalarValue(ref Utf8GraphQLReader reader)
{
ref var searchSpace = ref MemoryMarshal.GetReference(s_isScalar.AsSpan());
var index = (int)reader.Kind;
return Unsafe.Add(ref searchSpace, index);
}
}
| TokenHelper |
csharp | smartstore__Smartstore | src/Smartstore.Modules/Smartstore.ShippingByWeight/Domain/ShippingRateByWeight.cs | {
"start": 162,
"end": 2017
} | public partial class ____ : BaseEntity
{
/// <summary>
/// Gets or sets the store identifier
/// </summary>
public int StoreId { get; set; }
/// <summary>
/// Gets or sets the country identifier
/// </summary>
public int CountryId { get; set; }
///// <summary>
///// Gets or sets the state/province identifier
///// </summary>
//public int StateProvinceId { get; set; }
/// <summary>
/// Gets or sets the zip
/// </summary>
[StringLength(100)]
public string Zip { get; set; }
/// <summary>
/// Gets or sets the shipping method identifier
/// </summary>
public int ShippingMethodId { get; set; }
/// <summary>
/// Gets or sets the "from" value
/// </summary>
public decimal From { get; set; }
/// <summary>
/// Gets or sets the "to" value
/// </summary>
public decimal To { get; set; }
/// <summary>
/// Gets or sets a value indicating whether to use percentage
/// </summary>
public bool UsePercentage { get; set; }
/// <summary>
/// Gets or sets the shipping charge percentage
/// </summary>
public decimal ShippingChargePercentage { get; set; }
/// <summary>
/// Gets or sets the shipping charge amount
/// </summary>
public decimal ShippingChargeAmount { get; set; }
/// <summary>
/// Gets or sets the shipping charge amount
/// </summary>
public decimal SmallQuantitySurcharge { get; set; }
/// <summary>
/// Gets or sets the shipping charge percentage
/// </summary>
public decimal SmallQuantityThreshold { get; set; }
}
}
| ShippingRateByWeight |
csharp | microsoft__PowerToys | src/common/UITestAutomation/WindowHelper.cs | {
"start": 11607,
"end": 12544
} | public enum ____
{
ScreenWidth = 0, // Width of the primary screen in pixels (SM_CXSCREEN)
ScreenHeight = 1, // Height of the primary screen in pixels (SM_CYSCREEN)
VirtualScreenWidth = 78, // Width of the virtual screen that includes all monitors (SM_CXVIRTUALSCREEN)
VirtualScreenHeight = 79, // Height of the virtual screen that includes all monitors (SM_CYVIRTUALSCREEN)
MonitorCount = 80, // Number of display monitors (SM_CMONITORS, available on Windows XP+)
}
public static (int CenterX, int CenterY) GetScreenCenter()
{
int width = GetSystemMetrics((int)SystemMetric.ScreenWidth);
int height = GetSystemMetrics((int)SystemMetric.ScreenHeight);
return (width / 2, height / 2);
}
}
}
}
| SystemMetric |
csharp | microsoft__garnet | libs/server/Objects/Types/GarnetObjectBase.cs | {
"start": 365,
"end": 10246
} | public abstract class ____ : IGarnetObject
{
int serializationState;
public byte[] serialized;
/// <inheritdoc />
public abstract byte Type { get; }
/// <inheritdoc />
public long Expiration { get; set; }
/// <inheritdoc />
public long Size { get; set; }
protected GarnetObjectBase(long expiration, long size)
{
Debug.Assert(size >= 0);
this.Expiration = expiration;
this.Size = size;
}
protected GarnetObjectBase(BinaryReader reader, long size)
: this(expiration: reader.ReadInt64(), size: size)
{
}
/// <inheritdoc />
public void Serialize(BinaryWriter writer)
{
while (true)
{
if (serializationState == (int)SerializationPhase.REST && MakeTransition(SerializationPhase.REST, SerializationPhase.SERIALIZING))
{
// Directly serialize to wire, do not cache serialized state
writer.Write(Type);
DoSerialize(writer);
serializationState = (int)SerializationPhase.REST;
return;
}
if (serializationState == (int)SerializationPhase.SERIALIZED)
{
// If serialized state is cached, use that
var _serialized = serialized;
if (_serialized != null)
{
writer.Write(Type);
writer.Write(_serialized);
}
else
{
// Write null object to stream
writer.Write((byte)GarnetObjectType.Null);
}
return;
}
Thread.Yield();
}
}
/// <inheritdoc />
public void CopyUpdate(ref IGarnetObject oldValue, ref IGarnetObject newValue, bool isInNewVersion)
{
newValue = Clone();
newValue.Expiration = Expiration;
// If we are not currently taking a checkpoint, we can delete the old version
// since the new version of the object is already created.
if (!isInNewVersion)
{
// Wait for any concurrent ongoing serialization of oldValue to complete
while (true)
{
if (serializationState == (int)SerializationPhase.REST && MakeTransition(SerializationPhase.REST, SerializationPhase.SERIALIZED))
break;
if (serializationState >= (int)SerializationPhase.SERIALIZED)
break;
_ = Thread.Yield();
}
oldValue = null;
return;
}
// Create a serialized version for checkpoint version (v)
while (true)
{
if (serializationState == (int)SerializationPhase.REST && MakeTransition(SerializationPhase.REST, SerializationPhase.SERIALIZING))
{
using var ms = new MemoryStream();
using var writer = new BinaryWriter(ms, Encoding.UTF8);
DoSerialize(writer);
serialized = ms.ToArray();
serializationState = (int)SerializationPhase.SERIALIZED;
return;
}
if (serializationState >= (int)SerializationPhase.SERIALIZED)
return;
Thread.Yield();
}
}
/// <summary>
/// Clone object (shallow copy)
/// </summary>
/// <returns></returns>
public abstract GarnetObjectBase Clone();
/// <inheritdoc />
public abstract bool Operate(ref ObjectInput input, ref GarnetObjectStoreOutput output, byte respProtocolVersion, out long sizeChange);
/// <inheritdoc />
public abstract void Dispose();
/// <summary>
/// Serialize to given writer
/// NOTE: Make sure to first call base.DoSerialize(writer) in all derived classes.
/// </summary>
public virtual void DoSerialize(BinaryWriter writer)
{
writer.Write(Expiration);
}
private bool MakeTransition(SerializationPhase expectedPhase, SerializationPhase nextPhase)
{
if (Interlocked.CompareExchange(ref serializationState, (int)nextPhase, (int)expectedPhase) != (int)expectedPhase) return false;
return true;
}
/// <summary>
/// Scan the items of the collection
/// </summary>
/// <param name="start">Shift the scan to this index</param>
/// <param name="items">The matching items in the collection</param>
/// <param name="cursor">The cursor in the current page</param>
/// <param name="count">The number of items being taken in one iteration</param>
/// <param name="pattern">A patter used to match the members of the collection</param>
/// <param name="patternLength">The number of characters in the pattern</param>
/// <returns></returns>
public abstract unsafe void Scan(long start, out List<byte[]> items, out long cursor, int count = 10, byte* pattern = default, int patternLength = 0, bool isNoValue = false);
/// <summary>
/// Implement Scan command
/// </summary>
/// <param name="input"></param>
/// <param name="output"></param>
/// <param name="respProtocolVersion"></param>
protected unsafe void Scan(ref ObjectInput input, ref GarnetObjectStoreOutput output, byte respProtocolVersion)
{
using var writer = new RespMemoryWriter(respProtocolVersion, ref output.SpanByteAndMemory);
if (ReadScanInput(ref input, ref output.SpanByteAndMemory, out var cursorInput, out var pattern,
out var patternLength, out var limitCount, out var isNoValue, out var error))
{
Scan(cursorInput, out var items, out var cursorOutput, limitCount, pattern,
patternLength, isNoValue);
writer.WriteArrayLength(2);
writer.WriteInt64AsBulkString(cursorOutput);
if (items.Count == 0)
{
// Empty array
writer.WriteEmptyArray();
}
else
{
// Write size of the array
writer.WriteArrayLength(items.Count);
foreach (var item in items)
{
if (item != null)
{
writer.WriteBulkString(item);
}
else
{
writer.WriteNull();
}
}
}
output.Header.result1 = items.Count;
}
else
{
writer.WriteError(error);
}
}
/// <summary>
/// Reads and parses scan parameters from RESP format
/// </summary>
/// <param name="input"></param>
/// <param name="output"></param>
/// <param name="cursorInput"></param>
/// <param name="pattern"></param>
/// <param name="patternLength"></param>
/// <param name="countInInput"></param>
/// <param name="error"></param>
/// <returns></returns>
private static unsafe bool ReadScanInput(ref ObjectInput input, ref SpanByteAndMemory output,
out long cursorInput, out byte* pattern, out int patternLength, out int countInInput, out bool isNoValue, out ReadOnlySpan<byte> error)
{
// Largest number of items to print
var limitCountInOutput = input.arg2;
patternLength = 0;
pattern = default;
// Default of items in output
countInInput = 10;
error = default;
isNoValue = false;
// Cursor
if (!input.parseState.TryGetLong(0, out cursorInput) || cursorInput < 0)
{
error = CmdStrings.RESP_ERR_GENERIC_INVALIDCURSOR;
return false;
}
var currTokenIdx = 1;
while (currTokenIdx < input.parseState.Count)
{
var sbParam = input.parseState.GetArgSliceByRef(currTokenIdx++).ReadOnlySpan;
if (sbParam.EqualsUpperCaseSpanIgnoringCase(CmdStrings.MATCH))
{
// Read pattern for keys filter
var sbPattern = input.parseState.GetArgSliceByRef(currTokenIdx++).SpanByte;
pattern = sbPattern.ToPointer();
patternLength = sbPattern.Length;
}
else if (sbParam.EqualsUpperCaseSpanIgnoringCase(CmdStrings.COUNT))
{
if (!input.parseState.TryGetInt(currTokenIdx++, out countInInput))
{
error = CmdStrings.RESP_ERR_GENERIC_VALUE_IS_NOT_INTEGER;
return false;
}
// Limiting number of items to send to the output
if (countInInput > limitCountInOutput)
countInInput = limitCountInOutput;
}
else if (sbParam.EqualsUpperCaseSpanIgnoringCase(CmdStrings.NOVALUES))
{
isNoValue = true;
}
}
return true;
}
}
} | GarnetObjectBase |
csharp | microsoft__semantic-kernel | dotnet/samples/Demos/ModelContextProtocolClientServer/MCPClient/Samples/BaseSample.cs | {
"start": 375,
"end": 4926
} | internal abstract class ____
{
/// <summary>
/// Creates an MCP client and connects it to the MCPServer server.
/// </summary>
/// <param name="kernel">Optional kernel instance to use for the MCP client.</param>
/// <param name="samplingRequestHandler">Optional handler for MCP sampling requests.</param>
/// <returns>An instance of <see cref="IMcpClient"/>.</returns>
protected static Task<McpClient> CreateMcpClientAsync(
Kernel? kernel = null,
Func<Kernel, CreateMessageRequestParams?, IProgress<ProgressNotificationValue>, CancellationToken, Task<CreateMessageResult>>? samplingRequestHandler = null)
{
KernelFunction? skSamplingHandler = null;
// Create and return the MCP client
return McpClient.CreateAsync(
clientTransport: new StdioClientTransport(new StdioClientTransportOptions
{
Name = "MCPServer",
Command = GetMCPServerPath(), // Path to the MCPServer executable
}),
clientOptions: samplingRequestHandler != null ? new McpClientOptions()
{
Handlers = new()
{
SamplingHandler = InvokeHandlerAsync,
},
} : null
);
async ValueTask<CreateMessageResult> InvokeHandlerAsync(CreateMessageRequestParams? request, IProgress<ProgressNotificationValue> progress, CancellationToken cancellationToken)
{
if (request is null)
{
throw new ArgumentNullException(nameof(request));
}
skSamplingHandler ??= KernelFunctionFactory.CreateFromMethod(
(CreateMessageRequestParams? request, IProgress<ProgressNotificationValue> progress, CancellationToken ct) =>
{
return samplingRequestHandler(kernel!, request, progress, ct);
},
"MCPSamplingHandler"
);
// The argument names must match the parameter names of the delegate the SK Function is created from
KernelArguments kernelArguments = new()
{
["request"] = request,
["progress"] = progress
};
FunctionResult functionResult = await skSamplingHandler.InvokeAsync(kernel!, kernelArguments, cancellationToken);
return functionResult.GetValue<CreateMessageResult>()!;
}
}
/// <summary>
/// Creates an instance of <see cref="Kernel"/> with the OpenAI chat completion service registered.
/// </summary>
/// <returns>An instance of <see cref="Kernel"/>.</returns>
protected static Kernel CreateKernelWithChatCompletionService()
{
// Load and validate configuration
IConfigurationRoot config = new ConfigurationBuilder()
.AddUserSecrets<Program>()
.AddEnvironmentVariables()
.Build();
if (config["OpenAI:ApiKey"] is not { } apiKey)
{
const string Message = "Please provide a valid OpenAI:ApiKey to run this sample. See the associated README.md for more details.";
Console.Error.WriteLine(Message);
throw new InvalidOperationException(Message);
}
string modelId = config["OpenAI:ChatModelId"] ?? "gpt-4o-mini";
// Create kernel
var kernelBuilder = Kernel.CreateBuilder();
kernelBuilder.Services.AddOpenAIChatCompletion(modelId: modelId, apiKey: apiKey);
return kernelBuilder.Build();
}
/// <summary>
/// Displays the list of available MCP tools.
/// </summary>
/// <param name="tools">The list of the tools to display.</param>
protected static void DisplayTools(IList<McpClientTool> tools)
{
Console.WriteLine("Available MCP tools:");
foreach (var tool in tools)
{
Console.WriteLine($"- Name: {tool.Name}, Description: {tool.Description}");
}
Console.WriteLine();
}
/// <summary>
/// Returns the path to the MCPServer server executable.
/// </summary>
/// <returns>The path to the MCPServer server executable.</returns>
private static string GetMCPServerPath()
{
// Determine the configuration (Debug or Release)
string configuration;
#if DEBUG
configuration = "Debug";
#else
configuration = "Release";
#endif
return Path.Combine("..", "..", "..", "..", "MCPServer", "bin", configuration, "net8.0", "MCPServer.exe");
}
}
| BaseSample |
csharp | microsoft__PowerToys | src/modules/launcher/Plugins/Microsoft.PowerToys.Run.Plugin.PowerToys/Components/UtilityProvider.cs | {
"start": 595,
"end": 11833
} | public class ____ : IDisposable
{
private const int MaxNumberOfRetry = 5;
private readonly List<Utility> _utilities;
private readonly FileSystemWatcher _watcher;
private readonly Lock _loadingSettingsLock = new();
private bool _disposed;
public UtilityProvider()
{
var settingsUtils = new SettingsUtils();
var generalSettings = settingsUtils.GetSettings<GeneralSettings>();
_utilities = new List<Utility>();
if (GPOWrapper.GetConfiguredColorPickerEnabledValue() != GpoRuleConfigured.Disabled)
{
_utilities.Add(new Utility(
UtilityKey.ColorPicker,
Resources.Color_Picker,
generalSettings.Enabled.ColorPicker,
(_) =>
{
using var eventHandle = new EventWaitHandle(false, EventResetMode.AutoReset, Constants.ShowColorPickerSharedEvent());
eventHandle.Set();
return true;
}));
}
if (GPOWrapper.GetConfiguredFancyZonesEnabledValue() != GpoRuleConfigured.Disabled)
{
_utilities.Add(new Utility(
UtilityKey.FancyZones,
Resources.FancyZones_Editor,
generalSettings.Enabled.FancyZones,
(_) =>
{
using var eventHandle = new EventWaitHandle(false, EventResetMode.AutoReset, Constants.FZEToggleEvent());
eventHandle.Set();
return true;
}));
}
if (GPOWrapper.GetConfiguredHostsFileEditorEnabledValue() != GpoRuleConfigured.Disabled)
{
_utilities.Add(new Utility(
UtilityKey.Hosts,
Resources.Hosts_File_Editor,
generalSettings.Enabled.Hosts,
(_) =>
{
using var eventHandle = new EventWaitHandle(false, EventResetMode.AutoReset, Constants.ShowHostsSharedEvent());
eventHandle.Set();
return true;
}));
}
if (GPOWrapper.GetConfiguredScreenRulerEnabledValue() != GpoRuleConfigured.Disabled)
{
_utilities.Add(new Utility(
UtilityKey.MeasureTool,
Resources.Screen_Ruler,
generalSettings.Enabled.MeasureTool,
(_) =>
{
using var eventHandle = new EventWaitHandle(false, EventResetMode.AutoReset, Constants.MeasureToolTriggerEvent());
eventHandle.Set();
return true;
}));
}
if (GPOWrapper.GetConfiguredTextExtractorEnabledValue() != GpoRuleConfigured.Disabled)
{
_utilities.Add(new Utility(
UtilityKey.PowerOCR,
Resources.Text_Extractor,
generalSettings.Enabled.PowerOcr,
(_) =>
{
using var eventHandle = new EventWaitHandle(false, EventResetMode.AutoReset, Constants.ShowPowerOCRSharedEvent());
eventHandle.Set();
return true;
}));
}
if (GPOWrapper.GetConfiguredShortcutGuideEnabledValue() != GpoRuleConfigured.Disabled)
{
_utilities.Add(new Utility(
UtilityKey.ShortcutGuide,
Resources.Shortcut_Guide,
generalSettings.Enabled.ShortcutGuide,
(_) =>
{
using var eventHandle = new EventWaitHandle(false, EventResetMode.AutoReset, Constants.ShortcutGuideTriggerEvent());
eventHandle.Set();
return true;
}));
}
if (GPOWrapper.GetConfiguredRegistryPreviewEnabledValue() != GpoRuleConfigured.Disabled)
{
_utilities.Add(new Utility(
UtilityKey.RegistryPreview,
Resources.Registry_Preview,
generalSettings.Enabled.RegistryPreview,
(_) =>
{
using var eventHandle = new EventWaitHandle(false, EventResetMode.AutoReset, Constants.RegistryPreviewTriggerEvent());
eventHandle.Set();
return true;
}));
}
if (GPOWrapper.GetConfiguredCropAndLockEnabledValue() != GpoRuleConfigured.Disabled)
{
_utilities.Add(new Utility(
UtilityKey.CropAndLock,
Resources.Crop_And_Lock_Thumbnail,
generalSettings.Enabled.CropAndLock,
(_) =>
{
// Wait for the Launcher window to be hidden and activate Crop And Lock in the correct window
var timer = new System.Timers.Timer(TimeSpan.FromMilliseconds(500));
timer.Elapsed += (_, _) =>
{
timer.Stop();
using var eventHandle = new EventWaitHandle(false, EventResetMode.AutoReset, Constants.CropAndLockThumbnailEvent());
eventHandle.Set();
};
timer.Start();
return true;
}));
_utilities.Add(new Utility(
UtilityKey.CropAndLock,
Resources.Crop_And_Lock_Reparent,
generalSettings.Enabled.CropAndLock,
(_) =>
{
// Wait for the Launcher window to be hidden and activate Crop And Lock in the correct window
var timer = new System.Timers.Timer(TimeSpan.FromMilliseconds(500));
timer.Elapsed += (_, _) =>
{
timer.Stop();
using var eventHandle = new EventWaitHandle(false, EventResetMode.AutoReset, Constants.CropAndLockReparentEvent());
eventHandle.Set();
};
timer.Start();
return true;
}));
}
if (GPOWrapper.GetConfiguredEnvironmentVariablesEnabledValue() != GpoRuleConfigured.Disabled)
{
_utilities.Add(new Utility(
UtilityKey.EnvironmentVariables,
Resources.Environment_Variables,
generalSettings.Enabled.EnvironmentVariables,
(_) =>
{
using var eventHandle = new EventWaitHandle(false, EventResetMode.AutoReset, Constants.ShowEnvironmentVariablesSharedEvent());
eventHandle.Set();
return true;
}));
}
if (GPOWrapper.GetConfiguredWorkspacesEnabledValue() != GpoRuleConfigured.Disabled)
{
_utilities.Add(new Utility(
UtilityKey.Workspaces,
Resources.Workspaces_Editor,
generalSettings.Enabled.Workspaces,
(_) =>
{
using var eventHandle = new EventWaitHandle(false, EventResetMode.AutoReset, Constants.WorkspacesLaunchEditorEvent());
eventHandle.Set();
return true;
}));
}
_watcher = new FileSystemWatcher
{
Path = Path.GetDirectoryName(settingsUtils.GetSettingsFilePath()),
Filter = Path.GetFileName(settingsUtils.GetSettingsFilePath()),
NotifyFilter = NotifyFilters.LastWrite,
};
_watcher.Changed += (s, e) => ReloadUtilities();
_watcher.EnableRaisingEvents = true;
}
public IEnumerable<Utility> GetEnabledUtilities()
{
return _utilities.Where(u => u.Enabled);
}
private void ReloadUtilities()
{
lock (_loadingSettingsLock)
{
var retry = true;
var retryCount = 0;
while (retry)
{
try
{
retryCount++;
var settingsUtils = new SettingsUtils();
var generalSettings = settingsUtils.GetSettings<GeneralSettings>();
foreach (var u in _utilities)
{
switch (u.Key)
{
case UtilityKey.ColorPicker: u.Enable(generalSettings.Enabled.ColorPicker); break;
case UtilityKey.FancyZones: u.Enable(generalSettings.Enabled.FancyZones); break;
case UtilityKey.Hosts: u.Enable(generalSettings.Enabled.Hosts); break;
case UtilityKey.PowerOCR: u.Enable(generalSettings.Enabled.PowerOcr); break;
case UtilityKey.MeasureTool: u.Enable(generalSettings.Enabled.MeasureTool); break;
case UtilityKey.ShortcutGuide: u.Enable(generalSettings.Enabled.ShortcutGuide); break;
case UtilityKey.RegistryPreview: u.Enable(generalSettings.Enabled.RegistryPreview); break;
case UtilityKey.CropAndLock: u.Enable(generalSettings.Enabled.CropAndLock); break;
case UtilityKey.EnvironmentVariables: u.Enable(generalSettings.Enabled.EnvironmentVariables); break;
case UtilityKey.Workspaces: u.Enable(generalSettings.Enabled.Workspaces); break;
}
}
retry = false;
}
catch (Exception ex)
{
if (retryCount > MaxNumberOfRetry)
{
Log.Exception("Failed to read changed settings", ex, typeof(UtilityProvider));
retry = false;
}
Thread.Sleep(500);
}
}
}
}
public void Dispose()
{
Dispose(disposing: true);
GC.SuppressFinalize(this);
}
protected virtual void Dispose(bool disposing)
{
if (!_disposed)
{
if (disposing)
{
_watcher?.Dispose();
_disposed = true;
}
}
}
}
}
| UtilityProvider |
csharp | AutoFixture__AutoFixture | Src/AutoFixture/OmitOnRecursionBehavior.cs | {
"start": 835,
"end": 2059
} | class ____ specific recursion depth.
/// </summary>
/// <param name="recursionDepth">The recursion depth at which the request will be omitted.</param>
/// <exception cref="System.ArgumentOutOfRangeException">recursionDepth is less than one.</exception>
public OmitOnRecursionBehavior(int recursionDepth)
{
if (recursionDepth < 1)
throw new ArgumentOutOfRangeException(nameof(recursionDepth), "Recursion depth must be greater than 0.");
this.recursionDepth = recursionDepth;
}
/// <summary>
/// Decorates the supplied <see cref="ISpecimenBuilder" /> with an
/// <see cref="RecursionGuard"/>.
/// </summary>
/// <param name="builder">The builder to decorate.</param>
/// <returns>
/// <paramref name="builder" /> decorated with an
/// <see cref="RecursionGuard" />.
/// </returns>
public ISpecimenBuilderNode Transform(ISpecimenBuilder builder)
{
if (builder == null) throw new ArgumentNullException(nameof(builder));
return new RecursionGuard(builder, new OmitOnRecursionHandler(), this.recursionDepth);
}
}
}
| with |
csharp | GtkSharp__GtkSharp | Source/Libs/GLibSharp/IOChannel.cs | {
"start": 20271,
"end": 20334
} | public enum ____ {
Error,
Normal,
Eof,
Again,
}
| IOStatus |
csharp | dotnet__efcore | test/EFCore.Specification.Tests/Query/AdHocMiscellaneousQueryTestBase.cs | {
"start": 33559,
"end": 33752
} | protected class ____(DbContextOptions options) : DbContext(options)
{
public DbSet<Table1> Tables1 { get; set; }
public DbSet<Table2> Tables2 { get; set; }
| Context12549 |
csharp | dotnet__orleans | src/api/Azure/Orleans.Streaming.EventHubs/Orleans.Streaming.EventHubs.cs | {
"start": 25709,
"end": 28161
} | public partial class ____ : IEventHubQueueCacheFactory
{
public EventHubQueueCacheFactory(Configuration.EventHubStreamCachePressureOptions cacheOptions, Configuration.StreamCacheEvictionOptions evictionOptions, Configuration.StreamStatisticOptions statisticOptions, IEventHubDataAdapter dataAdater, EventHubMonitorAggregationDimensions sharedDimensions, System.Func<EventHubCacheMonitorDimensions, Microsoft.Extensions.Logging.ILoggerFactory, Providers.Streams.Common.ICacheMonitor> cacheMonitorFactory = null, System.Func<EventHubBlockPoolMonitorDimensions, Microsoft.Extensions.Logging.ILoggerFactory, Providers.Streams.Common.IBlockPoolMonitor> blockPoolMonitorFactory = null) { }
public System.Func<EventHubBlockPoolMonitorDimensions, Microsoft.Extensions.Logging.ILoggerFactory, Providers.Streams.Common.IBlockPoolMonitor> BlockPoolMonitorFactory { get { throw null; } set { } }
public System.Func<EventHubCacheMonitorDimensions, Microsoft.Extensions.Logging.ILoggerFactory, Providers.Streams.Common.ICacheMonitor> CacheMonitorFactory { get { throw null; } set { } }
protected virtual void AddCachePressureMonitors(IEventHubQueueCache cache, Configuration.EventHubStreamCachePressureOptions providerOptions, Microsoft.Extensions.Logging.ILogger cacheLogger) { }
protected virtual Providers.Streams.Common.IObjectPool<Providers.Streams.Common.FixedSizeBuffer> CreateBufferPool(Configuration.StreamStatisticOptions statisticOptions, Microsoft.Extensions.Logging.ILoggerFactory loggerFactory, EventHubMonitorAggregationDimensions sharedDimensions, out string blockPoolId) { throw null; }
protected virtual IEventHubQueueCache CreateCache(string partition, IEventHubDataAdapter dataAdatper, Configuration.StreamStatisticOptions statisticOptions, Configuration.StreamCacheEvictionOptions streamCacheEvictionOptions, Streams.IStreamQueueCheckpointer<string> checkpointer, Microsoft.Extensions.Logging.ILoggerFactory loggerFactory, Providers.Streams.Common.IObjectPool<Providers.Streams.Common.FixedSizeBuffer> bufferPool, string blockPoolId, Providers.Streams.Common.TimePurgePredicate timePurge, EventHubMonitorAggregationDimensions sharedDimensions) { throw null; }
public IEventHubQueueCache CreateCache(string partition, Streams.IStreamQueueCheckpointer<string> checkpointer, Microsoft.Extensions.Logging.ILoggerFactory loggerFactory) { throw null; }
}
| EventHubQueueCacheFactory |
csharp | unoplatform__uno | src/Uno.UWP/Devices/Haptics/VibrationDevice.wasm.cs | {
"start": 168,
"end": 588
} | public partial class ____
{
private static Task<VibrationAccessStatus> RequestAccessTaskAsync() =>
Task.FromResult(VibrationAccessStatus.Allowed);
private static Task<VibrationDevice?> GetDefaultTaskAsync()
{
if (PhoneVibrationDevice.GetDefault() != null)
{
return Task.FromResult<VibrationDevice?>(new VibrationDevice());
}
return Task.FromResult<VibrationDevice?>(null);
}
}
}
| VibrationDevice |
csharp | grandnode__grandnode2 | src/Web/Grand.Web.Admin/Controllers/DownloadController.cs | {
"start": 963,
"end": 3909
} | record ____ with the specified id");
if (download.UseDownloadUrl)
return new RedirectResult(download.DownloadUrl);
//use stored data
if (download.DownloadBinary == null)
return Content($"Download data is not available any more. Download GD={download.Id}");
var fileName = !string.IsNullOrWhiteSpace(download.Filename) ? download.Filename : download.Id;
var contentType = !string.IsNullOrWhiteSpace(download.ContentType)
? download.ContentType
: "application/octet-stream";
return new FileContentResult(download.DownloadBinary, contentType) {
FileDownloadName = fileName + download.Extension
};
}
[HttpPost]
//do not validate request token (XSRF)
[IgnoreAntiforgeryToken]
public async Task<IActionResult> SaveDownloadUrl(string downloadUrl, DownloadType downloadType = DownloadType.None,
string referenceId = "")
{
if (string.IsNullOrEmpty(downloadUrl)) return Json(new { success = false, error = "URL can't be empty" });
//insert
var download = new Download {
DownloadGuid = Guid.NewGuid(),
UseDownloadUrl = true,
DownloadUrl = downloadUrl,
DownloadType = downloadType,
ReferenceId = referenceId
};
await _downloadService.InsertDownload(download);
return Json(new { downloadId = download.Id, success = true });
}
[HttpPost]
//do not validate request token (XSRF)
[IgnoreAntiforgeryToken]
public virtual async Task<IActionResult> AsyncUpload(IFormFile file, DownloadType downloadType = DownloadType.None,
string referenceId = "")
{
if (file == null)
return Json(new {
success = false,
message = "No file uploaded",
downloadGuid = Guid.Empty
});
var fileBinary = file.GetDownloadBits();
var download = new Download {
DownloadGuid = Guid.NewGuid(),
CustomerId = _contextAccessor.WorkContext.CurrentCustomer.Id,
UseDownloadUrl = false,
DownloadUrl = "",
DownloadBinary = fileBinary,
ContentType = file.ContentType,
Filename = Path.GetFileNameWithoutExtension(file.FileName),
Extension = Path.GetExtension(file.FileName),
DownloadType = downloadType,
ReferenceId = referenceId
};
await _downloadService.InsertDownload(download);
//when returning JSON the mime-type must be set to text/plain
//otherwise some browsers will pop-up a "Save As" dialog.
return Json(new {
success = true,
downloadId = download.Id,
downloadUrl = Url.Action("DownloadFile",
new { downloadGuid = download.DownloadGuid, area = Constants.AreaAdmin })
});
}
} | found |
csharp | dotnet__aspnetcore | src/Http/Http.Abstractions/test/UseMiddlewareTest.cs | {
"start": 16415,
"end": 16609
} | private class ____
{
public MiddlewareAsyncNoParametersStub(RequestDelegate next) { }
public Task InvokeAsync() => Task.CompletedTask;
}
| MiddlewareAsyncNoParametersStub |
csharp | dotnet__orleans | src/api/Orleans.Core/Orleans.Core.cs | {
"start": 37132,
"end": 37436
} | partial class ____
{
public LeaseRequest(string resourceKey, System.TimeSpan duration) { }
[Id(1)]
public System.TimeSpan Duration { get { throw null; } }
[Id(0)]
public string ResourceKey { get { throw null; } }
}
[GenerateSerializer]
| LeaseRequest |
csharp | ServiceStack__ServiceStack | ServiceStack.Redis/tests/ServiceStack.Redis.Tests/Integration/MultiThreadedRedisClientIntegrationTests.cs | {
"start": 307,
"end": 3733
} | public class ____
: IntegrationTestBase
{
private static string testData;
[OneTimeSetUp]
public void onBeforeTestFixture()
{
var results = 100.Times(x => ModelWithFieldsOfDifferentTypes.Create(x));
testData = TypeSerializer.SerializeToString(results);
}
[Test]
public void Can_support_64_threads_using_the_client_simultaneously()
{
var before = Stopwatch.GetTimestamp();
const int noOfConcurrentClients = 64; //WaitHandle.WaitAll limit is <= 64
#if NETCORE
List<Task> tasks = new List<Task>();
#else
var clientAsyncResults = new List<IAsyncResult>();
#endif
using (var redisClient = new RedisClient(TestConfig.SingleHost))
{
for (var i = 0; i < noOfConcurrentClients; i++)
{
var clientNo = i;
var action = (Action)(() => UseClientAsync(redisClient, clientNo));
#if NETCORE
tasks.Add(Task.Run(action));
#else
clientAsyncResults.Add(action.BeginInvoke(null, null));
#endif
}
}
#if NETCORE
Task.WaitAll(tasks.ToArray());
#else
WaitHandle.WaitAll(clientAsyncResults.ConvertAll(x => x.AsyncWaitHandle).ToArray());
#endif
Debug.WriteLine(String.Format("Time Taken: {0}", (Stopwatch.GetTimestamp() - before) / 1000));
}
[Test]
public void Can_support_64_threads_using_the_client_sequentially()
{
var before = Stopwatch.GetTimestamp();
const int noOfConcurrentClients = 64; //WaitHandle.WaitAll limit is <= 64
using (var redisClient = new RedisClient(TestConfig.SingleHost))
{
for (var i = 0; i < noOfConcurrentClients; i++)
{
var clientNo = i;
UseClient(redisClient, clientNo);
}
}
Debug.WriteLine(String.Format("Time Taken: {0}", (Stopwatch.GetTimestamp() - before) / 1000));
}
private void UseClientAsync(RedisClient client, int clientNo)
{
lock (this)
{
UseClient(client, clientNo);
}
}
private static void UseClient(RedisClient client, int clientNo)
{
var host = "";
try
{
host = client.Host;
Log("Client '{0}' is using '{1}'", clientNo, client.Host);
var testClientKey = "test:" + host + ":" + clientNo;
client.SetValue(testClientKey, testData);
var result = client.GetValue(testClientKey) ?? "";
Log("\t{0} => {1} len {2} {3} len", testClientKey,
testData.Length, testData.Length == result.Length ? "==" : "!=", result.Length);
}
catch (NullReferenceException ex)
{
Debug.WriteLine("NullReferenceException StackTrace: \n" + ex.StackTrace);
}
catch (Exception ex)
{
Debug.WriteLine(String.Format("\t[ERROR@{0}]: {1} => {2}",
host, ex.GetType().Name, ex.Message));
}
}
}
} | MultiThreadedRedisClientIntegrationTests |
csharp | unoplatform__uno | src/SamplesApp/UITests.Shared/Windows_UI_Xaml_Controls/AutoSuggestBoxTests/AutoSuggestBoxTestData.cs | {
"start": 671,
"end": 889
} | public class ____
{
private Author author;
public Author Author
{
get => author;
set
{
author = value;
AuthorChanged?.Invoke(this, EventArgs.Empty);
}
}
public event EventHandler AuthorChanged;
}
| Book |
csharp | MonoGame__MonoGame | Tools/MonoGame.Effect.Compiler/Effect/TPGParser/SamplerStateInfo.cs | {
"start": 302,
"end": 6747
} | public class ____
{
private SamplerState _state;
private bool _dirty;
private TextureFilterType _minFilter;
private TextureFilterType _magFilter;
private TextureFilterType _mipFilter;
private TextureAddressMode _addressU;
private TextureAddressMode _addressV;
private TextureAddressMode _addressW;
private Color _borderColor;
private int _maxAnisotropy;
private int _maxMipLevel;
private float _mipMapLevelOfDetailBias;
public SamplerStateInfo()
{
// NOTE: These match the defaults of SamplerState.
_minFilter = TextureFilterType.Linear;
_magFilter = TextureFilterType.Linear;
_mipFilter = TextureFilterType.Linear;
_addressU = TextureAddressMode.Wrap;
_addressV = TextureAddressMode.Wrap;
_addressW = TextureAddressMode.Wrap;
_borderColor = Color.White;
_maxAnisotropy = 4;
_maxMipLevel = 0;
_mipMapLevelOfDetailBias = 0.0f;
}
public string Name { get; set; }
public string TextureName { get; set; }
public TextureFilterType MinFilter
{
set
{
_minFilter = value;
_dirty = true;
}
}
public TextureFilterType MagFilter
{
set
{
_magFilter = value;
_dirty = true;
}
}
public TextureFilterType MipFilter
{
set
{
_mipFilter = value;
_dirty = true;
}
}
public TextureFilterType Filter
{
set
{
_minFilter = _magFilter = _mipFilter = value;
_dirty = true;
}
}
public TextureAddressMode AddressU
{
set
{
_addressU = value;
_dirty = true;
}
}
public TextureAddressMode AddressV
{
set
{
_addressV = value;
_dirty = true;
}
}
public TextureAddressMode AddressW
{
set
{
_addressW = value;
_dirty = true;
}
}
public Color BorderColor
{
set
{
_borderColor = value;
_dirty = true;
}
}
public int MaxAnisotropy
{
set
{
_maxAnisotropy = value;
_dirty = true;
}
}
public int MaxMipLevel
{
set
{
_maxMipLevel = value;
_dirty = true;
}
}
public float MipMapLevelOfDetailBias
{
set
{
_mipMapLevelOfDetailBias = value;
_dirty = true;
}
}
private void UpdateSamplerState()
{
// Get the existing state or create it.
if (_state == null)
_state = new SamplerState();
_state.AddressU = _addressU;
_state.AddressV = _addressV;
_state.AddressW = _addressW;
_state.BorderColor = _borderColor;
_state.MaxAnisotropy = _maxAnisotropy;
_state.MaxMipLevel = _maxMipLevel;
_state.MipMapLevelOfDetailBias = _mipMapLevelOfDetailBias;
// Figure out what kind of filter to set based on each
// individual min, mag, and mip filter settings.
//
// NOTE: We're treating "None" and "Point" the same here
// and disabling mipmapping further below.
//
if (_minFilter == TextureFilterType.Anisotropic)
_state.Filter = TextureFilter.Anisotropic;
else if (_minFilter == TextureFilterType.Linear && _magFilter == TextureFilterType.Linear && _mipFilter == TextureFilterType.Linear)
_state.Filter = TextureFilter.Linear;
else if (_minFilter == TextureFilterType.Linear && _magFilter == TextureFilterType.Linear && _mipFilter <= TextureFilterType.Point)
_state.Filter = TextureFilter.LinearMipPoint;
else if (_minFilter == TextureFilterType.Linear && _magFilter <= TextureFilterType.Point && _mipFilter == TextureFilterType.Linear)
_state.Filter = TextureFilter.MinLinearMagPointMipLinear;
else if (_minFilter == TextureFilterType.Linear && _magFilter <= TextureFilterType.Point && _mipFilter <= TextureFilterType.Point)
_state.Filter = TextureFilter.MinLinearMagPointMipPoint;
else if (_minFilter <= TextureFilterType.Point && _magFilter == TextureFilterType.Linear && _mipFilter == TextureFilterType.Linear)
_state.Filter = TextureFilter.MinPointMagLinearMipLinear;
else if (_minFilter <= TextureFilterType.Point && _magFilter == TextureFilterType.Linear && _mipFilter <= TextureFilterType.Point)
_state.Filter = TextureFilter.MinPointMagLinearMipPoint;
else if (_minFilter <= TextureFilterType.Point && _magFilter <= TextureFilterType.Point && _mipFilter <= TextureFilterType.Point)
_state.Filter = TextureFilter.Point;
else if (_minFilter <= TextureFilterType.Point && _magFilter <= TextureFilterType.Point && _mipFilter == TextureFilterType.Linear)
_state.Filter = TextureFilter.PointMipLinear;
// Do we need to disable mipmapping?
if (_mipFilter == TextureFilterType.None)
{
// TODO: This is the only option we have right now for
// disabling mipmapping. We should add support for MinLod
// and MaxLod which potentially does a better job at this.
_state.MipMapLevelOfDetailBias = -16.0f;
_state.MaxMipLevel = 0;
}
_dirty = false;
}
public SamplerState State
{
get
{
if (_dirty)
UpdateSamplerState();
return _state;
}
}
}
}
| SamplerStateInfo |
csharp | mongodb__mongo-csharp-driver | tests/MongoDB.Driver.Tests/Linq/Linq3Implementation/Translators/ExpressionToPipelineTranslators/OfTypeMethodToPipelineTranslatorTests.cs | {
"start": 2562,
"end": 2870
} | public sealed class ____ : MongoCollectionFixture<Entity>
{
protected override IEnumerable<Entity> InitialData =>
[
new Company { Id = 1 },
new Company { Id = 2 },
new Contact { Id = 3 }
];
}
}
}
| ClassFixture |
csharp | AvaloniaUI__Avalonia | src/Avalonia.X11/X11Structs.cs | {
"start": 50019,
"end": 50351
} | struct ____ {
public IntPtr Name;
public int Primary;
public int Automatic;
public int NOutput;
public int X;
public int Y;
public int Width;
public int Height;
public int MWidth;
public int MHeight;
public IntPtr* Outputs;
}
}
| XRRMonitorInfo |
csharp | mongodb__mongo-csharp-driver | tests/MongoDB.Driver.Tests/ClusterTests.cs | {
"start": 1342,
"end": 8642
} | public class ____ : LoggableTestClass
{
private static readonly HashSet<string> __commandsToNotCapture = new HashSet<string>
{
"hello",
OppressiveLanguageConstants.LegacyHelloCommandName,
"getLastError",
"authenticate",
"saslStart",
"saslContinue",
"getnonce"
};
private const string _collectionName = "test";
private const string _databaseName = "test";
public ClusterTests(ITestOutputHelper output) : base(output)
{
}
/// <summary>
/// Test that starting a new transaction on a pinned ClientSession unpins the
/// session and normal server selection is performed for the next operation.
/// </summary>
[Theory]
[ParameterAttributeData]
public void SelectServer_loadbalancing_prose_test([Values(false, true)] bool async)
{
RequireServer.Check()
.Supports(Feature.ShardedTransactions, Feature.FailPointsBlockConnection)
.ClusterType(ClusterType.Sharded)
.MultipleMongoses(true);
// temporary disable the test on Auth envs due to operations timings irregularities
RequireServer.Check().Authentication(false);
var applicationName = FailPoint.DecorateApplicationName("loadBalancingTest", async);
const int threadsCount = 10;
const int commandsFailPointPerThreadCount = 10;
const int commandsPerThreadCount = 100;
const double maxCommandsOnSlowServerRatio = 0.3; // temporary set slow server load to 30% from 25% until find timings are investigated
const double operationsCountTolerance = 0.10;
var failCommand = BsonDocument.Parse($"{{ configureFailPoint: 'failCommand', mode : {{ times : 10000 }}, data : {{ failCommands : [\"find\"], blockConnection: true, blockTimeMS: 500, appName: '{applicationName}' }} }}");
DropCollection();
var eventCapturer = CreateEventCapturer();
using (var client = CreateMongoClient(eventCapturer, applicationName))
{
var slowServer = client.GetClusterInternal().SelectServer(OperationContext.NoTimeout, WritableServerSelector.Instance);
var fastServer = client.GetClusterInternal().SelectServer(OperationContext.NoTimeout, new DelegateServerSelector((_, servers) => servers.Where(s => s.ServerId != slowServer.ServerId)));
using var failPoint = FailPoint.Configure(slowServer, NoCoreSession.NewHandle(), failCommand, async);
var database = client.GetDatabase(_databaseName);
CreateCollection();
var collection = database.GetCollection<BsonDocument>(_collectionName);
// warm up connections
var channels = new ConcurrentBag<IChannelHandle>();
ThreadingUtilities.ExecuteOnNewThreads(threadsCount, i =>
{
channels.Add(slowServer.GetChannel(OperationContext.NoTimeout));
channels.Add(fastServer.GetChannel(OperationContext.NoTimeout));
});
foreach (var channel in channels)
{
channel.Dispose();
}
var (allCount, eventsOnSlowServerCount) = ExecuteFindOperations(collection, slowServer.ServerId, commandsFailPointPerThreadCount);
eventsOnSlowServerCount.Should().BeLessThan((int)(allCount * maxCommandsOnSlowServerRatio));
failPoint.Dispose();
(allCount, eventsOnSlowServerCount) = ExecuteFindOperations(collection, slowServer.ServerId, commandsPerThreadCount);
var singleServerOperationsPortion = allCount / 2;
var singleServerOperationsRange = (int)Math.Ceiling(allCount * operationsCountTolerance);
eventsOnSlowServerCount.Should().BeInRange(singleServerOperationsPortion - singleServerOperationsRange, singleServerOperationsPortion + singleServerOperationsRange);
}
(int allCount, int slowServerCount) ExecuteFindOperations(IMongoCollection<BsonDocument> collection, ServerId serverId, int operationsCount)
{
eventCapturer.Clear();
ThreadingUtilities.ExecuteOnNewThreads(threadsCount, __ =>
{
for (int i = 0; i < operationsCount; i++)
{
if (async)
{
var cursor = collection.FindAsync(new BsonDocument()).GetAwaiter().GetResult();
_ = cursor.FirstOrDefaultAsync().GetAwaiter().GetResult();
}
else
{
_ = collection.Find(new BsonDocument()).FirstOrDefault();
}
}
});
var events = eventCapturer.Events
.Where(e => e is CommandStartedEvent)
.Cast<CommandStartedEvent>()
.ToArray();
var eventsOnSlowServerCountActual = events.Where(e => e.ConnectionId.ServerId == serverId).Count();
return (events.Length, eventsOnSlowServerCountActual);
}
}
private EventCapturer CreateEventCapturer() =>
new EventCapturer()
.Capture<CommandStartedEvent>(e => !__commandsToNotCapture.Contains(e.CommandName));
private void CreateCollection()
{
var client = DriverTestConfiguration.Client;
var database = client.GetDatabase(_databaseName).WithWriteConcern(WriteConcern.WMajority);
var collection = database.GetCollection<BsonDocument>(_collectionName);
collection.InsertOne(new BsonDocument());
}
private IMongoClient CreateMongoClient(EventCapturer eventCapturer, string applicationName)
{
// Increase localThresholdMS and wait until all nodes are discovered to avoid false positives.
var client = DriverTestConfiguration.CreateMongoClient((MongoClientSettings settings) =>
{
settings.Servers = settings.Servers.Take(2).ToArray();
settings.ApplicationName = applicationName;
settings.ClusterConfigurator = c => c.Subscribe(eventCapturer);
settings.LocalThreshold = TimeSpan.FromMilliseconds(1000);
settings.LoggingSettings = LoggingSettings;
},
true);
var timeOut = TimeSpan.FromSeconds(60);
bool AllServersConnected() => client.Cluster.Description.Servers.All(s => s.State == ServerState.Connected);
SpinWait.SpinUntil(AllServersConnected, timeOut).Should().BeTrue();
return client;
}
private void DropCollection()
{
var client = DriverTestConfiguration.Client;
var database = client.GetDatabase(_databaseName).WithWriteConcern(WriteConcern.WMajority);
database.DropCollection(_collectionName);
}
}
}
| ClusterTests |
csharp | microsoft__semantic-kernel | dotnet/src/IntegrationTests/Connectors/Memory/SqliteVec/SqliteVectorStoreCollectionFixture.cs | {
"start": 191,
"end": 290
} | public class ____ : ICollectionFixture<SqliteVectorStoreFixture>
{ }
| SqliteVectorStoreCollectionFixture |
csharp | abpframework__abp | framework/test/Volo.Abp.ObjectExtending.Tests/Volo/Abp/ObjectExtending/ExtensibleObject_Tests.cs | {
"start": 133,
"end": 1397
} | public class ____ : AbpObjectExtendingTestBase
{
[Fact]
public void Should_Set_Default_Values_For_Defined_Properties_On_Create()
{
var person = new ExtensibleTestPerson();
person.HasProperty("Name").ShouldBeTrue();
person.HasProperty("Age").ShouldBeTrue();
person.HasProperty("NoPairCheck").ShouldBeTrue();
person.HasProperty("CityName").ShouldBeTrue();
person.HasProperty("EnumProperty").ShouldBeTrue();
person.GetProperty<string>("Name").ShouldBeNull();
person.GetProperty<int>("Age").ShouldBe(0);
person.GetProperty<string>("NoPairCheck").ShouldBeNull();
person.GetProperty<string>("CityName").ShouldBeNull();
person.GetProperty<ExtensibleTestEnumProperty>("EnumProperty").ShouldBe(default);
}
[Fact]
public void Should_Not_Set_Default_Values_For_Defined_Properties_If_Requested()
{
var person = new ExtensibleTestPerson(false);
person.HasProperty("Name").ShouldBeFalse();
person.HasProperty("Age").ShouldBeFalse();
person.HasProperty("NoPairCheck").ShouldBeFalse();
person.HasProperty("CityName").ShouldBeFalse();
person.HasProperty("EnumProperty").ShouldBeFalse();
}
}
| ExtensibleObject_Tests |
csharp | dotnet__reactive | Rx.NET/Samples/Portable/Net40ConsoleApp_NuGet/Program.cs | {
"start": 156,
"end": 1467
} | class ____
{
static void Main(string[] args)
{
var portableClass = new PortableClassLibrary.PortableClass();
var scheduler = System.Reactive.Concurrency.CurrentThreadScheduler.Instance;
// Create timer and route output to console
portableClass.CreateTimer(10, TimeSpan.FromSeconds(1.5))
.Buffer(2)
.ObserveOn(scheduler)
.Subscribe(items =>
{
Console.WriteLine(" 1: Received items {0}", string.Join(", ", items));
}, onCompleted: () =>
{
Console.WriteLine(" 1: Finished ");
});
// Create list observer and route output to console, but specify scheduler instead of using SubscribeOnDispatcher
portableClass.CreateList(scheduler)
.Delay(TimeSpan.FromSeconds(1))
.Subscribe(item =>
{
Console.WriteLine(" 2: Received item {0}", item);
}, onCompleted: () =>
{
Console.WriteLine(" 2: Finished ");
});
Console.WriteLine("Press enter to exit");
Console.ReadLine();
}
}
}
| Program |
csharp | JamesNK__Newtonsoft.Json | Src/Newtonsoft.Json.Tests/Serialization/JsonSerializerCollectionsTests.cs | {
"start": 11004,
"end": 11652
} | public class ____ : ArrayList
{
[JsonConstructor]
public TestCollectionNonGeneric(IEnumerable l)
: base(l.Cast<object>().ToList())
{
}
}
[Test]
public void CollectionJsonConstructorNonGeneric()
{
string json = @"[1,2,3]";
TestCollectionNonGeneric l = JsonConvert.DeserializeObject<TestCollectionNonGeneric>(json);
Assert.AreEqual(3, l.Count);
Assert.AreEqual(1L, l[0]);
Assert.AreEqual(2L, l[1]);
Assert.AreEqual(3L, l[2]);
}
#endif
| TestCollectionNonGeneric |
csharp | icsharpcode__SharpZipLib | src/ICSharpCode.SharpZipLib/Zip/ZipFile.cs | {
"start": 536,
"end": 1863
} | public class ____ : EventArgs
{
#region Constructors
/// <summary>
/// Initialise a new instance of <see cref="KeysRequiredEventArgs"></see>
/// </summary>
/// <param name="name">The name of the file for which keys are required.</param>
public KeysRequiredEventArgs(string name)
{
fileName = name;
}
/// <summary>
/// Initialise a new instance of <see cref="KeysRequiredEventArgs"></see>
/// </summary>
/// <param name="name">The name of the file for which keys are required.</param>
/// <param name="keyValue">The current key value.</param>
public KeysRequiredEventArgs(string name, byte[] keyValue)
{
fileName = name;
key = keyValue;
}
#endregion Constructors
#region Properties
/// <summary>
/// Gets the name of the file for which keys are required.
/// </summary>
public string FileName
{
get { return fileName; }
}
/// <summary>
/// Gets or sets the key value
/// </summary>
public byte[] Key
{
get { return key; }
set { key = value; }
}
#endregion Properties
#region Instance Fields
private readonly string fileName;
private byte[] key;
#endregion Instance Fields
}
#endregion Keys Required Event Args
#region Test Definitions
/// <summary>
/// The strategy to apply to testing.
/// </summary>
| KeysRequiredEventArgs |
csharp | DuendeSoftware__IdentityServer | identity-server/test/IdentityServer.UnitTests/Stores/Default/CachingResourceStoreTests.cs | {
"start": 294,
"end": 7782
} | public class ____
{
private List<IdentityResource> _identityResources = new List<IdentityResource>();
private List<ApiResource> _apiResources = new List<ApiResource>();
private List<ApiScope> _apiScopes = new List<ApiScope>();
private InMemoryResourcesStore _store;
private IdentityServerOptions _options = new IdentityServerOptions();
private MockCache<ApiResource> _apiCache = new MockCache<ApiResource>();
private MockCache<IdentityResource> _identityCache = new MockCache<IdentityResource>();
private MockCache<ApiScope> _scopeCache = new MockCache<ApiScope>();
private MockCache<Resources> _resourceCache = new MockCache<Resources>();
private MockCache<CachingResourceStore<InMemoryResourcesStore>.ApiResourceNames> _apiResourceNamesCache = new MockCache<CachingResourceStore<InMemoryResourcesStore>.ApiResourceNames>();
private CachingResourceStore<InMemoryResourcesStore> _subject;
public CachingResourceStoreTests()
{
_store = new InMemoryResourcesStore(_identityResources, _apiResources, _apiScopes);
_subject = new CachingResourceStore<InMemoryResourcesStore>(
_options,
_store,
_identityCache,
_apiCache,
_scopeCache,
_resourceCache,
_apiResourceNamesCache);
}
[Fact]
public async Task FindApiScopesByNameAsync_should_populate_cache()
{
_apiScopes.Add(new ApiScope("scope1"));
_apiScopes.Add(new ApiScope("scope2"));
_apiScopes.Add(new ApiScope("scope3"));
_apiScopes.Add(new ApiScope("scope4"));
_scopeCache.Items.Count.ShouldBe(0);
var items = await _subject.FindApiScopesByNameAsync(new[] { "scope3", "scope1", "scope2", "invalid" });
items.Count().ShouldBe(3);
_scopeCache.Items.Count.ShouldBe(3);
}
[Fact]
public async Task FindApiScopesByNameAsync_should_populate_missing_cache_items()
{
_apiScopes.Add(new ApiScope("scope1"));
_apiScopes.Add(new ApiScope("scope2"));
_apiScopes.Add(new ApiScope("scope3"));
_apiScopes.Add(new ApiScope("scope4"));
_scopeCache.Items.Count.ShouldBe(0);
var items = await _subject.FindApiScopesByNameAsync(new[] { "scope1" });
items.Count().ShouldBe(1);
_scopeCache.Items.Count.ShouldBe(1);
_apiScopes.Remove(_apiScopes.Single(x => x.Name == "scope1"));
items = await _subject.FindApiScopesByNameAsync(new[] { "scope1", "scope2" });
items.Count().ShouldBe(2);
_scopeCache.Items.Count.ShouldBe(2);
_apiScopes.Remove(_apiScopes.Single(x => x.Name == "scope2"));
items = await _subject.FindApiScopesByNameAsync(new[] { "scope3", "scope2", "scope4" });
items.Count().ShouldBe(3);
_scopeCache.Items.Count.ShouldBe(4);
// this shows we will find it in the cache, even if removed from the DB
_apiScopes.Remove(_apiScopes.Single(x => x.Name == "scope3"));
items = await _subject.FindApiScopesByNameAsync(new[] { "scope3", "scope1", "scope2" });
items.Count().ShouldBe(3);
_scopeCache.Items.Count.ShouldBe(4);
}
[Fact]
public async Task FindApiResourcesByScopeNameAsync_should_populate_cache()
{
_apiResources.Add(new ApiResource("foo") { Scopes = { "foo2", "foo1" } });
_apiResources.Add(new ApiResource("bar") { Scopes = { "bar2", "bar1" } });
_apiScopes.Add(new ApiScope("foo2"));
_apiScopes.Add(new ApiScope("foo1"));
_apiScopes.Add(new ApiScope("bar2"));
_apiScopes.Add(new ApiScope("bar1"));
{
_apiCache.Items.Count.ShouldBe(0);
_apiResourceNamesCache.Items.Count.ShouldBe(0);
var items = await _subject.FindApiResourcesByScopeNameAsync(new[] { "invalid" });
items.Count().ShouldBe(0);
_apiCache.Items.Count.ShouldBe(0);
_apiResourceNamesCache.Items.Count.ShouldBe(1);
}
{
_apiCache.Items.Clear();
_apiResourceNamesCache.Items.Clear();
_resourceCache.Items.Clear();
_apiCache.Items.Count.ShouldBe(0);
_apiResourceNamesCache.Items.Count.ShouldBe(0);
var items = await _subject.FindApiResourcesByScopeNameAsync(new[] { "foo1" });
items.Count().ShouldBe(1);
items.Select(x => x.Name).ShouldBe(new[] { "foo" });
_apiCache.Items.Count.ShouldBe(1);
_apiResourceNamesCache.Items.Count.ShouldBe(1);
}
{
var items = await _subject.FindApiResourcesByScopeNameAsync(new[] { "foo2" });
items.Count().ShouldBe(1);
items.Select(x => x.Name).ShouldBe(["foo"]);
_apiCache.Items.Count.ShouldBe(1);
_apiResourceNamesCache.Items.Count.ShouldBe(2);
}
{
var items = await _subject.FindApiResourcesByScopeNameAsync(new[] { "foo1", "bar1" });
items.Count().ShouldBe(2);
items.Select(x => x.Name).ShouldBe(["foo", "bar"]);
_apiCache.Items.Count.ShouldBe(2);
_apiResourceNamesCache.Items.Count.ShouldBe(3);
}
{
var items = await _subject.FindApiResourcesByScopeNameAsync(new[] { "foo2", "foo1", "bar2", "bar1" });
items.Count().ShouldBe(2);
items.Select(x => x.Name).ShouldBe(["foo", "bar"]);
_apiCache.Items.Count.ShouldBe(2);
_apiResourceNamesCache.Items.Count.ShouldBe(4);
}
{
_apiCache.Items.Clear();
_apiResourceNamesCache.Items.Clear();
_resourceCache.Items.Clear();
var items = await _subject.FindApiResourcesByScopeNameAsync(new[] { "foo2", "foo1", "bar2", "bar1" });
items.Count().ShouldBe(2);
items.Select(x => x.Name).ShouldBe(["foo", "bar"]);
_apiCache.Items.Count.ShouldBe(2);
_apiResourceNamesCache.Items.Count.ShouldBe(4);
}
{
// should not need go to db
_apiResources.Clear();
_apiScopes.Clear();
_identityResources.Clear();
var items = await _subject.FindApiResourcesByScopeNameAsync(new[] { "foo2", "foo1", "bar2", "bar1" });
items.Count().ShouldBe(2);
items.Select(x => x.Name).ShouldBe(["foo", "bar"]);
_apiCache.Items.Count.ShouldBe(2);
_apiResourceNamesCache.Items.Count.ShouldBe(4);
}
}
[Fact]
public async Task FindApiResourcesByScopeNameAsync_should_return_same_results_twice()
{
_apiResources.Add(new ApiResource("foo") { Scopes = { "foo", "foo1" } });
_apiResources.Add(new ApiResource("bar") { Scopes = { "bar", "bar1" } });
_apiScopes.Add(new ApiScope("foo"));
_apiScopes.Add(new ApiScope("foo1"));
_apiScopes.Add(new ApiScope("bar"));
_apiScopes.Add(new ApiScope("bar1"));
{
var items = await _subject.FindApiResourcesByScopeNameAsync(new[] { "foo", "foo1", "bar", "bar1" });
items.Count().ShouldBe(2);
items.Select(x => x.Name).ShouldBe(["foo", "bar"], true);
}
{
var items = await _subject.FindApiResourcesByScopeNameAsync(new[] { "foo", "foo1", "bar", "bar1" });
items.Count().ShouldBe(2);
items.Select(x => x.Name).ShouldBe(["foo", "bar"]);
}
}
}
| CachingResourceStoreTests |
csharp | VerifyTests__Verify | src/Verify.Fixie/Verifier_Json.cs | {
"start": 38,
"end": 2569
} | partial class ____
{
[Pure]
public static SettingsTask VerifyJson(
[StringSyntax(StringSyntaxAttribute.Json)]
StringBuilder? target,
VerifySettings? settings = null,
[CallerFilePath] string sourceFile = "") =>
Verify(settings, sourceFile, _ => _.VerifyJson(target));
[Pure]
public static SettingsTask VerifyJson(
[StringSyntax(StringSyntaxAttribute.Json)]
Task<StringBuilder> target,
VerifySettings? settings = null,
[CallerFilePath] string sourceFile = "") =>
Verify(settings, sourceFile, _ => _.VerifyJson(target));
[Pure]
public static SettingsTask VerifyJson(
[StringSyntax(StringSyntaxAttribute.Json)]
ValueTask<StringBuilder> target,
VerifySettings? settings = null,
[CallerFilePath] string sourceFile = "") =>
Verify(settings, sourceFile, _ => _.VerifyJson(target));
[Pure]
public static SettingsTask VerifyJson(
[StringSyntax(StringSyntaxAttribute.Json)]
string? target,
VerifySettings? settings = null,
[CallerFilePath] string sourceFile = "") =>
Verify(settings, sourceFile, _ => _.VerifyJson(target));
[Pure]
public static SettingsTask VerifyJson(
[StringSyntax(StringSyntaxAttribute.Json)]
Task<string> target,
VerifySettings? settings = null,
[CallerFilePath] string sourceFile = "") =>
Verify(settings, sourceFile, _ => _.VerifyJson(target));
[Pure]
public static SettingsTask VerifyJson(
[StringSyntax(StringSyntaxAttribute.Json)]
ValueTask<string> target,
VerifySettings? settings = null,
[CallerFilePath] string sourceFile = "") =>
Verify(settings, sourceFile, _ => _.VerifyJson(target));
[Pure]
public static SettingsTask VerifyJson(
Stream? target,
VerifySettings? settings = null,
[CallerFilePath] string sourceFile = "") =>
Verify(settings, sourceFile, _ => _.VerifyJson(target));
[Pure]
public static SettingsTask VerifyJson(
Task<Stream> target,
VerifySettings? settings = null,
[CallerFilePath] string sourceFile = "") =>
Verify(settings, sourceFile, _ => _.VerifyJson(target));
[Pure]
public static SettingsTask VerifyJson(
ValueTask<Stream> target,
VerifySettings? settings = null,
[CallerFilePath] string sourceFile = "") =>
Verify(settings, sourceFile, _ => _.VerifyJson(target));
} | Verifier |
csharp | VerifyTests__Verify | src/Verify.Tests/DanglingSnapshotsCheckTests.cs | {
"start": 48,
"end": 1222
} | public class ____
{
[Fact]
public Task Untracked()
{
var filesOnDisk = new List<string>
{
"path/to/untracked.verified.txt"
};
var trackedFiles = new ConcurrentBag<string>
{
"path/to/tracked.verified.txt"
};
return Throws(() => DanglingSnapshotsCheck.CheckFiles(filesOnDisk, trackedFiles, "path/to"))
.IgnoreStackTrace();
}
[Fact]
public Task IncorrectCase()
{
var filesOnDisk = new List<string>
{
"path/to/Tracked.verified.txt"
};
var trackedFiles = new ConcurrentBag<string>
{
"path/to/tracked.verified.txt"
};
return Throws(() => DanglingSnapshotsCheck.CheckFiles(filesOnDisk, trackedFiles, "path/to"))
.IgnoreStackTrace();
}
[Fact]
public void AllTracked()
{
var filesOnDisk = new List<string> { "path/to/tracked.verified.txt" };
var trackedFiles = new ConcurrentBag<string> { "path/to/tracked.verified.txt" };
DanglingSnapshotsCheck.CheckFiles(filesOnDisk, trackedFiles, "path/to");
}
} | DanglingSnapshotsCheckTests |
csharp | smartstore__Smartstore | src/Smartstore.Core/Data/Migrations/DbMigrator.cs | {
"start": 241,
"end": 3876
} | public abstract class ____
{
private readonly ILifetimeScope _scope;
protected DbMigrator(ILifetimeScope scope, IMigrationTable migrationTable)
{
_scope = scope;
MigrationTable = migrationTable;
}
public abstract HookingDbContext Context { get; }
public virtual IMigrationTable MigrationTable { get; }
/// <summary>
/// Migrates the database to the latest version.
/// </summary>
/// <param name="assembly">
/// Pass an <see cref="Assembly"/> instance to reduce the set of processed migrations to migration classes found in the given assembly only.
/// </param>
/// <returns>The number of applied migrations.</returns>
public abstract Task<int> RunPendingMigrationsAsync(Assembly assembly = null, CancellationToken cancelToken = default);
/// <summary>
/// Migrates the database to <paramref name="targetVersion"/> or to the latest version if no version was specified.
/// </summary>
/// <param name="targetVersion">The target migration version. Pass -1 to perform a full rollback.</param>
/// <param name="assembly">
/// Pass an <see cref="Assembly"/> instance to reduce the set of processed migrations to migration classes found in the given assembly only.
/// </param>
/// <returns>The number of applied migrations.</returns>
public abstract Task<int> MigrateAsync(long? targetVersion = null, Assembly assembly = null, CancellationToken cancelToken = default);
/// <summary>
/// Creates an instance of the migration class.
/// </summary>
/// <param name="migrationClass">
/// The <see cref="Type" /> for the migration class, as obtained from the <see cref="MigrationTable" /> dictionary.
/// </param>
/// <returns>The migration instance.</returns>
protected IMigration CreateMigration(Type migrationClass)
{
Guard.NotNull(migrationClass);
return (IMigration)_scope.ResolveUnregistered(migrationClass);
}
#region Database initialization
/// <summary>
/// Determines whether the database contains ALL tables specified by <see cref="CheckTablesAttribute"/>.
/// If the DbContext is not annotated with <see cref="CheckTablesAttribute"/> this method will return
/// <c>true</c> if at least one user table is present in the database, otherwise <c>false</c>.
/// </summary>
/// <returns> A value indicating whether the required tables are present in the database. </returns>
public bool HasTables()
{
var tablesToCheck = Context.GetType().GetAttribute<CheckTablesAttribute>(true)?.TableNames;
if (tablesToCheck != null && tablesToCheck.Length > 0)
{
var dbTables = Context.DataProvider.GetTableNames();
// True when ALL required tables are present in the database
return dbTables.Intersect(tablesToCheck, StringComparer.InvariantCultureIgnoreCase).Count() == tablesToCheck.Length;
}
return (Context.Database.GetFacadeDependencies().DatabaseCreator as RelationalDatabaseCreator)?.HasTables() ?? false;
}
/// <summary>
/// Creates the schema for the current model in the database. The database must exist physically or this method
/// will raise an exception. To specify the table names that the database should contain in order to satisfy the model, annotate
/// the DbContext | DbMigrator |
csharp | microsoft__PowerToys | src/common/FilePreviewCommon/Formatters/XmlFormatter.cs | {
"start": 295,
"end": 1191
} | public class ____ : IFormatter
{
/// <inheritdoc/>
public string LangSet => "xml";
/// <inheritdoc/>
public string Format(string value)
{
if (string.IsNullOrWhiteSpace(value))
{
return string.Empty;
}
var xmlDocument = new XmlDocument();
xmlDocument.LoadXml(value);
var stringBuilder = new StringBuilder();
var xmlWriterSettings = new XmlWriterSettings()
{
OmitXmlDeclaration = xmlDocument.FirstChild?.NodeType != XmlNodeType.XmlDeclaration,
Indent = true,
};
using (var xmlWriter = XmlWriter.Create(stringBuilder, xmlWriterSettings))
{
xmlDocument.Save(xmlWriter);
}
return stringBuilder.ToString();
}
}
}
| XmlFormatter |
csharp | dotnetcore__FreeSql | FreeSql.DbContext/UnitOfWork/UnitOfWorkManager.cs | {
"start": 952,
"end": 7390
} | public class ____ : IUnitOfWorkManager
{
internal DbContextScopedFreeSql _ormScoped;
internal IFreeSql OrmOriginal => _ormScoped?._originalFsql;
public IFreeSql Orm => _ormScoped;
List<UowInfo> _rawUows = new List<UowInfo>();
List<UowInfo> _allUows = new List<UowInfo>();
List<BindInfo> _binds = new List<BindInfo>();
public UnitOfWorkManager(IFreeSql fsql)
{
if (fsql == null) throw new ArgumentNullException(DbContextErrorStrings.UnitOfWorkManager_Construction_CannotBeNull(nameof(UnitOfWorkManager), nameof(fsql)));
_ormScoped = DbContextScopedFreeSql.Create(fsql, null, () => this.Current);
}
#region Dispose
~UnitOfWorkManager() => this.Dispose();
int _disposeCounter;
public void Dispose()
{
if (Interlocked.Increment(ref _disposeCounter) != 1) return;
try
{
Exception exception = null;
for (var a = _rawUows.Count - 1; a >= 0; a--)
{
try
{
if (exception == null) _rawUows[a].Uow.Commit();
else _rawUows[a].Uow.Rollback();
}
catch (Exception ex)
{
if (exception == null) exception = ex;
}
}
if (exception != null) throw exception;
}
finally
{
_rawUows.Clear();
_allUows.Clear();
_binds.Clear();
GC.SuppressFinalize(this);
}
}
#endregion
/// <summary>
/// 当前的工作单元
/// </summary>
public IUnitOfWork Current => _allUows.LastOrDefault()?.Uow;
/// <summary>
/// 将仓储的事务交给我管理
/// </summary>
/// <param name="repository"></param>
public void Binding(IBaseRepository repository)
{
var bind = new BindInfo(repository);
repository.UnitOfWork = Current;
if (_binds.Any(a => a.Repository == repository)) return;
_binds.Add(bind);
}
/// <summary>
/// 将DbContext的事务交给我管理
/// </summary>
/// <param name="dbContext"></param>
public void Binding(DbContext dbContext)
{
var bind = new BindInfo(dbContext);
dbContext._isUseUnitOfWork = false;
dbContext.UnitOfWork = Current;
if (_binds.Any(a => a.DbContext == dbContext)) return;
_binds.Add(bind);
}
void SetAllBindsUow()
{
foreach (var bind in _binds)
{
if (bind.Repository != null) bind.Repository.UnitOfWork = Current ?? bind.OrginalUow;
if (bind.DbContext != null) bind.DbContext.UnitOfWork = Current ?? bind.OrginalUow;
}
}
/// <summary>
/// 创建工作单元
/// </summary>
/// <param name="propagation">事务传播方式</param>
/// <param name="isolationLevel">事务隔离级别</param>
/// <returns></returns>
public IUnitOfWork Begin(Propagation propagation = Propagation.Required, IsolationLevel? isolationLevel = null)
{
switch (propagation)
{
case Propagation.Required: return FindedUowCreateVirtual() ?? CreateUow(isolationLevel);
case Propagation.Supports: return FindedUowCreateVirtual() ?? CreateUowNothing(_allUows.LastOrDefault()?.IsNotSupported ?? false);
case Propagation.Mandatory: return FindedUowCreateVirtual() ?? throw new Exception(DbContextErrorStrings.Propagation_Mandatory);
case Propagation.NotSupported: return CreateUowNothing(true);
case Propagation.Never:
var isNotSupported = _allUows.LastOrDefault()?.IsNotSupported ?? false;
if (isNotSupported == false)
{
for (var a = _rawUows.Count - 1; a >= 0; a--)
if (_rawUows[a].Uow.GetOrBeginTransaction(false) != null)
throw new Exception(DbContextErrorStrings.Propagation_Never);
}
return CreateUowNothing(isNotSupported);
case Propagation.Nested: return CreateUow(isolationLevel);
default: throw new NotImplementedException();
}
}
IUnitOfWork FindedUowCreateVirtual()
{
var isNotSupported = _allUows.LastOrDefault()?.IsNotSupported ?? false;
if (isNotSupported == false)
{
for (var a = _rawUows.Count - 1; a >= 0; a--)
if (_rawUows[a].Uow.GetOrBeginTransaction(false) != null)
{
var uow = new UnitOfWorkVirtual(_rawUows[a].Uow);
var uowInfo = new UowInfo(uow, UowInfo.UowType.Virtual, isNotSupported);
uow.OnDispose = () => _allUows.Remove(uowInfo);
_allUows.Add(uowInfo);
SetAllBindsUow();
return uow;
}
}
return null;
}
IUnitOfWork CreateUowNothing(bool isNotSupported)
{
var uow = new UnitOfWorkNothing(Orm);
var uowInfo = new UowInfo(uow, UowInfo.UowType.Nothing, isNotSupported);
uow.OnDispose = () => _allUows.Remove(uowInfo);
_allUows.Add(uowInfo);
SetAllBindsUow();
return uow;
}
IUnitOfWork CreateUow(IsolationLevel? isolationLevel)
{
var uow = new UnitOfWorkOrginal(new UnitOfWork(OrmOriginal));
var uowInfo = new UowInfo(uow, UowInfo.UowType.Orginal, false);
if (isolationLevel != null) uow.IsolationLevel = isolationLevel.Value;
try { uow.GetOrBeginTransaction(); }
catch { uow.Dispose(); throw; }
uow.OnDispose = () =>
{
_rawUows.Remove(uowInfo);
_allUows.Remove(uowInfo);
SetAllBindsUow();
};
_rawUows.Add(uowInfo);
_allUows.Add(uowInfo);
SetAllBindsUow();
return uow;
}
| UnitOfWorkManager |
csharp | dotnet__maui | src/Controls/src/Core/Platform/Windows/Extensions/TextBlockExtensions.cs | {
"start": 391,
"end": 3986
} | internal static class ____
{
public static void UpdateLineBreakMode(this TextBlock textBlock, Label label) =>
textBlock.SetLineBreakMode(label.LineBreakMode, label.MaxLines);
public static void UpdateLineBreakMode(this TextBlock textBlock, LineBreakMode lineBreakMode)
{
textBlock.SetLineBreakMode(lineBreakMode, null);
}
static void DetermineTruncatedTextWrapping(TextBlock textBlock) =>
textBlock.TextWrapping = textBlock.MaxLines > 1 ? TextWrapping.Wrap : TextWrapping.NoWrap;
public static void UpdateText(this TextBlock platformControl, Label label)
{
string text = TextTransformUtilities.GetTransformedText(label.Text, label.TextTransform);
switch (label.TextType)
{
case TextType.Html:
platformControl.UpdateTextHtml(label, text);
break;
default:
if (label.FormattedText != null)
{
platformControl.UpdateInlines(label);
}
else
{
if (!label.IsConnectingHandler() && platformControl.TextHighlighters.Count > 0)
{
platformControl.TextHighlighters.Clear();
}
platformControl.Text = text;
}
break;
}
}
public static double FindDefaultLineHeight(this TextBlock control, Inline inline)
{
control.Inlines.Add(inline);
control.Measure(new WSize(double.PositiveInfinity, double.PositiveInfinity));
var height = control.DesiredSize.Height;
control.Inlines.Remove(inline);
return height;
}
public static void UpdateMaxLines(this TextBlock platformControl, Label label)
{
// Linebreak mode also handles setting MaxLines
platformControl.SetLineBreakMode(label.LineBreakMode, label.MaxLines);
}
public static void UpdateDetectReadingOrderFromContent(this TextBlock platformControl, Label label)
{
if (label.IsSet(Specifics.DetectReadingOrderFromContentProperty))
{
platformControl.SetTextReadingOrder(label.OnThisPlatform().GetDetectReadingOrderFromContent());
}
}
internal static void SetLineBreakMode(this TextBlock textBlock, LineBreakMode lineBreakMode, int? maxLines = null)
{
if (maxLines.HasValue && maxLines >= 0)
{
textBlock.MaxLines = maxLines.Value;
}
else
{
textBlock.MaxLines = 0;
}
switch (lineBreakMode)
{
case LineBreakMode.NoWrap:
textBlock.TextTrimming = TextTrimming.Clip;
textBlock.TextWrapping = TextWrapping.NoWrap;
break;
case LineBreakMode.WordWrap:
textBlock.TextTrimming = TextTrimming.None;
textBlock.TextWrapping = TextWrapping.Wrap;
break;
case LineBreakMode.CharacterWrap:
textBlock.TextTrimming = TextTrimming.WordEllipsis;
textBlock.TextWrapping = TextWrapping.Wrap;
break;
case LineBreakMode.HeadTruncation:
// TODO: This truncates at the end.
textBlock.TextTrimming = TextTrimming.WordEllipsis;
DetermineTruncatedTextWrapping(textBlock);
break;
case LineBreakMode.TailTruncation:
textBlock.TextTrimming = TextTrimming.CharacterEllipsis;
DetermineTruncatedTextWrapping(textBlock);
break;
case LineBreakMode.MiddleTruncation:
// TODO: This truncates at the end.
textBlock.TextTrimming = TextTrimming.WordEllipsis;
DetermineTruncatedTextWrapping(textBlock);
break;
default:
throw new ArgumentOutOfRangeException();
}
}
internal static void SetTextReadingOrder(this TextBlock platformControl, bool detectReadingOrderFromContent) =>
platformControl.TextReadingOrder = detectReadingOrderFromContent
? TextReadingOrder.DetectFromContent
: TextReadingOrder.UseFlowDirection;
}
} | TextBlockExtensions |
csharp | dotnet__aspnetcore | src/Framework/AspNetCoreAnalyzers/test/RouteEmbeddedLanguage/RouteParameterUnusedParameterFixerTest.cs | {
"start": 12300,
"end": 13004
} | class ____
{
static void Main()
{
EndpointRouteBuilderExtensions.MapGet(null, @""{id?}"", (string? id) => ""test"");
}
}
";
var expectedDiagnostics = new[]
{
new DiagnosticResult(DiagnosticDescriptors.RoutePatternUnusedParameter).WithArguments("id").WithLocation(0)
};
// Act & Assert
await VerifyCS.VerifyCodeFixAsync(source, expectedDiagnostics, fixedSource, expectedIterations: 1);
}
[Fact]
public async Task MapGet_UnusedParameter_IntAndDecimalPolicy_AddStringToLambda()
{
// Arrange
var source = @"
using System;
using System.Diagnostics.CodeAnalysis;
using Microsoft.AspNetCore.Builder;
| Program |
csharp | dotnet__aspire | src/Aspire.Dashboard/Components/Dialogs/SettingsDialog.razor.cs | {
"start": 433,
"end": 3780
} | public partial class ____ : IDialogContentComponent, IDisposable
{
private string? _currentSetting;
private List<CultureInfo> _languageOptions = null!;
private CultureInfo? _selectedUiCulture;
private IDisposable? _themeChangedSubscription;
[Inject]
public required ThemeManager ThemeManager { get; init; }
[Inject]
public required TelemetryRepository TelemetryRepository { get; init; }
[Inject]
public required NavigationManager NavigationManager { get; init; }
[Inject]
public required ConsoleLogsManager ConsoleLogsManager { get; init; }
[Inject]
public required BrowserTimeProvider TimeProvider { get; init; }
[Inject]
public required DashboardTelemetryService TelemetryService { get; init; }
protected override void OnInitialized()
{
_languageOptions = GlobalizationHelpers.OrderedLocalizedCultures;
_selectedUiCulture = GlobalizationHelpers.TryGetKnownParentCulture(CultureInfo.CurrentUICulture, out var matchedCulture)
? matchedCulture :
// Otherwise, Blazor has fallen back to a supported language
CultureInfo.CurrentUICulture;
_currentSetting = ThemeManager.SelectedTheme ?? ThemeManager.ThemeSettingSystem;
// Handle value being changed in a different browser window.
_themeChangedSubscription = ThemeManager.OnThemeChanged(async () =>
{
var newValue = ThemeManager.SelectedTheme!;
if (_currentSetting != newValue)
{
_currentSetting = newValue;
await InvokeAsync(StateHasChanged);
}
});
}
private async Task ThemeChangedAsync()
{
// The field is being transiently set to null when the value changes. Maybe a bug in FluentUI?
// This should never be set to null by the dashboard so we can ignore null values.
if (_currentSetting != null)
{
// The theme isn't changed here. Instead, the MainLayout subscribes to the change event
// and applies the new theme to the browser window.
await ThemeManager.RaiseThemeChangedAsync(_currentSetting);
}
}
private void OnLanguageChanged()
{
if (_selectedUiCulture is null || StringComparers.CultureName.Equals(CultureInfo.CurrentUICulture.Name, _selectedUiCulture.Name))
{
return;
}
var uri = new Uri(NavigationManager.Uri)
.GetComponents(UriComponents.PathAndQuery, UriFormat.Unescaped);
// A cookie (CookieRequestCultureProvider.DefaultCookieName) must be set and the page reloaded to use the new culture set by the localization middleware.
NavigationManager.NavigateTo(
DashboardUrls.SetLanguageUrl(_selectedUiCulture.Name, uri),
forceLoad: true);
}
private static void ValueChanged(string? value)
{
// Do nothing. Required for FluentUI Blazor to trigger SelectedOptionChanged.
}
private async Task ClearAllSignals()
{
TelemetryRepository.ClearAllSignals();
await ConsoleLogsManager.UpdateFiltersAsync(new ConsoleLogsFilters { FilterAllLogsDate = TimeProvider.GetUtcNow().UtcDateTime });
}
public void Dispose()
{
_themeChangedSubscription?.Dispose();
}
}
| SettingsDialog |
csharp | CommunityToolkit__WindowsCommunityToolkit | Microsoft.Toolkit.Uwp.UI.Controls.Layout/BladeView/BladeItemAutomationPeer.cs | {
"start": 2202,
"end": 4492
} | class ____
/// </returns>
protected override string GetNameCore()
{
string name = AutomationProperties.GetName(this.OwnerBladeItem);
if (!string.IsNullOrEmpty(name))
{
return name;
}
name = this.OwnerBladeItem.Name;
if (!string.IsNullOrEmpty(name))
{
return name;
}
name = this.OwnerBladeItem.Header?.ToString();
if (!string.IsNullOrEmpty(name))
{
return name;
}
TextBlock textBlock = this.OwnerBladeItem.FindDescendant<TextBlock>();
if (textBlock != null)
{
return textBlock.Text;
}
name = base.GetNameCore();
if (!string.IsNullOrEmpty(name))
{
return name;
}
return string.Empty;
}
/// <summary>
/// Returns the size of the set where the element that is associated with the automation peer is located.
/// </summary>
/// <returns>
/// The size of the set.
/// </returns>
protected override int GetSizeOfSetCore()
{
int sizeOfSet = base.GetSizeOfSetCore();
if (sizeOfSet != -1)
{
return sizeOfSet;
}
BladeItem owner = this.OwnerBladeItem;
BladeView parent = owner.ParentBladeView;
sizeOfSet = parent.Items.Count;
return sizeOfSet;
}
/// <summary>
/// Returns the ordinal position in the set for the element that is associated with the automation peer.
/// </summary>
/// <returns>
/// The ordinal position in the set.
/// </returns>
protected override int GetPositionInSetCore()
{
int positionInSet = base.GetPositionInSetCore();
if (positionInSet != -1)
{
return positionInSet;
}
BladeItem owner = this.OwnerBladeItem;
BladeView parent = owner.ParentBladeView;
positionInSet = parent.IndexFromContainer(owner);
return positionInSet;
}
}
} | name |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.